@exaudeus/workrail 3.31.1 → 3.32.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +3 -0
- package/dist/console/assets/{index-6H9DeFxj.js → index-Cb_LO718.js} +1 -1
- package/dist/console/index.html +1 -1
- package/dist/daemon/agent-loop.js +14 -1
- package/dist/daemon/daemon-events.d.ts +57 -0
- package/dist/daemon/daemon-events.js +56 -0
- package/dist/daemon/workflow-runner.d.ts +5 -3
- package/dist/daemon/workflow-runner.js +42 -10
- package/dist/manifest.json +33 -25
- package/dist/trigger/delivery-client.d.ts +2 -1
- package/dist/trigger/delivery-client.js +4 -1
- package/dist/trigger/trigger-listener.d.ts +2 -0
- package/dist/trigger/trigger-listener.js +6 -1
- package/dist/trigger/trigger-router.d.ts +4 -2
- package/dist/trigger/trigger-router.js +7 -4
- package/docs/ideas/backlog.md +104 -0
- package/package.json +1 -1
package/dist/daemon/daemon-events.js
ADDED

@@ -0,0 +1,56 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+        desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DaemonEventEmitter = void 0;
+const fs = __importStar(require("node:fs/promises"));
+const path = __importStar(require("node:path"));
+const os = __importStar(require("node:os"));
+class DaemonEventEmitter {
+    constructor(dirOverride) {
+        this._dir = dirOverride ?? path.join(os.homedir(), '.workrail', 'events', 'daemon');
+    }
+    emit(event) {
+        void this._append(event).catch(() => {
+        });
+    }
+    async _append(event) {
+        const date = new Date().toISOString().slice(0, 10);
+        const filePath = path.join(this._dir, `${date}.jsonl`);
+        await fs.mkdir(this._dir, { recursive: true });
+        const line = JSON.stringify({ ...event, ts: Date.now() }) + '\n';
+        await fs.appendFile(filePath, line, 'utf8');
+    }
+}
+exports.DaemonEventEmitter = DaemonEventEmitter;
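For illustration, a minimal sketch of exercising the emitter compiled above; the constructor override and fire-and-forget `emit()` behavior come straight from the source, while the import path is an assumption about where the dist files sit in a consumer project:

```ts
// Sketch only -- exercises the DaemonEventEmitter compiled above. The import
// path is an assumption; adjust for your checkout.
import { DaemonEventEmitter } from './dist/daemon/daemon-events.js';

// Point the stream at a scratch directory instead of the default
// ~/.workrail/events/daemon.
const emitter = new DaemonEventEmitter('/tmp/workrail-events');

// emit() is fire-and-forget: _append() failures are swallowed, so telemetry
// can never block or crash the caller.
emitter.emit({
  kind: 'session_started',
  sessionId: 'sess_demo',
  workflowId: 'wf_demo',
  workspacePath: '/tmp',
});
// Appends {"kind":"session_started",...,"ts":<epoch ms>} to
// /tmp/workrail-events/<YYYY-MM-DD>.jsonl.
```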
package/dist/daemon/workflow-runner.d.ts
CHANGED

@@ -4,6 +4,7 @@ import type { V2ToolContext } from '../mcp/types.js';
 import { executeContinueWorkflow } from '../mcp/handlers/v2-execution/index.js';
 import type { DaemonRegistry } from '../v2/infra/in-memory/daemon-registry/index.js';
 import type { V2StartWorkflowOutputSchema } from '../mcp/output-schemas.js';
+import type { DaemonEventEmitter } from './daemon-events.js';
 export declare const DAEMON_SESSIONS_DIR: string;
 export { DAEMON_SOUL_DEFAULT, DAEMON_SOUL_TEMPLATE } from './soul-template.js';
 export interface WorkflowTrigger {
@@ -31,6 +32,7 @@ export interface WorkflowRunError {
     readonly workflowId: string;
     readonly message: string;
     readonly stopReason: string;
+    readonly lastStepNotes?: string;
 }
 export interface WorkflowRunTimeout {
     readonly _tag: 'timeout';
@@ -58,8 +60,8 @@ export declare function readDaemonSessionState(sessionId: string): Promise<{
 } | null>;
 export declare function readAllDaemonSessions(sessionsDir?: string): Promise<OrphanedSession[]>;
 export declare function runStartupRecovery(sessionsDir?: string): Promise<void>;
-export declare function makeContinueWorkflowTool(sessionId: string, ctx: V2ToolContext, onAdvance: (nextStepText: string, continueToken: string) => void, onComplete: (notes: string | undefined) => void, schemas: Record<string, any>, _executeContinueWorkflowFn?: typeof executeContinueWorkflow): AgentTool;
-export declare function makeBashTool(workspacePath: string, schemas: Record<string, any>): AgentTool;
+export declare function makeContinueWorkflowTool(sessionId: string, ctx: V2ToolContext, onAdvance: (nextStepText: string, continueToken: string) => void, onComplete: (notes: string | undefined) => void, schemas: Record<string, any>, _executeContinueWorkflowFn?: typeof executeContinueWorkflow, emitter?: DaemonEventEmitter): AgentTool;
+export declare function makeBashTool(workspacePath: string, schemas: Record<string, any>, sessionId?: string, emitter?: DaemonEventEmitter): AgentTool;
 export declare function buildSessionRecap(notes: readonly string[]): string;
 export declare function buildSystemPrompt(trigger: WorkflowTrigger, sessionState: string, soulContent: string, workspaceContext: string | null): string;
-export declare function runWorkflow(trigger: WorkflowTrigger, ctx: V2ToolContext, apiKey: string, daemonRegistry?: DaemonRegistry): Promise<WorkflowRunResult>;
+export declare function runWorkflow(trigger: WorkflowTrigger, ctx: V2ToolContext, apiKey: string, daemonRegistry?: DaemonRegistry, emitter?: DaemonEventEmitter): Promise<WorkflowRunResult>;
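The new `daemon-events.d.ts` (+57 lines) is not included in this diff view. Reconstructing from the `emit()` call sites visible below, the event union presumably looks roughly like this sketch; field names beyond those literally passed at the call sites are guesses:

```ts
// Reconstructed sketch, not the shipped daemon-events.d.ts. Every variant
// below corresponds to an emit() call visible elsewhere in this diff.
type DaemonEvent =
  | { kind: 'daemon_started'; port: number; workspacePath?: string }
  | { kind: 'trigger_fired'; triggerId: string; workflowId: string }
  | { kind: 'session_queued'; triggerId: string; workflowId: string }
  | { kind: 'session_started'; sessionId: string; workflowId: string; workspacePath: string }
  | { kind: 'step_advanced'; sessionId: string }
  | { kind: 'tool_called'; sessionId: string; toolName: string; summary: string }
  | { kind: 'tool_error'; sessionId: string; toolName: string; error: string }
  | { kind: 'session_completed'; sessionId: string; workflowId: string; outcome: 'success' | 'timeout' | 'error'; detail: string }
  | { kind: 'delivery_attempted'; callbackUrl: string; outcome: 'success' | 'http_error' | 'network_error'; statusCode?: number };
```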
package/dist/daemon/workflow-runner.js
CHANGED

@@ -338,7 +338,7 @@ function getSchemas() {
     };
     return _schemas;
 }
-function makeContinueWorkflowTool(sessionId, ctx, onAdvance, onComplete, schemas, _executeContinueWorkflowFn = index_js_1.executeContinueWorkflow) {
+function makeContinueWorkflowTool(sessionId, ctx, onAdvance, onComplete, schemas, _executeContinueWorkflowFn = index_js_1.executeContinueWorkflow, emitter) {
     return {
         name: 'continue_workflow',
         description: 'Advance the WorkRail workflow to the next step. Call this after completing all work ' +
@@ -347,6 +347,7 @@ function makeContinueWorkflowTool(sessionId, ctx, onAdvance, onComplete, schemas
         label: 'Continue Workflow',
         execute: async (_toolCallId, params) => {
             console.log(`[WorkflowRunner] Tool: continue_workflow sessionId=${sessionId}`);
+            emitter?.emit({ kind: 'tool_called', sessionId, toolName: 'continue_workflow', summary: params.intent ?? 'advance' });
             const result = await _executeContinueWorkflowFn({
                 continueToken: params.continueToken,
                 intent: (params.intent ?? 'advance'),
@@ -425,7 +426,7 @@ function makeContinueWorkflowTool(sessionId, ctx, onAdvance, onComplete, schemas
         },
     };
 }
-function makeBashTool(workspacePath, schemas) {
+function makeBashTool(workspacePath, schemas, sessionId, emitter) {
     return {
         name: 'Bash',
         description: 'Execute a shell command. Throws on failure (non-zero exit with stderr, or exit code 2+). ' +
@@ -436,6 +437,8 @@ function makeBashTool(workspacePath, schemas) {
         label: 'Bash',
         execute: async (_toolCallId, params) => {
             console.log(`[WorkflowRunner] Tool: bash "${String(params.command).slice(0, 80)}"`);
+            if (sessionId)
+                emitter?.emit({ kind: 'tool_called', sessionId, toolName: 'Bash', summary: String(params.command).slice(0, 80) });
             const cwd = params.cwd ?? workspacePath;
             try {
                 const { stdout, stderr } = await execAsync(params.command, {
@@ -470,13 +473,15 @@ function makeBashTool(workspacePath, schemas) {
         },
     };
 }
-function makeReadTool(schemas) {
+function makeReadTool(schemas, sessionId, emitter) {
     return {
         name: 'Read',
         description: 'Read the contents of a file at the given absolute path.',
         inputSchema: schemas['ReadParams'],
         label: 'Read',
         execute: async (_toolCallId, params) => {
+            if (sessionId)
+                emitter?.emit({ kind: 'tool_called', sessionId, toolName: 'Read', summary: String(params.filePath).slice(0, 80) });
             const content = await fs.readFile(params.filePath, 'utf8');
             return {
                 content: [{ type: 'text', text: content }],
@@ -485,13 +490,15 @@ function makeReadTool(schemas) {
         },
     };
 }
-function makeWriteTool(schemas) {
+function makeWriteTool(schemas, sessionId, emitter) {
     return {
         name: 'Write',
         description: 'Write content to a file at the given absolute path. Creates parent directories if needed.',
         inputSchema: schemas['WriteParams'],
         label: 'Write',
         execute: async (_toolCallId, params) => {
+            if (sessionId)
+                emitter?.emit({ kind: 'tool_called', sessionId, toolName: 'Write', summary: String(params.filePath).slice(0, 80) });
             await fs.mkdir(path.dirname(params.filePath), { recursive: true });
             await fs.writeFile(params.filePath, params.content, 'utf8');
             return {
@@ -554,9 +561,15 @@ function buildUserMessage(text) {
         timestamp: Date.now(),
     };
 }
-async function runWorkflow(trigger, ctx, apiKey, daemonRegistry) {
+async function runWorkflow(trigger, ctx, apiKey, daemonRegistry, emitter) {
     const sessionId = (0, node_crypto_1.randomUUID)();
     console.log(`[WorkflowRunner] Session started: sessionId=${sessionId} workflowId=${trigger.workflowId}`);
+    emitter?.emit({
+        kind: 'session_started',
+        sessionId,
+        workflowId: trigger.workflowId,
+        workspacePath: trigger.workspacePath,
+    });
     daemonRegistry?.register(sessionId, trigger.workflowId);
     let agentClient;
     let modelId;
@@ -592,6 +605,7 @@ async function runWorkflow(trigger, ctx, apiKey, daemonRegistry) {
     const onAdvance = (stepText, _continueToken) => {
         pendingSteerText = stepText;
         daemonRegistry?.heartbeat(sessionId);
+        emitter?.emit({ kind: 'step_advanced', sessionId });
     };
     const onComplete = (notes) => {
         isComplete = true;
@@ -621,15 +635,16 @@ async function runWorkflow(trigger, ctx, apiKey, daemonRegistry) {
     }
     if (firstStep.isComplete) {
         await fs.unlink(path.join(exports.DAEMON_SESSIONS_DIR, `${sessionId}.json`)).catch(() => { });
+        emitter?.emit({ kind: 'session_completed', sessionId, workflowId: trigger.workflowId, outcome: 'success', detail: 'stop' });
         daemonRegistry?.unregister(sessionId, 'completed');
         return { _tag: 'success', workflowId: trigger.workflowId, stopReason: 'stop' };
     }
     const schemas = getSchemas();
     const tools = [
-        makeContinueWorkflowTool(sessionId, ctx, onAdvance, onComplete, schemas),
-        makeBashTool(trigger.workspacePath, schemas),
-        makeReadTool(schemas),
-        makeWriteTool(schemas),
+        makeContinueWorkflowTool(sessionId, ctx, onAdvance, onComplete, schemas, index_js_1.executeContinueWorkflow, emitter),
+        makeBashTool(trigger.workspacePath, schemas, sessionId, emitter),
+        makeReadTool(schemas, sessionId, emitter),
+        makeWriteTool(schemas, sessionId, emitter),
     ];
     const [soulContent, workspaceContext, sessionNotes] = await Promise.all([
         loadDaemonSoul(trigger.soulFile),
@@ -658,6 +673,12 @@ async function runWorkflow(trigger, ctx, apiKey, daemonRegistry) {
     const unsubscribe = agent.subscribe(async (event) => {
         if (event.type !== 'turn_end')
             return;
+        for (const toolResult of event.toolResults) {
+            if (toolResult.isError) {
+                const errorText = toolResult.result?.content[0]?.text ?? 'tool error';
+                emitter?.emit({ kind: 'tool_error', sessionId, toolName: toolResult.toolName, error: errorText.slice(0, 200) });
+            }
+        }
         turnCount++;
         if (maxTurns > 0 && turnCount >= maxTurns && timeoutReason === null) {
             timeoutReason = 'max_turns';
@@ -711,6 +732,7 @@ async function runWorkflow(trigger, ctx, apiKey, daemonRegistry) {
         console.log(`[WorkflowRunner] Agent loop ended: sessionId=${sessionId} stopReason=${stopReason}${errorMessage ? ` error=${errorMessage.slice(0, 120)}` : ''}`);
     }
     if (timeoutReason !== null) {
+        emitter?.emit({ kind: 'session_completed', sessionId, workflowId: trigger.workflowId, outcome: 'timeout', detail: timeoutReason });
        daemonRegistry?.unregister(sessionId, 'failed');
        const limitDescription = timeoutReason === 'wall_clock'
            ? `${trigger.agentConfig?.maxSessionMinutes ?? DEFAULT_SESSION_TIMEOUT_MINUTES} minutes`
@@ -724,16 +746,26 @@ async function runWorkflow(trigger, ctx, apiKey, daemonRegistry) {
         };
     }
     if (stopReason === 'error' || errorMessage) {
+        const errMsg = errorMessage ?? 'Agent stopped with error reason';
+        emitter?.emit({ kind: 'session_completed', sessionId, workflowId: trigger.workflowId, outcome: 'error', detail: errMsg.slice(0, 200) });
         daemonRegistry?.unregister(sessionId, 'failed');
+        const stuckMarker = `\n\nWORKTRAIN_STUCK: ${JSON.stringify({
+            reason: 'session_error',
+            error: errMsg.slice(0, 500),
+            workflowId: trigger.workflowId,
+            sessionId,
+        })}`;
         return {
             _tag: 'error',
             workflowId: trigger.workflowId,
-            message:
+            message: errMsg,
             stopReason,
+            lastStepNotes: stuckMarker,
         };
     }
     await fs.unlink(path.join(exports.DAEMON_SESSIONS_DIR, `${sessionId}.json`)).catch(() => {
     });
+    emitter?.emit({ kind: 'session_completed', sessionId, workflowId: trigger.workflowId, outcome: 'success', detail: stopReason });
     daemonRegistry?.unregister(sessionId, 'completed');
     return {
         _tag: 'success',
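The new `lastStepNotes` field carries a `WORKTRAIN_STUCK` marker on error results. A coordinator consuming a `WorkflowRunError` could recover the payload with a sketch like this; the marker format is exactly what the code above serializes, while the helper itself is hypothetical:

```ts
// Sketch: extract the WORKTRAIN_STUCK payload that runWorkflow() now embeds
// in lastStepNotes on error results. Field names match the JSON.stringify
// call in the diff above.
interface StuckPayload {
  reason: string;     // e.g. 'session_error'
  error: string;      // first 500 chars of the error message
  workflowId: string;
  sessionId: string;
}

function parseStuckMarker(lastStepNotes: string | undefined): StuckPayload | null {
  if (!lastStepNotes) return null;
  // JSON.stringify emits a single line, so a non-greedy brace match suffices.
  const match = lastStepNotes.match(/WORKTRAIN_STUCK: (\{.*\})/);
  if (!match) return null;
  try {
    return JSON.parse(match[1]) as StuckPayload;
  } catch {
    return null;
  }
}
```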
package/dist/manifest.json
CHANGED

@@ -246,8 +246,8 @@
         "bytes": 31
     },
     "cli.js": {
-        "sha256": "
-        "bytes":
+        "sha256": "b7012b4643571bf67a4601ecc4cb7457405ac613750253583f4a7be95709bc7b",
+        "bytes": 13483
     },
     "cli/commands/cleanup.d.ts": {
         "sha256": "efe1f9e2ecd58e92007ed38b9581a3852c2babe4b3f2a97237dccd878eebe7ec",
@@ -425,16 +425,16 @@
         "sha256": "5fe866e54f796975dec5d8ba9983aefd86074db212d3fccd64eed04bc9f0b3da",
         "bytes": 8011
     },
-    "console/assets/index-6H9DeFxj.js": {
-        "sha256": "bb75a622414fc3783148f546860248ad3e7cde1056d0e4a8a1abf2f143947fef",
-        "bytes": 754653
-    },
     "console/assets/index-8dh0Psu-.css": {
         "sha256": "cf9d09641f1c31fffe6c7835b30bbbad52572befec1acab7fb9a0c188431af36",
         "bytes": 60355
     },
+    "console/assets/index-Cb_LO718.js": {
+        "sha256": "5b8a9e605a533ff62465a53cdba560471b5e244d6823df40e726a2a31c63482a",
+        "bytes": 754653
+    },
     "console/index.html": {
-        "sha256": "
+        "sha256": "bbe6f516754067df4f17051732f6dad173f16863c01d9387a116018724ae13b4",
         "bytes": 417
     },
     "core/error-handler.d.ts": {
@@ -450,8 +450,16 @@
         "bytes": 2864
     },
     "daemon/agent-loop.js": {
-        "sha256": "
-        "bytes":
+        "sha256": "713c68b2c50ba3e8e2698a6f444f2793e35d6ab10a9a8b3f375d121ce923dc8d",
+        "bytes": 8137
+    },
+    "daemon/daemon-events.d.ts": {
+        "sha256": "084144fec2c235d0c33207f074f966a3b961c3c1950c812addd2d5fefd8bd935",
+        "bytes": 1866
+    },
+    "daemon/daemon-events.js": {
+        "sha256": "b6841eef4634bb266faf81961c1e387b535dd64a74d58582f3f2bad8c3469d95",
+        "bytes": 2252
     },
     "daemon/pi-mono-loader.d.ts": {
         "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
@@ -470,12 +478,12 @@
         "bytes": 1009
     },
     "daemon/workflow-runner.d.ts": {
-        "sha256": "
-        "bytes":
+        "sha256": "ef6669d3274c211e59ac2467a52694f1a8594d001eadd8551850195e4cabd18a",
+        "bytes": 3387
     },
     "daemon/workflow-runner.js": {
-        "sha256": "
-        "bytes":
+        "sha256": "227f1299d82bed696b774caeb5da597695646dab8e52c1f6c91da435360a6370",
+        "bytes": 34930
     },
     "di/container.d.ts": {
         "sha256": "003bb7fb7478d627524b9b1e76bd0a963a243794a687ff233b96dc0e33a06d9f",
@@ -1518,12 +1526,12 @@
         "bytes": 8026
     },
     "trigger/delivery-client.d.ts": {
-        "sha256": "
-        "bytes":
+        "sha256": "0cb2be24b854cb31e3d2fe7eeaba6032de7a9b2a5290c8bc886df94faf5306f7",
+        "bytes": 533
     },
     "trigger/delivery-client.js": {
-        "sha256": "
-        "bytes":
+        "sha256": "da358ced4e99c327493b6d3ca975a623aca21f72e68787a092b2760601801c99",
+        "bytes": 1269
     },
     "trigger/index.d.ts": {
         "sha256": "ac0df6f84ff1ca493f28870b4303e5d68c3bfb2e8109678a10c1cfe11c28c802",
@@ -1534,20 +1542,20 @@
         "bytes": 749
     },
     "trigger/trigger-listener.d.ts": {
-        "sha256": "
-        "bytes":
+        "sha256": "110f4c0d3f118355e00ec16245edaf7d7721cbd3dace61d60594be3eb21c75f0",
+        "bytes": 1370
     },
     "trigger/trigger-listener.js": {
-        "sha256": "
-        "bytes":
+        "sha256": "4aaac56d569d658c1a6af14a0f9fd3160e305fd9e80e7538d99371563ad17583",
+        "bytes": 7913
     },
     "trigger/trigger-router.d.ts": {
-        "sha256": "
-        "bytes":
+        "sha256": "c60fa099ea236255d2a51799f3f8c550af1990d167565a976ecb9ec2eb42c6ae",
+        "bytes": 1855
     },
     "trigger/trigger-router.js": {
-        "sha256": "
-        "bytes":
+        "sha256": "16241f6376d2b6718ee1df4d1873270fd3c0cac69bdeaf2cca9fdaad2ca2bd33",
+        "bytes": 15336
     },
     "trigger/trigger-store.d.ts": {
         "sha256": "7afb05127d55bc3757a550dd15d4b797766b3fff29d1bfe76b303764b93322e7",
package/dist/trigger/delivery-client.d.ts
CHANGED

@@ -1,5 +1,6 @@
 import type { Result } from '../runtime/result.js';
 import type { WorkflowRunResult } from '../daemon/workflow-runner.js';
+import type { DaemonEventEmitter } from '../daemon/daemon-events.js';
 export type DeliveryError = {
     readonly kind: 'http_error';
     readonly status: number;
@@ -8,4 +9,4 @@ export type DeliveryError = {
     readonly kind: 'network_error';
     readonly message: string;
 };
-export declare function post(callbackUrl: string, result: WorkflowRunResult): Promise<Result<void, DeliveryError>>;
+export declare function post(callbackUrl: string, result: WorkflowRunResult, emitter?: DaemonEventEmitter): Promise<Result<void, DeliveryError>>;
package/dist/trigger/delivery-client.js
CHANGED

@@ -2,7 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.post = post;
 const result_js_1 = require("../runtime/result.js");
-async function post(callbackUrl, result) {
+async function post(callbackUrl, result, emitter) {
     const controller = new AbortController();
     const timer = setTimeout(() => controller.abort(), 30000);
     try {
@@ -14,11 +14,14 @@ async function post(callbackUrl, result) {
         });
         if (!res.ok) {
             const body = await res.text().catch(() => '');
+            emitter?.emit({ kind: 'delivery_attempted', callbackUrl, outcome: 'http_error', statusCode: res.status });
             return (0, result_js_1.err)({ kind: 'http_error', status: res.status, body });
         }
+        emitter?.emit({ kind: 'delivery_attempted', callbackUrl, outcome: 'success', statusCode: res.status });
         return (0, result_js_1.ok)(undefined);
     }
     catch (e) {
+        emitter?.emit({ kind: 'delivery_attempted', callbackUrl, outcome: 'network_error' });
         return (0, result_js_1.err)({ kind: 'network_error', message: String(e) });
     }
     finally {
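A hedged sketch of calling the instrumented `post()` directly; the import paths and the minimal success-result literal are assumptions based on the shapes visible elsewhere in this diff:

```ts
import { post } from './dist/trigger/delivery-client.js';
import { DaemonEventEmitter } from './dist/daemon/daemon-events.js';
import type { WorkflowRunResult } from './dist/daemon/workflow-runner.js';

// Sketch: deliver a finished run and record the attempt in the daemon
// event stream. The success literal mirrors the early-return shape in
// workflow-runner.js above.
const emitter = new DaemonEventEmitter();
const result: WorkflowRunResult = { _tag: 'success', workflowId: 'wf_demo', stopReason: 'stop' };

const delivered = await post('https://example.test/callback', result, emitter);
if (delivered.kind === 'err') {
  // http_error or network_error; a delivery_attempted event was emitted either way.
  console.error('[Delivery] failed:', delivered.error);
}
```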
package/dist/trigger/trigger-listener.d.ts
CHANGED

@@ -4,6 +4,7 @@ import type { V2ToolContext } from '../mcp/types.js';
 import type { TriggerStoreError } from './trigger-store.js';
 import { TriggerRouter, type RunWorkflowFn } from './trigger-router.js';
 import type { WorkspaceConfig } from './types.js';
+import type { DaemonEventEmitter } from '../daemon/daemon-events.js';
 export type TriggerListenerError = TriggerStoreError | {
     readonly kind: 'port_conflict';
     readonly port: number;
@@ -24,6 +25,7 @@ export interface StartTriggerListenerOptions {
     readonly env?: Record<string, string | undefined>;
     readonly runWorkflowFn?: RunWorkflowFn;
     readonly workspaces?: Readonly<Record<string, WorkspaceConfig>>;
+    readonly emitter?: DaemonEventEmitter;
 }
 export declare function createTriggerApp(router: TriggerRouter): express.Application;
 export declare function startTriggerListener(ctx: V2ToolContext, options: StartTriggerListenerOptions): Promise<TriggerListenerHandle | null | {
package/dist/trigger/trigger-listener.js
CHANGED

@@ -143,7 +143,7 @@ async function startTriggerListener(ctx, options) {
     const parsed = parseInt(maxConcurrencyRaw ?? '', 10);
     const maxConcurrentSessions = !isNaN(parsed) ? parsed : undefined;
     const runWorkflowFn = options.runWorkflowFn ?? workflow_runner_js_1.runWorkflow;
-    const router = new trigger_router_js_1.TriggerRouter(triggerIndex, ctx, apiKey, runWorkflowFn, undefined, maxConcurrentSessions);
+    const router = new trigger_router_js_1.TriggerRouter(triggerIndex, ctx, apiKey, runWorkflowFn, undefined, maxConcurrentSessions, options.emitter);
     const app = createTriggerApp(router);
     await (0, workflow_runner_js_1.runStartupRecovery)().catch((err) => {
         console.warn('[TriggerListener] Startup recovery encountered an unexpected error:', err instanceof Error ? err.message : String(err));
@@ -164,6 +164,11 @@ async function startTriggerListener(ctx, options) {
         const addr = server.address();
         const actualPort = (addr && typeof addr === 'object') ? addr.port : port;
         console.log(`[TriggerListener] Webhook server listening on port ${actualPort}`);
+        options.emitter?.emit({
+            kind: 'daemon_started',
+            port: actualPort,
+            workspacePath: options.workspacePath,
+        });
         resolve({
             port: actualPort,
             router,
package/dist/trigger/trigger-router.d.ts
CHANGED

@@ -2,6 +2,7 @@ import type { WorkflowTrigger, WorkflowRunResult } from '../daemon/workflow-runner.js';
 import type { V2ToolContext } from '../mcp/types.js';
 import type { TriggerDefinition, WebhookEvent } from './types.js';
 import type { ExecFn } from './delivery-action.js';
+import type { DaemonEventEmitter } from '../daemon/daemon-events.js';
 export type RouteError = {
     readonly kind: 'not_found';
     readonly triggerId: string;
@@ -18,7 +19,7 @@ export type RouteResult = {
     readonly _tag: 'error';
     readonly error: RouteError;
 };
-export type RunWorkflowFn = (trigger: WorkflowTrigger, ctx: V2ToolContext, apiKey: string) => Promise<WorkflowRunResult>;
+export type RunWorkflowFn = (trigger: WorkflowTrigger, ctx: V2ToolContext, apiKey: string, daemonRegistry?: import('../v2/infra/in-memory/daemon-registry/index.js').DaemonRegistry, emitter?: DaemonEventEmitter) => Promise<WorkflowRunResult>;
 export declare function interpolateGoalTemplate(template: string, staticGoal: string, payload: Readonly<Record<string, unknown>>, triggerId: string): string;
 export declare class TriggerRouter {
     private readonly index;
@@ -29,7 +30,8 @@ export declare class TriggerRouter {
     private readonly execFn;
     private readonly semaphore;
     private readonly _maxConcurrentSessions;
-    constructor(index: ReadonlyMap<string, TriggerDefinition>, ctx: V2ToolContext, apiKey: string, runWorkflowFn: RunWorkflowFn, execFn?: ExecFn, maxConcurrentSessions?: number);
+    private readonly emitter;
+    constructor(index: ReadonlyMap<string, TriggerDefinition>, ctx: V2ToolContext, apiKey: string, runWorkflowFn: RunWorkflowFn, execFn?: ExecFn, maxConcurrentSessions?: number, emitter?: DaemonEventEmitter);
     get activeSessions(): number;
     get maxConcurrentSessions(): number;
     route(event: WebhookEvent): RouteResult;
package/dist/trigger/trigger-router.js
CHANGED

@@ -182,13 +182,14 @@ class Semaphore {
 }
 const DEFAULT_MAX_CONCURRENT_SESSIONS = 3;
 class TriggerRouter {
-    constructor(index, ctx, apiKey, runWorkflowFn, execFn, maxConcurrentSessions) {
+    constructor(index, ctx, apiKey, runWorkflowFn, execFn, maxConcurrentSessions, emitter) {
         this.index = index;
         this.ctx = ctx;
         this.apiKey = apiKey;
         this.runWorkflowFn = runWorkflowFn;
         this.queue = new index_js_1.KeyedAsyncQueue();
         this.execFn = execFn ?? execFileAsync;
+        this.emitter = emitter;
         const requested = maxConcurrentSessions ?? DEFAULT_MAX_CONCURRENT_SESSIONS;
         const cap = Number.isNaN(requested) ? DEFAULT_MAX_CONCURRENT_SESSIONS : requested;
         if (cap < 1) {
@@ -243,10 +244,12 @@ class TriggerRouter {
             ...(trigger.agentConfig !== undefined ? { agentConfig: trigger.agentConfig } : {}),
             ...(trigger.soulFile !== undefined ? { soulFile: trigger.soulFile } : {}),
         };
+        this.emitter?.emit({ kind: 'trigger_fired', triggerId: trigger.id, workflowId: trigger.workflowId });
         const queueKey = trigger.concurrencyMode === 'parallel'
             ? `${trigger.id}:${crypto.randomUUID()}`
             : trigger.id;
         void this.queue.enqueue(queueKey, async () => {
+            this.emitter?.emit({ kind: 'session_queued', triggerId: trigger.id, workflowId: trigger.workflowId });
             if (this.semaphore.activeCount >= this._maxConcurrentSessions) {
                 console.warn(`[TriggerRouter] Concurrency limit reached ` +
                     `(${this.semaphore.activeCount}/${this._maxConcurrentSessions} active): ` +
@@ -255,7 +258,7 @@ class TriggerRouter {
             await this.semaphore.acquire();
             let result;
             try {
-                result = await this.runWorkflowFn(workflowTrigger, this.ctx, this.apiKey);
+                result = await this.runWorkflowFn(workflowTrigger, this.ctx, this.apiKey, undefined, this.emitter);
             }
             finally {
                 this.semaphore.release();
@@ -263,7 +266,7 @@ class TriggerRouter {
             const originalTag = result._tag;
             const originalResult = result;
             if (trigger.callbackUrl) {
-                const deliveryResult = await (0, delivery_client_js_1.post)(trigger.callbackUrl, result);
+                const deliveryResult = await (0, delivery_client_js_1.post)(trigger.callbackUrl, result, this.emitter);
                 if (deliveryResult.kind === 'err') {
                     const deliveryError = deliveryResult.error.kind === 'http_error'
                         ? `HTTP ${deliveryResult.error.status}: ${deliveryResult.error.body}`
@@ -312,7 +315,7 @@ class TriggerRouter {
             await this.semaphore.acquire();
             let result;
             try {
-                result = await this.runWorkflowFn(workflowTrigger, this.ctx, this.apiKey);
+                result = await this.runWorkflowFn(workflowTrigger, this.ctx, this.apiKey, undefined, this.emitter);
             }
             finally {
                 this.semaphore.release();
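For tests, a stub matching the widened `RunWorkflowFn` could look like this sketch; the import path is assumed, and the success literal mirrors runWorkflow's own early return:

```ts
import type { RunWorkflowFn } from './dist/trigger/trigger-router.js';

// Sketch: a no-op run function for exercising TriggerRouter's queueing,
// semaphore, and delivery paths without a real agent loop. The router now
// threads its emitter through as the fifth argument.
const stubRunWorkflow: RunWorkflowFn = async (trigger, _ctx, _apiKey, _registry, emitter) => {
  emitter?.emit({
    kind: 'session_started',
    sessionId: 'sess_stub',
    workflowId: trigger.workflowId,
    workspacePath: trigger.workspacePath,
  });
  return { _tag: 'success', workflowId: trigger.workflowId, stopReason: 'stop' };
};
```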
package/docs/ideas/backlog.md
CHANGED

@@ -3925,3 +3925,107 @@ More critically: if a session is restarted by the daemon but then stalls (Bedroc
 3. **Orphaned session cleanup should be user-facing.** `worktrain cleanup` or `worktrain status` should surface orphaned sessions with their age and offer to clear them. Right now they silently accumulate.
 
 4. **Better logging when runWorkflow() swallows errors.** The `void runWorkflow(...)` pattern in `console-routes.ts` and `trigger-router.ts` drops errors silently. Every path that ends in silence (no log, no session advance, no error) should at minimum log `[WorkflowRunner] Session died silently` with the session ID.
+
+---
+
+### Observability and logging as first-class citizens (Apr 17, 2026)
+
+**The principle:** WorkTrain should never be a black box. Every action, decision, failure, and state transition should be traceable after the fact -- by a human, by another agent, or by a coordinator script. Logging and observability are not afterthoughts; they are core infrastructure.
+
+**What "first-class" means:**
+
+1. **Structured, not prose.** Every log line should be machine-parseable. Use consistent prefixes (`[WorkflowRunner]`, `[TriggerRouter]`, `[DaemonConsole]`), consistent key=value pairs, and structured JSON for rich payloads. No freeform strings that require regex to parse.
+
+2. **Levels matter.** INFO for normal operations, WARN for recoverable anomalies, ERROR for failures that need attention. Silence = actively working, not unknown. A session that produces no logs for 5+ minutes should emit a heartbeat.
+
+3. **Every state transition logged.** Session start, step advance, tool call, tool result (including errors), session end (success/timeout/error). No silent gaps. The daemon observability logs (#442) are a start -- extend this everywhere.
+
+4. **Errors always include context.** Not just the message -- which session, which tool, which step, which trigger, how long it had been running, what the last successful action was. Enough to diagnose without re-running.
+
+5. **Correlation IDs.** Every session has a `sessionId`. Every tool call has a `toolCallId`. Log entries should include the relevant ID so you can filter across a full session's history. Today the daemon logs include `sessionId` -- extend this to trigger IDs, workflow IDs, and step IDs.
+
+6. **Log destinations are configurable.** Today: stdout → daemon.log file via redirect. Long-term: structured JSON to a log aggregator (Datadog, CloudWatch, file), separate log files per workspace, log rotation. The daemon should accept a `--log-level` flag and a `--log-format json|human` flag.
+
+7. **The session store IS the audit log.** Every `advance_recorded`, `node_output_appended`, `validation_performed` event is a durable structured record. The session store should be queryable as a post-mortem tool. `worktrain session logs <id>` should reconstruct the full story of what happened.
+
+**Specific gaps to close:**
+
+- `continue_workflow` tool: log the step ID and notes length being submitted, not just "continue_workflow called"
+- `makeBashTool`: log exit code and output length in addition to the command
+- `makeReadTool` / `makeWriteTool`: log file path and bytes
+- `AgentLoop`: log each LLM turn (turn number, stop reason, tool count) -- today nothing is logged between tool calls
+- `TriggerRouter`: log when a session is queued (semaphore at capacity) and when it dequeues
+- `PollingScheduler`: log each poll cycle result (N events found, N new, N dispatched)
+- `DeliveryClient`: log delivery attempt, HTTP status, response time
+- `DaemonConsole`: log when the console HTTP server starts, stops, or fails a request
+
+**The `worktrain logs` command:**
+```bash
+worktrain logs                        # tail daemon.log
+worktrain logs --session sess_abc123  # replay full session from event store
+worktrain logs --trigger test-task    # all sessions for this trigger
+worktrain logs --level error          # only errors across all sources
+worktrain logs --since 1h             # last hour
+worktrain logs --format json          # machine-readable output
+```
+
+**Self-healing dependency:** The automatic gap detection, WORKTRAIN_STUCK routing, and coordinator self-healing patterns all depend on logs being structured and complete. You can't auto-fix what you can't observe. Logging quality is a prerequisite for autonomous operation at scale.
+
+---
+
+### Event sourcing for orchestration: extend the session store to daemon and coordinator events (Apr 17, 2026)
+
+**The decision:** extend the existing WorkRail event store infrastructure to cover orchestration-level events, not build a separate system. The session store is already append-only, crash-safe, content-addressed, and queryable -- rebuilding those properties would be wasteful.
+
+**The model: multiple event streams, same infrastructure**
+
+```
+~/.workrail/events/
+  sessions/     ← already exists (per-session workflow events)
+  daemon/       ← new: lifecycle, triggers, delivery, errors
+  triggers/     ← new: per-trigger poll history and outcomes
+  coordinator/  ← future: coordinator script decisions and routing
+```
+
+Each stream is append-only JSONL with the same segment/manifest pattern as the session store. The `worktrain logs` command queries across streams. Watchdog and coordinator scripts subscribe to streams.
+
+**Daemon event stream: what gets recorded**
+
+Every significant daemon action becomes a structured event:
+
+```jsonl
+{"ts":"2026-04-17T...","kind":"daemon_started","port":3200,"workspacePath":"...","version":"3.31.0"}
+{"ts":"...","kind":"trigger_fired","triggerId":"test-task","workflowId":"coding-task-workflow-agentic"}
+{"ts":"...","kind":"session_queued","sessionId":"sess_abc","triggerId":"test-task","queueDepth":0}
+{"ts":"...","kind":"session_started","sessionId":"sess_abc","workflowId":"coding-task-workflow-agentic","modelId":"..."}
+{"ts":"...","kind":"tool_called","sessionId":"sess_abc","tool":"Bash","command":"ls docs/ | grep trigger"}
+{"ts":"...","kind":"tool_error","sessionId":"sess_abc","tool":"Bash","error":"exit 1","isError":true}
+{"ts":"...","kind":"step_advanced","sessionId":"sess_abc","stepId":"phase-0-triage-and-mode","advance":1}
+{"ts":"...","kind":"session_completed","sessionId":"sess_abc","stopReason":"stop","durationMs":1847000}
+{"ts":"...","kind":"delivery_attempted","sessionId":"sess_abc","callbackUrl":"https://...","status":200}
+{"ts":"...","kind":"poll_cycle","triggerId":"pr-review","eventsFound":3,"newEvents":1,"dispatched":1}
+```
+
+**`DaemonEventEmitter`:** thin wrapper around the event store, called from TriggerRouter, workflow-runner, delivery-client, and polling-scheduler. Each call appends one event to `~/.workrail/events/daemon/YYYY-MM-DD.jsonl`. Zero overhead when nothing is listening.
+
+**`worktrain logs` CLI:** reads from both session store and daemon event stream, correlates by `sessionId`, presents a unified timeline:
+
+```
+worktrain logs                        # tail current daemon events
+worktrain logs --session sess_abc123  # full timeline: trigger → steps → delivery
+worktrain logs --trigger test-task    # all sessions for this trigger
+worktrain logs --level error          # only errors across all streams
+worktrain logs --since 1h             # last hour of activity
+worktrain logs --format json          # machine-readable for scripts
+```
+
+**SSE extension:** the console already streams session events via SSE. Extend to also stream daemon events so the console live feed shows everything: trigger fires, tool calls, delivery attempts, errors -- not just step advances. This is the "more than just the DAG" console improvement.
+
+**Why this matters for self-healing:** The coordinator self-healing pattern requires the coordinator to observe what happened. Today it reads `lastStepNotes` and session store snapshots -- both batch reads after the fact. With a subscribable daemon event stream, the coordinator can react in real time: "tool_error event for session X → spawn diagnostic sub-session now" rather than "check for WORKTRAIN_STUCK markers after the fact."
+
+**Build order:**
+1. `DaemonEventEmitter` + daemon event stream file (append-only JSONL, no fancy infra needed to start)
+2. Wire emitter calls into TriggerRouter, workflow-runner, delivery-client
+3. `worktrain logs` CLI commands (reads files, correlates by sessionId)
+4. SSE extension in DaemonConsole for live event streaming
+5. Coordinator script subscription to event streams (replaces polling session store)