@exaudeus/workrail 3.72.0 → 3.72.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/dist/cli-worktrain.js +4 -6
  2. package/dist/console-ui/assets/{index-CTza1zb5.js → index-Yj9NHqbR.js} +1 -1
  3. package/dist/console-ui/index.html +1 -1
  4. package/dist/daemon/active-sessions.d.ts +17 -0
  5. package/dist/daemon/active-sessions.js +55 -0
  6. package/dist/daemon/context-loader.d.ts +32 -0
  7. package/dist/daemon/context-loader.js +34 -0
  8. package/dist/daemon/session-scope.d.ts +3 -2
  9. package/dist/daemon/tools/_shared.d.ts +38 -0
  10. package/dist/daemon/tools/_shared.js +101 -0
  11. package/dist/daemon/tools/bash.d.ts +3 -0
  12. package/dist/daemon/tools/bash.js +57 -0
  13. package/dist/daemon/tools/continue-workflow.d.ts +6 -0
  14. package/dist/daemon/tools/continue-workflow.js +208 -0
  15. package/dist/daemon/tools/file-tools.d.ts +6 -0
  16. package/dist/daemon/tools/file-tools.js +195 -0
  17. package/dist/daemon/tools/glob-grep.d.ts +4 -0
  18. package/dist/daemon/tools/glob-grep.js +172 -0
  19. package/dist/daemon/tools/report-issue.d.ts +3 -0
  20. package/dist/daemon/tools/report-issue.js +129 -0
  21. package/dist/daemon/tools/signal-coordinator.d.ts +4 -0
  22. package/dist/daemon/tools/signal-coordinator.js +105 -0
  23. package/dist/daemon/tools/spawn-agent.d.ts +6 -0
  24. package/dist/daemon/tools/spawn-agent.js +135 -0
  25. package/dist/daemon/workflow-runner.d.ts +54 -29
  26. package/dist/daemon/workflow-runner.js +156 -980
  27. package/dist/infrastructure/storage/workflow-resolution.js +5 -6
  28. package/dist/manifest.json +131 -27
  29. package/dist/mcp/handlers/shared/request-workflow-reader.js +14 -0
  30. package/dist/trigger/coordinator-deps.d.ts +15 -0
  31. package/dist/trigger/coordinator-deps.js +322 -0
  32. package/dist/trigger/delivery-pipeline.d.ts +18 -0
  33. package/dist/trigger/delivery-pipeline.js +148 -0
  34. package/dist/trigger/dispatch-deduplicator.d.ts +6 -0
  35. package/dist/trigger/dispatch-deduplicator.js +24 -0
  36. package/dist/trigger/trigger-listener.d.ts +2 -3
  37. package/dist/trigger/trigger-listener.js +9 -276
  38. package/dist/trigger/trigger-router.d.ts +8 -7
  39. package/dist/trigger/trigger-router.js +19 -97
  40. package/dist/v2/usecases/console-routes.js +10 -2
  41. package/docs/ideas/backlog.md +82 -48
  42. package/package.json +1 -1
@@ -0,0 +1,148 @@
1
+ "use strict";
2
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
+ if (k2 === undefined) k2 = k;
4
+ var desc = Object.getOwnPropertyDescriptor(m, k);
5
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
+ desc = { enumerable: true, get: function() { return m[k]; } };
7
+ }
8
+ Object.defineProperty(o, k2, desc);
9
+ }) : (function(o, m, k, k2) {
10
+ if (k2 === undefined) k2 = k;
11
+ o[k2] = m[k];
12
+ }));
13
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
14
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
15
+ }) : function(o, v) {
16
+ o["default"] = v;
17
+ });
18
+ var __importStar = (this && this.__importStar) || (function () {
19
+ var ownKeys = function(o) {
20
+ ownKeys = Object.getOwnPropertyNames || function (o) {
21
+ var ar = [];
22
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
23
+ return ar;
24
+ };
25
+ return ownKeys(o);
26
+ };
27
+ return function (mod) {
28
+ if (mod && mod.__esModule) return mod;
29
+ var result = {};
30
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
31
+ __setModuleDefault(result, mod);
32
+ return result;
33
+ };
34
+ })();
35
+ Object.defineProperty(exports, "__esModule", { value: true });
36
+ exports.DEFAULT_DELIVERY_PIPELINE = void 0;
37
+ exports.runDeliveryPipeline = runDeliveryPipeline;
38
+ const fs = __importStar(require("node:fs/promises"));
39
+ const path = __importStar(require("node:path"));
40
+ const workflow_runner_js_1 = require("../daemon/workflow-runner.js");
41
+ const delivery_action_js_1 = require("./delivery-action.js");
42
+ async function runDeliveryPipeline(stages, result, trigger, execFn, triggerId) {
43
+ const ctx = {};
44
+ try {
45
+ for (const stage of stages) {
46
+ const outcome = await stage.run(result, trigger, execFn, ctx);
47
+ if (outcome.kind === 'stop') {
48
+ console.log(`[DeliveryPipeline] Stage "${stage.name}" stopped pipeline: triggerId=${triggerId} reason=${outcome.reason}`);
49
+ return;
50
+ }
51
+ }
52
+ }
53
+ catch (err) {
54
+ console.error(`[DeliveryPipeline] Unexpected error in pipeline: triggerId=${triggerId} ` +
55
+ `error=${err instanceof Error ? err.message : String(err)}`);
56
+ }
57
+ }
58
+ const parseHandoffStage = {
59
+ name: 'parseHandoff',
60
+ async run(result, trigger, _execFn, ctx) {
61
+ const parseResult = (0, delivery_action_js_1.parseHandoffArtifact)(result.lastStepNotes);
62
+ if (parseResult.kind === 'err') {
63
+ console.warn(`[DeliveryPipeline] Delivery skipped: triggerId=${trigger.id} -- ` +
64
+ `handoff artifact not parseable: ${parseResult.error}. ` +
65
+ `Ensure the workflow's final step produces a JSON block with commitType, filesChanged, etc.`);
66
+ return { kind: 'stop', reason: `handoff artifact parse failed: ${parseResult.error}` };
67
+ }
68
+ ctx.handoffArtifact = parseResult.value;
69
+ return { kind: 'continue' };
70
+ },
71
+ };
72
+ const gitDeliveryStage = {
73
+ name: 'gitDelivery',
74
+ async run(result, trigger, execFn, ctx) {
75
+ if (ctx.handoffArtifact === undefined) {
76
+ return {
77
+ kind: 'stop',
78
+ reason: 'handoffArtifact not available -- parseHandoffStage must run before gitDeliveryStage',
79
+ };
80
+ }
81
+ const deliveryCwd = result.sessionWorkspacePath ?? trigger.workspacePath;
82
+ const deliveryResult = await (0, delivery_action_js_1.runDelivery)(ctx.handoffArtifact, deliveryCwd, {
83
+ autoCommit: trigger.autoCommit,
84
+ autoOpenPR: trigger.autoOpenPR,
85
+ secretScan: trigger.secretScan ?? true,
86
+ triggerId: trigger.id,
87
+ workflowId: trigger.workflowId,
88
+ ...(result.botIdentity !== undefined ? { botIdentity: result.botIdentity } : {}),
89
+ ...(trigger.branchStrategy === 'worktree' && result.sessionWorkspacePath
90
+ ? {
91
+ sessionId: result.sessionId ?? '',
92
+ branchPrefix: trigger.branchPrefix ?? 'worktrain/',
93
+ }
94
+ : {}),
95
+ }, execFn);
96
+ switch (deliveryResult._tag) {
97
+ case 'committed':
98
+ console.log(`[DeliveryPipeline] Delivery committed: triggerId=${trigger.id} sha=${deliveryResult.sha}`);
99
+ break;
100
+ case 'pr_opened':
101
+ console.log(`[DeliveryPipeline] Delivery PR opened: triggerId=${trigger.id} url=${deliveryResult.url}`);
102
+ break;
103
+ case 'skipped':
104
+ console.log(`[DeliveryPipeline] Delivery skipped: triggerId=${trigger.id} reason=${deliveryResult.reason}`);
105
+ break;
106
+ case 'error':
107
+ console.warn(`[DeliveryPipeline] Delivery error: triggerId=${trigger.id} phase=${deliveryResult.phase} ` +
108
+ `details=${deliveryResult.details}`);
109
+ break;
110
+ }
111
+ return { kind: 'continue' };
112
+ },
113
+ };
114
+ const cleanupWorktreeStage = {
115
+ name: 'cleanupWorktree',
116
+ async run(result, trigger, execFn, _ctx) {
117
+ if (trigger.branchStrategy !== 'worktree' || !result.sessionWorkspacePath) {
118
+ return { kind: 'continue' };
119
+ }
120
+ try {
121
+ await execFn('git', ['-C', trigger.workspacePath, 'worktree', 'remove', '--force', result.sessionWorkspacePath], { cwd: trigger.workspacePath, timeout: 60000 });
122
+ console.log(`[DeliveryPipeline] Worktree removed: triggerId=${trigger.id} path=${result.sessionWorkspacePath}`);
123
+ }
124
+ catch (err) {
125
+ console.warn(`[DeliveryPipeline] Could not remove worktree: triggerId=${trigger.id} ` +
126
+ `path=${result.sessionWorkspacePath}: ${err instanceof Error ? err.message : String(err)}`);
127
+ }
128
+ return { kind: 'continue' };
129
+ },
130
+ };
131
+ const deleteSidecarStage = {
132
+ name: 'deleteSidecar',
133
+ async run(result, trigger, _execFn, _ctx) {
134
+ if (trigger.branchStrategy !== 'worktree' || result.sessionId === undefined) {
135
+ return { kind: 'continue' };
136
+ }
137
+ await fs.unlink(path.join(workflow_runner_js_1.DAEMON_SESSIONS_DIR, `${result.sessionId}.json`)).catch(() => { });
138
+ await fs.unlink(path.join(workflow_runner_js_1.DAEMON_SESSIONS_DIR, `${result.sessionId}-conversation.jsonl`)).catch(() => { });
139
+ console.log(`[DeliveryPipeline] Session sidecar removed: triggerId=${trigger.id} sessionId=${result.sessionId}`);
140
+ return { kind: 'continue' };
141
+ },
142
+ };
143
+ exports.DEFAULT_DELIVERY_PIPELINE = [
144
+ parseHandoffStage,
145
+ gitDeliveryStage,
146
+ cleanupWorktreeStage,
147
+ deleteSidecarStage,
148
+ ];
@@ -0,0 +1,6 @@
1
+ export declare class DispatchDeduplicator {
2
+ private readonly _recent;
3
+ private readonly _ttlMs;
4
+ constructor(ttlMs: number);
5
+ checkAndRecord(key: string): boolean;
6
+ }
@@ -0,0 +1,24 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.DispatchDeduplicator = void 0;
4
+ class DispatchDeduplicator {
5
+ constructor(ttlMs) {
6
+ this._recent = new Map();
7
+ this._ttlMs = ttlMs;
8
+ }
9
+ checkAndRecord(key) {
10
+ const now = Date.now();
11
+ for (const [k, ts] of this._recent) {
12
+ if (now - ts >= this._ttlMs) {
13
+ this._recent.delete(k);
14
+ }
15
+ }
16
+ const lastDispatch = this._recent.get(key);
17
+ if (lastDispatch !== undefined && now - lastDispatch < this._ttlMs) {
18
+ return true;
19
+ }
20
+ this._recent.set(key, now);
21
+ return false;
22
+ }
23
+ }
24
+ exports.DispatchDeduplicator = DispatchDeduplicator;
@@ -3,7 +3,7 @@ import express from 'express';
3
3
  import type { V2ToolContext } from '../mcp/types.js';
4
4
  import type { TriggerStoreError } from './trigger-store.js';
5
5
  import { TriggerRouter, type RunWorkflowFn } from './trigger-router.js';
6
- import type { SteerRegistry, AbortRegistry } from '../daemon/workflow-runner.js';
6
+ import { ActiveSessionSet } from '../daemon/active-sessions.js';
7
7
  import type { WorkspaceConfig } from './types.js';
8
8
  import type { DaemonEventEmitter } from '../daemon/daemon-events.js';
9
9
  import { PollingScheduler } from './polling-scheduler.js';
@@ -19,8 +19,7 @@ export type TriggerListenerError = TriggerStoreError | {
19
19
  export interface TriggerListenerHandle {
20
20
  readonly port: number;
21
21
  readonly router: TriggerRouter;
22
- readonly steerRegistry: SteerRegistry;
23
- readonly abortRegistry: AbortRegistry;
22
+ readonly activeSessionSet: ActiveSessionSet;
24
23
  readonly scheduler: PollingScheduler;
25
24
  stop(): Promise<void>;
26
25
  }
@@ -41,28 +41,22 @@ exports.startTriggerListener = startTriggerListener;
41
41
  require("reflect-metadata");
42
42
  const express_1 = __importDefault(require("express"));
43
43
  const http = __importStar(require("node:http"));
44
- const fs = __importStar(require("node:fs"));
45
- const os = __importStar(require("node:os"));
46
- const path = __importStar(require("node:path"));
47
44
  const node_child_process_1 = require("node:child_process");
48
45
  const node_util_1 = require("node:util");
49
- const node_crypto_1 = require("node:crypto");
50
46
  const trigger_store_js_1 = require("./trigger-store.js");
51
47
  const trigger_router_js_1 = require("./trigger-router.js");
48
+ const active_sessions_js_1 = require("../daemon/active-sessions.js");
52
49
  const config_file_js_1 = require("../config/config-file.js");
53
50
  const notification_service_js_1 = require("./notification-service.js");
54
51
  const workflow_runner_js_1 = require("../daemon/workflow-runner.js");
55
52
  const types_js_1 = require("./types.js");
56
53
  const polling_scheduler_js_1 = require("./polling-scheduler.js");
57
54
  const polled_event_store_js_1 = require("./polled-event-store.js");
58
- const index_js_1 = require("../context-assembly/index.js");
59
- const infra_js_1 = require("../context-assembly/infra.js");
60
55
  const quick_review_js_1 = require("../coordinators/modes/quick-review.js");
61
56
  const review_only_js_1 = require("../coordinators/modes/review-only.js");
62
57
  const implement_js_1 = require("../coordinators/modes/implement.js");
63
58
  const full_pipeline_js_1 = require("../coordinators/modes/full-pipeline.js");
64
- const start_js_1 = require("../mcp/handlers/v2-execution/start.js");
65
- const v2_token_ops_js_1 = require("../mcp/handlers/v2-token-ops.js");
59
+ const coordinator_deps_js_1 = require("./coordinator-deps.js");
66
60
  const DEFAULT_TRIGGER_PORT = 3200;
67
61
  function createTriggerApp(router) {
68
62
  const app = (0, express_1.default)();
@@ -134,9 +128,8 @@ async function startTriggerListener(ctx, options) {
134
128
  if (env['WORKRAIL_TRIGGERS_ENABLED'] !== 'true') {
135
129
  return null;
136
130
  }
137
- const apiKey = options.apiKey ?? env['ANTHROPIC_API_KEY'] ?? '';
138
- const hasBedrock = !!(env['AWS_PROFILE'] || env['AWS_ACCESS_KEY_ID']);
139
- if (!apiKey && !hasBedrock) {
131
+ const apiKey = options.apiKey ?? env['ANTHROPIC_API_KEY'];
132
+ if (!apiKey) {
140
133
  return { _kind: 'err', error: { kind: 'missing_api_key' } };
141
134
  }
142
135
  const workspaceResult = (0, config_file_js_1.loadWorkspacesFromConfigFile)();
@@ -210,8 +203,7 @@ async function startTriggerListener(ctx, options) {
210
203
  const notificationService = (notifyMacOs || (notifyWebhook !== undefined && notifyWebhook !== ''))
211
204
  ? new notification_service_js_1.NotificationService({ macOs: notifyMacOs, webhookUrl: notifyWebhook })
212
205
  : undefined;
213
- const steerRegistry = new Map();
214
- const abortRegistry = new Map();
206
+ const activeSessionSet = new active_sessions_js_1.ActiveSessionSet();
215
207
  const execFileAsync = (0, node_util_1.promisify)(node_child_process_1.execFile);
216
208
  const { ConsoleService } = await Promise.resolve().then(() => __importStar(require('../v2/usecases/console-service.js')));
217
209
  let consoleService = null;
@@ -227,265 +219,7 @@ async function startTriggerListener(ctx, options) {
227
219
  pinnedWorkflowStore: ctx.v2.pinnedStore,
228
220
  });
229
221
  }
230
- let routerRef;
231
- const coordinatorDeps = {
232
- spawnSession: async (workflowId, goal, workspace, context, agentConfig) => {
233
- if (routerRef === undefined) {
234
- return { kind: 'err', error: 'in-process router not initialized -- coordinator deps not ready' };
235
- }
236
- const startResult = await (0, start_js_1.executeStartWorkflow)({ workflowId, workspacePath: workspace, goal }, ctx, { is_autonomous: 'true', workspacePath: workspace });
237
- if (startResult.isErr()) {
238
- const detail = `${startResult.error.kind}${'message' in startResult.error ? ': ' + startResult.error.message : ''}`;
239
- return { kind: 'err', error: `Session creation failed: ${detail}` };
240
- }
241
- const startContinueToken = startResult.value.response.continueToken;
242
- if (!startContinueToken) {
243
- return { kind: 'ok', value: workflowId };
244
- }
245
- const tokenResult = await (0, v2_token_ops_js_1.parseContinueTokenOrFail)(startContinueToken, ctx.v2.tokenCodecPorts, ctx.v2.tokenAliasStore);
246
- if (tokenResult.isErr()) {
247
- process.stderr.write(`[ERROR trigger-listener:spawnSession] Failed to decode session handle from new session: ${tokenResult.error.message}\n`);
248
- return { kind: 'err', error: 'Internal error: could not extract session handle from new session' };
249
- }
250
- const sessionHandle = tokenResult.value.sessionId;
251
- routerRef.dispatch({
252
- workflowId,
253
- goal,
254
- workspacePath: workspace,
255
- context,
256
- ...(agentConfig !== undefined ? { agentConfig } : {}),
257
- _preAllocatedStartResponse: startResult.value.response,
258
- });
259
- return { kind: 'ok', value: sessionHandle };
260
- },
261
- contextAssembler: (0, index_js_1.createContextAssembler)({
262
- execGit: async (args, cwd) => {
263
- try {
264
- const { stdout } = await execFileAsync('git', [...args], { cwd });
265
- return { kind: 'ok', value: stdout };
266
- }
267
- catch (e) {
268
- return { kind: 'err', error: e instanceof Error ? e.message : String(e) };
269
- }
270
- },
271
- execGh: async (args, cwd) => {
272
- try {
273
- const { stdout } = await execFileAsync('gh', [...args], { cwd });
274
- return { kind: 'ok', value: stdout };
275
- }
276
- catch (e) {
277
- return { kind: 'err', error: e instanceof Error ? e.message : String(e) };
278
- }
279
- },
280
- listRecentSessions: (0, infra_js_1.createListRecentSessions)(),
281
- nowIso: () => new Date().toISOString(),
282
- }),
283
- awaitSessions: async (handles, timeoutMs) => {
284
- const POLL_INTERVAL_MS = 3000;
285
- if (consoleService === null) {
286
- process.stderr.write(`[WARN coord:reason=await_degraded] awaitSessions: ConsoleService unavailable -- returning all ${handles.length} session(s) as failed.\n`);
287
- return {
288
- results: [...handles].map((h) => ({
289
- handle: h,
290
- outcome: 'failed',
291
- status: null,
292
- durationMs: 0,
293
- })),
294
- allSucceeded: false,
295
- };
296
- }
297
- const startMs = Date.now();
298
- const pending = new Set(handles);
299
- const results = new Map();
300
- while (pending.size > 0) {
301
- const elapsed = Date.now() - startMs;
302
- if (elapsed >= timeoutMs) {
303
- break;
304
- }
305
- for (const handle of [...pending]) {
306
- try {
307
- const detail = await consoleService.getSessionDetail(handle);
308
- if (detail.isErr()) {
309
- continue;
310
- }
311
- const run = detail.value.runs[0];
312
- if (!run)
313
- continue;
314
- const status = run.status;
315
- if (status === 'complete' || status === 'complete_with_gaps') {
316
- results.set(handle, { handle, outcome: 'success', status, durationMs: Date.now() - startMs });
317
- pending.delete(handle);
318
- }
319
- else if (status === 'blocked') {
320
- results.set(handle, { handle, outcome: 'failed', status, durationMs: Date.now() - startMs });
321
- pending.delete(handle);
322
- }
323
- }
324
- catch {
325
- results.set(handle, { handle, outcome: 'failed', status: null, durationMs: Date.now() - startMs });
326
- pending.delete(handle);
327
- }
328
- }
329
- if (pending.size > 0) {
330
- await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS));
331
- }
332
- }
333
- for (const handle of pending) {
334
- results.set(handle, { handle, outcome: 'timeout', status: null, durationMs: timeoutMs });
335
- }
336
- const resultsArray = [...results.values()];
337
- return {
338
- results: resultsArray,
339
- allSucceeded: resultsArray.every((r) => r.outcome === 'success'),
340
- };
341
- },
342
- getAgentResult: async (sessionHandle) => {
343
- const emptyResult = { recapMarkdown: null, artifacts: [] };
344
- if (consoleService === null) {
345
- return emptyResult;
346
- }
347
- try {
348
- const detailResult = await consoleService.getSessionDetail(sessionHandle);
349
- if (detailResult.isErr())
350
- return emptyResult;
351
- const run = detailResult.value.runs[0];
352
- if (!run)
353
- return emptyResult;
354
- const tipNodeId = run.preferredTipNodeId;
355
- if (!tipNodeId)
356
- return emptyResult;
357
- const allNodeIds = run.nodes.map((n) => n.nodeId).filter((id) => typeof id === 'string' && id !== '');
358
- const nodeIdsToFetch = allNodeIds.length > 0 ? allNodeIds : [tipNodeId];
359
- let recap = null;
360
- const collectedArtifacts = [];
361
- for (const nodeId of nodeIdsToFetch) {
362
- try {
363
- const nodeResult = await consoleService.getNodeDetail(sessionHandle, nodeId);
364
- if (nodeResult.isErr())
365
- continue;
366
- if (nodeId === tipNodeId) {
367
- recap = nodeResult.value.recapMarkdown;
368
- }
369
- if (nodeResult.value.artifacts.length > 0) {
370
- collectedArtifacts.push(...nodeResult.value.artifacts);
371
- }
372
- }
373
- catch {
374
- continue;
375
- }
376
- }
377
- return { recapMarkdown: recap, artifacts: collectedArtifacts };
378
- }
379
- catch (e) {
380
- const msg = e instanceof Error ? e.message : String(e);
381
- process.stderr.write(`[WARN coord:reason=exception handle=${sessionHandle.slice(0, 16)}] getAgentResult: ${msg}\n`);
382
- return emptyResult;
383
- }
384
- },
385
- listOpenPRs: async (workspace) => {
386
- try {
387
- const { stdout } = await execFileAsync('gh', ['pr', 'list', '--json', 'number,title,headRefName'], {
388
- cwd: workspace,
389
- timeout: 30000,
390
- });
391
- const parsed = JSON.parse(stdout);
392
- return parsed.map((p) => ({ number: p.number, title: p.title, headRef: p.headRefName }));
393
- }
394
- catch {
395
- return [];
396
- }
397
- },
398
- mergePR: async (prNumber, workspace) => {
399
- try {
400
- await execFileAsync('gh', ['pr', 'merge', String(prNumber), '--squash', '--auto'], {
401
- cwd: workspace,
402
- timeout: 60000,
403
- });
404
- return { kind: 'ok', value: undefined };
405
- }
406
- catch (e) {
407
- const msg = e instanceof Error ? e.message : String(e);
408
- return { kind: 'err', error: msg };
409
- }
410
- },
411
- writeFile: async (filePath, content) => {
412
- await fs.promises.writeFile(filePath, content, 'utf-8');
413
- },
414
- readFile: (filePath) => fs.promises.readFile(filePath, 'utf-8'),
415
- appendFile: (filePath, content) => fs.promises.appendFile(filePath, content, 'utf-8'),
416
- mkdir: (dirPath, opts) => fs.promises.mkdir(dirPath, opts),
417
- homedir: os.homedir,
418
- joinPath: path.join,
419
- nowIso: () => new Date().toISOString(),
420
- generateId: () => (0, node_crypto_1.randomUUID)(),
421
- stderr: (line) => process.stderr.write(line + '\n'),
422
- now: () => Date.now(),
423
- fileExists: (p) => fs.existsSync(p),
424
- archiveFile: (src, dest) => fs.promises.rename(src, dest),
425
- pollForPR: async (branchPattern, timeoutMs) => {
426
- const pollIntervalMs = 30000;
427
- const deadline = Date.now() + timeoutMs;
428
- while (Date.now() < deadline) {
429
- try {
430
- const { stdout } = await execFileAsync('gh', ['pr', 'list', '--head', branchPattern, '--json', 'url', '--limit', '1'], { timeout: 30000 });
431
- const parsed = JSON.parse(stdout);
432
- if (parsed.length > 0 && parsed[0] && parsed[0].url) {
433
- return parsed[0].url;
434
- }
435
- }
436
- catch {
437
- }
438
- const remaining = deadline - Date.now();
439
- if (remaining <= 0)
440
- break;
441
- await new Promise((resolve) => setTimeout(resolve, Math.min(pollIntervalMs, remaining)));
442
- }
443
- return null;
444
- },
445
- postToOutbox: async (message, metadata) => {
446
- const workrailDir = path.join(os.homedir(), '.workrail');
447
- const outboxPath = path.join(workrailDir, 'outbox.jsonl');
448
- await fs.promises.mkdir(workrailDir, { recursive: true });
449
- const entry = JSON.stringify({
450
- id: (0, node_crypto_1.randomUUID)(),
451
- message,
452
- metadata,
453
- timestamp: new Date().toISOString(),
454
- });
455
- await fs.promises.appendFile(outboxPath, entry + '\n', 'utf-8');
456
- },
457
- pollOutboxAck: async (requestId, timeoutMs) => {
458
- const pollIntervalMs = 5 * 60 * 1000;
459
- const workrailDir = path.join(os.homedir(), '.workrail');
460
- const outboxPath = path.join(workrailDir, 'outbox.jsonl');
461
- const cursorPath = path.join(workrailDir, 'inbox-cursor.json');
462
- let snapshotCount = 0;
463
- try {
464
- const outboxContent = await fs.promises.readFile(outboxPath, 'utf-8');
465
- snapshotCount = outboxContent.split('\n').filter((l) => l.trim() !== '').length;
466
- }
467
- catch {
468
- }
469
- void requestId;
470
- const deadline = Date.now() + timeoutMs;
471
- while (Date.now() < deadline) {
472
- const remaining = deadline - Date.now();
473
- if (remaining <= 0)
474
- break;
475
- await new Promise((resolve) => setTimeout(resolve, Math.min(pollIntervalMs, remaining)));
476
- try {
477
- const cursorContent = await fs.promises.readFile(cursorPath, 'utf-8');
478
- const cursor = JSON.parse(cursorContent);
479
- if (typeof cursor.lastReadCount === 'number' && cursor.lastReadCount > snapshotCount) {
480
- return 'acked';
481
- }
482
- }
483
- catch {
484
- }
485
- }
486
- return 'timeout';
487
- },
488
- };
222
+ const coordinatorDeps = (0, coordinator_deps_js_1.createCoordinatorDeps)({ ctx, execFileAsync, consoleService });
489
223
  const modeExecutors = {
490
224
  runQuickReview: quick_review_js_1.runQuickReviewPipeline,
491
225
  runReviewOnly: review_only_js_1.runReviewOnlyPipeline,
@@ -493,8 +227,8 @@ async function startTriggerListener(ctx, options) {
493
227
  runFull: full_pipeline_js_1.runFullPipeline,
494
228
  };
495
229
  const runWorkflowFn = options.runWorkflowFn ?? workflow_runner_js_1.runWorkflow;
496
- const router = new trigger_router_js_1.TriggerRouter(triggerIndex, ctx, apiKey, runWorkflowFn, undefined, maxConcurrentSessions, options.emitter, notificationService, steerRegistry, abortRegistry, coordinatorDeps, modeExecutors);
497
- routerRef = router;
230
+ const router = new trigger_router_js_1.TriggerRouter(triggerIndex, ctx, apiKey, runWorkflowFn, undefined, maxConcurrentSessions, options.emitter, notificationService, activeSessionSet, coordinatorDeps, modeExecutors);
231
+ coordinatorDeps.setDispatch(router.dispatch.bind(router));
498
232
  const app = createTriggerApp(router);
499
233
  const allTriggers = [...triggerIndex.values()];
500
234
  const polledEventStore = new polled_event_store_js_1.PolledEventStore(env);
@@ -529,8 +263,7 @@ async function startTriggerListener(ctx, options) {
529
263
  resolve({
530
264
  port: actualPort,
531
265
  router,
532
- steerRegistry,
533
- abortRegistry,
266
+ activeSessionSet,
534
267
  scheduler: pollingScheduler,
535
268
  stop: async () => {
536
269
  pollingScheduler.stop();
@@ -1,4 +1,5 @@
1
- import type { WorkflowTrigger, WorkflowRunResult, SteerRegistry, AbortRegistry } from '../daemon/workflow-runner.js';
1
+ import type { WorkflowTrigger, WorkflowRunResult, SessionSource } from '../daemon/workflow-runner.js';
2
+ import type { ActiveSessionSet } from '../daemon/active-sessions.js';
2
3
  import type { V2ToolContext } from '../mcp/types.js';
3
4
  import type { TriggerDefinition, WebhookEvent } from './types.js';
4
5
  import type { ExecFn } from './delivery-action.js';
@@ -6,6 +7,7 @@ import type { DaemonEventEmitter } from '../daemon/daemon-events.js';
6
7
  import type { NotificationService } from './notification-service.js';
7
8
  import type { AdaptiveCoordinatorDeps, ModeExecutors } from '../coordinators/adaptive-pipeline.js';
8
9
  import { runAdaptivePipeline } from '../coordinators/adaptive-pipeline.js';
10
+ import { DispatchDeduplicator } from './dispatch-deduplicator.js';
9
11
  export type RouteError = {
10
12
  readonly kind: 'not_found';
11
13
  readonly triggerId: string;
@@ -28,7 +30,7 @@ export type RouteResult = {
28
30
  readonly _tag: 'error';
29
31
  readonly error: RouteError;
30
32
  };
31
- export type RunWorkflowFn = (trigger: WorkflowTrigger, ctx: V2ToolContext, apiKey: string, daemonRegistry?: import('../v2/infra/in-memory/daemon-registry/index.js').DaemonRegistry, emitter?: DaemonEventEmitter, steerRegistry?: SteerRegistry, abortRegistry?: AbortRegistry) => Promise<WorkflowRunResult>;
33
+ export type RunWorkflowFn = (trigger: WorkflowTrigger, ctx: V2ToolContext, apiKey: string, daemonRegistry?: import('../v2/infra/in-memory/daemon-registry/index.js').DaemonRegistry, emitter?: DaemonEventEmitter, activeSessionSet?: ActiveSessionSet, _statsDir?: string, _sessionsDir?: string, source?: SessionSource) => Promise<WorkflowRunResult>;
32
34
  export declare function interpolateGoalTemplate(template: string, staticGoal: string, payload: Readonly<Record<string, unknown>>, triggerId: string): string;
33
35
  export declare class TriggerRouter {
34
36
  private readonly index;
@@ -41,17 +43,16 @@ export declare class TriggerRouter {
41
43
  private readonly _maxConcurrentSessions;
42
44
  private readonly emitter;
43
45
  private readonly notificationService;
44
- private readonly steerRegistry;
45
- private readonly abortRegistry;
46
+ private readonly _activeSessionSet;
46
47
  private readonly _coordinatorDeps;
47
48
  private readonly _modeExecutors;
48
- private readonly _recentAdaptiveDispatches;
49
+ private readonly _deduplicator;
49
50
  private static readonly ADAPTIVE_DEDUPE_TTL_MS;
50
- constructor(index: ReadonlyMap<string, TriggerDefinition>, ctx: V2ToolContext, apiKey: string, runWorkflowFn: RunWorkflowFn, execFn?: ExecFn, maxConcurrentSessions?: number, emitter?: DaemonEventEmitter, notificationService?: NotificationService, steerRegistry?: SteerRegistry, abortRegistry?: AbortRegistry, coordinatorDeps?: AdaptiveCoordinatorDeps, modeExecutors?: ModeExecutors);
51
+ constructor(index: ReadonlyMap<string, TriggerDefinition>, ctx: V2ToolContext, apiKey: string, runWorkflowFn: RunWorkflowFn, execFn?: ExecFn, maxConcurrentSessions?: number, emitter?: DaemonEventEmitter, notificationService?: NotificationService, activeSessionSet?: ActiveSessionSet, coordinatorDeps?: AdaptiveCoordinatorDeps, modeExecutors?: ModeExecutors, deduplicator?: DispatchDeduplicator);
51
52
  get activeSessions(): number;
52
53
  get maxConcurrentSessions(): number;
53
54
  route(event: WebhookEvent): RouteResult;
54
- dispatch(workflowTrigger: WorkflowTrigger): string;
55
+ dispatch(workflowTrigger: WorkflowTrigger, source?: SessionSource): string;
55
56
  listTriggers(): readonly TriggerDefinition[];
56
57
  dispatchAdaptivePipeline(goal: string, workspace: string, context?: Readonly<Record<string, unknown>>, coordinatorDeps?: AdaptiveCoordinatorDeps, modeExecutors?: ModeExecutors): ReturnType<typeof runAdaptivePipeline>;
57
58
  }