@exaudeus/workrail 3.31.1 → 3.33.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/commands/index.d.ts +1 -0
- package/dist/cli/commands/index.js +3 -1
- package/dist/cli/commands/worktrain-await.js +11 -9
- package/dist/cli/commands/worktrain-daemon-install.d.ts +35 -0
- package/dist/cli/commands/worktrain-daemon-install.js +291 -0
- package/dist/cli/commands/worktrain-daemon.d.ts +31 -0
- package/dist/cli/commands/worktrain-daemon.js +272 -0
- package/dist/cli/commands/worktrain-spawn.js +11 -9
- package/dist/cli-worktrain.js +329 -0
- package/dist/cli.js +4 -22
- package/dist/console/standalone-console.d.ts +28 -0
- package/dist/console/standalone-console.js +142 -0
- package/dist/{console/assets/index-6H9DeFxj.js → console-ui/assets/index-BuJFLLfY.js} +1 -1
- package/dist/{console → console-ui}/index.html +1 -1
- package/dist/daemon/agent-loop.d.ts +26 -0
- package/dist/daemon/agent-loop.js +53 -2
- package/dist/daemon/daemon-events.d.ts +103 -0
- package/dist/daemon/daemon-events.js +56 -0
- package/dist/daemon/workflow-runner.d.ts +6 -3
- package/dist/daemon/workflow-runner.js +229 -33
- package/dist/infrastructure/session/HttpServer.js +133 -34
- package/dist/manifest.json +134 -70
- package/dist/mcp/output-schemas.d.ts +30 -30
- package/dist/mcp/transports/bridge-events.d.ts +4 -0
- package/dist/mcp/transports/fatal-exit.js +4 -0
- package/dist/mcp/transports/http-entry.js +2 -0
- package/dist/mcp/transports/stdio-entry.js +26 -6
- package/dist/mcp/v2/tools.d.ts +4 -4
- package/dist/trigger/adapters/github-poller.d.ts +44 -0
- package/dist/trigger/adapters/github-poller.js +190 -0
- package/dist/trigger/adapters/gitlab-poller.d.ts +27 -0
- package/dist/trigger/adapters/gitlab-poller.js +81 -0
- package/dist/trigger/delivery-client.d.ts +2 -1
- package/dist/trigger/delivery-client.js +4 -1
- package/dist/trigger/index.d.ts +4 -1
- package/dist/trigger/index.js +5 -1
- package/dist/trigger/polled-event-store.d.ts +22 -0
- package/dist/trigger/polled-event-store.js +173 -0
- package/dist/trigger/polling-scheduler.d.ts +20 -0
- package/dist/trigger/polling-scheduler.js +249 -0
- package/dist/trigger/trigger-listener.d.ts +5 -0
- package/dist/trigger/trigger-listener.js +53 -4
- package/dist/trigger/trigger-router.d.ts +4 -2
- package/dist/trigger/trigger-router.js +7 -4
- package/dist/trigger/trigger-store.js +114 -33
- package/dist/trigger/types.d.ts +17 -1
- package/dist/v2/durable-core/schemas/export-bundle/index.d.ts +224 -224
- package/dist/v2/durable-core/schemas/session/events.d.ts +42 -42
- package/dist/v2/durable-core/schemas/session/manifest.d.ts +6 -6
- package/dist/v2/durable-core/schemas/session/validation-event.d.ts +2 -2
- package/dist/v2/durable-core/tokens/payloads.d.ts +52 -52
- package/dist/v2/usecases/console-routes.js +3 -3
- package/dist/v2/usecases/console-service.js +133 -9
- package/dist/v2/usecases/console-types.d.ts +7 -0
- package/docs/design/daemon-conversation-logging-plan.md +98 -0
- package/docs/design/daemon-conversation-logging-review.md +55 -0
- package/docs/design/daemon-conversation-logging.md +129 -0
- package/docs/design/github-polling-adapter-design-candidates.md +226 -0
- package/docs/design/github-polling-adapter-design-review-findings.md +131 -0
- package/docs/design/github-polling-adapter-implementation-plan.md +284 -0
- package/docs/design/implementation_plan.md +192 -0
- package/docs/design/workflow-id-validation-at-startup.md +146 -0
- package/docs/design/workflow-id-validation-design-review.md +87 -0
- package/docs/design/workflow-id-validation-implementation-plan.md +185 -0
- package/docs/design/worktrain-system-prompt-report-issue-candidates.md +135 -0
- package/docs/design/worktrain-system-prompt-report-issue-design-review.md +73 -0
- package/docs/ideas/backlog.md +465 -0
- package/package.json +1 -1
- package/workflows/architecture-scalability-audit.json +1 -1
- package/workflows/bug-investigation.agentic.v2.json +3 -3
- package/workflows/coding-task-workflow-agentic.json +32 -32
- package/workflows/coding-task-workflow-agentic.lean.v2.json +1 -1
- package/workflows/coding-task-workflow-agentic.v2.json +7 -7
- package/workflows/mr-review-workflow.agentic.v2.json +21 -12
- package/workflows/personal-learning-materials-creation-branched.json +2 -2
- package/workflows/production-readiness-audit.json +1 -1
- package/workflows/relocation-workflow-us.json +2 -2
- package/workflows/ui-ux-design-workflow.json +14 -14
- package/workflows/workflow-for-workflows.json +3 -3
- package/workflows/workflow-for-workflows.v2.json +2 -2
- package/workflows/wr.discovery.json +1 -1
- /package/dist/{console → console-ui}/assets/index-8dh0Psu-.css +0 -0
|
@@ -0,0 +1,173 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
+
exports.PolledEventStore = void 0;
|
|
37
|
+
const fs = __importStar(require("node:fs/promises"));
|
|
38
|
+
const path = __importStar(require("node:path"));
|
|
39
|
+
const os = __importStar(require("node:os"));
|
|
40
|
+
const node_crypto_1 = require("node:crypto");
|
|
41
|
+
const result_js_1 = require("../runtime/result.js");
|
|
42
|
+
// Cap on how many processed event IDs a trigger's state file retains.
const MAX_PROCESSED_IDS = 500;
/**
 * Directory holding polled-event state files.
 * Honors WORKRAIL_HOME when set; otherwise falls back to ~/.workrail.
 */
function polledEventsDir(env = process.env) {
    const homeDir = env['WORKRAIL_HOME'] ?? path.join(os.homedir(), '.workrail');
    return path.join(homeDir, 'polled-events');
}
/**
 * Absolute path of the JSON state file for one trigger.
 * The trigger id is sanitized so it is always a safe file-name component.
 */
function stateFilePath(triggerId, env = process.env) {
    const sanitizedId = String(triggerId).replace(/[^a-zA-Z0-9_-]/g, '_');
    const fileName = `${sanitizedId}.json`;
    return path.join(polledEventsDir(env), fileName);
}
|
|
51
|
+
/**
 * File-backed persistence of polled trigger events.
 *
 * One JSON state file per trigger (under the polled-events dir) records which
 * event IDs were already dispatched plus the last poll timestamp, so a daemon
 * restart does not re-fire previously handled events. All methods return
 * ok/err result objects (from ../runtime/result.js) instead of throwing.
 */
class PolledEventStore {
    // `env` lets callers/tests override WORKRAIL_HOME; defaults to process.env.
    constructor(env = process.env) {
        this.env = env;
    }
    /**
     * Read the persisted state for a trigger.
     * A missing file, unparseable JSON, or an unexpected schema all resolve to
     * a fresh state with lastPollAt=now (deliberately, to prevent a burst of
     * stale events firing); only a non-ENOENT read failure yields io_error.
     */
    async load(triggerId) {
        const filePath = stateFilePath(triggerId, this.env);
        let raw;
        try {
            raw = await fs.readFile(filePath, 'utf8');
        }
        catch (e) {
            const error = e;
            if (error.code === 'ENOENT') {
                // First run for this trigger: nothing persisted yet.
                return (0, result_js_1.ok)(freshState());
            }
            return (0, result_js_1.err)({ kind: 'io_error', message: error.message ?? String(e) });
        }
        try {
            const parsed = JSON.parse(raw);
            if (isValidState(parsed)) {
                // Copy only the known fields; ignore any extra keys on disk.
                return (0, result_js_1.ok)({
                    processedIds: parsed.processedIds,
                    lastPollAt: parsed.lastPollAt,
                });
            }
            console.warn(`[PolledEventStore] State file for trigger '${triggerId}' has unexpected schema. ` +
                `Starting fresh with lastPollAt=now to prevent burst firing.`);
            return (0, result_js_1.ok)(freshState());
        }
        catch {
            // Corrupt JSON: treat like a schema mismatch rather than failing hard.
            console.warn(`[PolledEventStore] Could not parse state file for trigger '${triggerId}'. ` +
                `Starting fresh with lastPollAt=now to prevent burst firing.`);
            return (0, result_js_1.ok)(freshState());
        }
    }
    /**
     * Atomically persist state for a trigger.
     * Writes to a uniquely-named temp file, fsyncs it, renames over the real
     * file, then fsyncs the directory — so readers never observe a partial
     * write. processedIds is pruned to the newest MAX_PROCESSED_IDS entries.
     * Returns err({kind:'write_error'}) on any failure; the temp file is
     * best-effort removed.
     */
    async save(triggerId, state) {
        const filePath = stateFilePath(triggerId, this.env);
        const dir = path.dirname(filePath);
        try {
            await fs.mkdir(dir, { recursive: true });
        }
        catch (e) {
            return (0, result_js_1.err)({ kind: 'write_error', message: `Failed to create directory ${dir}: ${String(e)}` });
        }
        // Keep only the most recent IDs (slice from the tail).
        const pruned = state.processedIds.length > MAX_PROCESSED_IDS
            ? state.processedIds.slice(state.processedIds.length - MAX_PROCESSED_IDS)
            : state.processedIds;
        const serialized = JSON.stringify({ processedIds: pruned, lastPollAt: state.lastPollAt }, null, 2);
        // randomUUID in the temp name avoids collisions between concurrent saves.
        const tmpPath = `${filePath}.${(0, node_crypto_1.randomUUID)()}.tmp`;
        try {
            await fs.writeFile(tmpPath, serialized, 'utf8');
            // fsync the temp file contents before the rename makes it visible.
            const fh = await fs.open(tmpPath, 'r+');
            try {
                await fh.sync();
            }
            finally {
                await fh.close();
            }
            await fs.rename(tmpPath, filePath);
            // fsync the directory so the rename itself is durable.
            const dirFh = await fs.open(dir, 'r');
            try {
                await dirFh.sync();
            }
            finally {
                await dirFh.close();
            }
            return (0, result_js_1.ok)(undefined);
        }
        catch (e) {
            // Best-effort cleanup of the orphaned temp file; ignore unlink errors.
            await fs.unlink(tmpPath).catch(() => undefined);
            return (0, result_js_1.err)({ kind: 'write_error', message: `Failed to save state for trigger '${triggerId}': ${String(e)}` });
        }
    }
    /**
     * Return the subset of candidateIds not yet recorded for this trigger,
     * preserving input order. Propagates load errors unchanged.
     */
    async filterNew(triggerId, candidateIds) {
        if (candidateIds.length === 0)
            return (0, result_js_1.ok)([]);
        const stateResult = await this.load(triggerId);
        if (stateResult.kind === 'err')
            return stateResult;
        const processed = new Set(stateResult.value.processedIds);
        const newIds = candidateIds.filter(id => !processed.has(id));
        return (0, result_js_1.ok)(newIds);
    }
    /**
     * Append newIds to the processed set and update lastPollAt.
     * With an empty newIds list only the timestamp is refreshed.
     * Note: does not dedupe; callers are expected to pass IDs already
     * filtered through filterNew().
     */
    async record(triggerId, newIds, lastPollAt) {
        if (newIds.length === 0) {
            const stateResult = await this.load(triggerId);
            if (stateResult.kind === 'err')
                return stateResult;
            return this.save(triggerId, { ...stateResult.value, lastPollAt });
        }
        const stateResult = await this.load(triggerId);
        if (stateResult.kind === 'err')
            return stateResult;
        const existing = stateResult.value.processedIds;
        const combined = [...existing, ...newIds];
        return this.save(triggerId, { processedIds: combined, lastPollAt });
    }
    /**
     * Convenience accessor for the persisted lastPollAt timestamp.
     * On a load error it silently falls back to "now" — NOTE(review): this
     * hides I/O failures from callers; confirm that is intentional.
     */
    async getLastPollAt(triggerId) {
        const stateResult = await this.load(triggerId);
        if (stateResult.kind === 'err')
            return new Date().toISOString();
        return stateResult.value.lastPollAt;
    }
}
|
|
155
|
+
exports.PolledEventStore = PolledEventStore;
|
|
156
|
+
/**
 * Produce an empty polled-event state whose lastPollAt is the current time,
 * so a brand-new (or reset) trigger does not fire for historical events.
 */
function freshState() {
    const nowIso = new Date().toISOString();
    return { processedIds: [], lastPollAt: nowIso };
}
|
|
162
|
+
/**
 * Runtime schema check for a persisted state object: must be a non-null
 * object with `processedIds` (array of strings) and `lastPollAt` (string).
 */
function isValidState(value) {
    if (value === null || typeof value !== 'object') {
        return false;
    }
    const candidate = value;
    const ids = candidate['processedIds'];
    if (!Array.isArray(ids)) {
        return false;
    }
    if (typeof candidate['lastPollAt'] !== 'string') {
        return false;
    }
    return ids.every(id => typeof id === 'string');
}
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import type { TriggerDefinition } from './types.js';
|
|
2
|
+
import type { TriggerRouter } from './trigger-router.js';
|
|
3
|
+
import type { PolledEventStore } from './polled-event-store.js';
|
|
4
|
+
import { type FetchFn } from './adapters/gitlab-poller.js';
|
|
5
|
+
/**
 * Schedules recurring polls for triggers that declare a pollingSource and
 * dispatches newly observed events through the TriggerRouter, using a
 * PolledEventStore to avoid re-dispatching already-processed events.
 */
export declare class PollingScheduler {
    private readonly triggers;
    private readonly router;
    private readonly store;
    /** Optional fetch implementation passed through to the poll adapters. */
    private readonly fetchFn?;
    /** Active timer handles, keyed by trigger id. */
    private readonly intervals;
    /** Per-trigger in-flight flags used to skip overlapping poll cycles. */
    private readonly polling;
    constructor(triggers: readonly TriggerDefinition[], router: TriggerRouter, store: PolledEventStore, fetchFn?: FetchFn | undefined);
    /** Begin polling loops for every polling-capable trigger (no-op if none). */
    start(): void;
    /** Cancel all scheduled polling timers. */
    stop(): void;
    private runPollCycle;
    private doPoll;
    private doPollGitLab;
    private doPollGitHub;
    private dispatchAndRecord;
}
|
|
@@ -0,0 +1,249 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.PollingScheduler = void 0;
|
|
4
|
+
const gitlab_poller_js_1 = require("./adapters/gitlab-poller.js");
|
|
5
|
+
const github_poller_js_1 = require("./adapters/github-poller.js");
|
|
6
|
+
/**
 * A trigger participates in polling exactly when it declares a pollingSource.
 */
function isPollingTrigger(trigger) {
    const { pollingSource } = trigger;
    return pollingSource !== undefined;
}
|
|
9
|
+
/**
 * Drives periodic polling for triggers that declare a pollingSource.
 *
 * For each polling trigger, start() schedules an initial poll after 5s and a
 * repeating poll every pollIntervalSeconds. Each cycle fetches candidates
 * from the provider adapter (GitLab MRs or GitHub issues/PRs), filters out
 * already-processed IDs via the PolledEventStore, dispatches the rest through
 * the TriggerRouter, and records them. Delivery is at-least-once: events are
 * dispatched BEFORE they are recorded, so a failed record may re-dispatch on
 * the next cycle, whereas a failed store READ skips dispatch entirely to
 * avoid duplicates.
 */
class PollingScheduler {
    // fetchFn is an optional fetch override forwarded to the poll adapters.
    constructor(triggers, router, store, fetchFn) {
        this.triggers = triggers;
        this.router = router;
        this.store = store;
        this.fetchFn = fetchFn;
        // Timer handles keyed by trigger id (plus a '<id>__first' entry for the
        // one-shot initial poll), so stop() can cancel everything.
        this.intervals = new Map();
        // Per-trigger busy flags preventing overlapping poll cycles.
        this.polling = new Map();
    }
    /**
     * Schedule polling loops for all polling-capable triggers.
     * Idempotent per trigger: an id that already has an interval is skipped.
     */
    start() {
        const pollingTriggers = this.triggers.filter(isPollingTrigger);
        if (pollingTriggers.length === 0) {
            return;
        }
        console.log(`[PollingScheduler] Starting polling for ${pollingTriggers.length} trigger(s)`);
        for (const trigger of pollingTriggers) {
            if (this.intervals.has(trigger.id)) {
                continue;
            }
            const intervalMs = trigger.pollingSource.pollIntervalSeconds * 1000;
            this.polling.set(trigger.id, false);
            // First poll runs shortly after startup rather than waiting a full interval.
            const firstPollTimeout = setTimeout(() => {
                void this.runPollCycle(trigger);
            }, 5000);
            const handle = setInterval(() => {
                void this.runPollCycle(trigger);
            }, intervalMs);
            this.intervals.set(trigger.id, handle);
            this.intervals.set(`${trigger.id}__first`, firstPollTimeout);
            console.log(`[PollingScheduler] Started polling trigger '${trigger.id}' ` +
                `(provider: ${trigger.provider}, interval: ${trigger.pollingSource.pollIntervalSeconds}s)`);
        }
    }
    /**
     * Cancel every scheduled timer (both repeating intervals and the one-shot
     * first-poll timeouts — Node's clearInterval accepts either handle kind).
     */
    stop() {
        for (const [id, handle] of this.intervals) {
            clearInterval(handle);
            this.intervals.delete(id);
        }
        console.log('[PollingScheduler] All polling loops stopped.');
    }
    /**
     * Run one guarded poll cycle for a trigger. Skips (with a warning) if the
     * previous cycle is still in flight; never throws — adapter errors are
     * logged and retried at the next interval.
     */
    async runPollCycle(trigger) {
        const triggerId = trigger.id;
        if (this.polling.get(triggerId)) {
            console.warn(`[PollingScheduler] Skipping poll cycle for trigger '${triggerId}' -- ` +
                `previous cycle is still running. Consider increasing pollIntervalSeconds.`);
            return;
        }
        this.polling.set(triggerId, true);
        try {
            await this.doPoll(trigger);
        }
        catch (e) {
            console.warn(`[PollingScheduler] Unexpected error in poll cycle for trigger '${triggerId}':`, e instanceof Error ? e.message : String(e));
        }
        finally {
            this.polling.set(triggerId, false);
        }
    }
    /**
     * Dispatch one poll to the provider-specific handler.
     * pollStartAt is captured BEFORE fetching so events arriving mid-poll are
     * not skipped by the next cycle's lastPollAt cutoff.
     */
    async doPoll(trigger) {
        const triggerId = trigger.id;
        const pollStartAt = new Date().toISOString();
        const lastPollAt = await this.store.getLastPollAt(triggerId);
        switch (trigger.pollingSource.provider) {
            case 'gitlab_poll':
                await this.doPollGitLab(trigger, triggerId, pollStartAt, lastPollAt, trigger.pollingSource);
                break;
            case 'github_issues_poll':
                await this.doPollGitHub(trigger, triggerId, pollStartAt, lastPollAt, trigger.pollingSource, 'issues');
                break;
            case 'github_prs_poll':
                await this.doPollGitHub(trigger, triggerId, pollStartAt, lastPollAt, trigger.pollingSource, 'prs');
                break;
            default: {
                // Exhaustiveness guard: unknown providers are logged and skipped.
                const _exhaustive = trigger.pollingSource;
                console.warn(`[PollingScheduler] Unknown provider '${String(_exhaustive.provider)}' ` +
                    `for trigger '${triggerId}'. Skipping cycle.`);
            }
        }
    }
    /**
     * Poll GitLab merge requests and dispatch/record any new ones.
     * Fetch failures abort the cycle (retried next interval).
     */
    async doPollGitLab(trigger, triggerId, pollStartAt, lastPollAt, source) {
        const pollResult = await (0, gitlab_poller_js_1.pollGitLabMRs)(source, lastPollAt, this.fetchFn);
        if (pollResult.kind === 'err') {
            console.warn(`[PollingScheduler] GitLab poll failed for trigger '${triggerId}': ` +
                `${pollResult.error.kind}: ${pollResult.error.message}. ` +
                `Skipping this cycle, will retry at next interval.`);
            return;
        }
        const mrs = pollResult.value;
        await this.dispatchAndRecord(trigger, triggerId, pollStartAt, mrs.map(mr => String(mr.id)), (id) => {
            const mr = mrs.find(m => String(m.id) === id);
            return mr ? buildGitLabWorkflowTrigger(trigger, mr) : null;
        });
    }
    /**
     * Poll GitHub issues or PRs (per `kind`) and dispatch/record new items.
     * Fetch failures abort the cycle (retried next interval).
     */
    async doPollGitHub(trigger, triggerId, pollStartAt, lastPollAt, source, kind) {
        let pollResult;
        if (kind === 'issues') {
            pollResult = await (0, github_poller_js_1.pollGitHubIssues)(source, lastPollAt, this.fetchFn);
        }
        else {
            pollResult = await (0, github_poller_js_1.pollGitHubPRs)(source, lastPollAt, this.fetchFn);
        }
        if (pollResult.kind === 'err') {
            console.warn(`[PollingScheduler] GitHub ${kind} poll failed for trigger '${triggerId}': ` +
                `${pollResult.error.kind}: ${pollResult.error.message}. ` +
                `Skipping this cycle, will retry at next interval.`);
            return;
        }
        const items = pollResult.value;
        await this.dispatchAndRecord(trigger, triggerId, pollStartAt, items.map(item => String(item.id)), (id) => {
            const item = items.find(i => String(i.id) === id);
            return item ? buildGitHubWorkflowTrigger(trigger, item) : null;
        });
    }
    /**
     * Shared filter → dispatch → record pipeline.
     * - Store read failure: skip dispatch entirely (prevents duplicates).
     * - No new IDs: still record to advance lastPollAt.
     * - Record failure after dispatch: warn only — events may re-dispatch
     *   next cycle (at-least-once semantics).
     */
    async dispatchAndRecord(trigger, triggerId, pollStartAt, candidateIds, buildTrigger) {
        if (candidateIds.length === 0) {
            await this.store.record(triggerId, [], pollStartAt);
            return;
        }
        const filterResult = await this.store.filterNew(triggerId, candidateIds);
        if (filterResult.kind === 'err') {
            console.warn(`[PollingScheduler] Failed to read event store for trigger '${triggerId}': ` +
                `${filterResult.error.message}. Skipping dispatch to avoid duplicates.`);
            return;
        }
        const newIds = filterResult.value;
        if (newIds.length === 0) {
            await this.store.record(triggerId, [], pollStartAt);
            return;
        }
        for (const newId of newIds) {
            const workflowTrigger = buildTrigger(newId);
            if (!workflowTrigger)
                continue;
            this.router.dispatch(workflowTrigger);
        }
        const recordResult = await this.store.record(triggerId, newIds, pollStartAt);
        if (recordResult.kind === 'err') {
            console.warn(`[PollingScheduler] Failed to record processed events for trigger '${triggerId}': ` +
                `${recordResult.error.message}. Events may be re-dispatched on the next cycle.`);
        }
        else {
            console.log(`[PollingScheduler] Dispatched ${newIds.length} new event(s) for trigger '${triggerId}'`);
        }
    }
}
|
|
154
|
+
exports.PollingScheduler = PollingScheduler;
|
|
155
|
+
/**
 * Build a workflow-trigger payload from a polled GitLab merge request.
 * Copies the trigger's static config, attaches MR-derived context, and
 * resolves the goal text (template interpolation with static fallback).
 */
function buildGitLabWorkflowTrigger(trigger, mr) {
    const context = {
        mrId: mr.id,
        mrIid: mr.iid,
        mrTitle: mr.title,
        mrUrl: mr.web_url,
        mrUpdatedAt: mr.updated_at,
    };
    const authorUsername = mr.author?.username;
    if (authorUsername) {
        context.mrAuthorUsername = authorUsername;
    }
    // Flat payload exposed to {{token}} interpolation in the goal template.
    const templatePayload = {
        id: mr.id,
        iid: mr.iid,
        title: mr.title,
        web_url: mr.web_url,
        updated_at: mr.updated_at,
        state: mr.state,
        author: mr.author ?? {},
    };
    const goal = interpolateGoalFromPayload(trigger, templatePayload);
    const workflowTrigger = {
        workflowId: trigger.workflowId,
        goal,
        workspacePath: trigger.workspacePath,
        context,
    };
    // Optional trigger settings are carried over only when explicitly set.
    if (trigger.referenceUrls !== undefined) {
        workflowTrigger.referenceUrls = trigger.referenceUrls;
    }
    if (trigger.agentConfig !== undefined) {
        workflowTrigger.agentConfig = trigger.agentConfig;
    }
    if (trigger.soulFile !== undefined) {
        workflowTrigger.soulFile = trigger.soulFile;
    }
    return workflowTrigger;
}
|
|
183
|
+
/**
 * Build a workflow-trigger payload from a polled GitHub issue or PR.
 * Copies the trigger's static config, attaches item-derived context, and
 * resolves the goal text (template interpolation with static fallback).
 */
function buildGitHubWorkflowTrigger(trigger, item) {
    const context = {
        itemId: item.id,
        itemNumber: item.number,
        itemTitle: item.title,
        itemUrl: item.html_url,
        itemUpdatedAt: item.updated_at,
    };
    const authorLogin = item.user?.login;
    if (authorLogin) {
        context.itemAuthorLogin = authorLogin;
    }
    // Flat payload exposed to {{token}} interpolation in the goal template.
    const templatePayload = {
        id: item.id,
        number: item.number,
        title: item.title,
        html_url: item.html_url,
        updated_at: item.updated_at,
        state: item.state,
        user: item.user ?? {},
    };
    const goal = interpolateGoalFromPayload(trigger, templatePayload);
    const workflowTrigger = {
        workflowId: trigger.workflowId,
        goal,
        workspacePath: trigger.workspacePath,
        context,
    };
    // Optional trigger settings are carried over only when explicitly set.
    if (trigger.referenceUrls !== undefined) {
        workflowTrigger.referenceUrls = trigger.referenceUrls;
    }
    if (trigger.agentConfig !== undefined) {
        workflowTrigger.agentConfig = trigger.agentConfig;
    }
    if (trigger.soulFile !== undefined) {
        workflowTrigger.soulFile = trigger.soulFile;
    }
    return workflowTrigger;
}
|
|
211
|
+
/**
 * Resolve a trigger's goal text against a polled payload.
 * When a goalTemplate with {{token}} placeholders is configured, EVERY token
 * must resolve to a non-null value; any unresolved token falls back to the
 * static trigger.goal. Without a template the static goal is returned as-is,
 * and a template with no tokens is returned verbatim.
 */
function interpolateGoalFromPayload(trigger, payload) {
    const template = trigger.goalTemplate;
    if (!template) {
        return trigger.goal;
    }
    const tokens = [...template.matchAll(/\{\{([^}]+)\}\}/g)]
        .map(m => m[1])
        .filter(t => t !== undefined);
    if (tokens.length === 0) {
        return template;
    }
    const resolved = new Map();
    for (const token of tokens) {
        const value = extractDotPath(payload, token);
        if (value === undefined || value === null) {
            // All-or-nothing: one unresolvable token aborts templating entirely.
            return trigger.goal;
        }
        resolved.set(token, String(value));
    }
    return template.replace(/\{\{([^}]+)\}\}/g, (_, token) => resolved.get(token) ?? trigger.goal);
}
/**
 * Walk a dot-separated path (optionally prefixed '$.' or '$') through a
 * plain-object payload. Bracket/index syntax is unsupported and, like any
 * traversal through a non-object, yields undefined.
 */
function extractDotPath(obj, rawPath) {
    let dotPath = rawPath.trim();
    if (dotPath.startsWith('$.')) {
        dotPath = dotPath.slice(2);
    }
    else if (dotPath.startsWith('$')) {
        dotPath = dotPath.slice(1);
    }
    let cursor = obj;
    for (const segment of dotPath.split('.')) {
        const traversable = cursor !== null && typeof cursor === 'object' && !segment.includes('[');
        if (!traversable) {
            return undefined;
        }
        cursor = cursor[segment];
    }
    return cursor;
}
|
|
@@ -4,6 +4,8 @@ import type { V2ToolContext } from '../mcp/types.js';
|
|
|
4
4
|
import type { TriggerStoreError } from './trigger-store.js';
|
|
5
5
|
import { TriggerRouter, type RunWorkflowFn } from './trigger-router.js';
|
|
6
6
|
import type { WorkspaceConfig } from './types.js';
|
|
7
|
+
import type { DaemonEventEmitter } from '../daemon/daemon-events.js';
|
|
8
|
+
import type { FetchFn } from './adapters/gitlab-poller.js';
|
|
7
9
|
export type TriggerListenerError = TriggerStoreError | {
|
|
8
10
|
readonly kind: 'port_conflict';
|
|
9
11
|
readonly port: number;
|
|
@@ -24,6 +26,9 @@ export interface StartTriggerListenerOptions {
|
|
|
24
26
|
readonly env?: Record<string, string | undefined>;
|
|
25
27
|
readonly runWorkflowFn?: RunWorkflowFn;
|
|
26
28
|
readonly workspaces?: Readonly<Record<string, WorkspaceConfig>>;
|
|
29
|
+
readonly emitter?: DaemonEventEmitter;
|
|
30
|
+
readonly fetchFn?: FetchFn;
|
|
31
|
+
readonly getWorkflowByIdFn?: (id: string) => Promise<boolean>;
|
|
27
32
|
}
|
|
28
33
|
export declare function createTriggerApp(router: TriggerRouter): express.Application;
|
|
29
34
|
export declare function startTriggerListener(ctx: V2ToolContext, options: StartTriggerListenerOptions): Promise<TriggerListenerHandle | null | {
|
|
@@ -46,6 +46,8 @@ const trigger_router_js_1 = require("./trigger-router.js");
|
|
|
46
46
|
const config_file_js_1 = require("../config/config-file.js");
|
|
47
47
|
const workflow_runner_js_1 = require("../daemon/workflow-runner.js");
|
|
48
48
|
const types_js_1 = require("./types.js");
|
|
49
|
+
const polling_scheduler_js_1 = require("./polling-scheduler.js");
|
|
50
|
+
const polled_event_store_js_1 = require("./polled-event-store.js");
|
|
49
51
|
const DEFAULT_TRIGGER_PORT = 3200;
|
|
50
52
|
function createTriggerApp(router) {
|
|
51
53
|
const app = (0, express_1.default)();
|
|
@@ -136,6 +138,39 @@ async function startTriggerListener(ctx, options) {
|
|
|
136
138
|
triggerIndex = indexResult.value;
|
|
137
139
|
console.log(`[TriggerListener] Loaded ${configResult.value.triggers.length} trigger(s) from triggers.yml`);
|
|
138
140
|
}
|
|
141
|
+
const getWorkflowByIdFn = options.getWorkflowByIdFn
|
|
142
|
+
?? (ctx.workflowService
|
|
143
|
+
? async (id) => (await ctx.workflowService.getWorkflowById(id)) !== null
|
|
144
|
+
: undefined);
|
|
145
|
+
if (getWorkflowByIdFn) {
|
|
146
|
+
const unknownTriggerIds = [];
|
|
147
|
+
for (const [triggerId, trigger] of triggerIndex) {
|
|
148
|
+
let found;
|
|
149
|
+
try {
|
|
150
|
+
found = await getWorkflowByIdFn(trigger.workflowId);
|
|
151
|
+
}
|
|
152
|
+
catch (e) {
|
|
153
|
+
found = false;
|
|
154
|
+
console.warn(`[TriggerListener] Error validating workflowId '${trigger.workflowId}' for trigger '${triggerId}': ` +
|
|
155
|
+
(e instanceof Error ? e.message : String(e)));
|
|
156
|
+
}
|
|
157
|
+
if (!found) {
|
|
158
|
+
unknownTriggerIds.push(triggerId);
|
|
159
|
+
console.warn(`[TriggerListener] Skipping trigger '${triggerId}': workflowId '${trigger.workflowId}' was not found. ` +
|
|
160
|
+
`Fix the workflowId in triggers.yml and restart the daemon.`);
|
|
161
|
+
}
|
|
162
|
+
}
|
|
163
|
+
for (const id of unknownTriggerIds) {
|
|
164
|
+
triggerIndex.delete(id);
|
|
165
|
+
}
|
|
166
|
+
if (unknownTriggerIds.length > 0) {
|
|
167
|
+
console.warn(`[TriggerListener] Skipped ${unknownTriggerIds.length} trigger(s) with unknown workflowId(s). ` +
|
|
168
|
+
`${triggerIndex.size} trigger(s) will be active.`);
|
|
169
|
+
}
|
|
170
|
+
}
|
|
171
|
+
else {
|
|
172
|
+
console.log(`[TriggerListener] workflowId validation skipped (no resolver provided).`);
|
|
173
|
+
}
|
|
139
174
|
const workrailConfig = (0, config_file_js_1.loadWorkrailConfigFile)();
|
|
140
175
|
const maxConcurrencyRaw = workrailConfig.kind === 'ok'
|
|
141
176
|
? workrailConfig.value['maxConcurrentSessions']
|
|
@@ -143,8 +178,12 @@ async function startTriggerListener(ctx, options) {
|
|
|
143
178
|
const parsed = parseInt(maxConcurrencyRaw ?? '', 10);
|
|
144
179
|
const maxConcurrentSessions = !isNaN(parsed) ? parsed : undefined;
|
|
145
180
|
const runWorkflowFn = options.runWorkflowFn ?? workflow_runner_js_1.runWorkflow;
|
|
146
|
-
const router = new trigger_router_js_1.TriggerRouter(triggerIndex, ctx, apiKey, runWorkflowFn, undefined, maxConcurrentSessions);
|
|
181
|
+
const router = new trigger_router_js_1.TriggerRouter(triggerIndex, ctx, apiKey, runWorkflowFn, undefined, maxConcurrentSessions, options.emitter);
|
|
147
182
|
const app = createTriggerApp(router);
|
|
183
|
+
const allTriggers = [...triggerIndex.values()];
|
|
184
|
+
const polledEventStore = new polled_event_store_js_1.PolledEventStore(env);
|
|
185
|
+
const pollingScheduler = new polling_scheduler_js_1.PollingScheduler(allTriggers, router, polledEventStore, options.fetchFn);
|
|
186
|
+
pollingScheduler.start();
|
|
148
187
|
await (0, workflow_runner_js_1.runStartupRecovery)().catch((err) => {
|
|
149
188
|
console.warn('[TriggerListener] Startup recovery encountered an unexpected error:', err instanceof Error ? err.message : String(err));
|
|
150
189
|
});
|
|
@@ -154,9 +193,11 @@ async function startTriggerListener(ctx, options) {
|
|
|
154
193
|
const server = http.createServer(app);
|
|
155
194
|
server.on('error', (error) => {
|
|
156
195
|
if (error.code === 'EADDRINUSE') {
|
|
196
|
+
pollingScheduler.stop();
|
|
157
197
|
resolve({ _kind: 'err', error: { kind: 'port_conflict', port } });
|
|
158
198
|
}
|
|
159
199
|
else {
|
|
200
|
+
pollingScheduler.stop();
|
|
160
201
|
resolve({ _kind: 'err', error: { kind: 'io_error', message: error.message } });
|
|
161
202
|
}
|
|
162
203
|
});
|
|
@@ -164,12 +205,20 @@ async function startTriggerListener(ctx, options) {
|
|
|
164
205
|
const addr = server.address();
|
|
165
206
|
const actualPort = (addr && typeof addr === 'object') ? addr.port : port;
|
|
166
207
|
console.log(`[TriggerListener] Webhook server listening on port ${actualPort}`);
|
|
208
|
+
options.emitter?.emit({
|
|
209
|
+
kind: 'daemon_started',
|
|
210
|
+
port: actualPort,
|
|
211
|
+
workspacePath: options.workspacePath,
|
|
212
|
+
});
|
|
167
213
|
resolve({
|
|
168
214
|
port: actualPort,
|
|
169
215
|
router,
|
|
170
|
-
stop:
|
|
171
|
-
|
|
172
|
-
|
|
216
|
+
stop: async () => {
|
|
217
|
+
pollingScheduler.stop();
|
|
218
|
+
return new Promise((res, rej) => {
|
|
219
|
+
server.close((e) => (e ? rej(e) : res()));
|
|
220
|
+
});
|
|
221
|
+
},
|
|
173
222
|
});
|
|
174
223
|
});
|
|
175
224
|
});
|
|
@@ -2,6 +2,7 @@ import type { WorkflowTrigger, WorkflowRunResult } from '../daemon/workflow-runn
|
|
|
2
2
|
import type { V2ToolContext } from '../mcp/types.js';
|
|
3
3
|
import type { TriggerDefinition, WebhookEvent } from './types.js';
|
|
4
4
|
import type { ExecFn } from './delivery-action.js';
|
|
5
|
+
import type { DaemonEventEmitter } from '../daemon/daemon-events.js';
|
|
5
6
|
export type RouteError = {
|
|
6
7
|
readonly kind: 'not_found';
|
|
7
8
|
readonly triggerId: string;
|
|
@@ -18,7 +19,7 @@ export type RouteResult = {
|
|
|
18
19
|
readonly _tag: 'error';
|
|
19
20
|
readonly error: RouteError;
|
|
20
21
|
};
|
|
21
|
-
export type RunWorkflowFn = (trigger: WorkflowTrigger, ctx: V2ToolContext, apiKey: string) => Promise<WorkflowRunResult>;
|
|
22
|
+
export type RunWorkflowFn = (trigger: WorkflowTrigger, ctx: V2ToolContext, apiKey: string, daemonRegistry?: import('../v2/infra/in-memory/daemon-registry/index.js').DaemonRegistry, emitter?: DaemonEventEmitter) => Promise<WorkflowRunResult>;
|
|
22
23
|
export declare function interpolateGoalTemplate(template: string, staticGoal: string, payload: Readonly<Record<string, unknown>>, triggerId: string): string;
|
|
23
24
|
export declare class TriggerRouter {
|
|
24
25
|
private readonly index;
|
|
@@ -29,7 +30,8 @@ export declare class TriggerRouter {
|
|
|
29
30
|
private readonly execFn;
|
|
30
31
|
private readonly semaphore;
|
|
31
32
|
private readonly _maxConcurrentSessions;
|
|
32
|
-
|
|
33
|
+
private readonly emitter;
|
|
34
|
+
constructor(index: ReadonlyMap<string, TriggerDefinition>, ctx: V2ToolContext, apiKey: string, runWorkflowFn: RunWorkflowFn, execFn?: ExecFn, maxConcurrentSessions?: number, emitter?: DaemonEventEmitter);
|
|
33
35
|
get activeSessions(): number;
|
|
34
36
|
get maxConcurrentSessions(): number;
|
|
35
37
|
route(event: WebhookEvent): RouteResult;
|
|
@@ -182,13 +182,14 @@ class Semaphore {
|
|
|
182
182
|
}
|
|
183
183
|
const DEFAULT_MAX_CONCURRENT_SESSIONS = 3;
|
|
184
184
|
class TriggerRouter {
|
|
185
|
-
constructor(index, ctx, apiKey, runWorkflowFn, execFn, maxConcurrentSessions) {
|
|
185
|
+
constructor(index, ctx, apiKey, runWorkflowFn, execFn, maxConcurrentSessions, emitter) {
|
|
186
186
|
this.index = index;
|
|
187
187
|
this.ctx = ctx;
|
|
188
188
|
this.apiKey = apiKey;
|
|
189
189
|
this.runWorkflowFn = runWorkflowFn;
|
|
190
190
|
this.queue = new index_js_1.KeyedAsyncQueue();
|
|
191
191
|
this.execFn = execFn ?? execFileAsync;
|
|
192
|
+
this.emitter = emitter;
|
|
192
193
|
const requested = maxConcurrentSessions ?? DEFAULT_MAX_CONCURRENT_SESSIONS;
|
|
193
194
|
const cap = Number.isNaN(requested) ? DEFAULT_MAX_CONCURRENT_SESSIONS : requested;
|
|
194
195
|
if (cap < 1) {
|
|
@@ -243,10 +244,12 @@ class TriggerRouter {
|
|
|
243
244
|
...(trigger.agentConfig !== undefined ? { agentConfig: trigger.agentConfig } : {}),
|
|
244
245
|
...(trigger.soulFile !== undefined ? { soulFile: trigger.soulFile } : {}),
|
|
245
246
|
};
|
|
247
|
+
this.emitter?.emit({ kind: 'trigger_fired', triggerId: trigger.id, workflowId: trigger.workflowId });
|
|
246
248
|
const queueKey = trigger.concurrencyMode === 'parallel'
|
|
247
249
|
? `${trigger.id}:${crypto.randomUUID()}`
|
|
248
250
|
: trigger.id;
|
|
249
251
|
void this.queue.enqueue(queueKey, async () => {
|
|
252
|
+
this.emitter?.emit({ kind: 'session_queued', triggerId: trigger.id, workflowId: trigger.workflowId });
|
|
250
253
|
if (this.semaphore.activeCount >= this._maxConcurrentSessions) {
|
|
251
254
|
console.warn(`[TriggerRouter] Concurrency limit reached ` +
|
|
252
255
|
`(${this.semaphore.activeCount}/${this._maxConcurrentSessions} active): ` +
|
|
@@ -255,7 +258,7 @@ class TriggerRouter {
|
|
|
255
258
|
await this.semaphore.acquire();
|
|
256
259
|
let result;
|
|
257
260
|
try {
|
|
258
|
-
result = await this.runWorkflowFn(workflowTrigger, this.ctx, this.apiKey);
|
|
261
|
+
result = await this.runWorkflowFn(workflowTrigger, this.ctx, this.apiKey, undefined, this.emitter);
|
|
259
262
|
}
|
|
260
263
|
finally {
|
|
261
264
|
this.semaphore.release();
|
|
@@ -263,7 +266,7 @@ class TriggerRouter {
|
|
|
263
266
|
const originalTag = result._tag;
|
|
264
267
|
const originalResult = result;
|
|
265
268
|
if (trigger.callbackUrl) {
|
|
266
|
-
const deliveryResult = await (0, delivery_client_js_1.post)(trigger.callbackUrl, result);
|
|
269
|
+
const deliveryResult = await (0, delivery_client_js_1.post)(trigger.callbackUrl, result, this.emitter);
|
|
267
270
|
if (deliveryResult.kind === 'err') {
|
|
268
271
|
const deliveryError = deliveryResult.error.kind === 'http_error'
|
|
269
272
|
? `HTTP ${deliveryResult.error.status}: ${deliveryResult.error.body}`
|
|
@@ -312,7 +315,7 @@ class TriggerRouter {
|
|
|
312
315
|
await this.semaphore.acquire();
|
|
313
316
|
let result;
|
|
314
317
|
try {
|
|
315
|
-
result = await this.runWorkflowFn(workflowTrigger, this.ctx, this.apiKey);
|
|
318
|
+
result = await this.runWorkflowFn(workflowTrigger, this.ctx, this.apiKey, undefined, this.emitter);
|
|
316
319
|
}
|
|
317
320
|
finally {
|
|
318
321
|
this.semaphore.release();
|