symphifo 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/NOTICE +13 -0
- package/README.md +394 -0
- package/SYMPHIFO.md +171 -0
- package/WORKFLOW.md +39 -0
- package/bin/symphifo.js +37 -0
- package/package.json +46 -0
- package/src/cli.ts +213 -0
- package/src/dashboard/app.js +1390 -0
- package/src/dashboard/index.html +139 -0
- package/src/dashboard/styles.css +1528 -0
- package/src/fixtures/local-issues.json +13 -0
- package/src/integrations/catalog.ts +151 -0
- package/src/mcp/server.ts +1237 -0
- package/src/routing/capability-resolver.ts +390 -0
- package/src/runtime/agent.ts +1050 -0
- package/src/runtime/api-server.ts +306 -0
- package/src/runtime/constants.ts +102 -0
- package/src/runtime/helpers.ts +134 -0
- package/src/runtime/issues.ts +456 -0
- package/src/runtime/logger.ts +59 -0
- package/src/runtime/providers.ts +310 -0
- package/src/runtime/run-local.ts +146 -0
- package/src/runtime/scheduler.ts +214 -0
- package/src/runtime/skills.ts +55 -0
- package/src/runtime/store.ts +313 -0
- package/src/runtime/types.ts +274 -0
- package/src/runtime/workflow.ts +185 -0
|
@@ -0,0 +1,456 @@
|
|
|
1
|
+
import { dirname, join } from "node:path";
|
|
2
|
+
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
|
|
3
|
+
import { env } from "node:process";
|
|
4
|
+
import type {
|
|
5
|
+
IssueEntry,
|
|
6
|
+
IssueState,
|
|
7
|
+
JsonRecord,
|
|
8
|
+
RuntimeConfig,
|
|
9
|
+
RuntimeEvent,
|
|
10
|
+
RuntimeEventType,
|
|
11
|
+
RuntimeMetrics,
|
|
12
|
+
RuntimeState,
|
|
13
|
+
WorkflowDefinition,
|
|
14
|
+
} from "./types.ts";
|
|
15
|
+
import {
|
|
16
|
+
ALLOWED_STATES,
|
|
17
|
+
PERSIST_EVENTS_MAX,
|
|
18
|
+
TERMINAL_STATES,
|
|
19
|
+
STATE_ROOT,
|
|
20
|
+
TARGET_ROOT,
|
|
21
|
+
WORKFLOW_RENDERED,
|
|
22
|
+
} from "./constants.ts";
|
|
23
|
+
import {
|
|
24
|
+
now,
|
|
25
|
+
toStringValue,
|
|
26
|
+
toNumberValue,
|
|
27
|
+
toBooleanValue,
|
|
28
|
+
toStringArray,
|
|
29
|
+
clamp,
|
|
30
|
+
normalizeState,
|
|
31
|
+
parseEnvNumber,
|
|
32
|
+
parseIntArg,
|
|
33
|
+
parsePositiveIntEnv,
|
|
34
|
+
withRetryBackoff,
|
|
35
|
+
getNestedRecord,
|
|
36
|
+
getNestedString,
|
|
37
|
+
getNestedNumber,
|
|
38
|
+
fail,
|
|
39
|
+
} from "./helpers.ts";
|
|
40
|
+
import { logger } from "./logger.ts";
|
|
41
|
+
import {
|
|
42
|
+
normalizeAgentProvider,
|
|
43
|
+
resolveAgentCommand,
|
|
44
|
+
getCapabilityRoutingOptions,
|
|
45
|
+
applyCapabilityMetadata,
|
|
46
|
+
} from "./providers.ts";
|
|
47
|
+
import { resolveTaskCapabilities, type CapabilityResolverOptions } from "../routing/capability-resolver.ts";
|
|
48
|
+
|
|
49
|
+
export function normalizeIssue(
|
|
50
|
+
raw: JsonRecord,
|
|
51
|
+
workflowDefinition: WorkflowDefinition | null,
|
|
52
|
+
): IssueEntry | null {
|
|
53
|
+
const id = toStringValue(raw.id, "") || toStringValue(raw.identifier, "");
|
|
54
|
+
if (!id) return null;
|
|
55
|
+
|
|
56
|
+
const createdAt = toStringValue(raw.created_at, now());
|
|
57
|
+
const updatedAt = toStringValue(raw.updated_at, createdAt);
|
|
58
|
+
const paths = toStringArray(raw.paths);
|
|
59
|
+
const legacyFiles = toStringArray(raw.files);
|
|
60
|
+
|
|
61
|
+
const issue: IssueEntry = {
|
|
62
|
+
id,
|
|
63
|
+
identifier: toStringValue(raw.identifier, id),
|
|
64
|
+
title: toStringValue(raw.title, `Issue ${id}`),
|
|
65
|
+
description: toStringValue(raw.description, ""),
|
|
66
|
+
priority: toNumberValue(raw.priority, 1),
|
|
67
|
+
state: normalizeState(raw.state),
|
|
68
|
+
branchName: toStringValue(raw.branch_name) || toStringValue(raw.branchName),
|
|
69
|
+
url: toStringValue(raw.url),
|
|
70
|
+
assigneeId: toStringValue(raw.assignee_id),
|
|
71
|
+
labels: toStringArray(raw.labels),
|
|
72
|
+
paths: paths.length > 0 ? paths : legacyFiles,
|
|
73
|
+
inferredPaths: toStringArray(raw.inferredPaths),
|
|
74
|
+
capabilityCategory: toStringValue(raw.capabilityCategory),
|
|
75
|
+
capabilityOverlays: toStringArray(raw.capabilityOverlays),
|
|
76
|
+
capabilityRationale: toStringArray(raw.capabilityRationale),
|
|
77
|
+
blockedBy: toStringArray(raw.blocked_by),
|
|
78
|
+
assignedToWorker: toBooleanValue(raw.assigned_to_worker, true),
|
|
79
|
+
createdAt,
|
|
80
|
+
updatedAt,
|
|
81
|
+
history: [],
|
|
82
|
+
attempts: toNumberValue(raw.attempts, 0),
|
|
83
|
+
maxAttempts: toNumberValue(raw.max_attempts, 3),
|
|
84
|
+
nextRetryAt: toStringValue(raw.next_retry_at),
|
|
85
|
+
};
|
|
86
|
+
|
|
87
|
+
if (!issue.capabilityCategory) {
|
|
88
|
+
applyCapabilityMetadata(issue, resolveTaskCapabilities({
|
|
89
|
+
id: issue.id,
|
|
90
|
+
identifier: issue.identifier,
|
|
91
|
+
title: issue.title,
|
|
92
|
+
description: issue.description,
|
|
93
|
+
labels: issue.labels,
|
|
94
|
+
paths: issue.paths,
|
|
95
|
+
}, getCapabilityRoutingOptions(workflowDefinition)));
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
return issue;
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
/**
 * Load and normalize the seed issue list from `path` (a JSON array file).
 * Returns [] when the file does not exist; aborts via `fail` on malformed
 * JSON or a non-array payload. Non-object entries and entries without an
 * id are silently skipped.
 */
export function loadSeedIssues(
  path: string,
  workflowDefinition: WorkflowDefinition | null,
): IssueEntry[] {
  // SYMPHIFO_ISSUES_JSON, when set, takes precedence over the default path.
  const sourcePath = env.SYMPHIFO_ISSUES_JSON ?? path;

  if (sourcePath !== path && sourcePath) {
    // NOTE(review): this writes the env var's *value* into `path` and then
    // parses that file below — so SYMPHIFO_ISSUES_JSON appears to be expected
    // to hold inline JSON content, not a file path. Confirm; if it is meant
    // to be a path, the referenced file's contents should be copied instead.
    mkdirSync(dirname(path), { recursive: true });
    writeFileSync(path, `${sourcePath}\n`, "utf8");
  }

  if (!existsSync(path)) return [];

  const raw = readFileSync(path, "utf8");
  let parsed: unknown;
  try {
    parsed = JSON.parse(raw);
  } catch (error) {
    // `fail` aborts; execution does not continue past this point.
    fail(`Invalid local issues JSON: ${String(error)}`);
  }

  if (!Array.isArray(parsed)) {
    fail("Local issues payload must be an array.");
  }

  return parsed
    .filter((candidate): candidate is JsonRecord => typeof candidate === "object" && candidate !== null)
    .map((candidate) => normalizeIssue(candidate, workflowDefinition))
    .filter((issue): issue is IssueEntry => issue !== null);
}
|
|
131
|
+
|
|
132
|
+
export function nextLocalIssueId(issues: IssueEntry[]): string {
|
|
133
|
+
const maxId = issues.reduce((current, issue) => {
|
|
134
|
+
const match = issue.identifier.match(/^LOCAL-(\d+)$/);
|
|
135
|
+
if (!match) return current;
|
|
136
|
+
const parsed = Number.parseInt(match[1], 10);
|
|
137
|
+
return Number.isFinite(parsed) ? Math.max(current, parsed) : current;
|
|
138
|
+
}, 0);
|
|
139
|
+
|
|
140
|
+
return `LOCAL-${maxId + 1}`;
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
/**
 * Build a brand-new IssueEntry from an API payload, accepting both
 * camelCase and legacy snake_case field names. The issue always starts in
 * "Todo" with zero attempts, and capability metadata is resolved
 * immediately from its textual content.
 */
export function createIssueFromPayload(
  payload: JsonRecord,
  issues: IssueEntry[],
  workflowDefinition: WorkflowDefinition | null,
): IssueEntry {
  // Auto-assign a LOCAL-n identifier when the payload omits one.
  const identifier = toStringValue(payload.identifier, nextLocalIssueId(issues));
  const id = toStringValue(payload.id, identifier);
  const createdAt = now();
  // camelCase wins over the legacy snake_case / `files` aliases below.
  const blockedBy = toStringArray(payload.blockedBy);
  const legacyBlockedBy = toStringArray(payload.blocked_by);
  const paths = toStringArray(payload.paths);
  const legacyFiles = toStringArray(payload.files);

  const issue: IssueEntry = {
    id,
    identifier,
    title: toStringValue(payload.title, `Issue ${identifier}`),
    description: toStringValue(payload.description, ""),
    // Priority is constrained to 1..10.
    priority: clamp(toNumberValue(payload.priority, 1), 1, 10),
    state: "Todo",
    branchName: toStringValue(payload.branchName) || toStringValue(payload.branch_name),
    url: toStringValue(payload.url),
    assigneeId: toStringValue(payload.assigneeId) || toStringValue(payload.assignee_id),
    labels: toStringArray(payload.labels),
    paths: paths.length > 0 ? paths : legacyFiles,
    inferredPaths: [],
    capabilityCategory: "",
    capabilityOverlays: [],
    capabilityRationale: [],
    blockedBy: blockedBy.length > 0 ? blockedBy : legacyBlockedBy,
    assignedToWorker: true,
    createdAt,
    updatedAt: createdAt,
    history: [`[${createdAt}] Issue created via API.`],
    attempts: 0,
    maxAttempts: clamp(toNumberValue(payload.maxAttempts ?? payload.max_attempts, 3), 1, 10),
  };

  // Classify the issue so routing can pick an appropriate agent profile.
  applyCapabilityMetadata(issue, resolveTaskCapabilities({
    id: issue.id,
    identifier: issue.identifier,
    title: issue.title,
    description: issue.description,
    labels: issue.labels,
    paths: issue.paths,
  }, getCapabilityRoutingOptions(workflowDefinition)));

  return issue;
}
|
|
192
|
+
|
|
193
|
+
export function deriveConfig(args: string[]): RuntimeConfig {
|
|
194
|
+
const parsedConcurrency = parsePositiveIntEnv("SYMPHIFO_WORKER_CONCURRENCY", 2);
|
|
195
|
+
let pollIntervalMs = parseEnvNumber("SYMPHIFO_POLL_INTERVAL_MS", 1200);
|
|
196
|
+
let workerConcurrency = parsedConcurrency;
|
|
197
|
+
let maxAttemptsDefault = parseEnvNumber("SYMPHIFO_MAX_ATTEMPTS", 3);
|
|
198
|
+
let commandTimeoutMs = parseEnvNumber("SYMPHIFO_AGENT_TIMEOUT_MS", 120000);
|
|
199
|
+
|
|
200
|
+
for (let i = 0; i < args.length; i += 1) {
|
|
201
|
+
const arg = args[i];
|
|
202
|
+
if (arg === "--poll") {
|
|
203
|
+
const value = args[i + 1] ?? "";
|
|
204
|
+
if (!/^\d+$/.test(value)) fail(`Invalid value for --poll: ${value}`);
|
|
205
|
+
pollIntervalMs = parseIntArg(value, pollIntervalMs);
|
|
206
|
+
}
|
|
207
|
+
if (arg === "--concurrency") {
|
|
208
|
+
const value = args[i + 1] ?? "";
|
|
209
|
+
if (!/^\d+$/.test(value)) fail(`Invalid value for --concurrency: ${value}`);
|
|
210
|
+
workerConcurrency = parseIntArg(value, workerConcurrency);
|
|
211
|
+
}
|
|
212
|
+
if (arg === "--attempts") {
|
|
213
|
+
const value = args[i + 1] ?? "";
|
|
214
|
+
if (!/^\d+$/.test(value)) fail(`Invalid value for --attempts: ${value}`);
|
|
215
|
+
maxAttemptsDefault = parseIntArg(value, maxAttemptsDefault);
|
|
216
|
+
}
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
return {
|
|
220
|
+
pollIntervalMs: clamp(pollIntervalMs, 200, 10_000),
|
|
221
|
+
workerConcurrency: clamp(workerConcurrency, 1, 16),
|
|
222
|
+
commandTimeoutMs: clamp(commandTimeoutMs, 1_000, 600_000),
|
|
223
|
+
maxAttemptsDefault: clamp(maxAttemptsDefault, 1, 10),
|
|
224
|
+
maxTurns: clamp(parseEnvNumber("SYMPHIFO_AGENT_MAX_TURNS", 4), 1, 16),
|
|
225
|
+
retryDelayMs: parseEnvNumber("SYMPHIFO_RETRY_DELAY_MS", 3_000),
|
|
226
|
+
staleInProgressTimeoutMs: parseEnvNumber("SYMPHIFO_STALE_IN_PROGRESS_MS", 20_000),
|
|
227
|
+
logLinesTail: parseEnvNumber("SYMPHIFO_LOG_TAIL_CHARS", 12_000),
|
|
228
|
+
agentProvider: normalizeAgentProvider(env.SYMPHIFO_AGENT_PROVIDER ?? "codex"),
|
|
229
|
+
agentCommand: toStringValue(env.SYMPHIFO_AGENT_COMMAND, ""),
|
|
230
|
+
runMode: "filesystem",
|
|
231
|
+
};
|
|
232
|
+
}
|
|
233
|
+
|
|
234
|
+
/**
 * Overlay workflow-definition settings on top of an env/CLI-derived
 * RuntimeConfig. Workflow values win where present; numeric values are
 * clamped to the same ranges deriveConfig uses.
 */
export function applyWorkflowConfig(
  config: RuntimeConfig,
  definition: WorkflowDefinition,
  port: number | undefined,
): RuntimeConfig {
  const pollConfig = getNestedRecord(definition.config, "poll");
  const agentConfig = getNestedRecord(definition.config, "agent");
  const codexConfig = getNestedRecord(definition.config, "codex");
  const claudeConfig = getNestedRecord(definition.config, "claude");
  const serverConfig = getNestedRecord(definition.config, "server");
  // Provider precedence: agent.provider > definition.agentProvider > config.
  const agentProvider = normalizeAgentProvider(
    getNestedString(agentConfig, "provider", definition.agentProvider || config.agentProvider),
  );
  const codexCommand = getNestedString(codexConfig, "command");
  const claudeCommand = getNestedString(claudeConfig, "command");

  return {
    ...config,
    pollIntervalMs: clamp(getNestedNumber(pollConfig, "interval_ms", config.pollIntervalMs), 200, 10_000),
    workerConcurrency: clamp(
      getNestedNumber(agentConfig, "max_concurrent_agents", config.workerConcurrency),
      1, 16,
    ),
    maxAttemptsDefault: clamp(getNestedNumber(agentConfig, "max_attempts", config.maxAttemptsDefault), 1, 10),
    maxTurns: clamp(getNestedNumber(agentConfig, "max_turns", config.maxTurns), 1, 16),
    // NOTE(review): the timeout is read from the codex section only; claude
    // runs appear to inherit it as well — confirm that is intended.
    commandTimeoutMs: clamp(
      getNestedNumber(codexConfig, "timeout_ms", config.commandTimeoutMs),
      1_000, 600_000,
    ),
    agentProvider,
    agentCommand: resolveAgentCommand(agentProvider, config.agentCommand, codexCommand, claudeCommand),
    // Port precedence: explicit `port` argument > workflow server.port >
    // previously configured dashboardPort > "0".
    dashboardPort: String(
      port ?? (getNestedNumber(serverConfig, "port", Number.parseInt(config.dashboardPort ?? "0", 10) || 0) || 0),
    ),
    runMode: "filesystem",
  };
}
|
|
271
|
+
|
|
272
|
+
export function dedupHistoryEntries(issues: IssueEntry[]): void {
|
|
273
|
+
for (const issue of issues) {
|
|
274
|
+
const seen = new Set<string>();
|
|
275
|
+
issue.history = issue.history.filter((entry) => {
|
|
276
|
+
const key = entry.toLowerCase();
|
|
277
|
+
if (seen.has(key)) return false;
|
|
278
|
+
seen.add(key);
|
|
279
|
+
return true;
|
|
280
|
+
});
|
|
281
|
+
}
|
|
282
|
+
}
|
|
283
|
+
|
|
284
|
+
export function mergeStateWithSeed(
|
|
285
|
+
seedIssues: IssueEntry[],
|
|
286
|
+
previous: RuntimeState | null,
|
|
287
|
+
config: RuntimeConfig,
|
|
288
|
+
definition: WorkflowDefinition,
|
|
289
|
+
): RuntimeState {
|
|
290
|
+
const previousMap = new Map((previous?.issues ?? []).map((issue) => [issue.id, issue]));
|
|
291
|
+
|
|
292
|
+
const mergedIssues = seedIssues.map((seed) => {
|
|
293
|
+
const saved = previousMap.get(seed.id);
|
|
294
|
+
if (!saved) return seed;
|
|
295
|
+
|
|
296
|
+
return {
|
|
297
|
+
...seed,
|
|
298
|
+
state: normalizeState(saved.state),
|
|
299
|
+
history: saved.history,
|
|
300
|
+
attempts: clamp(saved.attempts, 0, config.maxAttemptsDefault),
|
|
301
|
+
maxAttempts: clamp(saved.maxAttempts, 1, config.maxAttemptsDefault),
|
|
302
|
+
nextRetryAt: toStringValue(saved.nextRetryAt),
|
|
303
|
+
startedAt: saved.startedAt,
|
|
304
|
+
completedAt: saved.completedAt,
|
|
305
|
+
updatedAt: saved.updatedAt,
|
|
306
|
+
workspacePath: saved.workspacePath,
|
|
307
|
+
workspacePreparedAt: saved.workspacePreparedAt,
|
|
308
|
+
lastError: saved.lastError,
|
|
309
|
+
durationMs: typeof saved.durationMs === "number" ? saved.durationMs : undefined,
|
|
310
|
+
commandExitCode: typeof saved.commandExitCode === "number" ? saved.commandExitCode : saved.commandExitCode,
|
|
311
|
+
commandOutputTail: toStringValue(saved.commandOutputTail),
|
|
312
|
+
};
|
|
313
|
+
});
|
|
314
|
+
|
|
315
|
+
dedupHistoryEntries(mergedIssues);
|
|
316
|
+
|
|
317
|
+
const metrics = computeMetrics(mergedIssues);
|
|
318
|
+
|
|
319
|
+
return {
|
|
320
|
+
startedAt: previous?.startedAt ?? now(),
|
|
321
|
+
updatedAt: now(),
|
|
322
|
+
trackerKind: "filesystem",
|
|
323
|
+
sourceRepoUrl: TARGET_ROOT,
|
|
324
|
+
sourceRef: "workspace",
|
|
325
|
+
workflowPath: WORKFLOW_RENDERED,
|
|
326
|
+
config: {
|
|
327
|
+
...config,
|
|
328
|
+
dashboardPort: previous?.config.dashboardPort,
|
|
329
|
+
},
|
|
330
|
+
issues: mergedIssues,
|
|
331
|
+
events: previous?.events ?? [],
|
|
332
|
+
metrics,
|
|
333
|
+
notes: previous?.notes ?? [
|
|
334
|
+
"Local TypeScript runtime bootstrapped.",
|
|
335
|
+
`Workflow loaded from ${definition.workflowPath}.`,
|
|
336
|
+
"Codex-only execution path enabled.",
|
|
337
|
+
"No external tracker dependency (filesystem-backed local mode).",
|
|
338
|
+
],
|
|
339
|
+
};
|
|
340
|
+
}
|
|
341
|
+
|
|
342
|
+
export function computeMetrics(issues: IssueEntry[]): RuntimeMetrics {
|
|
343
|
+
let queued = 0;
|
|
344
|
+
let inProgress = 0;
|
|
345
|
+
let blocked = 0;
|
|
346
|
+
let done = 0;
|
|
347
|
+
let cancelled = 0;
|
|
348
|
+
|
|
349
|
+
for (const issue of issues) {
|
|
350
|
+
switch (issue.state) {
|
|
351
|
+
case "Todo":
|
|
352
|
+
case "Blocked":
|
|
353
|
+
queued += 1;
|
|
354
|
+
break;
|
|
355
|
+
case "In Progress":
|
|
356
|
+
case "In Review":
|
|
357
|
+
inProgress += 1;
|
|
358
|
+
break;
|
|
359
|
+
case "Done":
|
|
360
|
+
done += 1;
|
|
361
|
+
break;
|
|
362
|
+
case "Cancelled":
|
|
363
|
+
cancelled += 1;
|
|
364
|
+
break;
|
|
365
|
+
}
|
|
366
|
+
if (issue.state === "Blocked") blocked += 1;
|
|
367
|
+
}
|
|
368
|
+
|
|
369
|
+
return { total: issues.length, queued, inProgress, blocked, done, cancelled, activeWorkers: 0 };
|
|
370
|
+
}
|
|
371
|
+
|
|
372
|
+
export function computeCapabilityCounts(issues: IssueEntry[]): Record<string, number> {
|
|
373
|
+
return issues.reduce<Record<string, number>>((accumulator, issue) => {
|
|
374
|
+
const key = issue.capabilityCategory?.trim() || "default";
|
|
375
|
+
accumulator[key] = (accumulator[key] ?? 0) + 1;
|
|
376
|
+
return accumulator;
|
|
377
|
+
}, {});
|
|
378
|
+
}
|
|
379
|
+
|
|
380
|
+
export function addEvent(
|
|
381
|
+
state: RuntimeState,
|
|
382
|
+
issueId: string | undefined,
|
|
383
|
+
kind: RuntimeEventType,
|
|
384
|
+
message: string,
|
|
385
|
+
): void {
|
|
386
|
+
const event: RuntimeEvent = {
|
|
387
|
+
id: `${Date.now()}-${state.events.length + 1}`,
|
|
388
|
+
issueId,
|
|
389
|
+
kind,
|
|
390
|
+
message,
|
|
391
|
+
at: now(),
|
|
392
|
+
};
|
|
393
|
+
|
|
394
|
+
state.events = [event, ...state.events].slice(0, PERSIST_EVENTS_MAX);
|
|
395
|
+
logger.info({ issueId, kind }, message);
|
|
396
|
+
}
|
|
397
|
+
|
|
398
|
+
export function transition(issue: IssueEntry, target: IssueState, note: string): void {
|
|
399
|
+
const previous = issue.state;
|
|
400
|
+
issue.state = target;
|
|
401
|
+
issue.updatedAt = now();
|
|
402
|
+
issue.history.push(`[${issue.updatedAt}] ${note}`);
|
|
403
|
+
|
|
404
|
+
if (previous === "Blocked" && target === "Todo") {
|
|
405
|
+
issue.lastError = undefined;
|
|
406
|
+
issue.nextRetryAt = undefined;
|
|
407
|
+
}
|
|
408
|
+
|
|
409
|
+
if (TERMINAL_STATES.has(target)) {
|
|
410
|
+
issue.completedAt = now();
|
|
411
|
+
issue.nextRetryAt = undefined;
|
|
412
|
+
}
|
|
413
|
+
|
|
414
|
+
if (target === "Todo") {
|
|
415
|
+
issue.attempts = Math.max(0, issue.attempts - 1);
|
|
416
|
+
}
|
|
417
|
+
|
|
418
|
+
if (target === "Done") {
|
|
419
|
+
issue.lastError = undefined;
|
|
420
|
+
}
|
|
421
|
+
}
|
|
422
|
+
|
|
423
|
+
export function issueDependenciesResolved(issue: IssueEntry, allIssues: IssueEntry[]): boolean {
|
|
424
|
+
if (issue.blockedBy.length === 0) return true;
|
|
425
|
+
const map = new Map(allIssues.map((entry) => [entry.id, entry]));
|
|
426
|
+
return issue.blockedBy.every((dependencyId) => {
|
|
427
|
+
const dep = map.get(dependencyId);
|
|
428
|
+
return dep?.state === "Done";
|
|
429
|
+
});
|
|
430
|
+
}
|
|
431
|
+
|
|
432
|
+
export function getNextRetryAt(issue: IssueEntry, baseMs: number): string {
|
|
433
|
+
const nextAttempt = issue.attempts + 1;
|
|
434
|
+
const nextDelay = withRetryBackoff(nextAttempt, baseMs);
|
|
435
|
+
return new Date(Date.now() + nextDelay).toISOString();
|
|
436
|
+
}
|
|
437
|
+
|
|
438
|
+
export function handleStatePatch(state: RuntimeState, issue: IssueEntry, payload: JsonRecord): void {
|
|
439
|
+
const nextState = normalizeState(payload.state);
|
|
440
|
+
const allowed = new Set([...ALLOWED_STATES]);
|
|
441
|
+
|
|
442
|
+
if (!allowed.has(nextState)) {
|
|
443
|
+
throw new Error(`Unsupported state: ${String(payload.state)}`);
|
|
444
|
+
}
|
|
445
|
+
|
|
446
|
+
transition(issue, nextState, `Manual state update: ${nextState}`);
|
|
447
|
+
if (nextState === "Todo") {
|
|
448
|
+
issue.nextRetryAt = undefined;
|
|
449
|
+
issue.lastError = undefined;
|
|
450
|
+
}
|
|
451
|
+
if (nextState === "Cancelled") {
|
|
452
|
+
issue.lastError = toStringValue(payload.reason);
|
|
453
|
+
}
|
|
454
|
+
|
|
455
|
+
addEvent(state, issue.id, "manual", `Manual state transition to ${nextState}`);
|
|
456
|
+
}
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import pino from "pino";
|
|
2
|
+
import { env } from "node:process";
|
|
3
|
+
import { join } from "node:path";
|
|
4
|
+
|
|
5
|
+
// Logging knobs, read once at module load: SYMPHIFO_LOG_LEVEL sets the pino
// level (default "info"); SYMPHIFO_LOG_PRETTY=1 enables pretty console output.
const level = env.SYMPHIFO_LOG_LEVEL ?? "info";
const pretty = env.SYMPHIFO_LOG_PRETTY === "1";
|
|
7
|
+
|
|
8
|
+
function createTransports(logPath?: string) {
|
|
9
|
+
const targets: pino.TransportTargetOptions[] = [];
|
|
10
|
+
|
|
11
|
+
if (pretty) {
|
|
12
|
+
targets.push({
|
|
13
|
+
target: "pino-pretty",
|
|
14
|
+
options: { colorize: true, translateTime: "HH:MM:ss" },
|
|
15
|
+
level,
|
|
16
|
+
});
|
|
17
|
+
} else {
|
|
18
|
+
targets.push({
|
|
19
|
+
target: "pino/file",
|
|
20
|
+
options: { destination: 1 },
|
|
21
|
+
level,
|
|
22
|
+
});
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
if (logPath) {
|
|
26
|
+
targets.push({
|
|
27
|
+
target: "pino/file",
|
|
28
|
+
options: { destination: logPath, mkdir: true },
|
|
29
|
+
level,
|
|
30
|
+
});
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
return pino.transport({ targets });
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
// Lazily initialized singleton logger, plus the file path it also writes to
// (undefined when initLogger was called without a state root, or not at all).
let _logger: pino.Logger | null = null;
let _logPath: string | undefined;
|
|
38
|
+
|
|
39
|
+
export function initLogger(stateRoot?: string): pino.Logger {
|
|
40
|
+
_logPath = stateRoot ? join(stateRoot, "symphifo-local.log") : undefined;
|
|
41
|
+
_logger = pino({ name: "symphifo", level }, createTransports(_logPath));
|
|
42
|
+
return _logger;
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
export function getLogger(): pino.Logger {
|
|
46
|
+
if (!_logger) {
|
|
47
|
+
_logger = pino({ name: "symphifo", level }, createTransports());
|
|
48
|
+
}
|
|
49
|
+
return _logger;
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
export const logger = {
|
|
53
|
+
get info() { return getLogger().info.bind(getLogger()); },
|
|
54
|
+
get warn() { return getLogger().warn.bind(getLogger()); },
|
|
55
|
+
get error() { return getLogger().error.bind(getLogger()); },
|
|
56
|
+
get debug() { return getLogger().debug.bind(getLogger()); },
|
|
57
|
+
get fatal() { return getLogger().fatal.bind(getLogger()); },
|
|
58
|
+
get child() { return getLogger().child.bind(getLogger()); },
|
|
59
|
+
};
|