opencode-swarm-plugin 0.37.0 → 0.39.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env +2 -0
- package/.hive/eval-results.json +26 -0
- package/.hive/issues.jsonl +20 -5
- package/.hive/memories.jsonl +35 -1
- package/.opencode/eval-history.jsonl +12 -0
- package/.turbo/turbo-build.log +4 -4
- package/.turbo/turbo-test.log +319 -319
- package/CHANGELOG.md +258 -0
- package/README.md +50 -0
- package/bin/swarm.test.ts +475 -0
- package/bin/swarm.ts +385 -208
- package/dist/compaction-hook.d.ts +1 -1
- package/dist/compaction-hook.d.ts.map +1 -1
- package/dist/compaction-prompt-scoring.d.ts +124 -0
- package/dist/compaction-prompt-scoring.d.ts.map +1 -0
- package/dist/eval-capture.d.ts +81 -1
- package/dist/eval-capture.d.ts.map +1 -1
- package/dist/eval-gates.d.ts +84 -0
- package/dist/eval-gates.d.ts.map +1 -0
- package/dist/eval-history.d.ts +117 -0
- package/dist/eval-history.d.ts.map +1 -0
- package/dist/eval-learning.d.ts +216 -0
- package/dist/eval-learning.d.ts.map +1 -0
- package/dist/hive.d.ts +59 -0
- package/dist/hive.d.ts.map +1 -1
- package/dist/index.d.ts +87 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +823 -131
- package/dist/plugin.js +655 -131
- package/dist/post-compaction-tracker.d.ts +133 -0
- package/dist/post-compaction-tracker.d.ts.map +1 -0
- package/dist/swarm-decompose.d.ts +30 -0
- package/dist/swarm-decompose.d.ts.map +1 -1
- package/dist/swarm-orchestrate.d.ts +23 -0
- package/dist/swarm-orchestrate.d.ts.map +1 -1
- package/dist/swarm-prompts.d.ts +25 -1
- package/dist/swarm-prompts.d.ts.map +1 -1
- package/dist/swarm.d.ts +19 -0
- package/dist/swarm.d.ts.map +1 -1
- package/evals/README.md +595 -94
- package/evals/compaction-prompt.eval.ts +149 -0
- package/evals/coordinator-behavior.eval.ts +8 -8
- package/evals/fixtures/compaction-prompt-cases.ts +305 -0
- package/evals/lib/compaction-loader.test.ts +248 -0
- package/evals/lib/compaction-loader.ts +320 -0
- package/evals/lib/data-loader.test.ts +345 -0
- package/evals/lib/data-loader.ts +107 -6
- package/evals/scorers/compaction-prompt-scorers.ts +145 -0
- package/evals/scorers/compaction-scorers.ts +13 -13
- package/evals/scorers/coordinator-discipline.evalite-test.ts +3 -2
- package/evals/scorers/coordinator-discipline.ts +13 -13
- package/examples/plugin-wrapper-template.ts +177 -8
- package/package.json +7 -2
- package/scripts/migrate-unknown-sessions.ts +349 -0
- package/src/compaction-capture.integration.test.ts +257 -0
- package/src/compaction-hook.test.ts +139 -2
- package/src/compaction-hook.ts +113 -2
- package/src/compaction-prompt-scorers.test.ts +299 -0
- package/src/compaction-prompt-scoring.ts +298 -0
- package/src/eval-capture.test.ts +422 -0
- package/src/eval-capture.ts +94 -2
- package/src/eval-gates.test.ts +306 -0
- package/src/eval-gates.ts +218 -0
- package/src/eval-history.test.ts +508 -0
- package/src/eval-history.ts +214 -0
- package/src/eval-learning.test.ts +378 -0
- package/src/eval-learning.ts +360 -0
- package/src/index.ts +61 -1
- package/src/post-compaction-tracker.test.ts +251 -0
- package/src/post-compaction-tracker.ts +237 -0
- package/src/swarm-decompose.test.ts +40 -47
- package/src/swarm-decompose.ts +2 -2
- package/src/swarm-orchestrate.test.ts +270 -7
- package/src/swarm-orchestrate.ts +100 -13
- package/src/swarm-prompts.test.ts +121 -0
- package/src/swarm-prompts.ts +297 -4
- package/src/swarm-research.integration.test.ts +157 -0
- package/src/swarm-review.ts +3 -3
- /package/evals/{evalite.config.ts → evalite.config.ts.bak} +0 -0
|
@@ -0,0 +1,248 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Compaction Data Loader Tests
|
|
3
|
+
*
|
|
4
|
+
* Tests loading COMPACTION events from session JSONL files.
|
|
5
|
+
*/
|
|
6
|
+
import { afterAll, beforeAll, describe, expect, test } from "bun:test";
|
|
7
|
+
import * as fs from "node:fs";
|
|
8
|
+
import * as os from "node:os";
|
|
9
|
+
import * as path from "node:path";
|
|
10
|
+
import type { CoordinatorEvent } from "../../src/eval-capture.js";
|
|
11
|
+
import {
|
|
12
|
+
loadCompactionEvents,
|
|
13
|
+
loadCompactionSessions,
|
|
14
|
+
} from "./compaction-loader.js";
|
|
15
|
+
|
|
16
|
+
// Test fixtures directory
|
|
17
|
+
const TEST_SESSION_DIR = path.join(
|
|
18
|
+
os.tmpdir(),
|
|
19
|
+
`test-sessions-${Date.now()}`,
|
|
20
|
+
);
|
|
21
|
+
|
|
22
|
+
/**
|
|
23
|
+
* Create a test session JSONL file
|
|
24
|
+
*/
|
|
25
|
+
function createSessionFile(
|
|
26
|
+
sessionId: string,
|
|
27
|
+
events: CoordinatorEvent[],
|
|
28
|
+
): void {
|
|
29
|
+
const sessionPath = path.join(TEST_SESSION_DIR, `${sessionId}.jsonl`);
|
|
30
|
+
const lines = events.map((e) => JSON.stringify(e)).join("\n");
|
|
31
|
+
fs.writeFileSync(sessionPath, `${lines}\n`, "utf-8");
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
beforeAll(() => {
|
|
35
|
+
// Create test session directory
|
|
36
|
+
fs.mkdirSync(TEST_SESSION_DIR, { recursive: true });
|
|
37
|
+
|
|
38
|
+
// Create test session files with COMPACTION events
|
|
39
|
+
createSessionFile("session-1", [
|
|
40
|
+
{
|
|
41
|
+
session_id: "session-1",
|
|
42
|
+
epic_id: "epic-1",
|
|
43
|
+
timestamp: "2025-01-01T10:00:00.000Z",
|
|
44
|
+
event_type: "DECISION",
|
|
45
|
+
decision_type: "decomposition_complete",
|
|
46
|
+
payload: { subtask_count: 3 },
|
|
47
|
+
},
|
|
48
|
+
{
|
|
49
|
+
session_id: "session-1",
|
|
50
|
+
epic_id: "epic-1",
|
|
51
|
+
timestamp: "2025-01-01T10:05:00.000Z",
|
|
52
|
+
event_type: "COMPACTION",
|
|
53
|
+
compaction_type: "detection_complete",
|
|
54
|
+
payload: {
|
|
55
|
+
confidence: "high",
|
|
56
|
+
context_type: "full",
|
|
57
|
+
epic_id: "epic-1",
|
|
58
|
+
},
|
|
59
|
+
},
|
|
60
|
+
{
|
|
61
|
+
session_id: "session-1",
|
|
62
|
+
epic_id: "epic-1",
|
|
63
|
+
timestamp: "2025-01-01T10:06:00.000Z",
|
|
64
|
+
event_type: "COMPACTION",
|
|
65
|
+
compaction_type: "prompt_generated",
|
|
66
|
+
payload: {
|
|
67
|
+
prompt_length: 5000,
|
|
68
|
+
full_prompt: "You are a coordinator...",
|
|
69
|
+
context_type: "full",
|
|
70
|
+
},
|
|
71
|
+
},
|
|
72
|
+
]);
|
|
73
|
+
|
|
74
|
+
createSessionFile("session-2", [
|
|
75
|
+
{
|
|
76
|
+
session_id: "session-2",
|
|
77
|
+
epic_id: "epic-2",
|
|
78
|
+
timestamp: "2025-01-02T10:00:00.000Z",
|
|
79
|
+
event_type: "COMPACTION",
|
|
80
|
+
compaction_type: "context_injected",
|
|
81
|
+
payload: {
|
|
82
|
+
injection_point: "tool_call",
|
|
83
|
+
context_length: 3000,
|
|
84
|
+
},
|
|
85
|
+
},
|
|
86
|
+
{
|
|
87
|
+
session_id: "session-2",
|
|
88
|
+
epic_id: "epic-2",
|
|
89
|
+
timestamp: "2025-01-02T10:01:00.000Z",
|
|
90
|
+
event_type: "COMPACTION",
|
|
91
|
+
compaction_type: "resumption_started",
|
|
92
|
+
payload: {
|
|
93
|
+
epic_id: "epic-2",
|
|
94
|
+
resumption_type: "coordinator",
|
|
95
|
+
},
|
|
96
|
+
},
|
|
97
|
+
]);
|
|
98
|
+
|
|
99
|
+
// Session with no COMPACTION events
|
|
100
|
+
createSessionFile("session-3", [
|
|
101
|
+
{
|
|
102
|
+
session_id: "session-3",
|
|
103
|
+
epic_id: "epic-3",
|
|
104
|
+
timestamp: "2025-01-03T10:00:00.000Z",
|
|
105
|
+
event_type: "DECISION",
|
|
106
|
+
decision_type: "worker_spawned",
|
|
107
|
+
payload: { worker: "BlueLake", bead_id: "epic-3.1" },
|
|
108
|
+
},
|
|
109
|
+
]);
|
|
110
|
+
});
|
|
111
|
+
|
|
112
|
+
afterAll(() => {
|
|
113
|
+
// Clean up test session directory
|
|
114
|
+
if (fs.existsSync(TEST_SESSION_DIR)) {
|
|
115
|
+
fs.rmSync(TEST_SESSION_DIR, { recursive: true });
|
|
116
|
+
}
|
|
117
|
+
});
|
|
118
|
+
|
|
119
|
+
describe("loadCompactionEvents", () => {
|
|
120
|
+
test("loads all COMPACTION events from session directory", async () => {
|
|
121
|
+
const events = await loadCompactionEvents(TEST_SESSION_DIR);
|
|
122
|
+
|
|
123
|
+
expect(events.length).toBe(4);
|
|
124
|
+
expect(events.every((e) => e.event_type === "COMPACTION")).toBe(true);
|
|
125
|
+
});
|
|
126
|
+
|
|
127
|
+
test("filters by compaction_type", async () => {
|
|
128
|
+
const events = await loadCompactionEvents(TEST_SESSION_DIR, {
|
|
129
|
+
compaction_type: "detection_complete",
|
|
130
|
+
});
|
|
131
|
+
|
|
132
|
+
expect(events.length).toBe(1);
|
|
133
|
+
expect(events[0].compaction_type).toBe("detection_complete");
|
|
134
|
+
});
|
|
135
|
+
|
|
136
|
+
test("filters by session_ids", async () => {
|
|
137
|
+
const events = await loadCompactionEvents(TEST_SESSION_DIR, {
|
|
138
|
+
sessionIds: ["session-1"],
|
|
139
|
+
});
|
|
140
|
+
|
|
141
|
+
expect(events.length).toBe(2);
|
|
142
|
+
expect(events.every((e) => e.session_id === "session-1")).toBe(true);
|
|
143
|
+
});
|
|
144
|
+
|
|
145
|
+
test("applies limit", async () => {
|
|
146
|
+
const events = await loadCompactionEvents(TEST_SESSION_DIR, {
|
|
147
|
+
limit: 2,
|
|
148
|
+
});
|
|
149
|
+
|
|
150
|
+
expect(events.length).toBe(2);
|
|
151
|
+
});
|
|
152
|
+
|
|
153
|
+
test("combines filters", async () => {
|
|
154
|
+
const events = await loadCompactionEvents(TEST_SESSION_DIR, {
|
|
155
|
+
compaction_type: "prompt_generated",
|
|
156
|
+
sessionIds: ["session-1"],
|
|
157
|
+
limit: 1,
|
|
158
|
+
});
|
|
159
|
+
|
|
160
|
+
expect(events.length).toBe(1);
|
|
161
|
+
expect(events[0].compaction_type).toBe("prompt_generated");
|
|
162
|
+
expect(events[0].session_id).toBe("session-1");
|
|
163
|
+
});
|
|
164
|
+
|
|
165
|
+
test("returns empty array for non-existent directory", async () => {
|
|
166
|
+
const events = await loadCompactionEvents("/non/existent/path");
|
|
167
|
+
|
|
168
|
+
expect(events).toEqual([]);
|
|
169
|
+
});
|
|
170
|
+
|
|
171
|
+
test("skips invalid JSONL lines", async () => {
|
|
172
|
+
// Create session with invalid JSON
|
|
173
|
+
const invalidPath = path.join(TEST_SESSION_DIR, "session-invalid.jsonl");
|
|
174
|
+
fs.writeFileSync(
|
|
175
|
+
invalidPath,
|
|
176
|
+
'invalid json\n{"session_id": "session-valid", "event_type": "COMPACTION", "compaction_type": "detection_complete", "epic_id": "epic-4", "timestamp": "2025-01-04T10:00:00.000Z", "payload": {}}\n',
|
|
177
|
+
"utf-8",
|
|
178
|
+
);
|
|
179
|
+
|
|
180
|
+
const events = await loadCompactionEvents(TEST_SESSION_DIR);
|
|
181
|
+
|
|
182
|
+
// Should skip invalid line but include valid one
|
|
183
|
+
expect(events.some((e) => e.session_id === "session-valid")).toBe(true);
|
|
184
|
+
|
|
185
|
+
// Clean up
|
|
186
|
+
fs.unlinkSync(invalidPath);
|
|
187
|
+
});
|
|
188
|
+
});
|
|
189
|
+
|
|
190
|
+
describe("loadCompactionSessions", () => {
|
|
191
|
+
test("groups events by session_id", async () => {
|
|
192
|
+
const sessions = await loadCompactionSessions(TEST_SESSION_DIR);
|
|
193
|
+
|
|
194
|
+
expect(sessions.length).toBe(2); // session-1 and session-2 (session-3 has no COMPACTION events)
|
|
195
|
+
expect(sessions[0].session_id).toBeDefined();
|
|
196
|
+
expect(sessions[0].events.length).toBeGreaterThan(0);
|
|
197
|
+
});
|
|
198
|
+
|
|
199
|
+
test("includes session metadata", async () => {
|
|
200
|
+
const sessions = await loadCompactionSessions(TEST_SESSION_DIR);
|
|
201
|
+
|
|
202
|
+
const session1 = sessions.find((s) => s.session_id === "session-1");
|
|
203
|
+
expect(session1).toBeDefined();
|
|
204
|
+
if (session1) {
|
|
205
|
+
expect(session1.epic_id).toBe("epic-1");
|
|
206
|
+
expect(session1.start_time).toBeDefined();
|
|
207
|
+
expect(session1.end_time).toBeDefined();
|
|
208
|
+
}
|
|
209
|
+
});
|
|
210
|
+
|
|
211
|
+
test("filters by compaction_type", async () => {
|
|
212
|
+
const sessions = await loadCompactionSessions(TEST_SESSION_DIR, {
|
|
213
|
+
compaction_type: "detection_complete",
|
|
214
|
+
});
|
|
215
|
+
|
|
216
|
+
expect(sessions.length).toBe(1);
|
|
217
|
+
expect(sessions[0].session_id).toBe("session-1");
|
|
218
|
+
});
|
|
219
|
+
|
|
220
|
+
test("filters by session_ids", async () => {
|
|
221
|
+
const sessions = await loadCompactionSessions(TEST_SESSION_DIR, {
|
|
222
|
+
sessionIds: ["session-2"],
|
|
223
|
+
});
|
|
224
|
+
|
|
225
|
+
expect(sessions.length).toBe(1);
|
|
226
|
+
expect(sessions[0].session_id).toBe("session-2");
|
|
227
|
+
});
|
|
228
|
+
|
|
229
|
+
test("applies limit", async () => {
|
|
230
|
+
const sessions = await loadCompactionSessions(TEST_SESSION_DIR, {
|
|
231
|
+
limit: 1,
|
|
232
|
+
});
|
|
233
|
+
|
|
234
|
+
expect(sessions.length).toBe(1);
|
|
235
|
+
});
|
|
236
|
+
|
|
237
|
+
test("returns empty array for non-existent directory", async () => {
|
|
238
|
+
const sessions = await loadCompactionSessions("/non/existent/path");
|
|
239
|
+
|
|
240
|
+
expect(sessions).toEqual([]);
|
|
241
|
+
});
|
|
242
|
+
|
|
243
|
+
test("excludes sessions with no COMPACTION events", async () => {
|
|
244
|
+
const sessions = await loadCompactionSessions(TEST_SESSION_DIR);
|
|
245
|
+
|
|
246
|
+
expect(sessions.every((s) => s.session_id !== "session-3")).toBe(true);
|
|
247
|
+
});
|
|
248
|
+
});
|
|
@@ -0,0 +1,320 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Compaction Data Loader
|
|
3
|
+
*
|
|
4
|
+
* Loads COMPACTION events from session JSONL files for use in evals.
|
|
5
|
+
*
|
|
6
|
+
* Features:
|
|
7
|
+
* - Lazy loading with early termination for large datasets
|
|
8
|
+
* - Filtering by compaction_type, sessionIds, and limit
|
|
9
|
+
* - Graceful error handling (skips invalid lines)
|
|
10
|
+
* - Type-safe with Zod validation
|
|
11
|
+
*
|
|
12
|
+
* @module compaction-loader
|
|
13
|
+
*/
|
|
14
|
+
import * as fs from "node:fs";
|
|
15
|
+
import { createInterface } from "node:readline";
|
|
16
|
+
import * as path from "node:path";
|
|
17
|
+
import type { CoordinatorEvent } from "../../src/eval-capture.js";
|
|
18
|
+
import { CoordinatorEventSchema } from "../../src/eval-capture.js";
|
|
19
|
+
|
|
20
|
+
/**
 * Compaction event — the subset of CoordinatorEvent whose discriminant is
 * event_type === "COMPACTION".
 */
export type CompactionEvent = Extract<
  CoordinatorEvent,
  { event_type: "COMPACTION" }
>;

/**
 * Compaction session — one session's COMPACTION events plus derived metadata.
 */
export interface CompactionSession {
  /** Session identifier shared by all grouped events. */
  session_id: string;
  /** Epic the session belongs to (taken from its first event). */
  epic_id: string;
  /** ISO timestamp of the earliest COMPACTION event in the session. */
  start_time: string;
  /** ISO timestamp of the latest COMPACTION event in the session. */
  end_time: string;
  /** All COMPACTION events captured for this session. */
  events: CompactionEvent[];
}

/**
 * Load options
 */
export interface LoadOptions {
  /** Filter by compaction_type */
  compaction_type?:
    | "detection_complete"
    | "prompt_generated"
    | "context_injected"
    | "resumption_started"
    | "tool_call_tracked";
  /** Filter by session IDs */
  sessionIds?: string[];
  /** Limit number of results (a falsy value means "no limit"). */
  limit?: number;
}
|
|
55
|
+
|
|
56
|
+
/**
|
|
57
|
+
* Load COMPACTION events from session JSONL files
|
|
58
|
+
*
|
|
59
|
+
* Reads all .jsonl files in the session directory, parses events,
|
|
60
|
+
* and returns only COMPACTION events matching the filters.
|
|
61
|
+
*
|
|
62
|
+
* @param sessionDir - Path to session directory (default: ~/.config/swarm-tools/sessions)
|
|
63
|
+
* @param options - Filter options
|
|
64
|
+
* @returns Array of compaction events
|
|
65
|
+
*
|
|
66
|
+
* @example
|
|
67
|
+
* // Load all COMPACTION events
|
|
68
|
+
* const events = await loadCompactionEvents("/path/to/sessions");
|
|
69
|
+
*
|
|
70
|
+
* @example
|
|
71
|
+
* // Load only detection_complete events
|
|
72
|
+
* const events = await loadCompactionEvents("/path/to/sessions", {
|
|
73
|
+
* compaction_type: "detection_complete",
|
|
74
|
+
* });
|
|
75
|
+
*
|
|
76
|
+
* @example
|
|
77
|
+
* // Load events from specific sessions
|
|
78
|
+
* const events = await loadCompactionEvents("/path/to/sessions", {
|
|
79
|
+
* sessionIds: ["session-1", "session-2"],
|
|
80
|
+
* limit: 10,
|
|
81
|
+
* });
|
|
82
|
+
*/
|
|
83
|
+
export async function loadCompactionEvents(
|
|
84
|
+
sessionDir: string,
|
|
85
|
+
options?: LoadOptions,
|
|
86
|
+
): Promise<CompactionEvent[]> {
|
|
87
|
+
const { compaction_type, sessionIds, limit } = options ?? {};
|
|
88
|
+
|
|
89
|
+
// Check if directory exists
|
|
90
|
+
if (!fs.existsSync(sessionDir)) {
|
|
91
|
+
return [];
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
let files: string[];
|
|
95
|
+
try {
|
|
96
|
+
// Read all .jsonl files
|
|
97
|
+
files = fs.readdirSync(sessionDir).filter((f) => f.endsWith(".jsonl"));
|
|
98
|
+
} catch (error) {
|
|
99
|
+
// Directory exists but can't be read - log and return empty
|
|
100
|
+
console.warn(`Failed to read session directory ${sessionDir}:`, error);
|
|
101
|
+
return [];
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
// Filter by sessionIds if provided
|
|
105
|
+
const targetFiles = sessionIds
|
|
106
|
+
? files.filter((f) => sessionIds.includes(f.replace(".jsonl", "")))
|
|
107
|
+
: files;
|
|
108
|
+
|
|
109
|
+
const events: CompactionEvent[] = [];
|
|
110
|
+
|
|
111
|
+
for (const file of targetFiles) {
|
|
112
|
+
const filePath = path.join(sessionDir, file);
|
|
113
|
+
|
|
114
|
+
try {
|
|
115
|
+
// Stream large files line-by-line to avoid loading entire file into memory
|
|
116
|
+
const shouldStream = limit && limit < 100; // For small limits, streaming is overkill
|
|
117
|
+
|
|
118
|
+
if (shouldStream) {
|
|
119
|
+
// Use streaming for better memory efficiency
|
|
120
|
+
const found = await loadFromFileStream(filePath, {
|
|
121
|
+
compaction_type,
|
|
122
|
+
remainingLimit: limit - events.length,
|
|
123
|
+
});
|
|
124
|
+
events.push(...found);
|
|
125
|
+
} else {
|
|
126
|
+
// For small files or no limit, read entire file (faster)
|
|
127
|
+
const content = fs.readFileSync(filePath, "utf-8");
|
|
128
|
+
const lines = content.trim().split("\n").filter(Boolean);
|
|
129
|
+
|
|
130
|
+
for (const line of lines) {
|
|
131
|
+
const event = parseLine(line);
|
|
132
|
+
if (event && event.event_type === "COMPACTION") {
|
|
133
|
+
// Filter by compaction_type if provided
|
|
134
|
+
if (!compaction_type || event.compaction_type === compaction_type) {
|
|
135
|
+
events.push(event);
|
|
136
|
+
|
|
137
|
+
// Apply limit early to avoid processing unnecessary files
|
|
138
|
+
if (limit && events.length >= limit) {
|
|
139
|
+
return events.slice(0, limit);
|
|
140
|
+
}
|
|
141
|
+
}
|
|
142
|
+
}
|
|
143
|
+
}
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
// Early termination if limit reached
|
|
147
|
+
if (limit && events.length >= limit) {
|
|
148
|
+
return events.slice(0, limit);
|
|
149
|
+
}
|
|
150
|
+
} catch (error) {
|
|
151
|
+
// Log file read errors but continue processing other files
|
|
152
|
+
console.warn(`Failed to read session file ${filePath}:`, error);
|
|
153
|
+
}
|
|
154
|
+
}
|
|
155
|
+
|
|
156
|
+
return limit ? events.slice(0, limit) : events;
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
/**
|
|
160
|
+
* Parse a JSONL line into a CoordinatorEvent
|
|
161
|
+
*
|
|
162
|
+
* @param line - JSONL line to parse
|
|
163
|
+
* @returns Parsed and validated event, or null if invalid
|
|
164
|
+
*/
|
|
165
|
+
function parseLine(line: string): CoordinatorEvent | null {
|
|
166
|
+
try {
|
|
167
|
+
const parsed = JSON.parse(line);
|
|
168
|
+
return CoordinatorEventSchema.parse(parsed);
|
|
169
|
+
} catch {
|
|
170
|
+
// Invalid JSON or failed validation - skip silently
|
|
171
|
+
return null;
|
|
172
|
+
}
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
/**
|
|
176
|
+
* Load COMPACTION events from a file using streaming (for large files)
|
|
177
|
+
*
|
|
178
|
+
* @param filePath - Path to session JSONL file
|
|
179
|
+
* @param options - Filter options
|
|
180
|
+
* @returns Array of matching compaction events
|
|
181
|
+
*/
|
|
182
|
+
async function loadFromFileStream(
|
|
183
|
+
filePath: string,
|
|
184
|
+
options: {
|
|
185
|
+
compaction_type?: LoadOptions["compaction_type"];
|
|
186
|
+
remainingLimit?: number;
|
|
187
|
+
},
|
|
188
|
+
): Promise<CompactionEvent[]> {
|
|
189
|
+
const { compaction_type, remainingLimit } = options;
|
|
190
|
+
const events: CompactionEvent[] = [];
|
|
191
|
+
|
|
192
|
+
const fileStream = fs.createReadStream(filePath, { encoding: "utf-8" });
|
|
193
|
+
const rl = createInterface({ input: fileStream, crlfDelay: Number.POSITIVE_INFINITY });
|
|
194
|
+
|
|
195
|
+
for await (const line of rl) {
|
|
196
|
+
const event = parseLine(line);
|
|
197
|
+
if (event && event.event_type === "COMPACTION") {
|
|
198
|
+
if (!compaction_type || event.compaction_type === compaction_type) {
|
|
199
|
+
events.push(event);
|
|
200
|
+
|
|
201
|
+
// Early termination for streaming
|
|
202
|
+
if (remainingLimit && events.length >= remainingLimit) {
|
|
203
|
+
rl.close();
|
|
204
|
+
fileStream.close();
|
|
205
|
+
break;
|
|
206
|
+
}
|
|
207
|
+
}
|
|
208
|
+
}
|
|
209
|
+
}
|
|
210
|
+
|
|
211
|
+
return events;
|
|
212
|
+
}
|
|
213
|
+
|
|
214
|
+
/**
|
|
215
|
+
* Load COMPACTION sessions grouped by session_id
|
|
216
|
+
*
|
|
217
|
+
* Groups COMPACTION events by session_id and returns session metadata.
|
|
218
|
+
*
|
|
219
|
+
* @param sessionDir - Path to session directory
|
|
220
|
+
* @param options - Filter options
|
|
221
|
+
* @returns Array of compaction sessions
|
|
222
|
+
*
|
|
223
|
+
* @example
|
|
224
|
+
* // Load all sessions with COMPACTION events
|
|
225
|
+
* const sessions = await loadCompactionSessions("/path/to/sessions");
|
|
226
|
+
*
|
|
227
|
+
* @example
|
|
228
|
+
* // Load sessions with specific compaction_type
|
|
229
|
+
* const sessions = await loadCompactionSessions("/path/to/sessions", {
|
|
230
|
+
* compaction_type: "prompt_generated",
|
|
231
|
+
* });
|
|
232
|
+
*/
|
|
233
|
+
export async function loadCompactionSessions(
|
|
234
|
+
sessionDir: string,
|
|
235
|
+
options?: LoadOptions,
|
|
236
|
+
): Promise<CompactionSession[]> {
|
|
237
|
+
const events = await loadCompactionEvents(sessionDir, options);
|
|
238
|
+
|
|
239
|
+
if (events.length === 0) {
|
|
240
|
+
return [];
|
|
241
|
+
}
|
|
242
|
+
|
|
243
|
+
// Group events by session_id
|
|
244
|
+
const sessionMap = new Map<string, CompactionEvent[]>();
|
|
245
|
+
|
|
246
|
+
for (const event of events) {
|
|
247
|
+
const existing = sessionMap.get(event.session_id);
|
|
248
|
+
if (existing) {
|
|
249
|
+
existing.push(event);
|
|
250
|
+
} else {
|
|
251
|
+
sessionMap.set(event.session_id, [event]);
|
|
252
|
+
}
|
|
253
|
+
}
|
|
254
|
+
|
|
255
|
+
// Build sessions with metadata
|
|
256
|
+
const sessions: CompactionSession[] = [];
|
|
257
|
+
|
|
258
|
+
for (const [sessionId, sessionEvents] of sessionMap.entries()) {
|
|
259
|
+
if (sessionEvents.length === 0) {
|
|
260
|
+
continue;
|
|
261
|
+
}
|
|
262
|
+
|
|
263
|
+
// Get epic_id from first event
|
|
264
|
+
const epicId = sessionEvents[0].epic_id;
|
|
265
|
+
|
|
266
|
+
// Get timestamps
|
|
267
|
+
const timestamps = sessionEvents.map((e) => new Date(e.timestamp).getTime());
|
|
268
|
+
const startTime = new Date(Math.min(...timestamps)).toISOString();
|
|
269
|
+
const endTime = new Date(Math.max(...timestamps)).toISOString();
|
|
270
|
+
|
|
271
|
+
sessions.push({
|
|
272
|
+
session_id: sessionId,
|
|
273
|
+
epic_id: epicId,
|
|
274
|
+
start_time: startTime,
|
|
275
|
+
end_time: endTime,
|
|
276
|
+
events: sessionEvents,
|
|
277
|
+
});
|
|
278
|
+
}
|
|
279
|
+
|
|
280
|
+
// Apply limit
|
|
281
|
+
return options?.limit ? sessions.slice(0, options.limit) : sessions;
|
|
282
|
+
}
|
|
283
|
+
|
|
284
|
+
/**
|
|
285
|
+
* Load COMPACTION events from default session directory
|
|
286
|
+
*
|
|
287
|
+
* Convenience wrapper that uses the default ~/.config/swarm-tools/sessions directory.
|
|
288
|
+
*
|
|
289
|
+
* @param options - Filter options
|
|
290
|
+
* @returns Array of compaction events
|
|
291
|
+
*
|
|
292
|
+
* @example
|
|
293
|
+
* // Load recent compaction events
|
|
294
|
+
* const events = await loadDefaultCompactionEvents({ limit: 10 });
|
|
295
|
+
*/
|
|
296
|
+
export async function loadDefaultCompactionEvents(
|
|
297
|
+
options?: LoadOptions,
|
|
298
|
+
): Promise<CompactionEvent[]> {
|
|
299
|
+
const { getSessionDir } = await import("../../src/eval-capture.js");
|
|
300
|
+
return loadCompactionEvents(getSessionDir(), options);
|
|
301
|
+
}
|
|
302
|
+
|
|
303
|
+
/**
|
|
304
|
+
* Load COMPACTION sessions from default session directory
|
|
305
|
+
*
|
|
306
|
+
* Convenience wrapper that uses the default ~/.config/swarm-tools/sessions directory.
|
|
307
|
+
*
|
|
308
|
+
* @param options - Filter options
|
|
309
|
+
* @returns Array of compaction sessions
|
|
310
|
+
*
|
|
311
|
+
* @example
|
|
312
|
+
* // Load all compaction sessions
|
|
313
|
+
* const sessions = await loadDefaultCompactionSessions();
|
|
314
|
+
*/
|
|
315
|
+
export async function loadDefaultCompactionSessions(
|
|
316
|
+
options?: LoadOptions,
|
|
317
|
+
): Promise<CompactionSession[]> {
|
|
318
|
+
const { getSessionDir } = await import("../../src/eval-capture.js");
|
|
319
|
+
return loadCompactionSessions(getSessionDir(), options);
|
|
320
|
+
}
|