opencode-swarm-plugin 0.38.0 → 0.39.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env +2 -0
- package/.hive/eval-results.json +26 -0
- package/.hive/issues.jsonl +11 -0
- package/.hive/memories.jsonl +23 -1
- package/.opencode/eval-history.jsonl +12 -0
- package/CHANGELOG.md +130 -0
- package/README.md +29 -12
- package/bin/swarm.test.ts +475 -0
- package/bin/swarm.ts +383 -0
- package/dist/compaction-hook.d.ts +1 -1
- package/dist/compaction-hook.d.ts.map +1 -1
- package/dist/compaction-prompt-scoring.d.ts +124 -0
- package/dist/compaction-prompt-scoring.d.ts.map +1 -0
- package/dist/eval-capture.d.ts +81 -1
- package/dist/eval-capture.d.ts.map +1 -1
- package/dist/eval-gates.d.ts +84 -0
- package/dist/eval-gates.d.ts.map +1 -0
- package/dist/eval-history.d.ts +117 -0
- package/dist/eval-history.d.ts.map +1 -0
- package/dist/eval-learning.d.ts +216 -0
- package/dist/eval-learning.d.ts.map +1 -0
- package/dist/index.d.ts +44 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +370 -13
- package/dist/plugin.js +203 -13
- package/dist/post-compaction-tracker.d.ts +133 -0
- package/dist/post-compaction-tracker.d.ts.map +1 -0
- package/dist/swarm-orchestrate.d.ts +23 -0
- package/dist/swarm-orchestrate.d.ts.map +1 -1
- package/dist/swarm-prompts.d.ts +25 -1
- package/dist/swarm-prompts.d.ts.map +1 -1
- package/dist/swarm.d.ts +4 -0
- package/dist/swarm.d.ts.map +1 -1
- package/evals/README.md +589 -105
- package/evals/compaction-prompt.eval.ts +149 -0
- package/evals/coordinator-behavior.eval.ts +8 -8
- package/evals/fixtures/compaction-prompt-cases.ts +305 -0
- package/evals/lib/compaction-loader.test.ts +248 -0
- package/evals/lib/compaction-loader.ts +320 -0
- package/evals/lib/data-loader.test.ts +345 -0
- package/evals/lib/data-loader.ts +107 -6
- package/evals/scorers/compaction-prompt-scorers.ts +145 -0
- package/evals/scorers/compaction-scorers.ts +13 -13
- package/evals/scorers/coordinator-discipline.evalite-test.ts +3 -2
- package/evals/scorers/coordinator-discipline.ts +13 -13
- package/examples/plugin-wrapper-template.ts +117 -0
- package/package.json +7 -5
- package/scripts/migrate-unknown-sessions.ts +349 -0
- package/src/compaction-capture.integration.test.ts +257 -0
- package/src/compaction-hook.test.ts +42 -0
- package/src/compaction-hook.ts +81 -0
- package/src/compaction-prompt-scorers.test.ts +299 -0
- package/src/compaction-prompt-scoring.ts +298 -0
- package/src/eval-capture.test.ts +422 -0
- package/src/eval-capture.ts +94 -2
- package/src/eval-gates.test.ts +306 -0
- package/src/eval-gates.ts +218 -0
- package/src/eval-history.test.ts +508 -0
- package/src/eval-history.ts +214 -0
- package/src/eval-learning.test.ts +378 -0
- package/src/eval-learning.ts +360 -0
- package/src/index.ts +61 -1
- package/src/post-compaction-tracker.test.ts +251 -0
- package/src/post-compaction-tracker.ts +237 -0
- package/src/swarm-decompose.ts +2 -2
- package/src/swarm-orchestrate.ts +2 -2
- package/src/swarm-prompts.ts +2 -2
- package/src/swarm-review.ts +3 -3
- /package/evals/{evalite.config.ts → evalite.config.ts.bak} +0 -0
@@ -12,7 +12,7 @@ import {
 } from "./coordinator-discipline.js";
 
 describe("violationCount", () => {
-  it("scores 1.0 for zero violations", () => {
+  it("scores 1.0 for zero violations", async () => {
     const session: CoordinatorSession = {
       session_id: "test-session",
       epic_id: "test-epic",
@@ -30,9 +30,10 @@ describe("violationCount", () => {
       ],
     };
 
-    const result = violationCount
+    const result = await violationCount({
       output: JSON.stringify(session),
       expected: {},
+      input: undefined,
     });
 
     expect(result.score).toBe(1.0);
@@ -270,14 +270,14 @@ export const timeToFirstSpawn = createScorer({
 export const overallDiscipline = createScorer({
   name: "Overall Coordinator Discipline",
   description: "Composite score for coordinator protocol adherence",
-  scorer: ({ output, expected }) => {
+  scorer: async ({ output, expected, input }) => {
     try {
       // Run all scorers
       const scores = {
-        violations: violationCount
-        spawn: spawnEfficiency
-        review: reviewThoroughness
-        speed: timeToFirstSpawn
+        violations: await violationCount({ output, expected, input }),
+        spawn: await spawnEfficiency({ output, expected, input }),
+        review: await reviewThoroughness({ output, expected, input }),
+        speed: await timeToFirstSpawn({ output, expected, input }),
       };
 
       // Weighted average
@@ -289,16 +289,16 @@ export const overallDiscipline = createScorer({
       };
 
       const totalScore =
-        scores.violations.score * weights.violations +
-        scores.spawn.score * weights.spawn +
-        scores.review.score * weights.review +
-        scores.speed.score * weights.speed;
+        (scores.violations.score ?? 0) * weights.violations +
+        (scores.spawn.score ?? 0) * weights.spawn +
+        (scores.review.score ?? 0) * weights.review +
+        (scores.speed.score ?? 0) * weights.speed;
 
       const details = [
-        `Violations: ${(scores.violations.score * 100).toFixed(0)}%`,
-        `Spawn: ${(scores.spawn.score * 100).toFixed(0)}%`,
-        `Review: ${(scores.review.score * 100).toFixed(0)}%`,
-        `Speed: ${(scores.speed.score * 100).toFixed(0)}%`,
+        `Violations: ${((scores.violations.score ?? 0) * 100).toFixed(0)}%`,
+        `Spawn: ${((scores.spawn.score ?? 0) * 100).toFixed(0)}%`,
+        `Review: ${((scores.review.score ?? 0) * 100).toFixed(0)}%`,
+        `Speed: ${((scores.speed.score ?? 0) * 100).toFixed(0)}%`,
       ].join(", ");
 
       return {
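The two hunks above make the composite scorer asynchronous and treat every sub-score as possibly absent. A minimal sketch of that pattern, assuming a result shape of { score: number | null } (an assumption; the package's actual scorer types are not shown in this diff):

// Sketch only: the result shape here is an assumption, not the package's exported type.
type ScorerResult = { score: number | null; metadata?: unknown };

// Weight each sub-score, falling back to 0 when a scorer reports no score
// (for example, when the session JSON under evaluation fails to parse).
async function weightedScore(
  results: Record<string, Promise<ScorerResult>>,
  weights: Record<string, number>,
): Promise<number> {
  let total = 0;
  for (const [name, weight] of Object.entries(weights)) {
    const result = await results[name]; // async scorers must be awaited before reading .score
    total += (result?.score ?? 0) * weight;
  }
  return total;
}

Awaiting each sub-scorer keeps the composite correct once the individual scorers return promises, and the `?? 0` fallback keeps the weighted sum numeric when a scorer cannot produce a score.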
@@ -65,6 +65,42 @@ function logCompaction(
   }
 }
 
+/**
+ * Capture compaction event for evals (non-fatal dynamic import)
+ *
+ * Uses dynamic import to avoid circular dependencies and keep the plugin wrapper
+ * self-contained. Captures COMPACTION events to session JSONL for eval analysis.
+ *
+ * @param sessionID - Session ID
+ * @param epicID - Epic ID (or "unknown" if not detected)
+ * @param compactionType - Event type (detection_complete, prompt_generated, context_injected)
+ * @param payload - Event-specific data (full prompts, detection results, etc.)
+ */
+async function captureCompaction(
+  sessionID: string,
+  epicID: string,
+  compactionType: "detection_complete" | "prompt_generated" | "context_injected",
+  payload: any,
+): Promise<void> {
+  try {
+    // Dynamic import to avoid circular deps (plugin wrapper → src → plugin wrapper)
+    const { captureCompactionEvent } = await import("../src/eval-capture");
+    captureCompactionEvent({
+      session_id: sessionID,
+      epic_id: epicID,
+      compaction_type: compactionType,
+      payload,
+    });
+  } catch (err) {
+    // Non-fatal - capture failures shouldn't break compaction
+    logCompaction("warn", "compaction_capture_failed", {
+      session_id: sessionID,
+      compaction_type: compactionType,
+      error: err instanceof Error ? err.message : String(err),
+    });
+  }
+}
+
 // Module-level project directory - set during plugin initialization
 // This is CRITICAL: without it, the CLI uses process.cwd() which may be wrong
 let projectDirectory: string = process.cwd();
@@ -2152,6 +2188,25 @@ const SwarmPlugin: Plugin = async (
         full_snapshot: snapshot, // Log the entire snapshot
       });
 
+      // =======================================================================
+      // CAPTURE POINT 1: Detection complete - record confidence and reasons
+      // =======================================================================
+      await captureCompaction(
+        input.sessionID,
+        snapshot.epic?.id || "unknown",
+        "detection_complete",
+        {
+          confidence: snapshot.detection.confidence,
+          detected: detection.detected,
+          reasons: snapshot.detection.reasons,
+          session_scan_contributed: sessionScan.swarmDetected,
+          session_scan_reasons: sessionScan.reasons,
+          epic_id: snapshot.epic?.id,
+          epic_title: snapshot.epic?.title,
+          subtask_count: snapshot.epic?.subtasks?.length ?? 0,
+        },
+      );
+
       // Level 2: Generate prompt with LLM
       const llmStart = Date.now();
       const llmPrompt = await generateCompactionPrompt(snapshot);
@@ -2165,6 +2220,23 @@ const SwarmPlugin: Plugin = async (
         prompt_preview: llmPrompt?.substring(0, 500),
       });
 
+      // =======================================================================
+      // CAPTURE POINT 2: Prompt generated - record FULL prompt content
+      // =======================================================================
+      if (llmPrompt) {
+        await captureCompaction(
+          input.sessionID,
+          snapshot.epic?.id || "unknown",
+          "prompt_generated",
+          {
+            prompt_length: llmPrompt.length,
+            full_prompt: llmPrompt, // FULL content, not truncated
+            context_type: "llm_generated",
+            duration_ms: llmDuration,
+          },
+        );
+      }
+
       if (llmPrompt) {
         // SUCCESS: Use LLM-generated prompt
         const header = `[Swarm compaction: LLM-generated, ${detection.reasons.join(", ")}]\n\n`;
@@ -2188,6 +2260,21 @@ const SwarmPlugin: Plugin = async (
           });
         }
 
+        // =======================================================================
+        // CAPTURE POINT 3a: Context injected (LLM path) - record FULL content
+        // =======================================================================
+        await captureCompaction(
+          input.sessionID,
+          snapshot.epic?.id || "unknown",
+          "context_injected",
+          {
+            full_content: fullContent, // FULL content, not truncated
+            content_length: fullContent.length,
+            injection_method: "prompt" in output ? "output.prompt" : "output.context.push",
+            context_type: "llm_generated",
+          },
+        );
+
         const totalDuration = Date.now() - startTime;
         logCompaction("info", "compaction_complete_llm_success", {
           session_id: input.sessionID,
@@ -2223,6 +2310,21 @@ const SwarmPlugin: Plugin = async (
         const staticContent = header + SWARM_COMPACTION_CONTEXT;
         output.context.push(staticContent);
 
+        // =======================================================================
+        // CAPTURE POINT 3b: Context injected (static fallback) - record FULL content
+        // =======================================================================
+        await captureCompaction(
+          input.sessionID,
+          "unknown", // No snapshot available in this path
+          "context_injected",
+          {
+            full_content: staticContent,
+            content_length: staticContent.length,
+            injection_method: "output.context.push",
+            context_type: "static_swarm_context",
+          },
+        );
+
         const totalDuration = Date.now() - startTime;
         logCompaction("info", "compaction_complete_static_fallback", {
           session_id: input.sessionID,
@@ -2238,6 +2340,21 @@ const SwarmPlugin: Plugin = async (
       const fallbackContent = header + SWARM_DETECTION_FALLBACK;
       output.context.push(fallbackContent);
 
+      // =======================================================================
+      // CAPTURE POINT 3c: Context injected (detection fallback) - record FULL content
+      // =======================================================================
+      await captureCompaction(
+        input.sessionID,
+        "unknown", // No snapshot for low confidence
+        "context_injected",
+        {
+          full_content: fallbackContent,
+          content_length: fallbackContent.length,
+          injection_method: "output.context.push",
+          context_type: "detection_fallback",
+        },
+      );
+
       const totalDuration = Date.now() - startTime;
       logCompaction("info", "compaction_complete_detection_fallback", {
         session_id: input.sessionID,
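Taken together, the three capture points write one JSONL record per stage (detection_complete, prompt_generated, context_injected). A rough sketch of the record shape implied by the call sites above, assuming captureCompactionEvent persists exactly the fields it receives (the real schema lives in src/eval-capture and may add fields such as a timestamp):

// Assumed record shape, inferred from the captureCompaction call sites above; illustrative only.
interface CapturedCompactionEvent {
  session_id: string;
  epic_id: string; // "unknown" when no epic snapshot is available
  compaction_type: "detection_complete" | "prompt_generated" | "context_injected";
  payload: Record<string, unknown>; // e.g. { confidence, reasons } or { full_prompt, prompt_length }
}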
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "opencode-swarm-plugin",
-  "version": "0.38.0",
+  "version": "0.39.1",
   "description": "Multi-agent swarm coordination for OpenCode with learning capabilities, beads integration, and Agent Mail",
   "type": "module",
   "main": "./dist/index.js",
@@ -30,9 +30,11 @@
     "test:all": "bun test --timeout 60000 src/",
     "test:watch": "bun test --watch src/",
     "typecheck": "tsc --noEmit",
-    "eval:run": "bunx evalite run evals/",
-    "eval:decomposition": "bunx evalite run evals/swarm-decomposition.eval.ts",
-    "eval:coordinator": "bunx evalite run evals/coordinator-session.eval.ts",
+    "eval:run": "bun --env-file=.env run bunx evalite run evals/",
+    "eval:decomposition": "bun --env-file=.env run bunx evalite run evals/swarm-decomposition.eval.ts",
+    "eval:coordinator": "bun --env-file=.env run bunx evalite run evals/coordinator-session.eval.ts",
+    "eval:compaction": "bun --env-file=.env run bunx evalite run evals/compaction-prompt.eval.ts",
+    "migrate:sessions": "bun run scripts/migrate-unknown-sessions.ts",
     "postinstall": "node -e \"console.log('\\n\\x1b[33m Run \\x1b[36mswarm setup\\x1b[33m to configure OpenCode integration\\x1b[0m\\n')\""
   },
   "dependencies": {
@@ -44,7 +46,7 @@
     "minimatch": "^10.1.1",
     "pino": "^9.6.0",
     "pino-roll": "^1.3.0",
-    "swarm-mail": "1.5.
+    "swarm-mail": "1.5.2",
     "yaml": "^2.8.2",
     "zod": "4.1.8"
   },
package/scripts/migrate-unknown-sessions.ts ADDED

@@ -0,0 +1,349 @@
+#!/usr/bin/env bun
+
+/**
+ * Migration script to re-attribute unknown.jsonl events to proper session files
+ *
+ * Strategy:
+ * 1. Read all events from unknown.jsonl
+ * 2. For each event, find matching session by epic_id
+ * 3. Append to existing session or create new session file
+ * 4. Rename unknown.jsonl to unknown.jsonl.migrated
+ *
+ * Usage:
+ *   bun run scripts/migrate-unknown-sessions.ts [--dry-run]
+ */
+
+import { execSync } from "node:child_process";
+import { readFileSync, renameSync, writeFileSync } from "node:fs";
+import { join } from "node:path";
+
+interface SessionEvent {
+  session_id: string;
+  epic_id: string;
+  timestamp: string;
+  event_type: string;
+  [key: string]: unknown;
+}
+
+interface MigrationStats {
+  totalEvents: number;
+  migratedEvents: number;
+  sessionsUpdated: number;
+  sessionsCreated: number;
+  unattributableEvents: number;
+  eventsByEpic: Map<string, number>;
+}
+
+const SESSIONS_DIR = join(process.env.HOME || "~", ".config/swarm-tools/sessions");
+const UNKNOWN_FILE = join(SESSIONS_DIR, "unknown.jsonl");
+const MIGRATED_FILE = join(SESSIONS_DIR, "unknown.jsonl.migrated");
+
+/**
+ * Atomic file write using temp file + rename
+ * Based on learned pattern for crash-safe state persistence
+ */
+function atomicWriteFile(path: string, content: string): void {
+  const dir = join(path, "..");
+  const tempFile = `${dir}/.${Date.now()}.tmp`;
+
+  try {
+    // Write to temp file in same directory (required for atomic rename)
+    writeFileSync(tempFile, content, "utf-8");
+
+    // Atomic rename (POSIX guarantees atomicity on same filesystem)
+    renameSync(tempFile, path);
+
+    // Sync directory entry (ensures rename is flushed)
+    execSync(`sync "${dir}"`, { stdio: "ignore" });
+  } catch (error) {
+    // Cleanup temp file on error
+    try {
+      execSync(`rm -f "${tempFile}"`, { stdio: "ignore" });
+    } catch {
+      // Ignore cleanup errors
+    }
+    throw error;
+  }
+}
+
+/**
+ * Read JSONL file and parse events
+ */
+function readJSONL(path: string): SessionEvent[] {
+  try {
+    const content = readFileSync(path, "utf-8");
+    return content
+      .trim()
+      .split("\n")
+      .filter((line) => line.trim())
+      .map((line) => JSON.parse(line));
+  } catch (error) {
+    if ((error as NodeJS.ErrnoException).code === "ENOENT") {
+      return [];
+    }
+    throw error;
+  }
+}
+
+/**
+ * Build index of epic_id -> session_id from all existing session files
+ */
+function buildEpicIndex(): Map<string, string> {
+  const epicIndex = new Map<string, string>();
+
+  try {
+    const files = execSync(`ls "${SESSIONS_DIR}"/ses_*.jsonl 2>/dev/null || true`, {
+      encoding: "utf-8",
+    })
+      .trim()
+      .split("\n")
+      .filter((f) => f);
+
+    for (const sessionFile of files) {
+      const events = readJSONL(sessionFile);
+      const sessionId = events[0]?.session_id;
+
+      if (!sessionId) continue;
+
+      // Index all epic_ids in this session
+      for (const event of events) {
+        if (event.epic_id && !epicIndex.has(event.epic_id)) {
+          epicIndex.set(event.epic_id, sessionId);
+        }
+      }
+    }
+  } catch (error) {
+    console.error("Error building epic index:", error);
+  }
+
+  return epicIndex;
+}
+
+/**
+ * Generate a new session ID
+ * Format: ses_<base58-like-id>
+ */
+function generateSessionId(): string {
+  // Generate random base58-like suffix (avoiding 0, O, I, l for readability)
+  const chars = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz";
+  let suffix = "";
+  for (let i = 0; i < 22; i++) {
+    suffix += chars[Math.floor(Math.random() * chars.length)];
+  }
+
+  return `ses_${suffix}`;
+}
+
+/**
+ * Append events to a session file atomically
+ */
+function appendToSession(sessionId: string, events: SessionEvent[], dryRun: boolean): void {
+  const sessionFile = `${SESSIONS_DIR}/${sessionId}.jsonl`;
+
+  // Read existing events (if file exists)
+  const existingEvents = readJSONL(sessionFile);
+
+  // Create set of existing event fingerprints for idempotency check
+  const existingFingerprints = new Set(
+    existingEvents.map((e) =>
+      JSON.stringify({ epic_id: e.epic_id, timestamp: e.timestamp, event_type: e.event_type })
+    )
+  );
+
+  // Filter out events that already exist (idempotency)
+  const newEvents = events.filter((e) => {
+    const fingerprint = JSON.stringify({
+      epic_id: e.epic_id,
+      timestamp: e.timestamp,
+      event_type: e.event_type,
+    });
+    return !existingFingerprints.has(fingerprint);
+  });
+
+  if (newEvents.length === 0) {
+    console.log(`  → No new events to add to ${sessionId}.jsonl (all already exist)`);
+    return;
+  }
+
+  // Update session_id for all events
+  const updatedEvents = newEvents.map((e) => ({ ...e, session_id: sessionId }));
+
+  // Combine and write
+  const allEvents = [...existingEvents, ...updatedEvents];
+  const content = allEvents.map((e) => JSON.stringify(e)).join("\n") + "\n";
+
+  if (dryRun) {
+    console.log(`  → Would write ${newEvents.length} events to ${sessionId}.jsonl`);
+  } else {
+    atomicWriteFile(sessionFile, content);
+    console.log(`  → Wrote ${newEvents.length} events to ${sessionId}.jsonl`);
+  }
+}
+
+/**
+ * Main migration logic
+ */
+function migrate(dryRun: boolean = false): MigrationStats {
+  const stats: MigrationStats = {
+    totalEvents: 0,
+    migratedEvents: 0,
+    sessionsUpdated: 0,
+    sessionsCreated: 0,
+    unattributableEvents: 0,
+    eventsByEpic: new Map(),
+  };
+
+  console.log("🔍 Reading unknown.jsonl...");
+  const unknownEvents = readJSONL(UNKNOWN_FILE);
+  stats.totalEvents = unknownEvents.length;
+
+  if (stats.totalEvents === 0) {
+    console.log("✅ No events to migrate (unknown.jsonl is empty)");
+    return stats;
+  }
+
+  console.log(`📊 Found ${stats.totalEvents} events in unknown.jsonl`);
+
+  console.log("🗂️ Building epic_id index from existing sessions...");
+  const epicIndex = buildEpicIndex();
+  console.log(`📇 Indexed ${epicIndex.size} epic_ids across existing sessions`);
+
+  // Group events by target session
+  const eventsBySession = new Map<string, SessionEvent[]>();
+  const newSessions = new Set<string>();
+
+  for (const event of unknownEvents) {
+    const { epic_id } = event;
+
+    if (!epic_id) {
+      console.warn(`⚠️ Event without epic_id: ${JSON.stringify(event)}`);
+      stats.unattributableEvents++;
+      continue;
+    }
+
+    // Track events per epic
+    stats.eventsByEpic.set(epic_id, (stats.eventsByEpic.get(epic_id) || 0) + 1);
+
+    // Find or create session
+    let sessionId = epicIndex.get(epic_id);
+
+    if (!sessionId) {
+      // Create new session for this epic_id
+      sessionId = generateSessionId();
+      epicIndex.set(epic_id, sessionId);
+      newSessions.add(sessionId);
+      console.log(`🆕 Creating new session ${sessionId} for epic ${epic_id}`);
+    }
+
+    // Group events by session
+    if (!eventsBySession.has(sessionId)) {
+      eventsBySession.set(sessionId, []);
+    }
+    const sessionEvents = eventsBySession.get(sessionId);
+    if (sessionEvents) {
+      sessionEvents.push(event);
+    }
+  }
+
+  // Write events to sessions
+  console.log(`\n📝 Writing events to ${eventsBySession.size} session files...`);
+
+  for (const [sessionId, events] of eventsBySession) {
+    const isNew = newSessions.has(sessionId);
+    console.log(`\n${isNew ? "🆕" : "➕"} Session ${sessionId} (${events.length} events)`);
+
+    appendToSession(sessionId, events, dryRun);
+
+    stats.migratedEvents += events.length;
+    if (isNew) {
+      stats.sessionsCreated++;
+    } else {
+      stats.sessionsUpdated++;
+    }
+  }
+
+  // Rename unknown.jsonl to .migrated
+  if (!dryRun) {
+    console.log(`\n🏷️ Renaming unknown.jsonl to unknown.jsonl.migrated...`);
+    renameSync(UNKNOWN_FILE, MIGRATED_FILE);
+  } else {
+    console.log(`\n🏷️ Would rename unknown.jsonl to unknown.jsonl.migrated`);
+  }
+
+  return stats;
+}
+
+/**
+ * Print summary
+ */
+function printSummary(stats: MigrationStats, dryRun: boolean): void {
+  console.log("\n" + "=".repeat(60));
+  console.log(dryRun ? "DRY RUN SUMMARY" : "MIGRATION SUMMARY");
+  console.log("=".repeat(60));
+  console.log(`Total events in unknown.jsonl: ${stats.totalEvents}`);
+  console.log(`Events migrated: ${stats.migratedEvents}`);
+  console.log(`Sessions updated: ${stats.sessionsUpdated}`);
+  console.log(`Sessions created: ${stats.sessionsCreated}`);
+  console.log(`Unattributable events: ${stats.unattributableEvents}`);
+  console.log(`Unique epic_ids: ${stats.eventsByEpic.size}`);
+
+  if (stats.eventsByEpic.size > 0 && stats.eventsByEpic.size <= 10) {
+    console.log("\nEvents by epic_id:");
+    for (const [epicId, count] of Array.from(stats.eventsByEpic.entries()).sort((a, b) => b[1] - a[1])) {
+      console.log(`  ${epicId}: ${count} events`);
+    }
+  }
+
+  console.log("=".repeat(60));
+
+  if (dryRun) {
+    console.log("\n💡 Run without --dry-run to perform actual migration");
+  } else {
+    console.log("\n✅ Migration complete!");
+  }
+}
+
+// Show help
+if (process.argv.includes("--help") || process.argv.includes("-h")) {
+  console.log(`
+Migration script to re-attribute unknown.jsonl events to proper session files
+
+USAGE:
+  bun run scripts/migrate-unknown-sessions.ts [OPTIONS]
+
+OPTIONS:
+  --dry-run    Preview changes without modifying files
+  --help, -h   Show this help message
+
+DESCRIPTION:
+  This script reads events from unknown.jsonl and re-attributes them to the
+  correct session files based on their epic_id. Events are matched to existing
+  sessions, or new session files are created as needed.
+
+  The script is idempotent - running it multiple times will not duplicate events.
+
+EXAMPLES:
+  # Preview migration
+  bun run scripts/migrate-unknown-sessions.ts --dry-run
+
+  # Perform migration
+  bun run scripts/migrate-unknown-sessions.ts
+`);
+  process.exit(0);
+}
+
+// Main execution
+const dryRun = process.argv.includes("--dry-run");
+
+if (dryRun) {
+  console.log("🧪 DRY RUN MODE - No files will be modified\n");
+}
+
+try {
+  const stats = migrate(dryRun);
+  printSummary(stats, dryRun);
+  process.exit(0);
+} catch (error) {
+  console.error("\n❌ Migration failed:", error);
+  process.exit(1);
+}
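For reference, an illustrative record of the kind the migration re-attributes; the field names follow the SessionEvent interface in the script above, while the values here are made up:

// Illustrative only - not real data. Field names follow the SessionEvent interface
// in the script above; any extra fields are carried along via its index signature.
const exampleLine = JSON.stringify({
  session_id: "unknown",
  epic_id: "epic-abc123", // the key the migration uses to pick or create a target session
  timestamp: "2025-01-01T12:00:00.000Z",
  event_type: "COMPACTION", // event name assumed here for illustration
});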