opencode-swarm-plugin 0.39.1 → 0.40.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/.hive/issues.jsonl +16 -0
  2. package/CHANGELOG.md +52 -0
  3. package/bin/swarm.test.ts +406 -0
  4. package/bin/swarm.ts +303 -0
  5. package/dist/compaction-hook.d.ts +8 -1
  6. package/dist/compaction-hook.d.ts.map +1 -1
  7. package/dist/compaction-observability.d.ts +173 -0
  8. package/dist/compaction-observability.d.ts.map +1 -0
  9. package/dist/eval-capture.d.ts +93 -0
  10. package/dist/eval-capture.d.ts.map +1 -1
  11. package/dist/hive.d.ts.map +1 -1
  12. package/dist/index.d.ts +36 -1
  13. package/dist/index.d.ts.map +1 -1
  14. package/dist/index.js +15670 -580
  15. package/dist/plugin.js +15623 -557
  16. package/dist/schemas/task.d.ts +3 -3
  17. package/evals/README.md +113 -0
  18. package/evals/scorers/coordinator-discipline.evalite-test.ts +163 -0
  19. package/evals/scorers/coordinator-discipline.ts +335 -2
  20. package/evals/scorers/index.test.ts +146 -0
  21. package/evals/scorers/index.ts +104 -0
  22. package/evals/swarm-decomposition.eval.ts +9 -2
  23. package/examples/commands/swarm.md +291 -21
  24. package/package.json +1 -1
  25. package/src/compaction-hook.ts +258 -110
  26. package/src/compaction-observability.integration.test.ts +139 -0
  27. package/src/compaction-observability.test.ts +187 -0
  28. package/src/compaction-observability.ts +324 -0
  29. package/src/eval-capture.test.ts +204 -1
  30. package/src/eval-capture.ts +194 -2
  31. package/src/eval-runner.test.ts +96 -0
  32. package/src/eval-runner.ts +356 -0
  33. package/src/hive.ts +34 -0
  34. package/src/index.ts +54 -1
  35. package/src/memory.test.ts +110 -0
  36. package/src/memory.ts +34 -0
  37. package/dist/beads.d.ts +0 -386
  38. package/dist/beads.d.ts.map +0 -1
  39. package/dist/schemas/bead-events.d.ts +0 -698
  40. package/dist/schemas/bead-events.d.ts.map +0 -1
  41. package/dist/schemas/bead.d.ts +0 -255
  42. package/dist/schemas/bead.d.ts.map +0 -1
package/.hive/issues.jsonl CHANGED
@@ -37,3 +37,19 @@
  {"id":"opencode-swarm-plugin--ys7z8-mjkwt9s6xoa","title":"Run migration and verify data integrity","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-25T03:55:11.430Z","updated_at":"2025-12-25T04:14:16.676Z","closed_at":"2025-12-25T04:14:16.676Z","parent_id":"opencode-swarm-plugin--ys7z8-mjkwt9rqf2s","dependencies":[],"labels":[],"comments":[]}
  {"id":"opencode-swarm-plugin--ys7z8-mjkyhrqmecc","title":"Add quality gate filters to eval session loader","description":"Filter eval sessions by quality signals: minEvents >= 3, hasWorkerSpawn, hasReviewCompleted. Currently 67 of 82 sessions are noise (<3 events). Quality gate will keep ~15 high-signal sessions.","status":"closed","priority":1,"issue_type":"epic","created_at":"2025-12-25T04:42:14.062Z","updated_at":"2025-12-25T04:49:40.809Z","closed_at":"2025-12-25T04:49:40.809Z","dependencies":[],"labels":[],"comments":[]}
  {"id":"opencode-swarm-plugin--ys7z8-mjkyhrr2qm7","title":"Add quality filter options to loadCapturedSessions with TDD","status":"closed","priority":0,"issue_type":"task","created_at":"2025-12-25T04:42:14.078Z","updated_at":"2025-12-25T04:49:39.904Z","closed_at":"2025-12-25T04:49:39.904Z","parent_id":"opencode-swarm-plugin--ys7z8-mjkyhrqmecc","dependencies":[],"labels":[],"comments":[]}
+ {"id":"opencode-swarm-plugin--ys7z8-mjl04zmvv7c","title":"Eval System Improvements: Tool + Event Capture + Scorers","description":"Improve eval system with:\n1. Plugin tool for running evals (eval_run)\n2. Capture decomposition_complete events\n3. Capture VIOLATION events\n4. Improve compaction prompt structure\n5. Add review efficiency scorer\n6. Enforce knowledge gathering validation\n\nTarget: 70% → 85% overall eval score","status":"open","priority":1,"issue_type":"epic","created_at":"2025-12-25T05:28:16.999Z","updated_at":"2025-12-25T05:28:16.999Z","dependencies":[],"labels":[],"comments":[]}
+ {"id":"opencode-swarm-plugin--ys7z8-mjl04zn4u31","title":"Add eval_run plugin tool","status":"open","priority":1,"issue_type":"task","created_at":"2025-12-25T05:28:17.008Z","updated_at":"2025-12-25T05:28:17.008Z","parent_id":"opencode-swarm-plugin--ys7z8-mjl04zmvv7c","dependencies":[],"labels":[],"comments":[]}
+ {"id":"opencode-swarm-plugin--ys7z8-mjl04znglws","title":"Capture VIOLATION events for coordinator discipline","status":"open","priority":1,"issue_type":"task","created_at":"2025-12-25T05:28:17.020Z","updated_at":"2025-12-25T05:28:17.020Z","parent_id":"opencode-swarm-plugin--ys7z8-mjl04zmvv7c","dependencies":[],"labels":[],"comments":[]}
+ {"id":"opencode-swarm-plugin--ys7z8-mjl04znlxzw","title":"Improve compaction prompt structure","status":"open","priority":2,"issue_type":"task","created_at":"2025-12-25T05:28:17.025Z","updated_at":"2025-12-25T05:28:17.025Z","parent_id":"opencode-swarm-plugin--ys7z8-mjl04zmvv7c","dependencies":[],"labels":[],"comments":[]}
+ {"id":"opencode-swarm-plugin--ys7z8-mjl04zn8by5","title":"Capture decomposition_complete event","status":"closed","priority":1,"issue_type":"task","created_at":"2025-12-25T05:28:17.012Z","updated_at":"2025-12-25T05:38:07.026Z","closed_at":"2025-12-25T05:38:07.026Z","parent_id":"opencode-swarm-plugin--ys7z8-mjl04zmvv7c","dependencies":[],"labels":[],"comments":[]}
+ {"id":"opencode-swarm-plugin--ys7z8-mjl04znn0uk","title":"Add review efficiency scorer","status":"in_progress","priority":2,"issue_type":"task","created_at":"2025-12-25T05:28:17.027Z","updated_at":"2025-12-25T05:37:03.084Z","parent_id":"opencode-swarm-plugin--ys7z8-mjl04zmvv7c","dependencies":[],"labels":[],"comments":[]}
+ {"id":"opencode-swarm-plugin--ys7z8-mjl04znqie9","title":"Update spawnEfficiency scorer fallback","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-25T05:28:17.030Z","updated_at":"2025-12-25T05:29:34.561Z","closed_at":"2025-12-25T05:29:34.561Z","parent_id":"opencode-swarm-plugin--ys7z8-mjl04zmvv7c","dependencies":[],"labels":[],"comments":[]}
+ {"id":"opencode-swarm-plugin--ys7z8-mjljadmo9mg","title":"ADR-009: Pattern Catalog and Innovation Documentation","description":"Comprehensive analysis of opencode-swarm-plugin to document all patterns, innovations, and ideas. Each research worker analyzes a specific domain, then a synthesis worker consolidates findings into ADR-009.","status":"open","priority":1,"issue_type":"epic","created_at":"2025-12-25T14:24:21.120Z","updated_at":"2025-12-25T14:24:21.120Z","dependencies":[],"labels":[],"comments":[]}
+ {"id":"opencode-swarm-plugin--ys7z8-mjljadmu3bx","title":"Research: Learning Systems (confidence decay, pattern maturity, anti-patterns)","status":"open","priority":1,"issue_type":"task","created_at":"2025-12-25T14:24:21.126Z","updated_at":"2025-12-25T14:24:21.126Z","parent_id":"opencode-swarm-plugin--ys7z8-mjljadmo9mg","dependencies":[],"labels":[],"comments":[]}
+ {"id":"opencode-swarm-plugin--ys7z8-mjljadmw66u","title":"Research: Swarm Coordination (decomposition, orchestration, review, worktree)","status":"open","priority":1,"issue_type":"task","created_at":"2025-12-25T14:24:21.128Z","updated_at":"2025-12-25T14:24:21.128Z","parent_id":"opencode-swarm-plugin--ys7z8-mjljadmo9mg","dependencies":[],"labels":[],"comments":[]}
+ {"id":"opencode-swarm-plugin--ys7z8-mjljadmyadr","title":"Research: Memory & Context Preservation (compaction, semantic memory)","status":"open","priority":1,"issue_type":"task","created_at":"2025-12-25T14:24:21.130Z","updated_at":"2025-12-25T14:24:21.130Z","parent_id":"opencode-swarm-plugin--ys7z8-mjljadmo9mg","dependencies":[],"labels":[],"comments":[]}
+ {"id":"opencode-swarm-plugin--ys7z8-mjljadn06xp","title":"Research: Observability & Evaluation (logging, eval capture, gates)","status":"open","priority":1,"issue_type":"task","created_at":"2025-12-25T14:24:21.132Z","updated_at":"2025-12-25T14:24:21.132Z","parent_id":"opencode-swarm-plugin--ys7z8-mjljadmo9mg","dependencies":[],"labels":[],"comments":[]}
+ {"id":"opencode-swarm-plugin--ys7z8-mjljadn1c2k","title":"Research: Skills System & Knowledge Injection","status":"open","priority":1,"issue_type":"task","created_at":"2025-12-25T14:24:21.133Z","updated_at":"2025-12-25T14:24:21.133Z","parent_id":"opencode-swarm-plugin--ys7z8-mjljadmo9mg","dependencies":[],"labels":[],"comments":[]}
+ {"id":"opencode-swarm-plugin--ys7z8-mjljadn7knk","title":"Research: Mandates, Guardrails & Structured Output","status":"open","priority":1,"issue_type":"task","created_at":"2025-12-25T14:24:21.139Z","updated_at":"2025-12-25T14:24:21.139Z","parent_id":"opencode-swarm-plugin--ys7z8-mjljadmo9mg","dependencies":[],"labels":[],"comments":[]}
+ {"id":"opencode-swarm-plugin--ys7z8-mjljadn8e6u","title":"Research: Existing ADRs & Documentation Gaps","status":"open","priority":1,"issue_type":"task","created_at":"2025-12-25T14:24:21.140Z","updated_at":"2025-12-25T14:24:21.140Z","parent_id":"opencode-swarm-plugin--ys7z8-mjljadmo9mg","dependencies":[],"labels":[],"comments":[]}
+ {"id":"opencode-swarm-plugin--ys7z8-mjljadnaj6o","title":"Synthesize: Write ADR-009 Pattern Catalog","status":"open","priority":0,"issue_type":"task","created_at":"2025-12-25T14:24:21.142Z","updated_at":"2025-12-25T14:24:21.142Z","parent_id":"opencode-swarm-plugin--ys7z8-mjljadmo9mg","dependencies":[],"labels":[],"comments":[]}
package/CHANGELOG.md CHANGED
@@ -1,5 +1,57 @@
  # opencode-swarm-plugin
 
+ ## 0.40.0
+
+ ### Minor Changes
+
+ - [`948e031`](https://github.com/joelhooks/swarm-tools/commit/948e0318fe5e2c1a5d695a56533fc2a2a7753887) Thanks [@joelhooks](https://github.com/joelhooks)! - ## 🔭 Observability Swarm: See What the Bees Are Doing
+
+ > "The unexamined swarm is not worth coordinating." — Socrates, probably
+
+ Four parallel workers descended on the observability stack and emerged victorious. The compaction hook no longer runs in darkness, coordinator sessions are now viewable, and the docs finally explain what all those JSONL files are for.
+
+ ### What's New
+
+ **Compaction Observability** (`src/compaction-observability.ts`)
+
+ - Metrics collector tracks phases: START → GATHER → DETECT → INJECT → COMPLETE
+ - Pattern extraction/skipping with reasons ("why didn't this get captured?")
+ - Timing breakdown per phase (analysis vs extraction vs storage)
+ - 15 tests (11 unit + 4 integration)
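
The diff does not surface the module's public API, but the bullets above imply a small one: phase markers, per-phase timings, and skip reasons. A minimal sketch of what such a collector could look like; all names here (`CompactionMetrics`, `startPhase`, `recordSkip`) are illustrative, not the plugin's actual exports:

```typescript
// Illustrative sketch only: the real collector lives in src/compaction-observability.ts
// and its actual API is not shown in this diff.
type CompactionPhase = "START" | "GATHER" | "DETECT" | "INJECT" | "COMPLETE";

class CompactionMetrics {
  private timings: { phase: CompactionPhase; startedAt: number; durationMs?: number }[] = [];
  private extracted: string[] = [];
  private skipped: { pattern: string; reason: string }[] = [];

  startPhase(phase: CompactionPhase): void {
    this.timings.push({ phase, startedAt: Date.now() });
  }

  endPhase(phase: CompactionPhase): void {
    const open = this.timings.find((t) => t.phase === phase && t.durationMs === undefined);
    if (open) open.durationMs = Date.now() - open.startedAt;
  }

  recordExtraction(pattern: string): void {
    this.extracted.push(pattern);
  }

  // Answers "why didn't this get captured?"
  recordSkip(pattern: string, reason: string): void {
    this.skipped.push({ pattern, reason });
  }

  summary() {
    return {
      timings: this.timings,
      extractedCount: this.extracted.length,
      skipped: this.skipped,
    };
  }
}
```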
+
+ **`swarm log sessions` CLI**
+
+ - `swarm log sessions` — list all captured coordinator sessions
+ - `swarm log sessions <id>` — view events for a session (partial ID matching)
+ - `swarm log sessions --latest` — quick access to most recent
+ - `--type`, `--since`, `--limit`, `--json` filters
+ - 64 tests covering parsing, listing, filtering
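
Each captured session is a JSONL file, one event per line. Judging from the `CoordinatorEvent` fixtures in the `bin/swarm.test.ts` hunk further down, the event shape is roughly the following; the authoritative type lives in `src/eval-capture.ts`:

```typescript
// Approximate shape, inferred from the test fixtures below; not the
// canonical definition (see CoordinatorEvent in src/eval-capture.ts).
interface CoordinatorEventShape {
  session_id: string;                 // e.g. "ses_abc123"
  epic_id: string;
  timestamp: string;                  // ISO 8601
  event_type: "DECISION" | "VIOLATION" | "OUTCOME";
  decision_type?: string;             // e.g. "worker_spawned", "review_completed"
  violation_type?: string;            // e.g. "coordinator_edited_file"
  outcome_type?: string;              // e.g. "subtask_success"
  payload: Record<string, unknown>;
}
```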
+
+ **Coordinator Observability Docs**
+
+ - AGENTS.md: overview with quick commands
+ - evals/README.md: deep dive with ASCII flow diagrams, event type reference, JSONL examples, jq recipes
+
+ **Research: Coordinator Prompt Eval** (`.hive/analysis/coordinator-prompt-eval-research.md`)
+
+ - 26KB analysis of prompt iteration strategies
+ - Recommends: versioning + evalite (defer LLM-as-Judge to v0.34+)
+ - Implementation plan with effort estimates
+
+ ### The Observability Story
+
+ ```
+ CAPTURE ──────────► VIEW ──────────► SCORE
+ (eval-capture.ts)   (swarm log       (coordinator
+                      sessions)        evals)
+ ```
+
+ Now you can answer:
+
+ - "What did the last 10 compaction runs extract?"
+ - "Why didn't this pattern get captured?"
+ - "Which coordinator sessions had violations?"
+
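For the last of those questions, the `--type` filter on `swarm log sessions` covers the interactive case. Programmatically it is a few lines over the captured JSONL; here is a rough sketch mirroring the parsing logic in the tests below (the sessions directory path is a placeholder, not the plugin's actual location):

```typescript
import { readdirSync, readFileSync } from "node:fs";
import { join } from "node:path";

// Placeholder path: point this at wherever eval-capture writes session logs.
const sessionsDir = join(process.env.HOME ?? ".", ".hive", "sessions");

for (const file of readdirSync(sessionsDir).filter((f) => f.endsWith(".jsonl"))) {
  const events = readFileSync(join(sessionsDir, file), "utf-8")
    .split("\n")
    .filter((line) => line.trim())
    .map((line) => {
      try {
        return JSON.parse(line) as { event_type?: string };
      } catch {
        return null; // skip invalid JSON lines, as the test helpers do
      }
    });

  const violations = events.filter((e) => e?.event_type === "VIOLATION");
  if (violations.length > 0) {
    console.log(`${file}: ${violations.length} violation(s)`);
  }
}
```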
  ## 0.39.1
 
  ### Patch Changes
package/bin/swarm.test.ts CHANGED
@@ -197,6 +197,412 @@ READ-ONLY research agent. Never modifies code - only gathers intel and stores fi
  // Log Command Tests (TDD)
  // ============================================================================
 
+ // ============================================================================
+ // Session Log Tests (TDD)
+ // ============================================================================
+
+ import type { CoordinatorEvent } from "../src/eval-capture";
+
+ const TEST_SESSIONS_DIR = join(tmpdir(), "swarm-test-sessions");
+
+ describe("swarm log sessions", () => {
+   beforeEach(() => {
+     // Create test sessions directory
+     if (!existsSync(TEST_SESSIONS_DIR)) {
+       mkdirSync(TEST_SESSIONS_DIR, { recursive: true });
+     }
+   });
+
+   afterEach(() => {
+     // Cleanup test directory
+     if (existsSync(TEST_SESSIONS_DIR)) {
+       rmSync(TEST_SESSIONS_DIR, { recursive: true, force: true });
+     }
+   });
+
+   // ========================================================================
+   // Helper Functions (to be implemented in swarm.ts)
+   // ========================================================================
+
+   function createTestSession(
+     sessionId: string,
+     epicId: string,
+     eventCount: number,
+     baseTimestamp?: number,
+   ): void {
+     const filePath = join(TEST_SESSIONS_DIR, `${sessionId}.jsonl`);
+     const lines: string[] = [];
+     const base = baseTimestamp || Date.now();
+
+     for (let i = 0; i < eventCount; i++) {
+       const event: CoordinatorEvent = {
+         session_id: sessionId,
+         epic_id: epicId,
+         timestamp: new Date(base - (eventCount - i) * 1000).toISOString(),
+         event_type: "DECISION",
+         decision_type: "worker_spawned",
+         payload: { worker_id: `worker-${i}` },
+       };
+       lines.push(JSON.stringify(event));
+     }
+
+     writeFileSync(filePath, lines.join("\n") + "\n");
+   }
+
+   /**
+    * Parse a session file and return events
+    */
+   function parseSessionFile(filePath: string): CoordinatorEvent[] {
+     if (!existsSync(filePath)) {
+       throw new Error(`Session file not found: ${filePath}`);
+     }
+
+     const content = readFileSync(filePath, "utf-8");
+     const lines = content.split("\n").filter((line) => line.trim());
+     const events: CoordinatorEvent[] = [];
+
+     for (const line of lines) {
+       try {
+         const parsed = JSON.parse(line);
+         events.push(parsed);
+       } catch {
+         // Skip invalid JSON lines
+       }
+     }
+
+     return events;
+   }
+
+   /**
+    * List all session files in a directory
+    */
+   function listSessionFiles(
+     dir: string,
+   ): Array<{
+     session_id: string;
+     file_path: string;
+     event_count: number;
+     start_time: string;
+     end_time?: string;
+   }> {
+     if (!existsSync(dir)) return [];
+
+     const files = readdirSync(dir).filter((f) => f.endsWith(".jsonl"));
+     const sessions: Array<{
+       session_id: string;
+       file_path: string;
+       event_count: number;
+       start_time: string;
+       end_time?: string;
+     }> = [];
+
+     for (const file of files) {
+       const filePath = join(dir, file);
+       try {
+         const events = parseSessionFile(filePath);
+         if (events.length === 0) continue;
+
+         const timestamps = events.map((e) => new Date(e.timestamp).getTime());
+         const startTime = new Date(Math.min(...timestamps)).toISOString();
+         const endTime =
+           timestamps.length > 1
+             ? new Date(Math.max(...timestamps)).toISOString()
+             : undefined;
+
+         sessions.push({
+           session_id: events[0].session_id,
+           file_path: filePath,
+           event_count: events.length,
+           start_time: startTime,
+           end_time: endTime,
+         });
+       } catch {
+         // Skip invalid files
+       }
+     }
+
+     // Sort by start time (newest first)
+     return sessions.sort((a, b) =>
+       new Date(b.start_time).getTime() - new Date(a.start_time).getTime()
+     );
+   }
+
+   /**
+    * Get the latest session file
+    */
+   function getLatestSession(
+     dir: string,
+   ): {
+     session_id: string;
+     file_path: string;
+     event_count: number;
+     start_time: string;
+     end_time?: string;
+   } | null {
+     const sessions = listSessionFiles(dir);
+     return sessions.length > 0 ? sessions[0] : null;
+   }
+
+   /**
+    * Filter events by type
+    */
+   function filterEventsByType(
+     events: CoordinatorEvent[],
+     eventType: string,
+   ): CoordinatorEvent[] {
+     if (eventType === "all") return events;
+     return events.filter((e) => e.event_type === eventType.toUpperCase());
+   }
+
+   /**
+    * Filter events by time
+    */
+   function filterEventsSince(
+     events: CoordinatorEvent[],
+     sinceMs: number,
+   ): CoordinatorEvent[] {
+     const cutoffTime = Date.now() - sinceMs;
+     return events.filter((e) =>
+       new Date(e.timestamp).getTime() >= cutoffTime
+     );
+   }
+
+   // ========================================================================
+   // Tests
+   // ========================================================================
+
+   describe("listSessionFiles", () => {
+     test("returns empty array when directory doesn't exist", () => {
+       const result = listSessionFiles("/nonexistent/directory");
+       expect(result).toEqual([]);
+     });
+
+     test("returns empty array when directory is empty", () => {
+       const result = listSessionFiles(TEST_SESSIONS_DIR);
+       expect(result).toEqual([]);
+     });
+
+     test("lists all session files with metadata", () => {
+       createTestSession("ses_abc123", "epic-1", 5);
+       createTestSession("ses_def456", "epic-2", 3);
+
+       const result = listSessionFiles(TEST_SESSIONS_DIR);
+
+       expect(result).toHaveLength(2);
+       expect(result[0].session_id).toMatch(/^ses_/);
+       expect(result[0].event_count).toBeGreaterThan(0);
+       expect(result[0].start_time).toBeTruthy();
+     });
+
+     test("calculates event count correctly", () => {
+       createTestSession("ses_test", "epic-1", 10);
+
+       const result = listSessionFiles(TEST_SESSIONS_DIR);
+
+       expect(result[0].event_count).toBe(10);
+     });
+
+     test("extracts start and end times from events", () => {
+       createTestSession("ses_test", "epic-1", 5);
+
+       const result = listSessionFiles(TEST_SESSIONS_DIR);
+
+       expect(result[0].start_time).toBeTruthy();
+       expect(new Date(result[0].start_time).getTime()).toBeLessThan(Date.now());
+     });
+
+     test("sorts sessions by start time (newest first)", () => {
+       // Create sessions with explicit different timestamps
+       const oldTime = Date.now() - 60000; // 1 minute ago
+       const newTime = Date.now();
+
+       createTestSession("ses_old", "epic-1", 2, oldTime);
+       createTestSession("ses_new", "epic-2", 2, newTime);
+
+       const result = listSessionFiles(TEST_SESSIONS_DIR);
+
+       expect(result[0].session_id).toBe("ses_new");
+       expect(result[1].session_id).toBe("ses_old");
+     });
+   });
+
+   describe("parseSessionFile", () => {
+     test("parses valid JSONL session file", () => {
+       createTestSession("ses_parse", "epic-1", 3);
+       const filePath = join(TEST_SESSIONS_DIR, "ses_parse.jsonl");
+
+       const events = parseSessionFile(filePath);
+
+       expect(events).toHaveLength(3);
+       expect(events[0].session_id).toBe("ses_parse");
+       expect(events[0].event_type).toBe("DECISION");
+     });
+
+     test("handles file with trailing newlines", () => {
+       const filePath = join(TEST_SESSIONS_DIR, "ses_trailing.jsonl");
+       writeFileSync(
+         filePath,
+         '{"session_id":"test","epic_id":"e1","timestamp":"2025-01-01T00:00:00Z","event_type":"DECISION","decision_type":"worker_spawned","payload":{}}\n\n\n',
+       );
+
+       const events = parseSessionFile(filePath);
+
+       expect(events).toHaveLength(1);
+     });
+
+     test("skips invalid JSON lines", () => {
+       const filePath = join(TEST_SESSIONS_DIR, "ses_invalid.jsonl");
+       writeFileSync(
+         filePath,
+         '{"session_id":"test","epic_id":"e1","timestamp":"2025-01-01T00:00:00Z","event_type":"DECISION","decision_type":"worker_spawned","payload":{}}\ninvalid json\n{"session_id":"test","epic_id":"e1","timestamp":"2025-01-01T00:00:00Z","event_type":"OUTCOME","outcome_type":"subtask_success","payload":{}}\n',
+       );
+
+       const events = parseSessionFile(filePath);
+
+       expect(events).toHaveLength(2);
+     });
+
+     test("throws error for non-existent file", () => {
+       expect(() => parseSessionFile("/nonexistent/file.jsonl")).toThrow();
+     });
+   });
+
+   describe("getLatestSession", () => {
+     test("returns null when directory is empty", () => {
+       const result = getLatestSession(TEST_SESSIONS_DIR);
+       expect(result).toBeNull();
+     });
+
+     test("returns the most recent session", () => {
+       const oldTime = Date.now() - 60000; // 1 minute ago
+       const newTime = Date.now();
+
+       createTestSession("ses_old", "epic-1", 2, oldTime);
+       createTestSession("ses_new", "epic-2", 3, newTime);
+
+       const result = getLatestSession(TEST_SESSIONS_DIR);
+
+       expect(result).not.toBeNull();
+       expect(result!.session_id).toBe("ses_new");
+     });
+   });
+
+   describe("filterEventsByType", () => {
+     test("filters DECISION events only", () => {
+       const events: CoordinatorEvent[] = [
+         {
+           session_id: "s1",
+           epic_id: "e1",
+           timestamp: "2025-01-01T00:00:00Z",
+           event_type: "DECISION",
+           decision_type: "worker_spawned",
+           payload: {},
+         },
+         {
+           session_id: "s1",
+           epic_id: "e1",
+           timestamp: "2025-01-01T00:01:00Z",
+           event_type: "VIOLATION",
+           violation_type: "coordinator_edited_file",
+           payload: {},
+         },
+         {
+           session_id: "s1",
+           epic_id: "e1",
+           timestamp: "2025-01-01T00:02:00Z",
+           event_type: "DECISION",
+           decision_type: "review_completed",
+           payload: {},
+         },
+       ];
+
+       const result = filterEventsByType(events, "DECISION");
+
+       expect(result).toHaveLength(2);
+       expect(result.every((e) => e.event_type === "DECISION")).toBe(true);
+     });
+
+     test("returns all events when type is 'all'", () => {
+       const events: CoordinatorEvent[] = [
+         {
+           session_id: "s1",
+           epic_id: "e1",
+           timestamp: "2025-01-01T00:00:00Z",
+           event_type: "DECISION",
+           decision_type: "worker_spawned",
+           payload: {},
+         },
+         {
+           session_id: "s1",
+           epic_id: "e1",
+           timestamp: "2025-01-01T00:01:00Z",
+           event_type: "VIOLATION",
+           violation_type: "coordinator_edited_file",
+           payload: {},
+         },
+       ];
+
+       const result = filterEventsByType(events, "all");
+
+       expect(result).toHaveLength(2);
+     });
+   });
+
+   describe("filterEventsSince", () => {
+     test("filters events within time window", () => {
+       const now = Date.now();
+       const events: CoordinatorEvent[] = [
+         {
+           session_id: "s1",
+           epic_id: "e1",
+           timestamp: new Date(now - 10000).toISOString(), // 10s ago
+           event_type: "DECISION",
+           decision_type: "worker_spawned",
+           payload: {},
+         },
+         {
+           session_id: "s1",
+           epic_id: "e1",
+           timestamp: new Date(now - 60000).toISOString(), // 1m ago
+           event_type: "VIOLATION",
+           violation_type: "coordinator_edited_file",
+           payload: {},
+         },
+         {
+           session_id: "s1",
+           epic_id: "e1",
+           timestamp: new Date(now - 3000).toISOString(), // 3s ago
+           event_type: "OUTCOME",
+           outcome_type: "subtask_success",
+           payload: {},
+         },
+       ];
+
+       const result = filterEventsSince(events, 30000); // Last 30s
+
+       expect(result).toHaveLength(2); // 10s and 3s ago
+     });
+
+     test("returns all events when sinceMs is very large", () => {
+       const now = Date.now();
+       const events: CoordinatorEvent[] = [
+         {
+           session_id: "s1",
+           epic_id: "e1",
+           timestamp: new Date(now - 1000).toISOString(),
+           event_type: "DECISION",
+           decision_type: "worker_spawned",
+           payload: {},
+         },
+       ];
+
+       const result = filterEventsSince(events, 86400000); // 1 day
+
+       expect(result).toHaveLength(1);
+     });
+   });
+ });
+
  // ============================================================================
  // Cells Command Tests (TDD)
  // ============================================================================