opencode-swarm-plugin 0.33.0 → 0.35.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.hive/issues.jsonl +12 -0
- package/.hive/memories.jsonl +255 -1
- package/.turbo/turbo-build.log +4 -4
- package/.turbo/turbo-test.log +289 -289
- package/CHANGELOG.md +133 -0
- package/README.md +29 -1
- package/bin/swarm.test.ts +342 -1
- package/bin/swarm.ts +351 -4
- package/dist/compaction-hook.d.ts +1 -1
- package/dist/compaction-hook.d.ts.map +1 -1
- package/dist/index.d.ts +95 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +11848 -124
- package/dist/logger.d.ts +34 -0
- package/dist/logger.d.ts.map +1 -0
- package/dist/plugin.js +11722 -112
- package/dist/swarm-orchestrate.d.ts +105 -0
- package/dist/swarm-orchestrate.d.ts.map +1 -1
- package/dist/swarm-prompts.d.ts +54 -2
- package/dist/swarm-prompts.d.ts.map +1 -1
- package/dist/swarm-research.d.ts +127 -0
- package/dist/swarm-research.d.ts.map +1 -0
- package/dist/swarm-review.d.ts.map +1 -1
- package/dist/swarm.d.ts +56 -1
- package/dist/swarm.d.ts.map +1 -1
- package/evals/compaction-resumption.eval.ts +289 -0
- package/evals/coordinator-behavior.eval.ts +307 -0
- package/evals/fixtures/compaction-cases.ts +350 -0
- package/evals/scorers/compaction-scorers.ts +305 -0
- package/evals/scorers/index.ts +12 -0
- package/package.json +5 -2
- package/src/compaction-hook.test.ts +639 -1
- package/src/compaction-hook.ts +488 -18
- package/src/index.ts +29 -0
- package/src/logger.test.ts +189 -0
- package/src/logger.ts +135 -0
- package/src/swarm-decompose.ts +0 -7
- package/src/swarm-prompts.test.ts +164 -1
- package/src/swarm-prompts.ts +179 -12
- package/src/swarm-review.test.ts +177 -0
- package/src/swarm-review.ts +12 -47
package/CHANGELOG.md
CHANGED
|
@@ -1,5 +1,138 @@
|
|
|
1
1
|
# opencode-swarm-plugin
|
|
2
2
|
|
|
3
|
+
## 0.35.0
|
|
4
|
+
|
|
5
|
+
### Minor Changes
|
|
6
|
+
|
|
7
|
+
- [`084f888`](https://github.com/joelhooks/swarm-tools/commit/084f888fcac4912f594428b1ac7148c8a8aaa422) Thanks [@joelhooks](https://github.com/joelhooks)! - ## 👁️ Watch Your Swarm in Real-Time
|
|
8
|
+
|
|
9
|
+
`swarm log` now has a `--watch` mode for continuous log monitoring. No more running the command repeatedly - just sit back and watch the bees work.
|
|
10
|
+
|
|
11
|
+
```bash
|
|
12
|
+
# Watch all logs
|
|
13
|
+
swarm log --watch
|
|
14
|
+
|
|
15
|
+
# Watch with filters
|
|
16
|
+
swarm log compaction -w --level error
|
|
17
|
+
|
|
18
|
+
# Faster polling (500ms instead of default 1s)
|
|
19
|
+
swarm log --watch --interval 500
|
|
20
|
+
```
|
|
21
|
+
|
|
22
|
+
**New flags:**
|
|
23
|
+
|
|
24
|
+
- `--watch`, `-w` - Enable continuous monitoring mode
|
|
25
|
+
- `--interval <ms>` - Poll interval in milliseconds (default: 1000, min: 100)
|
|
26
|
+
|
|
27
|
+
**How it works:**
|
|
28
|
+
|
|
29
|
+
- Shows initial logs (last N lines based on `--limit`)
|
|
30
|
+
- Polls log files for new entries at the specified interval
|
|
31
|
+
- Tracks file positions for efficient incremental reads
|
|
32
|
+
- Handles log rotation gracefully (detects file truncation)
|
|
33
|
+
- All existing filters work: `--level`, `--since`, module name
|
|
34
|
+
- Clean shutdown on Ctrl+C
|
|
35
|
+
|
|
36
|
+
_"The hive that watches itself, debugs itself."_
|
|
37
|
+
|
|
38
|
+
## 0.34.0
|
|
39
|
+
|
|
40
|
+
### Minor Changes
|
|
41
|
+
|
|
42
|
+
- [`704c366`](https://github.com/joelhooks/swarm-tools/commit/704c36690fb6fd52cfb9222ddeef3b663dfdb9ed) Thanks [@joelhooks](https://github.com/joelhooks)! - ## 🪵 Pino Logging Infrastructure
|
|
43
|
+
|
|
44
|
+
> "You can't improve what you can't measure." — Peter Drucker
|
|
45
|
+
|
|
46
|
+
Finally, visibility into what the swarm is actually doing.
|
|
47
|
+
|
|
48
|
+
### What's New
|
|
49
|
+
|
|
50
|
+
**Structured Logging with Pino**
|
|
51
|
+
|
|
52
|
+
- Daily log rotation via `pino-roll` (14-day retention)
|
|
53
|
+
- Logs to `~/.config/swarm-tools/logs/`
|
|
54
|
+
- Module-specific log files (e.g., `compaction.1log`, `swarm.1log`)
|
|
55
|
+
- Pretty mode for development: `SWARM_LOG_PRETTY=1`
|
|
56
|
+
|
|
57
|
+
**Compaction Hook Instrumented**
|
|
58
|
+
|
|
59
|
+
- 14 strategic log points across all phases
|
|
60
|
+
- START: session context, trigger reason
|
|
61
|
+
- GATHER: per-source timing (hive, swarm-mail, skills)
|
|
62
|
+
- DETECT/INJECT: confidence scores, context decisions
|
|
63
|
+
- COMPLETE: duration, success, what was injected
|
|
64
|
+
|
|
65
|
+
**New CLI: `swarm log`**
|
|
66
|
+
|
|
67
|
+
```bash
|
|
68
|
+
swarm log # Tail recent logs
|
|
69
|
+
swarm log compaction # Filter by module
|
|
70
|
+
swarm log --level warn # Filter by severity
|
|
71
|
+
swarm log --since 1h # Last hour only
|
|
72
|
+
swarm log --json | jq # Pipe to jq for analysis
|
|
73
|
+
```
|
|
74
|
+
|
|
75
|
+
### Why This Matters
|
|
76
|
+
|
|
77
|
+
The compaction hook does a LOT of work with zero visibility:
|
|
78
|
+
|
|
79
|
+
- Context injection decisions
|
|
80
|
+
- Data gathering from multiple sources
|
|
81
|
+
- Template rendering and size calculations
|
|
82
|
+
|
|
83
|
+
Now you can answer: "What did compaction do on the last run?"
|
|
84
|
+
|
|
85
|
+
### Technical Details
|
|
86
|
+
|
|
87
|
+
- Pino + pino-roll for async, non-blocking file writes
|
|
88
|
+
- Child loggers for module namespacing
|
|
89
|
+
- Lazy initialization pattern for test isolation
|
|
90
|
+
- 56 new tests (10 logger + 18 compaction + 28 CLI)
|
|
91
|
+
|
|
92
|
+
Complements existing `DEBUG=swarm:*` env var approach — Pino for structured file logs, debug for stderr filtering.
|
|
93
|
+
|
|
94
|
+
### Patch Changes
|
|
95
|
+
|
|
96
|
+
- [`b5792bd`](https://github.com/joelhooks/swarm-tools/commit/b5792bd5f6aa4bf3ad9757fe351bc144e84f09af) Thanks [@joelhooks](https://github.com/joelhooks)! - ## 🎯 Coordinators Remember Who They Are
|
|
97
|
+
|
|
98
|
+
Fixed the compaction bug where coordinators lost their identity after context compression.
|
|
99
|
+
|
|
100
|
+
**The Problem:**
|
|
101
|
+
After compaction, coordinators would wake up and start doing worker tasks directly (running tests, editing files) instead of spawning workers. The injected context said "you are a coordinator" but gave worker-style resume commands.
|
|
102
|
+
|
|
103
|
+
**The Fix:**
|
|
104
|
+
`buildDynamicSwarmState()` now generates coordinator-focused context:
|
|
105
|
+
|
|
106
|
+
```
|
|
107
|
+
## 🎯 YOU ARE THE COORDINATOR
|
|
108
|
+
|
|
109
|
+
**Primary role:** Orchestrate workers, review their output, unblock dependencies.
|
|
110
|
+
**Spawn workers** for implementation tasks - don't do them yourself.
|
|
111
|
+
|
|
112
|
+
**RESUME STEPS:**
|
|
113
|
+
1. Check swarm status: `swarm_status(epic_id="bd-actual-id", ...)`
|
|
114
|
+
2. Check inbox: `swarmmail_inbox(limit=5)`
|
|
115
|
+
3. For in_progress subtasks: Review with `swarm_review`
|
|
116
|
+
4. For open subtasks: Spawn workers with `swarm_spawn_subtask`
|
|
117
|
+
5. For blocked subtasks: Investigate and unblock
|
|
118
|
+
```
|
|
119
|
+
|
|
120
|
+
Also captures specific swarm state during detection:
|
|
121
|
+
|
|
122
|
+
- Epic ID and title (not placeholders)
|
|
123
|
+
- Subtask counts by status
|
|
124
|
+
- Actual project path
|
|
125
|
+
|
|
126
|
+
**New eval infrastructure:**
|
|
127
|
+
|
|
128
|
+
- `coordinator-behavior.eval.ts` - LLM-as-judge eval testing whether Claude actually behaves like a coordinator given the injected context
|
|
129
|
+
- Scorers for coordinator tools, avoiding worker behaviors, and coordinator mindset
|
|
130
|
+
|
|
131
|
+
> "The coordinator's job is to keep the swarm cooking, not to cook themselves."
|
|
132
|
+
|
|
133
|
+
- Updated dependencies [[`a78a40d`](https://github.com/joelhooks/swarm-tools/commit/a78a40de32eb34d1738b208f2a36929a4ab6cb81), [`5a7c084`](https://github.com/joelhooks/swarm-tools/commit/5a7c084514297b5b9ca5df9459a74f18eb805b8a)]:
|
|
134
|
+
- swarm-mail@1.5.0
|
|
135
|
+
|
|
3
136
|
## 0.33.0
|
|
4
137
|
|
|
5
138
|
### Minor Changes
|
package/README.md
CHANGED
|
@@ -179,7 +179,14 @@ Auto-saves progress at milestones. Survives context death or crashes. Data store
|
|
|
179
179
|
|
|
180
180
|
---
|
|
181
181
|
|
|
182
|
-
## What's New in v0.32
|
|
182
|
+
## What's New in v0.33
|
|
183
|
+
|
|
184
|
+
- **Pino logging infrastructure** - Structured JSON logs with daily rotation to `~/.config/swarm-tools/logs/`
|
|
185
|
+
- **Compaction hook instrumented** - 14 log points across all phases (START, GATHER, RENDER, DECIDE, COMPLETE)
|
|
186
|
+
- **`swarm log` CLI** - Query/tail logs with module, level, and time filters
|
|
187
|
+
- **Analytics queries** - 5 pre-built queries based on Four Golden Signals (latency, traffic, errors, saturation, conflicts)
|
|
188
|
+
|
|
189
|
+
### v0.32
|
|
183
190
|
|
|
184
191
|
- **libSQL storage** (embedded SQLite) replaced PGLite - no external DB needed
|
|
185
192
|
- **95% integration test coverage** - checkpoint/recovery proven with 9 tests
|
|
@@ -235,6 +242,27 @@ swarm init # Initialize hive in project
|
|
|
235
242
|
swarm config # Show config file paths
|
|
236
243
|
```
|
|
237
244
|
|
|
245
|
+
### Logging & Observability
|
|
246
|
+
|
|
247
|
+
Structured Pino logging with daily rotation:
|
|
248
|
+
|
|
249
|
+
```bash
|
|
250
|
+
# Enable pretty logging during development
|
|
251
|
+
SWARM_LOG_PRETTY=1 opencode
|
|
252
|
+
|
|
253
|
+
# Query logs
|
|
254
|
+
swarm log # Tail recent logs
|
|
255
|
+
swarm log compaction # Filter by module
|
|
256
|
+
swarm log --level warn # Filter by level (warn+)
|
|
257
|
+
swarm log --since 1h # Last hour
|
|
258
|
+
swarm log --json | jq # Pipe to jq for analysis
|
|
259
|
+
```
|
|
260
|
+
|
|
261
|
+
**Log files:** `~/.config/swarm-tools/logs/`
|
|
262
|
+
- `swarm.1log`, `swarm.2log`, ... (main logs)
|
|
263
|
+
- `compaction.1log`, ... (module-specific)
|
|
264
|
+
- Daily rotation, 14-day retention
|
|
265
|
+
|
|
238
266
|
---
|
|
239
267
|
|
|
240
268
|
## Further Reading
|
package/bin/swarm.test.ts
CHANGED
|
@@ -8,7 +8,7 @@
|
|
|
8
8
|
* - rmWithStatus: logs file removal
|
|
9
9
|
*/
|
|
10
10
|
import { describe, test, expect, beforeEach, afterEach } from "bun:test";
|
|
11
|
-
import { mkdirSync, rmSync, writeFileSync, existsSync, readFileSync } from "fs";
|
|
11
|
+
import { mkdirSync, rmSync, writeFileSync, existsSync, readFileSync, readdirSync } from "fs";
|
|
12
12
|
import { join } from "path";
|
|
13
13
|
import { tmpdir } from "os";
|
|
14
14
|
|
|
@@ -192,3 +192,344 @@ READ-ONLY research agent. Never modifies code - only gathers intel and stores fi
|
|
|
192
192
|
});
|
|
193
193
|
});
|
|
194
194
|
});
|
|
195
|
+
|
|
196
|
+
// ============================================================================
|
|
197
|
+
// Log Command Tests (TDD)
|
|
198
|
+
// ============================================================================
|
|
199
|
+
|
|
200
|
+
describe("Log command helpers", () => {
|
|
201
|
+
let testDir: string;
|
|
202
|
+
|
|
203
|
+
beforeEach(() => {
|
|
204
|
+
testDir = join(tmpdir(), `swarm-log-test-${Date.now()}`);
|
|
205
|
+
mkdirSync(testDir, { recursive: true });
|
|
206
|
+
});
|
|
207
|
+
|
|
208
|
+
afterEach(() => {
|
|
209
|
+
if (existsSync(testDir)) {
|
|
210
|
+
rmSync(testDir, { recursive: true, force: true });
|
|
211
|
+
}
|
|
212
|
+
});
|
|
213
|
+
|
|
214
|
+
describe("parseLogLine", () => {
|
|
215
|
+
function parseLogLine(line: string): { level: number; time: string; module: string; msg: string } | null {
|
|
216
|
+
try {
|
|
217
|
+
const parsed = JSON.parse(line);
|
|
218
|
+
if (typeof parsed.level === "number" && parsed.time && parsed.msg) {
|
|
219
|
+
return {
|
|
220
|
+
level: parsed.level,
|
|
221
|
+
time: parsed.time,
|
|
222
|
+
module: parsed.module || "unknown",
|
|
223
|
+
msg: parsed.msg,
|
|
224
|
+
};
|
|
225
|
+
}
|
|
226
|
+
} catch {
|
|
227
|
+
// Invalid JSON
|
|
228
|
+
}
|
|
229
|
+
return null;
|
|
230
|
+
}
|
|
231
|
+
|
|
232
|
+
test("parses valid log line", () => {
|
|
233
|
+
const line = '{"level":30,"time":"2024-12-24T16:00:00.000Z","module":"compaction","msg":"started"}';
|
|
234
|
+
const result = parseLogLine(line);
|
|
235
|
+
|
|
236
|
+
expect(result).not.toBeNull();
|
|
237
|
+
expect(result?.level).toBe(30);
|
|
238
|
+
expect(result?.module).toBe("compaction");
|
|
239
|
+
expect(result?.msg).toBe("started");
|
|
240
|
+
});
|
|
241
|
+
|
|
242
|
+
test("returns null for invalid JSON", () => {
|
|
243
|
+
const line = "not json";
|
|
244
|
+
expect(parseLogLine(line)).toBeNull();
|
|
245
|
+
});
|
|
246
|
+
|
|
247
|
+
test("defaults module to 'unknown' if missing", () => {
|
|
248
|
+
const line = '{"level":30,"time":"2024-12-24T16:00:00.000Z","msg":"test"}';
|
|
249
|
+
const result = parseLogLine(line);
|
|
250
|
+
|
|
251
|
+
expect(result?.module).toBe("unknown");
|
|
252
|
+
});
|
|
253
|
+
});
|
|
254
|
+
|
|
255
|
+
describe("filterLogsByLevel", () => {
|
|
256
|
+
function filterLogsByLevel(logs: Array<{ level: number }>, minLevel: number): Array<{ level: number }> {
|
|
257
|
+
return logs.filter((log) => log.level >= minLevel);
|
|
258
|
+
}
|
|
259
|
+
|
|
260
|
+
test("filters logs by minimum level", () => {
|
|
261
|
+
const logs = [
|
|
262
|
+
{ level: 10 }, // trace
|
|
263
|
+
{ level: 30 }, // info
|
|
264
|
+
{ level: 50 }, // error
|
|
265
|
+
];
|
|
266
|
+
|
|
267
|
+
const result = filterLogsByLevel(logs, 30);
|
|
268
|
+
expect(result).toHaveLength(2);
|
|
269
|
+
expect(result[0].level).toBe(30);
|
|
270
|
+
expect(result[1].level).toBe(50);
|
|
271
|
+
});
|
|
272
|
+
|
|
273
|
+
test("includes all logs when minLevel is 0", () => {
|
|
274
|
+
const logs = [
|
|
275
|
+
{ level: 10 },
|
|
276
|
+
{ level: 20 },
|
|
277
|
+
{ level: 30 },
|
|
278
|
+
];
|
|
279
|
+
|
|
280
|
+
const result = filterLogsByLevel(logs, 0);
|
|
281
|
+
expect(result).toHaveLength(3);
|
|
282
|
+
});
|
|
283
|
+
});
|
|
284
|
+
|
|
285
|
+
describe("filterLogsByModule", () => {
|
|
286
|
+
function filterLogsByModule(logs: Array<{ module: string }>, module: string): Array<{ module: string }> {
|
|
287
|
+
return logs.filter((log) => log.module === module);
|
|
288
|
+
}
|
|
289
|
+
|
|
290
|
+
test("filters logs by exact module name", () => {
|
|
291
|
+
const logs = [
|
|
292
|
+
{ module: "compaction" },
|
|
293
|
+
{ module: "swarm" },
|
|
294
|
+
{ module: "compaction" },
|
|
295
|
+
];
|
|
296
|
+
|
|
297
|
+
const result = filterLogsByModule(logs, "compaction");
|
|
298
|
+
expect(result).toHaveLength(2);
|
|
299
|
+
});
|
|
300
|
+
|
|
301
|
+
test("returns empty array when no match", () => {
|
|
302
|
+
const logs = [
|
|
303
|
+
{ module: "compaction" },
|
|
304
|
+
];
|
|
305
|
+
|
|
306
|
+
const result = filterLogsByModule(logs, "swarm");
|
|
307
|
+
expect(result).toHaveLength(0);
|
|
308
|
+
});
|
|
309
|
+
});
|
|
310
|
+
|
|
311
|
+
describe("filterLogsBySince", () => {
|
|
312
|
+
function parseDuration(duration: string): number | null {
|
|
313
|
+
const match = duration.match(/^(\d+)([smhd])$/);
|
|
314
|
+
if (!match) return null;
|
|
315
|
+
|
|
316
|
+
const [, num, unit] = match;
|
|
317
|
+
const value = parseInt(num, 10);
|
|
318
|
+
|
|
319
|
+
const multipliers: Record<string, number> = {
|
|
320
|
+
s: 1000,
|
|
321
|
+
m: 60 * 1000,
|
|
322
|
+
h: 60 * 60 * 1000,
|
|
323
|
+
d: 24 * 60 * 60 * 1000,
|
|
324
|
+
};
|
|
325
|
+
|
|
326
|
+
return value * multipliers[unit];
|
|
327
|
+
}
|
|
328
|
+
|
|
329
|
+
function filterLogsBySince(logs: Array<{ time: string }>, sinceMs: number): Array<{ time: string }> {
|
|
330
|
+
const cutoffTime = Date.now() - sinceMs;
|
|
331
|
+
return logs.filter((log) => new Date(log.time).getTime() >= cutoffTime);
|
|
332
|
+
}
|
|
333
|
+
|
|
334
|
+
test("parseDuration handles seconds", () => {
|
|
335
|
+
expect(parseDuration("30s")).toBe(30 * 1000);
|
|
336
|
+
});
|
|
337
|
+
|
|
338
|
+
test("parseDuration handles minutes", () => {
|
|
339
|
+
expect(parseDuration("5m")).toBe(5 * 60 * 1000);
|
|
340
|
+
});
|
|
341
|
+
|
|
342
|
+
test("parseDuration handles hours", () => {
|
|
343
|
+
expect(parseDuration("2h")).toBe(2 * 60 * 60 * 1000);
|
|
344
|
+
});
|
|
345
|
+
|
|
346
|
+
test("parseDuration handles days", () => {
|
|
347
|
+
expect(parseDuration("1d")).toBe(24 * 60 * 60 * 1000);
|
|
348
|
+
});
|
|
349
|
+
|
|
350
|
+
test("parseDuration returns null for invalid format", () => {
|
|
351
|
+
expect(parseDuration("invalid")).toBeNull();
|
|
352
|
+
expect(parseDuration("30x")).toBeNull();
|
|
353
|
+
expect(parseDuration("30")).toBeNull();
|
|
354
|
+
});
|
|
355
|
+
|
|
356
|
+
test("filterLogsBySince filters old logs", () => {
|
|
357
|
+
const now = Date.now();
|
|
358
|
+
const logs = [
|
|
359
|
+
{ time: new Date(now - 10000).toISOString() }, // 10s ago
|
|
360
|
+
{ time: new Date(now - 120000).toISOString() }, // 2m ago
|
|
361
|
+
{ time: new Date(now - 1000).toISOString() }, // 1s ago
|
|
362
|
+
];
|
|
363
|
+
|
|
364
|
+
const result = filterLogsBySince(logs, 60000); // Last 1m
|
|
365
|
+
expect(result).toHaveLength(2); // Only logs within last minute
|
|
366
|
+
});
|
|
367
|
+
});
|
|
368
|
+
|
|
369
|
+
describe("formatLogLine", () => {
|
|
370
|
+
function levelToName(level: number): string {
|
|
371
|
+
if (level >= 60) return "FATAL";
|
|
372
|
+
if (level >= 50) return "ERROR";
|
|
373
|
+
if (level >= 40) return "WARN ";
|
|
374
|
+
if (level >= 30) return "INFO ";
|
|
375
|
+
if (level >= 20) return "DEBUG";
|
|
376
|
+
return "TRACE";
|
|
377
|
+
}
|
|
378
|
+
|
|
379
|
+
function formatLogLine(log: { level: number; time: string; module: string; msg: string }): string {
|
|
380
|
+
const timestamp = new Date(log.time).toLocaleTimeString();
|
|
381
|
+
const levelName = levelToName(log.level);
|
|
382
|
+
const module = log.module.padEnd(12);
|
|
383
|
+
return `${timestamp} ${levelName} ${module} ${log.msg}`;
|
|
384
|
+
}
|
|
385
|
+
|
|
386
|
+
test("formats log line with timestamp and level", () => {
|
|
387
|
+
const log = {
|
|
388
|
+
level: 30,
|
|
389
|
+
time: "2024-12-24T16:00:00.000Z",
|
|
390
|
+
module: "compaction",
|
|
391
|
+
msg: "started",
|
|
392
|
+
};
|
|
393
|
+
|
|
394
|
+
const result = formatLogLine(log);
|
|
395
|
+
expect(result).toContain("INFO");
|
|
396
|
+
expect(result).toContain("compaction");
|
|
397
|
+
expect(result).toContain("started");
|
|
398
|
+
});
|
|
399
|
+
|
|
400
|
+
test("pads module name for alignment", () => {
|
|
401
|
+
const log1 = formatLogLine({ level: 30, time: "2024-12-24T16:00:00.000Z", module: "a", msg: "test" });
|
|
402
|
+
const log2 = formatLogLine({ level: 30, time: "2024-12-24T16:00:00.000Z", module: "compaction", msg: "test" });
|
|
403
|
+
|
|
404
|
+
// Module names should be padded to 12 chars
|
|
405
|
+
expect(log1).toContain("a test"); // 'a' + 11 spaces
|
|
406
|
+
expect(log2).toContain("compaction test"); // 'compaction' + 3 spaces (10 chars + 2)
|
|
407
|
+
});
|
|
408
|
+
|
|
409
|
+
test("levelToName maps all levels correctly", () => {
|
|
410
|
+
expect(levelToName(10)).toBe("TRACE");
|
|
411
|
+
expect(levelToName(20)).toBe("DEBUG");
|
|
412
|
+
expect(levelToName(30)).toBe("INFO ");
|
|
413
|
+
expect(levelToName(40)).toBe("WARN ");
|
|
414
|
+
expect(levelToName(50)).toBe("ERROR");
|
|
415
|
+
expect(levelToName(60)).toBe("FATAL");
|
|
416
|
+
});
|
|
417
|
+
});
|
|
418
|
+
|
|
419
|
+
describe("readLogFiles", () => {
|
|
420
|
+
test("reads multiple .1log files", () => {
|
|
421
|
+
// Create test log files
|
|
422
|
+
const log1 = join(testDir, "swarm.1log");
|
|
423
|
+
const log2 = join(testDir, "swarm.2log");
|
|
424
|
+
const log3 = join(testDir, "compaction.1log");
|
|
425
|
+
|
|
426
|
+
writeFileSync(log1, '{"level":30,"time":"2024-12-24T16:00:00.000Z","msg":"line1"}\n');
|
|
427
|
+
writeFileSync(log2, '{"level":30,"time":"2024-12-24T16:00:01.000Z","msg":"line2"}\n');
|
|
428
|
+
writeFileSync(log3, '{"level":30,"time":"2024-12-24T16:00:02.000Z","module":"compaction","msg":"line3"}\n');
|
|
429
|
+
|
|
430
|
+
function readLogFiles(dir: string): string[] {
|
|
431
|
+
if (!existsSync(dir)) return [];
|
|
432
|
+
|
|
433
|
+
const files = readdirSync(dir)
|
|
434
|
+
.filter((f) => /\.\d+log$/.test(f))
|
|
435
|
+
.sort() // Sort by filename
|
|
436
|
+
.map((f) => join(dir, f));
|
|
437
|
+
|
|
438
|
+
const lines: string[] = [];
|
|
439
|
+
for (const file of files) {
|
|
440
|
+
const content = readFileSync(file, "utf-8");
|
|
441
|
+
lines.push(...content.split("\n").filter((line) => line.trim()));
|
|
442
|
+
}
|
|
443
|
+
|
|
444
|
+
return lines;
|
|
445
|
+
}
|
|
446
|
+
|
|
447
|
+
const lines = readLogFiles(testDir);
|
|
448
|
+
expect(lines).toHaveLength(3);
|
|
449
|
+
// Files are sorted alphabetically: compaction.1log, swarm.1log, swarm.2log
|
|
450
|
+
expect(lines.some((l) => l.includes("line1"))).toBe(true);
|
|
451
|
+
expect(lines.some((l) => l.includes("line2"))).toBe(true);
|
|
452
|
+
expect(lines.some((l) => l.includes("line3"))).toBe(true);
|
|
453
|
+
});
|
|
454
|
+
|
|
455
|
+
test("returns empty array for non-existent directory", () => {
|
|
456
|
+
function readLogFiles(dir: string): string[] {
|
|
457
|
+
if (!existsSync(dir)) return [];
|
|
458
|
+
return [];
|
|
459
|
+
}
|
|
460
|
+
|
|
461
|
+
const lines = readLogFiles(join(testDir, "nonexistent"));
|
|
462
|
+
expect(lines).toHaveLength(0);
|
|
463
|
+
});
|
|
464
|
+
});
|
|
465
|
+
|
|
466
|
+
describe("watchLogs", () => {
|
|
467
|
+
test("detects new log lines appended to file", async () => {
|
|
468
|
+
const logFile = join(testDir, "swarm.1log");
|
|
469
|
+
const collectedLines: string[] = [];
|
|
470
|
+
|
|
471
|
+
// Create initial log file
|
|
472
|
+
writeFileSync(logFile, '{"level":30,"time":"2024-12-24T16:00:00.000Z","msg":"initial"}\n');
|
|
473
|
+
|
|
474
|
+
// Import watch utilities
|
|
475
|
+
const { watch } = await import("fs");
|
|
476
|
+
const { appendFileSync } = await import("fs");
|
|
477
|
+
|
|
478
|
+
// Track file position for incremental reads
|
|
479
|
+
let lastSize = 0;
|
|
480
|
+
|
|
481
|
+
function readNewLines(filePath: string): string[] {
|
|
482
|
+
const content = readFileSync(filePath, "utf-8");
|
|
483
|
+
const newContent = content.slice(lastSize);
|
|
484
|
+
lastSize = content.length;
|
|
485
|
+
return newContent.split("\n").filter((line) => line.trim());
|
|
486
|
+
}
|
|
487
|
+
|
|
488
|
+
// Simulate watch behavior
|
|
489
|
+
const watcher = watch(testDir, (eventType, filename) => {
|
|
490
|
+
if (filename && /\.\d+log$/.test(filename)) {
|
|
491
|
+
const newLines = readNewLines(join(testDir, filename));
|
|
492
|
+
collectedLines.push(...newLines);
|
|
493
|
+
}
|
|
494
|
+
});
|
|
495
|
+
|
|
496
|
+
// Wait for watcher to be ready
|
|
497
|
+
await new Promise((resolve) => setTimeout(resolve, 100));
|
|
498
|
+
|
|
499
|
+
// Append new log line
|
|
500
|
+
appendFileSync(logFile, '{"level":30,"time":"2024-12-24T16:00:01.000Z","msg":"appended"}\n');
|
|
501
|
+
|
|
502
|
+
// Wait for event to fire
|
|
503
|
+
await new Promise((resolve) => setTimeout(resolve, 200));
|
|
504
|
+
|
|
505
|
+
watcher.close();
|
|
506
|
+
|
|
507
|
+
// Should have detected the new line
|
|
508
|
+
expect(collectedLines.some((l) => l.includes("appended"))).toBe(true);
|
|
509
|
+
});
|
|
510
|
+
|
|
511
|
+
test("parseWatchArgs extracts --watch flag", () => {
|
|
512
|
+
function parseWatchArgs(args: string[]): { watch: boolean; interval: number } {
|
|
513
|
+
let watch = false;
|
|
514
|
+
let interval = 1000; // default 1 second
|
|
515
|
+
|
|
516
|
+
for (let i = 0; i < args.length; i++) {
|
|
517
|
+
const arg = args[i];
|
|
518
|
+
if (arg === "--watch" || arg === "-w") {
|
|
519
|
+
watch = true;
|
|
520
|
+
} else if (arg === "--interval" && i + 1 < args.length) {
|
|
521
|
+
interval = parseInt(args[++i], 10);
|
|
522
|
+
}
|
|
523
|
+
}
|
|
524
|
+
|
|
525
|
+
return { watch, interval };
|
|
526
|
+
}
|
|
527
|
+
|
|
528
|
+
expect(parseWatchArgs(["--watch"])).toEqual({ watch: true, interval: 1000 });
|
|
529
|
+
expect(parseWatchArgs(["-w"])).toEqual({ watch: true, interval: 1000 });
|
|
530
|
+
expect(parseWatchArgs(["--watch", "--interval", "500"])).toEqual({ watch: true, interval: 500 });
|
|
531
|
+
expect(parseWatchArgs(["compaction", "--watch"])).toEqual({ watch: true, interval: 1000 });
|
|
532
|
+
expect(parseWatchArgs(["--level", "error"])).toEqual({ watch: false, interval: 1000 });
|
|
533
|
+
});
|
|
534
|
+
});
|
|
535
|
+
});
|