opencode-swarm-plugin 0.33.0 → 0.34.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.hive/issues.jsonl +12 -0
- package/.hive/memories.jsonl +255 -1
- package/.turbo/turbo-build.log +4 -4
- package/.turbo/turbo-test.log +289 -289
- package/CHANGELOG.md +98 -0
- package/README.md +29 -1
- package/bin/swarm.test.ts +272 -1
- package/bin/swarm.ts +226 -4
- package/dist/compaction-hook.d.ts +1 -1
- package/dist/compaction-hook.d.ts.map +1 -1
- package/dist/index.d.ts +95 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +11848 -124
- package/dist/logger.d.ts +34 -0
- package/dist/logger.d.ts.map +1 -0
- package/dist/plugin.js +11722 -112
- package/dist/swarm-orchestrate.d.ts +105 -0
- package/dist/swarm-orchestrate.d.ts.map +1 -1
- package/dist/swarm-prompts.d.ts +54 -2
- package/dist/swarm-prompts.d.ts.map +1 -1
- package/dist/swarm-research.d.ts +127 -0
- package/dist/swarm-research.d.ts.map +1 -0
- package/dist/swarm-review.d.ts.map +1 -1
- package/dist/swarm.d.ts +56 -1
- package/dist/swarm.d.ts.map +1 -1
- package/evals/compaction-resumption.eval.ts +289 -0
- package/evals/coordinator-behavior.eval.ts +307 -0
- package/evals/fixtures/compaction-cases.ts +350 -0
- package/evals/scorers/compaction-scorers.ts +305 -0
- package/evals/scorers/index.ts +12 -0
- package/package.json +5 -2
- package/src/compaction-hook.test.ts +617 -1
- package/src/compaction-hook.ts +291 -18
- package/src/index.ts +29 -0
- package/src/logger.test.ts +189 -0
- package/src/logger.ts +135 -0
- package/src/swarm-prompts.test.ts +164 -1
- package/src/swarm-prompts.ts +178 -4
- package/src/swarm-review.test.ts +177 -0
- package/src/swarm-review.ts +12 -47
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,103 @@
 # opencode-swarm-plugin
 
+## 0.34.0
+
+### Minor Changes
+
+- [`704c366`](https://github.com/joelhooks/swarm-tools/commit/704c36690fb6fd52cfb9222ddeef3b663dfdb9ed) Thanks [@joelhooks](https://github.com/joelhooks)! - ## 🪵 Pino Logging Infrastructure
+
+  > "You can't improve what you can't measure." — Peter Drucker
+
+  Finally, visibility into what the swarm is actually doing.
+
+  ### What's New
+
+  **Structured Logging with Pino**
+
+  - Daily log rotation via `pino-roll` (14-day retention)
+  - Logs to `~/.config/swarm-tools/logs/`
+  - Module-specific log files (e.g., `compaction.1log`, `swarm.1log`)
+  - Pretty mode for development: `SWARM_LOG_PRETTY=1`
+
+  **Compaction Hook Instrumented**
+
+  - 14 strategic log points across all phases
+  - START: session context, trigger reason
+  - GATHER: per-source timing (hive, swarm-mail, skills)
+  - DETECT/INJECT: confidence scores, context decisions
+  - COMPLETE: duration, success, what was injected
+
+  **New CLI: `swarm log`**
+
+  ```bash
+  swarm log              # Tail recent logs
+  swarm log compaction   # Filter by module
+  swarm log --level warn # Filter by severity
+  swarm log --since 1h   # Last hour only
+  swarm log --json | jq  # Pipe to jq for analysis
+  ```
+
+  ### Why This Matters
+
+  The compaction hook does a LOT of work with zero visibility:
+
+  - Context injection decisions
+  - Data gathering from multiple sources
+  - Template rendering and size calculations
+
+  Now you can answer: "What did compaction do on the last run?"
+
+  ### Technical Details
+
+  - Pino + pino-roll for async, non-blocking file writes
+  - Child loggers for module namespacing
+  - Lazy initialization pattern for test isolation
+  - 56 new tests (10 logger + 18 compaction + 28 CLI)
+
+  Complements existing `DEBUG=swarm:*` env var approach — Pino for structured file logs, debug for stderr filtering.
+
+### Patch Changes
+
+- [`b5792bd`](https://github.com/joelhooks/swarm-tools/commit/b5792bd5f6aa4bf3ad9757fe351bc144e84f09af) Thanks [@joelhooks](https://github.com/joelhooks)! - ## 🎯 Coordinators Remember Who They Are
+
+  Fixed the compaction bug where coordinators lost their identity after context compression.
+
+  **The Problem:**
+  After compaction, coordinators would wake up and start doing worker tasks directly (running tests, editing files) instead of spawning workers. The injected context said "you are a coordinator" but gave worker-style resume commands.
+
+  **The Fix:**
+  `buildDynamicSwarmState()` now generates coordinator-focused context:
+
+  ```
+  ## 🎯 YOU ARE THE COORDINATOR
+
+  **Primary role:** Orchestrate workers, review their output, unblock dependencies.
+  **Spawn workers** for implementation tasks - don't do them yourself.
+
+  **RESUME STEPS:**
+  1. Check swarm status: `swarm_status(epic_id="bd-actual-id", ...)`
+  2. Check inbox: `swarmmail_inbox(limit=5)`
+  3. For in_progress subtasks: Review with `swarm_review`
+  4. For open subtasks: Spawn workers with `swarm_spawn_subtask`
+  5. For blocked subtasks: Investigate and unblock
+  ```
+
+  Also captures specific swarm state during detection:
+
+  - Epic ID and title (not placeholders)
+  - Subtask counts by status
+  - Actual project path
+
+  **New eval infrastructure:**
+
+  - `coordinator-behavior.eval.ts` - LLM-as-judge eval testing whether Claude actually behaves like a coordinator given the injected context
+  - Scorers for coordinator tools, avoiding worker behaviors, and coordinator mindset
+
+  > "The coordinator's job is to keep the swarm cooking, not to cook themselves."
+
+- Updated dependencies [[`a78a40d`](https://github.com/joelhooks/swarm-tools/commit/a78a40de32eb34d1738b208f2a36929a4ab6cb81), [`5a7c084`](https://github.com/joelhooks/swarm-tools/commit/5a7c084514297b5b9ca5df9459a74f18eb805b8a)]:
+  - swarm-mail@1.5.0
+
 ## 0.33.0
 
 ### Minor Changes
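Note: the diff summary lists `package/src/logger.ts` (+135), but its source is not shown in this diff, so the following is only a minimal sketch of the setup the changelog describes: lazy initialization, a `pino-roll` daily-rotation transport, pretty mode behind `SWARM_LOG_PRETTY`, and per-module child loggers. The exact `pino-roll` option values and the `SWARM_LOG_LEVEL` variable are assumptions, not the package's actual code.

```ts
// Sketch only - not the shipped src/logger.ts.
import pino, { type Logger } from "pino";
import { join } from "path";
import { homedir } from "os";

let root: Logger | null = null;

// Lazy initialization: nothing touches the filesystem until the first logger is
// requested, which keeps unit tests isolated from ~/.config/swarm-tools/logs/.
function getRootLogger(): Logger {
  if (root) return root;

  const logsDir = join(homedir(), ".config", "swarm-tools", "logs");

  const transport = process.env.SWARM_LOG_PRETTY
    ? pino.transport({ target: "pino-pretty" }) // human-readable output for development
    : pino.transport({
        target: "pino-roll",
        options: {
          file: join(logsDir, "swarm"), // rolled files are numbered, e.g. swarm.1log
          frequency: "daily",           // rotate once per day
          limit: { count: 14 },         // keep roughly 14 days of files
          mkdir: true,                  // create the logs directory if missing
        },
      });

  // SWARM_LOG_LEVEL is hypothetical; the package may hard-code a level instead.
  root = pino({ level: process.env.SWARM_LOG_LEVEL ?? "info" }, transport);
  return root;
}

// Child loggers stamp every line with a `module` field, which is what
// `swarm log <module>` filters on.
export function getLogger(module: string): Logger {
  return getRootLogger().child({ module });
}
```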
package/README.md
CHANGED
@@ -179,7 +179,14 @@ Auto-saves progress at milestones. Survives context death or crashes. Data store
 
 ---
 
-## What's New in v0.
+## What's New in v0.33
+
+- **Pino logging infrastructure** - Structured JSON logs with daily rotation to `~/.config/swarm-tools/logs/`
+- **Compaction hook instrumented** - 14 log points across all phases (START, GATHER, RENDER, DECIDE, COMPLETE)
+- **`swarm log` CLI** - Query/tail logs with module, level, and time filters
+- **Analytics queries** - 5 pre-built queries based on Four Golden Signals (latency, traffic, errors, saturation, conflicts)
+
+### v0.32
 
 - **libSQL storage** (embedded SQLite) replaced PGLite - no external DB needed
 - **95% integration test coverage** - checkpoint/recovery proven with 9 tests
@@ -235,6 +242,27 @@ swarm init # Initialize hive in project
 swarm config           # Show config file paths
 ```
 
+### Logging & Observability
+
+Structured Pino logging with daily rotation:
+
+```bash
+# Enable pretty logging during development
+SWARM_LOG_PRETTY=1 opencode
+
+# Query logs
+swarm log              # Tail recent logs
+swarm log compaction   # Filter by module
+swarm log --level warn # Filter by level (warn+)
+swarm log --since 1h   # Last hour
+swarm log --json | jq  # Pipe to jq for analysis
+```
+
+**Log files:** `~/.config/swarm-tools/logs/`
+- `swarm.1log`, `swarm.2log`, ... (main logs)
+- `compaction.1log`, ... (module-specific)
+- Daily rotation, 14-day retention
+
 ---
 
 ## Further Reading
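Note: the "Analytics queries" bullet above is not expanded in this README hunk. As an illustration of the kind of Four Golden Signals query it refers to, here is a small sketch that counts errors per module from `swarm log --json` output; the `{ logs: [...] }` shape matches the CLI implementation later in this diff, while the helper itself is hypothetical.

```ts
// Illustrative helper, not part of the package: errors-per-module over `swarm log --json` output.
interface LogLine {
  level: number; // pino numeric levels: 30 info, 40 warn, 50 error, 60 fatal
  time: string;
  module: string;
  msg: string;
}

function errorsByModule(output: { logs: LogLine[] }): Record<string, number> {
  const counts: Record<string, number> = {};
  for (const log of output.logs) {
    if (log.level >= 50) {
      counts[log.module] = (counts[log.module] ?? 0) + 1;
    }
  }
  return counts;
}

// Usage (assumed): feed it the parsed output of `swarm log --json --since 1d`,
// e.g. errorsByModule(JSON.parse(stdout)).
```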
package/bin/swarm.test.ts
CHANGED
@@ -8,7 +8,7 @@
  * - rmWithStatus: logs file removal
  */
 import { describe, test, expect, beforeEach, afterEach } from "bun:test";
-import { mkdirSync, rmSync, writeFileSync, existsSync, readFileSync } from "fs";
+import { mkdirSync, rmSync, writeFileSync, existsSync, readFileSync, readdirSync } from "fs";
 import { join } from "path";
 import { tmpdir } from "os";
 
@@ -192,3 +192,274 @@ READ-ONLY research agent. Never modifies code - only gathers intel and stores fi
 });
 });
 });
+
+// ============================================================================
+// Log Command Tests (TDD)
+// ============================================================================
+
+describe("Log command helpers", () => {
+  let testDir: string;
+
+  beforeEach(() => {
+    testDir = join(tmpdir(), `swarm-log-test-${Date.now()}`);
+    mkdirSync(testDir, { recursive: true });
+  });
+
+  afterEach(() => {
+    if (existsSync(testDir)) {
+      rmSync(testDir, { recursive: true, force: true });
+    }
+  });
+
+  describe("parseLogLine", () => {
+    function parseLogLine(line: string): { level: number; time: string; module: string; msg: string } | null {
+      try {
+        const parsed = JSON.parse(line);
+        if (typeof parsed.level === "number" && parsed.time && parsed.msg) {
+          return {
+            level: parsed.level,
+            time: parsed.time,
+            module: parsed.module || "unknown",
+            msg: parsed.msg,
+          };
+        }
+      } catch {
+        // Invalid JSON
+      }
+      return null;
+    }
+
+    test("parses valid log line", () => {
+      const line = '{"level":30,"time":"2024-12-24T16:00:00.000Z","module":"compaction","msg":"started"}';
+      const result = parseLogLine(line);
+
+      expect(result).not.toBeNull();
+      expect(result?.level).toBe(30);
+      expect(result?.module).toBe("compaction");
+      expect(result?.msg).toBe("started");
+    });
+
+    test("returns null for invalid JSON", () => {
+      const line = "not json";
+      expect(parseLogLine(line)).toBeNull();
+    });
+
+    test("defaults module to 'unknown' if missing", () => {
+      const line = '{"level":30,"time":"2024-12-24T16:00:00.000Z","msg":"test"}';
+      const result = parseLogLine(line);
+
+      expect(result?.module).toBe("unknown");
+    });
+  });
+
+  describe("filterLogsByLevel", () => {
+    function filterLogsByLevel(logs: Array<{ level: number }>, minLevel: number): Array<{ level: number }> {
+      return logs.filter((log) => log.level >= minLevel);
+    }
+
+    test("filters logs by minimum level", () => {
+      const logs = [
+        { level: 10 }, // trace
+        { level: 30 }, // info
+        { level: 50 }, // error
+      ];
+
+      const result = filterLogsByLevel(logs, 30);
+      expect(result).toHaveLength(2);
+      expect(result[0].level).toBe(30);
+      expect(result[1].level).toBe(50);
+    });
+
+    test("includes all logs when minLevel is 0", () => {
+      const logs = [
+        { level: 10 },
+        { level: 20 },
+        { level: 30 },
+      ];
+
+      const result = filterLogsByLevel(logs, 0);
+      expect(result).toHaveLength(3);
+    });
+  });
+
+  describe("filterLogsByModule", () => {
+    function filterLogsByModule(logs: Array<{ module: string }>, module: string): Array<{ module: string }> {
+      return logs.filter((log) => log.module === module);
+    }
+
+    test("filters logs by exact module name", () => {
+      const logs = [
+        { module: "compaction" },
+        { module: "swarm" },
+        { module: "compaction" },
+      ];
+
+      const result = filterLogsByModule(logs, "compaction");
+      expect(result).toHaveLength(2);
+    });
+
+    test("returns empty array when no match", () => {
+      const logs = [
+        { module: "compaction" },
+      ];
+
+      const result = filterLogsByModule(logs, "swarm");
+      expect(result).toHaveLength(0);
+    });
+  });
+
+  describe("filterLogsBySince", () => {
+    function parseDuration(duration: string): number | null {
+      const match = duration.match(/^(\d+)([smhd])$/);
+      if (!match) return null;
+
+      const [, num, unit] = match;
+      const value = parseInt(num, 10);
+
+      const multipliers: Record<string, number> = {
+        s: 1000,
+        m: 60 * 1000,
+        h: 60 * 60 * 1000,
+        d: 24 * 60 * 60 * 1000,
+      };
+
+      return value * multipliers[unit];
+    }
+
+    function filterLogsBySince(logs: Array<{ time: string }>, sinceMs: number): Array<{ time: string }> {
+      const cutoffTime = Date.now() - sinceMs;
+      return logs.filter((log) => new Date(log.time).getTime() >= cutoffTime);
+    }
+
+    test("parseDuration handles seconds", () => {
+      expect(parseDuration("30s")).toBe(30 * 1000);
+    });
+
+    test("parseDuration handles minutes", () => {
+      expect(parseDuration("5m")).toBe(5 * 60 * 1000);
+    });
+
+    test("parseDuration handles hours", () => {
+      expect(parseDuration("2h")).toBe(2 * 60 * 60 * 1000);
+    });
+
+    test("parseDuration handles days", () => {
+      expect(parseDuration("1d")).toBe(24 * 60 * 60 * 1000);
+    });
+
+    test("parseDuration returns null for invalid format", () => {
+      expect(parseDuration("invalid")).toBeNull();
+      expect(parseDuration("30x")).toBeNull();
+      expect(parseDuration("30")).toBeNull();
+    });
+
+    test("filterLogsBySince filters old logs", () => {
+      const now = Date.now();
+      const logs = [
+        { time: new Date(now - 10000).toISOString() }, // 10s ago
+        { time: new Date(now - 120000).toISOString() }, // 2m ago
+        { time: new Date(now - 1000).toISOString() }, // 1s ago
+      ];
+
+      const result = filterLogsBySince(logs, 60000); // Last 1m
+      expect(result).toHaveLength(2); // Only logs within last minute
+    });
+  });
+
+  describe("formatLogLine", () => {
+    function levelToName(level: number): string {
+      if (level >= 60) return "FATAL";
+      if (level >= 50) return "ERROR";
+      if (level >= 40) return "WARN ";
+      if (level >= 30) return "INFO ";
+      if (level >= 20) return "DEBUG";
+      return "TRACE";
+    }
+
+    function formatLogLine(log: { level: number; time: string; module: string; msg: string }): string {
+      const timestamp = new Date(log.time).toLocaleTimeString();
+      const levelName = levelToName(log.level);
+      const module = log.module.padEnd(12);
+      return `${timestamp} ${levelName} ${module} ${log.msg}`;
+    }
+
+    test("formats log line with timestamp and level", () => {
+      const log = {
+        level: 30,
+        time: "2024-12-24T16:00:00.000Z",
+        module: "compaction",
+        msg: "started",
+      };
+
+      const result = formatLogLine(log);
+      expect(result).toContain("INFO");
+      expect(result).toContain("compaction");
+      expect(result).toContain("started");
+    });
+
+    test("pads module name for alignment", () => {
+      const log1 = formatLogLine({ level: 30, time: "2024-12-24T16:00:00.000Z", module: "a", msg: "test" });
+      const log2 = formatLogLine({ level: 30, time: "2024-12-24T16:00:00.000Z", module: "compaction", msg: "test" });
+
+      // Module names should be padded to 12 chars
+      expect(log1).toContain("a test"); // 'a' + 11 spaces
+      expect(log2).toContain("compaction test"); // 'compaction' + 3 spaces (10 chars + 2)
+    });
+
+    test("levelToName maps all levels correctly", () => {
+      expect(levelToName(10)).toBe("TRACE");
+      expect(levelToName(20)).toBe("DEBUG");
+      expect(levelToName(30)).toBe("INFO ");
+      expect(levelToName(40)).toBe("WARN ");
+      expect(levelToName(50)).toBe("ERROR");
+      expect(levelToName(60)).toBe("FATAL");
+    });
+  });
+
+  describe("readLogFiles", () => {
+    test("reads multiple .1log files", () => {
+      // Create test log files
+      const log1 = join(testDir, "swarm.1log");
+      const log2 = join(testDir, "swarm.2log");
+      const log3 = join(testDir, "compaction.1log");
+
+      writeFileSync(log1, '{"level":30,"time":"2024-12-24T16:00:00.000Z","msg":"line1"}\n');
+      writeFileSync(log2, '{"level":30,"time":"2024-12-24T16:00:01.000Z","msg":"line2"}\n');
+      writeFileSync(log3, '{"level":30,"time":"2024-12-24T16:00:02.000Z","module":"compaction","msg":"line3"}\n');
+
+      function readLogFiles(dir: string): string[] {
+        if (!existsSync(dir)) return [];
+
+        const files = readdirSync(dir)
+          .filter((f) => /\.\d+log$/.test(f))
+          .sort() // Sort by filename
+          .map((f) => join(dir, f));
+
+        const lines: string[] = [];
+        for (const file of files) {
+          const content = readFileSync(file, "utf-8");
+          lines.push(...content.split("\n").filter((line) => line.trim()));
+        }
+
+        return lines;
+      }
+
+      const lines = readLogFiles(testDir);
+      expect(lines).toHaveLength(3);
+      // Files are sorted alphabetically: compaction.1log, swarm.1log, swarm.2log
+      expect(lines.some((l) => l.includes("line1"))).toBe(true);
+      expect(lines.some((l) => l.includes("line2"))).toBe(true);
+      expect(lines.some((l) => l.includes("line3"))).toBe(true);
+    });
+
+    test("returns empty array for non-existent directory", () => {
+      function readLogFiles(dir: string): string[] {
+        if (!existsSync(dir)) return [];
+        return [];
+      }
+
+      const lines = readLogFiles(join(testDir, "nonexistent"));
+      expect(lines).toHaveLength(0);
+    });
+  });
+});
package/bin/swarm.ts
CHANGED
@@ -1153,9 +1153,17 @@ const result2 = await Task(subagent_type="swarm/worker", prompt="<from above>")
 4. **SEND FEEDBACK** - Approve or request changes
    \`swarm_review_feedback(project_key, task_id, worker_id, status, issues)\`
 
-If approved
-
-
+**If approved:**
+- Close cell, spawn next worker
+
+**If needs_changes:**
+- \`swarm_review_feedback\` returns \`retry_context\` (NOT sends message - worker is dead)
+- Generate retry prompt: \`swarm_spawn_retry(retry_context)\`
+- Spawn NEW worker with Task() using retry prompt
+- Max 3 attempts before marking task blocked
+
+**If 3 failures:**
+- Mark task blocked, escalate to human
 
 5. **ONLY THEN** - Spawn next worker or complete
 
@@ -2706,6 +2714,7 @@ ${cyan("Commands:")}
   swarm config          Show paths to generated config files
   swarm agents          Update AGENTS.md with skill awareness
   swarm migrate         Migrate PGlite database to libSQL
+  swarm log             View swarm logs with filtering
   swarm update          Update to latest version
   swarm version         Show version and banner
   swarm tool            Execute a tool (for plugin wrapper)
@@ -2716,6 +2725,14 @@ ${cyan("Tool Execution:")}
   swarm tool <name>                  Execute tool with no args
   swarm tool <name> --json '<args>'  Execute tool with JSON args
 
+${cyan("Log Viewing:")}
+  swarm log                     Tail recent logs (last 50 lines)
+  swarm log <module>            Filter by module (e.g., compaction)
+  swarm log --level <level>     Filter by level (trace, debug, info, warn, error, fatal)
+  swarm log --since <duration>  Time filter (30s, 5m, 2h, 1d)
+  swarm log --json              Raw JSON output for jq
+  swarm log --limit <n>         Limit output to n lines (default: 50)
+
 ${cyan("Usage in OpenCode:")}
   /swarm "Add user authentication with OAuth"
   @swarm/planner "Decompose this into parallel tasks"
@@ -3045,7 +3062,7 @@ async function migrate() {
   // Show results
   const showStat = (label: string, stat: { migrated: number; skipped: number; failed: number }) => {
     if (stat.migrated > 0 || stat.skipped > 0 || stat.failed > 0) {
-      const parts = [];
+      const parts: string[] = [];
       if (stat.migrated > 0) parts.push(green(`${stat.migrated} migrated`));
       if (stat.skipped > 0) parts.push(dim(`${stat.skipped} skipped`));
      if (stat.failed > 0) parts.push(`\x1b[31m${stat.failed} failed\x1b[0m`);
@@ -3073,6 +3090,207 @@ async function migrate() {
   }
 }
 
+// ============================================================================
+// Log Command - View swarm logs with filtering
+// ============================================================================
+
+interface LogLine {
+  level: number;
+  time: string;
+  module: string;
+  msg: string;
+}
+
+function parseLogLine(line: string): LogLine | null {
+  try {
+    const parsed = JSON.parse(line);
+    if (typeof parsed.level === "number" && parsed.time && parsed.msg) {
+      return {
+        level: parsed.level,
+        time: parsed.time,
+        module: parsed.module || "unknown",
+        msg: parsed.msg,
+      };
+    }
+  } catch {
+    // Invalid JSON
+  }
+  return null;
+}
+
+function levelToName(level: number): string {
+  if (level >= 60) return "FATAL";
+  if (level >= 50) return "ERROR";
+  if (level >= 40) return "WARN ";
+  if (level >= 30) return "INFO ";
+  if (level >= 20) return "DEBUG";
+  return "TRACE";
+}
+
+function levelToColor(level: number): (s: string) => string {
+  if (level >= 50) return (s: string) => `\x1b[31m${s}\x1b[0m`; // red
+  if (level >= 40) return (s: string) => `\x1b[33m${s}\x1b[0m`; // yellow
+  if (level >= 30) return green; // green
+  return dim; // dim for debug/trace
+}
+
+function levelNameToNumber(name: string): number {
+  const lower = name.toLowerCase();
+  if (lower === "fatal") return 60;
+  if (lower === "error") return 50;
+  if (lower === "warn") return 40;
+  if (lower === "info") return 30;
+  if (lower === "debug") return 20;
+  if (lower === "trace") return 10;
+  return 30; // default to info
+}
+
+function parseDuration(duration: string): number | null {
+  const match = duration.match(/^(\d+)([smhd])$/);
+  if (!match) return null;
+
+  const [, num, unit] = match;
+  const value = parseInt(num, 10);
+
+  const multipliers: Record<string, number> = {
+    s: 1000,
+    m: 60 * 1000,
+    h: 60 * 60 * 1000,
+    d: 24 * 60 * 60 * 1000,
+  };
+
+  return value * multipliers[unit];
+}
+
+function formatLogLine(log: LogLine, useColor = true): string {
+  const timestamp = new Date(log.time).toLocaleTimeString();
+  const levelName = levelToName(log.level);
+  const module = log.module.padEnd(12);
+  const levelStr = useColor ? levelToColor(log.level)(levelName) : levelName;
+
+  return `${timestamp} ${levelStr} ${module} ${log.msg}`;
+}
+
+function readLogFiles(dir: string): string[] {
+  if (!existsSync(dir)) return [];
+
+  const allFiles = readdirSync(dir);
+  const logFiles = allFiles
+    .filter((f: string) => /\.\d+log$/.test(f))
+    .sort()
+    .map((f: string) => join(dir, f));
+
+  const lines: string[] = [];
+  for (const file of logFiles) {
+    try {
+      const content = readFileSync(file, "utf-8");
+      const fileLines = content.split("\n").filter((line: string) => line.trim());
+      lines.push(...fileLines);
+    } catch {
+      // Skip unreadable files
+    }
+  }
+
+  return lines;
+}
+
+async function logs() {
+  const args = process.argv.slice(3);
+
+  // Parse arguments
+  let moduleFilter: string | null = null;
+  let levelFilter: number | null = null;
+  let sinceMs: number | null = null;
+  let jsonOutput = false;
+  let limit = 50;
+
+  for (let i = 0; i < args.length; i++) {
+    const arg = args[i];
+
+    if (arg === "--level" && i + 1 < args.length) {
+      levelFilter = levelNameToNumber(args[++i]);
+    } else if (arg === "--since" && i + 1 < args.length) {
+      const duration = parseDuration(args[++i]);
+      if (duration === null) {
+        p.log.error(`Invalid duration format: ${args[i]}`);
+        p.log.message(dim(" Use format: 30s, 5m, 2h, 1d"));
+        process.exit(1);
+      }
+      sinceMs = duration;
+    } else if (arg === "--json") {
+      jsonOutput = true;
+    } else if (arg === "--limit" && i + 1 < args.length) {
+      limit = parseInt(args[++i], 10);
+      if (isNaN(limit) || limit <= 0) {
+        p.log.error(`Invalid limit: ${args[i]}`);
+        process.exit(1);
+      }
+    } else if (!arg.startsWith("--")) {
+      // Positional arg = module filter
+      moduleFilter = arg;
+    }
+  }
+
+  // Read logs from ~/.config/swarm-tools/logs/
+  const logsDir = join(homedir(), ".config", "swarm-tools", "logs");
+
+  if (!existsSync(logsDir)) {
+    if (!jsonOutput) {
+      p.log.warn("No logs directory found");
+      p.log.message(dim(` Expected: ${logsDir}`));
+    } else {
+      console.log(JSON.stringify({ logs: [] }));
+    }
+    return;
+  }
+
+  const rawLines = readLogFiles(logsDir);
+
+  // Parse and filter
+  let logs: LogLine[] = rawLines
+    .map(parseLogLine)
+    .filter((log): log is LogLine => log !== null);
+
+  // Apply filters
+  if (moduleFilter) {
+    logs = logs.filter((log) => log.module === moduleFilter);
+  }
+
+  if (levelFilter !== null) {
+    logs = logs.filter((log) => log.level >= levelFilter);
+  }
+
+  if (sinceMs !== null) {
+    const cutoffTime = Date.now() - sinceMs;
+    logs = logs.filter((log) => new Date(log.time).getTime() >= cutoffTime);
+  }
+
+  // Apply limit (keep most recent)
+  logs = logs.slice(-limit);
+
+  // Output
+  if (jsonOutput) {
+    console.log(JSON.stringify({ logs }, null, 2));
+  } else {
+    if (logs.length === 0) {
+      p.log.warn("No logs found matching filters");
+      return;
+    }
+
+    console.log(yellow(BANNER));
+    console.log(dim(` Logs (${logs.length} entries)`));
+    if (moduleFilter) console.log(dim(` Module: ${moduleFilter}`));
+    if (levelFilter !== null) console.log(dim(` Level: >=${levelToName(levelFilter)}`));
+    if (sinceMs !== null) console.log(dim(` Since: last ${args[args.indexOf("--since") + 1]}`));
+    console.log();
+
+    for (const log of logs) {
+      console.log(formatLogLine(log));
+    }
+    console.log();
+  }
+}
+
 // ============================================================================
 // Database Info Command
 // ============================================================================
@@ -3216,6 +3434,10 @@ switch (command) {
   case "db":
     await db();
     break;
+  case "log":
+  case "logs":
+    await logs();
+    break;
   case "version":
   case "--version":
   case "-v":