opencode-swarm-plugin 0.34.0 → 0.36.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-test.log +333 -333
- package/CHANGELOG.md +97 -0
- package/bin/swarm.test.ts +70 -0
- package/bin/swarm.ts +139 -14
- package/examples/plugin-wrapper-template.ts +447 -33
- package/package.json +1 -1
- package/src/compaction-hook.test.ts +226 -258
- package/src/compaction-hook.ts +361 -16
- package/src/eval-capture.ts +5 -6
- package/src/index.ts +21 -1
- package/src/learning.integration.test.ts +0 -2
- package/src/schemas/task.ts +0 -1
- package/src/swarm-decompose.ts +1 -15
- package/src/swarm-prompts.ts +1 -8
- package/src/swarm.integration.test.ts +0 -40
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,102 @@
 # opencode-swarm-plugin
 
+## 0.36.0
+
+### Minor Changes
+
+- [`ae213aa`](https://github.com/joelhooks/swarm-tools/commit/ae213aa49be977e425e0a767b5b2db16e462f76b) Thanks [@joelhooks](https://github.com/joelhooks)! - ## 🔬 Compaction Hook: Now With X-Ray Vision
+
+The compaction hook was logging to `console.log` like a caveman. Now it writes structured JSON logs to `~/.config/swarm-tools/logs/compaction.log` - visible via `swarm log compaction`.
+
+**The Problem:**
+
+- Plugin wrapper used `console.log` → stdout → invisible
+- npm package had pino logging → but wrapper didn't use it
+- Running `/compact` gave zero visibility into what happened
+
+**The Fix:**
+Added comprehensive file-based logging throughout the compaction flow:
+
+```
+┌────────────────────────────────────────────────────────────┐
+│                     COMPACTION LOGGING                      │
+├────────────────────────────────────────────────────────────┤
+│ compaction_hook_invoked     │ Full input/output objects    │
+│ detect_swarm_*              │ CLI calls, cells, confidence │
+│ query_swarm_state_*         │ Epic/subtask extraction      │
+│ generate_compaction_prompt_*│ LLM timing, success/failure  │
+│ context_injected_via_*      │ Which API used               │
+│ compaction_complete_*       │ Final result + timing        │
+└────────────────────────────────────────────────────────────┘
+```
+
+**Also Enhanced:**
+
+- SDK message scanning for precise swarm state extraction
+- Merged scanned state (ground truth) with hive detection (heuristic)
+- 9 new tests for `scanSessionMessages()` (32 total passing)
+
+**To See It Work:**
+
+```bash
+swarm setup --reinstall   # Regenerate plugin wrapper
+# Run /compact in OpenCode
+swarm log compaction      # See what happened
+```
+
+### Patch Changes
+
+- [`5cfc42e`](https://github.com/joelhooks/swarm-tools/commit/5cfc42e93d3e5424e308857a40af4fd9fbda0ba3) Thanks [@joelhooks](https://github.com/joelhooks)! - ## 🐝 Swarm Workers Unchained
+
+Removed the vestigial `max_subtasks` parameter from decomposition tools. It was dead code - the prompts already say "as many as needed" and the replacement was doing nothing.
+
+**What changed:**
+
+- Removed `max_subtasks` arg from `swarm_decompose`, `swarm_plan_prompt`, `swarm_delegate_planning`
+- Removed from `DecomposeArgsSchema`
+- Renamed `max_subtasks` → `subtask_count` in eval capture (records actual count, not a limit)
+- Cleaned up tests that were passing the unused parameter
+
+**Why it matters:**
+The LLM decides how many subtasks based on task complexity, not an arbitrary cap. "Plan aggressively" means spawn as many workers as the task needs.
+
+**No functional change** - the parameter wasn't being used anyway.
+
+## 0.35.0
+
+### Minor Changes
+
+- [`084f888`](https://github.com/joelhooks/swarm-tools/commit/084f888fcac4912f594428b1ac7148c8a8aaa422) Thanks [@joelhooks](https://github.com/joelhooks)! - ## 👁️ Watch Your Swarm in Real-Time
+
+`swarm log` now has a `--watch` mode for continuous log monitoring. No more running the command repeatedly - just sit back and watch the bees work.
+
+```bash
+# Watch all logs
+swarm log --watch
+
+# Watch with filters
+swarm log compaction -w --level error
+
+# Faster polling (500ms instead of default 1s)
+swarm log --watch --interval 500
+```
+
+**New flags:**
+
+- `--watch`, `-w` - Enable continuous monitoring mode
+- `--interval <ms>` - Poll interval in milliseconds (default: 1000, min: 100)
+
+**How it works:**
+
+- Shows initial logs (last N lines based on `--limit`)
+- Polls log files for new entries at the specified interval
+- Tracks file positions for efficient incremental reads
+- Handles log rotation gracefully (detects file truncation)
+- All existing filters work: `--level`, `--since`, module name
+- Clean shutdown on Ctrl+C
+
+_"The hive that watches itself, debugs itself."_
+
 ## 0.34.0
 
 ### Minor Changes
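The 0.36.0 entry above states that compaction events are now written as structured JSON to `~/.config/swarm-tools/logs/compaction.log` and viewed with `swarm log compaction`. As a supplement to the diff, here is a minimal sketch of reading that file directly, assuming pino-style NDJSON entries with numeric `level`, ISO `time`, and `msg` fields (the shape used in the test fixtures later in this diff); whether the event names from the table land in `msg` or in a dedicated field is an assumption.

```ts
import { readFileSync } from "node:fs";
import { homedir } from "node:os";
import { join } from "node:path";

// Assumed entry shape, based on the pino-style fixtures in swarm.test.ts below.
interface CompactionLogLine {
  level: number;
  time: string;
  msg: string;
  [key: string]: unknown;
}

const logPath = join(homedir(), ".config", "swarm-tools", "logs", "compaction.log");

const entries = readFileSync(logPath, "utf-8")
  .split("\n")
  .filter((line) => line.trim())
  .map((line) => {
    try {
      return JSON.parse(line) as CompactionLogLine;
    } catch {
      return null; // skip partially written lines
    }
  })
  .filter((entry): entry is CompactionLogLine => entry !== null);

// Keep only the lifecycle events named in the changelog table (assumed to be in `msg`).
const lifecycle = entries.filter((e) =>
  /^(compaction_hook_invoked|detect_swarm_|query_swarm_state_|generate_compaction_prompt_|context_injected_via_|compaction_complete_)/.test(e.msg),
);

for (const e of lifecycle) {
  console.log(`${e.time} [${e.level}] ${e.msg}`);
}
```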
package/bin/swarm.test.ts
CHANGED
@@ -462,4 +462,74 @@ describe("Log command helpers", () => {
       expect(lines).toHaveLength(0);
     });
   });
+
+  describe("watchLogs", () => {
+    test("detects new log lines appended to file", async () => {
+      const logFile = join(testDir, "swarm.1log");
+      const collectedLines: string[] = [];
+
+      // Create initial log file
+      writeFileSync(logFile, '{"level":30,"time":"2024-12-24T16:00:00.000Z","msg":"initial"}\n');
+
+      // Import watch utilities
+      const { watch } = await import("fs");
+      const { appendFileSync } = await import("fs");
+
+      // Track file position for incremental reads
+      let lastSize = 0;
+
+      function readNewLines(filePath: string): string[] {
+        const content = readFileSync(filePath, "utf-8");
+        const newContent = content.slice(lastSize);
+        lastSize = content.length;
+        return newContent.split("\n").filter((line) => line.trim());
+      }
+
+      // Simulate watch behavior
+      const watcher = watch(testDir, (eventType, filename) => {
+        if (filename && /\.\d+log$/.test(filename)) {
+          const newLines = readNewLines(join(testDir, filename));
+          collectedLines.push(...newLines);
+        }
+      });
+
+      // Wait for watcher to be ready
+      await new Promise((resolve) => setTimeout(resolve, 100));
+
+      // Append new log line
+      appendFileSync(logFile, '{"level":30,"time":"2024-12-24T16:00:01.000Z","msg":"appended"}\n');
+
+      // Wait for event to fire
+      await new Promise((resolve) => setTimeout(resolve, 200));
+
+      watcher.close();
+
+      // Should have detected the new line
+      expect(collectedLines.some((l) => l.includes("appended"))).toBe(true);
+    });
+
+    test("parseWatchArgs extracts --watch flag", () => {
+      function parseWatchArgs(args: string[]): { watch: boolean; interval: number } {
+        let watch = false;
+        let interval = 1000; // default 1 second
+
+        for (let i = 0; i < args.length; i++) {
+          const arg = args[i];
+          if (arg === "--watch" || arg === "-w") {
+            watch = true;
+          } else if (arg === "--interval" && i + 1 < args.length) {
+            interval = parseInt(args[++i], 10);
+          }
+        }
+
+        return { watch, interval };
+      }
+
+      expect(parseWatchArgs(["--watch"])).toEqual({ watch: true, interval: 1000 });
+      expect(parseWatchArgs(["-w"])).toEqual({ watch: true, interval: 1000 });
+      expect(parseWatchArgs(["--watch", "--interval", "500"])).toEqual({ watch: true, interval: 500 });
+      expect(parseWatchArgs(["compaction", "--watch"])).toEqual({ watch: true, interval: 1000 });
+      expect(parseWatchArgs(["--level", "error"])).toEqual({ watch: false, interval: 1000 });
+    });
+  });
 });
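The watch tests above exercise an offset-tracking read: remember how much of each log file has already been seen and surface only the appended suffix, resetting when a file shrinks. Here is a standalone sketch of that pattern, using character offsets for simplicity; the CLI implementation in the next file checks `statSync` sizes first so unchanged files are not re-read, and the names here (`positions`, `readNewLines`) are illustrative, not package exports.

```ts
import { readFileSync } from "node:fs";

// Last-seen content length per log file.
const positions = new Map<string, number>();

export function readNewLines(filePath: string): string[] {
  const content = readFileSync(filePath, "utf-8");
  const lastPos = positions.get(filePath) ?? 0;

  if (content.length < lastPos) {
    // File shrank: truncated or rotated, so reset and wait for fresh lines.
    positions.set(filePath, content.length);
    return [];
  }

  const appended = content.slice(lastPos);
  positions.set(filePath, content.length);

  // One NDJSON log entry per non-empty line.
  return appended.split("\n").filter((line) => line.trim());
}
```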
package/bin/swarm.ts
CHANGED
@@ -2732,6 +2732,8 @@ ${cyan("Log Viewing:")}
   swarm log --since <duration>  Time filter (30s, 5m, 2h, 1d)
   swarm log --json              Raw JSON output for jq
   swarm log --limit <n>         Limit output to n lines (default: 50)
+  swarm log --watch, -w         Watch mode - continuously monitor for new logs
+  swarm log --interval <ms>     Poll interval in ms (default: 1000, min: 100)
 
 ${cyan("Usage in OpenCode:")}
   /swarm "Add user authentication with OAuth"

@@ -3203,6 +3205,8 @@ async function logs() {
   let sinceMs: number | null = null;
   let jsonOutput = false;
   let limit = 50;
+  let watchMode = false;
+  let pollInterval = 1000; // 1 second default
 
   for (let i = 0; i < args.length; i++) {
     const arg = args[i];

@@ -3225,7 +3229,15 @@
         p.log.error(`Invalid limit: ${args[i]}`);
         process.exit(1);
       }
-    } else if (
+    } else if (arg === "--watch" || arg === "-w") {
+      watchMode = true;
+    } else if (arg === "--interval" && i + 1 < args.length) {
+      pollInterval = parseInt(args[++i], 10);
+      if (isNaN(pollInterval) || pollInterval < 100) {
+        p.log.error(`Invalid interval: ${args[i]} (minimum 100ms)`);
+        process.exit(1);
+      }
+    } else if (!arg.startsWith("--") && !arg.startsWith("-")) {
       // Positional arg = module filter
       moduleFilter = arg;
     }

@@ -3244,6 +3256,131 @@ async function logs() {
     return;
   }
 
+  // Helper to filter logs
+  const filterLogs = (rawLogs: LogLine[]): LogLine[] => {
+    let filtered = rawLogs;
+
+    if (moduleFilter) {
+      filtered = filtered.filter((log) => log.module === moduleFilter);
+    }
+
+    if (levelFilter !== null) {
+      filtered = filtered.filter((log) => log.level >= levelFilter);
+    }
+
+    if (sinceMs !== null) {
+      const cutoffTime = Date.now() - sinceMs;
+      filtered = filtered.filter((log) => new Date(log.time).getTime() >= cutoffTime);
+    }
+
+    return filtered;
+  };
+
+  // Watch mode - continuous monitoring
+  if (watchMode) {
+    console.log(yellow(BANNER));
+    console.log(dim(` Watching logs... (Ctrl+C to stop)`));
+    if (moduleFilter) console.log(dim(` Module: ${moduleFilter}`));
+    if (levelFilter !== null) console.log(dim(` Level: >=${levelToName(levelFilter)}`));
+    console.log();
+
+    // Track file positions for incremental reads
+    const filePositions: Map<string, number> = new Map();
+
+    // Initialize positions from current file sizes
+    const initializePositions = () => {
+      if (!existsSync(logsDir)) return;
+      const files = readdirSync(logsDir).filter((f: string) => /\.\d+log$/.test(f));
+      for (const file of files) {
+        const filePath = join(logsDir, file);
+        try {
+          const stats = statSync(filePath);
+          filePositions.set(filePath, stats.size);
+        } catch {
+          // Skip unreadable files
+        }
+      }
+    };
+
+    // Read new lines from a file since last position
+    const readNewLines = (filePath: string): string[] => {
+      try {
+        const stats = statSync(filePath);
+        const lastPos = filePositions.get(filePath) || 0;
+
+        if (stats.size <= lastPos) {
+          // File was truncated or no new content
+          if (stats.size < lastPos) {
+            filePositions.set(filePath, stats.size);
+          }
+          return [];
+        }
+
+        const content = readFileSync(filePath, "utf-8");
+        const newContent = content.slice(lastPos);
+        filePositions.set(filePath, stats.size);
+
+        return newContent.split("\n").filter((line: string) => line.trim());
+      } catch {
+        return [];
+      }
+    };
+
+    // Print initial logs (last N lines)
+    const rawLines = readLogFiles(logsDir);
+    let logs: LogLine[] = rawLines
+      .map(parseLogLine)
+      .filter((log): log is LogLine => log !== null);
+    logs = filterLogs(logs).slice(-limit);
+
+    for (const log of logs) {
+      console.log(formatLogLine(log));
+    }
+
+    // Initialize positions after printing initial logs
+    initializePositions();
+
+    // Poll for new logs
+    const pollForNewLogs = () => {
+      if (!existsSync(logsDir)) return;
+
+      const files = readdirSync(logsDir).filter((f: string) => /\.\d+log$/.test(f));
+
+      for (const file of files) {
+        const filePath = join(logsDir, file);
+        const newLines = readNewLines(filePath);
+
+        for (const line of newLines) {
+          const parsed = parseLogLine(line);
+          if (parsed) {
+            const filtered = filterLogs([parsed]);
+            if (filtered.length > 0) {
+              console.log(formatLogLine(filtered[0]));
+            }
+          }
+        }
+      }
+    };
+
+    // Set up polling interval
+    const intervalId = setInterval(pollForNewLogs, pollInterval);
+
+    // Handle graceful shutdown
+    const cleanup = () => {
+      clearInterval(intervalId);
+      console.log(dim("\n Stopped watching."));
+      process.exit(0);
+    };
+
+    process.on("SIGINT", cleanup);
+    process.on("SIGTERM", cleanup);
+
+    // Keep process alive
+    await new Promise(() => {});
+    return;
+  }
+
+  // Non-watch mode - one-shot output
   const rawLines = readLogFiles(logsDir);
 
   // Parse and filter

@@ -3251,19 +3388,7 @@ async function logs() {
     .map(parseLogLine)
     .filter((log): log is LogLine => log !== null);
 
-
-  if (moduleFilter) {
-    logs = logs.filter((log) => log.module === moduleFilter);
-  }
-
-  if (levelFilter !== null) {
-    logs = logs.filter((log) => log.level >= levelFilter);
-  }
-
-  if (sinceMs !== null) {
-    const cutoffTime = Date.now() - sinceMs;
-    logs = logs.filter((log) => new Date(log.time).getTime() >= cutoffTime);
-  }
+  logs = filterLogs(logs);
 
   // Apply limit (keep most recent)
   logs = logs.slice(-limit);