opencode-swarm-plugin 0.42.9 → 0.43.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.hive/issues.jsonl +14 -0
- package/.turbo/turbo-build.log +2 -2
- package/CHANGELOG.md +79 -0
- package/README.md +296 -6
- package/bin/swarm.test.ts +615 -0
- package/bin/swarm.ts +434 -0
- package/dist/dashboard.d.ts +83 -0
- package/dist/dashboard.d.ts.map +1 -0
- package/dist/error-enrichment.d.ts +49 -0
- package/dist/error-enrichment.d.ts.map +1 -0
- package/dist/export-tools.d.ts +76 -0
- package/dist/export-tools.d.ts.map +1 -0
- package/dist/index.d.ts +2 -2
- package/dist/observability-tools.d.ts +2 -2
- package/dist/query-tools.d.ts +59 -0
- package/dist/query-tools.d.ts.map +1 -0
- package/dist/replay-tools.d.ts +28 -0
- package/dist/replay-tools.d.ts.map +1 -0
- package/package.json +1 -1
- package/src/dashboard.test.ts +611 -0
- package/src/dashboard.ts +462 -0
- package/src/error-enrichment.test.ts +403 -0
- package/src/error-enrichment.ts +219 -0
- package/src/export-tools.test.ts +476 -0
- package/src/export-tools.ts +257 -0
- package/src/query-tools.test.ts +636 -0
- package/src/query-tools.ts +324 -0
- package/src/replay-tools.test.ts +496 -0
- package/src/replay-tools.ts +240 -0
package/bin/swarm.ts
CHANGED
@@ -2503,6 +2503,405 @@ async function update() {
 }
 }
 
+// ============================================================================
+// Observability Commands (Phase 5)
+// ============================================================================
+
+/**
+ * Parse args for query command
+ */
+function parseQueryArgs(args: string[]): { format: string; query?: string; preset?: string } {
+  let format = "table";
+  let query: string | undefined;
+  let preset: string | undefined;
+
+  for (let i = 0; i < args.length; i++) {
+    if (args[i] === "--format") {
+      format = args[i + 1] || "table";
+      i++;
+    } else if (args[i] === "--sql") {
+      query = args[i + 1];
+      i++;
+    } else if (args[i] === "--preset") {
+      preset = args[i + 1];
+      i++;
+    }
+  }
+
+  return { format, query, preset };
+}
+
+async function query() {
+  const args = process.argv.slice(3); // Everything after "swarm query"
+  const parsed = parseQueryArgs(args);
+
+  // Import query tools
+  const { executeQuery, executePreset, formatAsTable, formatAsCSV, formatAsJSON } = await import("../src/observability/query-tools.js");
+
+  p.intro("swarm query");
+
+  const projectPath = process.cwd();
+
+  try {
+    let rows: any[];
+
+    if (parsed.preset) {
+      // Execute preset query
+      p.log.step(`Executing preset: ${parsed.preset}`);
+      rows = await executePreset(projectPath, parsed.preset);
+    } else if (parsed.query) {
+      // Execute custom SQL
+      p.log.step("Executing custom SQL");
+      rows = await executeQuery(projectPath, parsed.query);
+    } else {
+      p.log.error("No query specified. Use --sql or --preset");
+      p.outro("Aborted");
+      process.exit(1);
+    }
+
+    // Format output
+    let output: string;
+    switch (parsed.format) {
+      case "csv":
+        output = formatAsCSV(rows);
+        break;
+      case "json":
+        output = formatAsJSON(rows);
+        break;
+      case "table":
+      default:
+        output = formatAsTable(rows);
+        break;
+    }
+
+    console.log();
+    console.log(output);
+    console.log();
+
+    p.outro(`Found ${rows.length} result(s)`);
+  } catch (error) {
+    p.log.error("Query failed");
+    p.log.message(error instanceof Error ? error.message : String(error));
+    p.outro("Aborted");
+    process.exit(1);
+  }
+}
+
+/**
+ * Parse args for dashboard command
+ */
+function parseDashboardArgs(args: string[]): { epic?: string; refresh: number } {
+  let epic: string | undefined;
+  let refresh = 1000;
+
+  for (let i = 0; i < args.length; i++) {
+    if (args[i] === "--epic") {
+      epic = args[i + 1];
+      i++;
+    } else if (args[i] === "--refresh") {
+      const ms = parseInt(args[i + 1], 10);
+      if (!isNaN(ms) && ms > 0) {
+        refresh = ms;
+      }
+      i++;
+    }
+  }
+
+  return { epic, refresh };
+}
+
+async function dashboard() {
+  const args = process.argv.slice(3);
+  const parsed = parseDashboardArgs(args);
+
+  const { getWorkerStatus, getSubtaskProgress, getFileLocks, getRecentMessages, getEpicList } = await import("../src/observability/dashboard.js");
+
+  p.intro("swarm dashboard");
+
+  const projectPath = process.cwd();
+
+  console.clear();
+  console.log(yellow("=".repeat(60)));
+  console.log(yellow(" SWARM DASHBOARD"));
+  console.log(yellow("=".repeat(60)));
+  console.log();
+
+  let iteration = 0;
+
+  // Refresh loop
+  const refreshLoop = async () => {
+    try {
+      // Move cursor to top
+      if (iteration > 0) {
+        process.stdout.write("\x1b[H");
+      }
+
+      const timestamp = new Date().toLocaleTimeString();
+      console.log(dim(`Last updated: ${timestamp} (Press Ctrl+C to exit)`));
+      console.log();
+
+      // Worker Status
+      console.log(cyan("Worker Status:"));
+      const workers = await getWorkerStatus(projectPath, parsed.epic);
+      if (workers.length === 0) {
+        console.log(dim(" No active workers"));
+      } else {
+        for (const w of workers) {
+          console.log(` ${w.agent_name} - ${w.status} - ${w.current_bead_id || "idle"}`);
+        }
+      }
+      console.log();
+
+      // Subtask Progress
+      console.log(cyan("Subtask Progress:"));
+      const progress = await getSubtaskProgress(projectPath, parsed.epic);
+      if (progress.length === 0) {
+        console.log(dim(" No subtasks"));
+      } else {
+        for (const p of progress) {
+          const bar = "█".repeat(Math.floor(p.progress / 10)) + "░".repeat(10 - Math.floor(p.progress / 10));
+          console.log(` ${p.bead_id} [${bar}] ${p.progress}% - ${p.status}`);
+        }
+      }
+      console.log();
+
+      // File Locks
+      console.log(cyan("File Locks:"));
+      const locks = await getFileLocks(projectPath);
+      if (locks.length === 0) {
+        console.log(dim(" No active locks"));
+      } else {
+        for (const lock of locks) {
+          console.log(` ${lock.path_pattern} - ${lock.agent_name} (${lock.exclusive ? "exclusive" : "shared"})`);
+        }
+      }
+      console.log();
+
+      // Recent Messages
+      console.log(cyan("Recent Messages:"));
+      const messages = await getRecentMessages(projectPath, parsed.epic, 5);
+      if (messages.length === 0) {
+        console.log(dim(" No recent messages"));
+      } else {
+        for (const msg of messages) {
+          const timeAgo = Math.floor((Date.now() - new Date(msg.timestamp).getTime()) / 1000);
+          console.log(` ${msg.from_agent} → ${msg.to_agents}: ${msg.subject} (${timeAgo}s ago)`);
+        }
+      }
+      console.log();
+
+      iteration++;
+    } catch (error) {
+      console.log(red("Dashboard error: " + (error instanceof Error ? error.message : String(error))));
+    }
+  };
+
+  // Initial render
+  await refreshLoop();
+
+  // Set up refresh interval
+  const interval = setInterval(refreshLoop, parsed.refresh);
+
+  // Handle Ctrl+C
+  process.on("SIGINT", () => {
+    clearInterval(interval);
+    console.log();
+    p.outro("Dashboard closed");
+    process.exit(0);
+  });
+
+  // Keep process alive
+  await new Promise(() => {});
+}
+
+/**
+ * Parse args for replay command
+ */
+function parseReplayArgs(args: string[]): {
+  epicId?: string;
+  speed: number;
+  types: string[];
+  agent?: string;
+  since?: Date;
+  until?: Date;
+} {
+  let epicId: string | undefined;
+  let speed = 1;
+  let types: string[] = [];
+  let agent: string | undefined;
+  let since: Date | undefined;
+  let until: Date | undefined;
+
+  // First positional arg is epic ID
+  if (args.length > 0 && !args[0].startsWith("--")) {
+    epicId = args[0];
+  }
+
+  for (let i = 0; i < args.length; i++) {
+    if (args[i] === "--speed") {
+      const val = args[i + 1];
+      if (val === "instant") {
+        speed = Infinity;
+      } else {
+        const parsed = parseFloat(val?.replace("x", "") || "1");
+        if (!isNaN(parsed) && parsed > 0) {
+          speed = parsed;
+        }
+      }
+      i++;
+    } else if (args[i] === "--type") {
+      types = args[i + 1]?.split(",").map((t) => t.trim()) || [];
+      i++;
+    } else if (args[i] === "--agent") {
+      agent = args[i + 1];
+      i++;
+    } else if (args[i] === "--since") {
+      const dateStr = args[i + 1];
+      if (dateStr) {
+        since = new Date(dateStr);
+      }
+      i++;
+    } else if (args[i] === "--until") {
+      const dateStr = args[i + 1];
+      if (dateStr) {
+        until = new Date(dateStr);
+      }
+      i++;
+    }
+  }
+
+  return { epicId, speed, types, agent, since, until };
+}
+
+async function replay() {
+  const args = process.argv.slice(3);
+  const parsed = parseReplayArgs(args);
+
+  if (!parsed.epicId) {
+    p.log.error("Epic ID required");
+    p.log.message("Usage: swarm replay <epic-id> [options]");
+    process.exit(1);
+  }
+
+  const { fetchEpicEvents, filterEvents, replayWithTiming, formatReplayEvent } = await import("../src/observability/replay-tools.js");
+
+  p.intro(`swarm replay ${parsed.epicId}`);
+
+  const projectPath = process.cwd();
+
+  try {
+    // Fetch events
+    p.log.step("Fetching events...");
+    let events = await fetchEpicEvents(projectPath, parsed.epicId);
+
+    // Apply filters
+    events = filterEvents(events, {
+      types: parsed.types,
+      agent: parsed.agent,
+      since: parsed.since,
+      until: parsed.until,
+    });
+
+    if (events.length === 0) {
+      p.log.warn("No events found matching filters");
+      p.outro("Aborted");
+      process.exit(0);
+    }
+
+    p.log.success(`Found ${events.length} events`);
+    p.log.message(dim(`Speed: ${parsed.speed === Infinity ? "instant" : `${parsed.speed}x`}`));
+    console.log();
+
+    // Replay events
+    await replayWithTiming(events, parsed.speed, (event) => {
+      console.log(formatReplayEvent(event));
+    });
+
+    console.log();
+    p.outro("Replay complete");
+  } catch (error) {
+    p.log.error("Replay failed");
+    p.log.message(error instanceof Error ? error.message : String(error));
+    p.outro("Aborted");
+    process.exit(1);
+  }
+}
+
+/**
+ * Parse args for export command
+ */
+function parseExportArgs(args: string[]): {
+  format: string;
+  epic?: string;
+  output?: string;
+} {
+  let format = "json";
+  let epic: string | undefined;
+  let output: string | undefined;
+
+  for (let i = 0; i < args.length; i++) {
+    if (args[i] === "--format") {
+      format = args[i + 1] || "json";
+      i++;
+    } else if (args[i] === "--epic") {
+      epic = args[i + 1];
+      i++;
+    } else if (args[i] === "--output") {
+      output = args[i + 1];
+      i++;
+    }
+  }
+
+  return { format, epic, output };
+}
+
+async function exportEvents() {
+  const args = process.argv.slice(3);
+  const parsed = parseExportArgs(args);
+
+  const { exportToOTLP, exportToCSV, exportToJSON } = await import("../src/observability/export-tools.js");
+
+  p.intro("swarm export");
+
+  const projectPath = process.cwd();
+
+  try {
+    let result: string;
+
+    p.log.step(`Exporting as ${parsed.format}...`);
+
+    switch (parsed.format) {
+      case "otlp":
+        result = await exportToOTLP(projectPath, parsed.epic);
+        break;
+      case "csv":
+        result = await exportToCSV(projectPath, parsed.epic);
+        break;
+      case "json":
+      default:
+        result = await exportToJSON(projectPath, parsed.epic);
+        break;
+    }
+
+    // Output to file or stdout
+    if (parsed.output) {
+      writeFileSync(parsed.output, result);
+      p.log.success(`Exported to: ${parsed.output}`);
+    } else {
+      console.log();
+      console.log(result);
+      console.log();
+    }
+
+    p.outro("Export complete");
+  } catch (error) {
+    p.log.error("Export failed");
+    p.log.message(error instanceof Error ? error.message : String(error));
+    p.outro("Aborted");
+    process.exit(1);
+  }
+}
+
 async function help() {
   console.log(yellow(BANNER));
   console.log(dim(" " + TAGLINE + " v" + VERSION));
@@ -2525,6 +2924,10 @@ ${cyan("Commands:")}
   swarm stats       Show swarm health metrics powered by swarm-insights (strategy success rates, patterns)
   swarm history     Show recent swarm activity timeline with insights data
   swarm eval        Eval-driven development commands
+  swarm query       SQL analytics with presets (--sql, --preset, --format)
+  swarm dashboard   Live terminal UI with worker status (--epic, --refresh)
+  swarm replay      Event replay with timing (--speed, --type, --agent, --since, --until)
+  swarm export      Export events (--format otlp/csv/json, --epic, --output)
   swarm update      Update to latest version
   swarm version     Show version and banner
   swarm tool        Execute a tool (for plugin wrapper)
@@ -2573,6 +2976,25 @@ ${cyan("Eval Commands:")}
   swarm eval history   Show eval run history with trends
   swarm eval run       Execute evals and report results (stub)
 
+${cyan("Observability Commands:")}
+  swarm query --sql <query>               Execute custom SQL query
+  swarm query --preset <name>             Execute preset query (failed_decompositions, duration_by_strategy, etc)
+  swarm query --format <fmt>              Output format: table (default), csv, json
+  swarm dashboard                         Live terminal UI showing worker status, progress, locks, messages
+  swarm dashboard --epic <id>             Focus on specific epic
+  swarm dashboard --refresh <ms>          Poll interval in milliseconds (default: 1000)
+  swarm replay <epic-id>                  Replay epic events with timing
+  swarm replay <epic-id> --speed 2x       Playback speed: 1x, 2x, instant
+  swarm replay <epic-id> --type <types>   Filter by event types (comma-separated)
+  swarm replay <epic-id> --agent <name>   Filter by agent name
+  swarm replay <epic-id> --since <time>   Events after this time
+  swarm replay <epic-id> --until <time>   Events before this time
+  swarm export                            Export events to stdout (JSON)
+  swarm export --format otlp              Export as OpenTelemetry (OTLP)
+  swarm export --format csv               Export as CSV
+  swarm export --epic <id>                Export specific epic only
+  swarm export --output <file>            Write to file instead of stdout
+
 ${cyan("Usage in OpenCode:")}
   /swarm "Add user authentication with OAuth"
   @swarm/planner "Decompose this into parallel tasks"
@@ -4612,6 +5034,18 @@ switch (command) {
   case "eval":
     await evalCommand();
     break;
+  case "query":
+    await query();
+    break;
+  case "dashboard":
+    await dashboard();
+    break;
+  case "replay":
+    await replay();
+    break;
+  case "export":
+    await exportEvents();
+    break;
   case "version":
   case "--version":
  case "-v":
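For orientation, here is a minimal sketch of how the new replay flags are interpreted, following only the parseReplayArgs logic added above; the argv values are hypothetical.

// Hypothetical invocation: swarm replay epic-123 --speed 2x --type task_started,task_completed --agent worker-1
// process.argv.slice(3) then contains:
const argv = ["epic-123", "--speed", "2x", "--type", "task_started,task_completed", "--agent", "worker-1"];

// parseReplayArgs(argv), as written in the diff above, would return:
// {
//   epicId: "epic-123",                         // first positional argument
//   speed: 2,                                   // "2x" with the trailing "x" stripped; "instant" maps to Infinity
//   types: ["task_started", "task_completed"],  // comma-separated --type list
//   agent: "worker-1",
//   since: undefined,
//   until: undefined,
// }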
package/dist/dashboard.d.ts
ADDED
@@ -0,0 +1,83 @@
+/**
+ * Dashboard Data Layer
+ *
+ * Provides read-only queries for swarm observability dashboard.
+ * Data sources:
+ * - libSQL events table (event sourcing)
+ * - Hive cells (work items)
+ * - Agent projections (agent states)
+ * - Reservation projections (file locks)
+ */
+import type { DatabaseAdapter } from "swarm-mail";
+export interface WorkerStatus {
+    agent_name: string;
+    status: "idle" | "working" | "blocked";
+    current_task?: string;
+    last_activity: string;
+}
+export interface SubtaskProgress {
+    bead_id: string;
+    title: string;
+    status: "open" | "in_progress" | "completed" | "blocked";
+    progress_percent: number;
+}
+export interface FileLock {
+    path: string;
+    agent_name: string;
+    reason: string;
+    acquired_at: string;
+    ttl_seconds: number;
+}
+export interface RecentMessage {
+    id: number;
+    from: string;
+    to: string[];
+    subject: string;
+    timestamp: string;
+    importance: "low" | "normal" | "high" | "urgent";
+}
+export interface EpicInfo {
+    epic_id: string;
+    title: string;
+    subtask_count: number;
+    completed_count: number;
+}
+/**
+ * Get current status of all worker agents.
+ * Derives status from latest events: task_started, progress_reported, task_blocked, etc.
+ */
+export declare function getWorkerStatus(db: DatabaseAdapter, options?: {
+    project_key?: string;
+}): Promise<WorkerStatus[]>;
+/**
+ * Get progress of all subtasks within an epic.
+ * Returns completion percentage from progress_reported events.
+ */
+export declare function getSubtaskProgress(db: DatabaseAdapter, epic_id: string): Promise<SubtaskProgress[]>;
+/**
+ * Get currently active file reservations.
+ * Excludes released reservations.
+ */
+export declare function getFileLocks(db: DatabaseAdapter, options?: {
+    project_key?: string;
+}): Promise<FileLock[]>;
+/**
+ * Get recent swarm mail messages, ordered by timestamp descending.
+ * Defaults to limit of 10.
+ */
+export declare function getRecentMessages(db: DatabaseAdapter, options?: {
+    limit?: number;
+    thread_id?: string;
+    importance?: "low" | "normal" | "high" | "urgent";
+}): Promise<RecentMessage[]>;
+/**
+ * Get list of all epics with subtask counts.
+ * Used for dashboard tabs/navigation.
+ *
+ * Derives epic information from events when beads table doesn't exist (test mode).
+ * In production, queries beads table directly.
+ */
+export declare function getEpicList(db: DatabaseAdapter, options?: {
+    status?: "open" | "in_progress" | "completed" | "blocked";
+}): Promise<EpicInfo[]>;
+//# sourceMappingURL=dashboard.d.ts.map
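A minimal usage sketch of the dashboard data layer declared above; the import path, the db wiring, and the project_key value are illustrative assumptions, not part of the package's documented API.

import type { DatabaseAdapter } from "swarm-mail";
// Hypothetical import path; depends on how the built package is consumed.
import { getWorkerStatus, getSubtaskProgress, getFileLocks, getRecentMessages } from "./dashboard.js";

async function printSnapshot(db: DatabaseAdapter, epicId: string) {
  // Worker states derived from the latest task_started / progress_reported / task_blocked events
  const workers = await getWorkerStatus(db, { project_key: "my-project" }); // project_key value is made up
  // Completion percentage per subtask for one epic
  const progress = await getSubtaskProgress(db, epicId);
  // Currently held (unreleased) file reservations
  const locks = await getFileLocks(db);
  // Ten most recent urgent messages
  const messages = await getRecentMessages(db, { limit: 10, importance: "urgent" });
  console.log({ workers, progress, locks, messages });
}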
package/dist/dashboard.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"dashboard.d.ts","sourceRoot":"","sources":["../src/dashboard.ts"],"names":[],"mappings":"AAAA;;;;;;;;;GASG;AAEH,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,YAAY,CAAC;AAElD,MAAM,WAAW,YAAY;IAC5B,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,GAAG,SAAS,GAAG,SAAS,CAAC;IACvC,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,aAAa,EAAE,MAAM,CAAC;CACtB;AAED,MAAM,WAAW,eAAe;IAC/B,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,GAAG,aAAa,GAAG,WAAW,GAAG,SAAS,CAAC;IACzD,gBAAgB,EAAE,MAAM,CAAC;CACzB;AAED,MAAM,WAAW,QAAQ;IACxB,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,CAAC;IACf,WAAW,EAAE,MAAM,CAAC;IACpB,WAAW,EAAE,MAAM,CAAC;CACpB;AAED,MAAM,WAAW,aAAa;IAC7B,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,MAAM,CAAC;IACb,EAAE,EAAE,MAAM,EAAE,CAAC;IACb,OAAO,EAAE,MAAM,CAAC;IAChB,SAAS,EAAE,MAAM,CAAC;IAClB,UAAU,EAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,GAAG,QAAQ,CAAC;CACjD;AAED,MAAM,WAAW,QAAQ;IACxB,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE,MAAM,CAAC;IACd,aAAa,EAAE,MAAM,CAAC;IACtB,eAAe,EAAE,MAAM,CAAC;CACxB;AAED;;;GAGG;AACH,wBAAsB,eAAe,CACpC,EAAE,EAAE,eAAe,EACnB,OAAO,CAAC,EAAE;IAAE,WAAW,CAAC,EAAE,MAAM,CAAA;CAAE,GAChC,OAAO,CAAC,YAAY,EAAE,CAAC,CAoEzB;AAED;;;GAGG;AACH,wBAAsB,kBAAkB,CACvC,EAAE,EAAE,eAAe,EACnB,OAAO,EAAE,MAAM,GACb,OAAO,CAAC,eAAe,EAAE,CAAC,CAuD5B;AAED;;;GAGG;AACH,wBAAsB,YAAY,CACjC,EAAE,EAAE,eAAe,EACnB,OAAO,CAAC,EAAE;IAAE,WAAW,CAAC,EAAE,MAAM,CAAA;CAAE,GAChC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAiDrB;AAED;;;GAGG;AACH,wBAAsB,iBAAiB,CACtC,EAAE,EAAE,eAAe,EACnB,OAAO,CAAC,EAAE;IACT,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,UAAU,CAAC,EAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,GAAG,QAAQ,CAAC;CAClD,GACC,OAAO,CAAC,aAAa,EAAE,CAAC,CAkD1B;AAED;;;;;;GAMG;AACH,wBAAsB,WAAW,CAChC,EAAE,EAAE,eAAe,EACnB,OAAO,CAAC,EAAE;IAAE,MAAM,CAAC,EAAE,MAAM,GAAG,aAAa,GAAG,WAAW,GAAG,SAAS,CAAA;CAAE,GACrE,OAAO,CAAC,QAAQ,EAAE,CAAC,CA0IrB"}
package/dist/error-enrichment.d.ts
ADDED
@@ -0,0 +1,49 @@
+/**
+ * Error Enrichment - Structured error context for swarm agents
+ *
+ * TDD GREEN: Minimal implementation to pass tests
+ */
+export interface SwarmErrorContext {
+    file?: string;
+    line?: number;
+    agent?: string;
+    epic_id?: string;
+    bead_id?: string;
+    recent_events?: Array<{
+        type: string;
+        timestamp: string;
+        message: string;
+    }>;
+}
+/**
+ * SwarmError - Error class with structured context
+ */
+export declare class SwarmError extends Error {
+    context: SwarmErrorContext;
+    constructor(message: string, context?: SwarmErrorContext);
+    toJSON(): {
+        name: string;
+        message: string;
+        context: SwarmErrorContext;
+        stack: string | undefined;
+    };
+}
+/**
+ * enrichError - Convert any error to SwarmError with context
+ */
+export declare function enrichError(error: unknown, context: SwarmErrorContext): SwarmError;
+/**
+ * debugLog - Conditional logging based on DEBUG env var
+ *
+ * Patterns:
+ * - DEBUG=swarm:* (all)
+ * - DEBUG=swarm:coordinator
+ * - DEBUG=swarm:worker
+ * - DEBUG=swarm:mail
+ */
+export declare function debugLog(namespace: string, message: string, data?: unknown): void;
+/**
+ * suggestFix - Pattern matching for common swarm errors
+ */
+export declare function suggestFix(error: Error | SwarmError): string | null;
+//# sourceMappingURL=error-enrichment.d.ts.map
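A minimal sketch of how these declarations could be used together; the context values and the import path are illustrative assumptions, not from the package docs.

// Hypothetical import path; the declarations above define the shapes used here.
import { enrichError, debugLog, suggestFix, type SwarmError } from "./error-enrichment.js";

try {
  throw new Error("file reservation conflict"); // placeholder failure
} catch (err) {
  // Wrap any thrown value in a SwarmError carrying structured swarm context
  const enriched: SwarmError = enrichError(err, {
    agent: "worker-2", // illustrative values
    epic_id: "epic-123",
    bead_id: "bead-7",
  });

  // Emits only when DEBUG=swarm:* or DEBUG=swarm:worker is set
  debugLog("swarm:worker", "task failed", enriched.toJSON());

  // Returns a hint string for known error patterns, or null
  const hint = suggestFix(enriched);
  if (hint) console.error(hint);
}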
package/dist/error-enrichment.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"error-enrichment.d.ts","sourceRoot":"","sources":["../src/error-enrichment.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,MAAM,WAAW,iBAAiB;IACjC,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,aAAa,CAAC,EAAE,KAAK,CAAC;QACrB,IAAI,EAAE,MAAM,CAAC;QACb,SAAS,EAAE,MAAM,CAAC;QAClB,OAAO,EAAE,MAAM,CAAC;KAChB,CAAC,CAAC;CACH;AAED;;GAEG;AACH,qBAAa,UAAW,SAAQ,KAAK;IACpC,OAAO,EAAE,iBAAiB,CAAC;gBAEf,OAAO,EAAE,MAAM,EAAE,OAAO,GAAE,iBAAsB;IAW5D,MAAM;;;;;;CAQN;AAED;;GAEG;AACH,wBAAgB,WAAW,CAC1B,KAAK,EAAE,OAAO,EACd,OAAO,EAAE,iBAAiB,GACxB,UAAU,CAyBZ;AAED;;;;;;;;GAQG;AACH,wBAAgB,QAAQ,CACvB,SAAS,EAAE,MAAM,EACjB,OAAO,EAAE,MAAM,EACf,IAAI,CAAC,EAAE,OAAO,GACZ,IAAI,CAyBN;AAED;;GAEG;AACH,wBAAgB,UAAU,CAAC,KAAK,EAAE,KAAK,GAAG,UAAU,GAAG,MAAM,GAAG,IAAI,CA8EnE"}
package/dist/export-tools.d.ts
ADDED
@@ -0,0 +1,76 @@
+/**
+ * Export Tools - Convert Cell Events to Various Formats
+ *
+ * GREEN PHASE: Minimal implementation to pass tests
+ *
+ * Supports:
+ * - OTLP (OpenTelemetry Protocol) - for distributed tracing
+ * - CSV - for spreadsheet analysis
+ * - JSON - for generic data interchange
+ */
+import type { CellEvent } from "./schemas/cell-events.js";
+/**
+ * OpenTelemetry OTLP span structure
+ */
+interface OTLPSpan {
+    traceId: string;
+    spanId: string;
+    name: string;
+    startTimeUnixNano: string;
+    attributes: Array<{
+        key: string;
+        value: {
+            stringValue?: string;
+            intValue?: number;
+            boolValue?: boolean;
+        };
+    }>;
+}
+interface OTLPOutput {
+    resourceSpans: Array<{
+        resource: {
+            attributes: Array<{
+                key: string;
+                value: {
+                    stringValue: string;
+                };
+            }>;
+        };
+        scopeSpans: Array<{
+            scope: {
+                name: string;
+            };
+            spans: OTLPSpan[];
+        }>;
+    }>;
+}
+/**
+ * Export cell events to OpenTelemetry OTLP format
+ *
+ * Mapping:
+ * - epic_id (from metadata) → trace_id (32 hex chars)
+ * - cell_id → span_id (16 hex chars)
+ * - timestamp → startTimeUnixNano (nanoseconds as string)
+ * - event.type → span.name
+ * - event payload → span.attributes
+ */
+export declare function exportToOTLP(events: CellEvent[]): OTLPOutput;
+/**
+ * Export cell events to CSV format
+ *
+ * Format:
+ * - Headers: id,type,timestamp,project_key,cell_id,payload
+ * - Payload: JSON serialization of entire event (minus headers)
+ */
+export declare function exportToCSV(events: CellEvent[]): string;
+/**
+ * Export cell events to JSON format
+ *
+ * Format:
+ * - Array of event objects
+ * - Pretty-printed with 2-space indentation
+ * - Preserves all fields and discriminated union types
+ */
+export declare function exportToJSON(events: CellEvent[]): string;
+export {};
+//# sourceMappingURL=export-tools.d.ts.map
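A minimal sketch of the three exporters declared above; the events value is a placeholder standing in for cell events loaded from the event store, and the import path is illustrative.

import type { CellEvent } from "./schemas/cell-events.js";
// Hypothetical import path for the compiled module.
import { exportToOTLP, exportToCSV, exportToJSON } from "./export-tools.js";

declare const events: CellEvent[]; // placeholder: e.g. all events for one epic

// OTLP trace: epic_id becomes the trace id, cell_id the span id, event.type the span name
const otlp = exportToOTLP(events);

// CSV with headers id,type,timestamp,project_key,cell_id,payload
const csv = exportToCSV(events);

// Pretty-printed JSON array preserving all event fields
const json = exportToJSON(events);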
package/dist/export-tools.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"export-tools.d.ts","sourceRoot":"","sources":["../src/export-tools.ts"],"names":[],"mappings":"AAAA;;;;;;;;;GASG;AAEH,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AAO1D;;GAEG;AACH,UAAU,QAAQ;IAChB,OAAO,EAAE,MAAM,CAAC;IAChB,MAAM,EAAE,MAAM,CAAC;IACf,IAAI,EAAE,MAAM,CAAC;IACb,iBAAiB,EAAE,MAAM,CAAC;IAC1B,UAAU,EAAE,KAAK,CAAC;QAChB,GAAG,EAAE,MAAM,CAAC;QACZ,KAAK,EAAE;YACL,WAAW,CAAC,EAAE,MAAM,CAAC;YACrB,QAAQ,CAAC,EAAE,MAAM,CAAC;YAClB,SAAS,CAAC,EAAE,OAAO,CAAC;SACrB,CAAC;KACH,CAAC,CAAC;CACJ;AAED,UAAU,UAAU;IAClB,aAAa,EAAE,KAAK,CAAC;QACnB,QAAQ,EAAE;YACR,UAAU,EAAE,KAAK,CAAC;gBAChB,GAAG,EAAE,MAAM,CAAC;gBACZ,KAAK,EAAE;oBAAE,WAAW,EAAE,MAAM,CAAA;iBAAE,CAAC;aAChC,CAAC,CAAC;SACJ,CAAC;QACF,UAAU,EAAE,KAAK,CAAC;YAChB,KAAK,EAAE;gBACL,IAAI,EAAE,MAAM,CAAC;aACd,CAAC;YACF,KAAK,EAAE,QAAQ,EAAE,CAAC;SACnB,CAAC,CAAC;KACJ,CAAC,CAAC;CACJ;AA6ED;;;;;;;;;GASG;AACH,wBAAgB,YAAY,CAAC,MAAM,EAAE,SAAS,EAAE,GAAG,UAAU,CAiD5D;AAsBD;;;;;;GAMG;AACH,wBAAgB,WAAW,CAAC,MAAM,EAAE,SAAS,EAAE,GAAG,MAAM,CAoBvD;AAMD;;;;;;;GAOG;AACH,wBAAgB,YAAY,CAAC,MAAM,EAAE,SAAS,EAAE,GAAG,MAAM,CAMxD"}
package/dist/index.d.ts
CHANGED
@@ -171,13 +171,13 @@ export declare const allTools: {
         args: {
             sql: import("zod").ZodString;
             format: import("zod").ZodOptional<import("zod").ZodEnum<{
-                json: "json";
                 table: "table";
+                json: "json";
             }>>;
         };
         execute(args: {
             sql: string;
-            format?: "
+            format?: "table" | "json" | undefined;
         }, context: import("@opencode-ai/plugin").ToolContext): Promise<string>;
     };
     readonly swarm_diagnose: {
|