opencode-swarm-plugin 0.39.1 → 0.42.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.hive/analysis/eval-failure-analysis-2025-12-25.md +331 -0
- package/.hive/analysis/session-data-quality-audit.md +320 -0
- package/.hive/eval-results.json +481 -24
- package/.hive/issues.jsonl +76 -11
- package/.hive/memories.jsonl +159 -1
- package/.opencode/eval-history.jsonl +315 -0
- package/.turbo/turbo-build.log +5 -5
- package/CHANGELOG.md +207 -0
- package/README.md +2 -0
- package/SCORER-ANALYSIS.md +598 -0
- package/bin/eval-gate.test.ts +158 -0
- package/bin/eval-gate.ts +74 -0
- package/bin/swarm.test.ts +1054 -719
- package/bin/swarm.ts +577 -0
- package/dist/compaction-hook.d.ts +10 -1
- package/dist/compaction-hook.d.ts.map +1 -1
- package/dist/compaction-observability.d.ts +173 -0
- package/dist/compaction-observability.d.ts.map +1 -0
- package/dist/compaction-prompt-scoring.d.ts +1 -0
- package/dist/compaction-prompt-scoring.d.ts.map +1 -1
- package/dist/eval-capture.d.ts +93 -0
- package/dist/eval-capture.d.ts.map +1 -1
- package/dist/eval-runner.d.ts +134 -0
- package/dist/eval-runner.d.ts.map +1 -0
- package/dist/hive.d.ts.map +1 -1
- package/dist/index.d.ts +65 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +84043 -28070
- package/dist/memory-tools.d.ts +70 -2
- package/dist/memory-tools.d.ts.map +1 -1
- package/dist/memory.d.ts +37 -0
- package/dist/memory.d.ts.map +1 -1
- package/dist/observability-tools.d.ts +64 -0
- package/dist/observability-tools.d.ts.map +1 -1
- package/dist/plugin.js +83570 -27466
- package/dist/schemas/task.d.ts +3 -3
- package/dist/swarm-orchestrate.d.ts.map +1 -1
- package/dist/swarm-prompts.d.ts +32 -1
- package/dist/swarm-prompts.d.ts.map +1 -1
- package/docs/planning/ADR-009-oh-my-opencode-patterns.md +353 -0
- package/evals/ARCHITECTURE.md +1189 -0
- package/evals/README.md +113 -0
- package/evals/example.eval.ts +3 -4
- package/evals/fixtures/compaction-prompt-cases.ts +6 -0
- package/evals/scorers/coordinator-discipline.evalite-test.ts +163 -0
- package/evals/scorers/coordinator-discipline.ts +82 -2
- package/evals/scorers/index.test.ts +146 -0
- package/evals/scorers/index.ts +104 -0
- package/evals/swarm-decomposition.eval.ts +13 -4
- package/examples/commands/swarm.md +291 -21
- package/package.json +4 -3
- package/src/compaction-hook.ts +258 -110
- package/src/compaction-observability.integration.test.ts +139 -0
- package/src/compaction-observability.test.ts +187 -0
- package/src/compaction-observability.ts +324 -0
- package/src/compaction-prompt-scorers.test.ts +10 -9
- package/src/compaction-prompt-scoring.ts +7 -5
- package/src/eval-capture.test.ts +204 -1
- package/src/eval-capture.ts +194 -2
- package/src/eval-runner.test.ts +223 -0
- package/src/eval-runner.ts +402 -0
- package/src/hive.ts +57 -22
- package/src/index.ts +54 -1
- package/src/memory-tools.test.ts +84 -0
- package/src/memory-tools.ts +68 -3
- package/src/memory.test.ts +2 -2
- package/src/memory.ts +122 -49
- package/src/observability-tools.test.ts +13 -0
- package/src/observability-tools.ts +277 -0
- package/src/swarm-orchestrate.test.ts +162 -0
- package/src/swarm-orchestrate.ts +7 -5
- package/src/swarm-prompts.test.ts +168 -4
- package/src/swarm-prompts.ts +228 -7
- package/.env +0 -2
- package/.turbo/turbo-test.log +0 -481
- package/.turbo/turbo-typecheck.log +0 -1
- package/dist/beads.d.ts +0 -386
- package/dist/beads.d.ts.map +0 -1
- package/dist/schemas/bead-events.d.ts +0 -698
- package/dist/schemas/bead-events.d.ts.map +0 -1
- package/dist/schemas/bead.d.ts +0 -255
- package/dist/schemas/bead.d.ts.map +0 -1
package/bin/swarm.ts
CHANGED
@@ -2520,6 +2520,8 @@ ${cyan("Commands:")}
   swarm migrate              Migrate PGlite database to libSQL
   swarm cells                List or get cells from database (replaces 'swarm tool hive_query')
   swarm log                  View swarm logs with filtering
+  swarm stats                Show swarm health metrics and success rates
+  swarm history              Show recent swarm activity timeline
   swarm eval                 Eval-driven development commands
   swarm update               Update to latest version
   swarm version              Show version and banner
@@ -2548,6 +2550,21 @@ ${cyan("Log Viewing:")}
   swarm log --limit <n>                Limit output to n lines (default: 50)
   swarm log --watch, -w                Watch mode - continuously monitor for new logs
   swarm log --interval <ms>            Poll interval in ms (default: 1000, min: 100)
+  swarm log sessions                   List all captured coordinator sessions
+  swarm log sessions <session_id>      View events for a specific session
+  swarm log sessions --latest          View most recent session
+  swarm log sessions --type <type>     Filter by event type (DECISION, VIOLATION, OUTCOME, COMPACTION)
+  swarm log sessions --json            Raw JSON output for jq
+
+${cyan("Stats & History:")}
+  swarm stats                          Show swarm health metrics (last 7 days)
+  swarm stats --since 24h              Show stats for custom time period
+  swarm stats --json                   Output as JSON for scripting
+  swarm history                        Show recent swarms (last 10)
+  swarm history --limit 20             Show more swarms
+  swarm history --status success       Filter by success/failed/in_progress
+  swarm history --strategy file-based  Filter by decomposition strategy
+  swarm history --verbose              Show detailed subtask information
 
 ${cyan("Eval Commands:")}
   swarm eval status [eval-name]        Show current phase, thresholds, recent scores
@@ -2911,6 +2928,298 @@ async function migrate() {
 }
 }
 
+// ============================================================================
+// Session Log Helpers
+// ============================================================================
+
+import type { CoordinatorEvent } from "../src/eval-capture.js";
+
+/**
+ * Parse a session file and return events
+ */
+function parseSessionFile(filePath: string): CoordinatorEvent[] {
+  if (!existsSync(filePath)) {
+    throw new Error(`Session file not found: ${filePath}`);
+  }
+
+  const content = readFileSync(filePath, "utf-8");
+  const lines = content.split("\n").filter((line) => line.trim());
+  const events: CoordinatorEvent[] = [];
+
+  for (const line of lines) {
+    try {
+      const parsed = JSON.parse(line);
+      events.push(parsed);
+    } catch {
+      // Skip invalid JSON lines
+    }
+  }
+
+  return events;
+}
+
+/**
+ * List all session files in a directory
+ */
+function listSessionFiles(
+  dir: string,
+): Array<{
+  session_id: string;
+  file_path: string;
+  event_count: number;
+  start_time: string;
+  end_time?: string;
+}> {
+  if (!existsSync(dir)) return [];
+
+  const files = readdirSync(dir).filter((f: string) => f.endsWith(".jsonl"));
+  const sessions: Array<{
+    session_id: string;
+    file_path: string;
+    event_count: number;
+    start_time: string;
+    end_time?: string;
+  }> = [];
+
+  for (const file of files) {
+    const filePath = join(dir, file);
+    try {
+      const events = parseSessionFile(filePath);
+      if (events.length === 0) continue;
+
+      const timestamps = events.map((e) => new Date(e.timestamp).getTime());
+      const startTime = new Date(Math.min(...timestamps)).toISOString();
+      const endTime =
+        timestamps.length > 1
+          ? new Date(Math.max(...timestamps)).toISOString()
+          : undefined;
+
+      sessions.push({
+        session_id: events[0].session_id,
+        file_path: filePath,
+        event_count: events.length,
+        start_time: startTime,
+        end_time: endTime,
+      });
+    } catch {
+      // Skip invalid files
+    }
+  }
+
+  // Sort by start time (newest first)
+  return sessions.sort((a, b) =>
+    new Date(b.start_time).getTime() - new Date(a.start_time).getTime()
+  );
+}
+
+/**
+ * Get the latest session file
+ */
+function getLatestSession(
+  dir: string,
+): {
+  session_id: string;
+  file_path: string;
+  event_count: number;
+  start_time: string;
+  end_time?: string;
+} | null {
+  const sessions = listSessionFiles(dir);
+  return sessions.length > 0 ? sessions[0] : null;
+}
+
+/**
+ * Filter events by type
+ */
+function filterEventsByType(
+  events: CoordinatorEvent[],
+  eventType: string,
+): CoordinatorEvent[] {
+  if (eventType === "all") return events;
+  return events.filter((e) => e.event_type === eventType.toUpperCase());
+}
+
+/**
+ * Filter events by time
+ */
+function filterEventsSince(
+  events: CoordinatorEvent[],
+  sinceMs: number,
+): CoordinatorEvent[] {
+  const cutoffTime = Date.now() - sinceMs;
+  return events.filter((e) =>
+    new Date(e.timestamp).getTime() >= cutoffTime
+  );
+}
+
+/**
+ * Format an event for display
+ */
+function formatEvent(event: CoordinatorEvent, useColor = true): string {
+  const timestamp = new Date(event.timestamp).toLocaleTimeString();
+  const typeColor = useColor
+    ? event.event_type === "VIOLATION"
+      ? red
+      : event.event_type === "OUTCOME"
+        ? green
+        : cyan
+    : (s: string) => s;
+
+  const type = typeColor(event.event_type.padEnd(12));
+
+  // Get specific type
+  let specificType = "";
+  if (event.event_type === "DECISION") {
+    specificType = event.decision_type;
+  } else if (event.event_type === "VIOLATION") {
+    specificType = event.violation_type;
+  } else if (event.event_type === "OUTCOME") {
+    specificType = event.outcome_type;
+  } else if (event.event_type === "COMPACTION") {
+    specificType = event.compaction_type;
+  }
+
+  return `${timestamp} ${type} ${specificType}`;
+}
+
+// ============================================================================
+// Session Log Command
+// ============================================================================
+
+async function logSessions() {
+  const args = process.argv.slice(4); // Skip 'log' and 'sessions'
+  const sessionsDir = join(homedir(), ".config", "swarm-tools", "sessions");
+
+  // Parse arguments
+  let sessionId: string | null = null;
+  let latest = false;
+  let jsonOutput = false;
+  let eventTypeFilter: string | null = null;
+  let sinceMs: number | null = null;
+  let limit = 100;
+
+  for (let i = 0; i < args.length; i++) {
+    const arg = args[i];
+
+    if (arg === "--latest") {
+      latest = true;
+    } else if (arg === "--json") {
+      jsonOutput = true;
+    } else if (arg === "--type" && i + 1 < args.length) {
+      eventTypeFilter = args[++i];
+    } else if (arg === "--since" && i + 1 < args.length) {
+      const duration = parseDuration(args[++i]);
+      if (duration === null) {
+        p.log.error(`Invalid duration format: ${args[i]}`);
+        p.log.message(dim(" Use format: 30s, 5m, 2h, 1d"));
+        process.exit(1);
+      }
+      sinceMs = duration;
+    } else if (arg === "--limit" && i + 1 < args.length) {
+      limit = parseInt(args[++i], 10);
+      if (isNaN(limit) || limit <= 0) {
+        p.log.error(`Invalid limit: ${args[i]}`);
+        process.exit(1);
+      }
+    } else if (!arg.startsWith("--") && !arg.startsWith("-")) {
+      // Positional arg = session ID
+      sessionId = arg;
+    }
+  }
+
+  // If no args, list sessions
+  if (!sessionId && !latest) {
+    const sessions = listSessionFiles(sessionsDir);
+
+    if (jsonOutput) {
+      console.log(JSON.stringify({ sessions }, null, 2));
+      return;
+    }
+
+    if (sessions.length === 0) {
+      p.log.warn("No session files found");
+      p.log.message(dim(` Expected: ${sessionsDir}/*.jsonl`));
+      return;
+    }
+
+    console.log(yellow(BANNER));
+    console.log(dim(` Coordinator Sessions (${sessions.length} total)\n`));
+
+    // Show sessions table
+    for (const session of sessions) {
+      const startTime = new Date(session.start_time).toLocaleString();
+      const duration = session.end_time
+        ? ((new Date(session.end_time).getTime() - new Date(session.start_time).getTime()) / 1000).toFixed(0) + "s"
+        : "ongoing";
+
+      console.log(` ${cyan(session.session_id)}`);
+      console.log(` ${dim("Started:")} ${startTime}`);
+      console.log(` ${dim("Events:")} ${session.event_count}`);
+      console.log(` ${dim("Duration:")} ${duration}`);
+      console.log();
+    }
+
+    console.log(dim(" Use --latest to view most recent session"));
+    console.log(dim(" Use <session_id> to view specific session"));
+    console.log();
+    return;
+  }
+
+  // Get session (either by ID or latest)
+  let session: { session_id: string; file_path: string; event_count: number; start_time: string; end_time?: string; } | null = null;
+
+  if (latest) {
+    session = getLatestSession(sessionsDir);
+    if (!session) {
+      p.log.error("No sessions found");
+      return;
+    }
+  } else if (sessionId) {
+    // Find session by ID (partial match)
+    const sessions = listSessionFiles(sessionsDir);
+    session = sessions.find(s => s.session_id.includes(sessionId!)) || null;
+
+    if (!session) {
+      p.log.error(`Session not found: ${sessionId}`);
+      return;
+    }
+  }
+
+  // Load and filter events
+  let events = parseSessionFile(session!.file_path);
+
+  if (eventTypeFilter) {
+    events = filterEventsByType(events, eventTypeFilter);
+  }
+
+  if (sinceMs !== null) {
+    events = filterEventsSince(events, sinceMs);
+  }
+
+  // Apply limit
+  if (events.length > limit) {
+    events = events.slice(-limit);
+  }
+
+  // Output
+  if (jsonOutput) {
+    console.log(JSON.stringify({ session_id: session!.session_id, events }, null, 2));
+    return;
+  }
+
+  console.log(yellow(BANNER));
+  console.log(dim(` Session: ${session!.session_id}\n`));
+  console.log(` ${dim("Events:")} ${events.length}/${session!.event_count}`);
+  if (eventTypeFilter) console.log(` ${dim("Type:")} ${eventTypeFilter}`);
+  if (sinceMs !== null) console.log(` ${dim("Since:")} ${args[args.indexOf("--since") + 1]}`);
+  console.log();
+
+  for (const event of events) {
+    console.log(" " + formatEvent(event, true));
+  }
+  console.log();
+}
+
 // ============================================================================
 // Log Command - View swarm logs with filtering
 // ============================================================================
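The helpers above treat each session file as JSONL: one coordinator event per line, carrying at least a session_id, a timestamp, and an event_type. A minimal sketch of that contract follows; the field names are an assumption read off the helper code (the real CoordinatorEvent type lives in src/eval-capture.ts and is not shown in this diff), and the sample violation_type value is made up. Note that filterEventsByType uppercases the filter, so `--type violation` matches `VIOLATION` events.

```ts
// Sketch only: the field names below are assumptions based on what the helpers read;
// the real CoordinatorEvent type is defined in src/eval-capture.ts.
type SketchEvent = {
  session_id: string;
  timestamp: string; // ISO 8601
  event_type: "DECISION" | "VIOLATION" | "OUTCOME" | "COMPACTION";
  violation_type?: string;
};

// One JSONL line, as parseSessionFile() would see it (sample data, not real output).
const line =
  '{"session_id":"ses_abc","timestamp":"2025-01-01T12:00:00Z","event_type":"VIOLATION","violation_type":"coordinator_edit"}';
const events: SketchEvent[] = [JSON.parse(line)];

// Mirrors filterEventsByType(): the filter is uppercased, so "violation" matches "VIOLATION".
const filter = "violation";
const matches = events.filter((e) => e.event_type === filter.toUpperCase());
console.log(matches.length); // 1
```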
@@ -3226,6 +3535,12 @@ async function cells() {
 async function logs() {
   const args = process.argv.slice(3);
 
+  // Check for 'sessions' subcommand
+  if (args[0] === "sessions") {
+    await logSessions();
+    return;
+  }
+
   // Parse arguments
   let moduleFilter: string | null = null;
   let levelFilter: number | null = null;
@@ -3710,6 +4025,262 @@ function formatEvalRunResultOutput(result: {
   p.log.message(result.message);
 }
 
+// ============================================================================
+// Stats Command - Swarm Health Metrics
+// ============================================================================
+
+async function stats() {
+  const { getSwarmMailLibSQL } = await import("swarm-mail");
+  const { formatSwarmStats, parseTimePeriod, aggregateByStrategy } = await import(
+    "../src/observability-tools"
+  );
+
+  p.intro("swarm stats");
+
+  // Parse args
+  const args = process.argv.slice(3);
+  let period = "7d"; // default to 7 days
+  let format: "text" | "json" = "text";
+
+  for (let i = 0; i < args.length; i++) {
+    if (args[i] === "--since" || args[i] === "-s") {
+      period = args[i + 1] || "7d";
+      i++;
+    } else if (args[i] === "--json") {
+      format = "json";
+    }
+  }
+
+  try {
+    const projectPath = process.cwd();
+    const swarmMail = await getSwarmMailLibSQL(projectPath);
+    const db = await swarmMail.getDatabase();
+
+    // Calculate since timestamp
+    const since = parseTimePeriod(period);
+    const periodMatch = period.match(/^(\d+)([dhm])$/);
+    const periodDays = periodMatch ?
+      (periodMatch[2] === "d" ? Number.parseInt(periodMatch[1]) :
+      periodMatch[2] === "h" ? Number.parseInt(periodMatch[1]) / 24 :
+      Number.parseInt(periodMatch[1]) / (24 * 60)) : 7;
+
+    // Query overall stats
+    const overallResult = await db.query(
+      `SELECT
+        COUNT(DISTINCT json_extract(data, '$.epic_id')) as total_swarms,
+        SUM(CASE WHEN json_extract(data, '$.success') = 'true' THEN 1 ELSE 0 END) as successes,
+        COUNT(*) as total_outcomes,
+        CAST(AVG(CAST(json_extract(data, '$.duration_ms') AS REAL)) / 60000 AS REAL) as avg_duration_min
+      FROM events
+      WHERE type = 'subtask_outcome'
+        AND timestamp >= ?`,
+      [since],
+    );
+
+    const overall = overallResult.rows[0] as {
+      total_swarms: number;
+      successes: number;
+      total_outcomes: number;
+      avg_duration_min: number;
+    } || { total_swarms: 0, successes: 0, total_outcomes: 0, avg_duration_min: 0 };
+
+    // Query strategy breakdown
+    const strategyResult = await db.query(
+      `SELECT
+        json_extract(data, '$.strategy') as strategy,
+        json_extract(data, '$.success') as success
+      FROM events
+      WHERE type = 'subtask_outcome'
+        AND timestamp >= ?`,
+      [since],
+    );
+
+    const strategies = aggregateByStrategy(
+      (strategyResult.rows as Array<{ strategy: string | null; success: string }>).map(
+        (row) => ({
+          strategy: row.strategy,
+          success: row.success === "true",
+        }),
+      ),
+    );
+
+    // Query coordinator stats from sessions
+    const sessionsPath = join(
+      homedir(),
+      ".config",
+      "swarm-tools",
+      "sessions",
+    );
+    let coordinatorStats = {
+      violationRate: 0,
+      spawnEfficiency: 0,
+      reviewThoroughness: 0,
+    };
+
+    if (existsSync(sessionsPath)) {
+      const sessionFiles = readdirSync(sessionsPath).filter(
+        (f) => f.endsWith(".jsonl") && statSync(join(sessionsPath, f)).mtimeMs >= since,
+      );
+
+      let totalViolations = 0;
+      let totalSpawns = 0;
+      let totalReviews = 0;
+      let totalSwarms = 0;
+
+      for (const file of sessionFiles) {
+        try {
+          const content = readFileSync(join(sessionsPath, file), "utf-8");
+          const lines = content.trim().split("\n");
+
+          let violations = 0;
+          let spawns = 0;
+          let reviews = 0;
+
+          for (const line of lines) {
+            try {
+              const event = JSON.parse(line);
+              if (event.type === "VIOLATION") violations++;
+              if (event.type === "DECISION" && event.action === "spawn") spawns++;
+              if (event.type === "DECISION" && event.action === "review") reviews++;
+            } catch {
+              // Skip invalid lines
+            }
+          }
+
+          if (spawns > 0 || violations > 0) {
+            totalViolations += violations;
+            totalSpawns += spawns;
+            totalReviews += reviews;
+            totalSwarms++;
+          }
+        } catch {
+          // Skip unreadable files
+        }
+      }
+
+      coordinatorStats = {
+        violationRate: totalSwarms > 0 ? (totalViolations / totalSwarms) * 100 : 0,
+        spawnEfficiency: totalSwarms > 0 ? (totalSpawns / totalSwarms) * 100 : 0,
+        reviewThoroughness: totalSpawns > 0 ? (totalReviews / totalSpawns) * 100 : 0,
+      };
+    }
+
+    // Build stats data
+    const stats = {
+      overall: {
+        totalSwarms: overall.total_swarms,
+        successRate:
+          overall.total_outcomes > 0
+            ? (overall.successes / overall.total_outcomes) * 100
+            : 0,
+        avgDurationMin: overall.avg_duration_min || 0,
+      },
+      byStrategy: strategies,
+      coordinator: coordinatorStats,
+      recentDays: Math.round(periodDays * 10) / 10,
+    };
+
+    // Output
+    if (format === "json") {
+      console.log(JSON.stringify(stats, null, 2));
+    } else {
+      console.log();
+      console.log(formatSwarmStats(stats));
+      console.log();
+    }
+
+    p.outro("Stats ready!");
+  } catch (error) {
+    p.log.error(error instanceof Error ? error.message : String(error));
+    p.outro("Failed to load stats");
+    process.exit(1);
+  }
+}
+
+// ============================================================================
+// History Command
+// ============================================================================
+
+async function swarmHistory() {
+  const {
+    querySwarmHistory,
+    formatSwarmHistory,
+  } = await import("../src/observability-tools.js");
+
+  p.intro("swarm history");
+
+  // Parse args
+  const args = process.argv.slice(3);
+  let limit = 10;
+  let status: "success" | "failed" | "in_progress" | undefined;
+  let strategy: "file-based" | "feature-based" | "risk-based" | undefined;
+  let verbose = false;
+
+  for (let i = 0; i < args.length; i++) {
+    const arg = args[i];
+
+    if (arg === "--limit" || arg === "-n") {
+      const limitStr = args[i + 1];
+      if (limitStr && !Number.isNaN(Number(limitStr))) {
+        limit = Number(limitStr);
+        i++;
+      }
+    } else if (arg === "--status") {
+      const statusStr = args[i + 1];
+      if (
+        statusStr &&
+        ["success", "failed", "in_progress"].includes(statusStr)
+      ) {
+        status = statusStr as "success" | "failed" | "in_progress";
+        i++;
+      }
+    } else if (arg === "--strategy") {
+      const strategyStr = args[i + 1];
+      if (
+        strategyStr &&
+        ["file-based", "feature-based", "risk-based"].includes(strategyStr)
+      ) {
+        strategy = strategyStr as "file-based" | "feature-based" | "risk-based";
+        i++;
+      }
+    } else if (arg === "--verbose" || arg === "-v") {
+      verbose = true;
+    }
+  }
+
+  try {
+    const projectPath = process.cwd();
+    const records = await querySwarmHistory(projectPath, {
+      limit,
+      status,
+      strategy,
+    });
+
+    console.log();
+    console.log(formatSwarmHistory(records));
+    console.log();
+
+    if (verbose && records.length > 0) {
+      console.log("Details:");
+      for (const record of records) {
+        console.log(
+          ` ${record.epic_id}: ${record.epic_title} (${record.strategy})`,
+        );
+        console.log(
+          ` Tasks: ${record.completed_count}/${record.task_count}, Success: ${record.overall_success ? "✅" : "❌"}`,
+        );
+      }
+      console.log();
+    }
+
+    p.outro("History ready!");
+  } catch (error) {
+    p.log.error(error instanceof Error ? error.message : String(error));
+    p.outro("Failed to load history");
+    process.exit(1);
+  }
+}
+
 // ============================================================================
 // Eval Command
 // ============================================================================
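The help text advertises `swarm stats --json` for scripting, and the printed payload mirrors the `stats` object assembled in the hunk above. The sketch below shows one way a script might consume it; the interface is an assumption inferred from that code rather than a documented API, `byStrategy` is left loosely typed because `aggregateByStrategy`'s return shape is not shown in this diff, and the sample values are illustrative only.

```ts
// Shape inferred from the `stats` object built in stats() above (an assumption,
// not a documented API).
interface SwarmStatsJson {
  overall: { totalSwarms: number; successRate: number; avgDurationMin: number };
  byStrategy: unknown; // produced by aggregateByStrategy(); exact shape not shown in this diff
  coordinator: {
    violationRate: number;
    spawnEfficiency: number;
    reviewThoroughness: number;
  };
  recentDays: number;
}

// Illustrative payload (made-up numbers), e.g. captured with `swarm stats --json > stats.json`.
const raw =
  '{"overall":{"totalSwarms":4,"successRate":75,"avgDurationMin":12.5},"byStrategy":{},"coordinator":{"violationRate":25,"spawnEfficiency":150,"reviewThoroughness":100},"recentDays":7}';
const parsed: SwarmStatsJson = JSON.parse(raw);

// Simple gate: flag a low success rate for the covered period.
if (parsed.overall.successRate < 50) {
  console.error(`success rate ${parsed.overall.successRate}% over last ${parsed.recentDays} days`);
  process.exitCode = 1;
}
```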
@@ -3971,6 +4542,12 @@ switch (command) {
   case "logs":
     await logs();
     break;
+  case "stats":
+    await stats();
+    break;
+  case "history":
+    await swarmHistory();
+    break;
   case "eval":
     await evalCommand();
     break;
package/dist/compaction-hook.d.ts
CHANGED
@@ -37,8 +37,17 @@
  *
  * This is NOT about preserving state for a human - it's about the swarm continuing
  * autonomously after context compression.
+ *
+ * Structure optimized for eval scores:
+ * 1. ASCII header (visual anchor, coordinatorIdentity scorer)
+ * 2. What Good Looks Like (behavioral examples, outcome-focused)
+ * 3. Immediate actions (actionable tool calls, postCompactionDiscipline scorer)
+ * 4. Forbidden tools (explicit list, forbiddenToolsPresent scorer)
+ * 5. Mandatory behaviors (inbox, skills, review)
+ * 6. Role & mandates (strong language, coordinatorIdentity scorer)
+ * 7. Reference sections (supporting material)
  */
-
export declare const SWARM_COMPACTION_CONTEXT = "\n\u250C\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2510\n\u2502 \u2502\n\u2502 \uD83D\uDC1D YOU ARE THE COORDINATOR \uD83D\uDC1D \u2502\n\u2502 \u2502\n\u2502 NOT A WORKER. NOT AN IMPLEMENTER. \u2502\n\u2502 YOU ORCHESTRATE. \u2502\n\u2502 \u2502\n\u2514\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2518\n\
+
export declare const SWARM_COMPACTION_CONTEXT = "\n\u250C\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2510\n\u2502 \u2502\n\u2502 \uD83D\uDC1D YOU ARE THE COORDINATOR \uD83D\uDC1D \u2502\n\u2502 \u2502\n\u2502 NOT A WORKER. NOT AN IMPLEMENTER. \u2502\n\u2502 YOU ORCHESTRATE. \u2502\n\u2502 \u2502\n\u2514\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2518\n\nContext was compacted but the swarm is still running. **YOU ARE THE COORDINATOR.**\n\nYour role is ORCHESTRATION, not implementation. The resume steps above (if present) tell you exactly what to do first.\n\n---\n\n## \uD83C\uDFAF WHAT GOOD LOOKS LIKE (Behavioral Examples)\n\n**\u2705 GOOD Coordinator Behavior:**\n- Spawned researcher for unfamiliar tech \u2192 got summary \u2192 stored in semantic-memory\n- Loaded `skills_use(name=\"testing-patterns\")` BEFORE spawning test workers\n- Checked `swarmmail_inbox()` every 5-10 minutes \u2192 caught blocked worker \u2192 unblocked in 2min\n- Delegated planning to swarm/planner subagent \u2192 main context stayed clean\n- Workers reserved their OWN files \u2192 no conflicts\n- Reviewed all worker output with `swarm_review` \u2192 caught integration issue before merge\n\n**\u274C COMMON MISTAKES (Avoid These):**\n- Called context7/pdf-brain directly \u2192 dumped 50KB into thread \u2192 context exhaustion\n- Skipped skill loading \u2192 workers reinvented patterns already in skills\n- Never checked inbox \u2192 worker stuck 25 minutes \u2192 silent failure\n- Reserved files as coordinator \u2192 workers blocked \u2192 swarm stalled\n- Closed cells when workers said \"done\" \u2192 skipped review \u2192 shipped broken code\n\n---\n\n## \uD83D\uDEAB FORBIDDEN TOOLS (NEVER Use These Directly)\n\nCoordinators do NOT do implementation work. These tools are **FORBIDDEN**:\n\n### File Modification (ALWAYS spawn workers instead)\n- `Edit` - SPAWN A WORKER\n- `Write` - SPAWN A WORKER\n- `bash` (for file modifications) - SPAWN A WORKER\n- `swarmmail_reserve` - Workers reserve their own files\n- `git commit` - Workers commit their own changes\n\n### External Data Fetching (SPAWN A RESEARCHER instead)\n\n**Repository fetching:**\n- `repo-crawl_file`, `repo-crawl_readme`, `repo-crawl_search`, `repo-crawl_structure`, `repo-crawl_tree`\n- `repo-autopsy_*` (all repo-autopsy tools)\n\n**Web/documentation fetching:**\n- `webfetch`, `fetch_fetch`\n- `context7_resolve-library-id`, `context7_get-library-docs`\n\n**Knowledge base:**\n- `pdf-brain_search`, `pdf-brain_read`\n\n**Instead:** Use `swarm_spawn_researcher` with a clear research task. The researcher will fetch, summarize, and return findings.\n\n---\n\n## \uD83D\uDCBC YOUR ROLE (Non-Negotiable)\n\nYou are the **COORDINATOR**. 
Your job is ORCHESTRATION, not implementation.\n\n### What Coordinators Do:\n- \u2705 Spawn workers for implementation tasks\n- \u2705 Monitor worker progress via `swarm_status` and `swarmmail_inbox`\n- \u2705 Review completed work with `swarm_review`\n- \u2705 Unblock dependencies and resolve conflicts\n- \u2705 Close the loop when epics complete\n\n### What Coordinators NEVER Do:\n- \u274C **NEVER** edit or write files directly\n- \u274C **NEVER** run tests with `bash`\n- \u274C **NEVER** \"just do it myself to save time\"\n- \u274C **NEVER** reserve files (workers reserve)\n- \u274C **NEVER** fetch external data directly (spawn researchers)\n\n**If you catch yourself about to edit a file, STOP. Use `swarm_spawn_subtask` instead.**\n\n### Strong Mandates:\n- **ALWAYS** spawn workers for implementation tasks\n- **ALWAYS** check status and inbox before decisions\n- **ALWAYS** review worker output before accepting\n- **NON-NEGOTIABLE:** You orchestrate. You do NOT implement.\n\n---\n\n## \uD83D\uDCCB MANDATORY BEHAVIORS (Post-Compaction Checklist)\n\n### 1. Inbox Monitoring (EVERY 5-10 MINUTES)\n```\nswarmmail_inbox(limit=5) # Check for messages\nswarmmail_read_message(message_id=N) # Read urgent ones\nswarm_status(epic_id, project_key) # Overall progress\n```\n**Intervention triggers:** Worker blocked >5min, file conflict, scope creep\n\n### 2. Skill Loading (BEFORE spawning workers)\n```\nskills_use(name=\"swarm-coordination\") # ALWAYS for swarms\nskills_use(name=\"testing-patterns\") # If task involves tests\nskills_use(name=\"system-design\") # If architectural decisions\n```\n**Include skill recommendations in shared_context for workers.**\n\n### 3. Worker Review (AFTER EVERY worker returns)\n```\nswarm_review(project_key, epic_id, task_id, files_touched)\n# Evaluate: Does it fulfill requirements? Enable downstream tasks? Type safe?\nswarm_review_feedback(project_key, task_id, worker_id, status, issues)\n```\n**3-Strike Rule:** After 3 rejections \u2192 mark blocked \u2192 escalate to human.\n\n### 4. Research Spawning (For unfamiliar tech)\n```\nTask(subagent_type=\"swarm-researcher\", prompt=\"Research <topic>...\")\n```\n**NEVER call context7, pdf-brain, webfetch directly.** Spawn a researcher.\n\n---\n\n## \uD83D\uDCDD SUMMARY FORMAT (Preserve This State)\n\nWhen compaction occurs, extract and preserve this structure:\n\n```\n## \uD83D\uDC1D Swarm State\n\n**Epic:** CELL_ID - TITLE\n**Project:** PROJECT_PATH\n**Progress:** X/Y subtasks complete\n\n**Active:**\n- CELL_ID: TITLE [in_progress] \u2192 AGENT working on FILES\n\n**Blocked:**\n- CELL_ID: TITLE - BLOCKED: REASON\n\n**Completed:**\n- CELL_ID: TITLE \u2713\n\n**Ready to Spawn:**\n- CELL_ID: TITLE (files: FILES)\n```\n\n### What to Extract:\n1. **Epic & Subtasks** - IDs, titles, status, file assignments\n2. **What's Running** - Active agents and their current work\n3. **What's Blocked** - Blockers and what's needed to unblock\n4. **What's Done** - Completed work and follow-ups\n5. **What's Next** - Pending subtasks ready to spawn\n\n---\n\n## \uD83D\uDCCB REFERENCE: Full Coordinator Workflow\n\nYou are ALWAYS swarming. 
Use this workflow for any new work:\n\n### Phase 1.5: Research (For Complex Tasks)\n\nIf the task requires unfamiliar technologies, spawn a researcher FIRST:\n\n```\nswarm_spawn_researcher(\n research_id=\"research-TOPIC\",\n epic_id=\"mjkw...\", # your epic ID\n tech_stack=[\"TECHNOLOGY\"],\n project_path=\"PROJECT_PATH\"\n)\n// Then spawn with Task(subagent_type=\"swarm/researcher\", prompt=\"...\")\n```\n\n### Phase 2: Knowledge Gathering\n\n```\nsemantic-memory_find(query=\"TASK_KEYWORDS\", limit=5) # Past learnings\ncass_search(query=\"TASK_DESCRIPTION\", limit=5) # Similar past tasks\nskills_list() # Available skills\n```\n\n### Phase 3: Decompose\n\n```\nswarm_select_strategy(task=\"TASK\")\nswarm_plan_prompt(task=\"TASK\", context=\"KNOWLEDGE\")\nswarm_validate_decomposition(response=\"CELLTREE_JSON\")\n```\n\n### Phase 4: Create Cells\n\n`hive_create_epic(epic_title=\"TASK\", subtasks=[...])`\n\n### Phase 5: File Reservations\n\n> **\u26A0\uFE0F Coordinator NEVER reserves files.** Workers reserve their own files with `swarmmail_reserve`.\n\n### Phase 6: Spawn Workers\n\n```\nswarm_spawn_subtask(bead_id, epic_id, title, files, shared_context, project_path)\nTask(subagent_type=\"swarm/worker\", prompt=\"GENERATED_PROMPT\")\n```\n\n### Phase 7: Review Loop (MANDATORY)\n\n**AFTER EVERY Task() RETURNS:**\n\n1. `swarmmail_inbox()` - Check for messages\n2. `swarm_review(project_key, epic_id, task_id, files_touched)` - Generate review\n3. Evaluate against epic goals\n4. `swarm_review_feedback(project_key, task_id, worker_id, status, issues)`\n\n**If needs_changes:**\n```\nswarm_spawn_retry(bead_id, epic_id, original_prompt, attempt, issues, diff, files, project_path)\n// Spawn NEW worker with Task() using retry prompt\n// Max 3 attempts before marking task blocked\n```\n\n### Phase 8: Complete\n\n`hive_sync()` - Sync all cells to git\n\n---\n\n## \uD83D\uDCCA REFERENCE: Decomposition Strategies\n\n| Strategy | Best For | Keywords |\n| -------------- | ------------------------ | -------------------------------------- |\n| file-based | Refactoring, migrations | refactor, migrate, rename, update all |\n| feature-based | New features | add, implement, build, create, feature |\n| risk-based | Bug fixes, security | fix, bug, security, critical, urgent |\n\n---\n\n**You are the COORDINATOR. You orchestrate. You do NOT implement. Spawn workers.**\n";
 /**
  * Fallback detection prompt - tells the compactor what to look for
  *
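The new doc comment maps each section of the compaction prompt to an eval scorer (coordinatorIdentity, forbiddenToolsPresent, postCompactionDiscipline). As a rough illustration of the kind of section-presence check that implies, here is a standalone sketch; it is not the actual scorer code from evals/scorers, and the required strings are simply headings that appear in the SWARM_COMPACTION_CONTEXT string above.

```ts
// Standalone sketch, not the real scorer from evals/scorers. It only illustrates the
// kind of section-presence check the doc comment above alludes to.
const REQUIRED_SECTIONS = [
  "YOU ARE THE COORDINATOR", // coordinatorIdentity-style check
  "FORBIDDEN TOOLS",         // forbiddenToolsPresent-style check
  "MANDATORY BEHAVIORS",     // post-compaction discipline checklist
];

function scoreSectionPresence(prompt: string): number {
  const hits = REQUIRED_SECTIONS.filter((section) => prompt.includes(section)).length;
  return hits / REQUIRED_SECTIONS.length; // 0..1, like a simple eval score
}

// Example: a prompt missing the forbidden-tools list scores 2/3.
console.log(scoreSectionPresence("YOU ARE THE COORDINATOR ... MANDATORY BEHAVIORS ..."));
```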
package/dist/compaction-hook.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"compaction-hook.d.ts","sourceRoot":"","sources":["../src/compaction-hook.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA6BG;
+
{"version":3,"file":"compaction-hook.d.ts","sourceRoot":"","sources":["../src/compaction-hook.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA6BG;AAwCH;;;;;;;;;;;;;;;;;;GAkBG;AACH,eAAO,MAAM,wBAAwB,0jSAgPpC,CAAC;AAEF;;;;;GAKG;AACH,eAAO,MAAM,wBAAwB,0nCAiCpC,CAAC;AA2FF;;;;;;;;GAQG;AACH,MAAM,MAAM,cAAc,GAAG,OAAO,CAAC;AAErC;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,QAAQ,EAAE,GAAG,CACX,MAAM,EACN;QAAE,KAAK,EAAE,MAAM,CAAC;QAAC,MAAM,EAAE,MAAM,CAAC;QAAC,MAAM,CAAC,EAAE,MAAM,CAAC;QAAC,KAAK,CAAC,EAAE,MAAM,EAAE,CAAA;KAAE,CACrE,CAAC;IACF,UAAU,CAAC,EAAE;QAAE,IAAI,EAAE,MAAM,CAAC;QAAC,IAAI,EAAE,OAAO,CAAC;QAAC,SAAS,EAAE,MAAM,CAAA;KAAE,CAAC;CACjE;AAED;;;;;;;;;;;;;;GAcG;AACH,wBAAsB,mBAAmB,CACvC,MAAM,EAAE,cAAc,EACtB,SAAS,EAAE,MAAM,EACjB,KAAK,GAAE,MAAY,GAClB,OAAO,CAAC,iBAAiB,CAAC,CAgJ5B;AAwVD;;;;;;;;;;;;;;;;;;;;;;;;GAwBG;AACH,wBAAgB,oBAAoB,CAAC,MAAM,CAAC,EAAE,cAAc,IAExD,OAAO;IAAE,SAAS,EAAE,MAAM,CAAA;CAAE,EAC5B,QAAQ;IAAE,OAAO,EAAE,MAAM,EAAE,CAAA;CAAE,KAC5B,OAAO,CAAC,IAAI,CAAC,CAqLjB"}