opencode-swarm-plugin 0.39.1 → 0.40.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/.hive/issues.jsonl +16 -0
  2. package/CHANGELOG.md +52 -0
  3. package/bin/swarm.test.ts +406 -0
  4. package/bin/swarm.ts +303 -0
  5. package/dist/compaction-hook.d.ts +8 -1
  6. package/dist/compaction-hook.d.ts.map +1 -1
  7. package/dist/compaction-observability.d.ts +173 -0
  8. package/dist/compaction-observability.d.ts.map +1 -0
  9. package/dist/eval-capture.d.ts +93 -0
  10. package/dist/eval-capture.d.ts.map +1 -1
  11. package/dist/hive.d.ts.map +1 -1
  12. package/dist/index.d.ts +36 -1
  13. package/dist/index.d.ts.map +1 -1
  14. package/dist/index.js +15670 -580
  15. package/dist/plugin.js +15623 -557
  16. package/dist/schemas/task.d.ts +3 -3
  17. package/evals/README.md +113 -0
  18. package/evals/scorers/coordinator-discipline.evalite-test.ts +163 -0
  19. package/evals/scorers/coordinator-discipline.ts +335 -2
  20. package/evals/scorers/index.test.ts +146 -0
  21. package/evals/scorers/index.ts +104 -0
  22. package/evals/swarm-decomposition.eval.ts +9 -2
  23. package/examples/commands/swarm.md +291 -21
  24. package/package.json +1 -1
  25. package/src/compaction-hook.ts +258 -110
  26. package/src/compaction-observability.integration.test.ts +139 -0
  27. package/src/compaction-observability.test.ts +187 -0
  28. package/src/compaction-observability.ts +324 -0
  29. package/src/eval-capture.test.ts +204 -1
  30. package/src/eval-capture.ts +194 -2
  31. package/src/eval-runner.test.ts +96 -0
  32. package/src/eval-runner.ts +356 -0
  33. package/src/hive.ts +34 -0
  34. package/src/index.ts +54 -1
  35. package/src/memory.test.ts +110 -0
  36. package/src/memory.ts +34 -0
  37. package/dist/beads.d.ts +0 -386
  38. package/dist/beads.d.ts.map +0 -1
  39. package/dist/schemas/bead-events.d.ts +0 -698
  40. package/dist/schemas/bead-events.d.ts.map +0 -1
  41. package/dist/schemas/bead.d.ts +0 -255
  42. package/dist/schemas/bead.d.ts.map +0 -1
package/bin/swarm.ts CHANGED
@@ -2548,6 +2548,11 @@ ${cyan("Log Viewing:")}
  swarm log --limit <n> Limit output to n lines (default: 50)
  swarm log --watch, -w Watch mode - continuously monitor for new logs
  swarm log --interval <ms> Poll interval in ms (default: 1000, min: 100)
+ swarm log sessions List all captured coordinator sessions
+ swarm log sessions <session_id> View events for a specific session
+ swarm log sessions --latest View most recent session
+ swarm log sessions --type <type> Filter by event type (DECISION, VIOLATION, OUTCOME, COMPACTION)
+ swarm log sessions --json Raw JSON output for jq

  ${cyan("Eval Commands:")}
  swarm eval status [eval-name] Show current phase, thresholds, recent scores
@@ -2911,6 +2916,298 @@ async function migrate() {
  }
  }

+ // ============================================================================
+ // Session Log Helpers
+ // ============================================================================
+
+ import type { CoordinatorEvent } from "../src/eval-capture.js";
+
+ /**
+  * Parse a session file and return events
+  */
+ function parseSessionFile(filePath: string): CoordinatorEvent[] {
+   if (!existsSync(filePath)) {
+     throw new Error(`Session file not found: ${filePath}`);
+   }
+
+   const content = readFileSync(filePath, "utf-8");
+   const lines = content.split("\n").filter((line) => line.trim());
+   const events: CoordinatorEvent[] = [];
+
+   for (const line of lines) {
+     try {
+       const parsed = JSON.parse(line);
+       events.push(parsed);
+     } catch {
+       // Skip invalid JSON lines
+     }
+   }
+
+   return events;
+ }
+
+ /**
+  * List all session files in a directory
+  */
+ function listSessionFiles(
+   dir: string,
+ ): Array<{
+   session_id: string;
+   file_path: string;
+   event_count: number;
+   start_time: string;
+   end_time?: string;
+ }> {
+   if (!existsSync(dir)) return [];
+
+   const files = readdirSync(dir).filter((f: string) => f.endsWith(".jsonl"));
+   const sessions: Array<{
+     session_id: string;
+     file_path: string;
+     event_count: number;
+     start_time: string;
+     end_time?: string;
+   }> = [];
+
+   for (const file of files) {
+     const filePath = join(dir, file);
+     try {
+       const events = parseSessionFile(filePath);
+       if (events.length === 0) continue;
+
+       const timestamps = events.map((e) => new Date(e.timestamp).getTime());
+       const startTime = new Date(Math.min(...timestamps)).toISOString();
+       const endTime =
+         timestamps.length > 1
+           ? new Date(Math.max(...timestamps)).toISOString()
+           : undefined;
+
+       sessions.push({
+         session_id: events[0].session_id,
+         file_path: filePath,
+         event_count: events.length,
+         start_time: startTime,
+         end_time: endTime,
+       });
+     } catch {
+       // Skip invalid files
+     }
+   }
+
+   // Sort by start time (newest first)
+   return sessions.sort((a, b) =>
+     new Date(b.start_time).getTime() - new Date(a.start_time).getTime()
+   );
+ }
+
+ /**
+  * Get the latest session file
+  */
+ function getLatestSession(
+   dir: string,
+ ): {
+   session_id: string;
+   file_path: string;
+   event_count: number;
+   start_time: string;
+   end_time?: string;
+ } | null {
+   const sessions = listSessionFiles(dir);
+   return sessions.length > 0 ? sessions[0] : null;
+ }
+
+ /**
+  * Filter events by type
+  */
+ function filterEventsByType(
+   events: CoordinatorEvent[],
+   eventType: string,
+ ): CoordinatorEvent[] {
+   if (eventType === "all") return events;
+   return events.filter((e) => e.event_type === eventType.toUpperCase());
+ }
+
+ /**
+  * Filter events by time
+  */
+ function filterEventsSince(
+   events: CoordinatorEvent[],
+   sinceMs: number,
+ ): CoordinatorEvent[] {
+   const cutoffTime = Date.now() - sinceMs;
+   return events.filter((e) =>
+     new Date(e.timestamp).getTime() >= cutoffTime
+   );
+ }
+
+ /**
+  * Format an event for display
+  */
+ function formatEvent(event: CoordinatorEvent, useColor = true): string {
+   const timestamp = new Date(event.timestamp).toLocaleTimeString();
+   const typeColor = useColor
+     ? event.event_type === "VIOLATION"
+       ? red
+       : event.event_type === "OUTCOME"
+         ? green
+         : cyan
+     : (s: string) => s;
+
+   const type = typeColor(event.event_type.padEnd(12));
+
+   // Get specific type
+   let specificType = "";
+   if (event.event_type === "DECISION") {
+     specificType = event.decision_type;
+   } else if (event.event_type === "VIOLATION") {
+     specificType = event.violation_type;
+   } else if (event.event_type === "OUTCOME") {
+     specificType = event.outcome_type;
+   } else if (event.event_type === "COMPACTION") {
+     specificType = event.compaction_type;
+   }
+
+   return `${timestamp} ${type} ${specificType}`;
+ }
+
+ // ============================================================================
+ // Session Log Command
+ // ============================================================================
+
+ async function logSessions() {
+   const args = process.argv.slice(4); // Skip 'log' and 'sessions'
+   const sessionsDir = join(homedir(), ".config", "swarm-tools", "sessions");
+
+   // Parse arguments
+   let sessionId: string | null = null;
+   let latest = false;
+   let jsonOutput = false;
+   let eventTypeFilter: string | null = null;
+   let sinceMs: number | null = null;
+   let limit = 100;
+
+   for (let i = 0; i < args.length; i++) {
+     const arg = args[i];
+
+     if (arg === "--latest") {
+       latest = true;
+     } else if (arg === "--json") {
+       jsonOutput = true;
+     } else if (arg === "--type" && i + 1 < args.length) {
+       eventTypeFilter = args[++i];
+     } else if (arg === "--since" && i + 1 < args.length) {
+       const duration = parseDuration(args[++i]);
+       if (duration === null) {
+         p.log.error(`Invalid duration format: ${args[i]}`);
+         p.log.message(dim(" Use format: 30s, 5m, 2h, 1d"));
+         process.exit(1);
+       }
+       sinceMs = duration;
+     } else if (arg === "--limit" && i + 1 < args.length) {
+       limit = parseInt(args[++i], 10);
+       if (isNaN(limit) || limit <= 0) {
+         p.log.error(`Invalid limit: ${args[i]}`);
+         process.exit(1);
+       }
+     } else if (!arg.startsWith("--") && !arg.startsWith("-")) {
+       // Positional arg = session ID
+       sessionId = arg;
+     }
+   }
+
+   // If no args, list sessions
+   if (!sessionId && !latest) {
+     const sessions = listSessionFiles(sessionsDir);
+
+     if (jsonOutput) {
+       console.log(JSON.stringify({ sessions }, null, 2));
+       return;
+     }
+
+     if (sessions.length === 0) {
+       p.log.warn("No session files found");
+       p.log.message(dim(` Expected: ${sessionsDir}/*.jsonl`));
+       return;
+     }
+
+     console.log(yellow(BANNER));
+     console.log(dim(` Coordinator Sessions (${sessions.length} total)\n`));
+
+     // Show sessions table
+     for (const session of sessions) {
+       const startTime = new Date(session.start_time).toLocaleString();
+       const duration = session.end_time
+         ? ((new Date(session.end_time).getTime() - new Date(session.start_time).getTime()) / 1000).toFixed(0) + "s"
+         : "ongoing";
+
+       console.log(` ${cyan(session.session_id)}`);
+       console.log(` ${dim("Started:")} ${startTime}`);
+       console.log(` ${dim("Events:")} ${session.event_count}`);
+       console.log(` ${dim("Duration:")} ${duration}`);
+       console.log();
+     }
+
+     console.log(dim(" Use --latest to view most recent session"));
+     console.log(dim(" Use <session_id> to view specific session"));
+     console.log();
+     return;
+   }
+
+   // Get session (either by ID or latest)
+   let session: { session_id: string; file_path: string; event_count: number; start_time: string; end_time?: string; } | null = null;
+
+   if (latest) {
+     session = getLatestSession(sessionsDir);
+     if (!session) {
+       p.log.error("No sessions found");
+       return;
+     }
+   } else if (sessionId) {
+     // Find session by ID (partial match)
+     const sessions = listSessionFiles(sessionsDir);
+     session = sessions.find(s => s.session_id.includes(sessionId!)) || null;
+
+     if (!session) {
+       p.log.error(`Session not found: ${sessionId}`);
+       return;
+     }
+   }
+
+   // Load and filter events
+   let events = parseSessionFile(session!.file_path);
+
+   if (eventTypeFilter) {
+     events = filterEventsByType(events, eventTypeFilter);
+   }
+
+   if (sinceMs !== null) {
+     events = filterEventsSince(events, sinceMs);
+   }
+
+   // Apply limit
+   if (events.length > limit) {
+     events = events.slice(-limit);
+   }
+
+   // Output
+   if (jsonOutput) {
+     console.log(JSON.stringify({ session_id: session!.session_id, events }, null, 2));
+     return;
+   }
+
+   console.log(yellow(BANNER));
+   console.log(dim(` Session: ${session!.session_id}\n`));
+   console.log(` ${dim("Events:")} ${events.length}/${session!.event_count}`);
+   if (eventTypeFilter) console.log(` ${dim("Type:")} ${eventTypeFilter}`);
+   if (sinceMs !== null) console.log(` ${dim("Since:")} ${args[args.indexOf("--since") + 1]}`);
+   console.log();
+
+   for (const event of events) {
+     console.log(" " + formatEvent(event, true));
+   }
+   console.log();
+ }
+
  // ============================================================================
  // Log Command - View swarm logs with filtering
  // ============================================================================
@@ -3226,6 +3523,12 @@ async function cells() {
  async function logs() {
    const args = process.argv.slice(3);

+   // Check for 'sessions' subcommand
+   if (args[0] === "sessions") {
+     await logSessions();
+     return;
+   }
+
    // Parse arguments
    let moduleFilter: string | null = null;
    let levelFilter: number | null = null;
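
The new `swarm log sessions` subcommand (e.g. `swarm log sessions --latest --type VIOLATION`) reads one JSONL file per coordinator session from `~/.config/swarm-tools/sessions/`. The exact `CoordinatorEvent` shape is defined in `src/eval-capture.ts`; the sketch below is a hedged reconstruction of a session line using only the fields that `parseSessionFile` and `formatEvent` actually touch in this diff.

```ts
// Hedged sketch: the real CoordinatorEvent type lives in src/eval-capture.ts;
// the fields below are inferred from what this diff reads, not the full schema.
import { appendFileSync } from "node:fs";
import { join } from "node:path";
import { homedir } from "node:os";

type InferredCoordinatorEvent = {
  session_id: string;
  timestamp: string; // ISO 8601; parsed with new Date(...)
  event_type: "DECISION" | "VIOLATION" | "OUTCOME" | "COMPACTION";
  // One type-specific discriminator, depending on event_type:
  decision_type?: string;
  violation_type?: string;
  outcome_type?: string;
  compaction_type?: string;
};

// One JSON object per line, in the directory the CLI scans.
const event: InferredCoordinatorEvent = {
  session_id: "ses_example", // hypothetical session ID
  timestamp: new Date().toISOString(),
  event_type: "DECISION",
  decision_type: "spawn_worker", // hypothetical discriminator value
};
const file = join(homedir(), ".config", "swarm-tools", "sessions", "ses_example.jsonl");
appendFileSync(file, JSON.stringify(event) + "\n");
```

Malformed lines are silently skipped by `parseSessionFile`, so a partially written trailing line does not break the viewer.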
package/dist/compaction-hook.d.ts CHANGED
@@ -37,8 +37,15 @@
  *
  * This is NOT about preserving state for a human - it's about the swarm continuing
  * autonomously after context compression.
+ *
+ * Structure optimized for eval scores:
+ * 1. ASCII header (visual anchor, coordinatorIdentity scorer)
+ * 2. Immediate actions (actionable tool calls, postCompactionDiscipline scorer)
+ * 3. Forbidden tools (explicit list, forbiddenToolsPresent scorer)
+ * 4. Role & mandates (strong language, coordinatorIdentity scorer)
+ * 5. Reference sections (supporting material)
  */
- export declare const SWARM_COMPACTION_CONTEXT = "\n\u250C\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2510\n\u2502 \u2502\n\u2502 \uD83D\uDC1D YOU ARE THE COORDINATOR \uD83D\uDC1D \u2502\n\u2502 \u2502\n\u2502 NOT A WORKER. NOT AN IMPLEMENTER. \u2502\n\u2502 YOU ORCHESTRATE. \u2502\n\u2502 \u2502\n\u2514\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2518\n\n## \uD83C\uDFAF NON-NEGOTIABLE: YOU ARE THE COORDINATOR\n\nContext was compacted but the swarm is still running. **YOU ARE THE COORDINATOR.**\n\nYour role is ORCHESTRATION, not implementation. When you catch yourself about to do work directly, STOP.\n\n### \u26D4 NEVER DO THESE (Coordinator Anti-Patterns)\n\n**CRITICAL: Coordinators NEVER do implementation work. ALWAYS spawn workers.**\n\n- \u274C **NEVER** use `edit` or `write` tools - SPAWN A WORKER\n- \u274C **NEVER** run tests with `bash` - SPAWN A WORKER \n- \u274C **NEVER** implement features yourself - SPAWN A WORKER\n- \u274C **NEVER** \"just do it myself to save time\" - NO. SPAWN A WORKER.\n- \u274C **NEVER** reserve files with `swarmmail_reserve` - Workers reserve files\n- \u274C **NEVER** fetch files/docs directly - SPAWN A RESEARCHER\n\n**If you catch yourself about to edit a file, STOP. Use `swarm_spawn_subtask` instead.**\n\n### \uD83D\uDEAB FORBIDDEN TOOLS (Coordinators MUST delegate these)\n\n**NEVER use these tools directly. ALWAYS spawn a researcher worker via `swarm_spawn_researcher`:**\n\n**Repository fetching:**\n- `repo-crawl_file`, `repo-crawl_readme`, `repo-crawl_search`, `repo-crawl_structure`, `repo-crawl_tree`\n- `repo-autopsy_*` (all repo-autopsy tools)\n\n**Web/documentation fetching:**\n- `webfetch`, `fetch_fetch`\n- `context7_resolve-library-id`, `context7_get-library-docs`\n\n**Knowledge base:**\n- `pdf-brain_search`, `pdf-brain_read`\n\n**If you need external data:** Use `swarm_spawn_researcher` with a clear research task. The researcher will fetch, summarize, and return findings.\n\n### \u2705 ALWAYS DO THESE (Coordinator Checklist)\n\nOn resume, execute this checklist IN ORDER:\n\n1. `swarm_status(epic_id=\"<epic>\", project_key=\"<path>\")` - Get current state\n2. `swarmmail_inbox(limit=5)` - Check for agent messages\n3. For completed work: `swarm_review` \u2192 `swarm_review_feedback`\n4. For open subtasks: `swarm_spawn_subtask` (NOT \"do it yourself\")\n5. For blocked work: Investigate, unblock, reassign\n\n### Preserve in Summary\n\nExtract from session context:\n\n1. **Epic & Subtasks** - IDs, titles, status, file assignments\n2. **What's Running** - Which agents are active, what they're working on \n3. **What's Blocked** - Blockers and what's needed to unblock\n4. **What's Done** - Completed work and any follow-ups needed\n5. **What's Next** - Pending subtasks ready to spawn\n\n### Summary Format\n\n```\n## \uD83D\uDC1D Swarm State\n\n**Epic:** <cell-xxx> - <title>\n**Project:** <path>\n**Progress:** X/Y subtasks complete\n\n**Active:**\n- <cell-xxx>: <title> [in_progress] \u2192 <agent> working on <files>\n\n**Blocked:**\n- <cell-xxx>: <title> - BLOCKED: <reason>\n\n**Completed:**\n- <cell-xxx>: <title> \u2713\n\n**Ready to Spawn:**\n- <cell-xxx>: <title> (files: <...>)\n```\n\n### Your Role\n\n- **Spawn aggressively** - If a subtask is ready and unblocked, spawn an agent\n- **Monitor actively** - Check status, read messages, respond to blockers\n- **Review work** - Use `swarm_review` and `swarm_review_feedback` for completed work\n- **Close the loop** - When all subtasks done, verify and close the epic\n\n**You are the COORDINATOR. You orchestrate. You do NOT implement. Spawn workers.**\n\n---\n\n## \uD83D\uDCCB FULL COORDINATOR WORKFLOW (Reference)\n\nYou are ALWAYS swarming. Here is the complete workflow for any new work:\n\n### Phase 1.5: Research Phase (FOR COMPLEX TASKS)\n\n**If the task requires understanding unfamiliar technologies, spawn a researcher FIRST:**\n\n```\nswarm_spawn_researcher(\n research_id=\"research-<topic>\",\n epic_id=\"<epic-id>\",\n tech_stack=[\"<technology>\"],\n project_path=\"<path>\"\n)\n// Then spawn with Task(subagent_type=\"swarm/researcher\", prompt=\"<from above>\")\n```\n\n### Phase 2: Knowledge Gathering\n\n```\nsemantic-memory_find(query=\"<task keywords>\", limit=5) # Past learnings\ncass_search(query=\"<task description>\", limit=5) # Similar past tasks \nskills_list() # Available skills\n```\n\n### Phase 3: Decompose\n\n```\nswarm_select_strategy(task=\"<task>\")\nswarm_plan_prompt(task=\"<task>\", context=\"<synthesized knowledge>\")\nswarm_validate_decomposition(response=\"<CellTree JSON>\")\n```\n\n### Phase 4: Create Cells\n\n`hive_create_epic(epic_title=\"<task>\", subtasks=[...])`\n\n### Phase 5: DO NOT Reserve Files\n\n> **\u26A0\uFE0F Coordinator NEVER reserves files.** Workers reserve their own files.\n\n### Phase 6: Spawn Workers\n\n```\nswarm_spawn_subtask(bead_id, epic_id, title, files, shared_context, project_path)\nTask(subagent_type=\"swarm/worker\", prompt=\"<from above>\")\n```\n\n### Phase 7: MANDATORY Review Loop\n\n**AFTER EVERY Task() RETURNS:**\n\n1. `swarmmail_inbox()` - Check for messages\n2. `swarm_review(project_key, epic_id, task_id, files_touched)` - Generate review\n3. Evaluate against epic goals\n4. `swarm_review_feedback(project_key, task_id, worker_id, status, issues)`\n\n**If needs_changes:**\n```\nswarm_spawn_retry(bead_id, epic_id, original_prompt, attempt, issues, diff, files, project_path)\n// Spawn NEW worker with Task() using retry prompt\n// Max 3 attempts before marking task blocked\n```\n\n### Phase 8: Complete\n\n`hive_sync()` - Sync all cells to git\n\n## Strategy Reference\n\n| Strategy | Best For | Keywords |\n| -------------- | ------------------------ | -------------------------------------- |\n| file-based | Refactoring, migrations | refactor, migrate, rename, update all |\n| feature-based | New features | add, implement, build, create, feature |\n| risk-based | Bug fixes, security | fix, bug, security, critical, urgent |\n\n**You are the COORDINATOR. You orchestrate. You do NOT implement. Spawn workers.**\n";
+ export declare const SWARM_COMPACTION_CONTEXT = "\n\u250C\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2510\n\u2502 \u2502\n\u2502 \uD83D\uDC1D YOU ARE THE COORDINATOR \uD83D\uDC1D \u2502\n\u2502 \u2502\n\u2502 NOT A WORKER. NOT AN IMPLEMENTER. \u2502\n\u2502 YOU ORCHESTRATE. \u2502\n\u2502 \u2502\n\u2514\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2518\n\nContext was compacted but the swarm is still running. **YOU ARE THE COORDINATOR.**\n\nYour role is ORCHESTRATION, not implementation. The resume steps above (if present) tell you exactly what to do first.\n\n---\n\n## \uD83D\uDEAB FORBIDDEN TOOLS (NEVER Use These Directly)\n\nCoordinators do NOT do implementation work. These tools are **FORBIDDEN**:\n\n### File Modification (ALWAYS spawn workers instead)\n- `Edit` - SPAWN A WORKER\n- `Write` - SPAWN A WORKER\n- `bash` (for file modifications) - SPAWN A WORKER\n- `swarmmail_reserve` - Workers reserve their own files\n- `git commit` - Workers commit their own changes\n\n### External Data Fetching (SPAWN A RESEARCHER instead)\n\n**Repository fetching:**\n- `repo-crawl_file`, `repo-crawl_readme`, `repo-crawl_search`, `repo-crawl_structure`, `repo-crawl_tree`\n- `repo-autopsy_*` (all repo-autopsy tools)\n\n**Web/documentation fetching:**\n- `webfetch`, `fetch_fetch`\n- `context7_resolve-library-id`, `context7_get-library-docs`\n\n**Knowledge base:**\n- `pdf-brain_search`, `pdf-brain_read`\n\n**Instead:** Use `swarm_spawn_researcher` with a clear research task. The researcher will fetch, summarize, and return findings.\n\n---\n\n## \uD83D\uDCBC YOUR ROLE (Non-Negotiable)\n\nYou are the **COORDINATOR**. Your job is ORCHESTRATION, not implementation.\n\n### What Coordinators Do:\n- \u2705 Spawn workers for implementation tasks\n- \u2705 Monitor worker progress via `swarm_status` and `swarmmail_inbox`\n- \u2705 Review completed work with `swarm_review`\n- \u2705 Unblock dependencies and resolve conflicts\n- \u2705 Close the loop when epics complete\n\n### What Coordinators NEVER Do:\n- \u274C **NEVER** edit or write files directly\n- \u274C **NEVER** run tests with `bash`\n- \u274C **NEVER** \"just do it myself to save time\"\n- \u274C **NEVER** reserve files (workers reserve)\n- \u274C **NEVER** fetch external data directly (spawn researchers)\n\n**If you catch yourself about to edit a file, STOP. Use `swarm_spawn_subtask` instead.**\n\n### Strong Mandates:\n- **ALWAYS** spawn workers for implementation tasks\n- **ALWAYS** check status and inbox before decisions\n- **ALWAYS** review worker output before accepting\n- **NON-NEGOTIABLE:** You orchestrate. You do NOT implement.\n\n---\n\n## \uD83D\uDCDD SUMMARY FORMAT (Preserve This State)\n\nWhen compaction occurs, extract and preserve this structure:\n\n```\n## \uD83D\uDC1D Swarm State\n\n**Epic:** CELL_ID - TITLE\n**Project:** PROJECT_PATH\n**Progress:** X/Y subtasks complete\n\n**Active:**\n- CELL_ID: TITLE [in_progress] \u2192 AGENT working on FILES\n\n**Blocked:**\n- CELL_ID: TITLE - BLOCKED: REASON\n\n**Completed:**\n- CELL_ID: TITLE \u2713\n\n**Ready to Spawn:**\n- CELL_ID: TITLE (files: FILES)\n```\n\n### What to Extract:\n1. **Epic & Subtasks** - IDs, titles, status, file assignments\n2. **What's Running** - Active agents and their current work\n3. **What's Blocked** - Blockers and what's needed to unblock\n4. **What's Done** - Completed work and follow-ups\n5. **What's Next** - Pending subtasks ready to spawn\n\n---\n\n## \uD83D\uDCCB REFERENCE: Full Coordinator Workflow\n\nYou are ALWAYS swarming. Use this workflow for any new work:\n\n### Phase 1.5: Research (For Complex Tasks)\n\nIf the task requires unfamiliar technologies, spawn a researcher FIRST:\n\n```\nswarm_spawn_researcher(\n research_id=\"research-TOPIC\",\n epic_id=\"mjkw...\", # your epic ID\n tech_stack=[\"TECHNOLOGY\"],\n project_path=\"PROJECT_PATH\"\n)\n// Then spawn with Task(subagent_type=\"swarm/researcher\", prompt=\"...\")\n```\n\n### Phase 2: Knowledge Gathering\n\n```\nsemantic-memory_find(query=\"TASK_KEYWORDS\", limit=5) # Past learnings\ncass_search(query=\"TASK_DESCRIPTION\", limit=5) # Similar past tasks\nskills_list() # Available skills\n```\n\n### Phase 3: Decompose\n\n```\nswarm_select_strategy(task=\"TASK\")\nswarm_plan_prompt(task=\"TASK\", context=\"KNOWLEDGE\")\nswarm_validate_decomposition(response=\"CELLTREE_JSON\")\n```\n\n### Phase 4: Create Cells\n\n`hive_create_epic(epic_title=\"TASK\", subtasks=[...])`\n\n### Phase 5: File Reservations\n\n> **\u26A0\uFE0F Coordinator NEVER reserves files.** Workers reserve their own files with `swarmmail_reserve`.\n\n### Phase 6: Spawn Workers\n\n```\nswarm_spawn_subtask(bead_id, epic_id, title, files, shared_context, project_path)\nTask(subagent_type=\"swarm/worker\", prompt=\"GENERATED_PROMPT\")\n```\n\n### Phase 7: Review Loop (MANDATORY)\n\n**AFTER EVERY Task() RETURNS:**\n\n1. `swarmmail_inbox()` - Check for messages\n2. `swarm_review(project_key, epic_id, task_id, files_touched)` - Generate review\n3. Evaluate against epic goals\n4. `swarm_review_feedback(project_key, task_id, worker_id, status, issues)`\n\n**If needs_changes:**\n```\nswarm_spawn_retry(bead_id, epic_id, original_prompt, attempt, issues, diff, files, project_path)\n// Spawn NEW worker with Task() using retry prompt\n// Max 3 attempts before marking task blocked\n```\n\n### Phase 8: Complete\n\n`hive_sync()` - Sync all cells to git\n\n---\n\n## \uD83D\uDCCA REFERENCE: Decomposition Strategies\n\n| Strategy | Best For | Keywords |\n| -------------- | ------------------------ | -------------------------------------- |\n| file-based | Refactoring, migrations | refactor, migrate, rename, update all |\n| feature-based | New features | add, implement, build, create, feature |\n| risk-based | Bug fixes, security | fix, bug, security, critical, urgent |\n\n---\n\n**You are the COORDINATOR. You orchestrate. You do NOT implement. Spawn workers.**\n";
  /**
  * Fallback detection prompt - tells the compactor what to look for
  *
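
The rewritten `SWARM_COMPACTION_CONTEXT` above is explicitly "optimized for eval scores", and the new doc comment names the `coordinatorIdentity`, `postCompactionDiscipline`, and `forbiddenToolsPresent` scorers from `evals/scorers/coordinator-discipline.ts`. As a rough illustration of the kind of check such a scorer can perform (a hedged sketch only; the real scorer's list and weighting may differ), a presence check over the forbidden-tool names might look like this:

```ts
// Hedged sketch of a forbiddenToolsPresent-style check; the actual scorer
// lives in evals/scorers/coordinator-discipline.ts and may differ.
const FORBIDDEN_TOOLS = [
  "webfetch",
  "fetch_fetch",
  "repo-crawl_file",
  "context7_get-library-docs",
  "pdf-brain_search",
] as const;

/** Fraction of forbidden tool names explicitly listed in a prompt. */
function forbiddenToolsPresent(promptText: string): number {
  const mentioned = FORBIDDEN_TOOLS.filter((tool) => promptText.includes(tool));
  return mentioned.length / FORBIDDEN_TOOLS.length;
}

// The new compaction context enumerates every forbidden tool by name,
// so a prompt built from it should score 1.0 on a check like this.
```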
package/dist/compaction-hook.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"compaction-hook.d.ts","sourceRoot":"","sources":["../src/compaction-hook.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA6BG;AA+BH;;;;;;;;;GASG;AACH,eAAO,MAAM,wBAAwB,w6NAiLpC,CAAC;AAEF;;;;;GAKG;AACH,eAAO,MAAM,wBAAwB,0nCAiCpC,CAAC;AAqFF;;;;;;;;GAQG;AACH,MAAM,MAAM,cAAc,GAAG,OAAO,CAAC;AAErC;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,QAAQ,EAAE,GAAG,CACX,MAAM,EACN;QAAE,KAAK,EAAE,MAAM,CAAC;QAAC,MAAM,EAAE,MAAM,CAAC;QAAC,MAAM,CAAC,EAAE,MAAM,CAAC;QAAC,KAAK,CAAC,EAAE,MAAM,EAAE,CAAA;KAAE,CACrE,CAAC;IACF,UAAU,CAAC,EAAE;QAAE,IAAI,EAAE,MAAM,CAAC;QAAC,IAAI,EAAE,OAAO,CAAC;QAAC,SAAS,EAAE,MAAM,CAAA;KAAE,CAAC;CACjE;AAED;;;;;;;;;;;;;;GAcG;AACH,wBAAsB,mBAAmB,CACvC,MAAM,EAAE,cAAc,EACtB,SAAS,EAAE,MAAM,EACjB,KAAK,GAAE,MAAY,GAClB,OAAO,CAAC,iBAAiB,CAAC,CAgJ5B;AAoVD;;;;;;;;;;;;;;;;;;;;;;;;GAwBG;AACH,wBAAgB,oBAAoB,CAAC,MAAM,CAAC,EAAE,cAAc,IAExD,OAAO;IAAE,SAAS,EAAE,MAAM,CAAA;CAAE,EAC5B,QAAQ;IAAE,OAAO,EAAE,MAAM,EAAE,CAAA;CAAE,KAC5B,OAAO,CAAC,IAAI,CAAC,CA4HjB"}
+ {"version":3,"file":"compaction-hook.d.ts","sourceRoot":"","sources":["../src/compaction-hook.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA6BG;AAwCH;;;;;;;;;;;;;;;;GAgBG;AACH,eAAO,MAAM,wBAAwB,mwNA2LpC,CAAC;AAEF;;;;;GAKG;AACH,eAAO,MAAM,wBAAwB,0nCAiCpC,CAAC;AA2FF;;;;;;;;GAQG;AACH,MAAM,MAAM,cAAc,GAAG,OAAO,CAAC;AAErC;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,QAAQ,EAAE,GAAG,CACX,MAAM,EACN;QAAE,KAAK,EAAE,MAAM,CAAC;QAAC,MAAM,EAAE,MAAM,CAAC;QAAC,MAAM,CAAC,EAAE,MAAM,CAAC;QAAC,KAAK,CAAC,EAAE,MAAM,EAAE,CAAA;KAAE,CACrE,CAAC;IACF,UAAU,CAAC,EAAE;QAAE,IAAI,EAAE,MAAM,CAAC;QAAC,IAAI,EAAE,OAAO,CAAC;QAAC,SAAS,EAAE,MAAM,CAAA;KAAE,CAAC;CACjE;AAED;;;;;;;;;;;;;;GAcG;AACH,wBAAsB,mBAAmB,CACvC,MAAM,EAAE,cAAc,EACtB,SAAS,EAAE,MAAM,EACjB,KAAK,GAAE,MAAY,GAClB,OAAO,CAAC,iBAAiB,CAAC,CAgJ5B;AAwVD;;;;;;;;;;;;;;;;;;;;;;;;GAwBG;AACH,wBAAgB,oBAAoB,CAAC,MAAM,CAAC,EAAE,cAAc,IAExD,OAAO;IAAE,SAAS,EAAE,MAAM,CAAA;CAAE,EAC5B,QAAQ;IAAE,OAAO,EAAE,MAAM,EAAE,CAAA;CAAE,KAC5B,OAAO,CAAC,IAAI,CAAC,CAqLjB"}
package/dist/compaction-observability.d.ts ADDED
@@ -0,0 +1,173 @@
+ /**
+ * Compaction Hook Observability
+ *
+ * Structured logging, metrics, and queryable history for the pre-compaction hook.
+ *
+ * **Philosophy:** Make the invisible visible. When patterns aren't extracted,
+ * when detection fails, when timing explodes - we need to know WHY.
+ *
+ * @example
+ * ```typescript
+ * const metrics = createMetricsCollector({ session_id: "abc123" });
+ *
+ * recordPhaseStart(metrics, CompactionPhase.DETECT);
+ * // ... detection logic ...
+ * recordPhaseComplete(metrics, CompactionPhase.DETECT, { confidence: "high" });
+ *
+ * recordPatternExtracted(metrics, "epic_state", "Found epic bd-123");
+ *
+ * const summary = getMetricsSummary(metrics);
+ * console.log(`Detected: ${summary.detected}, Confidence: ${summary.confidence}`);
+ * ```
+ */
+ /**
+ * Compaction phases - aligned with existing log structure
+ *
+ * From compaction-hook.ts:
+ * - START: session_id, trigger
+ * - GATHER: source (swarm-mail|hive), duration_ms, stats/counts
+ * - DETECT: confidence, detected, reason_count, reasons
+ * - INJECT: confidence, context_length, context_type (full|fallback|none)
+ * - COMPLETE: duration_ms, success, detected, confidence, context_injected
+ */
+ export declare enum CompactionPhase {
+   START = "START",
+   GATHER_SWARM_MAIL = "GATHER_SWARM_MAIL",
+   GATHER_HIVE = "GATHER_HIVE",
+   DETECT = "DETECT",
+   INJECT = "INJECT",
+   COMPLETE = "COMPLETE"
+ }
+ /**
+ * Phase timing and outcome
+ */
+ interface PhaseMetrics {
+   duration_ms: number;
+   success: boolean;
+   error?: string;
+   /** Additional phase-specific data */
+   metadata?: Record<string, unknown>;
+ }
+ /**
+ * Pattern extraction record
+ */
+ interface PatternRecord {
+   pattern_type: string;
+   reason: string;
+   /** Debug details (only captured if debug mode enabled) */
+   details?: Record<string, unknown>;
+   timestamp: number;
+ }
+ /**
+ * Compaction metrics collector
+ *
+ * Mutable state object that accumulates metrics during a compaction run.
+ */
+ export interface CompactionMetrics {
+   /** Session metadata */
+   session_id?: string;
+   has_sdk_client?: boolean;
+   debug?: boolean;
+   /** Phase timings */
+   phases: Map<CompactionPhase, {
+     start_time: number;
+     end_time?: number;
+     metadata?: Record<string, unknown>;
+     error?: string;
+   }>;
+   /** Pattern extraction tracking */
+   extracted: PatternRecord[];
+   skipped: PatternRecord[];
+   /** Final detection result */
+   confidence?: "high" | "medium" | "low" | "none";
+   detected?: boolean;
+   /** Overall timing */
+   start_time: number;
+   end_time?: number;
+ }
+ /**
+ * Metrics summary (read-only snapshot)
+ */
+ export interface CompactionMetricsSummary {
+   session_id?: string;
+   has_sdk_client?: boolean;
+   /** Phase breakdown */
+   phases: Record<string, PhaseMetrics>;
+   /** Pattern extraction stats */
+   patterns_extracted: number;
+   patterns_skipped: number;
+   extraction_success_rate: number;
+   extracted_patterns: string[];
+   skipped_patterns: string[];
+   /** Detection outcome */
+   confidence?: "high" | "medium" | "low" | "none";
+   detected?: boolean;
+   /** Timing */
+   total_duration_ms: number;
+   /** Debug info (only if debug mode enabled) */
+   debug_info?: Array<{
+     phase: string;
+     pattern: string;
+     details: Record<string, unknown>;
+   }>;
+ }
+ /**
+ * Create a metrics collector
+ *
+ * @param metadata - Session metadata to capture
+ * @returns Mutable metrics collector
+ */
+ export declare function createMetricsCollector(metadata?: {
+   session_id?: string;
+   has_sdk_client?: boolean;
+   debug?: boolean;
+ }): CompactionMetrics;
+ /**
+ * Record phase start
+ *
+ * @param metrics - Metrics collector
+ * @param phase - Phase being started
+ */
+ export declare function recordPhaseStart(metrics: CompactionMetrics, phase: CompactionPhase): void;
+ /**
+ * Record phase completion
+ *
+ * @param metrics - Metrics collector
+ * @param phase - Phase being completed
+ * @param result - Phase outcome
+ */
+ export declare function recordPhaseComplete(metrics: CompactionMetrics, phase: CompactionPhase, result?: {
+   success?: boolean;
+   error?: string;
+   confidence?: "high" | "medium" | "low" | "none";
+   detected?: boolean;
+   [key: string]: unknown;
+ }): void;
+ /**
+ * Record an extracted pattern
+ *
+ * @param metrics - Metrics collector
+ * @param pattern_type - Type of pattern extracted (e.g., "epic_state", "agent_name")
+ * @param reason - Human-readable reason for extraction
+ * @param details - Debug details (only captured if debug mode enabled)
+ */
+ export declare function recordPatternExtracted(metrics: CompactionMetrics, pattern_type: string, reason: string, details?: Record<string, unknown>): void;
+ /**
+ * Record a skipped pattern
+ *
+ * @param metrics - Metrics collector
+ * @param pattern_type - Type of pattern that was skipped
+ * @param reason - Human-readable reason for skipping
+ */
+ export declare function recordPatternSkipped(metrics: CompactionMetrics, pattern_type: string, reason: string): void;
+ /**
+ * Get metrics summary (read-only snapshot)
+ *
+ * Computes derived metrics like success rates and total duration.
+ *
+ * @param metrics - Metrics collector
+ * @returns Immutable summary
+ */
+ export declare function getMetricsSummary(metrics: CompactionMetrics): CompactionMetricsSummary;
+ export {};
+ //# sourceMappingURL=compaction-observability.d.ts.map
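
The declarations above are the full public surface of the new observability module. A usage sketch assembled only from these signatures (the import path and the error-handling policy here are illustrative assumptions, not prescribed by the package):

```ts
// Usage sketch built from the .d.ts above; the import path is a guess at
// how a consumer might resolve the compiled module.
import {
  CompactionPhase,
  createMetricsCollector,
  recordPhaseStart,
  recordPhaseComplete,
  recordPatternSkipped,
  getMetricsSummary,
} from "opencode-swarm-plugin/dist/compaction-observability"; // hypothetical path

const metrics = createMetricsCollector({ session_id: "ses_example", debug: true });

recordPhaseStart(metrics, CompactionPhase.GATHER_HIVE);
try {
  // ... gather hive state here ...
  recordPhaseComplete(metrics, CompactionPhase.GATHER_HIVE, { success: true });
} catch (err) {
  // Record the failure and note the pattern we could not extract.
  recordPhaseComplete(metrics, CompactionPhase.GATHER_HIVE, {
    success: false,
    error: err instanceof Error ? err.message : String(err),
  });
  recordPatternSkipped(metrics, "epic_state", "hive unavailable");
}

const summary = getMetricsSummary(metrics);
console.log(`extraction rate: ${summary.extraction_success_rate}`);
```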
package/dist/compaction-observability.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"compaction-observability.d.ts","sourceRoot":"","sources":["../src/compaction-observability.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;GAqBG;AAEH;;;;;;;;;GASG;AACH,oBAAY,eAAe;IACzB,KAAK,UAAU;IACf,iBAAiB,sBAAsB;IACvC,WAAW,gBAAgB;IAC3B,MAAM,WAAW;IACjB,MAAM,WAAW;IACjB,QAAQ,aAAa;CACtB;AAED;;GAEG;AACH,UAAU,YAAY;IACpB,WAAW,EAAE,MAAM,CAAC;IACpB,OAAO,EAAE,OAAO,CAAC;IACjB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,qCAAqC;IACrC,QAAQ,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;CACpC;AAED;;GAEG;AACH,UAAU,aAAa;IACrB,YAAY,EAAE,MAAM,CAAC;IACrB,MAAM,EAAE,MAAM,CAAC;IACf,0DAA0D;IAC1D,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAClC,SAAS,EAAE,MAAM,CAAC;CACnB;AAED;;;;GAIG;AACH,MAAM,WAAW,iBAAiB;IAChC,uBAAuB;IACvB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB,KAAK,CAAC,EAAE,OAAO,CAAC;IAEhB,oBAAoB;IACpB,MAAM,EAAE,GAAG,CAAC,eAAe,EAAE;QAC3B,UAAU,EAAE,MAAM,CAAC;QACnB,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,QAAQ,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACnC,KAAK,CAAC,EAAE,MAAM,CAAC;KAChB,CAAC,CAAC;IAEH,kCAAkC;IAClC,SAAS,EAAE,aAAa,EAAE,CAAC;IAC3B,OAAO,EAAE,aAAa,EAAE,CAAC;IAEzB,6BAA6B;IAC7B,UAAU,CAAC,EAAE,MAAM,GAAG,QAAQ,GAAG,KAAK,GAAG,MAAM,CAAC;IAChD,QAAQ,CAAC,EAAE,OAAO,CAAC;IAEnB,qBAAqB;IACrB,UAAU,EAAE,MAAM,CAAC;IACnB,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACvC,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,cAAc,CAAC,EAAE,OAAO,CAAC;IAEzB,sBAAsB;IACtB,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;IAErC,+BAA+B;IAC/B,kBAAkB,EAAE,MAAM,CAAC;IAC3B,gBAAgB,EAAE,MAAM,CAAC;IACzB,uBAAuB,EAAE,MAAM,CAAC;IAChC,kBAAkB,EAAE,MAAM,EAAE,CAAC;IAC7B,gBAAgB,EAAE,MAAM,EAAE,CAAC;IAE3B,wBAAwB;IACxB,UAAU,CAAC,EAAE,MAAM,GAAG,QAAQ,GAAG,KAAK,GAAG,MAAM,CAAC;IAChD,QAAQ,CAAC,EAAE,OAAO,CAAC;IAEnB,aAAa;IACb,iBAAiB,EAAE,MAAM,CAAC;IAE1B,8CAA8C;IAC9C,UAAU,CAAC,EAAE,KAAK,CAAC;QACjB,KAAK,EAAE,MAAM,CAAC;QACd,OAAO,EAAE,MAAM,CAAC;QAChB,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;KAClC,CAAC,CAAC;CACJ;AAED;;;;;GAKG;AACH,wBAAgB,sBAAsB,CAAC,QAAQ,CAAC,EAAE;IAChD,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB,KAAK,CAAC,EAAE,OAAO,CAAC;CACjB,GAAG,iBAAiB,CAUpB;AAED;;;;;GAKG;AACH,wBAAgB,gBAAgB,CAC9B,OAAO,EAAE,iBAAiB,EAC1B,KAAK,EAAE,eAAe,GACrB,IAAI,CAIN;AAED;;;;;;GAMG;AACH,wBAAgB,mBAAmB,CACjC,OAAO,EAAE,iBAAiB,EAC1B,KAAK,EAAE,eAAe,EACtB,MAAM,CAAC,EAAE;IACP,OAAO,CAAC,EAAE,OAAO,CAAC;IAClB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,UAAU,CAAC,EAAE,MAAM,GAAG,QAAQ,GAAG,KAAK,GAAG,MAAM,CAAC;IAChD,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC;CACxB,GACA,IAAI,CAmCN;AAED;;;;;;;GAOG;AACH,wBAAgB,sBAAsB,CACpC,OAAO,EAAE,iBAAiB,EAC1B,YAAY,EAAE,MAAM,EACpB,MAAM,EAAE,MAAM,EACd,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAChC,IAAI,CAYN;AAED;;;;;;GAMG;AACH,wBAAgB,oBAAoB,CAClC,OAAO,EAAE,iBAAiB,EAC1B,YAAY,EAAE,MAAM,EACpB,MAAM,EAAE,MAAM,GACb,IAAI,CAMN;AAED;;;;;;;GAOG;AACH,wBAAgB,iBAAiB,CAAC,OAAO,EAAE,iBAAiB,GAAG,wBAAwB,CAkDtF"}