opencode-swarm-plugin 0.17.1 → 0.18.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/plugin.js CHANGED
@@ -12810,6 +12810,11 @@ var init_events = __esm(() => {
 async function appendEvent(event, projectPath) {
   const db = await getDatabase(projectPath);
   const { type, project_key, timestamp, ...rest } = event;
+  console.log("[SwarmMail] Appending event", {
+    type,
+    projectKey: project_key,
+    timestamp
+  });
   const result = await db.query(`INSERT INTO events (type, project_key, timestamp, data)
     VALUES ($1, $2, $3, $4)
     RETURNING id, sequence`, [type, project_key, timestamp, JSON.stringify(rest)]);
@@ -12818,6 +12823,13 @@ async function appendEvent(event, projectPath) {
     throw new Error("Failed to insert event - no row returned");
   }
   const { id, sequence } = row;
+  console.log("[SwarmMail] Event appended", {
+    type,
+    id,
+    sequence,
+    projectKey: project_key
+  });
+  console.debug("[SwarmMail] Updating materialized views", { type, id });
   await updateMaterializedViews(db, { ...event, id, sequence });
   return { ...event, id, sequence };
 }
@@ -12841,9 +12853,13 @@ async function appendEvents(events, projectPath) {
     results.push(enrichedEvent);
   }
   await db.exec("COMMIT");
-  } catch (error45) {
-    await db.exec("ROLLBACK");
-    throw error45;
+  } catch (e) {
+    try {
+      await db.exec("ROLLBACK");
+    } catch (rollbackError) {
+      console.error("[SwarmMail] ROLLBACK failed:", rollbackError);
+    }
+    throw e;
   }
   return results;
 }
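
Aside: the hunk above hardens the transaction path. If the batch fails, the ROLLBACK itself can also throw (for example on a broken connection), and an unguarded rollback would replace the original error with the less useful rollback error. A minimal sketch of the same pattern, assuming a generic `Db` interface rather than the plugin's actual PGlite wrapper:

// Sketch: keep the original error even when ROLLBACK fails.
// `Db` is a hypothetical minimal interface, not the plugin's API.
interface Db {
  exec(sql: string): Promise<void>;
}

async function withTransaction(db: Db, work: () => Promise<void>): Promise<void> {
  await db.exec("BEGIN");
  try {
    await work();
    await db.exec("COMMIT");
  } catch (err) {
    try {
      await db.exec("ROLLBACK");
    } catch (rollbackErr) {
      // Log and swallow: the caller cares about the original failure.
      console.error("ROLLBACK failed:", rollbackErr);
    }
    throw err; // rethrow the original error, not the rollback error
  }
}
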
@@ -12946,33 +12962,42 @@ async function replayEvents(options2 = {}, projectPath) {
   };
 }
 async function updateMaterializedViews(db, event) {
-  switch (event.type) {
-    case "agent_registered":
-      await handleAgentRegistered(db, event);
-      break;
-    case "agent_active":
-      await db.query(`UPDATE agents SET last_active_at = $1 WHERE project_key = $2 AND name = $3`, [event.timestamp, event.project_key, event.agent_name]);
-      break;
-    case "message_sent":
-      await handleMessageSent(db, event);
-      break;
-    case "message_read":
-      await db.query(`UPDATE message_recipients SET read_at = $1 WHERE message_id = $2 AND agent_name = $3`, [event.timestamp, event.message_id, event.agent_name]);
-      break;
-    case "message_acked":
-      await db.query(`UPDATE message_recipients SET acked_at = $1 WHERE message_id = $2 AND agent_name = $3`, [event.timestamp, event.message_id, event.agent_name]);
-      break;
-    case "file_reserved":
-      await handleFileReserved(db, event);
-      break;
-    case "file_released":
-      await handleFileReleased(db, event);
-      break;
-    case "task_started":
-    case "task_progress":
-    case "task_completed":
-    case "task_blocked":
-      break;
+  try {
+    switch (event.type) {
+      case "agent_registered":
+        await handleAgentRegistered(db, event);
+        break;
+      case "agent_active":
+        await db.query(`UPDATE agents SET last_active_at = $1 WHERE project_key = $2 AND name = $3`, [event.timestamp, event.project_key, event.agent_name]);
+        break;
+      case "message_sent":
+        await handleMessageSent(db, event);
+        break;
+      case "message_read":
+        await db.query(`UPDATE message_recipients SET read_at = $1 WHERE message_id = $2 AND agent_name = $3`, [event.timestamp, event.message_id, event.agent_name]);
+        break;
+      case "message_acked":
+        await db.query(`UPDATE message_recipients SET acked_at = $1 WHERE message_id = $2 AND agent_name = $3`, [event.timestamp, event.message_id, event.agent_name]);
+        break;
+      case "file_reserved":
+        await handleFileReserved(db, event);
+        break;
+      case "file_released":
+        await handleFileReleased(db, event);
+        break;
+      case "task_started":
+      case "task_progress":
+      case "task_completed":
+      case "task_blocked":
+        break;
+    }
+  } catch (error45) {
+    console.error("[SwarmMail] Failed to update materialized views", {
+      eventType: event.type,
+      eventId: event.id,
+      error: error45
+    });
+    throw error45;
   }
 }
 async function handleAgentRegistered(db, event) {
@@ -12992,6 +13017,12 @@ async function handleAgentRegistered(db, event) {
   ]);
 }
 async function handleMessageSent(db, event) {
+  console.log("[SwarmMail] Handling message sent event", {
+    from: event.from_agent,
+    to: event.to_agents,
+    subject: event.subject,
+    projectKey: event.project_key
+  });
   const result = await db.query(`INSERT INTO messages (project_key, from_agent, subject, body, thread_id, importance, ack_required, created_at)
     VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
     RETURNING id`, [
@@ -13009,24 +13040,42 @@ async function handleMessageSent(db, event) {
     throw new Error("Failed to insert message - no row returned");
   }
   const messageId = msgRow.id;
-  for (const agent of event.to_agents) {
+  if (event.to_agents.length > 0) {
+    const values = event.to_agents.map((_, i) => `($1, $${i + 2})`).join(", ");
+    const params = [messageId, ...event.to_agents];
     await db.query(`INSERT INTO message_recipients (message_id, agent_name)
-      VALUES ($1, $2)
-      ON CONFLICT DO NOTHING`, [messageId, agent]);
+      VALUES ${values}
+      ON CONFLICT DO NOTHING`, params);
+    console.log("[SwarmMail] Message recipients inserted", {
+      messageId,
+      recipientCount: event.to_agents.length
+    });
   }
 }
 async function handleFileReserved(db, event) {
-  for (const path of event.paths) {
-    await db.query(`INSERT INTO reservations (project_key, agent_name, path_pattern, exclusive, reason, created_at, expires_at)
-      VALUES ($1, $2, $3, $4, $5, $6, $7)`, [
+  console.log("[SwarmMail] Handling file reservation event", {
+    agent: event.agent_name,
+    paths: event.paths,
+    exclusive: event.exclusive,
+    projectKey: event.project_key
+  });
+  if (event.paths.length > 0) {
+    const values = event.paths.map((_, i) => `($1, $2, $${i + 3}, $${event.paths.length + 3}, $${event.paths.length + 4}, $${event.paths.length + 5}, $${event.paths.length + 6})`).join(", ");
+    const params = [
       event.project_key,
       event.agent_name,
-      path,
+      ...event.paths,
       event.exclusive,
      event.reason || null,
      event.timestamp,
      event.expires_at
-    ]);
+    ];
+    await db.query(`INSERT INTO reservations (project_key, agent_name, path_pattern, exclusive, reason, created_at, expires_at)
+      VALUES ${values}`, params);
+    console.log("[SwarmMail] File reservations inserted", {
+      agent: event.agent_name,
+      reservationCount: event.paths.length
+    });
   }
 }
 async function handleFileReleased(db, event) {
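
Aside: the recipient and reservation writes above replace per-row INSERT loops with a single multi-row statement whose placeholders are generated from the array length. A standalone sketch of the placeholder arithmetic for the simpler two-column case, assuming a generic Postgres-style `query(sql, params)` client rather than the plugin's PGlite wrapper:

// Sketch: build one parameterized multi-row INSERT instead of N single-row ones.
// `query` is a hypothetical client using $1, $2, ... placeholders.
declare function query(sql: string, params: unknown[]): Promise<unknown>;

async function insertRecipients(messageId: number, agents: string[]): Promise<void> {
  if (agents.length === 0) return; // nothing to insert
  // $1 is shared (messageId); each agent gets its own placeholder $2..$(N+1).
  const values = agents.map((_, i) => `($1, $${i + 2})`).join(", ");
  await query(
    `INSERT INTO message_recipients (message_id, agent_name)
     VALUES ${values}
     ON CONFLICT DO NOTHING`,
    [messageId, ...agents],
  );
}

The reservations variant in the diff extends the same idea: the per-path placeholder is `$${i + 3}`, while the four trailing columns reuse fixed placeholders past the end of the paths list, so shared values are bound once instead of once per row.
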
@@ -14592,6 +14641,12 @@ async function checkConflicts(projectKey, agentName, paths, projectPath) {
     }
     for (const path2 of paths) {
       if (pathMatches(path2, reservation.path_pattern)) {
+        console.warn("[SwarmMail] Conflict detected", {
+          path: path2,
+          holder: reservation.agent_name,
+          pattern: reservation.path_pattern,
+          requestedBy: agentName
+        });
         conflicts.push({
           path: path2,
           holder: reservation.agent_name,
@@ -14601,6 +14656,13 @@ async function checkConflicts(projectKey, agentName, paths, projectPath) {
       }
     }
   }
+  if (conflicts.length > 0) {
+    console.warn("[SwarmMail] Total conflicts detected", {
+      count: conflicts.length,
+      requestedBy: agentName,
+      paths
+    });
+  }
   return conflicts;
 }
 function pathMatches(path2, pattern) {
@@ -15271,6 +15333,7 @@ var init_migrations = __esm(() => {
 // src/streams/index.ts
 var exports_streams = {};
 __export(exports_streams, {
+  withTimeout: () => withTimeout,
   sendSwarmMessage: () => sendSwarmMessage,
   sendMessage: () => sendMessage,
   sendAgentMessage: () => sendAgentMessage,
@@ -15343,6 +15406,10 @@ import { PGlite } from "@electric-sql/pglite";
 import { existsSync, mkdirSync, appendFileSync } from "node:fs";
 import { join } from "node:path";
 import { homedir } from "node:os";
+async function withTimeout(promise2, ms, operation) {
+  const timeout = new Promise((_, reject) => setTimeout(() => reject(new Error(`${operation} timed out after ${ms}ms`)), ms));
+  return Promise.race([promise2, timeout]);
+}
 function debugLog(message, data) {
   const timestamp = new Date().toISOString();
   const logLine = data ? `[${timestamp}] ${message}: ${JSON.stringify(data, null, 2)}
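
Aside: the new `withTimeout` helper is a plain `Promise.race` between the wrapped promise and a rejecting timer. A hedged usage sketch; `openDatabase` below is an illustrative stand-in, not one of the plugin's documented entry points:

// Sketch: guard a potentially hanging async call with a timeout.
// Signature taken from the diff: withTimeout(promise, ms, operation).
async function withTimeout<T>(promise: Promise<T>, ms: number, operation: string): Promise<T> {
  const timeout = new Promise<never>((_, reject) =>
    setTimeout(() => reject(new Error(`${operation} timed out after ${ms}ms`)), ms),
  );
  // Note: the losing timer is never cleared, so a pending timeout remains
  // scheduled until it fires; this matches the implementation in the diff.
  return Promise.race([promise, timeout]);
}

// Usage: fail fast if the open takes longer than 5 seconds.
declare function openDatabase(path: string): Promise<unknown>; // hypothetical
const db = await withTimeout(openDatabase("./swarm"), 5000, "openDatabase");
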
@@ -30659,14 +30726,35 @@ class SqliteRateLimiter {
     }
     return { allowed, remaining, resetAt };
   }
+  cleanup() {
+    const BATCH_SIZE = 1000;
+    const MAX_BATCHES = 10;
+    const cutoff = Date.now() - 7200000;
+    let totalDeleted = 0;
+    for (let i = 0; i < MAX_BATCHES; i++) {
+      const result = this.db.run(`DELETE FROM rate_limits
+        WHERE rowid IN (
+          SELECT rowid FROM rate_limits
+          WHERE timestamp < ?
+          LIMIT ?
+        )`, [cutoff, BATCH_SIZE]);
+      totalDeleted += result.changes;
+      if (result.changes < BATCH_SIZE)
+        break;
+    }
+    if (totalDeleted > 0) {
+      console.log("[RateLimiter] Cleanup completed:", {
+        deletedRows: totalDeleted
+      });
+    }
+  }
   async recordRequest(agentName, endpoint) {
     const now = Date.now();
     const stmt = this.db.prepare(`INSERT INTO rate_limits (agent_name, endpoint, window, timestamp) VALUES (?, ?, ?, ?)`);
     stmt.run(agentName, endpoint, "minute", now);
     stmt.run(agentName, endpoint, "hour", now);
     if (Math.random() < 0.01) {
-      const cutoff = Date.now() - 7200000;
-      this.db.run(`DELETE FROM rate_limits WHERE timestamp < ?`, [cutoff]);
+      this.cleanup();
     }
   }
   async close() {
@@ -32593,6 +32681,36 @@ class ErrorAccumulator {
     };
   }
 }
+function formatMemoryStoreOnSuccess(beadId, summary, filesTouched, strategy) {
+  const strategyInfo = strategy ? ` using ${strategy} strategy` : "";
+  return {
+    information: `Task "${beadId}" completed successfully${strategyInfo}.
+Key insight: ${summary}
+Files touched: ${filesTouched.join(", ") || "none"}`,
+    metadata: `swarm, success, ${beadId}, ${strategy || "completion"}`,
+    instruction: "Store this successful completion in semantic-memory for future reference"
+  };
+}
+function formatMemoryStoreOn3Strike(beadId, failures) {
+  const failuresList = failures.map((f, i) => `${i + 1}. ${f.attempt} - Failed: ${f.reason}`).join(`
+`);
+  return {
+    information: `Architecture problem detected in ${beadId}: Task failed after 3 attempts.
+Attempts:
+${failuresList}
+
+This indicates a structural issue requiring human decision, not another fix attempt.`,
+    metadata: `architecture, 3-strike, ${beadId}, failure`,
+    instruction: "Store this architectural problem in semantic-memory to avoid similar patterns in future"
+  };
+}
+function formatMemoryQueryForDecomposition(task, limit = 3) {
+  return {
+    query: task,
+    limit,
+    instruction: "Query semantic-memory for relevant past learnings about similar tasks before decomposition"
+  };
+}
 
 // src/swarm.ts
 init_skills();
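
Aside: these helpers never call a memory service themselves; they build instruction objects that the swarm tools embed in their JSON responses so the coordinating agent can decide whether to store or query semantic memory. A sketch of what a consumer sees, with the bead ID and summary invented for illustration (the function is exported from src/learning.ts, per the source diff below):

// Sketch: the shape returned by formatMemoryStoreOnSuccess (values illustrative).
const memoryStore = formatMemoryStoreOnSuccess(
  "bead-42", // hypothetical bead ID
  "Batching inserts removed the per-row write pattern",
  ["src/streams/index.ts"],
  "file-based",
);
// memoryStore.information -> human-readable summary to persist
// memoryStore.metadata    -> "swarm, success, bead-42, file-based"
// memoryStore.instruction -> tells the agent to store this in semantic-memory
console.log(memoryStore.metadata);
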
@@ -33512,7 +33630,136 @@ ${args.context}` : `## Additional Context
   },
   validation_note: "Parse agent response as JSON and validate with swarm_validate_decomposition",
   cass_history: cassResultInfo,
-  skills: skillsInfo
+  skills: skillsInfo,
+  memory_query: formatMemoryQueryForDecomposition(args.task, 3)
+    }, null, 2);
+  }
+});
+var swarm_delegate_planning = tool({
+  description: "Delegate task decomposition to a swarm/planner subagent. Returns a prompt to spawn the planner. Use this to keep coordinator context lean - all planning reasoning happens in the subagent.",
+  args: {
+    task: tool.schema.string().min(1).describe("The task to decompose"),
+    context: tool.schema.string().optional().describe("Additional context to include"),
+    max_subtasks: tool.schema.number().int().min(2).max(10).optional().default(5).describe("Maximum number of subtasks (default: 5)"),
+    strategy: tool.schema.enum(["auto", "file-based", "feature-based", "risk-based"]).optional().default("auto").describe("Decomposition strategy (default: auto-detect)"),
+    query_cass: tool.schema.boolean().optional().default(true).describe("Query CASS for similar past tasks (default: true)")
+  },
+  async execute(args) {
+    let selectedStrategy;
+    let strategyReasoning;
+    if (args.strategy && args.strategy !== "auto") {
+      selectedStrategy = args.strategy;
+      strategyReasoning = `User-specified strategy: ${selectedStrategy}`;
+    } else {
+      const selection = selectStrategy(args.task);
+      selectedStrategy = selection.strategy;
+      strategyReasoning = selection.reasoning;
+    }
+    let cassContext = "";
+    let cassResultInfo;
+    if (args.query_cass !== false) {
+      const cassResult = await queryCassHistory(args.task, 3);
+      if (cassResult.status === "success") {
+        cassContext = formatCassHistoryForPrompt(cassResult.data);
+        cassResultInfo = {
+          queried: true,
+          results_found: cassResult.data.results.length,
+          included_in_context: true
+        };
+      } else {
+        cassResultInfo = {
+          queried: true,
+          results_found: 0,
+          included_in_context: false,
+          reason: cassResult.status
+        };
+      }
+    } else {
+      cassResultInfo = { queried: false, reason: "disabled" };
+    }
+    let skillsContext = "";
+    let skillsInfo = {
+      included: false
+    };
+    const allSkills = await listSkills();
+    if (allSkills.length > 0) {
+      skillsContext = await getSkillsContextForSwarm();
+      const relevantSkills = await findRelevantSkills(args.task);
+      skillsInfo = {
+        included: true,
+        count: allSkills.length,
+        relevant: relevantSkills
+      };
+      if (relevantSkills.length > 0) {
+        skillsContext += `
+
+**Suggested skills for this task**: ${relevantSkills.join(", ")}`;
+      }
+    }
+    const strategyGuidelines = formatStrategyGuidelines(selectedStrategy);
+    const contextSection = args.context ? `## Additional Context
+${args.context}` : `## Additional Context
+(none provided)`;
+    const planningPrompt = STRATEGY_DECOMPOSITION_PROMPT.replace("{task}", args.task).replace("{strategy_guidelines}", strategyGuidelines).replace("{context_section}", contextSection).replace("{cass_history}", cassContext || "").replace("{skills_context}", skillsContext || "").replace("{max_subtasks}", (args.max_subtasks ?? 5).toString());
+    const subagentInstructions = `
+## CRITICAL: Output Format
+
+You are a planner subagent. Your ONLY output must be valid JSON matching the BeadTree schema.
+
+DO NOT include:
+- Explanatory text before or after the JSON
+- Markdown code fences (\`\`\`json)
+- Commentary or reasoning
+
+OUTPUT ONLY the raw JSON object.
+
+## Example Output
+
+{
+  "epic": {
+    "title": "Add user authentication",
+    "description": "Implement OAuth-based authentication system"
+  },
+  "subtasks": [
+    {
+      "title": "Set up OAuth provider",
+      "description": "Configure OAuth client credentials and redirect URLs",
+      "files": ["src/auth/oauth.ts", "src/config/auth.ts"],
+      "dependencies": [],
+      "estimated_complexity": 2
+    },
+    {
+      "title": "Create auth routes",
+      "description": "Implement login, logout, and callback routes",
+      "files": ["src/app/api/auth/[...nextauth]/route.ts"],
+      "dependencies": [0],
+      "estimated_complexity": 3
+    }
+  ]
+}
+
+Now generate the BeadTree for the given task.`;
+    const fullPrompt = `${planningPrompt}
+
+${subagentInstructions}`;
+    return JSON.stringify({
+      prompt: fullPrompt,
+      subagent_type: "swarm/planner",
+      description: "Task decomposition planning",
+      strategy: {
+        selected: selectedStrategy,
+        reasoning: strategyReasoning
+      },
+      expected_output: "BeadTree JSON (raw JSON, no markdown)",
+      next_steps: [
+        "1. Spawn subagent with Task tool using returned prompt",
+        "2. Parse subagent response as JSON",
+        "3. Validate with swarm_validate_decomposition",
+        "4. Create beads with beads_create_epic"
+      ],
+      cass_history: cassResultInfo,
+      skills: skillsInfo,
+      memory_query: formatMemoryQueryForDecomposition(args.task, 3)
     }, null, 2);
   }
 });
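
Aside: `swarm_delegate_planning` does not spawn the planner itself; it returns a JSON envelope with the prompt and metadata, and the diff's own `next_steps` tell the caller to spawn the subagent and parse its reply. A consumer-side sketch under those assumptions; `spawnSubagent` is a hypothetical stand-in for whatever Task-spawning mechanism the host exposes:

// Sketch: consuming swarm_delegate_planning's JSON envelope.
declare function spawnSubagent(type: string, prompt: string): Promise<string>; // hypothetical

interface DelegatePlanningResult {
  prompt: string;
  subagent_type: string;   // "swarm/planner"
  expected_output: string; // raw BeadTree JSON, no markdown
  next_steps: string[];
}

async function runPlanning(toolOutput: string) {
  const envelope: DelegatePlanningResult = JSON.parse(toolOutput);
  const raw = await spawnSubagent(envelope.subagent_type, envelope.prompt);
  // The planner is instructed to emit raw JSON only, so parse directly;
  // validation then goes through swarm_validate_decomposition.
  return JSON.parse(raw);
}
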
@@ -33568,7 +33815,8 @@ var swarm_decompose = tool({
       ]
     },
     validation_note: "Parse agent response as JSON and validate with BeadTreeSchema from schemas/bead.ts",
-    cass_history: cassResultInfo
+    cass_history: cassResultInfo,
+    memory_query: formatMemoryQueryForDecomposition(args.task, 3)
   }, null, 2);
   }
 });
@@ -34085,7 +34333,7 @@ var swarm_complete = tool({
       threadId: epicId,
       importance: "normal"
     });
-    return JSON.stringify({
+    const response = {
       success: true,
      bead_id: args.bead_id,
      closed: true,
@@ -34121,8 +34369,10 @@ Did you learn anything reusable during this subtask? Consider:
 
 If you discovered something valuable, use \`swarm_learn\` or \`skills_create\` to preserve it as a skill for future swarms.
 
-Files touched: ${args.files_touched?.join(", ") || "none recorded"}`
-    }, null, 2);
+Files touched: ${args.files_touched?.join(", ") || "none recorded"}`,
+      memory_store: formatMemoryStoreOnSuccess(args.bead_id, args.summary, args.files_touched || [])
+    };
+    return JSON.stringify(response, null, 2);
   }
 });
 function classifyFailure(error45) {
@@ -34658,6 +34908,7 @@ var swarmTools = {
   swarm_init,
   swarm_select_strategy,
   swarm_plan_prompt,
+  swarm_delegate_planning,
   swarm_decompose,
   swarm_validate_decomposition,
   swarm_status,
@@ -34704,14 +34955,18 @@ var swarm_check_strikes = tool({
       }
       const record2 = await addStrike(args.bead_id, args.attempt, args.reason, globalStrikeStorage);
      const strikedOut = record2.strike_count >= 3;
-      return JSON.stringify({
+      const response = {
        bead_id: args.bead_id,
        strike_count: record2.strike_count,
        is_striked_out: strikedOut,
        failures: record2.failures,
        message: strikedOut ? "⚠️ STRUCK OUT: 3 strikes reached. STOP and question the architecture." : `Strike ${record2.strike_count} recorded. ${3 - record2.strike_count} remaining.`,
        warning: strikedOut ? "DO NOT attempt Fix #4. Call with action=get_prompt for architecture review." : undefined
-      }, null, 2);
+      };
+      if (strikedOut) {
+        response.memory_store = formatMemoryStoreOn3Strike(args.bead_id, record2.failures);
+      }
+      return JSON.stringify(response, null, 2);
     }
     case "clear": {
       await clearStrikes(args.bead_id, globalStrikeStorage);
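
Aside: after this change `swarm_check_strikes` only attaches a `memory_store` instruction once the third strike lands, so consumers should treat the field as optional. An illustrative response type, with field names taken from the diff (the interface itself is not exported by the package):

// Sketch: shape of swarm_check_strikes' JSON response after this change.
interface StrikeResponse {
  bead_id: string;
  strike_count: number;
  is_striked_out: boolean;
  failures: Array<{ attempt: string; reason: string }>;
  message: string;
  warning?: string;
  memory_store?: { // present only once 3 strikes are reached
    information: string;
    metadata: string;
    instruction: string;
  };
}
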
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "opencode-swarm-plugin",
-  "version": "0.17.1",
+  "version": "0.18.0",
   "description": "Multi-agent swarm coordination for OpenCode with learning capabilities, beads integration, and Agent Mail",
   "type": "module",
   "main": "dist/index.js",
package/src/learning.ts CHANGED
@@ -956,6 +956,112 @@ export class ErrorAccumulator {
   }
 }
 
+// ============================================================================
+// Semantic Memory Integration Helpers
+// ============================================================================
+
+/**
+ * Format memory store instruction for successful task completion
+ *
+ * @param beadId - Bead ID that completed
+ * @param summary - Completion summary
+ * @param filesTouched - Files modified
+ * @param strategy - Decomposition strategy used (if applicable)
+ * @returns Memory store instruction object
+ */
+export function formatMemoryStoreOnSuccess(
+  beadId: string,
+  summary: string,
+  filesTouched: string[],
+  strategy?: DecompositionStrategy,
+): {
+  information: string;
+  metadata: string;
+  instruction: string;
+} {
+  const strategyInfo = strategy ? ` using ${strategy} strategy` : "";
+
+  return {
+    information: `Task "${beadId}" completed successfully${strategyInfo}.
+Key insight: ${summary}
+Files touched: ${filesTouched.join(", ") || "none"}`,
+    metadata: `swarm, success, ${beadId}, ${strategy || "completion"}`,
+    instruction:
+      "Store this successful completion in semantic-memory for future reference",
+  };
+}
+
+/**
+ * Format memory store instruction for architectural problems (3-strike)
+ *
+ * @param beadId - Bead ID that struck out
+ * @param failures - Array of failure attempts
+ * @returns Memory store instruction object
+ */
+export function formatMemoryStoreOn3Strike(
+  beadId: string,
+  failures: Array<{ attempt: string; reason: string }>,
+): {
+  information: string;
+  metadata: string;
+  instruction: string;
+} {
+  const failuresList = failures
+    .map((f, i) => `${i + 1}. ${f.attempt} - Failed: ${f.reason}`)
+    .join("\n");
+
+  return {
+    information: `Architecture problem detected in ${beadId}: Task failed after 3 attempts.
+Attempts:
+${failuresList}
+
+This indicates a structural issue requiring human decision, not another fix attempt.`,
+    metadata: `architecture, 3-strike, ${beadId}, failure`,
+    instruction:
+      "Store this architectural problem in semantic-memory to avoid similar patterns in future",
+  };
+}
+
+/**
+ * Format memory query instruction for task decomposition
+ *
+ * @param task - Task description
+ * @param limit - Max results to return
+ * @returns Memory query instruction object
+ */
+export function formatMemoryQueryForDecomposition(
+  task: string,
+  limit: number = 3,
+): {
+  query: string;
+  limit: number;
+  instruction: string;
+} {
+  return {
+    query: task,
+    limit,
+    instruction:
+      "Query semantic-memory for relevant past learnings about similar tasks before decomposition",
+  };
+}
+
+/**
+ * Format memory validation hint when CASS history helped
+ *
+ * @param beadId - Bead ID that benefited from CASS
+ * @returns Memory validation hint
+ */
+export function formatMemoryValidationHint(beadId: string): {
+  instruction: string;
+  context: string;
+} {
+  return {
+    instruction:
+      "If any semantic-memory entries helped with this task, validate them to reset decay timer",
+    context: `Task ${beadId} completed successfully with assistance from past learnings`,
+  };
+}
+
 // ============================================================================
 // Exports
 // ============================================================================
@@ -35,12 +35,15 @@ import { homedir } from "node:os";
 // SQLite is optional - only available in Bun runtime
 // We use dynamic import to avoid breaking Node.js environments
 interface BunDatabase {
-  run(sql: string, params?: unknown[]): void;
+  run(
+    sql: string,
+    params?: unknown[],
+  ): { changes: number; lastInsertRowid: number };
   query<T>(sql: string): {
     get(...params: unknown[]): T | null;
   };
   prepare(sql: string): {
-    run(...params: unknown[]): void;
+    run(...params: unknown[]): { changes: number; lastInsertRowid: number };
   };
   close(): void;
 }
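
Aside: the widened `run` signature is what lets the bounded `cleanup` below read `result.changes` and stop when a batch comes back short. A minimal sketch of that pattern, assuming only the `BunDatabase` typing shown in the hunk above:

// Sketch: using run()'s result to drive batched deletion.
// RunResult mirrors the BunDatabase typing from the diff.
interface RunResult { changes: number; lastInsertRowid: number }
interface MiniDb { run(sql: string, params?: unknown[]): RunResult }

function deleteExpired(db: MiniDb, cutoff: number, batchSize = 1000, maxBatches = 10): number {
  let total = 0;
  for (let i = 0; i < maxBatches; i++) {
    const { changes } = db.run(
      `DELETE FROM rate_limits
       WHERE rowid IN (SELECT rowid FROM rate_limits WHERE timestamp < ? LIMIT ?)`,
      [cutoff, batchSize],
    );
    total += changes;
    if (changes < batchSize) break; // short batch: nothing left to delete
  }
  return total;
}
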
@@ -453,6 +456,47 @@ export class SqliteRateLimiter implements RateLimiter {
     return { allowed, remaining, resetAt };
   }
 
+  /**
+   * Clean up old rate limit entries in bounded batches
+   *
+   * Limits cleanup to prevent blocking recordRequest on large datasets:
+   * - BATCH_SIZE: 1000 rows per iteration
+   * - MAX_BATCHES: 10 (max 10k rows per cleanup invocation)
+   *
+   * Stops early if fewer than BATCH_SIZE rows deleted (no more to clean).
+   */
+  private cleanup(): void {
+    const BATCH_SIZE = 1000;
+    const MAX_BATCHES = 10;
+    const cutoff = Date.now() - 7_200_000; // 2 hours
+
+    let totalDeleted = 0;
+
+    // Run bounded batches
+    for (let i = 0; i < MAX_BATCHES; i++) {
+      const result = this.db.run(
+        `DELETE FROM rate_limits
+         WHERE rowid IN (
+           SELECT rowid FROM rate_limits
+           WHERE timestamp < ?
+           LIMIT ?
+         )`,
+        [cutoff, BATCH_SIZE],
+      );
+
+      totalDeleted += result.changes;
+
+      // Stop if we deleted less than batch size (no more to delete)
+      if (result.changes < BATCH_SIZE) break;
+    }
+
+    if (totalDeleted > 0) {
+      console.log("[RateLimiter] Cleanup completed:", {
+        deletedRows: totalDeleted,
+      });
+    }
+  }
+
   async recordRequest(agentName: string, endpoint: string): Promise<void> {
     const now = Date.now();
 
@@ -465,9 +509,9 @@ export class SqliteRateLimiter implements RateLimiter {
     stmt.run(agentName, endpoint, "hour", now);
 
     // Opportunistic cleanup of old entries (1% chance to avoid overhead)
+    // Now bounded to prevent blocking on large datasets
     if (Math.random() < 0.01) {
-      const cutoff = Date.now() - 7_200_000;
-      this.db.run(`DELETE FROM rate_limits WHERE timestamp < ?`, [cutoff]);
+      this.cleanup();
     }
   }