opencode-swarm-plugin 0.30.6 → 0.31.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -27051,13 +27051,41 @@ echo "Project directory: $1"
  };
  });
 
+ // src/model-selection.ts
+ var exports_model_selection = {};
+ __export(exports_model_selection, {
+ selectWorkerModel: () => selectWorkerModel
+ });
+ function selectWorkerModel(subtask, config2) {
+ if (subtask.model) {
+ return subtask.model;
+ }
+ const files = subtask.files || [];
+ if (files.length > 0) {
+ const allDocs = files.every((f) => {
+ const lower = f.toLowerCase();
+ return lower.endsWith(".md") || lower.endsWith(".mdx");
+ });
+ const allTests = files.every((f) => {
+ const lower = f.toLowerCase();
+ return lower.includes(".test.") || lower.includes(".spec.");
+ });
+ if (allDocs || allTests) {
+ return config2.liteModel || config2.primaryModel || "anthropic/claude-haiku-4-5";
+ }
+ }
+ return config2.primaryModel || "anthropic/claude-haiku-4-5";
+ }
+
  // src/hive.ts
  init_dist();
  import {
  createHiveAdapter,
  FlushManager,
  importFromJSONL,
- getSwarmMail
+ syncMemories,
+ getSwarmMail,
+ resolvePartialId
  } from "swarm-mail";
  import { existsSync, readFileSync } from "node:fs";
  import { join } from "node:path";
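
A quick read of the new routing above: at runtime `selectWorkerModel` only inspects the subtask's `model` and `files` fields, so a minimal behavioral sketch (the model IDs are the defaults hardcoded in this bundle) is:

```ts
// Sketch only - assumes selectWorkerModel from src/model-selection.ts above is in scope.
const config = {
  primaryModel: "anthropic/claude-sonnet-4-5",
  liteModel: "anthropic/claude-haiku-4-5",
};

selectWorkerModel({ files: ["README.md", "docs/guide.mdx"] }, config); // -> liteModel (all docs)
selectWorkerModel({ files: ["a.test.ts", "b.spec.ts"] }, config);      // -> liteModel (all tests)
selectWorkerModel({ files: ["src/hive.ts", "README.md"] }, config);    // -> primaryModel (mixed)
selectWorkerModel({ files: [], model: "vendor/some-model" }, config);  // -> explicit override wins
```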
@@ -27253,7 +27281,8 @@ var DecomposedSubtaskSchema = exports_external.object({
  description: exports_external.string(),
  files: exports_external.array(exports_external.string()),
  estimated_effort: EffortLevelSchema,
- risks: exports_external.array(exports_external.string()).optional().default([])
+ risks: exports_external.array(exports_external.string()).optional().default([]),
+ model: exports_external.string().optional()
  });
  var SubtaskDependencySchema = exports_external.object({
  from: exports_external.number().int().min(0),
@@ -27907,6 +27936,43 @@ async function importJsonlToPGLite(projectPath) {
  return { imported, updated, errors: errors3 };
  }
  var adapterCache = new Map;
+ var exitHookRegistered = false;
+ var exitHookRunning = false;
+ function registerExitHook() {
+ if (exitHookRegistered) {
+ return;
+ }
+ exitHookRegistered = true;
+ process.on("beforeExit", async (code) => {
+ if (exitHookRunning) {
+ return;
+ }
+ exitHookRunning = true;
+ try {
+ const flushPromises = [];
+ for (const [projectKey, adapter] of adapterCache.entries()) {
+ const flushPromise = (async () => {
+ try {
+ ensureHiveDirectory(projectKey);
+ const flushManager = new FlushManager({
+ adapter,
+ projectKey,
+ outputPath: `${projectKey}/.hive/issues.jsonl`
+ });
+ await flushManager.flush();
+ } catch (error45) {
+ console.warn(`[hive exit hook] Failed to flush ${projectKey}:`, error45 instanceof Error ? error45.message : String(error45));
+ }
+ })();
+ flushPromises.push(flushPromise);
+ }
+ await Promise.all(flushPromises);
+ } finally {
+ exitHookRunning = false;
+ }
+ });
+ }
+ registerExitHook();
  async function getHiveAdapter(projectKey) {
  if (adapterCache.has(projectKey)) {
  return adapterCache.get(projectKey);
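
The exit hook leans on a Node.js subtlety: `beforeExit` fires when the event loop drains, and because the handler schedules async flush work, the process stays alive and the event can fire again once that work settles. The `exitHookRunning` flag guards against re-entry; a reduced sketch of the pattern:

```ts
// Reduced sketch of the guard in registerExitHook above.
// doFlushWork is a hypothetical stand-in for the per-project FlushManager flushes.
let running = false;
process.on("beforeExit", async () => {
  if (running) return; // ignore a re-fire while flushes are still in flight
  running = true;
  try {
    await doFlushWork();
  } finally {
    running = false; // a later firing may flush again if new work appeared
  }
});
```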
@@ -28061,6 +28127,17 @@ var hive_create_epic = tool({
  console.warn("[hive_create_epic] Failed to emit DecompositionGeneratedEvent:", error45);
  }
  }
+ try {
+ ensureHiveDirectory(projectKey);
+ const flushManager = new FlushManager({
+ adapter,
+ projectKey,
+ outputPath: `${projectKey}/.hive/issues.jsonl`
+ });
+ await flushManager.flush();
+ } catch (error45) {
+ console.warn("[hive_create_epic] Failed to sync to JSONL:", error45);
+ }
  return JSON.stringify(result, null, 2);
  } catch (error45) {
  const rollbackErrors = [];
@@ -28125,7 +28202,7 @@ var hive_query = tool({
  var hive_update = tool({
  description: "Update cell status/description",
  args: {
- id: tool.schema.string().describe("Cell ID"),
+ id: tool.schema.string().describe("Cell ID or partial hash"),
  status: tool.schema.enum(["open", "in_progress", "blocked", "closed"]).optional().describe("New status"),
  description: tool.schema.string().optional().describe("New description"),
  priority: tool.schema.number().min(0).max(3).optional().describe("New priority")
@@ -28135,27 +28212,34 @@ var hive_update = tool({
  const projectKey = getHiveWorkingDirectory();
  const adapter = await getHiveAdapter(projectKey);
  try {
+ const cellId = await resolvePartialId(adapter, projectKey, validated.id) || validated.id;
  let cell;
  if (validated.status) {
- cell = await adapter.changeCellStatus(projectKey, validated.id, validated.status);
+ cell = await adapter.changeCellStatus(projectKey, cellId, validated.status);
  }
  if (validated.description !== undefined || validated.priority !== undefined) {
- cell = await adapter.updateCell(projectKey, validated.id, {
+ cell = await adapter.updateCell(projectKey, cellId, {
  description: validated.description,
  priority: validated.priority
  });
  } else if (!validated.status) {
- const existingCell = await adapter.getCell(projectKey, validated.id);
+ const existingCell = await adapter.getCell(projectKey, cellId);
  if (!existingCell) {
  throw new HiveError(`Cell not found: ${validated.id}`, "hive_update");
  }
  cell = existingCell;
  }
- await adapter.markDirty(projectKey, validated.id);
+ await adapter.markDirty(projectKey, cellId);
  const formatted = formatCellForOutput(cell);
  return JSON.stringify(formatted, null, 2);
  } catch (error45) {
  const message = error45 instanceof Error ? error45.message : String(error45);
+ if (message.includes("Ambiguous hash")) {
+ throw new HiveError(`Ambiguous ID '${validated.id}': multiple cells match. Please provide more characters.`, "hive_update");
+ }
+ if (message.includes("Bead not found") || message.includes("Cell not found")) {
+ throw new HiveError(`No cell found matching ID '${validated.id}'`, "hive_update");
+ }
  throw new HiveError(`Failed to update cell: ${message}`, "hive_update");
  }
  }
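
The update/close/start tools all share the same resolve-then-fallback idiom. Judging from these call sites, `resolvePartialId`'s contract is roughly: a unique prefix resolves to the full ID, no match returns a falsy value (so `|| validated.id` falls back to treating the input as a full ID), and an ambiguous prefix throws an error whose message contains "Ambiguous hash", which the catch blocks rewrite into a friendlier HiveError:

```ts
// Assumed contract, inferred from the call sites above (types illustrative).
async function toFullCellId(adapter: any, projectKey: string, input: string): Promise<string> {
  // unique prefix -> full ID; no match -> falsy, fall back to the raw input;
  // multiple matches -> throws "Ambiguous hash ..."
  return (await resolvePartialId(adapter, projectKey, input)) || input;
}
```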
@@ -28163,7 +28247,7 @@ var hive_update = tool({
  var hive_close = tool({
  description: "Close a cell with reason",
  args: {
- id: tool.schema.string().describe("Cell ID"),
+ id: tool.schema.string().describe("Cell ID or partial hash"),
  reason: tool.schema.string().describe("Completion reason")
  },
  async execute(args, ctx) {
@@ -28171,11 +28255,18 @@ var hive_close = tool({
  const projectKey = getHiveWorkingDirectory();
  const adapter = await getHiveAdapter(projectKey);
  try {
- const cell = await adapter.closeCell(projectKey, validated.id, validated.reason);
- await adapter.markDirty(projectKey, validated.id);
+ const cellId = await resolvePartialId(adapter, projectKey, validated.id) || validated.id;
+ const cell = await adapter.closeCell(projectKey, cellId, validated.reason);
+ await adapter.markDirty(projectKey, cellId);
  return `Closed ${cell.id}: ${validated.reason}`;
  } catch (error45) {
  const message = error45 instanceof Error ? error45.message : String(error45);
+ if (message.includes("Ambiguous hash")) {
+ throw new HiveError(`Ambiguous ID '${validated.id}': multiple cells match. Please provide more characters.`, "hive_close");
+ }
+ if (message.includes("Bead not found") || message.includes("Cell not found")) {
+ throw new HiveError(`No cell found matching ID '${validated.id}'`, "hive_close");
+ }
  throw new HiveError(`Failed to close cell: ${message}`, "hive_close");
  }
  }
@@ -28183,17 +28274,24 @@ var hive_close = tool({
  var hive_start = tool({
  description: "Mark a cell as in-progress (shortcut for update --status in_progress)",
  args: {
- id: tool.schema.string().describe("Cell ID")
+ id: tool.schema.string().describe("Cell ID or partial hash")
  },
  async execute(args, ctx) {
  const projectKey = getHiveWorkingDirectory();
  const adapter = await getHiveAdapter(projectKey);
  try {
- const cell = await adapter.changeCellStatus(projectKey, args.id, "in_progress");
- await adapter.markDirty(projectKey, args.id);
+ const cellId = await resolvePartialId(adapter, projectKey, args.id) || args.id;
+ const cell = await adapter.changeCellStatus(projectKey, cellId, "in_progress");
+ await adapter.markDirty(projectKey, cellId);
  return `Started: ${cell.id}`;
  } catch (error45) {
  const message = error45 instanceof Error ? error45.message : String(error45);
+ if (message.includes("Ambiguous hash")) {
+ throw new HiveError(`Ambiguous ID '${args.id}': multiple cells match. Please provide more characters.`, "hive_start");
+ }
+ if (message.includes("Bead not found") || message.includes("Cell not found")) {
+ throw new HiveError(`No cell found matching ID '${args.id}'`, "hive_start");
+ }
  throw new HiveError(`Failed to start cell: ${message}`, "hive_start");
  }
  }
@@ -28247,8 +28345,18 @@ var hive_sync = tool({
  outputPath: `${projectKey}/.hive/issues.jsonl`
  });
  const flushResult = await withTimeout(flushManager.flush(), TIMEOUT_MS, "flush hive");
- if (flushResult.cellsExported === 0) {
- return "No cells to sync";
+ const swarmMail = await getSwarmMail(projectKey);
+ const db = await swarmMail.getDatabase();
+ const hivePath = join(projectKey, ".hive");
+ let memoriesSynced = 0;
+ try {
+ const memoryResult = await syncMemories(db, hivePath);
+ memoriesSynced = memoryResult.exported;
+ } catch (err) {
+ console.warn("[hive_sync] Memory sync warning:", err);
+ }
+ if (flushResult.cellsExported === 0 && memoriesSynced === 0) {
+ return "No cells or memories to sync";
  }
  const hiveStatusResult = await runGitCommand([
  "status",
@@ -30647,7 +30755,7 @@ Agents MUST update their bead status as they work. No silent progress.
 
  ## Requirements
 
- 1. **Break into 2-{max_subtasks} independent subtasks** that can run in parallel
+ 1. **Break into independent subtasks** that can run in parallel (as many as needed)
  2. **Assign files** - each subtask must specify which files it will modify
  3. **No file overlap** - files cannot appear in multiple subtasks (they get exclusive locks)
  4. **Order by dependency** - if subtask B needs subtask A's output, A must come first in the array
@@ -30720,7 +30828,7 @@ Agents MUST update their bead status as they work. No silent progress.
 
  ## Requirements
 
- 1. **Break into 2-{max_subtasks} independent subtasks** that can run in parallel
+ 1. **Break into independent subtasks** that can run in parallel (as many as needed)
  2. **Assign files** - each subtask must specify which files it will modify
  3. **No file overlap** - files cannot appear in multiple subtasks (they get exclusive locks)
  4. **Order by dependency** - if subtask B needs subtask A's output, A must come first in the array
@@ -30886,10 +30994,10 @@ var swarm_decompose = tool({
  description: "Generate decomposition prompt for breaking task into parallelizable subtasks. Optionally queries CASS for similar past tasks.",
  args: {
  task: tool.schema.string().min(1).describe("Task description to decompose"),
- max_subtasks: tool.schema.number().int().min(2).max(10).default(5).describe("Maximum number of subtasks (default: 5)"),
+ max_subtasks: tool.schema.number().int().min(1).optional().describe("Suggested max subtasks (optional - LLM decides if not specified)"),
  context: tool.schema.string().optional().describe("Additional context (codebase info, constraints, etc.)"),
  query_cass: tool.schema.boolean().optional().describe("Query CASS for similar past tasks (default: true)"),
- cass_limit: tool.schema.number().int().min(1).max(10).optional().describe("Max CASS results to include (default: 3)")
+ cass_limit: tool.schema.number().int().min(1).optional().describe("Max CASS results to include (default: 3)")
  },
  async execute(args) {
  const { formatMemoryQueryForDecomposition: formatMemoryQueryForDecomposition2 } = await Promise.resolve().then(() => (init_learning(), exports_learning));
@@ -31017,7 +31125,7 @@ var swarm_delegate_planning = tool({
  args: {
  task: tool.schema.string().min(1).describe("The task to decompose"),
  context: tool.schema.string().optional().describe("Additional context to include"),
- max_subtasks: tool.schema.number().int().min(2).max(10).optional().default(5).describe("Maximum number of subtasks (default: 5)"),
+ max_subtasks: tool.schema.number().int().min(1).optional().describe("Suggested max subtasks (optional - LLM decides if not specified)"),
  strategy: tool.schema.enum(["auto", "file-based", "feature-based", "risk-based"]).optional().default("auto").describe("Decomposition strategy (default: auto-detect)"),
  query_cass: tool.schema.boolean().optional().default(true).describe("Query CASS for similar past tasks (default: true)")
  },
@@ -34318,6 +34426,21 @@ This will be recorded as a negative learning signal.`;
  }
  }, null, 2);
  }
+ let syncSuccess = false;
+ let syncError;
+ try {
+ const previousWorkingDir = getHiveWorkingDirectory();
+ setHiveWorkingDirectory(args.project_key);
+ try {
+ const syncResult = await hive_sync.execute({ auto_pull: false }, _ctx);
+ syncSuccess = !syncResult.includes("error");
+ } finally {
+ setHiveWorkingDirectory(previousWorkingDir);
+ }
+ } catch (error45) {
+ syncError = error45 instanceof Error ? error45.message : String(error45);
+ console.warn(`[swarm_complete] Auto-sync failed (non-fatal): ${syncError}`);
+ }
  try {
  const epicId3 = args.bead_id.includes(".") ? args.bead_id.split(".")[0] : args.bead_id;
  const durationMs2 = args.start_time ? Date.now() - args.start_time : 0;
@@ -34403,6 +34526,8 @@ This will be recorded as a negative learning signal.`;
  bead_id: args.bead_id,
  closed: true,
  reservations_released: true,
+ synced: syncSuccess,
+ sync_error: syncError,
  message_sent: messageSent,
  message_error: messageError,
  agent_registration: {
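
With the auto-sync above, `swarm_complete`'s JSON result gains two fields; their shape, with the surrounding fields elided:

```ts
// Fields added to swarm_complete's result (names taken from the diff above).
interface SwarmCompleteSyncFields {
  synced: boolean;      // true when the auto hive_sync ran and its output contained no "error"
  sync_error?: string;  // set when the auto-sync threw; the failure is logged but non-fatal
}
```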
@@ -35079,7 +35204,7 @@ Agents MUST update their cell status as they work. No silent progress.
 
  ## Requirements
 
- 1. **Break into 2-{max_subtasks} independent subtasks** that can run in parallel
+ 1. **Break into independent subtasks** that can run in parallel (as many as needed)
  2. **Assign files** - each subtask must specify which files it will modify
  3. **No file overlap** - files cannot appear in multiple subtasks (they get exclusive locks)
  4. **Order by dependency** - if subtask B needs subtask A's output, A must come first in the array
@@ -35231,18 +35356,36 @@ swarmmail_init(project_path="{project_path}", task_description="{bead_id}: {subt
 
  **If you skip this step, your work will not be tracked and swarm_complete will fail.**
 
- ### Step 2: Query Past Learnings (BEFORE starting work)
+ ### Step 2: \uD83E\uDDE0 Query Past Learnings (MANDATORY - BEFORE starting work)
+
+ **⚠️ CRITICAL: ALWAYS query semantic memory BEFORE writing ANY code.**
+
  \`\`\`
- semantic-memory_find(query="<keywords from your task>", limit=5)
+ semantic-memory_find(query="<keywords from your task>", limit=5, expand=true)
  \`\`\`
 
- **Check if past agents solved similar problems.** Search for:
- - Error messages if debugging
- - Domain concepts (e.g., "authentication", "caching")
- - Technology stack (e.g., "Next.js", "React")
- - Patterns (e.g., "event sourcing", "validation")
+ **Why this is MANDATORY:**
+ - Past agents may have already solved your exact problem
+ - Avoids repeating mistakes that wasted 30+ minutes before
+ - Discovers project-specific patterns and gotchas
+ - Finds known workarounds for tool/library quirks
+
+ **Search Query Examples by Task Type:**
+
+ - **Bug fix**: Use exact error message or "<symptom> <component>"
+ - **New feature**: Search "<domain concept> implementation pattern"
+ - **Refactor**: Query "<pattern name> migration approach"
+ - **Integration**: Look for "<library name> gotchas configuration"
+ - **Testing**: Find "testing <component type> characterization tests"
+ - **Performance**: Search "<technology> performance optimization"
 
- **Past learnings save time and prevent repeating mistakes.**
+ **BEFORE you start coding:**
+ 1. Run semantic-memory_find with keywords from your task
+ 2. Read the results with expand=true for full content
+ 3. Check if any memory solves your problem or warns of pitfalls
+ 4. Adjust your approach based on past learnings
+
+ **If you skip this step, you WILL waste time solving already-solved problems.**
 
  ### Step 3: Load Relevant Skills (if available)
  \`\`\`
@@ -35328,21 +35471,44 @@ swarm_checkpoint(
 
  **Checkpoints preserve context so you can recover if things go wrong.**
 
- ### Step 8: Store Learnings (if you discovered something)
+ ### Step 8: \uD83D\uDCBE STORE YOUR LEARNINGS (if you discovered something)
+
+ **If you learned it the hard way, STORE IT so the next agent doesn't have to.**
+
  \`\`\`
  semantic-memory_store(
  information="<what you learned, WHY it matters, how to apply it>",
- metadata="<tags: domain, tech-stack, pattern-type>"
+ tags="<domain, tech-stack, pattern-type>"
  )
  \`\`\`
 
- **Store:**
- - Tricky bugs you solved (root cause + solution)
- - Project-specific patterns or domain rules
- - Tool/library gotchas and workarounds
- - Failed approaches (anti-patterns to avoid)
+ **MANDATORY Storage Triggers - Store when you:**
+ - \uD83D\uDC1B **Solved a tricky bug** (>15min debugging) - include root cause + solution
+ - \uD83D\uDCA1 **Discovered a project-specific pattern** - domain rules, business logic quirks
+ - ⚠️ **Found a tool/library gotcha** - API quirks, version-specific bugs, workarounds
+ - \uD83D\uDEAB **Tried an approach that failed** - anti-patterns to avoid, why it didn't work
+ - \uD83C\uDFD7️ **Made an architectural decision** - reasoning, alternatives considered, tradeoffs
 
- **Don't store generic knowledge.** Store the WHY, not just the WHAT.
+ **What Makes a GOOD Memory:**
+
+ ✅ **GOOD** (actionable, explains WHY):
+ \`\`\`
+ "OAuth refresh tokens need 5min buffer before expiry to avoid race conditions.
+ Without buffer, token refresh can fail mid-request if expiry happens between
+ check and use. Implemented with: if (expiresAt - Date.now() < 300000) refresh()"
+ \`\`\`
+
+ ❌ **BAD** (generic, no context):
+ \`\`\`
+ "Fixed the auth bug by adding a null check"
+ \`\`\`
+
+ **What NOT to Store:**
+ - Generic knowledge that's in official documentation
+ - Implementation details that change frequently
+ - Vague descriptions without context ("fixed the thing")
+
+ **The WHY matters more than the WHAT.** Future agents need context to apply your learning.
 
  ### Step 9: Complete (REQUIRED - releases reservations)
  \`\`\`
@@ -35433,17 +35599,20 @@ Other cell operations:
 
  **NON-NEGOTIABLE:**
  1. Step 1 (swarmmail_init) MUST be first - do it before anything else
- 2. Step 2 (semantic-memory_find) MUST happen before starting work
+ 2. \uD83E\uDDE0 Step 2 (semantic-memory_find) MUST happen BEFORE starting work - query first, code second
  3. Step 4 (swarmmail_reserve) - YOU reserve files, not coordinator
  4. Step 6 (swarm_progress) - Report at milestones, don't work silently
- 5. Step 9 (swarm_complete) - Use this to close, NOT hive_close
+ 5. \uD83D\uDCBE Step 8 (semantic-memory_store) - If you learned something hard, STORE IT
+ 6. Step 9 (swarm_complete) - Use this to close, NOT hive_close
 
  **If you skip these steps:**
  - Your work won't be tracked (swarm_complete will fail)
- - You'll waste time repeating solved problems (no semantic memory query)
+ - \uD83D\uDD04 You'll waste time repeating already-solved problems (no semantic memory query)
  - Edit conflicts with other agents (no file reservation)
  - Lost work if you crash (no checkpoints)
- - Future agents repeat your mistakes (no learnings stored)
+ - \uD83D\uDD04 Future agents repeat YOUR mistakes (no learnings stored)
+
+ **Memory is the swarm's collective intelligence. Query it. Feed it.**
 
  Begin now.`;
  var EVALUATION_PROMPT = `Evaluate the work completed for this subtask.
@@ -35576,7 +35745,7 @@ var swarm_subtask_prompt = tool({
  }
  });
  var swarm_spawn_subtask = tool({
- description: "Prepare a subtask for spawning. Returns prompt with Agent Mail/hive tracking instructions. IMPORTANT: Pass project_path for swarmmail_init.",
+ description: "Prepare a subtask for spawning. Returns prompt with Agent Mail/hive tracking instructions. IMPORTANT: Pass project_path for swarmmail_init. Automatically selects appropriate model based on file types.",
  args: {
  bead_id: tool.schema.string().describe("Subtask bead ID"),
  epic_id: tool.schema.string().describe("Parent epic bead ID"),
@@ -35589,7 +35758,8 @@ var swarm_spawn_subtask = tool({
  shared_context: tool.schema.string().optional(),
  skills_to_load: tool.schema.array(tool.schema.string()).optional(),
  coordinator_notes: tool.schema.string().optional()
- }).optional().describe("Recovery context from checkpoint compaction")
+ }).optional().describe("Recovery context from checkpoint compaction"),
+ model: tool.schema.string().optional().describe("Optional explicit model override (auto-selected if not provided)")
  },
  async execute(args) {
  const prompt = formatSubtaskPromptV2({
@@ -35602,13 +35772,28 @@ var swarm_spawn_subtask = tool({
  project_path: args.project_path,
  recovery_context: args.recovery_context
  });
+ const { selectWorkerModel: selectWorkerModel2 } = await Promise.resolve().then(() => exports_model_selection);
+ const subtask = {
+ title: args.subtask_title,
+ description: args.subtask_description || "",
+ files: args.files,
+ estimated_effort: "medium",
+ risks: [],
+ model: args.model
+ };
+ const config2 = {
+ primaryModel: "anthropic/claude-sonnet-4-5",
+ liteModel: "anthropic/claude-haiku-4-5"
+ };
+ const selectedModel = selectWorkerModel2(subtask, config2);
  return JSON.stringify({
  prompt,
  bead_id: args.bead_id,
  epic_id: args.epic_id,
  files: args.files,
  project_path: args.project_path,
- recovery_context: args.recovery_context
+ recovery_context: args.recovery_context,
+ recommended_model: selectedModel
  }, null, 2);
  }
  });
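
Note that this selection runs against a hardcoded config (Sonnet primary, Haiku lite) rather than user settings, and the result is advisory: the tool only reports `recommended_model`, leaving the spawn to the caller. A sketch of a coordinator consuming the output:

```ts
// Sketch: consuming swarm_spawn_subtask's JSON output.
// spawnWorker is hypothetical - whatever mechanism the coordinator uses to launch an agent.
const result = JSON.parse(output) as {
  prompt: string;
  bead_id: string;
  recommended_model: string; // liteModel for all-docs/all-tests subtasks, otherwise primaryModel
};
await spawnWorker({ model: result.recommended_model, prompt: result.prompt });
```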
@@ -35647,10 +35832,10 @@ var swarm_plan_prompt = tool({
  args: {
  task: tool.schema.string().min(1).describe("Task description to decompose"),
  strategy: tool.schema.enum(["file-based", "feature-based", "risk-based", "auto"]).optional().describe("Decomposition strategy (default: auto-detect)"),
- max_subtasks: tool.schema.number().int().min(2).max(10).default(5).describe("Maximum number of subtasks (default: 5)"),
+ max_subtasks: tool.schema.number().int().min(1).optional().describe("Suggested max subtasks (optional - LLM decides if not specified)"),
  context: tool.schema.string().optional().describe("Additional context (codebase info, constraints, etc.)"),
  query_cass: tool.schema.boolean().optional().describe("Query CASS for similar past tasks (default: true)"),
- cass_limit: tool.schema.number().int().min(1).max(10).optional().describe("Max CASS results to include (default: 3)"),
+ cass_limit: tool.schema.number().int().min(1).optional().describe("Max CASS results to include (default: 3)"),
  include_skills: tool.schema.boolean().optional().describe("Include available skills in context (default: true)")
  },
  async execute(args) {
@@ -50310,12 +50495,18 @@ async function createMemoryAdapter(db) {
  if (tags.length > 0) {
  metadata.tags = tags;
  }
+ const clampConfidence = (c) => {
+ if (c === undefined)
+ return 0.7;
+ return Math.max(0, Math.min(1, c));
+ };
  const memory = {
  id,
  content: args2.information,
  metadata,
  collection,
- createdAt: new Date
+ createdAt: new Date,
+ confidence: clampConfidence(args2.confidence)
  };
  const program = exports_Effect.gen(function* () {
  const ollama = yield* Ollama;
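
`clampConfidence` pins the stored value to [0, 1] with a 0.7 default. The tool description updated further down documents only the decay endpoints (0.0 → 45-day half-life, 1.0 → 135-day); assuming a linear interpolation between them (the actual mapping is not shown in this diff), the relationship would be:

```ts
// Assumption: linear interpolation between the documented endpoints - not the shipped formula.
const clampConfidence = (c?: number) => (c === undefined ? 0.7 : Math.max(0, Math.min(1, c)));

function halfLifeDays(confidence?: number): number {
  return 45 + clampConfidence(confidence) * (135 - 45);
}

halfLifeDays(0.0);       // 45
halfLifeDays(undefined); // 108 (default 0.7)
halfLifeDays(1.0);       // 135
```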
@@ -50426,12 +50617,13 @@ function resetMemoryCache() {
  cachedProjectPath = null;
  }
  var semantic_memory_store = tool({
- description: "Store a memory with semantic embedding. Memories are searchable by semantic similarity and can be organized into collections.",
+ description: "Store a memory with semantic embedding. Memories are searchable by semantic similarity and can be organized into collections. Confidence affects decay rate: high confidence (1.0) = 135 day half-life, low confidence (0.0) = 45 day half-life.",
  args: {
  information: tool.schema.string().describe("The information to store (required)"),
  collection: tool.schema.string().optional().describe("Collection name (defaults to 'default')"),
  tags: tool.schema.string().optional().describe("Comma-separated tags (e.g., 'auth,tokens,oauth')"),
- metadata: tool.schema.string().optional().describe("JSON string with additional metadata")
+ metadata: tool.schema.string().optional().describe("JSON string with additional metadata"),
+ confidence: tool.schema.number().optional().describe("Confidence level (0.0-1.0) affecting decay rate. Higher = slower decay. Default 0.7")
  },
  async execute(args2, ctx) {
  const adapter = await getMemoryAdapter();
package/dist/memory-tools.d.ts CHANGED
@@ -29,12 +29,14 @@ export declare const semantic_memory_store: {
  collection: import("zod").ZodOptional<import("zod").ZodString>;
  tags: import("zod").ZodOptional<import("zod").ZodString>;
  metadata: import("zod").ZodOptional<import("zod").ZodString>;
+ confidence: import("zod").ZodOptional<import("zod").ZodNumber>;
  };
  execute(args: {
  information: string;
  collection?: string | undefined;
  tags?: string | undefined;
  metadata?: string | undefined;
+ confidence?: number | undefined;
  }, context: import("@opencode-ai/plugin").ToolContext): Promise<string>;
  };
  /**
@@ -134,12 +136,14 @@ export declare const memoryTools: {
  collection: import("zod").ZodOptional<import("zod").ZodString>;
  tags: import("zod").ZodOptional<import("zod").ZodString>;
  metadata: import("zod").ZodOptional<import("zod").ZodString>;
+ confidence: import("zod").ZodOptional<import("zod").ZodNumber>;
  };
  execute(args: {
  information: string;
  collection?: string | undefined;
  tags?: string | undefined;
  metadata?: string | undefined;
+ confidence?: number | undefined;
  }, context: import("@opencode-ai/plugin").ToolContext): Promise<string>;
  };
  readonly "semantic-memory_find": {
package/dist/memory-tools.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"memory-tools.d.ts","sourceRoot":"","sources":["../src/memory-tools.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;GAaG;AAIH,OAAO,EACN,mBAAmB,EACnB,KAAK,aAAa,EAClB,KAAK,SAAS,EACd,KAAK,QAAQ,EACb,KAAK,MAAM,EACX,KAAK,QAAQ,EACb,KAAK,WAAW,EAChB,KAAK,UAAU,EACf,KAAK,WAAW,EAChB,KAAK,YAAY,EACjB,KAAK,eAAe,EACpB,MAAM,UAAU,CAAC;AAGlB,YAAY,EACX,aAAa,EACb,SAAS,EACT,QAAQ,EACR,MAAM,EACN,QAAQ,EACR,WAAW,EACX,UAAU,EACV,WAAW,EACX,YAAY,EACZ,eAAe,GACf,CAAC;AA2CF;;GAEG;AACH,wBAAgB,gBAAgB,IAAI,IAAI,CAGvC;AAGD,OAAO,EAAE,mBAAmB,EAAE,CAAC;AAM/B;;GAEG;AACH,eAAO,MAAM,qBAAqB;;;;;;;;;;;;;;CAyBhC,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,oBAAoB;;;;;;;;;;;;;;;;CA2B/B,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,mBAAmB;;;;;;;;CAU9B,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,sBAAsB;;;;;;;;CAUjC,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,wBAAwB;;;;;;;;CAWnC,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,oBAAoB;;;;;;;;CAa/B,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,qBAAqB;;;;CAQhC,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,qBAAqB;;;;CAShC,CAAC;AAMH;;;;GAIG;AACH,eAAO,MAAM,WAAW;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CASd,CAAC"}
+ {"version":3,"file":"memory-tools.d.ts","sourceRoot":"","sources":["../src/memory-tools.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;GAaG;AAIH,OAAO,EACN,mBAAmB,EACnB,KAAK,aAAa,EAClB,KAAK,SAAS,EACd,KAAK,QAAQ,EACb,KAAK,MAAM,EACX,KAAK,QAAQ,EACb,KAAK,WAAW,EAChB,KAAK,UAAU,EACf,KAAK,WAAW,EAChB,KAAK,YAAY,EACjB,KAAK,eAAe,EACpB,MAAM,UAAU,CAAC;AAGlB,YAAY,EACX,aAAa,EACb,SAAS,EACT,QAAQ,EACR,MAAM,EACN,QAAQ,EACR,WAAW,EACX,UAAU,EACV,WAAW,EACX,YAAY,EACZ,eAAe,GACf,CAAC;AA2CF;;GAEG;AACH,wBAAgB,gBAAgB,IAAI,IAAI,CAGvC;AAGD,OAAO,EAAE,mBAAmB,EAAE,CAAC;AAM/B;;GAEG;AACH,eAAO,MAAM,qBAAqB;;;;;;;;;;;;;;;;CA6BhC,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,oBAAoB;;;;;;;;;;;;;;;;CA2B/B,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,mBAAmB;;;;;;;;CAU9B,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,sBAAsB;;;;;;;;CAUjC,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,wBAAwB;;;;;;;;CAWnC,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,oBAAoB;;;;;;;;CAa/B,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,qBAAqB;;;;CAQhC,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,qBAAqB;;;;CAShC,CAAC;AAMH;;;;GAIG;AACH,eAAO,MAAM,WAAW;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CASd,CAAC"}
package/dist/memory.d.ts CHANGED
@@ -38,6 +38,8 @@ export interface StoreArgs {
  readonly collection?: string;
  readonly tags?: string;
  readonly metadata?: string;
+ /** Confidence level (0.0-1.0) affecting decay rate. Higher = slower decay. Default 0.7 */
+ readonly confidence?: number;
  }
  /** Arguments for find operation */
  export interface FindArgs {
package/dist/memory.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"memory.d.ts","sourceRoot":"","sources":["../src/memory.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2BG;AAGH,OAAO,EACN,KAAK,eAAe,EAKpB,KAAK,MAAM,EAIX,MAAM,YAAY,CAAC;AAYpB;;;GAGG;AACH,wBAAgB,mBAAmB,IAAI,IAAI,CAE1C;AAMD,oCAAoC;AACpC,MAAM,WAAW,SAAS;IACzB,QAAQ,CAAC,WAAW,EAAE,MAAM,CAAC;IAC7B,QAAQ,CAAC,UAAU,CAAC,EAAE,MAAM,CAAC;IAC7B,QAAQ,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC;IACvB,QAAQ,CAAC,QAAQ,CAAC,EAAE,MAAM,CAAC;CAC3B;AAED,mCAAmC;AACnC,MAAM,WAAW,QAAQ;IACxB,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC;IACvB,QAAQ,CAAC,KAAK,CAAC,EAAE,MAAM,CAAC;IACxB,QAAQ,CAAC,UAAU,CAAC,EAAE,MAAM,CAAC;IAC7B,QAAQ,CAAC,MAAM,CAAC,EAAE,OAAO,CAAC;IAC1B,QAAQ,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC;CACvB;AAED,mDAAmD;AACnD,MAAM,WAAW,MAAM;IACtB,QAAQ,CAAC,EAAE,EAAE,MAAM,CAAC;CACpB;AAED,mCAAmC;AACnC,MAAM,WAAW,QAAQ;IACxB,QAAQ,CAAC,UAAU,CAAC,EAAE,MAAM,CAAC;CAC7B;AAED,kCAAkC;AAClC,MAAM,WAAW,WAAW;IAC3B,QAAQ,CAAC,EAAE,EAAE,MAAM,CAAC;IACpB,QAAQ,CAAC,OAAO,EAAE,MAAM,CAAC;CACzB;AAED,iCAAiC;AACjC,MAAM,WAAW,UAAU;IAC1B,QAAQ,CAAC,OAAO,EAAE,KAAK,CAAC;QACvB,QAAQ,CAAC,EAAE,EAAE,MAAM,CAAC;QACpB,QAAQ,CAAC,OAAO,EAAE,MAAM,CAAC;QACzB,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC;QACvB,QAAQ,CAAC,UAAU,EAAE,MAAM,CAAC;QAC5B,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAC3C,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC;KAC3B,CAAC,CAAC;IACH,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC;CACvB;AAED,kCAAkC;AAClC,MAAM,WAAW,WAAW;IAC3B,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC;IAC1B,QAAQ,CAAC,UAAU,EAAE,MAAM,CAAC;CAC5B;AAED,+BAA+B;AAC/B,MAAM,WAAW,YAAY;IAC5B,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC;IACzB,QAAQ,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC;CAC1B;AAED,6CAA6C;AAC7C,MAAM,WAAW,eAAe;IAC/B,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B,QAAQ,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC;CAC1B;AAiED;;;;GAIG;AACH,MAAM,WAAW,aAAa;IAC7B,QAAQ,CAAC,KAAK,EAAE,CAAC,IAAI,EAAE,SAAS,KAAK,OAAO,CAAC,WAAW,CAAC,CAAC;IAC1D,QAAQ,CAAC,IAAI,EAAE,CAAC,IAAI,EAAE,QAAQ,KAAK,OAAO,CAAC,UAAU,CAAC,CAAC;IACvD,QAAQ,CAAC,GAAG,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,CAAC;IACvD,QAAQ,CAAC,MAAM,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,eAAe,CAAC,CAAC;IAC5D,QAAQ,CAAC,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,eAAe,CAAC,CAAC;IAC9D,QAAQ,CAAC,IAAI,EAAE,CAAC,IAAI,EAAE,QAAQ,KAAK,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC;IACrD,QAAQ,CAAC,KAAK,EAAE,MAAM,OAAO,CAAC,WAAW,CAAC,CAAC;IAC3C,QAAQ,CAAC,WAAW,EAAE,MAAM,OAAO,CAAC,YAAY,CAAC,CAAC;CAClD;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,wBAAsB,mBAAmB,CACxC,EAAE,EAAE,eAAe,GACjB,OAAO,CAAC,aAAa,CAAC,CAqNxB"}
+ {"version":3,"file":"memory.d.ts","sourceRoot":"","sources":["../src/memory.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2BG;AAGH,OAAO,EACN,KAAK,eAAe,EAKpB,KAAK,MAAM,EAIX,MAAM,YAAY,CAAC;AAYpB;;;GAGG;AACH,wBAAgB,mBAAmB,IAAI,IAAI,CAE1C;AAMD,oCAAoC;AACpC,MAAM,WAAW,SAAS;IACzB,QAAQ,CAAC,WAAW,EAAE,MAAM,CAAC;IAC7B,QAAQ,CAAC,UAAU,CAAC,EAAE,MAAM,CAAC;IAC7B,QAAQ,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC;IACvB,QAAQ,CAAC,QAAQ,CAAC,EAAE,MAAM,CAAC;IAC3B,0FAA0F;IAC1F,QAAQ,CAAC,UAAU,CAAC,EAAE,MAAM,CAAC;CAC7B;AAED,mCAAmC;AACnC,MAAM,WAAW,QAAQ;IACxB,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC;IACvB,QAAQ,CAAC,KAAK,CAAC,EAAE,MAAM,CAAC;IACxB,QAAQ,CAAC,UAAU,CAAC,EAAE,MAAM,CAAC;IAC7B,QAAQ,CAAC,MAAM,CAAC,EAAE,OAAO,CAAC;IAC1B,QAAQ,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC;CACvB;AAED,mDAAmD;AACnD,MAAM,WAAW,MAAM;IACtB,QAAQ,CAAC,EAAE,EAAE,MAAM,CAAC;CACpB;AAED,mCAAmC;AACnC,MAAM,WAAW,QAAQ;IACxB,QAAQ,CAAC,UAAU,CAAC,EAAE,MAAM,CAAC;CAC7B;AAED,kCAAkC;AAClC,MAAM,WAAW,WAAW;IAC3B,QAAQ,CAAC,EAAE,EAAE,MAAM,CAAC;IACpB,QAAQ,CAAC,OAAO,EAAE,MAAM,CAAC;CACzB;AAED,iCAAiC;AACjC,MAAM,WAAW,UAAU;IAC1B,QAAQ,CAAC,OAAO,EAAE,KAAK,CAAC;QACvB,QAAQ,CAAC,EAAE,EAAE,MAAM,CAAC;QACpB,QAAQ,CAAC,OAAO,EAAE,MAAM,CAAC;QACzB,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC;QACvB,QAAQ,CAAC,UAAU,EAAE,MAAM,CAAC;QAC5B,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAC3C,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC;KAC3B,CAAC,CAAC;IACH,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC;CACvB;AAED,kCAAkC;AAClC,MAAM,WAAW,WAAW;IAC3B,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC;IAC1B,QAAQ,CAAC,UAAU,EAAE,MAAM,CAAC;CAC5B;AAED,+BAA+B;AAC/B,MAAM,WAAW,YAAY;IAC5B,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC;IACzB,QAAQ,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC;CAC1B;AAED,6CAA6C;AAC7C,MAAM,WAAW,eAAe;IAC/B,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B,QAAQ,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC;CAC1B;AAiED;;;;GAIG;AACH,MAAM,WAAW,aAAa;IAC7B,QAAQ,CAAC,KAAK,EAAE,CAAC,IAAI,EAAE,SAAS,KAAK,OAAO,CAAC,WAAW,CAAC,CAAC;IAC1D,QAAQ,CAAC,IAAI,EAAE,CAAC,IAAI,EAAE,QAAQ,KAAK,OAAO,CAAC,UAAU,CAAC,CAAC;IACvD,QAAQ,CAAC,GAAG,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,CAAC;IACvD,QAAQ,CAAC,MAAM,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,eAAe,CAAC,CAAC;IAC5D,QAAQ,CAAC,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,eAAe,CAAC,CAAC;IAC9D,QAAQ,CAAC,IAAI,EAAE,CAAC,IAAI,EAAE,QAAQ,KAAK,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC;IACrD,QAAQ,CAAC,KAAK,EAAE,MAAM,OAAO,CAAC,WAAW,CAAC,CAAC;IAC3C,QAAQ,CAAC,WAAW,EAAE,MAAM,OAAO,CAAC,YAAY,CAAC,CAAC;CAClD;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,wBAAsB,mBAAmB,CACxC,EAAE,EAAE,eAAe,GACjB,OAAO,CAAC,aAAa,CAAC,CA4NxB"}
package/dist/model-selection.d.ts ADDED
@@ -0,0 +1,37 @@
+ /**
+ * Model Selection Module
+ *
+ * Determines which model a worker agent should use based on subtask
+ * characteristics like file types and complexity.
+ *
+ * Priority:
+ * 1. Explicit model field in subtask
+ * 2. File-type inference (docs/tests → lite model)
+ * 3. Default to primary model
+ */
+ import type { DecomposedSubtask } from "./schemas/task";
+ /**
+ * Configuration interface for swarm models
+ */
+ export interface SwarmConfig {
+ primaryModel: string;
+ liteModel?: string;
+ }
+ /**
+ * Select the appropriate model for a worker agent based on subtask characteristics
+ *
+ * Priority order:
+ * 1. Explicit `model` field in subtask (if present)
+ * 2. File-type inference:
+ * - All .md/.mdx files → liteModel
+ * - All .test./.spec. files → liteModel
+ * 3. Mixed files or implementation → primaryModel
+ *
+ * @param subtask - The subtask to evaluate
+ * @param config - Swarm configuration with model preferences
+ * @returns Model identifier string
+ */
+ export declare function selectWorkerModel(subtask: DecomposedSubtask & {
+ model?: string;
+ }, config: SwarmConfig): string;
+ //# sourceMappingURL=model-selection.d.ts.map
package/dist/model-selection.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"model-selection.d.ts","sourceRoot":"","sources":["../src/model-selection.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;GAUG;AAEH,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,gBAAgB,CAAC;AAExD;;GAEG;AACH,MAAM,WAAW,WAAW;IAC1B,YAAY,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED;;;;;;;;;;;;;GAaG;AACH,wBAAgB,iBAAiB,CAC/B,OAAO,EAAE,iBAAiB,GAAG;IAAE,KAAK,CAAC,EAAE,MAAM,CAAA;CAAE,EAC/C,MAAM,EAAE,WAAW,GAClB,MAAM,CA4BR"}