opencode-swarm-plugin 0.12.9 → 0.12.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/plugin.js CHANGED
@@ -21683,6 +21683,16 @@ var ValidationResultSchema = exports_external.object({
  errors: exports_external.array(exports_external.string()).optional(),
  extractionMethod: exports_external.string().optional()
  });
+ var FailureModeSchema = exports_external.enum([
+ "timeout",
+ "conflict",
+ "validation",
+ "tool_failure",
+ "context_overflow",
+ "dependency_blocked",
+ "user_cancelled",
+ "unknown"
+ ]);
  // src/schemas/task.ts
  var EffortLevelSchema = exports_external.enum([
  "trivial",
@@ -23751,6 +23761,16 @@ var DecompositionStrategySchema = exports_external.enum([
  "risk-based",
  "research-based"
  ]);
+ var FailureModeSchema2 = exports_external.enum([
+ "timeout",
+ "conflict",
+ "validation",
+ "tool_failure",
+ "context_overflow",
+ "dependency_blocked",
+ "user_cancelled",
+ "unknown"
+ ]);
  var OutcomeSignalsSchema = exports_external.object({
  bead_id: exports_external.string(),
  duration_ms: exports_external.number().int().min(0),
@@ -23759,7 +23779,9 @@ var OutcomeSignalsSchema = exports_external.object({
  success: exports_external.boolean(),
  files_touched: exports_external.array(exports_external.string()).default([]),
  timestamp: exports_external.string(),
- strategy: DecompositionStrategySchema.optional()
+ strategy: DecompositionStrategySchema.optional(),
+ failure_mode: FailureModeSchema2.optional(),
+ failure_details: exports_external.string().optional()
  });
  var ScoredOutcomeSchema = exports_external.object({
  signals: OutcomeSignalsSchema,
@@ -24124,19 +24146,31 @@ var STRATEGIES = {
  "research",
  "investigate",
  "explore",
- "find",
+ "find out",
  "discover",
  "understand",
- "learn",
+ "learn about",
  "analyze",
- "what",
- "how",
- "why",
+ "what is",
+ "what are",
+ "how does",
+ "how do",
+ "why does",
+ "why do",
  "compare",
  "evaluate",
  "study",
  "look up",
- "search"
+ "look into",
+ "search for",
+ "dig into",
+ "figure out",
+ "debug options",
+ "debug levers",
+ "configuration options",
+ "environment variables",
+ "available options",
+ "documentation"
  ],
  guidelines: [
  "Split by information source (PDFs, repos, history, web)",
@@ -24170,8 +24204,15 @@ function selectStrategy(task) {
  for (const [strategyName, definition] of Object.entries(STRATEGIES)) {
  const name = strategyName;
  for (const keyword of definition.keywords) {
- if (taskLower.includes(keyword)) {
- scores[name] += 1;
+ if (keyword.includes(" ")) {
+ if (taskLower.includes(keyword)) {
+ scores[name] += 1;
+ }
+ } else {
+ const regex = new RegExp(`\\b${keyword}\\b`, "i");
+ if (regex.test(taskLower)) {
+ scores[name] += 1;
+ }
  }
  }
  }
@@ -24367,51 +24408,46 @@ Before writing code:
  Begin work on your subtask now.`;
  var SUBTASK_PROMPT_V2 = `You are a swarm agent working on: **{subtask_title}**

- ## Identity
- - **Bead ID**: {bead_id}
- - **Epic ID**: {epic_id}
+ ## [IDENTITY]
+ Agent: (assigned at spawn)
+ Bead: {bead_id}
+ Epic: {epic_id}

- ## Task
+ ## [TASK]
  {subtask_description}

- ## Files (exclusive reservation)
+ ## [FILES]
+ Reserved (exclusive):
  {file_list}

  Only modify these files. Need others? Message the coordinator.

- ## Context
+ ## [CONTEXT]
  {shared_context}

  {compressed_context}

  {error_context}

- ## MANDATORY: Use These Tools
+ ## [TOOLS]
+ ### Beads
+ - beads_update (status: blocked)
+ - beads_create (new bugs)
+ - beads_close (via swarm_complete)

- ### Agent Mail - communicate with the swarm
- ```typescript
- // Report progress, ask questions, announce blockers
- agentmail_send({
- to: ["coordinator"],
- subject: "Progress update",
- body: "What you did or need",
- thread_id: "{epic_id}"
- })
- ```
+ ### Agent Mail
+ - agentmail_send (thread_id: {epic_id})

- ### Beads - track your work
- - **Blocked?** `beads_update({ id: "{bead_id}", status: "blocked" })`
- - **Found bug?** `beads_create({ title: "Bug description", type: "bug" })`
- - **Done?** `swarm_complete({ bead_id: "{bead_id}", summary: "What you did", files_touched: [...] })`
+ ### Completion
+ - swarm_complete (REQUIRED when done)

- ## Workflow
+ ## [OUTPUT]
+ 1. Read files first
+ 2. Implement changes
+ 3. Verify (typecheck)
+ 4. Complete with swarm_complete

- 1. **Read** the files first
- 2. **Plan** your approach (message coordinator if complex)
- 3. **Implement** the changes
- 4. **Verify** (typecheck, tests)
- 5. **Report** progress via Agent Mail
- 6. **Complete** with swarm_complete when done
+ Return: Summary of changes made

  **Never work silently.** Communicate progress and blockers immediately.

@@ -25002,6 +25038,52 @@ async function runUbsScan(files) {
  return null;
  }
  }
+ var swarm_broadcast = tool({
+ description: "Broadcast context update to all agents working on the same epic",
+ args: {
+ epic_id: tool.schema.string().describe("Epic ID (e.g., bd-abc123)"),
+ message: tool.schema.string().describe("Context update to share (what changed, what was learned)"),
+ importance: tool.schema.enum(["info", "warning", "blocker"]).default("info").describe("Priority level (default: info)"),
+ files_affected: tool.schema.array(tool.schema.string()).optional().describe("Files this context relates to")
+ },
+ async execute(args, ctx) {
+ const state = requireState(ctx.sessionID);
+ const beadId = ctx.beadId || "unknown";
+ const body = [
+ `## Context Update`,
+ "",
+ `**From**: ${state.agentName} (${beadId})`,
+ `**Priority**: ${args.importance.toUpperCase()}`,
+ "",
+ args.message,
+ "",
+ args.files_affected && args.files_affected.length > 0 ? `**Files affected**:
+ ${args.files_affected.map((f) => `- \`${f}\``).join(`
+ `)}` : ""
+ ].filter(Boolean).join(`
+ `);
+ const mailImportance = args.importance === "blocker" ? "urgent" : args.importance === "warning" ? "high" : "normal";
+ await mcpCall("send_message", {
+ project_key: state.projectKey,
+ sender_name: state.agentName,
+ to: [],
+ subject: `[${args.importance.toUpperCase()}] Context update from ${state.agentName}`,
+ body_md: body,
+ thread_id: args.epic_id,
+ importance: mailImportance,
+ ack_required: args.importance === "blocker"
+ });
+ return JSON.stringify({
+ broadcast: true,
+ epic_id: args.epic_id,
+ from: state.agentName,
+ bead_id: beadId,
+ importance: args.importance,
+ recipients: "all agents in epic",
+ ack_required: args.importance === "blocker"
+ }, null, 2);
+ }
+ });
  var swarm_complete = tool({
  description: "Mark subtask complete, release reservations, notify coordinator. Runs UBS bug scan if files_touched provided.",
  args: {
@@ -25094,6 +25176,25 @@ var swarm_complete = tool({
  }, null, 2);
  }
  });
+ function classifyFailure(error45) {
+ const msg = (typeof error45 === "string" ? error45 : error45.message).toLowerCase();
+ if (msg.includes("timeout"))
+ return "timeout";
+ if (msg.includes("conflict") || msg.includes("reservation"))
+ return "conflict";
+ if (msg.includes("validation") || msg.includes("schema"))
+ return "validation";
+ if (msg.includes("context") || msg.includes("token"))
+ return "context_overflow";
+ if (msg.includes("blocked") || msg.includes("dependency"))
+ return "dependency_blocked";
+ if (msg.includes("cancel"))
+ return "user_cancelled";
+ if (msg.includes("tool") || msg.includes("command") || msg.includes("failed to execute")) {
+ return "tool_failure";
+ }
+ return "unknown";
+ }
  var swarm_record_outcome = tool({
  description: "Record subtask outcome for implicit feedback scoring. Tracks duration, errors, retries to learn decomposition quality.",
  args: {
@@ -25104,7 +25205,18 @@ var swarm_record_outcome = tool({
  success: tool.schema.boolean().describe("Whether the subtask succeeded"),
  files_touched: tool.schema.array(tool.schema.string()).optional().describe("Files that were modified"),
  criteria: tool.schema.array(tool.schema.string()).optional().describe("Criteria to generate feedback for (default: all default criteria)"),
- strategy: tool.schema.enum(["file-based", "feature-based", "risk-based", "research-based"]).optional().describe("Decomposition strategy used for this task")
+ strategy: tool.schema.enum(["file-based", "feature-based", "risk-based", "research-based"]).optional().describe("Decomposition strategy used for this task"),
+ failure_mode: tool.schema.enum([
+ "timeout",
+ "conflict",
+ "validation",
+ "tool_failure",
+ "context_overflow",
+ "dependency_blocked",
+ "user_cancelled",
+ "unknown"
+ ]).optional().describe("Failure classification (only when success=false). Auto-classified if not provided."),
+ failure_details: tool.schema.string().optional().describe("Detailed failure context (error message, stack trace, etc.)")
  },
  async execute(args) {
  const signals = {
@@ -25115,8 +25227,13 @@ var swarm_record_outcome = tool({
  success: args.success,
  files_touched: args.files_touched ?? [],
  timestamp: new Date().toISOString(),
- strategy: args.strategy
+ strategy: args.strategy,
+ failure_mode: args.failure_mode,
+ failure_details: args.failure_details
  };
+ if (!args.success && !args.failure_mode && args.failure_details) {
+ signals.failure_mode = classifyFailure(args.failure_details);
+ }
  const validated = OutcomeSignalsSchema.parse(signals);
  const scored = scoreImplicitFeedback(validated, DEFAULT_LEARNING_CONFIG);
  const errorStats = await globalErrorAccumulator.getErrorStats(args.bead_id);
@@ -25156,6 +25273,8 @@ var swarm_record_outcome = tool({
  retry_count: args.retry_count ?? 0,
  success: args.success,
  strategy: args.strategy,
+ failure_mode: validated.failure_mode,
+ failure_details: validated.failure_details,
  accumulated_errors: errorStats.total,
  unresolved_errors: errorStats.unresolved
  },
@@ -25447,6 +25566,7 @@ var swarmTools = {
  swarm_validate_decomposition,
  swarm_status,
  swarm_progress,
+ swarm_broadcast,
  swarm_complete,
  swarm_record_outcome,
  swarm_subtask_prompt,
@@ -25457,6 +25577,327 @@ var swarmTools = {
  swarm_get_error_context,
  swarm_resolve_error
  };
+
+ // src/repo-crawl.ts
+ var GITHUB_API_URL = "https://api.github.com";
+ var GITHUB_TOKEN = process.env.GITHUB_TOKEN;
+ var DEFAULT_MAX_RESULTS = 10;
+ var DEFAULT_MAX_LENGTH = 1e4;
+ var DEFAULT_DEPTH = 2;
+
+ class RepoCrawlError extends Error {
+ statusCode;
+ endpoint;
+ constructor(message, statusCode, endpoint) {
+ super(message);
+ this.statusCode = statusCode;
+ this.endpoint = endpoint;
+ this.name = "RepoCrawlError";
+ }
+ }
+ function parseRepo(input) {
+ let normalized = input.replace(/^https?:\/\//, "").replace(/\.git$/, "").replace(/^github\.com\//, "");
+ const parts = normalized.split("/").filter(Boolean);
+ if (parts.length < 2) {
+ throw new RepoCrawlError(`Invalid repo format: "${input}". Expected "owner/repo" or GitHub URL.`);
+ }
+ const [owner, repo] = parts;
+ return { owner, repo };
+ }
+ async function githubFetch(endpoint, options = {}) {
+ const headers = {
+ Accept: "application/vnd.github.v3+json",
+ "User-Agent": "opencode-swarm-plugin",
+ ...options.headers || {}
+ };
+ if (GITHUB_TOKEN) {
+ headers.Authorization = `Bearer ${GITHUB_TOKEN}`;
+ }
+ const url2 = `${GITHUB_API_URL}${endpoint}`;
+ const response = await fetch(url2, { ...options, headers });
+ if (response.status === 403) {
+ const rateLimitRemaining = response.headers.get("x-ratelimit-remaining");
+ const rateLimitReset = response.headers.get("x-ratelimit-reset");
+ if (rateLimitRemaining === "0" && rateLimitReset) {
+ const resetDate = new Date(parseInt(rateLimitReset) * 1000);
+ throw new RepoCrawlError(`GitHub API rate limit exceeded. Resets at ${resetDate.toISOString()}. ` + `${GITHUB_TOKEN ? "Using authenticated token." : "Set GITHUB_TOKEN env var for higher limits."}`, 403, endpoint);
+ }
+ }
+ if (response.status === 404) {
+ throw new RepoCrawlError(`Resource not found: ${endpoint}`, 404, endpoint);
+ }
+ if (!response.ok) {
+ const body = await response.text();
+ throw new RepoCrawlError(`GitHub API error (${response.status}): ${body}`, response.status, endpoint);
+ }
+ return response.json();
+ }
+ function decodeContent(content, encoding) {
+ if (encoding === "base64") {
+ return Buffer.from(content, "base64").toString("utf-8");
+ }
+ return content;
+ }
+ function detectTechStack(tree) {
+ const stack = new Set;
+ const filePatterns = {
+ "package.json": "Node.js/npm",
+ "yarn.lock": "Yarn",
+ "pnpm-lock.yaml": "pnpm",
+ "bun.lockb": "Bun",
+ "Cargo.toml": "Rust",
+ "go.mod": "Go",
+ "requirements.txt": "Python/pip",
+ Pipfile: "Python/pipenv",
+ "pyproject.toml": "Python/poetry",
+ Gemfile: "Ruby/Bundler",
+ "composer.json": "PHP/Composer",
+ "pom.xml": "Java/Maven",
+ "build.gradle": "Java/Gradle",
+ "tsconfig.json": "TypeScript",
+ "next.config.js": "Next.js",
+ "nuxt.config.js": "Nuxt.js",
+ "vue.config.js": "Vue.js",
+ "angular.json": "Angular",
+ "svelte.config.js": "Svelte",
+ Dockerfile: "Docker",
+ "docker-compose.yml": "Docker Compose",
+ ".terraform": "Terraform",
+ Makefile: "Make"
+ };
+ for (const item of tree) {
+ const basename = item.path.split("/").pop() || "";
+ if (filePatterns[basename]) {
+ stack.add(filePatterns[basename]);
+ }
+ if (basename.endsWith(".rs"))
+ stack.add("Rust");
+ if (basename.endsWith(".go"))
+ stack.add("Go");
+ if (basename.endsWith(".py"))
+ stack.add("Python");
+ if (basename.endsWith(".rb"))
+ stack.add("Ruby");
+ if (basename.endsWith(".php"))
+ stack.add("PHP");
+ if (basename.endsWith(".java"))
+ stack.add("Java");
+ if (basename.endsWith(".kt"))
+ stack.add("Kotlin");
+ if (basename.endsWith(".swift"))
+ stack.add("Swift");
+ if (basename.endsWith(".ts") || basename.endsWith(".tsx"))
+ stack.add("TypeScript");
+ if (basename.endsWith(".jsx"))
+ stack.add("React");
+ }
+ return Array.from(stack).sort();
+ }
+ function truncate(text, maxLength) {
+ if (text.length <= maxLength) {
+ return text;
+ }
+ return text.slice(0, maxLength) + `
+
+ [... truncated ...]`;
+ }
+ var repo_readme = tool({
+ description: "Get README.md content from a GitHub repository",
+ args: {
+ repo: tool.schema.string().describe('Repository (e.g., "owner/repo" or GitHub URL)'),
+ maxLength: tool.schema.number().optional().describe(`Max content length (default: ${DEFAULT_MAX_LENGTH})`)
+ },
+ async execute(args, ctx) {
+ try {
+ const { owner, repo } = parseRepo(args.repo);
+ const maxLength = args.maxLength || DEFAULT_MAX_LENGTH;
+ const readme = await githubFetch(`/repos/${owner}/${repo}/readme`);
+ if (!readme.content || !readme.encoding) {
+ return "README exists but content is not available";
+ }
+ const content = decodeContent(readme.content, readme.encoding);
+ const truncated = truncate(content, maxLength);
+ return JSON.stringify({
+ repo: `${owner}/${repo}`,
+ path: readme.path,
+ size: readme.size,
+ content: truncated,
+ truncated: content.length > maxLength
+ }, null, 2);
+ } catch (error45) {
+ if (error45 instanceof RepoCrawlError) {
+ return JSON.stringify({ error: error45.message }, null, 2);
+ }
+ throw error45;
+ }
+ }
+ });
+ var repo_structure = tool({
+ description: "Get repository structure with tech stack detection (root level only by default)",
+ args: {
+ repo: tool.schema.string().describe('Repository (e.g., "owner/repo" or GitHub URL)'),
+ depth: tool.schema.number().optional().describe(`Tree depth (1=root only, 2=one level deep, etc. Default: ${DEFAULT_DEPTH})`)
+ },
+ async execute(args, ctx) {
+ try {
+ const { owner, repo } = parseRepo(args.repo);
+ const depth = args.depth || DEFAULT_DEPTH;
+ const repoInfo = await githubFetch(`/repos/${owner}/${repo}`);
+ const tree = await githubFetch(`/repos/${owner}/${repo}/git/trees/${repoInfo.default_branch}`, {
+ method: "GET"
+ });
+ const filtered = tree.tree.filter((item) => {
+ const pathDepth = item.path.split("/").length;
+ return pathDepth <= depth;
+ });
+ const techStack = detectTechStack(filtered);
+ const dirs = filtered.filter((item) => item.type === "tree").map((item) => item.path);
+ const files = filtered.filter((item) => item.type === "blob").map((item) => item.path);
+ return JSON.stringify({
+ repo: repoInfo.full_name,
+ description: repoInfo.description,
+ language: repoInfo.language,
+ stars: repoInfo.stargazers_count,
+ topics: repoInfo.topics,
+ techStack,
+ directories: dirs.slice(0, 50),
+ files: files.slice(0, 50),
+ truncated: tree.truncated || dirs.length > 50 || files.length > 50
+ }, null, 2);
+ } catch (error45) {
+ if (error45 instanceof RepoCrawlError) {
+ return JSON.stringify({ error: error45.message }, null, 2);
+ }
+ throw error45;
+ }
+ }
+ });
+ var repo_tree = tool({
+ description: "Get directory tree for a path in a repository",
+ args: {
+ repo: tool.schema.string().describe('Repository (e.g., "owner/repo" or GitHub URL)'),
+ path: tool.schema.string().optional().describe("Path in repo (default: root)"),
+ maxDepth: tool.schema.number().optional().describe(`Max depth to traverse (default: ${DEFAULT_DEPTH})`)
+ },
+ async execute(args, ctx) {
+ try {
+ const { owner, repo } = parseRepo(args.repo);
+ const targetPath = args.path || "";
+ const maxDepth = args.maxDepth || DEFAULT_DEPTH;
+ const repoInfo = await githubFetch(`/repos/${owner}/${repo}`);
+ const contents = await githubFetch(`/repos/${owner}/${repo}/contents/${targetPath}`);
+ if (!Array.isArray(contents)) {
+ return JSON.stringify({ error: "Path is a file, not a directory" });
+ }
+ const tree = [];
+ for (const item of contents) {
+ tree.push({
+ path: item.path,
+ type: item.type,
+ size: item.size
+ });
+ if (item.type === "dir" && maxDepth > 1) {
+ try {
+ const subContents = await githubFetch(`/repos/${owner}/${repo}/contents/${item.path}`);
+ if (Array.isArray(subContents)) {
+ for (const subItem of subContents.slice(0, 20)) {
+ tree.push({
+ path: subItem.path,
+ type: subItem.type,
+ size: subItem.size
+ });
+ }
+ }
+ } catch {}
+ }
+ }
+ return JSON.stringify({
+ repo: `${owner}/${repo}`,
+ path: targetPath || "(root)",
+ items: tree
+ }, null, 2);
+ } catch (error45) {
+ if (error45 instanceof RepoCrawlError) {
+ return JSON.stringify({ error: error45.message }, null, 2);
+ }
+ throw error45;
+ }
+ }
+ });
+ var repo_file = tool({
+ description: "Get file content from a GitHub repository",
+ args: {
+ repo: tool.schema.string().describe('Repository (e.g., "owner/repo" or GitHub URL)'),
+ path: tool.schema.string().describe("File path in repository"),
+ maxLength: tool.schema.number().optional().describe(`Max content length (default: ${DEFAULT_MAX_LENGTH})`)
+ },
+ async execute(args, ctx) {
+ try {
+ const { owner, repo } = parseRepo(args.repo);
+ const maxLength = args.maxLength || DEFAULT_MAX_LENGTH;
+ const file2 = await githubFetch(`/repos/${owner}/${repo}/contents/${args.path}`);
+ if (file2.type !== "file") {
+ return JSON.stringify({ error: "Path is not a file" });
+ }
+ if (!file2.content || !file2.encoding) {
+ return JSON.stringify({ error: "File content not available" });
+ }
+ const content = decodeContent(file2.content, file2.encoding);
+ const truncated = truncate(content, maxLength);
+ return JSON.stringify({
+ repo: `${owner}/${repo}`,
+ path: file2.path,
+ size: file2.size,
+ content: truncated,
+ truncated: content.length > maxLength
+ }, null, 2);
+ } catch (error45) {
+ if (error45 instanceof RepoCrawlError) {
+ return JSON.stringify({ error: error45.message }, null, 2);
+ }
+ throw error45;
+ }
+ }
+ });
+ var repo_search = tool({
+ description: "Search code in a GitHub repository",
+ args: {
+ repo: tool.schema.string().describe('Repository (e.g., "owner/repo" or GitHub URL)'),
+ query: tool.schema.string().describe("Search query (GitHub code search)"),
+ maxResults: tool.schema.number().optional().describe(`Max results (default: ${DEFAULT_MAX_RESULTS})`)
+ },
+ async execute(args, ctx) {
+ try {
+ const { owner, repo } = parseRepo(args.repo);
+ const maxResults = args.maxResults || DEFAULT_MAX_RESULTS;
+ const searchQuery = `${args.query} repo:${owner}/${repo}`;
+ const results = await githubFetch(`/search/code?q=${encodeURIComponent(searchQuery)}&per_page=${maxResults}`);
+ const items = results.items.map((item) => ({
+ path: item.path,
+ url: item.html_url,
+ matches: item.text_matches?.map((m) => m.fragment) || []
+ }));
+ return JSON.stringify({
+ repo: `${owner}/${repo}`,
+ query: args.query,
+ totalCount: results.total_count,
+ results: items
+ }, null, 2);
+ } catch (error45) {
+ if (error45 instanceof RepoCrawlError) {
+ return JSON.stringify({ error: error45.message }, null, 2);
+ }
+ throw error45;
+ }
+ }
+ });
+ var repoCrawlTools = {
+ repo_readme,
+ repo_structure,
+ repo_tree,
+ repo_file,
+ repo_search
+ };
  // src/anti-patterns.ts
  var PatternKindSchema = exports_external.enum(["pattern", "anti_pattern"]);
  var DecompositionPatternSchema = exports_external.object({
@@ -25583,7 +26024,8 @@ var SwarmPlugin = async (input) => {
  ...beadsTools,
  ...agentMailTools,
  ...structuredTools,
- ...swarmTools
+ ...swarmTools,
+ ...repoCrawlTools
  },
  event: async ({ event }) => {
  if (event.type === "session.idle") {
@@ -25627,7 +26069,8 @@ var allTools = {
  ...beadsTools,
  ...agentMailTools,
  ...structuredTools,
- ...swarmTools
+ ...swarmTools,
+ ...repoCrawlTools
  };
  export {
  SwarmPlugin
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "opencode-swarm-plugin",
- "version": "0.12.9",
+ "version": "0.12.11",
  "description": "Multi-agent swarm coordination for OpenCode with learning capabilities, beads integration, and Agent Mail",
  "type": "module",
  "main": "dist/index.js",