opencode-swarm-plugin 0.12.9 → 0.12.11

This diff shows the changes between the published contents of these package versions as they appear in their public registry; it is provided for informational purposes only.
package/dist/index.js CHANGED
@@ -21683,6 +21683,16 @@ var ValidationResultSchema = exports_external.object({
  errors: exports_external.array(exports_external.string()).optional(),
  extractionMethod: exports_external.string().optional()
  });
+ var FailureModeSchema = exports_external.enum([
+ "timeout",
+ "conflict",
+ "validation",
+ "tool_failure",
+ "context_overflow",
+ "dependency_blocked",
+ "user_cancelled",
+ "unknown"
+ ]);
  // src/schemas/task.ts
  var EffortLevelSchema = exports_external.enum([
  "trivial",
@@ -23777,6 +23787,16 @@ var DecompositionStrategySchema = exports_external.enum([
  "risk-based",
  "research-based"
  ]);
+ var FailureModeSchema2 = exports_external.enum([
+ "timeout",
+ "conflict",
+ "validation",
+ "tool_failure",
+ "context_overflow",
+ "dependency_blocked",
+ "user_cancelled",
+ "unknown"
+ ]);
  var OutcomeSignalsSchema = exports_external.object({
  bead_id: exports_external.string(),
  duration_ms: exports_external.number().int().min(0),
@@ -23785,7 +23805,9 @@ var OutcomeSignalsSchema = exports_external.object({
  success: exports_external.boolean(),
  files_touched: exports_external.array(exports_external.string()).default([]),
  timestamp: exports_external.string(),
- strategy: DecompositionStrategySchema.optional()
+ strategy: DecompositionStrategySchema.optional(),
+ failure_mode: FailureModeSchema2.optional(),
+ failure_details: exports_external.string().optional()
  });
  var ScoredOutcomeSchema = exports_external.object({
  signals: OutcomeSignalsSchema,
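For reference, an outcome record shaped like the extended schema might look like the sketch below. This is illustrative only: the values and bead ID are hypothetical, and schema fields not visible in this hunk are omitted. `failure_mode` and `failure_details` are the new optional fields.

```ts
// Sketch only: the shape implied by the updated OutcomeSignalsSchema above.
// failure_mode must be one of the FailureModeSchema2 values.
const failedOutcome = {
  bead_id: "bd-abc123", // hypothetical bead ID
  duration_ms: 45000,
  success: false,
  files_touched: ["src/schemas/task.ts"],
  timestamp: new Date().toISOString(),
  strategy: "file-based",
  failure_mode: "timeout",
  failure_details: "subtask exceeded its time budget while waiting on typecheck"
};
```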
@@ -24166,19 +24188,31 @@ var STRATEGIES = {
  "research",
  "investigate",
  "explore",
- "find",
+ "find out",
  "discover",
  "understand",
- "learn",
+ "learn about",
  "analyze",
- "what",
- "how",
- "why",
+ "what is",
+ "what are",
+ "how does",
+ "how do",
+ "why does",
+ "why do",
  "compare",
  "evaluate",
  "study",
  "look up",
- "search"
+ "look into",
+ "search for",
+ "dig into",
+ "figure out",
+ "debug options",
+ "debug levers",
+ "configuration options",
+ "environment variables",
+ "available options",
+ "documentation"
  ],
  guidelines: [
  "Split by information source (PDFs, repos, history, web)",
@@ -24212,8 +24246,15 @@ function selectStrategy(task) {
  for (const [strategyName, definition] of Object.entries(STRATEGIES)) {
  const name = strategyName;
  for (const keyword of definition.keywords) {
- if (taskLower.includes(keyword)) {
- scores[name] += 1;
+ if (keyword.includes(" ")) {
+ if (taskLower.includes(keyword)) {
+ scores[name] += 1;
+ }
+ } else {
+ const regex = new RegExp(`\\b${keyword}\\b`, "i");
+ if (regex.test(taskLower)) {
+ scores[name] += 1;
+ }
  }
  }
  }
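The hunk above changes keyword scoring: multi-word phrases keep substring matching, while single-word keywords now require a whole-word hit. A standalone sketch of the rule (not plugin code; the task strings are made up):

```ts
// Phrases match as substrings; single words must land on word boundaries.
function matchesKeyword(taskLower: string, keyword: string): boolean {
  if (keyword.includes(" ")) {
    return taskLower.includes(keyword);
  }
  return new RegExp(`\\b${keyword}\\b`, "i").test(taskLower);
}

matchesKeyword("research the caching layer", "research");    // true  (whole word)
matchesKeyword("update the researcher profile", "research"); // false (substring no longer counts)
matchesKeyword("figure out why ci fails", "figure out");     // true  (phrase substring)
```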
@@ -24409,51 +24450,46 @@ Before writing code:
  Begin work on your subtask now.`;
  var SUBTASK_PROMPT_V2 = `You are a swarm agent working on: **{subtask_title}**

- ## Identity
- - **Bead ID**: {bead_id}
- - **Epic ID**: {epic_id}
+ ## [IDENTITY]
+ Agent: (assigned at spawn)
+ Bead: {bead_id}
+ Epic: {epic_id}

- ## Task
+ ## [TASK]
  {subtask_description}

- ## Files (exclusive reservation)
+ ## [FILES]
+ Reserved (exclusive):
  {file_list}

  Only modify these files. Need others? Message the coordinator.

- ## Context
+ ## [CONTEXT]
  {shared_context}

  {compressed_context}

  {error_context}

- ## MANDATORY: Use These Tools
+ ## [TOOLS]
+ ### Beads
+ - beads_update (status: blocked)
+ - beads_create (new bugs)
+ - beads_close (via swarm_complete)

- ### Agent Mail - communicate with the swarm
- \`\`\`typescript
- // Report progress, ask questions, announce blockers
- agentmail_send({
- to: ["coordinator"],
- subject: "Progress update",
- body: "What you did or need",
- thread_id: "{epic_id}"
- })
- \`\`\`
+ ### Agent Mail
+ - agentmail_send (thread_id: {epic_id})

- ### Beads - track your work
- - **Blocked?** \`beads_update({ id: "{bead_id}", status: "blocked" })\`
- - **Found bug?** \`beads_create({ title: "Bug description", type: "bug" })\`
- - **Done?** \`swarm_complete({ bead_id: "{bead_id}", summary: "What you did", files_touched: [...] })\`
+ ### Completion
+ - swarm_complete (REQUIRED when done)

- ## Workflow
+ ## [OUTPUT]
+ 1. Read files first
+ 2. Implement changes
+ 3. Verify (typecheck)
+ 4. Complete with swarm_complete

- 1. **Read** the files first
- 2. **Plan** your approach (message coordinator if complex)
- 3. **Implement** the changes
- 4. **Verify** (typecheck, tests)
- 5. **Report** progress via Agent Mail
- 6. **Complete** with swarm_complete when done
+ Return: Summary of changes made

  **Never work silently.** Communicate progress and blockers immediately.

@@ -25052,6 +25088,52 @@ async function runUbsScan(files) {
  return null;
  }
  }
+ var swarm_broadcast = tool({
+ description: "Broadcast context update to all agents working on the same epic",
+ args: {
+ epic_id: tool.schema.string().describe("Epic ID (e.g., bd-abc123)"),
+ message: tool.schema.string().describe("Context update to share (what changed, what was learned)"),
+ importance: tool.schema.enum(["info", "warning", "blocker"]).default("info").describe("Priority level (default: info)"),
+ files_affected: tool.schema.array(tool.schema.string()).optional().describe("Files this context relates to")
+ },
+ async execute(args, ctx) {
+ const state = requireState(ctx.sessionID);
+ const beadId = ctx.beadId || "unknown";
+ const body = [
+ `## Context Update`,
+ "",
+ `**From**: ${state.agentName} (${beadId})`,
+ `**Priority**: ${args.importance.toUpperCase()}`,
+ "",
+ args.message,
+ "",
+ args.files_affected && args.files_affected.length > 0 ? `**Files affected**:
+ ${args.files_affected.map((f) => `- \`${f}\``).join(`
+ `)}` : ""
+ ].filter(Boolean).join(`
+ `);
+ const mailImportance = args.importance === "blocker" ? "urgent" : args.importance === "warning" ? "high" : "normal";
+ await mcpCall("send_message", {
+ project_key: state.projectKey,
+ sender_name: state.agentName,
+ to: [],
+ subject: `[${args.importance.toUpperCase()}] Context update from ${state.agentName}`,
+ body_md: body,
+ thread_id: args.epic_id,
+ importance: mailImportance,
+ ack_required: args.importance === "blocker"
+ });
+ return JSON.stringify({
+ broadcast: true,
+ epic_id: args.epic_id,
+ from: state.agentName,
+ bead_id: beadId,
+ importance: args.importance,
+ recipients: "all agents in epic",
+ ack_required: args.importance === "blocker"
+ }, null, 2);
+ }
+ });
  var swarm_complete = tool({
  description: "Mark subtask complete, release reservations, notify coordinator. Runs UBS bug scan if files_touched provided.",
  args: {
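For context, the argument payload the new swarm_broadcast tool accepts looks like the hedged sketch below; the IDs, message, and file path are invented, while the importance-to-mail mapping is taken from the code above.

```ts
// Hypothetical swarm_broadcast arguments. "blocker" maps to mail importance
// "urgent" and sets ack_required: true; "warning" maps to "high", "info" to "normal".
const broadcastArgs = {
  epic_id: "bd-abc123",
  message: "Outcome signals schema changed; re-read src/schemas/task.ts before editing.",
  importance: "warning",
  files_affected: ["src/schemas/task.ts"]
};
```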
@@ -25144,6 +25226,25 @@ var swarm_complete = tool({
  }, null, 2);
  }
  });
+ function classifyFailure(error45) {
+ const msg = (typeof error45 === "string" ? error45 : error45.message).toLowerCase();
+ if (msg.includes("timeout"))
+ return "timeout";
+ if (msg.includes("conflict") || msg.includes("reservation"))
+ return "conflict";
+ if (msg.includes("validation") || msg.includes("schema"))
+ return "validation";
+ if (msg.includes("context") || msg.includes("token"))
+ return "context_overflow";
+ if (msg.includes("blocked") || msg.includes("dependency"))
+ return "dependency_blocked";
+ if (msg.includes("cancel"))
+ return "user_cancelled";
+ if (msg.includes("tool") || msg.includes("command") || msg.includes("failed to execute")) {
+ return "tool_failure";
+ }
+ return "unknown";
+ }
  var swarm_record_outcome = tool({
  description: "Record subtask outcome for implicit feedback scoring. Tracks duration, errors, retries to learn decomposition quality.",
  args: {
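classifyFailure buckets an error string (or Error) by the first matching substring, in the order the checks appear above, so mixed messages resolve to the earliest bucket. A few illustrative inputs (the error text is made up):

```ts
classifyFailure("request timeout after 30000ms");         // "timeout"
classifyFailure(new Error("file reservation conflict"));  // "conflict"
classifyFailure("schema validation failed");              // "validation"
classifyFailure("context window exceeded token budget");  // "context_overflow"
classifyFailure("blocked on dependency bd-xyz");          // "dependency_blocked"
classifyFailure("user cancelled the run");                // "user_cancelled"
classifyFailure("failed to execute command");             // "tool_failure"
classifyFailure("segfault");                              // "unknown"
```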
@@ -25154,7 +25255,18 @@ var swarm_record_outcome = tool({
  success: tool.schema.boolean().describe("Whether the subtask succeeded"),
  files_touched: tool.schema.array(tool.schema.string()).optional().describe("Files that were modified"),
  criteria: tool.schema.array(tool.schema.string()).optional().describe("Criteria to generate feedback for (default: all default criteria)"),
- strategy: tool.schema.enum(["file-based", "feature-based", "risk-based", "research-based"]).optional().describe("Decomposition strategy used for this task")
+ strategy: tool.schema.enum(["file-based", "feature-based", "risk-based", "research-based"]).optional().describe("Decomposition strategy used for this task"),
+ failure_mode: tool.schema.enum([
+ "timeout",
+ "conflict",
+ "validation",
+ "tool_failure",
+ "context_overflow",
+ "dependency_blocked",
+ "user_cancelled",
+ "unknown"
+ ]).optional().describe("Failure classification (only when success=false). Auto-classified if not provided."),
+ failure_details: tool.schema.string().optional().describe("Detailed failure context (error message, stack trace, etc.)")
  },
  async execute(args) {
  const signals = {
@@ -25165,8 +25277,13 @@ var swarm_record_outcome = tool({
  success: args.success,
  files_touched: args.files_touched ?? [],
  timestamp: new Date().toISOString(),
- strategy: args.strategy
+ strategy: args.strategy,
+ failure_mode: args.failure_mode,
+ failure_details: args.failure_details
  };
+ if (!args.success && !args.failure_mode && args.failure_details) {
+ signals.failure_mode = classifyFailure(args.failure_details);
+ }
  const validated = OutcomeSignalsSchema.parse(signals);
  const scored = scoreImplicitFeedback(validated, DEFAULT_LEARNING_CONFIG);
  const errorStats = await globalErrorAccumulator.getErrorStats(args.bead_id);
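A hedged sketch of a failed-subtask call to swarm_record_outcome (all values invented): because failure_mode is omitted and failure_details is present, the fallback above would classify the failure before validation.

```ts
const outcomeArgs = {
  bead_id: "bd-abc123",
  duration_ms: 120000,
  success: false,
  files_touched: ["src/swarm/complete.ts"],
  strategy: "feature-based",
  failure_details: "file reservation conflict: src/swarm/complete.ts is held by another agent"
};
// With these args, signals.failure_mode becomes classifyFailure(failure_details),
// i.e. "conflict", before OutcomeSignalsSchema.parse(signals) runs.
```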
@@ -25206,6 +25323,8 @@ var swarm_record_outcome = tool({
  retry_count: args.retry_count ?? 0,
  success: args.success,
  strategy: args.strategy,
+ failure_mode: validated.failure_mode,
+ failure_details: validated.failure_details,
  accumulated_errors: errorStats.total,
  unresolved_errors: errorStats.unresolved
  },
@@ -25497,6 +25616,7 @@ var swarmTools = {
  swarm_validate_decomposition,
  swarm_status,
  swarm_progress,
+ swarm_broadcast,
  swarm_complete,
  swarm_record_outcome,
  swarm_subtask_prompt,
@@ -25507,6 +25627,327 @@ var swarmTools = {
  swarm_get_error_context,
  swarm_resolve_error
  };
+
+ // src/repo-crawl.ts
+ var GITHUB_API_URL = "https://api.github.com";
+ var GITHUB_TOKEN = process.env.GITHUB_TOKEN;
+ var DEFAULT_MAX_RESULTS = 10;
+ var DEFAULT_MAX_LENGTH = 1e4;
+ var DEFAULT_DEPTH = 2;
+
+ class RepoCrawlError extends Error {
+ statusCode;
+ endpoint;
+ constructor(message, statusCode, endpoint) {
+ super(message);
+ this.statusCode = statusCode;
+ this.endpoint = endpoint;
+ this.name = "RepoCrawlError";
+ }
+ }
+ function parseRepo(input) {
+ let normalized = input.replace(/^https?:\/\//, "").replace(/\.git$/, "").replace(/^github\.com\//, "");
+ const parts = normalized.split("/").filter(Boolean);
+ if (parts.length < 2) {
+ throw new RepoCrawlError(`Invalid repo format: "${input}". Expected "owner/repo" or GitHub URL.`);
+ }
+ const [owner, repo] = parts;
+ return { owner, repo };
+ }
+ async function githubFetch(endpoint, options = {}) {
+ const headers = {
+ Accept: "application/vnd.github.v3+json",
+ "User-Agent": "opencode-swarm-plugin",
+ ...options.headers || {}
+ };
+ if (GITHUB_TOKEN) {
+ headers.Authorization = `Bearer ${GITHUB_TOKEN}`;
+ }
+ const url2 = `${GITHUB_API_URL}${endpoint}`;
+ const response = await fetch(url2, { ...options, headers });
+ if (response.status === 403) {
+ const rateLimitRemaining = response.headers.get("x-ratelimit-remaining");
+ const rateLimitReset = response.headers.get("x-ratelimit-reset");
+ if (rateLimitRemaining === "0" && rateLimitReset) {
+ const resetDate = new Date(parseInt(rateLimitReset) * 1000);
+ throw new RepoCrawlError(`GitHub API rate limit exceeded. Resets at ${resetDate.toISOString()}. ` + `${GITHUB_TOKEN ? "Using authenticated token." : "Set GITHUB_TOKEN env var for higher limits."}`, 403, endpoint);
+ }
+ }
+ if (response.status === 404) {
+ throw new RepoCrawlError(`Resource not found: ${endpoint}`, 404, endpoint);
+ }
+ if (!response.ok) {
+ const body = await response.text();
+ throw new RepoCrawlError(`GitHub API error (${response.status}): ${body}`, response.status, endpoint);
+ }
+ return response.json();
+ }
+ function decodeContent(content, encoding) {
+ if (encoding === "base64") {
+ return Buffer.from(content, "base64").toString("utf-8");
+ }
+ return content;
+ }
+ function detectTechStack(tree) {
+ const stack = new Set;
+ const filePatterns = {
+ "package.json": "Node.js/npm",
+ "yarn.lock": "Yarn",
+ "pnpm-lock.yaml": "pnpm",
+ "bun.lockb": "Bun",
+ "Cargo.toml": "Rust",
+ "go.mod": "Go",
+ "requirements.txt": "Python/pip",
+ Pipfile: "Python/pipenv",
+ "pyproject.toml": "Python/poetry",
+ Gemfile: "Ruby/Bundler",
+ "composer.json": "PHP/Composer",
+ "pom.xml": "Java/Maven",
+ "build.gradle": "Java/Gradle",
+ "tsconfig.json": "TypeScript",
+ "next.config.js": "Next.js",
+ "nuxt.config.js": "Nuxt.js",
+ "vue.config.js": "Vue.js",
+ "angular.json": "Angular",
+ "svelte.config.js": "Svelte",
+ Dockerfile: "Docker",
+ "docker-compose.yml": "Docker Compose",
+ ".terraform": "Terraform",
+ Makefile: "Make"
+ };
+ for (const item of tree) {
+ const basename = item.path.split("/").pop() || "";
+ if (filePatterns[basename]) {
+ stack.add(filePatterns[basename]);
+ }
+ if (basename.endsWith(".rs"))
+ stack.add("Rust");
+ if (basename.endsWith(".go"))
+ stack.add("Go");
+ if (basename.endsWith(".py"))
+ stack.add("Python");
+ if (basename.endsWith(".rb"))
+ stack.add("Ruby");
+ if (basename.endsWith(".php"))
+ stack.add("PHP");
+ if (basename.endsWith(".java"))
+ stack.add("Java");
+ if (basename.endsWith(".kt"))
+ stack.add("Kotlin");
+ if (basename.endsWith(".swift"))
+ stack.add("Swift");
+ if (basename.endsWith(".ts") || basename.endsWith(".tsx"))
+ stack.add("TypeScript");
+ if (basename.endsWith(".jsx"))
+ stack.add("React");
+ }
+ return Array.from(stack).sort();
+ }
+ function truncate(text, maxLength) {
+ if (text.length <= maxLength) {
+ return text;
+ }
+ return text.slice(0, maxLength) + `
+
+ [... truncated ...]`;
+ }
+ var repo_readme = tool({
+ description: "Get README.md content from a GitHub repository",
+ args: {
+ repo: tool.schema.string().describe('Repository (e.g., "owner/repo" or GitHub URL)'),
+ maxLength: tool.schema.number().optional().describe(`Max content length (default: ${DEFAULT_MAX_LENGTH})`)
+ },
+ async execute(args, ctx) {
+ try {
+ const { owner, repo } = parseRepo(args.repo);
+ const maxLength = args.maxLength || DEFAULT_MAX_LENGTH;
+ const readme = await githubFetch(`/repos/${owner}/${repo}/readme`);
+ if (!readme.content || !readme.encoding) {
+ return "README exists but content is not available";
+ }
+ const content = decodeContent(readme.content, readme.encoding);
+ const truncated = truncate(content, maxLength);
+ return JSON.stringify({
+ repo: `${owner}/${repo}`,
+ path: readme.path,
+ size: readme.size,
+ content: truncated,
+ truncated: content.length > maxLength
+ }, null, 2);
+ } catch (error45) {
+ if (error45 instanceof RepoCrawlError) {
+ return JSON.stringify({ error: error45.message }, null, 2);
+ }
+ throw error45;
+ }
+ }
+ });
+ var repo_structure = tool({
+ description: "Get repository structure with tech stack detection (root level only by default)",
+ args: {
+ repo: tool.schema.string().describe('Repository (e.g., "owner/repo" or GitHub URL)'),
+ depth: tool.schema.number().optional().describe(`Tree depth (1=root only, 2=one level deep, etc. Default: ${DEFAULT_DEPTH})`)
+ },
+ async execute(args, ctx) {
+ try {
+ const { owner, repo } = parseRepo(args.repo);
+ const depth = args.depth || DEFAULT_DEPTH;
+ const repoInfo = await githubFetch(`/repos/${owner}/${repo}`);
+ const tree = await githubFetch(`/repos/${owner}/${repo}/git/trees/${repoInfo.default_branch}`, {
+ method: "GET"
+ });
+ const filtered = tree.tree.filter((item) => {
+ const pathDepth = item.path.split("/").length;
+ return pathDepth <= depth;
+ });
+ const techStack = detectTechStack(filtered);
+ const dirs = filtered.filter((item) => item.type === "tree").map((item) => item.path);
+ const files = filtered.filter((item) => item.type === "blob").map((item) => item.path);
+ return JSON.stringify({
+ repo: repoInfo.full_name,
+ description: repoInfo.description,
+ language: repoInfo.language,
+ stars: repoInfo.stargazers_count,
+ topics: repoInfo.topics,
+ techStack,
+ directories: dirs.slice(0, 50),
+ files: files.slice(0, 50),
+ truncated: tree.truncated || dirs.length > 50 || files.length > 50
+ }, null, 2);
+ } catch (error45) {
+ if (error45 instanceof RepoCrawlError) {
+ return JSON.stringify({ error: error45.message }, null, 2);
+ }
+ throw error45;
+ }
+ }
+ });
+ var repo_tree = tool({
+ description: "Get directory tree for a path in a repository",
+ args: {
+ repo: tool.schema.string().describe('Repository (e.g., "owner/repo" or GitHub URL)'),
+ path: tool.schema.string().optional().describe("Path in repo (default: root)"),
+ maxDepth: tool.schema.number().optional().describe(`Max depth to traverse (default: ${DEFAULT_DEPTH})`)
+ },
+ async execute(args, ctx) {
+ try {
+ const { owner, repo } = parseRepo(args.repo);
+ const targetPath = args.path || "";
+ const maxDepth = args.maxDepth || DEFAULT_DEPTH;
+ const repoInfo = await githubFetch(`/repos/${owner}/${repo}`);
+ const contents = await githubFetch(`/repos/${owner}/${repo}/contents/${targetPath}`);
+ if (!Array.isArray(contents)) {
+ return JSON.stringify({ error: "Path is a file, not a directory" });
+ }
+ const tree = [];
+ for (const item of contents) {
+ tree.push({
+ path: item.path,
+ type: item.type,
+ size: item.size
+ });
+ if (item.type === "dir" && maxDepth > 1) {
+ try {
+ const subContents = await githubFetch(`/repos/${owner}/${repo}/contents/${item.path}`);
+ if (Array.isArray(subContents)) {
+ for (const subItem of subContents.slice(0, 20)) {
+ tree.push({
+ path: subItem.path,
+ type: subItem.type,
+ size: subItem.size
+ });
+ }
+ }
+ } catch {}
+ }
+ }
+ return JSON.stringify({
+ repo: `${owner}/${repo}`,
+ path: targetPath || "(root)",
+ items: tree
+ }, null, 2);
+ } catch (error45) {
+ if (error45 instanceof RepoCrawlError) {
+ return JSON.stringify({ error: error45.message }, null, 2);
+ }
+ throw error45;
+ }
+ }
+ });
+ var repo_file = tool({
+ description: "Get file content from a GitHub repository",
+ args: {
+ repo: tool.schema.string().describe('Repository (e.g., "owner/repo" or GitHub URL)'),
+ path: tool.schema.string().describe("File path in repository"),
+ maxLength: tool.schema.number().optional().describe(`Max content length (default: ${DEFAULT_MAX_LENGTH})`)
+ },
+ async execute(args, ctx) {
+ try {
+ const { owner, repo } = parseRepo(args.repo);
+ const maxLength = args.maxLength || DEFAULT_MAX_LENGTH;
+ const file2 = await githubFetch(`/repos/${owner}/${repo}/contents/${args.path}`);
+ if (file2.type !== "file") {
+ return JSON.stringify({ error: "Path is not a file" });
+ }
+ if (!file2.content || !file2.encoding) {
+ return JSON.stringify({ error: "File content not available" });
+ }
+ const content = decodeContent(file2.content, file2.encoding);
+ const truncated = truncate(content, maxLength);
+ return JSON.stringify({
+ repo: `${owner}/${repo}`,
+ path: file2.path,
+ size: file2.size,
+ content: truncated,
+ truncated: content.length > maxLength
+ }, null, 2);
+ } catch (error45) {
+ if (error45 instanceof RepoCrawlError) {
+ return JSON.stringify({ error: error45.message }, null, 2);
+ }
+ throw error45;
+ }
+ }
+ });
+ var repo_search = tool({
+ description: "Search code in a GitHub repository",
+ args: {
+ repo: tool.schema.string().describe('Repository (e.g., "owner/repo" or GitHub URL)'),
+ query: tool.schema.string().describe("Search query (GitHub code search)"),
+ maxResults: tool.schema.number().optional().describe(`Max results (default: ${DEFAULT_MAX_RESULTS})`)
+ },
+ async execute(args, ctx) {
+ try {
+ const { owner, repo } = parseRepo(args.repo);
+ const maxResults = args.maxResults || DEFAULT_MAX_RESULTS;
+ const searchQuery = `${args.query} repo:${owner}/${repo}`;
+ const results = await githubFetch(`/search/code?q=${encodeURIComponent(searchQuery)}&per_page=${maxResults}`);
+ const items = results.items.map((item) => ({
+ path: item.path,
+ url: item.html_url,
+ matches: item.text_matches?.map((m) => m.fragment) || []
+ }));
+ return JSON.stringify({
+ repo: `${owner}/${repo}`,
+ query: args.query,
+ totalCount: results.total_count,
+ results: items
+ }, null, 2);
+ } catch (error45) {
+ if (error45 instanceof RepoCrawlError) {
+ return JSON.stringify({ error: error45.message }, null, 2);
+ }
+ throw error45;
+ }
+ }
+ });
+ var repoCrawlTools = {
+ repo_readme,
+ repo_structure,
+ repo_tree,
+ repo_file,
+ repo_search
+ };
  // src/anti-patterns.ts
  var PatternKindSchema = exports_external.enum(["pattern", "anti_pattern"]);
  var DecompositionPatternSchema = exports_external.object({
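The new src/repo-crawl.ts module above centers on parseRepo and githubFetch. A hedged sketch of how parseRepo normalizes its inputs, based on the regex chain in the code (the repository name is only an illustration):

```ts
parseRepo("vercel/next.js");                    // { owner: "vercel", repo: "next.js" }
parseRepo("https://github.com/vercel/next.js"); // { owner: "vercel", repo: "next.js" }
parseRepo("github.com/vercel/next.js.git");     // { owner: "vercel", repo: "next.js" }
parseRepo("not-a-repo");                        // throws RepoCrawlError

// The repo_* tools all take this repo shape; unauthenticated GitHub API rate
// limits apply unless GITHUB_TOKEN is set in the environment.
const readmeArgs = { repo: "vercel/next.js", maxLength: 4000 };
```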
@@ -25953,7 +26394,8 @@ var SwarmPlugin = async (input) => {
  ...beadsTools,
  ...agentMailTools,
  ...structuredTools,
- ...swarmTools
+ ...swarmTools,
+ ...repoCrawlTools
  },
  event: async ({ event }) => {
  if (event.type === "session.idle") {
@@ -25998,7 +26440,8 @@ var allTools = {
  ...beadsTools,
  ...agentMailTools,
  ...structuredTools,
- ...swarmTools
+ ...swarmTools,
+ ...repoCrawlTools
  };
  export {
  withToolFallback,
@@ -26010,6 +26453,7 @@ export {
  resetToolCache,
  resetStorage,
  requireTool,
+ repoCrawlTools,
  isToolAvailable,
  isSemanticMemoryAvailable,
  ifToolAvailable,
@@ -26055,6 +26499,7 @@ export {
  SemanticMemoryStorage,
  SUBTASK_PROMPT_V2,
  STRATEGIES,
+ RepoCrawlError,
  InMemoryStorage,
  FileReservationConflictError,
  EvaluationSchema,