opencode-swarm-plugin 0.12.10 → 0.12.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.beads/issues.jsonl +7 -7
- package/dist/index.js +459 -33
- package/dist/plugin.js +457 -33
- package/package.json +1 -1
- package/src/index.ts +21 -0
- package/src/learning.ts +19 -0
- package/src/repo-crawl.ts +610 -0
- package/src/schemas/evaluation.ts +21 -0
- package/src/swarm.ts +173 -29
package/dist/plugin.js
CHANGED
@@ -21683,6 +21683,16 @@ var ValidationResultSchema = exports_external.object({
   errors: exports_external.array(exports_external.string()).optional(),
   extractionMethod: exports_external.string().optional()
 });
+var FailureModeSchema = exports_external.enum([
+  "timeout",
+  "conflict",
+  "validation",
+  "tool_failure",
+  "context_overflow",
+  "dependency_blocked",
+  "user_cancelled",
+  "unknown"
+]);
 // src/schemas/task.ts
 var EffortLevelSchema = exports_external.enum([
   "trivial",
@@ -23751,6 +23761,16 @@ var DecompositionStrategySchema = exports_external.enum([
   "risk-based",
   "research-based"
 ]);
+var FailureModeSchema2 = exports_external.enum([
+  "timeout",
+  "conflict",
+  "validation",
+  "tool_failure",
+  "context_overflow",
+  "dependency_blocked",
+  "user_cancelled",
+  "unknown"
+]);
 var OutcomeSignalsSchema = exports_external.object({
   bead_id: exports_external.string(),
   duration_ms: exports_external.number().int().min(0),
@@ -23759,7 +23779,9 @@ var OutcomeSignalsSchema = exports_external.object({
   success: exports_external.boolean(),
   files_touched: exports_external.array(exports_external.string()).default([]),
   timestamp: exports_external.string(),
-  strategy: DecompositionStrategySchema.optional()
+  strategy: DecompositionStrategySchema.optional(),
+  failure_mode: FailureModeSchema2.optional(),
+  failure_details: exports_external.string().optional()
 });
 var ScoredOutcomeSchema = exports_external.object({
   signals: OutcomeSignalsSchema,
@@ -24386,51 +24408,46 @@ Before writing code:
 Begin work on your subtask now.`;
 var SUBTASK_PROMPT_V2 = `You are a swarm agent working on: **{subtask_title}**

-##
-
-
+## [IDENTITY]
+Agent: (assigned at spawn)
+Bead: {bead_id}
+Epic: {epic_id}

-##
+## [TASK]
 {subtask_description}

-##
+## [FILES]
+Reserved (exclusive):
 {file_list}

 Only modify these files. Need others? Message the coordinator.

-##
+## [CONTEXT]
 {shared_context}

 {compressed_context}

 {error_context}

-##
+## [TOOLS]
+### Beads
+- beads_update (status: blocked)
+- beads_create (new bugs)
+- beads_close (via swarm_complete)

-### Agent Mail
-
-// Report progress, ask questions, announce blockers
-agentmail_send({
-  to: ["coordinator"],
-  subject: "Progress update",
-  body: "What you did or need",
-  thread_id: "{epic_id}"
-})
-\`\`\`
+### Agent Mail
+- agentmail_send (thread_id: {epic_id})

-###
--
-- **Found bug?** \`beads_create({ title: "Bug description", type: "bug" })\`
-- **Done?** \`swarm_complete({ bead_id: "{bead_id}", summary: "What you did", files_touched: [...] })\`
+### Completion
+- swarm_complete (REQUIRED when done)

-##
+## [OUTPUT]
+1. Read files first
+2. Implement changes
+3. Verify (typecheck)
+4. Complete with swarm_complete

-
-2. **Plan** your approach (message coordinator if complex)
-3. **Implement** the changes
-4. **Verify** (typecheck, tests)
-5. **Report** progress via Agent Mail
-6. **Complete** with swarm_complete when done
+Return: Summary of changes made

 **Never work silently.** Communicate progress and blockers immediately.

@@ -25021,6 +25038,52 @@ async function runUbsScan(files) {
     return null;
   }
 }
+var swarm_broadcast = tool({
+  description: "Broadcast context update to all agents working on the same epic",
+  args: {
+    epic_id: tool.schema.string().describe("Epic ID (e.g., bd-abc123)"),
+    message: tool.schema.string().describe("Context update to share (what changed, what was learned)"),
+    importance: tool.schema.enum(["info", "warning", "blocker"]).default("info").describe("Priority level (default: info)"),
+    files_affected: tool.schema.array(tool.schema.string()).optional().describe("Files this context relates to")
+  },
+  async execute(args, ctx) {
+    const state = requireState(ctx.sessionID);
+    const beadId = ctx.beadId || "unknown";
+    const body = [
+      `## Context Update`,
+      "",
+      `**From**: ${state.agentName} (${beadId})`,
+      `**Priority**: ${args.importance.toUpperCase()}`,
+      "",
+      args.message,
+      "",
+      args.files_affected && args.files_affected.length > 0 ? `**Files affected**:
+${args.files_affected.map((f) => `- \`${f}\``).join(`
+`)}` : ""
+    ].filter(Boolean).join(`
+`);
+    const mailImportance = args.importance === "blocker" ? "urgent" : args.importance === "warning" ? "high" : "normal";
+    await mcpCall("send_message", {
+      project_key: state.projectKey,
+      sender_name: state.agentName,
+      to: [],
+      subject: `[${args.importance.toUpperCase()}] Context update from ${state.agentName}`,
+      body_md: body,
+      thread_id: args.epic_id,
+      importance: mailImportance,
+      ack_required: args.importance === "blocker"
+    });
+    return JSON.stringify({
+      broadcast: true,
+      epic_id: args.epic_id,
+      from: state.agentName,
+      bead_id: beadId,
+      importance: args.importance,
+      recipients: "all agents in epic",
+      ack_required: args.importance === "blocker"
+    }, null, 2);
+  }
+});
 var swarm_complete = tool({
   description: "Mark subtask complete, release reservations, notify coordinator. Runs UBS bug scan if files_touched provided.",
   args: {
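The new swarm_broadcast tool fans a context update out to every agent on an epic thread via Agent Mail, mapping a "blocker" broadcast to urgent mail with a required acknowledgement. A minimal sketch of the arguments it accepts follows; the values are hypothetical, only the field names and the importance enum come from the schema above.

```ts
// Illustrative swarm_broadcast arguments (hypothetical values; shapes follow the args schema above).
const broadcastArgs = {
  epic_id: "bd-abc123",
  message: "Auth middleware now throws on expired tokens; downstream handlers must catch it.",
  importance: "warning" as const, // "info" | "warning" | "blocker"; "blocker" => urgent mail + ack_required
  files_affected: ["src/middleware/auth.ts"],
};
```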
@@ -25113,6 +25176,25 @@ var swarm_complete = tool({
     }, null, 2);
   }
 });
+function classifyFailure(error45) {
+  const msg = (typeof error45 === "string" ? error45 : error45.message).toLowerCase();
+  if (msg.includes("timeout"))
+    return "timeout";
+  if (msg.includes("conflict") || msg.includes("reservation"))
+    return "conflict";
+  if (msg.includes("validation") || msg.includes("schema"))
+    return "validation";
+  if (msg.includes("context") || msg.includes("token"))
+    return "context_overflow";
+  if (msg.includes("blocked") || msg.includes("dependency"))
+    return "dependency_blocked";
+  if (msg.includes("cancel"))
+    return "user_cancelled";
+  if (msg.includes("tool") || msg.includes("command") || msg.includes("failed to execute")) {
+    return "tool_failure";
+  }
+  return "unknown";
+}
 var swarm_record_outcome = tool({
   description: "Record subtask outcome for implicit feedback scoring. Tracks duration, errors, retries to learn decomposition quality.",
   args: {
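classifyFailure is a plain substring heuristic over the lowercased error message, applied in the order shown above. A short sketch of the classifications that ordering implies; the declare line exists only so the snippet type-checks, since the function is internal to the bundle.

```ts
// Internal to the bundle; declared here only so the sketch compiles.
declare function classifyFailure(error: string | Error): string;

// Expected results, following the substring checks above:
classifyFailure("request timeout after 120s");            // "timeout"
classifyFailure(new Error("file reservation conflict"));  // "conflict"
classifyFailure("zod schema validation failed");          // "validation"
classifyFailure("bash command failed to execute");        // "tool_failure"
classifyFailure("segfault in native module");             // "unknown" (no keyword matched)
```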
@@ -25123,7 +25205,18 @@ var swarm_record_outcome = tool({
     success: tool.schema.boolean().describe("Whether the subtask succeeded"),
     files_touched: tool.schema.array(tool.schema.string()).optional().describe("Files that were modified"),
     criteria: tool.schema.array(tool.schema.string()).optional().describe("Criteria to generate feedback for (default: all default criteria)"),
-    strategy: tool.schema.enum(["file-based", "feature-based", "risk-based", "research-based"]).optional().describe("Decomposition strategy used for this task")
+    strategy: tool.schema.enum(["file-based", "feature-based", "risk-based", "research-based"]).optional().describe("Decomposition strategy used for this task"),
+    failure_mode: tool.schema.enum([
+      "timeout",
+      "conflict",
+      "validation",
+      "tool_failure",
+      "context_overflow",
+      "dependency_blocked",
+      "user_cancelled",
+      "unknown"
+    ]).optional().describe("Failure classification (only when success=false). Auto-classified if not provided."),
+    failure_details: tool.schema.string().optional().describe("Detailed failure context (error message, stack trace, etc.)")
   },
   async execute(args) {
     const signals = {
@@ -25134,8 +25227,13 @@ var swarm_record_outcome = tool({
       success: args.success,
       files_touched: args.files_touched ?? [],
       timestamp: new Date().toISOString(),
-      strategy: args.strategy
+      strategy: args.strategy,
+      failure_mode: args.failure_mode,
+      failure_details: args.failure_details
     };
+    if (!args.success && !args.failure_mode && args.failure_details) {
+      signals.failure_mode = classifyFailure(args.failure_details);
+    }
     const validated = OutcomeSignalsSchema.parse(signals);
     const scored = scoreImplicitFeedback(validated, DEFAULT_LEARNING_CONFIG);
     const errorStats = await globalErrorAccumulator.getErrorStats(args.bead_id);
@@ -25175,6 +25273,8 @@ var swarm_record_outcome = tool({
       retry_count: args.retry_count ?? 0,
       success: args.success,
       strategy: args.strategy,
+      failure_mode: validated.failure_mode,
+      failure_details: validated.failure_details,
       accumulated_errors: errorStats.total,
       unresolved_errors: errorStats.unresolved
     },
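With the new arguments, a failed outcome can carry an explicit failure_mode, or just failure_details, in which case the execute body above classifies it before OutcomeSignalsSchema validation. An illustrative argument object (hypothetical values):

```ts
// Illustrative swarm_record_outcome arguments for a failed subtask (hypothetical values).
const outcomeArgs = {
  bead_id: "bd-abc123",
  duration_ms: 184000,
  retry_count: 1,
  success: false,
  files_touched: ["src/swarm.ts"],
  strategy: "file-based" as const,
  // failure_mode deliberately omitted: with failure_details present it is
  // auto-classified, and the "timeout" substring below would yield "timeout".
  failure_details: "agent session timeout while waiting on typecheck",
};
```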
@@ -25466,6 +25566,7 @@ var swarmTools = {
   swarm_validate_decomposition,
   swarm_status,
   swarm_progress,
+  swarm_broadcast,
   swarm_complete,
   swarm_record_outcome,
   swarm_subtask_prompt,
@@ -25476,6 +25577,327 @@ var swarmTools = {
   swarm_get_error_context,
   swarm_resolve_error
 };
+
+// src/repo-crawl.ts
+var GITHUB_API_URL = "https://api.github.com";
+var GITHUB_TOKEN = process.env.GITHUB_TOKEN;
+var DEFAULT_MAX_RESULTS = 10;
+var DEFAULT_MAX_LENGTH = 1e4;
+var DEFAULT_DEPTH = 2;
+
+class RepoCrawlError extends Error {
+  statusCode;
+  endpoint;
+  constructor(message, statusCode, endpoint) {
+    super(message);
+    this.statusCode = statusCode;
+    this.endpoint = endpoint;
+    this.name = "RepoCrawlError";
+  }
+}
+function parseRepo(input) {
+  let normalized = input.replace(/^https?:\/\//, "").replace(/\.git$/, "").replace(/^github\.com\//, "");
+  const parts = normalized.split("/").filter(Boolean);
+  if (parts.length < 2) {
+    throw new RepoCrawlError(`Invalid repo format: "${input}". Expected "owner/repo" or GitHub URL.`);
+  }
+  const [owner, repo] = parts;
+  return { owner, repo };
+}
+async function githubFetch(endpoint, options = {}) {
+  const headers = {
+    Accept: "application/vnd.github.v3+json",
+    "User-Agent": "opencode-swarm-plugin",
+    ...options.headers || {}
+  };
+  if (GITHUB_TOKEN) {
+    headers.Authorization = `Bearer ${GITHUB_TOKEN}`;
+  }
+  const url2 = `${GITHUB_API_URL}${endpoint}`;
+  const response = await fetch(url2, { ...options, headers });
+  if (response.status === 403) {
+    const rateLimitRemaining = response.headers.get("x-ratelimit-remaining");
+    const rateLimitReset = response.headers.get("x-ratelimit-reset");
+    if (rateLimitRemaining === "0" && rateLimitReset) {
+      const resetDate = new Date(parseInt(rateLimitReset) * 1000);
+      throw new RepoCrawlError(`GitHub API rate limit exceeded. Resets at ${resetDate.toISOString()}. ` + `${GITHUB_TOKEN ? "Using authenticated token." : "Set GITHUB_TOKEN env var for higher limits."}`, 403, endpoint);
+    }
+  }
+  if (response.status === 404) {
+    throw new RepoCrawlError(`Resource not found: ${endpoint}`, 404, endpoint);
+  }
+  if (!response.ok) {
+    const body = await response.text();
+    throw new RepoCrawlError(`GitHub API error (${response.status}): ${body}`, response.status, endpoint);
+  }
+  return response.json();
+}
+function decodeContent(content, encoding) {
+  if (encoding === "base64") {
+    return Buffer.from(content, "base64").toString("utf-8");
+  }
+  return content;
+}
+function detectTechStack(tree) {
+  const stack = new Set;
+  const filePatterns = {
+    "package.json": "Node.js/npm",
+    "yarn.lock": "Yarn",
+    "pnpm-lock.yaml": "pnpm",
+    "bun.lockb": "Bun",
+    "Cargo.toml": "Rust",
+    "go.mod": "Go",
+    "requirements.txt": "Python/pip",
+    Pipfile: "Python/pipenv",
+    "pyproject.toml": "Python/poetry",
+    Gemfile: "Ruby/Bundler",
+    "composer.json": "PHP/Composer",
+    "pom.xml": "Java/Maven",
+    "build.gradle": "Java/Gradle",
+    "tsconfig.json": "TypeScript",
+    "next.config.js": "Next.js",
+    "nuxt.config.js": "Nuxt.js",
+    "vue.config.js": "Vue.js",
+    "angular.json": "Angular",
+    "svelte.config.js": "Svelte",
+    Dockerfile: "Docker",
+    "docker-compose.yml": "Docker Compose",
+    ".terraform": "Terraform",
+    Makefile: "Make"
+  };
+  for (const item of tree) {
+    const basename = item.path.split("/").pop() || "";
+    if (filePatterns[basename]) {
+      stack.add(filePatterns[basename]);
+    }
+    if (basename.endsWith(".rs"))
+      stack.add("Rust");
+    if (basename.endsWith(".go"))
+      stack.add("Go");
+    if (basename.endsWith(".py"))
+      stack.add("Python");
+    if (basename.endsWith(".rb"))
+      stack.add("Ruby");
+    if (basename.endsWith(".php"))
+      stack.add("PHP");
+    if (basename.endsWith(".java"))
+      stack.add("Java");
+    if (basename.endsWith(".kt"))
+      stack.add("Kotlin");
+    if (basename.endsWith(".swift"))
+      stack.add("Swift");
+    if (basename.endsWith(".ts") || basename.endsWith(".tsx"))
+      stack.add("TypeScript");
+    if (basename.endsWith(".jsx"))
+      stack.add("React");
+  }
+  return Array.from(stack).sort();
+}
+function truncate(text, maxLength) {
+  if (text.length <= maxLength) {
+    return text;
+  }
+  return text.slice(0, maxLength) + `
+
+[... truncated ...]`;
+}
+var repo_readme = tool({
+  description: "Get README.md content from a GitHub repository",
+  args: {
+    repo: tool.schema.string().describe('Repository (e.g., "owner/repo" or GitHub URL)'),
+    maxLength: tool.schema.number().optional().describe(`Max content length (default: ${DEFAULT_MAX_LENGTH})`)
+  },
+  async execute(args, ctx) {
+    try {
+      const { owner, repo } = parseRepo(args.repo);
+      const maxLength = args.maxLength || DEFAULT_MAX_LENGTH;
+      const readme = await githubFetch(`/repos/${owner}/${repo}/readme`);
+      if (!readme.content || !readme.encoding) {
+        return "README exists but content is not available";
+      }
+      const content = decodeContent(readme.content, readme.encoding);
+      const truncated = truncate(content, maxLength);
+      return JSON.stringify({
+        repo: `${owner}/${repo}`,
+        path: readme.path,
+        size: readme.size,
+        content: truncated,
+        truncated: content.length > maxLength
+      }, null, 2);
+    } catch (error45) {
+      if (error45 instanceof RepoCrawlError) {
+        return JSON.stringify({ error: error45.message }, null, 2);
+      }
+      throw error45;
+    }
+  }
+});
+var repo_structure = tool({
+  description: "Get repository structure with tech stack detection (root level only by default)",
+  args: {
+    repo: tool.schema.string().describe('Repository (e.g., "owner/repo" or GitHub URL)'),
+    depth: tool.schema.number().optional().describe(`Tree depth (1=root only, 2=one level deep, etc. Default: ${DEFAULT_DEPTH})`)
+  },
+  async execute(args, ctx) {
+    try {
+      const { owner, repo } = parseRepo(args.repo);
+      const depth = args.depth || DEFAULT_DEPTH;
+      const repoInfo = await githubFetch(`/repos/${owner}/${repo}`);
+      const tree = await githubFetch(`/repos/${owner}/${repo}/git/trees/${repoInfo.default_branch}`, {
+        method: "GET"
+      });
+      const filtered = tree.tree.filter((item) => {
+        const pathDepth = item.path.split("/").length;
+        return pathDepth <= depth;
+      });
+      const techStack = detectTechStack(filtered);
+      const dirs = filtered.filter((item) => item.type === "tree").map((item) => item.path);
+      const files = filtered.filter((item) => item.type === "blob").map((item) => item.path);
+      return JSON.stringify({
+        repo: repoInfo.full_name,
+        description: repoInfo.description,
+        language: repoInfo.language,
+        stars: repoInfo.stargazers_count,
+        topics: repoInfo.topics,
+        techStack,
+        directories: dirs.slice(0, 50),
+        files: files.slice(0, 50),
+        truncated: tree.truncated || dirs.length > 50 || files.length > 50
+      }, null, 2);
+    } catch (error45) {
+      if (error45 instanceof RepoCrawlError) {
+        return JSON.stringify({ error: error45.message }, null, 2);
+      }
+      throw error45;
+    }
+  }
+});
+var repo_tree = tool({
+  description: "Get directory tree for a path in a repository",
+  args: {
+    repo: tool.schema.string().describe('Repository (e.g., "owner/repo" or GitHub URL)'),
+    path: tool.schema.string().optional().describe("Path in repo (default: root)"),
+    maxDepth: tool.schema.number().optional().describe(`Max depth to traverse (default: ${DEFAULT_DEPTH})`)
+  },
+  async execute(args, ctx) {
+    try {
+      const { owner, repo } = parseRepo(args.repo);
+      const targetPath = args.path || "";
+      const maxDepth = args.maxDepth || DEFAULT_DEPTH;
+      const repoInfo = await githubFetch(`/repos/${owner}/${repo}`);
+      const contents = await githubFetch(`/repos/${owner}/${repo}/contents/${targetPath}`);
+      if (!Array.isArray(contents)) {
+        return JSON.stringify({ error: "Path is a file, not a directory" });
+      }
+      const tree = [];
+      for (const item of contents) {
+        tree.push({
+          path: item.path,
+          type: item.type,
+          size: item.size
+        });
+        if (item.type === "dir" && maxDepth > 1) {
+          try {
+            const subContents = await githubFetch(`/repos/${owner}/${repo}/contents/${item.path}`);
+            if (Array.isArray(subContents)) {
+              for (const subItem of subContents.slice(0, 20)) {
+                tree.push({
+                  path: subItem.path,
+                  type: subItem.type,
+                  size: subItem.size
+                });
+              }
+            }
+          } catch {}
+        }
+      }
+      return JSON.stringify({
+        repo: `${owner}/${repo}`,
+        path: targetPath || "(root)",
+        items: tree
+      }, null, 2);
+    } catch (error45) {
+      if (error45 instanceof RepoCrawlError) {
+        return JSON.stringify({ error: error45.message }, null, 2);
+      }
+      throw error45;
+    }
+  }
+});
+var repo_file = tool({
+  description: "Get file content from a GitHub repository",
+  args: {
+    repo: tool.schema.string().describe('Repository (e.g., "owner/repo" or GitHub URL)'),
+    path: tool.schema.string().describe("File path in repository"),
+    maxLength: tool.schema.number().optional().describe(`Max content length (default: ${DEFAULT_MAX_LENGTH})`)
+  },
+  async execute(args, ctx) {
+    try {
+      const { owner, repo } = parseRepo(args.repo);
+      const maxLength = args.maxLength || DEFAULT_MAX_LENGTH;
+      const file2 = await githubFetch(`/repos/${owner}/${repo}/contents/${args.path}`);
+      if (file2.type !== "file") {
+        return JSON.stringify({ error: "Path is not a file" });
+      }
+      if (!file2.content || !file2.encoding) {
+        return JSON.stringify({ error: "File content not available" });
+      }
+      const content = decodeContent(file2.content, file2.encoding);
+      const truncated = truncate(content, maxLength);
+      return JSON.stringify({
+        repo: `${owner}/${repo}`,
+        path: file2.path,
+        size: file2.size,
+        content: truncated,
+        truncated: content.length > maxLength
+      }, null, 2);
+    } catch (error45) {
+      if (error45 instanceof RepoCrawlError) {
+        return JSON.stringify({ error: error45.message }, null, 2);
+      }
+      throw error45;
+    }
+  }
+});
+var repo_search = tool({
+  description: "Search code in a GitHub repository",
+  args: {
+    repo: tool.schema.string().describe('Repository (e.g., "owner/repo" or GitHub URL)'),
+    query: tool.schema.string().describe("Search query (GitHub code search)"),
+    maxResults: tool.schema.number().optional().describe(`Max results (default: ${DEFAULT_MAX_RESULTS})`)
+  },
+  async execute(args, ctx) {
+    try {
+      const { owner, repo } = parseRepo(args.repo);
+      const maxResults = args.maxResults || DEFAULT_MAX_RESULTS;
+      const searchQuery = `${args.query} repo:${owner}/${repo}`;
+      const results = await githubFetch(`/search/code?q=${encodeURIComponent(searchQuery)}&per_page=${maxResults}`);
+      const items = results.items.map((item) => ({
+        path: item.path,
+        url: item.html_url,
+        matches: item.text_matches?.map((m) => m.fragment) || []
+      }));
+      return JSON.stringify({
+        repo: `${owner}/${repo}`,
+        query: args.query,
+        totalCount: results.total_count,
+        results: items
+      }, null, 2);
+    } catch (error45) {
+      if (error45 instanceof RepoCrawlError) {
+        return JSON.stringify({ error: error45.message }, null, 2);
+      }
+      throw error45;
+    }
+  }
+});
+var repoCrawlTools = {
+  repo_readme,
+  repo_structure,
+  repo_tree,
+  repo_file,
+  repo_search
+};
 // src/anti-patterns.ts
 var PatternKindSchema = exports_external.enum(["pattern", "anti_pattern"]);
 var DecompositionPatternSchema = exports_external.object({
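The repo-crawl tools are thin wrappers over the GitHub REST API: parseRepo accepts either "owner/repo" or a full GitHub URL, and githubFetch sends GITHUB_TOKEN as a Bearer token when it is set (unauthenticated calls fall back to GitHub's low anonymous rate limit). A sketch of the argument shapes the new tools take; the repository name is a placeholder.

```ts
// Illustrative argument shapes for the new tools (repo may be "owner/repo" or a full GitHub URL).
const readmeArgs    = { repo: "https://github.com/example-org/example-repo" };                    // repo_readme
const structureArgs = { repo: "example-org/example-repo", depth: 2 };                             // repo_structure
const searchArgs    = { repo: "example-org/example-repo", query: "plugin hooks", maxResults: 5 }; // repo_search

// Setting process.env.GITHUB_TOKEN raises the rate limit for all of these calls.
```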
@@ -25602,7 +26024,8 @@ var SwarmPlugin = async (input) => {
     ...beadsTools,
     ...agentMailTools,
     ...structuredTools,
-    ...swarmTools
+    ...swarmTools,
+    ...repoCrawlTools
   },
   event: async ({ event }) => {
     if (event.type === "session.idle") {
@@ -25646,7 +26069,8 @@ var allTools = {
   ...beadsTools,
   ...agentMailTools,
   ...structuredTools,
-  ...swarmTools
+  ...swarmTools,
+  ...repoCrawlTools
 };
 export {
   SwarmPlugin
package/package.json
CHANGED
package/src/index.ts
CHANGED
@@ -30,6 +30,7 @@ import {
 } from "./agent-mail";
 import { structuredTools } from "./structured";
 import { swarmTools } from "./swarm";
+import { repoCrawlTools } from "./repo-crawl";

 /**
  * OpenCode Swarm Plugin
@@ -39,6 +40,7 @@ import { swarmTools } from "./swarm";
  * - agent-mail:* - Multi-agent coordination via Agent Mail MCP
  * - structured:* - Structured output parsing and validation
  * - swarm:* - Swarm orchestration and task decomposition
+ * - repo-crawl:* - GitHub API tools for repository research
  *
  * @param input - Plugin context from OpenCode
  * @returns Plugin hooks including tools, events, and tool execution hooks
@@ -102,12 +104,14 @@ export const SwarmPlugin: Plugin = async (
    * Tools are namespaced by module:
    * - beads:create, beads:query, beads:update, etc.
    * - agent-mail:init, agent-mail:send, agent-mail:reserve, etc.
+   * - repo-crawl:readme, repo-crawl:structure, etc.
    */
   tool: {
     ...beadsTools,
     ...agentMailTools,
     ...structuredTools,
     ...swarmTools,
+    ...repoCrawlTools,
   },

   /**
@@ -295,6 +299,7 @@ export const allTools = {
   ...agentMailTools,
   ...structuredTools,
   ...swarmTools,
+  ...repoCrawlTools,
 } as const;

 /**
@@ -361,3 +366,19 @@ export {
   type ToolStatus,
   type ToolAvailability,
 } from "./tool-availability";
+
+/**
+ * Re-export repo-crawl module
+ *
+ * Includes:
+ * - repoCrawlTools - All GitHub API repository research tools
+ * - repo_readme, repo_structure, repo_tree, repo_file, repo_search - Individual tools
+ * - RepoCrawlError - Error class
+ *
+ * Features:
+ * - Parse repos from various formats (owner/repo, URLs)
+ * - Optional GITHUB_TOKEN auth for higher rate limits (5000 vs 60 req/hour)
+ * - Tech stack detection from file patterns
+ * - Graceful rate limit handling
+ */
+export { repoCrawlTools, RepoCrawlError } from "./repo-crawl";
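Since index.ts now re-exports the module, package consumers can reach the crawl tools and error class from the top level. A minimal sketch, assuming the published entry point resolves under the package name:

```ts
// Sketch: consuming the new top-level exports (assumes "opencode-swarm-plugin" resolves to this entry point).
import { repoCrawlTools, RepoCrawlError } from "opencode-swarm-plugin";

console.log(Object.keys(repoCrawlTools));
// ["repo_readme", "repo_structure", "repo_tree", "repo_file", "repo_search"]

function describeRepoError(err: unknown): string {
  return err instanceof RepoCrawlError
    ? `GitHub request failed (${err.statusCode ?? "n/a"} at ${err.endpoint ?? "?"}): ${err.message}`
    : String(err);
}
```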
package/src/learning.ts
CHANGED
@@ -115,6 +115,21 @@ export const DecompositionStrategySchema = z.enum([
 ]);
 export type DecompositionStrategy = z.infer<typeof DecompositionStrategySchema>;

+/**
+ * Failure mode taxonomy (imported from evaluation.ts)
+ */
+export const FailureModeSchema = z.enum([
+  "timeout",
+  "conflict",
+  "validation",
+  "tool_failure",
+  "context_overflow",
+  "dependency_blocked",
+  "user_cancelled",
+  "unknown",
+]);
+export type FailureMode = z.infer<typeof FailureModeSchema>;
+
 /**
  * Outcome signals from a completed subtask
  *
@@ -138,6 +153,10 @@ export const OutcomeSignalsSchema = z.object({
   timestamp: z.string(), // ISO-8601
   /** Decomposition strategy used for this task */
   strategy: DecompositionStrategySchema.optional(),
+  /** Failure classification (only when success=false) */
+  failure_mode: FailureModeSchema.optional(),
+  /** Detailed failure context */
+  failure_details: z.string().optional(),
 });
 export type OutcomeSignals = z.infer<typeof OutcomeSignalsSchema>;

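For reference, a minimal sketch of an outcome value that exercises the new fields. Fields of OutcomeSignals that this diff does not show are elided, so the object is typed as a Partial on purpose; the import path assumes a module sitting next to src/learning.ts.

```ts
import type { OutcomeSignals } from "./learning";

// Illustrative failed-outcome slice; failure_mode/failure_details are optional
// and only meaningful when success is false.
const failedOutcome: Partial<OutcomeSignals> = {
  bead_id: "bd-abc123",
  duration_ms: 184000,
  success: false,
  files_touched: ["src/swarm.ts"],
  timestamp: new Date().toISOString(),
  strategy: "file-based",
  failure_mode: "timeout",
  failure_details: "agent session exceeded its time budget during typecheck",
};
```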