opencode-swarm-plugin 0.31.7 → 0.33.0
This diff compares the contents of two publicly released versions of the package, as published to their public registry, and is provided for informational purposes only.
- package/.turbo/turbo-build.log +4 -4
- package/.turbo/turbo-test.log +324 -316
- package/CHANGELOG.md +394 -0
- package/README.md +129 -181
- package/bin/swarm.test.ts +31 -0
- package/bin/swarm.ts +635 -140
- package/dist/compaction-hook.d.ts +1 -1
- package/dist/compaction-hook.d.ts.map +1 -1
- package/dist/hive.d.ts.map +1 -1
- package/dist/index.d.ts +17 -2
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +653 -139
- package/dist/memory-tools.d.ts.map +1 -1
- package/dist/memory.d.ts +5 -4
- package/dist/memory.d.ts.map +1 -1
- package/dist/observability-tools.d.ts +116 -0
- package/dist/observability-tools.d.ts.map +1 -0
- package/dist/plugin.js +648 -136
- package/dist/skills.d.ts.map +1 -1
- package/dist/swarm-orchestrate.d.ts +29 -5
- package/dist/swarm-orchestrate.d.ts.map +1 -1
- package/dist/swarm-prompts.d.ts +66 -0
- package/dist/swarm-prompts.d.ts.map +1 -1
- package/dist/swarm.d.ts +17 -2
- package/dist/swarm.d.ts.map +1 -1
- package/evals/lib/{data-loader.test.ts → data-loader.evalite-test.ts} +7 -6
- package/evals/lib/data-loader.ts +1 -1
- package/evals/scorers/{outcome-scorers.test.ts → outcome-scorers.evalite-test.ts} +1 -1
- package/examples/plugin-wrapper-template.ts +316 -12
- package/global-skills/swarm-coordination/SKILL.md +118 -8
- package/package.json +3 -2
- package/src/compaction-hook.ts +5 -3
- package/src/hive.integration.test.ts +83 -1
- package/src/hive.ts +37 -12
- package/src/index.ts +25 -1
- package/src/mandate-storage.integration.test.ts +601 -0
- package/src/memory-tools.ts +6 -4
- package/src/memory.integration.test.ts +117 -49
- package/src/memory.test.ts +41 -217
- package/src/memory.ts +12 -8
- package/src/observability-tools.test.ts +346 -0
- package/src/observability-tools.ts +594 -0
- package/src/repo-crawl.integration.test.ts +441 -0
- package/src/skills.integration.test.ts +1192 -0
- package/src/skills.test.ts +42 -1
- package/src/skills.ts +8 -4
- package/src/structured.integration.test.ts +817 -0
- package/src/swarm-deferred.integration.test.ts +157 -0
- package/src/swarm-deferred.test.ts +38 -0
- package/src/swarm-mail.integration.test.ts +15 -19
- package/src/swarm-orchestrate.integration.test.ts +282 -0
- package/src/swarm-orchestrate.test.ts +123 -0
- package/src/swarm-orchestrate.ts +279 -201
- package/src/swarm-prompts.test.ts +481 -0
- package/src/swarm-prompts.ts +297 -0
- package/src/swarm-research.integration.test.ts +544 -0
- package/src/swarm-research.test.ts +698 -0
- package/src/swarm-research.ts +472 -0
- package/src/swarm-review.integration.test.ts +290 -0
- package/src/swarm.integration.test.ts +23 -20
- package/src/swarm.ts +6 -3
- package/src/tool-adapter.integration.test.ts +1221 -0
package/bin/swarm.ts
CHANGED
@@ -37,11 +37,13 @@ import {
 } from "../src/hive";
 import {
   legacyDatabaseExists,
-
-
-
+  migratePGliteToLibSQL,
+  pgliteExists,
+  getLibSQLProjectTempDirName,
+  getLibSQLDatabasePath,
+  hashLibSQLProjectPath,
 } from "swarm-mail";
-import {
+import { tmpdir } from "os";
 
 const __dirname = dirname(fileURLToPath(import.meta.url));
 const pkg = JSON.parse(
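The helpers imported above pin down where the plugin keeps its data. A minimal sketch, assuming only these imports plus the `streams`/`streams.db` layout that setup() and the migrate command use later in this diff:

```ts
import { tmpdir } from "os";
import { join } from "path";
import { getLibSQLProjectTempDirName, pgliteExists } from "swarm-mail";

// Both databases share one per-project temp directory; swarm-mail derives
// the directory name from the project path.
const tempDir = join(tmpdir(), getLibSQLProjectTempDirName(process.cwd()));
const pglitePath = join(tempDir, "streams");    // legacy PGlite directory
const libsqlPath = join(tempDir, "streams.db"); // new libSQL database file

if (pgliteExists(pglitePath)) {
  // Legacy data is present: this is the pair of paths that setup() and
  // `swarm migrate` hand to migratePGliteToLibSQL in the hunks below.
}
```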
@@ -709,6 +711,68 @@ function buildAgentsSemanticMemorySection(newline: string): string {
   ].join(newline);
 }
 
+function buildAgentsSwarmCoordinatorSection(newline: string): string {
+  return [
+    "## Swarm Coordinator Checklist (MANDATORY)",
+    "",
+    "When coordinating a swarm, you MUST monitor workers and review their output.",
+    "",
+    "### Monitor Loop",
+    "",
+    "```",
+    "┌─────────────────────────────────────────────────────────────┐",
+    "│ COORDINATOR MONITOR LOOP │",
+    "├─────────────────────────────────────────────────────────────┤",
+    "│ │",
+    "│ 1. CHECK INBOX │",
+    "│    swarmmail_inbox() │",
+    "│    swarmmail_read_message(message_id=N) │",
+    "│ │",
+    "│ 2. CHECK STATUS │",
+    "│    swarm_status(epic_id, project_key) │",
+    "│ │",
+    "│ 3. REVIEW COMPLETED WORK │",
+    "│    swarm_review(project_key, epic_id, task_id, files) │",
+    "│    → Generates review prompt with epic context + diff │",
+    "│ │",
+    "│ 4. SEND FEEDBACK │",
+    "│    swarm_review_feedback( │",
+    "│      project_key, task_id, worker_id, │",
+    "│      status=\"approved|needs_changes\", │",
+    "│      issues=\"[{file, line, issue, suggestion}]\" │",
+    "│    ) │",
+    "│ │",
+    "│ 5. INTERVENE IF NEEDED │",
+    "│    - Blocked >5min → unblock or reassign │",
+    "│    - File conflicts → mediate │",
+    "│    - Scope creep → approve or reject │",
+    "│    - 3 review failures → escalate to human │",
+    "│ │",
+    "└─────────────────────────────────────────────────────────────┘",
+    "```",
+    "",
+    "### Review Tools",
+    "",
+    "| Tool | Purpose |",
+    "|------|---------|",
+    "| `swarm_review` | Generate review prompt with epic context, dependencies, and git diff |",
+    "| `swarm_review_feedback` | Send approval/rejection to worker (tracks 3-strike rule) |",
+    "",
+    "### Review Criteria",
+    "",
+    "- Does work fulfill subtask requirements?",
+    "- Does it serve the overall epic goal?",
+    "- Does it enable downstream tasks?",
+    "- Type safety, no obvious bugs?",
+    "",
+    "### 3-Strike Rule",
+    "",
+    "After 3 review rejections, task is marked **blocked**. This signals an architectural problem, not \"try harder.\"",
+    "",
+    "**NEVER skip the review step.** Workers complete faster when they get feedback.",
+  ].join(newline);
+}
+
 function updateAgentsToolPreferencesBlock(
   content: string,
   newline: string,
@@ -740,6 +804,9 @@ function updateAgentsToolPreferencesBlock(
   const hasSemanticTools =
     /semantic-memory_find/i.test(block) &&
     /semantic-memory_store/i.test(block);
+  const hasSwarmReviewTools =
+    /swarm_review\b/i.test(block) &&
+    /swarm_review_feedback/i.test(block);
 
   const linesToAdd: string[] = [];
   if (!hasSkillsTools) {
@@ -757,6 +824,11 @@
       "- **semantic-memory_find, semantic-memory_store, semantic-memory_validate** - Persistent learning across sessions",
     );
   }
+  if (!hasSwarmReviewTools) {
+    linesToAdd.push(
+      "- **swarm_review, swarm_review_feedback** - Coordinator reviews worker output (3-strike rule)",
+    );
+  }
 
   if (linesToAdd.length === 0) {
     return { content, changed: false };
@@ -820,6 +892,10 @@ function updateAgentsMdContent({
   const hasSemanticMemorySection =
     /^#{1,6}\s+Semantic Memory\b/im.test(updated) ||
     /semantic-memory_store\(/.test(updated);
+  const hasSwarmCoordinatorSection =
+    /^#{1,6}\s+Swarm Coordinator\b/im.test(updated) ||
+    /swarm_review\(/.test(updated) ||
+    /COORDINATOR MONITOR LOOP/i.test(updated);
 
   const sectionsToAppend: string[] = [];
   if (!hasSkillsSection) {
@@ -836,6 +912,10 @@
     sectionsToAppend.push(buildAgentsSemanticMemorySection(newline));
     changes.push("Added Semantic Memory section");
   }
+  if (!hasSwarmCoordinatorSection) {
+    sectionsToAppend.push(buildAgentsSwarmCoordinatorSection(newline));
+    changes.push("Added Swarm Coordinator Checklist section");
+  }
 
   if (sectionsToAppend.length > 0) {
     const trimmed = updated.replace(/\s+$/g, "");
@@ -1053,18 +1133,45 @@ const result2 = await Task(subagent_type="swarm/worker", prompt="<from above>")
 
 **IMPORTANT:** Pass \`project_path\` to \`swarm_spawn_subtask\` so workers can call \`swarmmail_init\`.
 
-### Phase 7:
-
-
-
-
+### Phase 7: MANDATORY Review Loop (NON-NEGOTIABLE)
+
+**⚠️ AFTER EVERY Task() RETURNS, YOU MUST:**
+
+1. **CHECK INBOX** - Worker may have sent messages
+   \`swarmmail_inbox()\`
+   \`swarmmail_read_message(message_id=N)\`
+
+2. **REVIEW WORK** - Generate review with diff
+   \`swarm_review(project_key, epic_id, task_id, files_touched)\`
 
-
+3. **EVALUATE** - Does it meet epic goals?
+   - Fulfills subtask requirements?
+   - Serves overall epic goal?
+   - Enables downstream tasks?
+   - Type safety, no obvious bugs?
+
+4. **SEND FEEDBACK** - Approve or request changes
+   \`swarm_review_feedback(project_key, task_id, worker_id, status, issues)\`
+
+   If approved: Close cell, spawn next worker
+   If needs_changes: Worker retries (max 3 attempts)
+   If 3 failures: Mark blocked, escalate to human
+
+5. **ONLY THEN** - Spawn next worker or complete
+
+**DO NOT skip this. DO NOT batch reviews. Review EACH worker IMMEDIATELY after return.**
+
+**Intervene if:**
+- Worker blocked >5min → unblock or reassign
+- File conflicts → mediate between workers
+- Scope creep → approve or reject expansion
+- Review fails 3x → mark task blocked, escalate to human
 
 ### Phase 8: Complete
 \`\`\`
-
-hive_sync()
+# After all workers complete and reviews pass:
+hive_sync()  # Sync all cells to git
+# Coordinator does NOT call swarm_complete - workers do that
 \`\`\`
 
 ## Strategy Reference
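The Phase 7 checklist above is prompt text, not an API. As a hypothetical sketch of the ordering it mandates - every identifier below is an illustrative stub, since the real swarm_*/swarmmail_* tools are invoked by the agent rather than imported:

```ts
// Hypothetical stubs; the real swarm_review / swarm_review_feedback are agent
// tools, not importable functions, and these signatures are illustrative only.
type ReviewStatus = "approved" | "needs_changes";
const swarmmail_inbox = async (): Promise<unknown[]> => [];
const swarm_review = async (_project: string, _epic: string, _task: string, _files: string[]): Promise<string> => "";
const swarm_review_feedback = async (_project: string, _task: string, _worker: string, _status: ReviewStatus, _issues: string): Promise<void> => {};
const meetsEpicGoals = (_reviewPrompt: string): boolean => true; // placeholder for step 3

// The mandated ordering: inbox -> review -> evaluate -> feedback, after EVERY Task() return.
async function afterWorkerReturns(project: string, epic: string, task: string, worker: string, files: string[], priorRejections: number): Promise<void> {
  await swarmmail_inbox();                                             // 1. check inbox
  const reviewPrompt = await swarm_review(project, epic, task, files); // 2. generate review with diff
  const approved = meetsEpicGoals(reviewPrompt);                       // 3. evaluate against epic goals
  await swarm_review_feedback(project, task, worker, approved ? "approved" : "needs_changes", "[]"); // 4. feedback
  if (!approved && priorRejections + 1 >= 3) {
    // 3-strike rule: mark the task blocked and escalate to a human
  }
  // 5. only then spawn the next worker or complete
}
```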
@@ -1215,6 +1322,233 @@ hive_update(id="<bead-id>", status="blocked")
 Begin by reading your full prompt and executing Step 1.
 `;
 
+const getResearcherAgent = (model: string) => `---
+name: swarm-researcher
+description: READ-ONLY research agent - discovers tools, fetches docs, stores findings
+model: ${model}
+---
+
+You are a research agent. Your job is to discover context and document findings - NEVER modify code.
+
+## CRITICAL: You Are READ-ONLY
+
+**YOU DO NOT:**
+- Edit code files
+- Run tests
+- Make commits
+- Reserve files (you don't edit, so no reservations needed)
+- Implement features
+
+**YOU DO:**
+- Discover available tools (MCP servers, skills, CLI tools)
+- Read lockfiles to get current package versions
+- Fetch documentation for those versions
+- Store findings in semantic-memory (full details)
+- Broadcast summaries via swarm mail (condensed)
+- Return structured summary for shared context
+
+## Workflow
+
+### Step 1: Initialize (MANDATORY FIRST)
+
+\`\`\`
+swarmmail_init(project_path="/abs/path/to/project", task_description="Research: <what you're researching>")
+\`\`\`
+
+### Step 2: Discover Available Tools
+
+**DO NOT assume what tools are installed. Discover them:**
+
+\`\`\`
+# Check what skills user has installed
+skills_list()
+
+# Check what MCP servers are available (look for context7, pdf-brain, fetch, etc.)
+# Note: No direct MCP listing tool - infer from task context or ask coordinator
+
+# Check for CLI tools if relevant (bd, cass, ubs, ollama)
+# Use Bash tool to check: which <tool-name>
+\`\`\`
+
+### Step 3: Load Relevant Skills
+
+Based on research task, load appropriate skills:
+
+\`\`\`
+skills_use(name="<skill-name>", context="Researching <topic>")
+\`\`\`
+
+### Step 4: Read Lockfiles (if researching dependencies)
+
+**DO NOT read implementation code.** Only read metadata:
+
+\`\`\`
+# For package.json projects
+read("package.json")
+read("package-lock.json") or read("bun.lock") or read("pnpm-lock.yaml")
+
+# For Python
+read("requirements.txt") or read("pyproject.toml")
+
+# For Go
+read("go.mod")
+\`\`\`
+
+Extract current version numbers for libraries you need to research.
+
+### Step 5: Fetch Documentation
+
+Use available doc tools to get version-specific docs:
+
+\`\`\`
+# If context7 available (check skills_list or task context)
+# Use it for library docs
+
+# If pdf-brain available
+pdf-brain_search(query="<library> <version> <topic>", limit=5)
+
+# If fetch tool available
+fetch(url="https://docs.example.com/v2.0/...")
+
+# If repo-crawl available for OSS libraries
+repo-crawl_readme(repo="owner/repo")
+repo-crawl_file(repo="owner/repo", path="docs/...")
+\`\`\`
+
+### Step 6: Store Full Findings in Semantic Memory
+
+**Store detailed findings for future agents:**
+
+\`\`\`
+semantic-memory_store(
+  information="Researched <library> v<version>. Key findings: <detailed notes with examples, gotchas, patterns>",
+  metadata="<library>, <version>, <topic>, research"
+)
+\`\`\`
+
+**Include:**
+- Library/framework versions discovered
+- Key API patterns
+- Breaking changes from previous versions
+- Common gotchas
+- Relevant examples
+
+### Step 7: Broadcast Condensed Summary via Swarm Mail
+
+**Send concise summary to coordinator:**
+
+\`\`\`
+swarmmail_send(
+  to=["coordinator"],
+  subject="Research Complete: <topic>",
+  body="<3-5 bullet points with key takeaways>",
+  thread_id="<epic-id>"
+)
+\`\`\`
+
+### Step 8: Return Structured Summary
+
+**Output format for shared_context:**
+
+\`\`\`json
+{
+  "researched": "<topic>",
+  "tools_discovered": ["skill-1", "skill-2", "mcp-server-1"],
+  "versions": {
+    "library-1": "1.2.3",
+    "library-2": "4.5.6"
+  },
+  "key_findings": [
+    "Finding 1 with actionable insight",
+    "Finding 2 with actionable insight",
+    "Finding 3 with actionable insight"
+  ],
+  "relevant_skills": ["skill-to-use-1", "skill-to-use-2"],
+  "stored_in_memory": true
+}
+\`\`\`
+
+## Tool Discovery Patterns
+
+### Skills Discovery
+
+\`\`\`
+skills_list()
+# Returns: Available skills from global, project, bundled sources
+
+# Load relevant skill for research domain
+skills_use(name="<skill>", context="Researching <topic>")
+\`\`\`
+
+### MCP Server Detection
+
+**No direct listing tool.** Infer from:
+- Task context (coordinator may mention available tools)
+- Trial: Try calling a tool and catch error if not available
+- Read OpenCode config if accessible
+
+### CLI Tool Detection
+
+\`\`\`
+# Check if tool is installed
+bash("which <tool>", description="Check if <tool> is available")
+
+# Examples:
+bash("which cass", description="Check CASS availability")
+bash("which ubs", description="Check UBS availability")
+bash("ollama --version", description="Check Ollama availability")
+\`\`\`
+
+## Context Efficiency Rules (MANDATORY)
+
+**NEVER dump raw documentation.** Always summarize.
+
+| ❌ Bad (Context Bomb) | ✅ Good (Condensed) |
+|---------------------|-------------------|
+| Paste entire API reference | "Library uses hooks API. Key hooks: useQuery, useMutation. Breaking change in v2: callbacks removed." |
+| Copy full changelog | "v2.0 breaking changes: renamed auth() → authenticate(), dropped IE11 support" |
+| Include all examples | "Common pattern: async/await with error boundaries (stored full example in semantic-memory)" |
+
+**Storage Strategy:**
+- **Semantic Memory**: Full details, examples, code snippets
+- **Swarm Mail**: 3-5 bullet points only
+- **Return Value**: Structured JSON summary
+
+## When to Use This Agent
+
+**DO spawn researcher when:**
+- Task requires understanding current tech stack versions
+- Need to fetch library/framework documentation
+- Discovering project conventions from config files
+- Researching best practices for unfamiliar domain
+
+**DON'T spawn researcher when:**
+- Information is already in semantic memory (query first!)
+- Task doesn't need external docs
+- Time-sensitive work (research adds latency)
+
+## Example Research Tasks
+
+**"Research Next.js 16 caching APIs"**
+
+1. Read package.json → extract Next.js version
+2. Use context7 or fetch to get Next.js 16 cache docs
+3. Store findings: unstable_cache, revalidatePath, cache patterns
+4. Broadcast: "Next.js 16 uses native fetch caching + unstable_cache for functions"
+5. Return structured summary with key APIs
+
+**"Discover available testing tools"**
+
+1. Check skills_list for testing-patterns skill
+2. Check which jest/vitest/bun (bash tool)
+3. Read package.json devDependencies
+4. Store findings: test runner, assertion library, coverage tool
+5. Broadcast: "Project uses Bun test with happy-dom"
+6. Return tool inventory
+
+Begin by executing Step 1 (swarmmail_init).
+`;
+
 // ============================================================================
 // Commands
 // ============================================================================
@@ -1377,9 +1711,44 @@ async function setup() {
 
   p.intro("opencode-swarm-plugin v" + VERSION);
 
+  // Migrate legacy database if present (do this first, before config check)
+  const cwd = process.cwd();
+  const tempDirName = getLibSQLProjectTempDirName(cwd);
+  const tempDir = join(tmpdir(), tempDirName);
+  const pglitePath = join(tempDir, "streams");
+  const libsqlPath = join(tempDir, "streams.db");
+
+  if (pgliteExists(pglitePath)) {
+    const migrateSpinner = p.spinner();
+    migrateSpinner.start("Migrating...");
+
+    try {
+      const result = await migratePGliteToLibSQL({
+        pglitePath,
+        libsqlPath,
+        dryRun: false,
+        onProgress: () => {},
+      });
+
+      const total = result.memories.migrated + result.beads.migrated;
+      if (total > 0) {
+        migrateSpinner.stop(`Migrated ${result.memories.migrated} memories, ${result.beads.migrated} cells`);
+      } else {
+        migrateSpinner.stop("Migrated");
+      }
+
+      if (result.errors.length > 0) {
+        p.log.warn(`${result.errors.length} errors during migration`);
+      }
+    } catch (error) {
+      migrateSpinner.stop("Migration failed");
+      p.log.error(error instanceof Error ? error.message : String(error));
+    }
+  }
+
   let isReinstall = false;
 
-  // Check if already configured
+  // Check if already configured
   p.log.step("Checking existing configuration...");
   const configDir = join(homedir(), ".config", "opencode");
   const pluginDir = join(configDir, "plugin");
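setup() above migrates silently and unconditionally; the standalone `swarm migrate` command added further down wraps the same helper in a dry-run-then-confirm flow. Condensed (identifiers as in the surrounding hunks; not the verbatim implementation):

```ts
// Scan first with dryRun, then re-run for real only after the user confirms.
const dry = await migratePGliteToLibSQL({ pglitePath, libsqlPath, dryRun: true, onProgress: () => {} });
const total =
  dry.memories.migrated + dry.beads.migrated + dry.messages.migrated +
  dry.agents.migrated + dry.events.migrated;

if (total > 0) {
  const ok = await p.confirm({ message: "Migrate this data to libSQL?", initialValue: true });
  if (ok === true) {
    await migratePGliteToLibSQL({ pglitePath, libsqlPath, dryRun: false, onProgress: () => {} });
  }
}
```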
@@ -1391,6 +1760,7 @@
   const swarmAgentDir = join(agentDir, "swarm");
   const plannerAgentPath = join(swarmAgentDir, "planner.md");
   const workerAgentPath = join(swarmAgentDir, "worker.md");
+  const researcherAgentPath = join(swarmAgentDir, "researcher.md");
   // Legacy flat paths (for detection/cleanup)
   const legacyPlannerPath = join(agentDir, "swarm-planner.md");
   const legacyWorkerPath = join(agentDir, "swarm-worker.md");
@@ -1400,13 +1770,14 @@
     commandPath,
     plannerAgentPath,
     workerAgentPath,
+    researcherAgentPath,
     legacyPlannerPath,
     legacyWorkerPath,
   ].filter((f) => existsSync(f));
 
   if (existingFiles.length > 0) {
     p.log.success("Swarm is already configured!");
-    p.log.message(dim(" Found " + existingFiles.length + "/
+    p.log.message(dim(" Found " + existingFiles.length + "/5 config files"));
 
     const action = await p.select({
       message: "What would you like to do?",
@@ -1624,7 +1995,6 @@
 
   // Check for .beads → .hive migration
   p.log.step("Checking for legacy .beads directory...");
-  const cwd = process.cwd();
   const migrationCheck = checkBeadsMigrationNeeded(cwd);
   if (migrationCheck.needed) {
     p.log.warn("Found legacy .beads directory");
@@ -1683,120 +2053,6 @@
     p.log.message(dim(" No legacy .beads directory found"));
   }
 
-  // Check for legacy semantic-memory migration
-  p.log.step("Checking for legacy semantic-memory database...");
-  if (legacyDatabaseExists()) {
-    p.log.warn("Found legacy semantic-memory database");
-
-    // Check if target database already has memories (already migrated)
-    let swarmMail = null;
-    try {
-      swarmMail = await getSwarmMail(cwd);
-      const targetDb = await swarmMail.getDatabase(cwd);
-      const alreadyMigrated = await targetHasMemories(targetDb);
-
-      if (alreadyMigrated) {
-        p.log.message(dim(" Already migrated to swarm-mail"));
-        await swarmMail.close();
-      } else {
-        await swarmMail.close();
-        swarmMail = null;
-
-        // Target is empty - show migration status and prompt
-        const migrationStatus = await getMigrationStatus();
-        if (migrationStatus) {
-          const { total, withEmbeddings } = migrationStatus;
-          p.log.message(dim(` Memories: ${total} total (${withEmbeddings} with embeddings)`));
-          p.log.message(dim(` Will migrate to swarm-mail unified database`));
-
-          const shouldMigrate = await p.confirm({
-            message: "Migrate to swarm-mail database? (recommended)",
-            initialValue: true,
-          });
-
-          if (p.isCancel(shouldMigrate)) {
-            p.cancel("Setup cancelled");
-            process.exit(0);
-          }
-
-          if (shouldMigrate) {
-            const migrateSpinner = p.spinner();
-            migrateSpinner.start("Connecting to target database...");
-
-            try {
-              // Get swarm-mail database for this project
-              swarmMail = await getSwarmMail(cwd);
-              const targetDb = await swarmMail.getDatabase(cwd);
-              migrateSpinner.message("Migrating memories...");
-
-              // Run migration with progress updates
-              const result = await migrateLegacyMemories({
-                targetDb,
-                onProgress: (msg) => {
-                  // Update spinner message for key milestones
-                  if (msg.includes("complete") || msg.includes("Progress:")) {
-                    migrateSpinner.message(msg.replace("[migrate] ", ""));
-                  }
-                },
-              });
-
-              migrateSpinner.stop("Semantic memory migration complete");
-
-              if (result.migrated > 0) {
-                p.log.success(`Migrated ${result.migrated} memories to swarm-mail`);
-              }
-              if (result.skipped > 0) {
-                p.log.message(dim(` Skipped ${result.skipped} (already exist)`));
-              }
-              if (result.failed > 0) {
-                p.log.warn(`Failed to migrate ${result.failed} memories`);
-                for (const error of result.errors.slice(0, 3)) {
-                  p.log.message(dim(` ${error}`));
-                }
-                if (result.errors.length > 3) {
-                  p.log.message(dim(` ... and ${result.errors.length - 3} more errors`));
-                }
-              }
-
-              // Close the connection to allow process to exit
-              await swarmMail.close();
-              swarmMail = null;
-            } catch (error) {
-              migrateSpinner.stop("Migration failed");
-              const errorMsg = error instanceof Error ? error.message : String(error);
-              // Hide internal PGLite errors, only show user-actionable messages
-              if (!errorMsg.includes("NOTICE") && !errorMsg.includes("PGlite")) {
-                p.log.error(errorMsg);
-              } else {
-                p.log.warn("Migration encountered an error - please try again");
-              }
-              if (swarmMail) {
-                await swarmMail.close();
-                swarmMail = null;
-              }
-            }
-          } else {
-            p.log.warn("Skipping migration - legacy semantic-memory will continue to work but is deprecated");
-          }
-        }
-      }
-    } catch (error) {
-      // Failed to connect to target database - log and skip
-      const errorMsg = error instanceof Error ? error.message : String(error);
-      // Hide internal PGLite errors
-      if (!errorMsg.includes("NOTICE") && !errorMsg.includes("PGlite")) {
-        p.log.message(dim(` Could not check migration status: ${errorMsg}`));
-      } else {
-        p.log.message(dim(" Could not check migration status - skipping"));
-      }
-      if (swarmMail) {
-        await swarmMail.close();
-      }
-    }
-  } else {
-    p.log.message(dim(" No legacy semantic-memory database found"));
-  }
-
   // Check for legacy semantic-memory MCP server in OpenCode config
   p.log.step("Checking for legacy MCP servers...");
   const opencodeConfigPath = join(configDir, 'config.json');
@@ -1988,11 +2244,12 @@
   stats[writeFileWithStatus(pluginPath, getPluginWrapper(), "Plugin")]++;
   stats[writeFileWithStatus(commandPath, SWARM_COMMAND, "Command")]++;
 
-  // Write nested agent files (swarm/planner.md, swarm/worker.md)
+  // Write nested agent files (swarm/planner.md, swarm/worker.md, swarm/researcher.md)
   // This is the format used by Task(subagent_type="swarm/worker")
   p.log.step("Writing agent configuration...");
   stats[writeFileWithStatus(plannerAgentPath, getPlannerAgent(coordinatorModel as string), "Planner agent")]++;
   stats[writeFileWithStatus(workerAgentPath, getWorkerAgent(workerModel as string), "Worker agent")]++;
+  stats[writeFileWithStatus(researcherAgentPath, getResearcherAgent(workerModel as string), "Researcher agent")]++;
 
   // Clean up legacy flat agent files if they exist
   if (existsSync(legacyPlannerPath) || existsSync(legacyWorkerPath)) {
@@ -2293,8 +2550,10 @@ function config() {
   const configDir = join(homedir(), ".config", "opencode");
   const pluginPath = join(configDir, "plugin", "swarm.ts");
   const commandPath = join(configDir, "command", "swarm.md");
-  const
-  const
+  const swarmAgentDir = join(configDir, "agent", "swarm");
+  const plannerAgentPath = join(swarmAgentDir, "planner.md");
+  const workerAgentPath = join(swarmAgentDir, "worker.md");
+  const researcherAgentPath = join(swarmAgentDir, "researcher.md");
   const globalSkillsPath = join(configDir, "skills");
 
   console.log(yellow(BANNER));
@@ -2306,8 +2565,9 @@
   const files = [
     { path: pluginPath, desc: "Plugin loader", emoji: "🔌" },
     { path: commandPath, desc: "/swarm command prompt", emoji: "📜" },
-    { path: plannerAgentPath, desc: "@swarm
-    { path: workerAgentPath, desc: "@swarm
+    { path: plannerAgentPath, desc: "@swarm/planner agent", emoji: "🤖" },
+    { path: workerAgentPath, desc: "@swarm/worker agent", emoji: "🐝" },
+    { path: researcherAgentPath, desc: "@swarm/researcher agent", emoji: "🔬" },
   ];
 
   for (const { path, desc, emoji } of files) {
@@ -2445,6 +2705,7 @@ ${cyan("Commands:")}
   swarm init       Initialize beads in current project
   swarm config     Show paths to generated config files
   swarm agents     Update AGENTS.md with skill awareness
+  swarm migrate    Migrate PGlite database to libSQL
   swarm update     Update to latest version
   swarm version    Show version and banner
   swarm tool       Execute a tool (for plugin wrapper)
@@ -2457,15 +2718,17 @@ ${cyan("Tool Execution:")}
 
 ${cyan("Usage in OpenCode:")}
   /swarm "Add user authentication with OAuth"
-  @swarm
-  @swarm
+  @swarm/planner "Decompose this into parallel tasks"
+  @swarm/worker "Execute this specific subtask"
+  @swarm/researcher "Research Next.js caching APIs"
 
 ${cyan("Customization:")}
   Edit the generated files to customize behavior:
-  ${dim("~/.config/opencode/command/swarm.md")}
-  ${dim("~/.config/opencode/agent/swarm
-  ${dim("~/.config/opencode/agent/swarm
-  ${dim("~/.config/opencode/
+  ${dim("~/.config/opencode/command/swarm.md")} - /swarm command prompt
+  ${dim("~/.config/opencode/agent/swarm/planner.md")} - @swarm/planner (coordinator)
+  ${dim("~/.config/opencode/agent/swarm/worker.md")} - @swarm/worker (task executor)
+  ${dim("~/.config/opencode/agent/swarm/researcher.md")} - @swarm/researcher (read-only research)
+  ${dim("~/.config/opencode/plugin/swarm.ts")} - Plugin loader
 
 ${dim("Docs: https://github.com/joelhooks/opencode-swarm-plugin")}
 `);
@@ -2683,6 +2946,232 @@ async function agents() {
   p.outro("Done");
 }
 
+// ============================================================================
+// Migrate Command - PGlite → libSQL migration
+// ============================================================================
+
+async function migrate() {
+  p.intro("swarm migrate v" + VERSION);
+
+  const projectPath = process.cwd();
+
+  // Calculate the temp directory path (same logic as libsql.convenience.ts)
+  const tempDirName = getLibSQLProjectTempDirName(projectPath);
+  const tempDir = join(tmpdir(), tempDirName);
+  const pglitePath = join(tempDir, "streams");
+  const libsqlPath = join(tempDir, "streams.db");
+
+  // Check if PGlite exists
+  if (!pgliteExists(pglitePath)) {
+    p.log.success("No PGlite database found - nothing to migrate!");
+    p.outro("Done");
+    return;
+  }
+
+  // Dry run to show counts
+  const s = p.spinner();
+  s.start("Scanning PGlite database...");
+
+  try {
+    const dryResult = await migratePGliteToLibSQL({
+      pglitePath,
+      libsqlPath,
+      dryRun: true,
+      onProgress: () => {}, // silent during dry run
+    });
+
+    s.stop("Scan complete");
+
+    // Show summary
+    const totalItems =
+      dryResult.memories.migrated +
+      dryResult.beads.migrated +
+      dryResult.messages.migrated +
+      dryResult.agents.migrated +
+      dryResult.events.migrated;
+
+    if (totalItems === 0) {
+      p.log.warn("PGlite database exists but contains no data");
+      p.outro("Nothing to migrate");
+      return;
+    }
+
+    p.log.step("Found data to migrate:");
+    if (dryResult.memories.migrated > 0) {
+      p.log.message(`  📝 ${dryResult.memories.migrated} memories`);
+    }
+    if (dryResult.beads.migrated > 0) {
+      p.log.message(`  🐝 ${dryResult.beads.migrated} cells`);
+    }
+    if (dryResult.messages.migrated > 0) {
+      p.log.message(`  ✉️ ${dryResult.messages.migrated} messages`);
+    }
+    if (dryResult.agents.migrated > 0) {
+      p.log.message(`  🤖 ${dryResult.agents.migrated} agents`);
+    }
+    if (dryResult.events.migrated > 0) {
+      p.log.message(`  📋 ${dryResult.events.migrated} events`);
+    }
+
+    // Confirm
+    const confirm = await p.confirm({
+      message: "Migrate this data to libSQL?",
+      initialValue: true,
+    });
+
+    if (p.isCancel(confirm) || !confirm) {
+      p.outro("Migration cancelled");
+      return;
+    }
+
+    // Run actual migration
+    const migrateSpinner = p.spinner();
+    migrateSpinner.start("Migrating data...");
+
+    const result = await migratePGliteToLibSQL({
+      pglitePath,
+      libsqlPath,
+      dryRun: false,
+      onProgress: (msg) => {
+        // Update spinner for key milestones
+        if (msg.includes("Migrating") || msg.includes("complete")) {
+          migrateSpinner.message(msg.replace("[migrate] ", ""));
+        }
+      },
+    });
+
+    migrateSpinner.stop("Migration complete!");
+
+    // Show results
+    const showStat = (label: string, stat: { migrated: number; skipped: number; failed: number }) => {
+      if (stat.migrated > 0 || stat.skipped > 0 || stat.failed > 0) {
+        const parts = [];
+        if (stat.migrated > 0) parts.push(green(`${stat.migrated} migrated`));
+        if (stat.skipped > 0) parts.push(dim(`${stat.skipped} skipped`));
+        if (stat.failed > 0) parts.push(`\x1b[31m${stat.failed} failed\x1b[0m`);
+        p.log.message(`  ${label}: ${parts.join(", ")}`);
+      }
+    };
+
+    showStat("Memories", result.memories);
+    showStat("Cells", result.beads);
+    showStat("Messages", result.messages);
+    showStat("Agents", result.agents);
+    showStat("Events", result.events);
+
+    if (result.errors.length > 0) {
+      p.log.warn(`${result.errors.length} errors occurred`);
+    }
+
+    p.outro("Migration complete! 🐝");
+
+  } catch (error) {
+    s.stop("Migration failed");
+    p.log.error(error instanceof Error ? error.message : String(error));
+    p.outro("Migration failed");
+    process.exit(1);
+  }
+}
+
+// ============================================================================
+// Database Info Command
+// ============================================================================
+
+/**
+ * Show database location and status
+ *
+ * Helps debug which database is being used and its schema state.
+ */
+async function db() {
+  const projectPath = process.cwd();
+  const projectName = basename(projectPath);
+  const hash = hashLibSQLProjectPath(projectPath);
+  const dbPath = getLibSQLDatabasePath(projectPath);
+  const dbDir = dirname(dbPath.replace("file:", ""));
+  const dbFile = dbPath.replace("file:", "");
+
+  console.log(yellow(BANNER));
+  console.log(dim(`  ${TAGLINE}\n`));
+
+  console.log(cyan("  Database Info\n"));
+
+  console.log(`  ${dim("Project:")} ${projectPath}`);
+  console.log(`  ${dim("Project Name:")} ${projectName}`);
+  console.log(`  ${dim("Hash:")} ${hash}`);
+  console.log(`  ${dim("DB Directory:")} ${dbDir}`);
+  console.log(`  ${dim("DB File:")} ${dbFile}`);
+  console.log();
+
+  // Check if database exists
+  if (existsSync(dbFile)) {
+    const stats = statSync(dbFile);
+    const sizeKB = Math.round(stats.size / 1024);
+    console.log(`  ${green("✓")} Database exists (${sizeKB} KB)`);
+
+    // Check schema
+    try {
+      const { execSync } = await import("child_process");
+      const schema = execSync(`sqlite3 "${dbFile}" "SELECT sql FROM sqlite_master WHERE type='table' AND name='beads'"`, { encoding: "utf-8" }).trim();
+
+      if (schema) {
+        const hasProjectKey = schema.includes("project_key");
+        if (hasProjectKey) {
+          console.log(`  ${green("✓")} Schema is correct (has project_key)`);
+        } else {
+          console.log(`  \x1b[31m✗\x1b[0m Schema is OLD (missing project_key)`);
+          console.log();
+          console.log(dim("  To fix: delete the database and restart OpenCode"));
+          console.log(dim(`    rm -r "${dbDir}"`));
+        }
+      } else {
+        console.log(`  ${dim("○")} No beads table yet (will be created on first use)`);
+      }
+
+      // Check schema_version
+      try {
+        const version = execSync(`sqlite3 "${dbFile}" "SELECT MAX(version) FROM schema_version"`, { encoding: "utf-8" }).trim();
+        if (version && version !== "") {
+          console.log(`  ${dim("○")} Schema version: ${version}`);
+        }
+      } catch {
+        console.log(`  ${dim("○")} No schema_version table`);
+      }
+
+      // Count records
+      try {
+        const beadCount = execSync(`sqlite3 "${dbFile}" "SELECT COUNT(*) FROM beads"`, { encoding: "utf-8" }).trim();
+        console.log(`  ${dim("○")} Cells: ${beadCount}`);
+      } catch {
+        // Table doesn't exist yet
+      }
+
+      try {
+        const memoryCount = execSync(`sqlite3 "${dbFile}" "SELECT COUNT(*) FROM memories"`, { encoding: "utf-8" }).trim();
+        console.log(`  ${dim("○")} Memories: ${memoryCount}`);
+      } catch {
+        // Table doesn't exist yet
+      }
+
+    } catch (error) {
+      console.log(`  ${dim("○")} Could not inspect schema (sqlite3 not available)`);
+    }
+  } else {
+    console.log(`  ${dim("○")} Database does not exist yet`);
+    console.log(dim("  Will be created on first use"));
+  }
+
+  // Check for legacy PGLite
+  console.log();
+  const pglitePath = join(dbDir, "streams");
+  if (existsSync(pglitePath)) {
+    console.log(`  \x1b[33m!\x1b[0m Legacy PGLite directory exists`);
+    console.log(dim(`    ${pglitePath}`));
+    console.log(dim("    Run 'swarm migrate' to migrate data"));
+  }
+
+  console.log();
+}
+
 // ============================================================================
 // Main
 // ============================================================================
@@ -2721,6 +3210,12 @@ switch (command) {
   case "agents":
     await agents();
     break;
+  case "migrate":
+    await migrate();
+    break;
+  case "db":
+    await db();
+    break;
   case "version":
   case "--version":
   case "-v":