opencode-swarm-plugin 0.29.0 → 0.30.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +4 -4
- package/CHANGELOG.md +94 -0
- package/README.md +3 -6
- package/bin/swarm.test.ts +163 -0
- package/bin/swarm.ts +304 -72
- package/dist/hive.d.ts.map +1 -1
- package/dist/index.d.ts +94 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +18825 -3469
- package/dist/memory-tools.d.ts +209 -0
- package/dist/memory-tools.d.ts.map +1 -0
- package/dist/memory.d.ts +124 -0
- package/dist/memory.d.ts.map +1 -0
- package/dist/plugin.js +18775 -3430
- package/dist/schemas/index.d.ts +7 -0
- package/dist/schemas/index.d.ts.map +1 -1
- package/dist/schemas/worker-handoff.d.ts +78 -0
- package/dist/schemas/worker-handoff.d.ts.map +1 -0
- package/dist/swarm-orchestrate.d.ts +50 -0
- package/dist/swarm-orchestrate.d.ts.map +1 -1
- package/dist/swarm-prompts.d.ts +1 -1
- package/dist/swarm-prompts.d.ts.map +1 -1
- package/dist/swarm-review.d.ts +4 -0
- package/dist/swarm-review.d.ts.map +1 -1
- package/docs/planning/ADR-008-worker-handoff-protocol.md +293 -0
- package/examples/plugin-wrapper-template.ts +157 -28
- package/package.json +3 -1
- package/src/hive.integration.test.ts +114 -0
- package/src/hive.ts +33 -22
- package/src/index.ts +41 -8
- package/src/memory-tools.test.ts +111 -0
- package/src/memory-tools.ts +273 -0
- package/src/memory.integration.test.ts +266 -0
- package/src/memory.test.ts +334 -0
- package/src/memory.ts +441 -0
- package/src/schemas/index.ts +18 -0
- package/src/schemas/worker-handoff.test.ts +271 -0
- package/src/schemas/worker-handoff.ts +131 -0
- package/src/swarm-orchestrate.ts +262 -24
- package/src/swarm-prompts.ts +48 -5
- package/src/swarm-review.ts +7 -0
- package/src/swarm.integration.test.ts +386 -9
package/bin/swarm.ts
CHANGED

@@ -73,7 +73,11 @@ async function execTool(
         );
       } else if (!result.success && result.error) {
         // Tool returned an error in JSON format
-        reject(new Error(result.error));
+        // Handle both string errors and object errors with .message
+        const errorMsg = typeof result.error === "string"
+          ? result.error
+          : (result.error.message || "Tool execution failed");
+        reject(new Error(errorMsg));
       } else {
         resolve(stdout);
       }
@@ -89,11 +93,11 @@ async function execTool(
       try {
         const result = JSON.parse(stdout);
         if (!result.success && result.error) {
-          reject(
-            new Error(
-              result.error.message || `Tool failed with code ${code}`,
-            )
-          );
+          // Handle both string errors and object errors with .message
+          const errorMsg = typeof result.error === "string"
+            ? result.error
+            : (result.error.message || `Tool failed with code ${code}`);
+          reject(new Error(errorMsg));
         } else {
           reject(
             new Error(stderr || stdout || `Tool failed with code ${code}`),
           );
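Both hunks in `execTool` apply the same fix: `result.error` may arrive as a plain string or as an object carrying a `.message`. A minimal standalone sketch of the normalization (the `ToolResult` shape and `toErrorMessage` helper are hypothetical, for illustration only):

```ts
// Hypothetical result shape; the plugin's actual JSON may carry more fields.
type ToolResult = { success: boolean; error?: string | { message?: string } };

// Normalize string errors and object errors with .message into one string,
// falling back to a generic message when neither is present.
function toErrorMessage(result: ToolResult, fallback: string): string {
  if (typeof result.error === "string") return result.error;
  return result.error?.message || fallback;
}
```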
@@ -883,15 +887,33 @@ const skills_execute = tool({
 // Compaction Hook - Swarm Recovery Context
 // =============================================================================
 
+/**
+ * Detection result with confidence level
+ */
+interface SwarmDetection {
+  detected: boolean;
+  confidence: "high" | "medium" | "low" | "none";
+  reasons: string[];
+}
+
 /**
  * Check for swarm sign - evidence a swarm passed through
  *
- *
- * -
- * - Open
- * -
+ * Uses multiple signals with different confidence levels:
+ * - HIGH: in_progress cells (active work)
+ * - MEDIUM: Open subtasks, unclosed epics, recently updated cells
+ * - LOW: Any cells exist
+ *
+ * Philosophy: Err on the side of continuation.
+ * False positive = extra context (low cost)
+ * False negative = lost swarm (high cost)
  */
-async function hasSwarmSign(): Promise<boolean> {
+async function detectSwarm(): Promise<SwarmDetection> {
+  const reasons: string[] = [];
+  let highConfidence = false;
+  let mediumConfidence = false;
+  let lowConfidence = false;
+
   try {
     const result = await new Promise<{ exitCode: number; stdout: string }>(
       (resolve) => {
@@ -909,24 +931,82 @@ async function hasSwarmSign(): Promise<boolean> {
       },
     );
 
-    if (result.exitCode !== 0)
+    if (result.exitCode !== 0) {
+      return { detected: false, confidence: "none", reasons: ["hive_query failed"] };
+    }
 
-    const
-    if (!Array.isArray(
+    const cells = JSON.parse(result.stdout);
+    if (!Array.isArray(cells) || cells.length === 0) {
+      return { detected: false, confidence: "none", reasons: ["no cells found"] };
+    }
 
-    //
-
-
-
-
-
-
-
-
+    // HIGH: Any in_progress cells
+    const inProgress = cells.filter(
+      (c: { status: string }) => c.status === "in_progress"
+    );
+    if (inProgress.length > 0) {
+      highConfidence = true;
+      reasons.push(`${inProgress.length} cells in_progress`);
+    }
+
+    // MEDIUM: Open subtasks (cells with parent_id)
+    const subtasks = cells.filter(
+      (c: { status: string; parent_id?: string }) =>
+        c.status === "open" && c.parent_id
+    );
+    if (subtasks.length > 0) {
+      mediumConfidence = true;
+      reasons.push(`${subtasks.length} open subtasks`);
+    }
+
+    // MEDIUM: Unclosed epics
+    const openEpics = cells.filter(
+      (c: { status: string; type?: string }) =>
+        c.type === "epic" && c.status !== "closed"
+    );
+    if (openEpics.length > 0) {
+      mediumConfidence = true;
+      reasons.push(`${openEpics.length} unclosed epics`);
+    }
+
+    // MEDIUM: Recently updated cells (last hour)
+    const oneHourAgo = Date.now() - 60 * 60 * 1000;
+    const recentCells = cells.filter(
+      (c: { updated_at?: number }) => c.updated_at && c.updated_at > oneHourAgo
     );
+    if (recentCells.length > 0) {
+      mediumConfidence = true;
+      reasons.push(`${recentCells.length} cells updated in last hour`);
+    }
+
+    // LOW: Any cells exist at all
+    if (cells.length > 0) {
+      lowConfidence = true;
+      reasons.push(`${cells.length} total cells in hive`);
+    }
   } catch {
-
+    // Detection failed, use fallback
+    lowConfidence = true;
+    reasons.push("Detection error, using fallback");
   }
+
+  // Determine overall confidence
+  let confidence: "high" | "medium" | "low" | "none";
+  if (highConfidence) {
+    confidence = "high";
+  } else if (mediumConfidence) {
+    confidence = "medium";
+  } else if (lowConfidence) {
+    confidence = "low";
+  } else {
+    confidence = "none";
+  }
+
+  return {
+    detected: confidence !== "none",
+    confidence,
+    reasons,
+  };
 }
 
 /**
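The ladder is strictly ordered: one high signal outranks any number of medium or low signals. A worked example of what `detectSwarm` would report for a hypothetical hive (cell data invented for illustration; the real function shells out to hive_query, which is omitted here):

```ts
// Hypothetical cells returned by hive_query.
const cells = [
  { status: "in_progress", type: "task" },              // HIGH signal
  { status: "open", parent_id: "bd-42", type: "task" }, // MEDIUM signal
];
// Expected result per the logic above:
// {
//   detected: true,
//   confidence: "high",  // in_progress wins over the medium/low signals
//   reasons: ["1 cells in_progress", "1 open subtasks", "2 total cells in hive"],
// }
```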
@@ -992,6 +1072,47 @@ Extract from session context:
 **You are not waiting for instructions. You are the coordinator. Coordinate.**
 `;
 
+/**
+ * Fallback detection prompt - tells the compactor what to look for
+ *
+ * Used when we can't definitively detect a swarm but want to be safe.
+ * The compactor can check the conversation context for these patterns.
+ */
+const SWARM_DETECTION_FALLBACK = `## 🐝 Swarm Detection - Check Your Context
+
+**IMPORTANT:** Before summarizing, check if this session involves an active swarm.
+
+Look for ANY of these patterns in the conversation:
+
+### Tool Calls (definite swarm sign)
+- \`swarm_decompose\`, \`swarm_spawn_subtask\`, \`swarm_status\`, \`swarm_complete\`
+- \`swarmmail_init\`, \`swarmmail_reserve\`, \`swarmmail_send\`
+- \`hive_create_epic\`, \`hive_start\`, \`hive_close\`
+
+### IDs and Names
+- Cell IDs: \`bd-xxx\`, \`bd-xxx.N\` (subtask format)
+- Agent names: BlueLake, RedMountain, GreenValley, etc.
+- Epic references: "epic", "subtask", "parent"
+
+### Coordination Language
+- "spawn", "worker", "coordinator"
+- "reserve", "reservation", "files"
+- "blocked", "unblock", "dependency"
+- "progress", "complete", "in_progress"
+
+### If You Find Swarm Evidence
+
+Include this in your summary:
+1. Epic ID and title
+2. Project path
+3. Subtask status (running/blocked/done/pending)
+4. Any blockers or issues
+5. What should happen next
+
+**Then tell the resumed session:**
+"This is an active swarm. Check swarm_status and swarmmail_inbox immediately."
+`;
+
 // Extended hooks type to include experimental compaction hook
 type ExtendedHooks = Hooks & {
   "experimental.session.compacting"?: (
@@ -1065,15 +1186,23 @@ export const SwarmPlugin: Plugin = async (
       skills_execute,
     },
 
-    // Swarm-aware compaction hook -
+    // Swarm-aware compaction hook - injects context based on detection confidence
     "experimental.session.compacting": async (
       _input: { sessionID: string },
       output: { context: string[] },
     ) => {
-      const hasSign = await hasSwarmSign();
-      if (hasSign) {
-        output.context.push(SWARM_COMPACTION_CONTEXT);
+      const detection = await detectSwarm();
+
+      if (detection.confidence === "high" || detection.confidence === "medium") {
+        // Definite or probable swarm - inject full context
+        const header = `[Swarm detected: ${detection.reasons.join(", ")}]\n\n`;
+        output.context.push(header + SWARM_COMPACTION_CONTEXT);
+      } else if (detection.confidence === "low") {
+        // Possible swarm - inject fallback detection prompt
+        const header = `[Possible swarm: ${detection.reasons.join(", ")}]\n\n`;
+        output.context.push(header + SWARM_DETECTION_FALLBACK);
       }
+      // confidence === "none" - no injection, probably not a swarm
     },
   };
 };
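A sketch of what the hook does at compaction time, written as a hypothetical host-side invocation (OpenCode itself drives this hook; the call below only illustrates the contract):

```ts
// Hypothetical direct invocation for illustration.
const output = { context: [] as string[] };
await hooks["experimental.session.compacting"]?.({ sessionID: "ses_123" }, output);

// high/medium confidence -> output.context gains
//   "[Swarm detected: ...]" + SWARM_COMPACTION_CONTEXT
// low confidence          -> "[Possible swarm: ...]" + SWARM_DETECTION_FALLBACK
// none                    -> output.context is left untouched
```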
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "opencode-swarm-plugin",
-  "version": "0.29.0",
+  "version": "0.30.2",
   "description": "Multi-agent swarm coordination for OpenCode with learning capabilities, beads integration, and Agent Mail",
   "type": "module",
   "main": "./dist/index.js",

@@ -32,8 +32,10 @@
   "dependencies": {
     "@clack/prompts": "^0.11.0",
     "@opencode-ai/plugin": "^1.0.134",
+    "effect": "^3.19.12",
     "gray-matter": "^4.0.3",
     "ioredis": "^5.4.1",
+    "minimatch": "^10.1.1",
     "swarm-mail": "0.4.0",
     "zod": "4.1.8"
   },
package/src/swarm.integration.test.ts
CHANGED

@@ -16,9 +16,11 @@ import {
   hive_start,
   hive_ready,
   hive_link_thread,
+  hive_sync,
   HiveError,
   getHiveAdapter,
   setHiveWorkingDirectory,
+  getHiveWorkingDirectory,
   // Legacy aliases for backward compatibility tests
   beads_link_thread,
   BeadError,

@@ -1120,6 +1122,118 @@ describe("beads integration", () => {
   });
 });
 
+describe("hive_sync", () => {
+  it("commits .hive changes before pulling (regression test for unstaged changes error)", async () => {
+    const { mkdirSync, rmSync, writeFileSync, existsSync } = await import("node:fs");
+    const { join } = await import("node:path");
+    const { tmpdir } = await import("node:os");
+    const { execSync } = await import("node:child_process");
+
+    // Create a temp git repository
+    const tempProject = join(tmpdir(), `hive-sync-test-${Date.now()}`);
+    mkdirSync(tempProject, { recursive: true });
+
+    // Initialize git repo
+    execSync("git init", { cwd: tempProject });
+    execSync('git config user.email "test@example.com"', { cwd: tempProject });
+    execSync('git config user.name "Test User"', { cwd: tempProject });
+
+    // Create .hive directory and issues.jsonl
+    const hiveDir = join(tempProject, ".hive");
+    mkdirSync(hiveDir, { recursive: true });
+    const issuesPath = join(hiveDir, "issues.jsonl");
+    writeFileSync(issuesPath, "");
+
+    // Initial commit
+    execSync("git add .", { cwd: tempProject });
+    execSync('git commit -m "initial commit"', { cwd: tempProject });
+
+    // Set working directory for hive commands
+    const originalDir = getHiveWorkingDirectory();
+    setHiveWorkingDirectory(tempProject);
+
+    try {
+      // Create a cell (this will mark it dirty and flush will write to JSONL)
+      await hive_create.execute(
+        { title: "Sync test cell", type: "task" },
+        mockContext,
+      );
+
+      // Sync with auto_pull=false (skip pull since no remote configured)
+      const result = await hive_sync.execute(
+        { auto_pull: false },
+        mockContext,
+      );
+
+      // Should succeed
+      expect(result).toContain("successfully");
+
+      // Verify .hive changes were committed (working tree should be clean)
+      const status = execSync("git status --porcelain", {
+        cwd: tempProject,
+        encoding: "utf-8",
+      });
+      expect(status.trim()).toBe("");
+
+      // Verify commit exists
+      const log = execSync("git log --oneline", {
+        cwd: tempProject,
+        encoding: "utf-8",
+      });
+      expect(log).toContain("chore: sync hive");
+    } finally {
+      // Restore original working directory
+      setHiveWorkingDirectory(originalDir);
+
+      // Cleanup
+      rmSync(tempProject, { recursive: true, force: true });
+    }
+  });
+
+  it("handles case with no changes to commit", async () => {
+    const { mkdirSync, rmSync, writeFileSync } = await import("node:fs");
+    const { join } = await import("node:path");
+    const { tmpdir } = await import("node:os");
+    const { execSync } = await import("node:child_process");
+
+    // Create temp git repo
+    const tempProject = join(tmpdir(), `hive-sync-test-${Date.now()}`);
+    mkdirSync(tempProject, { recursive: true });
+
+    // Initialize git
+    execSync("git init", { cwd: tempProject });
+    execSync('git config user.email "test@example.com"', { cwd: tempProject });
+    execSync('git config user.name "Test User"', { cwd: tempProject });
+
+    // Create .hive directory with committed issues.jsonl
+    const hiveDir = join(tempProject, ".hive");
+    mkdirSync(hiveDir, { recursive: true });
+    writeFileSync(join(hiveDir, "issues.jsonl"), "");
+
+    // Commit everything
+    execSync("git add .", { cwd: tempProject });
+    execSync('git commit -m "initial"', { cwd: tempProject });
+
+    // Set working directory
+    const originalDir = getHiveWorkingDirectory();
+    setHiveWorkingDirectory(tempProject);
+
+    try {
+      // Sync with no changes (should handle gracefully)
+      const result = await hive_sync.execute(
+        { auto_pull: false },
+        mockContext,
+      );
+
+      // Should return "No cells to sync" since no dirty cells
+      expect(result).toContain("No cells to sync");
+    } finally {
+      setHiveWorkingDirectory(originalDir);
+      rmSync(tempProject, { recursive: true, force: true });
+    }
+  });
+});
+
 describe("mergeHistoricBeads", () => {
   it("merges empty base file - no changes", async () => {
     const { mergeHistoricBeads } = await import("./hive");
package/src/hive.ts
CHANGED

@@ -1053,38 +1053,49 @@ export const hive_sync = tool({
       }
     }
 
-    // 6. Pull if requested
+    // 6. Pull if requested (check if remote exists first)
     if (autoPull) {
-      const pullResult = await withTimeout(
-        runGitCommand(["pull", "--rebase"]),
-        TIMEOUT_MS,
-        "git pull --rebase",
-      );
+      const remoteCheckResult = await runGitCommand(["remote"]);
+      const hasRemote = remoteCheckResult.stdout.trim() !== "";
 
-      if (pullResult.exitCode !== 0) {
-        throw new HiveError(
-          `Failed to pull: ${pullResult.stderr}`,
+      if (hasRemote) {
+        const pullResult = await withTimeout(
+          runGitCommand(["pull", "--rebase"]),
+          TIMEOUT_MS,
           "git pull --rebase",
-          pullResult.exitCode,
         );
+
+        if (pullResult.exitCode !== 0) {
+          throw new HiveError(
+            `Failed to pull: ${pullResult.stderr}`,
+            "git pull --rebase",
+            pullResult.exitCode,
+          );
+        }
       }
     }
 
-    // 7. Push
-    const pushResult = await withTimeout(
-      runGitCommand(["push"]),
-      TIMEOUT_MS,
-      "git push",
-    );
-    if (pushResult.exitCode !== 0) {
-      throw new HiveError(
-        `Failed to push: ${pushResult.stderr}`,
+    // 7. Push (check if remote exists first)
+    const remoteCheckResult = await runGitCommand(["remote"]);
+    const hasRemote = remoteCheckResult.stdout.trim() !== "";
+
+    if (hasRemote) {
+      const pushResult = await withTimeout(
+        runGitCommand(["push"]),
+        TIMEOUT_MS,
         "git push",
-        pushResult.exitCode,
       );
+      if (pushResult.exitCode !== 0) {
+        throw new HiveError(
+          `Failed to push: ${pushResult.stderr}`,
+          "git push",
+          pushResult.exitCode,
+        );
+      }
+      return "Hive synced and pushed successfully";
+    } else {
+      return "Hive synced successfully (no remote configured)";
     }
-
-    return "Hive synced and pushed successfully";
   },
 });
 
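The new guard is the same for pull and push: `git remote` prints one remote name per line and nothing when none are configured, so empty stdout means there is nothing to pull from or push to. A minimal standalone sketch of the check (hypothetical `hasGitRemote` helper; hive.ts uses its own `runGitCommand` wrapper instead):

```ts
import { execFile } from "node:child_process";
import { promisify } from "node:util";

const run = promisify(execFile);

// True when `git remote` lists at least one configured remote.
async function hasGitRemote(cwd: string): Promise<boolean> {
  const { stdout } = await run("git", ["remote"], { cwd });
  return stdout.trim() !== "";
}
```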
package/src/index.ts
CHANGED

@@ -46,6 +46,7 @@ import { reviewTools } from "./swarm-review";
 import { repoCrawlTools } from "./repo-crawl";
 import { skillsTools, setSkillsProjectDirectory } from "./skills";
 import { mandateTools } from "./mandates";
+import { memoryTools } from "./memory-tools";
 import {
   guardrailOutput,
   DEFAULT_GUARDRAIL_CONFIG,

@@ -69,6 +70,7 @@ import {
  * - repo-crawl:* - GitHub API tools for repository research
  * - skills:* - Agent skills discovery, activation, and execution
  * - mandate:* - Agent voting system for collaborative knowledge curation
+ * - semantic-memory:* - Semantic memory with vector embeddings (Ollama + PGLite)
  *
  * @param input - Plugin context from OpenCode
  * @returns Plugin hooks including tools, events, and tool execution hooks

@@ -148,9 +150,10 @@ export const SwarmPlugin: Plugin = async (
    * - beads:* - Legacy aliases (deprecated, use hive:* instead)
    * - agent-mail:init, agent-mail:send, agent-mail:reserve, etc. (legacy MCP)
    * - swarm-mail:init, swarm-mail:send, swarm-mail:reserve, etc. (embedded)
-
-
-
+   * - repo-crawl:readme, repo-crawl:structure, etc.
+   * - mandate:file, mandate:vote, mandate:query, etc.
+   * - semantic-memory:store, semantic-memory:find, semantic-memory:get, etc.
+   */
    tool: {
      ...hiveTools,
      ...swarmMailTools,

@@ -161,6 +164,7 @@ export const SwarmPlugin: Plugin = async (
      ...repoCrawlTools,
      ...skillsTools,
      ...mandateTools,
+     ...memoryTools,
    },
 
    /**

@@ -249,11 +253,9 @@ export const SwarmPlugin: Plugin = async (
         await releaseReservations();
       }
 
-      //
-
-
-      void $`bd sync`.quiet().nothrow();
-      }
+      // Note: hive_sync should be called explicitly at session end
+      // Auto-sync was removed because bd CLI is deprecated
+      // The hive_sync tool handles flushing to JSONL and git commit/push
     },
   };
 };

@@ -419,6 +421,7 @@ export const allTools = {
   ...repoCrawlTools,
   ...skillsTools,
   ...mandateTools,
+  ...memoryTools,
 } as const;
 
 /**

@@ -646,3 +649,33 @@ export {
  * ```
  */
 export { SWARM_COMPACTION_CONTEXT, createCompactionHook } from "./compaction-hook";
+
+/**
+ * Re-export memory module
+ *
+ * Includes:
+ * - memoryTools - All semantic-memory tools (store, find, get, remove, validate, list, stats, check)
+ * - createMemoryAdapter - Factory function for memory adapter
+ * - resetMemoryCache - Cache management for testing
+ *
+ * Types:
+ * - MemoryAdapter - Memory adapter interface
+ * - StoreArgs, FindArgs, IdArgs, ListArgs - Tool argument types
+ * - StoreResult, FindResult, StatsResult, HealthResult, OperationResult - Result types
+ */
+export {
+  memoryTools,
+  createMemoryAdapter,
+  resetMemoryCache,
+  type MemoryAdapter,
+  type StoreArgs,
+  type FindArgs,
+  type IdArgs,
+  type ListArgs,
+  type StoreResult,
+  type FindResult,
+  type StatsResult,
+  type HealthResult,
+  type OperationResult,
+} from "./memory-tools";
+export type { Memory, SearchResult, SearchOptions } from "swarm-mail";

package/src/memory-tools.test.ts
ADDED

@@ -0,0 +1,111 @@
+/**
+ * Memory Tools Integration Tests
+ *
+ * Tests for semantic-memory_* tool registration and execution.
+ */
+
+import { describe, test, expect, beforeAll, afterAll } from "bun:test";
+import { memoryTools, resetMemoryCache } from "./memory-tools";
+import { closeAllSwarmMail } from "swarm-mail";
+
+describe("memory tools integration", () => {
+  afterAll(async () => {
+    resetMemoryCache();
+    await closeAllSwarmMail();
+  });
+
+  test("all tools are registered with correct names", () => {
+    const toolNames = Object.keys(memoryTools);
+    expect(toolNames).toContain("semantic-memory_store");
+    expect(toolNames).toContain("semantic-memory_find");
+    expect(toolNames).toContain("semantic-memory_get");
+    expect(toolNames).toContain("semantic-memory_remove");
+    expect(toolNames).toContain("semantic-memory_validate");
+    expect(toolNames).toContain("semantic-memory_list");
+    expect(toolNames).toContain("semantic-memory_stats");
+    expect(toolNames).toContain("semantic-memory_check");
+  });
+
+  test("tools have execute functions", () => {
+    for (const [name, tool] of Object.entries(memoryTools)) {
+      expect(typeof tool.execute).toBe("function");
+    }
+  });
+
+  describe("semantic-memory_store", () => {
+    test("executes and returns JSON", async () => {
+      const tool = memoryTools["semantic-memory_store"];
+      const result = await tool.execute(
+        {
+          information: "Test memory for tools integration",
+          tags: "test",
+        },
+        { sessionID: "test-session" } as any,
+      );
+
+      expect(typeof result).toBe("string");
+      const parsed = JSON.parse(result);
+      expect(parsed.id).toBeDefined();
+      expect(parsed.id).toMatch(/^mem_/);
+      expect(parsed.message).toContain("Stored memory");
+    });
+  });
+
+  describe("semantic-memory_find", () => {
+    test("executes and returns JSON array", async () => {
+      // Store a memory first
+      const storeTool = memoryTools["semantic-memory_store"];
+      await storeTool.execute(
+        {
+          information: "Findable test memory with unique keyword xyztest123",
+        },
+        { sessionID: "test-session" } as any,
+      );
+
+      // Search for it
+      const findTool = memoryTools["semantic-memory_find"];
+      const result = await findTool.execute(
+        {
+          query: "xyztest123",
+          limit: 5,
+        },
+        { sessionID: "test-session" } as any,
+      );
+
+      expect(typeof result).toBe("string");
+      const parsed = JSON.parse(result);
+      expect(parsed.results).toBeDefined();
+      expect(Array.isArray(parsed.results)).toBe(true);
+      expect(parsed.count).toBeGreaterThanOrEqual(0);
+    });
+  });
+
+  describe("semantic-memory_stats", () => {
+    test("returns memory and embedding counts", async () => {
+      const tool = memoryTools["semantic-memory_stats"];
+      const result = await tool.execute(
+        {},
+        { sessionID: "test-session" } as any,
+      );
+
+      expect(typeof result).toBe("string");
+      const parsed = JSON.parse(result);
+      expect(typeof parsed.memories).toBe("number");
+      expect(typeof parsed.embeddings).toBe("number");
+    });
+  });
+
+  describe("semantic-memory_check", () => {
+    test("checks Ollama health", async () => {
+      const tool = memoryTools["semantic-memory_check"];
+      const result = await tool.execute(
+        {},
+        { sessionID: "test-session" } as any,
+      );
+
+      expect(typeof result).toBe("string");
+      const parsed = JSON.parse(result);
+      expect(typeof parsed.ollama).toBe("boolean");
+    });
+  });
+});