@deeplake/hivemind 0.6.47

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,266 @@
1
+ #!/usr/bin/env node
2
+
3
+ // dist/src/hooks/codex/wiki-worker.js
4
+ import { readFileSync as readFileSync2, writeFileSync as writeFileSync2, existsSync as existsSync2, appendFileSync as appendFileSync2, mkdirSync as mkdirSync2, rmSync } from "node:fs";
5
+ import { execFileSync } from "node:child_process";
6
+ import { join as join3 } from "node:path";
7
+
8
+ // dist/src/hooks/summary-state.js
9
+ import { readFileSync, writeFileSync, writeSync, mkdirSync, renameSync, existsSync, unlinkSync, openSync, closeSync } from "node:fs";
10
+ import { homedir as homedir2 } from "node:os";
11
+ import { join as join2 } from "node:path";
12
+
13
+ // dist/src/utils/debug.js
14
+ import { appendFileSync } from "node:fs";
15
+ import { join } from "node:path";
16
+ import { homedir } from "node:os";
17
// Debug logging is opt-in via the HIVEMIND_DEBUG=1 environment variable.
var DEBUG = process.env.HIVEMIND_DEBUG === "1";
// All hook debug output is appended to one shared log file in the user's home dir.
var LOG = join(homedir(), ".deeplake", "hook-debug.log");
19
/**
 * Append one timestamped, tagged line to the shared hook debug log.
 * No-op unless HIVEMIND_DEBUG=1 was set at startup.
 * @param {string} tag - subsystem label shown in brackets
 * @param {string} msg - message text
 */
function log(tag, msg) {
  if (DEBUG) {
    const line = `${new Date().toISOString()} [${tag}] ${msg}\n`;
    appendFileSync(LOG, line);
  }
}
25
+
26
// dist/src/hooks/summary-state.js
var dlog = (msg) => log("summary-state", msg);
// Per-session summary bookkeeping lives under ~/.claude/hooks/summary-state.
var STATE_DIR = join2(homedir2(), ".claude", "hooks", "summary-state");
// Dummy shared buffer: Atomics.wait on it serves as a synchronous millisecond sleep.
var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4));
30
/** Absolute path of the persisted JSON state file for a session. */
function statePath(sessionId) {
  const fileName = `${sessionId}.json`;
  return join2(STATE_DIR, fileName);
}
33
/** Absolute path of the lock sentinel file for a session. */
function lockPath(sessionId) {
  const fileName = `${sessionId}.lock`;
  return join2(STATE_DIR, fileName);
}
36
/**
 * Load the persisted state for a session.
 * @param {string} sessionId
 * @returns {object|null} parsed state, or null when the file is missing or corrupt
 */
function readState(sessionId) {
  const file = statePath(sessionId);
  if (!existsSync(file)) {
    return null;
  }
  try {
    const raw = readFileSync(file, "utf-8");
    return JSON.parse(raw);
  } catch {
    // Corrupt/partial JSON is treated the same as "no state".
    return null;
  }
}
46
/**
 * Atomically persist session state: write to a uniquely named temp file,
 * then rename over the target so concurrent readers never see a partial write.
 * @param {string} sessionId
 * @param {object} state - JSON-serializable state object
 */
function writeState(sessionId, state) {
  mkdirSync(STATE_DIR, { recursive: true });
  const target = statePath(sessionId);
  // pid + timestamp keeps temp names unique across concurrent writers.
  const tmpFile = `${target}.${process.pid}.${Date.now()}.tmp`;
  writeFileSync(tmpFile, JSON.stringify(state));
  renameSync(tmpFile, target);
}
53
/**
 * Run `fn` under a per-session read-modify-write file lock.
 * The lock is a sentinel file created with "wx" (O_EXCL) so creation fails
 * while another holder exists; contenders spin with a ~10ms Atomics.wait
 * sleep. A lock that cannot be acquired within 2s is presumed stale
 * (crashed holder) and reclaimed.
 *
 * Fix: after reclaiming a stale lock, the deadline is reset. Previously the
 * deadline stayed in the past, so on the next iteration a lock just acquired
 * by a competing process would immediately be treated as stale and deleted
 * too, breaking mutual exclusion under contention.
 *
 * @param {string} sessionId
 * @param {Function} fn - critical section; its return value is propagated
 */
function withRmwLock(sessionId, fn) {
  mkdirSync(STATE_DIR, { recursive: true });
  const rmwLock = statePath(sessionId) + ".rmw";
  let deadline = Date.now() + 2e3;
  let fd = null;
  while (fd === null) {
    try {
      fd = openSync(rmwLock, "wx"); // "wx": exclusive create, fails with EEXIST if held
    } catch (e) {
      if (e.code !== "EEXIST")
        throw e;
      if (Date.now() > deadline) {
        dlog(`rmw lock deadline exceeded for ${sessionId}, reclaiming stale lock`);
        try {
          unlinkSync(rmwLock);
        } catch (unlinkErr) {
          dlog(`stale rmw lock unlink failed for ${sessionId}: ${unlinkErr.message}`);
        }
        // Reset the deadline so a lock acquired by a competitor right after
        // this reclaim is given a full grace period before being stolen.
        deadline = Date.now() + 2e3;
        continue;
      }
      Atomics.wait(YIELD_BUF, 0, 0, 10); // synchronous ~10ms backoff
    }
  }
  try {
    return fn();
  } finally {
    closeSync(fd);
    try {
      unlinkSync(rmwLock);
    } catch (unlinkErr) {
      dlog(`rmw lock cleanup failed for ${sessionId}: ${unlinkErr.message}`);
    }
  }
}
87
/**
 * Record a completed summary upload in the session's sidecar state,
 * serialized against concurrent writers via the RMW lock.
 * @param {string} sessionId
 * @param {number} jsonlLines - number of JSONL events covered by this summary
 */
function finalizeSummary(sessionId, jsonlLines) {
  withRmwLock(sessionId, () => {
    const prev = readState(sessionId);
    // totalCount never decreases, even if a later run sees fewer lines.
    const total = Math.max(prev?.totalCount ?? 0, jsonlLines);
    writeState(sessionId, {
      lastSummaryAt: Date.now(),
      lastSummaryCount: jsonlLines,
      totalCount: total
    });
  });
}
97
/**
 * Best-effort removal of a session's lock file.
 * A missing lock (ENOENT) is the normal already-released case; any other
 * failure is only logged.
 */
function releaseLock(sessionId) {
  try {
    unlinkSync(lockPath(sessionId));
  } catch (e) {
    if (e?.code === "ENOENT") return;
    dlog(`releaseLock unlink failed for ${sessionId}: ${e.message}`);
  }
}
106
+
107
+ // dist/src/hooks/upload-summary.js
108
+ import { randomUUID } from "node:crypto";
109
/**
 * Escape a string for embedding inside a single-quoted SQL literal:
 * backslashes are doubled, single quotes are doubled, and non-printable
 * control characters are stripped (newline, carriage return and tab survive).
 * @param {string} s
 * @returns {string} escaped text
 */
function esc(s) {
  const noBackslashes = s.replaceAll("\\", "\\\\");
  const noQuotes = noBackslashes.replaceAll("'", "''");
  return noQuotes.replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, "");
}
112
/**
 * Pull the body of the "## What Happened" section out of a summary document,
 * trimmed and capped at 300 characters.
 * @param {string} text - full markdown summary
 * @returns {string} section body, or "completed" when the section is absent
 */
function extractDescription(text) {
  const m = /## What Happened\n([\s\S]*?)(?=\n##|$)/.exec(text);
  if (!m) {
    return "completed";
  }
  return m[1].trim().slice(0, 300);
}
116
/**
 * Insert or update a summary row in the memory table, keyed by virtual path.
 * NOTE(review): SQL is built by string interpolation; `esc` doubles quotes
 * and backslashes and strips control chars, but a parameterized query API
 * would be safer if the backend offers one.
 * @param {Function} query2 - async SQL executor returning row objects
 * @param {object} params - tableName, vpath, fname, userName, project, agent,
 *                          text, and optional ts (ISO timestamp)
 * @returns {Promise<object>} which path was taken ("insert"/"update"),
 *                            the SQL issued, and desc/summary lengths
 */
async function uploadSummary(query2, params) {
  const { tableName, vpath, fname, userName, project, agent, text } = params;
  // Caller may pin the timestamp; default to "now" in ISO form.
  const ts = params.ts ?? (/* @__PURE__ */ new Date()).toISOString();
  const desc = extractDescription(text);
  const sizeBytes = Buffer.byteLength(text);
  // Upsert: probe for an existing row at this virtual path first.
  const existing = await query2(`SELECT path FROM "${tableName}" WHERE path = '${esc(vpath)}' LIMIT 1`);
  if (existing.length > 0) {
    // Update path: only summary, size, description and timestamp change.
    const sql2 = `UPDATE "${tableName}" SET summary = E'${esc(text)}', size_bytes = ${sizeBytes}, description = E'${esc(desc)}', last_update_date = '${ts}' WHERE path = '${esc(vpath)}'`;
    await query2(sql2);
    return { path: "update", sql: sql2, descLength: desc.length, summaryLength: text.length };
  }
  // Insert path: fresh row with a random UUID id.
  const sql = `INSERT INTO "${tableName}" (id, path, filename, summary, author, mime_type, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ('${randomUUID()}', '${esc(vpath)}', '${esc(fname)}', E'${esc(text)}', '${esc(userName)}', 'text/markdown', ${sizeBytes}, '${esc(project)}', E'${esc(desc)}', '${esc(agent)}', '${ts}', '${ts}')`;
  await query2(sql);
  return { path: "insert", sql, descLength: desc.length, summaryLength: text.length };
}
131
+
132
// dist/src/hooks/codex/wiki-worker.js
var dlog2 = (msg) => log("codex-wiki-worker", msg);
// Worker configuration is supplied as a JSON file path in argv[2].
var cfg = JSON.parse(readFileSync2(process.argv[2], "utf-8"));
var tmpDir = cfg.tmpDir;
// Scratch files the codex prompt reads (session events) and writes (summary).
var tmpJsonl = join3(tmpDir, "session.jsonl");
var tmpSummary = join3(tmpDir, "summary.md");
138
/**
 * Append a timestamped line to the wiki-worker log file.
 * Swallows all errors: logging must never break the worker.
 * @param {string} msg
 */
function wlog(msg) {
  try {
    mkdirSync2(cfg.hooksDir, { recursive: true });
    const stamp = new Date().toISOString().replace("T", " ").slice(0, 19);
    appendFileSync2(cfg.wikiLog, `[${stamp}] wiki-worker(${cfg.sessionId}): ${msg}\n`);
  } catch {
  }
}
146
/**
 * SQL single-quoted-literal escaping (duplicate of `esc` from the bundled
 * summary module): double backslashes, double single quotes, strip
 * non-printable control characters.
 * @param {string} s
 * @returns {string} escaped text
 */
function esc2(s) {
  let out = s.replaceAll("\\", "\\\\");
  out = out.replaceAll("'", "''");
  return out.replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, "");
}
149
/**
 * POST a SQL query to the Activeloop tables API and return rows as
 * column-name keyed objects. Transient/auth HTTP errors are retried with
 * exponential backoff (2s base, doubled per attempt, capped at 30s, plus
 * up to 1s of jitter).
 * @param {string} sql
 * @param {number} retries - additional attempts allowed after the first
 * @returns {Promise<object[]>}
 * @throws {Error} on a non-retryable status or when retries are exhausted
 */
async function query(sql, retries = 4) {
  const RETRYABLE = new Set([401, 403, 429, 500, 502, 503]);
  for (let attempt = 0; attempt <= retries; attempt++) {
    const resp = await fetch(`${cfg.apiUrl}/workspaces/${cfg.workspaceId}/tables/query`, {
      method: "POST",
      headers: {
        Authorization: `Bearer ${cfg.token}`,
        "Content-Type": "application/json",
        "X-Activeloop-Org-Id": cfg.orgId
      },
      body: JSON.stringify({ query: sql })
    });
    if (resp.ok) {
      const payload = await resp.json();
      if (!payload.columns || !payload.rows) {
        return [];
      }
      // Zip the positional row arrays with the column names.
      return payload.rows.map((row) => Object.fromEntries(payload.columns.map((col, i) => [col, row[i]])));
    }
    if (attempt < retries && RETRYABLE.has(resp.status)) {
      const base = Math.min(3e4, 2e3 * Math.pow(2, attempt));
      const delay = base + Math.floor(Math.random() * 1e3);
      wlog(`API ${resp.status}, retrying in ${delay}ms (attempt ${attempt + 1}/${retries})`);
      await new Promise((resolve) => setTimeout(resolve, delay));
      continue;
    }
    throw new Error(`API ${resp.status}: ${(await resp.text()).slice(0, 200)}`);
  }
  return [];
}
178
/** Remove the worker's temp directory; failure is logged, never fatal. */
function cleanup() {
  try {
    rmSync(tmpDir, { recursive: true, force: true });
  } catch (cleanupErr) {
    dlog2(`cleanup failed to remove ${tmpDir}: ${cleanupErr.message}`);
  }
}
185
// Orchestrates one wiki-summary pass: fetch this session's events from the
// server, feed them (plus any prior summary) to `codex exec`, upload the
// regenerated summary, then clean up scratch files and release the lock.
async function main() {
  try {
    wlog("fetching session events");
    // All events for this session, oldest first.
    const rows = await query(`SELECT message, creation_date FROM "${cfg.sessionsTable}" WHERE path LIKE E'${esc2(`/sessions/%${cfg.sessionId}%`)}' ORDER BY creation_date ASC`);
    if (rows.length === 0) {
      wlog("no session events found \u2014 exiting");
      return;
    }
    // Messages may be stored as strings or JSON values; normalize to JSONL.
    const jsonlContent = rows.map((r) => typeof r.message === "string" ? r.message : JSON.stringify(r.message)).join("\n");
    const jsonlLines = rows.length;
    const pathRows = await query(`SELECT DISTINCT path FROM "${cfg.sessionsTable}" WHERE path LIKE '${esc2(`/sessions/%${cfg.sessionId}%`)}' LIMIT 1`);
    const jsonlServerPath = pathRows.length > 0 ? pathRows[0].path : `/sessions/unknown/${cfg.sessionId}.jsonl`;
    writeFileSync2(tmpJsonl, jsonlContent);
    wlog(`found ${jsonlLines} events at ${jsonlServerPath}`);
    // Resume support: seed the scratch summary with any existing summary and
    // recover the "JSONL offset" marker recorded inside it.
    let prevOffset = 0;
    try {
      const sumRows = await query(`SELECT summary FROM "${cfg.memoryTable}" WHERE path = '${esc2(`/summaries/${cfg.userName}/${cfg.sessionId}.md`)}' LIMIT 1`);
      if (sumRows.length > 0 && sumRows[0]["summary"]) {
        const existing = sumRows[0]["summary"];
        const match = existing.match(/\*\*JSONL offset\*\*:\s*(\d+)/);
        if (match)
          prevOffset = parseInt(match[1], 10);
        writeFileSync2(tmpSummary, existing);
        wlog(`existing summary found, offset=${prevOffset}`);
      }
    } catch {
      // Best effort: no prior summary is the normal first-run case.
    }
    // Fill the prompt template's placeholders with this run's values.
    const prompt = cfg.promptTemplate.replace(/__JSONL__/g, tmpJsonl).replace(/__SUMMARY__/g, tmpSummary).replace(/__SESSION_ID__/g, cfg.sessionId).replace(/__PROJECT__/g, cfg.project).replace(/__PREV_OFFSET__/g, String(prevOffset)).replace(/__JSONL_LINES__/g, String(jsonlLines)).replace(/__JSONL_SERVER_PATH__/g, jsonlServerPath);
    wlog("running codex exec");
    try {
      // HIVEMIND_CAPTURE=false keeps the child codex run from being captured
      // itself; the 120s timeout bounds the summarization step.
      execFileSync(cfg.codexBin, [
        "exec",
        "--dangerously-bypass-approvals-and-sandbox",
        prompt
      ], {
        stdio: ["ignore", "pipe", "pipe"],
        timeout: 12e4,
        env: { ...process.env, HIVEMIND_WIKI_WORKER: "1", HIVEMIND_CAPTURE: "false" }
      });
      wlog("codex exec exited (code 0)");
    } catch (e) {
      // Non-fatal: a failed run may still have produced a usable summary file.
      wlog(`codex exec failed: ${e.status ?? e.message}`);
    }
    if (existsSync2(tmpSummary)) {
      const text = readFileSync2(tmpSummary, "utf-8");
      if (text.trim()) {
        const fname = `${cfg.sessionId}.md`;
        const vpath = `/summaries/${cfg.userName}/${fname}`;
        const result = await uploadSummary(query, {
          tableName: cfg.memoryTable,
          vpath,
          fname,
          userName: cfg.userName,
          project: cfg.project,
          agent: "codex",
          sessionId: cfg.sessionId,
          text
        });
        wlog(`uploaded ${vpath} (summary=${result.summaryLength}, desc=${result.descLength})`);
        try {
          // Record progress so the next run knows how many lines were summarized.
          finalizeSummary(cfg.sessionId, jsonlLines);
          wlog(`sidecar updated: lastSummaryCount=${jsonlLines}`);
        } catch (e) {
          wlog(`sidecar update failed: ${e.message}`);
        }
      }
    } else {
      wlog("no summary file generated");
    }
    wlog("done");
  } catch (e) {
    wlog(`fatal: ${e.message}`);
  } finally {
    // Always remove scratch files and release the session lock, even on error.
    cleanup();
    try {
      releaseLock(cfg.sessionId);
    } catch (releaseErr) {
      dlog2(`releaseLock failed in finally for ${cfg.sessionId}: ${releaseErr.message}`);
    }
  }
}
main();
@@ -0,0 +1,65 @@
1
+ ---
2
+ name: hivemind-memory
3
+ description: Global team and org memory powered by Activeloop. ALWAYS check BOTH built-in memory AND Hivemind memory when recalling information.
4
+ allowed-tools: Bash
5
+ ---
6
+
7
+ # Hivemind Memory
8
+
9
+ You have persistent memory at `~/.deeplake/memory/` — global memory shared across all sessions, users, and agents in the org.
10
+
11
+ ## Memory Structure
12
+
13
+ ```
14
+ ~/.deeplake/memory/
15
+ ├── index.md ← START HERE — table of all sessions
16
+ ├── summaries/
17
+ │ ├── session-abc.md ← AI-generated wiki summary
18
+ │ └── session-xyz.md
19
+ └── sessions/
20
+ └── username/
21
+ ├── user_org_ws_slug1.jsonl ← raw session data
22
+ └── user_org_ws_slug2.jsonl
23
+ ```
24
+
25
+ ## How to Search
26
+
27
+ 1. **First**: Read `~/.deeplake/memory/index.md` — quick scan of all sessions with dates, projects, descriptions
28
+ 2. **If you need details**: Read the specific summary at `~/.deeplake/memory/summaries/<session>.md`
29
+ 3. **If you need raw data**: Read the session JSONL at `~/.deeplake/memory/sessions/<user>/<file>.jsonl`
30
+ 4. **Keyword search**: `grep -r "keyword" ~/.deeplake/memory/`
31
+
32
+ Do NOT jump straight to reading raw JSONL files. Always start with index.md and summaries.
33
+
34
+ ## Organization Management
35
+
36
+ Each argument is separate — do NOT quote subcommands together. The auth command is at `$CODEX_PLUGIN_ROOT/bundle/commands/auth-login.js` (or check the session context for the resolved path):
37
+ - `node "<path>/auth-login.js" login` — SSO login
38
+ - `node "<path>/auth-login.js" whoami` — show current user/org
39
+ - `node "<path>/auth-login.js" org list` — list organizations
40
+ - `node "<path>/auth-login.js" org switch <name-or-id>` — switch organization
41
+ - `node "<path>/auth-login.js" workspaces` — list workspaces
42
+ - `node "<path>/auth-login.js" workspace <id>` — switch workspace
43
+ - `node "<path>/auth-login.js" invite <email> <ADMIN|WRITE|READ>` — invite member (ALWAYS ask user which role first)
44
+ - `node "<path>/auth-login.js" members` — list members
45
+ - `node "<path>/auth-login.js" remove <user-id>` — remove member
46
+ - `node "<path>/auth-login.js" --help` — show all commands
47
+
48
+ ## Important: Bash Only
49
+
50
+ Only use bash commands (cat, ls, grep, echo, jq, head, tail, sed, awk, etc.) to interact with `~/.deeplake/memory/`. Do NOT use python, python3, node, curl, or other interpreters — they are not available in the memory filesystem. If a task seems to require Python, rewrite it using bash tools (e.g., `cat file.json | jq 'keys | length'`).
51
+
52
+ ## Limits
53
+
54
+ Do NOT spawn subagents to read deeplake memory. If a file returns empty after 2 attempts, skip it and move on. Report what you found rather than exhaustively retrying.
55
+
56
+ ## Getting Started
57
+
58
+ After installing the plugin:
59
+ 1. Authenticate with `node "<AUTH_CMD>" login`
60
+ 2. Start using memory — ask questions, Codex automatically captures and searches
61
+
62
+ ## Configuration
63
+
64
+ - `HIVEMIND_DEBUG=1 codex` — enable verbose logging to `~/.deeplake/hook-debug.log`
65
+ - `HIVEMIND_CAPTURE=false codex` — disable session capture