@deeplake/hivemind 0.6.47
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/marketplace.json +20 -0
- package/.claude-plugin/plugin.json +19 -0
- package/LICENSE +201 -0
- package/README.md +359 -0
- package/bundle/cli.js +1051 -0
- package/codex/bundle/capture.js +759 -0
- package/codex/bundle/commands/auth-login.js +862 -0
- package/codex/bundle/package.json +1 -0
- package/codex/bundle/pre-tool-use.js +2097 -0
- package/codex/bundle/session-start-setup.js +585 -0
- package/codex/bundle/session-start.js +129 -0
- package/codex/bundle/shell/deeplake-shell.js +69338 -0
- package/codex/bundle/stop.js +673 -0
- package/codex/bundle/wiki-worker.js +266 -0
- package/codex/skills/deeplake-memory/SKILL.md +65 -0
- package/cursor/bundle/capture.js +485 -0
- package/cursor/bundle/commands/auth-login.js +862 -0
- package/cursor/bundle/package.json +1 -0
- package/cursor/bundle/session-end.js +45 -0
- package/cursor/bundle/session-start.js +520 -0
- package/cursor/bundle/shell/deeplake-shell.js +69338 -0
- package/mcp/bundle/package.json +1 -0
- package/mcp/bundle/server.js +24068 -0
- package/openclaw/README.md +89 -0
- package/openclaw/dist/index.js +1714 -0
- package/openclaw/dist/package.json +1 -0
- package/openclaw/openclaw.plugin.json +56 -0
- package/openclaw/package.json +29 -0
- package/openclaw/skills/SKILL.md +61 -0
- package/package.json +69 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"type":"module"}
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
// dist/src/utils/stdin.js
/**
 * Read all of stdin and parse it as JSON.
 * Resolves with the parsed hook payload; rejects when the stream errors
 * or the buffered text is not valid JSON.
 */
function readStdin() {
  return new Promise((resolve, reject) => {
    const chunks = [];
    process.stdin.setEncoding("utf-8");
    process.stdin.on("error", reject);
    process.stdin.on("data", (piece) => chunks.push(piece));
    process.stdin.on("end", () => {
      try {
        resolve(JSON.parse(chunks.join("")));
      } catch (err) {
        reject(new Error(`Failed to parse hook input: ${err}`));
      }
    });
  });
}
|
|
19
|
+
|
|
20
|
+
// dist/src/utils/debug.js
import { appendFileSync } from "node:fs";
import { join } from "node:path";
import { homedir } from "node:os";
// Debug logging is opt-in via HIVEMIND_DEBUG=1; entries are appended under ~/.deeplake.
var DEBUG = process.env.HIVEMIND_DEBUG === "1";
var LOG = join(homedir(), ".deeplake", "hook-debug.log");
/** Append one timestamped, tagged line to the debug log (no-op unless DEBUG). */
function log(tag, msg) {
  if (!DEBUG) return;
  const stamp = new Date().toISOString();
  appendFileSync(LOG, `${stamp} [${tag}] ${msg}\n`);
}
|
|
32
|
+
|
|
33
|
+
// dist/src/hooks/cursor/session-end.js
var log2 = (msg) => log("cursor-session-end", msg);
/**
 * Cursor session-end hook: records session id, end reason, and final
 * status to the debug log. Skipped entirely inside the wiki worker.
 */
async function main() {
  if (process.env.HIVEMIND_WIKI_WORKER === "1") return;
  const input = await readStdin();
  const sessionId = input.conversation_id ?? input.session_id ?? "?";
  const reason = input.reason ?? "?";
  const status = input.final_status ?? "?";
  log2(`session=${sessionId} reason=${reason} status=${status}`);
}
// Hooks must never fail the host: log the error and exit 0.
main().catch((e) => {
  log2(`fatal: ${e.message}`);
  process.exit(0);
});
|
|
@@ -0,0 +1,520 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
// dist/src/hooks/cursor/session-start.js
|
|
4
|
+
import { fileURLToPath } from "node:url";
|
|
5
|
+
import { dirname as dirname2, join as join6 } from "node:path";
|
|
6
|
+
|
|
7
|
+
// dist/src/commands/auth.js
import { readFileSync, writeFileSync, existsSync, mkdirSync, unlinkSync } from "node:fs";
import { join } from "node:path";
import { homedir } from "node:os";
import { execSync } from "node:child_process";
var CONFIG_DIR = join(homedir(), ".deeplake");
var CREDS_PATH = join(CONFIG_DIR, "credentials.json");
/**
 * Load saved credentials from ~/.deeplake/credentials.json.
 * Returns the parsed object, or null when the file is absent or unreadable.
 */
function loadCredentials() {
  if (!existsSync(CREDS_PATH)) return null;
  try {
    const raw = readFileSync(CREDS_PATH, "utf-8");
    return JSON.parse(raw);
  } catch {
    // Corrupt/unreadable file is treated the same as "not logged in".
    return null;
  }
}
|
|
23
|
+
|
|
24
|
+
// dist/src/config.js
import { readFileSync as readFileSync2, existsSync as existsSync2 } from "node:fs";
import { join as join2 } from "node:path";
import { homedir as homedir2, userInfo } from "node:os";
/**
 * Resolve the effective Hivemind configuration. Environment variables win
 * over ~/.deeplake/credentials.json. Returns null when the credentials
 * file is corrupt or when no token/org can be resolved at all.
 */
function loadConfig() {
  const home = homedir2();
  const credPath = join2(home, ".deeplake", "credentials.json");
  let creds = null;
  if (existsSync2(credPath)) {
    try {
      creds = JSON.parse(readFileSync2(credPath, "utf-8"));
    } catch {
      // A present-but-unparseable credentials file means "not configured".
      return null;
    }
  }
  const token = process.env.HIVEMIND_TOKEN ?? creds?.token;
  const orgId = process.env.HIVEMIND_ORG_ID ?? creds?.orgId;
  if (!token || !orgId) return null;
  return {
    token,
    orgId,
    orgName: creds?.orgName ?? orgId,
    userName: creds?.userName || userInfo().username || "unknown",
    workspaceId: process.env.HIVEMIND_WORKSPACE_ID ?? creds?.workspaceId ?? "default",
    apiUrl: process.env.HIVEMIND_API_URL ?? creds?.apiUrl ?? "https://api.deeplake.ai",
    tableName: process.env.HIVEMIND_TABLE ?? "memory",
    sessionsTableName: process.env.HIVEMIND_SESSIONS_TABLE ?? "sessions",
    memoryPath: process.env.HIVEMIND_MEMORY_PATH ?? join2(home, ".deeplake", "memory")
  };
}
|
|
55
|
+
|
|
56
|
+
// dist/src/deeplake-api.js
|
|
57
|
+
import { randomUUID } from "node:crypto";
|
|
58
|
+
import { existsSync as existsSync3, mkdirSync as mkdirSync2, readFileSync as readFileSync3, writeFileSync as writeFileSync2 } from "node:fs";
|
|
59
|
+
import { join as join4 } from "node:path";
|
|
60
|
+
import { tmpdir } from "node:os";
|
|
61
|
+
|
|
62
|
+
// dist/src/utils/debug.js
import { appendFileSync } from "node:fs";
import { join as join3 } from "node:path";
import { homedir as homedir3 } from "node:os";
// Opt-in file logger shared by the bundled hook modules (HIVEMIND_DEBUG=1).
var DEBUG = process.env.HIVEMIND_DEBUG === "1";
var LOG = join3(homedir3(), ".deeplake", "hook-debug.log");
/** Append a timestamped `[tag] message` line to the debug log when enabled. */
function log(tag, msg) {
  if (!DEBUG) return;
  appendFileSync(LOG, `${new Date().toISOString()} [${tag}] ${msg}\n`);
}
|
|
74
|
+
|
|
75
|
+
// dist/src/utils/sql.js
/**
 * Escape a string for embedding inside a single-quoted SQL literal:
 * backslashes and single quotes are doubled, then NUL and other
 * non-printable control characters (except \t, \n, \r) are stripped.
 */
function sqlStr(value) {
  const escaped = value.replace(/\\/g, "\\\\").replace(/'/g, "''");
  return escaped.replace(/\0/g, "").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, "");
}
|
|
79
|
+
|
|
80
|
+
// dist/src/deeplake-api.js
var log2 = (msg) => log("sdk", msg);
/** Collapse runs of whitespace in a SQL string and truncate it for logging. */
function summarizeSql(sql, maxLen = 220) {
  const compact = sql.replace(/\s+/g, " ").trim();
  if (compact.length <= maxLen) return compact;
  return `${compact.slice(0, maxLen)}...`;
}
|
|
86
|
+
/**
 * Emit a SQL trace line to stderr when tracing is enabled
 * (HIVEMIND_TRACE_SQL=1 or HIVEMIND_DEBUG=1), mirroring it into the
 * debug log file when HIVEMIND_DEBUG=1.
 */
function traceSql(msg) {
  const debugOn = process.env.HIVEMIND_DEBUG === "1";
  const enabled = process.env.HIVEMIND_TRACE_SQL === "1" || debugOn;
  if (!enabled) return;
  process.stderr.write(`[deeplake-sql] ${msg}\n`);
  if (debugOn) log2(msg);
}
|
|
95
|
+
// HTTP statuses worth retrying: rate limit plus transient server errors.
var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]);
var MAX_RETRIES = 3;
var BASE_DELAY_MS = 500;
// Upper bound on concurrent SQL queries (enforced by the Semaphore below).
var MAX_CONCURRENCY = 5;
// Per-request timeout; override with HIVEMIND_QUERY_TIMEOUT_MS (default 10s).
var QUERY_TIMEOUT_MS = Number(process.env.HIVEMIND_QUERY_TIMEOUT_MS ?? 1e4);
// Freshness window for on-disk "index created" marker files (default 6h).
var INDEX_MARKER_TTL_MS = Number(process.env.HIVEMIND_INDEX_MARKER_TTL_MS ?? 6 * 60 * 6e4);
|
|
101
|
+
/** Promise-based delay helper used by the retry/backoff loops. */
function sleep(ms) {
  return new Promise((done) => {
    setTimeout(done, ms);
  });
}
|
|
104
|
+
/** True when an error looks like a timeout or abort, by name or message. */
function isTimeoutError(error) {
  let name = "";
  let message;
  if (error instanceof Error) {
    name = error.name.toLowerCase();
    message = error.message.toLowerCase();
  } else {
    message = String(error).toLowerCase();
  }
  if (name.includes("timeout") || name === "aborterror") return true;
  return message.includes("timeout") || message.includes("timed out");
}
|
|
109
|
+
/** True when the server error indicates the index/relation already exists. */
function isDuplicateIndexError(error) {
  const message = (error instanceof Error ? error.message : String(error)).toLowerCase();
  const markers = [
    "duplicate key value violates unique constraint",
    "pg_class_relname_nsp_index",
    "already exists"
  ];
  return markers.some((marker) => message.includes(marker));
}
|
|
113
|
+
/** Matches INSERTs targeting the sessions-table layout (id, path, filename, message, ...). */
function isSessionInsertQuery(sql) {
  const sessionInsert = /^\s*insert\s+into\s+"[^"]+"\s*\(\s*id\s*,\s*path\s*,\s*filename\s*,\s*message\s*,/i;
  return sessionInsert.test(sql);
}
|
|
116
|
+
/** Heuristic: a 403 body that is an HTML/proxy error page, not an API response. */
function isTransientHtml403(text) {
  const body = text.toLowerCase();
  return ["<html", "403 forbidden", "cloudflare", "nginx"].some((marker) => body.includes(marker));
}
|
|
120
|
+
/** Directory for on-disk "index already created" marker files (env-overridable). */
function getIndexMarkerDir() {
  const override = process.env.HIVEMIND_INDEX_MARKER_DIR;
  return override ?? join4(tmpdir(), "hivemind-deeplake-indexes");
}
|
|
123
|
+
// Bounded-concurrency gate: at most `max` concurrent holders. release()
// hands its slot directly to the oldest waiter (FIFO), so `active` never
// exceeds `max`.
var Semaphore = class {
  max;
  waiting = [];
  active = 0;
  constructor(max) {
    this.max = max;
  }
  /** Resolve immediately when a slot is free; otherwise queue until release(). */
  async acquire() {
    if (this.active >= this.max) {
      // release() increments `active` on our behalf before waking us.
      await new Promise((wake) => this.waiting.push(wake));
      return;
    }
    this.active++;
  }
  /** Free a slot; if anyone is queued, transfer the slot to them. */
  release() {
    this.active--;
    const next = this.waiting.shift();
    if (next) {
      this.active++;
      next();
    }
  }
};
|
|
146
|
+
/**
 * Thin Deeplake SQL-over-HTTP client used by the hook bundles.
 *
 * Responsibilities:
 *  - run SQL with bounded concurrency, per-request timeouts, and
 *    retry/backoff on transient failures;
 *  - queue and flush row upserts into the memory table;
 *  - bootstrap the memory/sessions tables and lookup indexes, with an
 *    on-disk marker cache that skips repeated CREATE INDEX round-trips.
 *
 * Fix vs. previous revision: `ensureTable`/`ensureSessionsTable` re-checked
 * `tables.includes(...)` inside the branch that already guarantees the table
 * is absent — that inner guard was always true and has been removed.
 */
var DeeplakeApi = class {
  token;
  apiUrl;
  orgId;
  workspaceId;
  tableName;
  _pendingRows = [];
  _sem = new Semaphore(MAX_CONCURRENCY);
  _tablesCache = null;
  constructor(token, apiUrl, orgId, workspaceId, tableName) {
    this.token = token;
    this.apiUrl = apiUrl;
    this.orgId = orgId;
    this.workspaceId = workspaceId;
    this.tableName = tableName;
  }
  /** Execute SQL with retry on transient errors and bounded concurrency. */
  async query(sql) {
    const startedAt = Date.now();
    const summary = summarizeSql(sql);
    traceSql(`query start: ${summary}`);
    await this._sem.acquire();
    try {
      const rows = await this._queryWithRetry(sql);
      traceSql(`query ok (${Date.now() - startedAt}ms, rows=${rows.length}): ${summary}`);
      return rows;
    } catch (e) {
      const message = e instanceof Error ? e.message : String(e);
      traceSql(`query fail (${Date.now() - startedAt}ms): ${summary} :: ${message}`);
      throw e;
    } finally {
      this._sem.release();
    }
  }
  /**
   * POST the query, retrying network errors and retryable HTTP statuses
   * with exponential backoff + jitter. Timeouts fail fast (no retry).
   */
  async _queryWithRetry(sql) {
    let lastError;
    for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
      let resp;
      try {
        const signal = AbortSignal.timeout(QUERY_TIMEOUT_MS);
        resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables/query`, {
          method: "POST",
          headers: {
            Authorization: `Bearer ${this.token}`,
            "Content-Type": "application/json",
            "X-Activeloop-Org-Id": this.orgId
          },
          signal,
          body: JSON.stringify({ query: sql })
        });
      } catch (e) {
        if (isTimeoutError(e)) {
          // A server that hit our timeout once will likely hit it again;
          // surface the failure immediately instead of burning retries.
          lastError = new Error(`Query timeout after ${QUERY_TIMEOUT_MS}ms`);
          throw lastError;
        }
        lastError = e instanceof Error ? e : new Error(String(e));
        if (attempt < MAX_RETRIES) {
          const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
          log2(`query retry ${attempt + 1}/${MAX_RETRIES} (fetch error: ${lastError.message}) in ${delay.toFixed(0)}ms`);
          await sleep(delay);
          continue;
        }
        throw lastError;
      }
      if (resp.ok) {
        const raw = await resp.json();
        if (!raw?.rows || !raw?.columns)
          return [];
        // Re-shape the columnar payload into one plain object per row.
        return raw.rows.map((row) => Object.fromEntries(raw.columns.map((col, i) => [col, row[i]])));
      }
      const text = await resp.text().catch(() => "");
      // Session INSERTs additionally retry 401s and empty/HTML 403 bodies
      // (proxy/CDN hiccups). Parentheses added for explicit precedence —
      // semantics are unchanged (&& binds tighter than ||).
      const retryable403 = isSessionInsertQuery(sql) && (resp.status === 401 || (resp.status === 403 && (text.length === 0 || isTransientHtml403(text))));
      if (attempt < MAX_RETRIES && (RETRYABLE_CODES.has(resp.status) || retryable403)) {
        const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
        log2(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`);
        await sleep(delay);
        continue;
      }
      throw new Error(`Query failed: ${resp.status}: ${text.slice(0, 200)}`);
    }
    throw lastError ?? new Error("Query failed: max retries exceeded");
  }
  // ── Writes ──────────────────────────────────────────────────────────────────
  /** Queue rows for writing. Call commit() to flush. */
  appendRows(rows) {
    this._pendingRows.push(...rows);
  }
  /** Flush pending rows via SQL. Best-effort: per-row failures are swallowed. */
  async commit() {
    if (this._pendingRows.length === 0)
      return;
    const rows = this._pendingRows;
    this._pendingRows = [];
    const CONCURRENCY = 10;
    for (let i = 0; i < rows.length; i += CONCURRENCY) {
      const chunk = rows.slice(i, i + CONCURRENCY);
      await Promise.allSettled(chunk.map((r) => this.upsertRowSql(r)));
    }
    log2(`commit: ${rows.length} rows`);
  }
  /** Insert-or-update a row keyed by `path` (SELECT then INSERT/UPDATE). */
  async upsertRowSql(row) {
    const ts = new Date().toISOString();
    const cd = row.creationDate ?? ts;
    const lud = row.lastUpdateDate ?? ts;
    const exists = await this.query(`SELECT path FROM "${this.tableName}" WHERE path = '${sqlStr(row.path)}' LIMIT 1`);
    if (exists.length > 0) {
      let setClauses = `summary = E'${sqlStr(row.contentText)}', mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`;
      if (row.project !== void 0)
        setClauses += `, project = '${sqlStr(row.project)}'`;
      if (row.description !== void 0)
        setClauses += `, description = '${sqlStr(row.description)}'`;
      await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(row.path)}'`);
    } else {
      const id = randomUUID();
      let cols = "id, path, filename, summary, mime_type, size_bytes, creation_date, last_update_date";
      let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'${sqlStr(row.contentText)}', '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`;
      if (row.project !== void 0) {
        cols += ", project";
        vals += `, '${sqlStr(row.project)}'`;
      }
      if (row.description !== void 0) {
        cols += ", description";
        vals += `, '${sqlStr(row.description)}'`;
      }
      await this.query(`INSERT INTO "${this.tableName}" (${cols}) VALUES (${vals})`);
    }
  }
  /** Update specific columns on a row by path. */
  async updateColumns(path, columns) {
    const setClauses = Object.entries(columns).map(([col, val]) => typeof val === "number" ? `${col} = ${val}` : `${col} = '${sqlStr(String(val))}'`).join(", ");
    await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(path)}'`);
  }
  // ── Convenience ─────────────────────────────────────────────────────────────
  /** Create a BM25 search index on a column. */
  async createIndex(column) {
    await this.query(`CREATE INDEX IF NOT EXISTS idx_${sqlStr(column)}_bm25 ON "${this.tableName}" USING deeplake_index ("${column}")`);
  }
  /** Identifier-safe index name for a (table, suffix) pair. */
  buildLookupIndexName(table, suffix) {
    return `idx_${table}_${suffix}`.replace(/[^a-zA-Z0-9_]/g, "_");
  }
  /** Filesystem path of the marker recording that a lookup index exists. */
  getLookupIndexMarkerPath(table, suffix) {
    const markerKey = [
      this.workspaceId,
      this.orgId,
      table,
      suffix
    ].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_");
    return join4(getIndexMarkerDir(), `${markerKey}.json`);
  }
  /** True when a marker file exists and is younger than INDEX_MARKER_TTL_MS. */
  hasFreshLookupIndexMarker(table, suffix) {
    const markerPath = this.getLookupIndexMarkerPath(table, suffix);
    if (!existsSync3(markerPath))
      return false;
    try {
      const raw = JSON.parse(readFileSync3(markerPath, "utf-8"));
      const updatedAt = raw.updatedAt ? new Date(raw.updatedAt).getTime() : NaN;
      if (!Number.isFinite(updatedAt) || Date.now() - updatedAt > INDEX_MARKER_TTL_MS)
        return false;
      return true;
    } catch {
      return false;
    }
  }
  /** Persist a marker so future runs skip the CREATE INDEX round-trip. */
  markLookupIndexReady(table, suffix) {
    mkdirSync2(getIndexMarkerDir(), { recursive: true });
    writeFileSync2(this.getLookupIndexMarkerPath(table, suffix), JSON.stringify({ updatedAt: new Date().toISOString() }), "utf-8");
  }
  /** Create the lookup index once per TTL window; tolerate "already exists". */
  async ensureLookupIndex(table, suffix, columnsSql) {
    if (this.hasFreshLookupIndexMarker(table, suffix))
      return;
    const indexName = this.buildLookupIndexName(table, suffix);
    try {
      await this.query(`CREATE INDEX IF NOT EXISTS "${indexName}" ON "${table}" ${columnsSql}`);
      this.markLookupIndexReady(table, suffix);
    } catch (e) {
      if (isDuplicateIndexError(e)) {
        // Another process created it first — record success and move on.
        this.markLookupIndexReady(table, suffix);
        return;
      }
      log2(`index "${indexName}" skipped: ${e.message}`);
    }
  }
  /** List all tables in the workspace (with retry). */
  async listTables(forceRefresh = false) {
    if (!forceRefresh && this._tablesCache)
      return [...this._tablesCache];
    const { tables, cacheable } = await this._fetchTables();
    if (cacheable)
      this._tablesCache = [...tables];
    return tables;
  }
  /** GET the table list; cacheable=false on any failure so callers re-fetch. */
  async _fetchTables() {
    for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
      try {
        const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, {
          headers: {
            Authorization: `Bearer ${this.token}`,
            "X-Activeloop-Org-Id": this.orgId
          }
        });
        if (resp.ok) {
          const data = await resp.json();
          return {
            tables: (data.tables ?? []).map((t) => t.table_name),
            cacheable: true
          };
        }
        if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
          await sleep(BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200);
          continue;
        }
        return { tables: [], cacheable: false };
      } catch {
        if (attempt < MAX_RETRIES) {
          await sleep(BASE_DELAY_MS * Math.pow(2, attempt));
          continue;
        }
        return { tables: [], cacheable: false };
      }
    }
    return { tables: [], cacheable: false };
  }
  /** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
  async ensureTable(name) {
    const tbl = name ?? this.tableName;
    const tables = await this.listTables();
    if (!tables.includes(tbl)) {
      log2(`table "${tbl}" not found, creating`);
      await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
      log2(`table "${tbl}" created`);
      // tbl is known absent in this branch; update the cache unconditionally.
      this._tablesCache = [...tables, tbl];
    }
  }
  /** Create the sessions table (uses JSONB for message since every row is a JSON event). */
  async ensureSessionsTable(name) {
    const tables = await this.listTables();
    if (!tables.includes(name)) {
      log2(`table "${name}" not found, creating`);
      await this.query(`CREATE TABLE IF NOT EXISTS "${name}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
      log2(`table "${name}" created`);
      // name is known absent in this branch; update the cache unconditionally.
      this._tablesCache = [...tables, name];
    }
    await this.ensureLookupIndex(name, "path_creation_date", `("path", "creation_date")`);
  }
};
|
|
393
|
+
|
|
394
|
+
// dist/src/utils/stdin.js
/**
 * Buffer stdin to completion and JSON-parse it.
 * Rejects on stream errors or malformed JSON payloads.
 */
function readStdin() {
  return new Promise((resolve, reject) => {
    let buffered = "";
    process.stdin.setEncoding("utf-8");
    process.stdin.on("data", (chunk) => {
      buffered += chunk;
    });
    process.stdin.on("end", () => {
      try {
        resolve(JSON.parse(buffered));
      } catch (err) {
        reject(new Error(`Failed to parse hook input: ${err}`));
      }
    });
    process.stdin.on("error", reject);
  });
}
|
|
410
|
+
|
|
411
|
+
// dist/src/utils/version-check.js
import { readFileSync as readFileSync4 } from "node:fs";
import { dirname, join as join5 } from "node:path";
/**
 * Determine the installed plugin version.
 * First tries <bundleDir>/../<pluginManifestDir>/plugin.json; failing that,
 * walks up to five ancestor directories looking for the hivemind (or
 * hivemind-codex) package.json. Returns the version string or null.
 */
function getInstalledVersion(bundleDir, pluginManifestDir) {
  try {
    const pluginJson = join5(bundleDir, "..", pluginManifestDir, "plugin.json");
    const plugin = JSON.parse(readFileSync4(pluginJson, "utf-8"));
    if (plugin.version) return plugin.version;
  } catch {
    // Manifest missing or unreadable — fall through to the package.json walk.
  }
  let dir = bundleDir;
  for (let depth = 0; depth < 5; depth++) {
    try {
      const pkg = JSON.parse(readFileSync4(join5(dir, "package.json"), "utf-8"));
      const matches = pkg.name === "hivemind" || pkg.name === "hivemind-codex";
      if (matches && pkg.version) return pkg.version;
    } catch {
      // No readable package.json at this level; keep climbing.
    }
    const parent = dirname(dir);
    if (parent === dir) break;
    dir = parent;
  }
  return null;
}
|
|
438
|
+
|
|
439
|
+
// dist/src/hooks/cursor/session-start.js
|
|
440
|
+
var log3 = (msg) => log("cursor-session-start", msg);
|
|
441
|
+
var __bundleDir = dirname2(fileURLToPath(import.meta.url));
|
|
442
|
+
var AUTH_CMD = join6(__bundleDir, "commands", "auth-login.js");
|
|
443
|
+
var context = `DEEPLAKE MEMORY: Persistent memory at ~/.deeplake/memory/ shared across sessions, users, and agents.
|
|
444
|
+
|
|
445
|
+
Structure: index.md (start here) \u2192 summaries/*.md \u2192 sessions/*.jsonl (last resort). Do NOT jump straight to JSONL.
|
|
446
|
+
Search: grep -r "keyword" ~/.deeplake/memory/
|
|
447
|
+
IMPORTANT: Only use bash commands (cat, ls, grep, echo, jq, head, tail, sed, awk, etc.) to interact with ~/.deeplake/memory/. Do NOT use python, python3, node, curl, or other interpreters \u2014 they are not available in the memory filesystem.
|
|
448
|
+
Do NOT spawn subagents to read deeplake memory.`;
|
|
449
|
+
/** Pick a session id from the hook input, minting a timestamped one if absent. */
function resolveSessionId(input) {
  const provided = input.session_id ?? input.conversation_id;
  return provided ?? `cursor-${Date.now()}`;
}
|
|
452
|
+
/** Use the first workspace root when one is provided, else the process cwd. */
function resolveCwd(input) {
  const roots = input.workspace_roots;
  if (Array.isArray(roots) && typeof roots[0] === "string") {
    return roots[0];
  }
  return process.cwd();
}
|
|
459
|
+
/**
 * Insert an "in-progress" summary placeholder row for this session, unless
 * a row already exists at /summaries/<user>/<session>.md.
 */
async function createPlaceholder(api, table, sessionId, cwd, userName, orgName, workspaceId) {
  const summaryPath = `/summaries/${userName}/${sessionId}.md`;
  const existing = await api.query(`SELECT path FROM "${table}" WHERE path = '${sqlStr(summaryPath)}' LIMIT 1`);
  if (existing.length > 0) return;
  const now = new Date().toISOString();
  const projectName = cwd.split("/").pop() ?? "unknown";
  const sessionSource = `/sessions/${userName}/${userName}_${orgName}_${workspaceId}_${sessionId}.jsonl`;
  const lines = [
    `# Session ${sessionId}`,
    `- **Source**: ${sessionSource}`,
    `- **Started**: ${now}`,
    `- **Project**: ${projectName}`,
    `- **Status**: in-progress`,
    ""
  ];
  const content = lines.join("\n");
  const filename = `${sessionId}.md`;
  const sizeBytes = Buffer.byteLength(content, "utf-8");
  await api.query(`INSERT INTO "${table}" (id, path, filename, summary, author, mime_type, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ('${crypto.randomUUID()}', '${sqlStr(summaryPath)}', '${sqlStr(filename)}', E'${sqlStr(content)}', '${sqlStr(userName)}', 'text/markdown', ${sizeBytes}, '${sqlStr(projectName)}', 'in progress', 'cursor', '${now}', '${now}')`);
}
|
|
478
|
+
/**
 * Cursor session-start hook. Prints an `additional_context` JSON object on
 * stdout describing the shared deeplake memory, and — when credentials
 * exist and capture is enabled — bootstraps the tables and writes an
 * in-progress placeholder summary row for this session.
 */
async function main() {
  if (process.env.HIVEMIND_WIKI_WORKER === "1") return;
  const input = await readStdin();
  const sessionId = resolveSessionId(input);
  const cwd = resolveCwd(input);
  const creds = loadCredentials();
  if (creds?.token) {
    log3(`credentials loaded: org=${creds.orgName ?? creds.orgId}`);
  } else {
    log3("no credentials found");
  }
  const captureEnabled = process.env.HIVEMIND_CAPTURE !== "false";
  if (creds?.token && captureEnabled) {
    try {
      const config = loadConfig();
      if (config) {
        const table = config.tableName;
        const sessionsTable = config.sessionsTableName;
        const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, table);
        await api.ensureTable();
        await api.ensureSessionsTable(sessionsTable);
        await createPlaceholder(api, table, sessionId, cwd, config.userName, config.orgName, config.workspaceId);
        log3("placeholder created");
      }
    } catch (e) {
      // Best-effort: never block session start on bootstrap failures.
      log3(`placeholder failed: ${e.message}`);
    }
  }
  let versionNotice = "";
  const current = getInstalledVersion(__bundleDir, ".claude-plugin");
  if (current) versionNotice = `\nHivemind v${current}`;
  let statusLine;
  if (creds?.token) {
    statusLine = `Logged in to Deeplake as org: ${creds.orgName ?? creds.orgId} (workspace: ${creds.workspaceId ?? "default"})`;
  } else {
    statusLine = `Not logged in to Deeplake. Run: node "${AUTH_CMD}" login`;
  }
  const additionalContext = `${context}\n${statusLine}${versionNotice}`;
  console.log(JSON.stringify({ additional_context: additionalContext }));
}
// Hooks must never fail the host: log the error and exit 0.
main().catch((e) => {
  log3(`fatal: ${e.message}`);
  process.exit(0);
});
|