@deeplake/hivemind 0.6.47
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/marketplace.json +20 -0
- package/.claude-plugin/plugin.json +19 -0
- package/LICENSE +201 -0
- package/README.md +359 -0
- package/bundle/cli.js +1051 -0
- package/codex/bundle/capture.js +759 -0
- package/codex/bundle/commands/auth-login.js +862 -0
- package/codex/bundle/package.json +1 -0
- package/codex/bundle/pre-tool-use.js +2097 -0
- package/codex/bundle/session-start-setup.js +585 -0
- package/codex/bundle/session-start.js +129 -0
- package/codex/bundle/shell/deeplake-shell.js +69338 -0
- package/codex/bundle/stop.js +673 -0
- package/codex/bundle/wiki-worker.js +266 -0
- package/codex/skills/deeplake-memory/SKILL.md +65 -0
- package/cursor/bundle/capture.js +485 -0
- package/cursor/bundle/commands/auth-login.js +862 -0
- package/cursor/bundle/package.json +1 -0
- package/cursor/bundle/session-end.js +45 -0
- package/cursor/bundle/session-start.js +520 -0
- package/cursor/bundle/shell/deeplake-shell.js +69338 -0
- package/mcp/bundle/package.json +1 -0
- package/mcp/bundle/server.js +24068 -0
- package/openclaw/README.md +89 -0
- package/openclaw/dist/index.js +1714 -0
- package/openclaw/dist/package.json +1 -0
- package/openclaw/openclaw.plugin.json +56 -0
- package/openclaw/package.json +29 -0
- package/openclaw/skills/SKILL.md +61 -0
- package/package.json +69 -0
|
@@ -0,0 +1,673 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
// dist/src/hooks/codex/stop.js
|
|
4
|
+
import { readFileSync as readFileSync4, existsSync as existsSync4 } from "node:fs";
|
|
5
|
+
|
|
6
|
+
// dist/src/utils/stdin.js
|
|
7
|
+
/**
 * Collect all of stdin and parse it as a JSON hook payload.
 * @returns {Promise<object>} resolves with the parsed object; rejects when
 *   stdin errors or the collected text is not valid JSON.
 */
function readStdin() {
  return new Promise((resolve, reject) => {
    const chunks = [];
    process.stdin.setEncoding("utf-8");
    process.stdin.on("data", (chunk) => {
      chunks.push(chunk);
    });
    process.stdin.on("error", reject);
    process.stdin.on("end", () => {
      try {
        resolve(JSON.parse(chunks.join("")));
      } catch (err) {
        reject(new Error(`Failed to parse hook input: ${err}`));
      }
    });
  });
}
|
|
22
|
+
|
|
23
|
+
// dist/src/config.js
|
|
24
|
+
import { readFileSync, existsSync } from "node:fs";
|
|
25
|
+
import { join } from "node:path";
|
|
26
|
+
import { homedir, userInfo } from "node:os";
|
|
27
|
+
/**
 * Resolve Hivemind configuration from environment variables and
 * ~/.deeplake/credentials.json (env vars take precedence).
 * Returns null when the credentials file is corrupt JSON or when no
 * token/org id can be resolved at all.
 */
function loadConfig() {
  const env = process.env;
  const home = homedir();
  const credentialsFile = join(home, ".deeplake", "credentials.json");
  let creds = null;
  if (existsSync(credentialsFile)) {
    try {
      creds = JSON.parse(readFileSync(credentialsFile, "utf-8"));
    } catch {
      // Unreadable credentials file: treat the whole setup as unconfigured.
      return null;
    }
  }
  const token = env.HIVEMIND_TOKEN ?? creds?.token;
  const orgId = env.HIVEMIND_ORG_ID ?? creds?.orgId;
  if (!token || !orgId) {
    return null;
  }
  return {
    token,
    orgId,
    orgName: creds?.orgName ?? orgId,
    userName: creds?.userName || userInfo().username || "unknown",
    workspaceId: env.HIVEMIND_WORKSPACE_ID ?? creds?.workspaceId ?? "default",
    apiUrl: env.HIVEMIND_API_URL ?? creds?.apiUrl ?? "https://api.deeplake.ai",
    tableName: env.HIVEMIND_TABLE ?? "memory",
    sessionsTableName: env.HIVEMIND_SESSIONS_TABLE ?? "sessions",
    memoryPath: env.HIVEMIND_MEMORY_PATH ?? join(home, ".deeplake", "memory")
  };
}
|
|
54
|
+
|
|
55
|
+
// dist/src/deeplake-api.js
|
|
56
|
+
import { randomUUID } from "node:crypto";
|
|
57
|
+
import { existsSync as existsSync2, mkdirSync, readFileSync as readFileSync2, writeFileSync } from "node:fs";
|
|
58
|
+
import { join as join3 } from "node:path";
|
|
59
|
+
import { tmpdir } from "node:os";
|
|
60
|
+
|
|
61
|
+
// dist/src/utils/debug.js
|
|
62
|
+
import { appendFileSync } from "node:fs";
|
|
63
|
+
import { join as join2 } from "node:path";
|
|
64
|
+
import { homedir as homedir2 } from "node:os";
|
|
65
|
+
// Debug logging is opt-in via HIVEMIND_DEBUG=1; entries are appended to
// ~/.deeplake/hook-debug.log.
var DEBUG = process.env.HIVEMIND_DEBUG === "1";
var LOG = join2(homedir2(), ".deeplake", "hook-debug.log");
/** Format a Date as "YYYY-MM-DD HH:MM:SS UTC" (second precision). */
function utcTimestamp(d = new Date()) {
  const iso = d.toISOString();
  return `${iso.slice(0, 10)} ${iso.slice(11, 19)} UTC`;
}
/** Append one tagged line to the debug log; no-op unless debugging is on. */
function log(tag, msg) {
  if (!DEBUG) {
    return;
  }
  appendFileSync(LOG, `${new Date().toISOString()} [${tag}] ${msg}\n`);
}
|
|
76
|
+
|
|
77
|
+
// dist/src/utils/sql.js
|
|
78
|
+
/**
 * Escape a string for embedding inside a single-quoted SQL literal:
 * backslashes doubled, single quotes doubled, NUL bytes dropped, and the
 * remaining non-printable control characters stripped (tab/newline/CR kept).
 */
function sqlStr(value) {
  const rules = [
    [/\\/g, "\\\\"], // double backslashes first so later escapes are not re-escaped
    [/'/g, "''"],    // standard SQL quote doubling
    [/\0/g, ""],     // NUL is never valid in a literal
    [/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, ""] // other control chars removed
  ];
  return rules.reduce((out, [pattern, replacement]) => out.replace(pattern, replacement), value);
}
|
|
81
|
+
|
|
82
|
+
// dist/src/deeplake-api.js
|
|
83
|
+
// Debug logger scoped to the SDK layer.
var log2 = (msg) => log("sdk", msg);
/** Collapse all whitespace runs and truncate the SQL text for log output. */
function summarizeSql(sql, maxLen = 220) {
  const compact = sql.replace(/\s+/g, " ").trim();
  if (compact.length <= maxLen) {
    return compact;
  }
  return `${compact.slice(0, maxLen)}...`;
}
|
|
88
|
+
/**
 * Emit a SQL trace line to stderr when HIVEMIND_TRACE_SQL=1 or
 * HIVEMIND_DEBUG=1; in debug mode the line is also mirrored to the debug log.
 */
function traceSql(msg) {
  const debugging = process.env.HIVEMIND_DEBUG === "1";
  const enabled = process.env.HIVEMIND_TRACE_SQL === "1" || debugging;
  if (!enabled) {
    return;
  }
  process.stderr.write(`[deeplake-sql] ${msg}\n`);
  if (debugging) {
    log2(msg);
  }
}
|
|
97
|
+
// HTTP statuses treated as transient (rate limiting + server-side errors).
var RETRYABLE_CODES = new Set([429, 500, 502, 503, 504]);
var MAX_RETRIES = 3;
var BASE_DELAY_MS = 500;
var MAX_CONCURRENCY = 5;
// Per-statement timeout; overridable for slow backends.
var QUERY_TIMEOUT_MS = Number(process.env.HIVEMIND_QUERY_TIMEOUT_MS ?? 1e4);
// How long an on-disk "index already created" marker stays trusted (default 6h).
var INDEX_MARKER_TTL_MS = Number(process.env.HIVEMIND_INDEX_MARKER_TTL_MS ?? 6 * 60 * 6e4);
/** Promise-based delay helper. */
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
|
|
106
|
+
/**
 * Heuristic: does this error look like a fetch/AbortSignal timeout?
 * Non-Error values are matched by their string form.
 */
function isTimeoutError(error) {
  let name = "";
  let message;
  if (error instanceof Error) {
    name = error.name.toLowerCase();
    message = error.message.toLowerCase();
  } else {
    message = String(error).toLowerCase();
  }
  if (name === "aborterror" || name.includes("timeout")) {
    return true;
  }
  return message.includes("timeout") || message.includes("timed out");
}
|
|
111
|
+
/** True when the error indicates the index/table already exists (benign race). */
function isDuplicateIndexError(error) {
  const message = (error instanceof Error ? error.message : String(error)).toLowerCase();
  const signatures = [
    "duplicate key value violates unique constraint",
    "pg_class_relname_nsp_index",
    "already exists"
  ];
  return signatures.some((needle) => message.includes(needle));
}
|
|
115
|
+
/**
 * Heuristic: does the SQL insert a session event row (column list starting
 * with id, path, filename, message)? Used to grant extra retries on 401/403.
 */
function isSessionInsertQuery(sql) {
  const SESSION_INSERT_RE = /^\s*insert\s+into\s+"[^"]+"\s*\(\s*id\s*,\s*path\s*,\s*filename\s*,\s*message\s*,/i;
  return SESSION_INSERT_RE.test(sql);
}
|
|
118
|
+
/**
 * True when a 403 response body looks like an HTML error page from a
 * proxy/CDN (Cloudflare, nginx) rather than a real API-level denial.
 */
function isTransientHtml403(text) {
  const body = text.toLowerCase();
  return ["<html", "403 forbidden", "cloudflare", "nginx"].some((marker) => body.includes(marker));
}
|
|
122
|
+
/** Directory for index-marker files; overridable via HIVEMIND_INDEX_MARKER_DIR. */
function getIndexMarkerDir() {
  const override = process.env.HIVEMIND_INDEX_MARKER_DIR;
  if (override != null) {
    return override;
  }
  return join3(tmpdir(), "hivemind-deeplake-indexes");
}
|
|
125
|
+
/**
 * Minimal counting semaphore for bounding concurrent async work.
 * acquire() resolves immediately while slots are free, otherwise queues;
 * release() hands the freed slot to the oldest waiter (FIFO).
 */
var Semaphore = class {
  max;
  waiting = [];
  active = 0;
  constructor(max) {
    this.max = max;
  }
  /** Take a slot, suspending until one is available. */
  async acquire() {
    if (this.active >= this.max) {
      // At capacity: park until release() wakes us (release also re-counts us).
      await new Promise((resolve) => this.waiting.push(resolve));
      return;
    }
    this.active++;
  }
  /** Return a slot; transfers it directly to the next waiter if any. */
  release() {
    this.active--;
    const wake = this.waiting.shift();
    if (wake !== undefined) {
      this.active++;
      wake();
    }
  }
};
|
|
148
|
+
// SQL-over-HTTP client for the Deeplake tables API. Every statement goes
// through POST {apiUrl}/workspaces/{workspaceId}/tables/query; concurrency is
// bounded by a module-level Semaphore and transient failures are retried with
// exponential backoff plus jitter.
var DeeplakeApi = class {
  token;        // bearer token sent in the Authorization header
  apiUrl;       // API base URL (no trailing slash expected)
  orgId;        // sent as the X-Activeloop-Org-Id header
  workspaceId;  // workspace that scopes every request
  tableName;    // default target table for the row-write helpers
  _pendingRows = [];                     // rows queued via appendRows(), flushed by commit()
  _sem = new Semaphore(MAX_CONCURRENCY); // caps concurrent in-flight queries
  _tablesCache = null;                   // last successful table listing, or null
  constructor(token, apiUrl, orgId, workspaceId, tableName) {
    this.token = token;
    this.apiUrl = apiUrl;
    this.orgId = orgId;
    this.workspaceId = workspaceId;
    this.tableName = tableName;
  }
  /** Execute SQL with retry on transient errors and bounded concurrency. */
  async query(sql) {
    const startedAt = Date.now();
    const summary = summarizeSql(sql);
    traceSql(`query start: ${summary}`);
    await this._sem.acquire();
    try {
      const rows = await this._queryWithRetry(sql);
      traceSql(`query ok (${Date.now() - startedAt}ms, rows=${rows.length}): ${summary}`);
      return rows;
    } catch (e) {
      const message = e instanceof Error ? e.message : String(e);
      traceSql(`query fail (${Date.now() - startedAt}ms): ${summary} :: ${message}`);
      throw e;
    } finally {
      // Always free the slot, even when the query throws.
      this._sem.release();
    }
  }
  // POST the statement, retrying up to MAX_RETRIES on network errors and
  // retryable HTTP statuses. Timeouts are NOT retried (see comment below).
  async _queryWithRetry(sql) {
    let lastError;
    for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
      let resp;
      try {
        const signal = AbortSignal.timeout(QUERY_TIMEOUT_MS);
        resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables/query`, {
          method: "POST",
          headers: {
            Authorization: `Bearer ${this.token}`,
            "Content-Type": "application/json",
            "X-Activeloop-Org-Id": this.orgId
          },
          signal,
          body: JSON.stringify({ query: sql })
        });
      } catch (e) {
        if (isTimeoutError(e)) {
          // A timed-out statement may already have been applied server-side,
          // so it is surfaced immediately instead of retried.
          lastError = new Error(`Query timeout after ${QUERY_TIMEOUT_MS}ms`);
          throw lastError;
        }
        lastError = e instanceof Error ? e : new Error(String(e));
        if (attempt < MAX_RETRIES) {
          // Exponential backoff with up to 200ms of jitter.
          const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
          log2(`query retry ${attempt + 1}/${MAX_RETRIES} (fetch error: ${lastError.message}) in ${delay.toFixed(0)}ms`);
          await sleep(delay);
          continue;
        }
        throw lastError;
      }
      if (resp.ok) {
        // Response shape: { columns: string[], rows: any[][] } -> array of objects.
        const raw = await resp.json();
        if (!raw?.rows || !raw?.columns)
          return [];
        return raw.rows.map((row) => Object.fromEntries(raw.columns.map((col, i) => [col, row[i]])));
      }
      const text = await resp.text().catch(() => "");
      // Precedence note: && binds tighter than ||, so for session-insert
      // statements a 401 always retries, while a 403 retries only when the
      // body is empty or looks like a proxy/CDN HTML error page.
      const retryable403 = isSessionInsertQuery(sql) && (resp.status === 401 || resp.status === 403 && (text.length === 0 || isTransientHtml403(text)));
      if (attempt < MAX_RETRIES && (RETRYABLE_CODES.has(resp.status) || retryable403)) {
        const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
        log2(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`);
        await sleep(delay);
        continue;
      }
      throw new Error(`Query failed: ${resp.status}: ${text.slice(0, 200)}`);
    }
    throw lastError ?? new Error("Query failed: max retries exceeded");
  }
  // ── Writes ──────────────────────────────────────────────────────────────────
  /** Queue rows for writing. Call commit() to flush. */
  appendRows(rows) {
    this._pendingRows.push(...rows);
  }
  /** Flush pending rows via SQL. */
  async commit() {
    if (this._pendingRows.length === 0)
      return;
    const rows = this._pendingRows;
    this._pendingRows = [];
    const CONCURRENCY = 10;
    // Upsert in chunks of 10; allSettled means one failing row does not abort
    // the batch (failures only show up in the per-query trace/debug logs).
    for (let i = 0; i < rows.length; i += CONCURRENCY) {
      const chunk = rows.slice(i, i + CONCURRENCY);
      await Promise.allSettled(chunk.map((r) => this.upsertRowSql(r)));
    }
    log2(`commit: ${rows.length} rows`);
  }
  // SELECT-then-UPDATE/INSERT keyed on `path`. NOTE(review): the existence
  // check and the write are separate statements, so two concurrent writers
  // could both insert the same path — confirm callers serialize per path.
  async upsertRowSql(row) {
    const ts = (/* @__PURE__ */ new Date()).toISOString();
    const cd = row.creationDate ?? ts;
    const lud = row.lastUpdateDate ?? ts;
    const exists = await this.query(`SELECT path FROM "${this.tableName}" WHERE path = '${sqlStr(row.path)}' LIMIT 1`);
    if (exists.length > 0) {
      // cd/lud are interpolated unescaped — assumed to be ISO-8601 strings
      // (they default to toISOString() above); verify callers never pass
      // user-controlled values here.
      let setClauses = `summary = E'${sqlStr(row.contentText)}', mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`;
      if (row.project !== void 0)
        setClauses += `, project = '${sqlStr(row.project)}'`;
      if (row.description !== void 0)
        setClauses += `, description = '${sqlStr(row.description)}'`;
      await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(row.path)}'`);
    } else {
      const id = randomUUID();
      let cols = "id, path, filename, summary, mime_type, size_bytes, creation_date, last_update_date";
      let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'${sqlStr(row.contentText)}', '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`;
      if (row.project !== void 0) {
        cols += ", project";
        vals += `, '${sqlStr(row.project)}'`;
      }
      if (row.description !== void 0) {
        cols += ", description";
        vals += `, '${sqlStr(row.description)}'`;
      }
      await this.query(`INSERT INTO "${this.tableName}" (${cols}) VALUES (${vals})`);
    }
  }
  /** Update specific columns on a row by path. */
  async updateColumns(path, columns) {
    // Numbers are embedded bare; everything else is stringified and escaped.
    const setClauses = Object.entries(columns).map(([col, val]) => typeof val === "number" ? `${col} = ${val}` : `${col} = '${sqlStr(String(val))}'`).join(", ");
    await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(path)}'`);
  }
  // ── Convenience ─────────────────────────────────────────────────────────────
  /** Create a BM25 search index on a column. */
  async createIndex(column) {
    // NOTE(review): sqlStr is a string-literal escaper applied to an
    // identifier here; safe for the alphanumeric column names used in this
    // bundle, but not a general identifier quoting mechanism.
    await this.query(`CREATE INDEX IF NOT EXISTS idx_${sqlStr(column)}_bm25 ON "${this.tableName}" USING deeplake_index ("${column}")`);
  }
  // Deterministic, sanitized index name for a (table, suffix) pair.
  buildLookupIndexName(table, suffix) {
    return `idx_${table}_${suffix}`.replace(/[^a-zA-Z0-9_]/g, "_");
  }
  // Path of the marker file recording that an index was created for this
  // workspace/org/table/suffix combination.
  getLookupIndexMarkerPath(table, suffix) {
    const markerKey = [
      this.workspaceId,
      this.orgId,
      table,
      suffix
    ].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_");
    return join3(getIndexMarkerDir(), `${markerKey}.json`);
  }
  // True when a marker exists and is younger than INDEX_MARKER_TTL_MS;
  // unreadable or expired markers count as missing.
  hasFreshLookupIndexMarker(table, suffix) {
    const markerPath = this.getLookupIndexMarkerPath(table, suffix);
    if (!existsSync2(markerPath))
      return false;
    try {
      const raw = JSON.parse(readFileSync2(markerPath, "utf-8"));
      const updatedAt = raw.updatedAt ? new Date(raw.updatedAt).getTime() : NaN;
      if (!Number.isFinite(updatedAt) || Date.now() - updatedAt > INDEX_MARKER_TTL_MS)
        return false;
      return true;
    } catch {
      return false;
    }
  }
  // Write/refresh the marker file; creates the marker directory on demand.
  markLookupIndexReady(table, suffix) {
    mkdirSync(getIndexMarkerDir(), { recursive: true });
    writeFileSync(this.getLookupIndexMarkerPath(table, suffix), JSON.stringify({ updatedAt: (/* @__PURE__ */ new Date()).toISOString() }), "utf-8");
  }
  // Idempotently create a lookup index, using the marker file to skip the
  // network round-trip when it was created recently. "Already exists" races
  // from concurrent hooks are treated as success.
  async ensureLookupIndex(table, suffix, columnsSql) {
    if (this.hasFreshLookupIndexMarker(table, suffix))
      return;
    const indexName = this.buildLookupIndexName(table, suffix);
    try {
      await this.query(`CREATE INDEX IF NOT EXISTS "${indexName}" ON "${table}" ${columnsSql}`);
      this.markLookupIndexReady(table, suffix);
    } catch (e) {
      if (isDuplicateIndexError(e)) {
        this.markLookupIndexReady(table, suffix);
        return;
      }
      // Non-duplicate failures are logged and swallowed: the index is only
      // a performance optimization, never a correctness requirement.
      log2(`index "${indexName}" skipped: ${e.message}`);
    }
  }
  /** List all tables in the workspace (with retry). */
  async listTables(forceRefresh = false) {
    if (!forceRefresh && this._tablesCache)
      return [...this._tablesCache];
    const { tables, cacheable } = await this._fetchTables();
    if (cacheable)
      this._tablesCache = [...tables];
    return tables;
  }
  // GET the table listing; returns cacheable=false on any failure so an
  // error-produced empty list is never cached as "workspace has no tables".
  async _fetchTables() {
    for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
      try {
        const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, {
          headers: {
            Authorization: `Bearer ${this.token}`,
            "X-Activeloop-Org-Id": this.orgId
          }
        });
        if (resp.ok) {
          const data = await resp.json();
          return {
            tables: (data.tables ?? []).map((t) => t.table_name),
            cacheable: true
          };
        }
        if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
          await sleep(BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200);
          continue;
        }
        return { tables: [], cacheable: false };
      } catch {
        if (attempt < MAX_RETRIES) {
          await sleep(BASE_DELAY_MS * Math.pow(2, attempt));
          continue;
        }
        return { tables: [], cacheable: false };
      }
    }
    return { tables: [], cacheable: false };
  }
  /** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
  async ensureTable(name) {
    const tbl = name ?? this.tableName;
    const tables = await this.listTables();
    if (!tables.includes(tbl)) {
      log2(`table "${tbl}" not found, creating`);
      await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
      log2(`table "${tbl}" created`);
      // NOTE(review): this inner check repeats the enclosing guard and is
      // always true here; kept as-is to avoid any behavior drift.
      if (!tables.includes(tbl))
        this._tablesCache = [...tables, tbl];
    }
  }
  /** Create the sessions table (uses JSONB for message since every row is a JSON event). */
  async ensureSessionsTable(name) {
    const tables = await this.listTables();
    if (!tables.includes(name)) {
      log2(`table "${name}" not found, creating`);
      await this.query(`CREATE TABLE IF NOT EXISTS "${name}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
      log2(`table "${name}" created`);
      // NOTE(review): same always-true inner check as in ensureTable.
      if (!tables.includes(name))
        this._tablesCache = [...tables, name];
    }
    await this.ensureLookupIndex(name, "path_creation_date", `("path", "creation_date")`);
  }
};
|
|
395
|
+
|
|
396
|
+
// dist/src/hooks/codex/spawn-wiki-worker.js
|
|
397
|
+
import { spawn, execSync } from "node:child_process";
|
|
398
|
+
import { fileURLToPath } from "node:url";
|
|
399
|
+
import { dirname, join as join5 } from "node:path";
|
|
400
|
+
import { writeFileSync as writeFileSync2, mkdirSync as mkdirSync3 } from "node:fs";
|
|
401
|
+
import { homedir as homedir3, tmpdir as tmpdir2 } from "node:os";
|
|
402
|
+
|
|
403
|
+
// dist/src/utils/wiki-log.js
|
|
404
|
+
import { mkdirSync as mkdirSync2, appendFileSync as appendFileSync2 } from "node:fs";
|
|
405
|
+
import { join as join4 } from "node:path";
|
|
406
|
+
/**
 * Build a tiny append-only logger for wiki-worker progress.
 * Returns { path, log }; log() is best-effort and never throws, creating
 * the hooks directory on demand.
 */
function makeWikiLogger(hooksDir, filename = "deeplake-wiki.log") {
  const logFile = join4(hooksDir, filename);
  const write = (msg) => {
    try {
      mkdirSync2(hooksDir, { recursive: true });
      appendFileSync2(logFile, `[${utcTimestamp()}] ${msg}\n`);
    } catch {
      // Best-effort: logging must never break the hook itself.
    }
  };
  return { path: logFile, log: write };
}
|
|
420
|
+
|
|
421
|
+
// dist/src/hooks/codex/spawn-wiki-worker.js
|
|
422
|
+
// Shared state for the detached wiki summary worker: log location plus the
// prompt template handed to the codex CLI. The __PLACEHOLDER__ tokens are
// substituted by the worker before invocation (template text is runtime data
// and is preserved verbatim).
var HOME = homedir3();
var wikiLogger = makeWikiLogger(join5(HOME, ".codex", "hooks"));
var WIKI_LOG = wikiLogger.path;
var WIKI_PROMPT_TEMPLATE = `You are building a personal wiki from a coding session. Your goal is to extract every piece of knowledge \u2014 entities, decisions, relationships, and facts \u2014 into a structured, searchable wiki entry.

SESSION JSONL path: __JSONL__
SUMMARY FILE to write: __SUMMARY__
SESSION ID: __SESSION_ID__
PROJECT: __PROJECT__
PREVIOUS JSONL OFFSET (lines already processed): __PREV_OFFSET__
CURRENT JSONL LINES: __JSONL_LINES__

Steps:
1. Read the session JSONL at the path above.
   - If PREVIOUS JSONL OFFSET > 0, this is a resumed session. Read the existing summary file first,
     then focus on lines AFTER the offset for new content. Merge new facts into the existing summary.
   - If offset is 0, generate from scratch.

2. Write the summary file at the path above with this EXACT format:

# Session __SESSION_ID__
- **Source**: __JSONL_SERVER_PATH__
- **Started**: <extract from JSONL>
- **Ended**: <now>
- **Project**: __PROJECT__
- **JSONL offset**: __JSONL_LINES__

## What Happened
<2-3 dense sentences. What was the goal, what was accomplished, what's left.>

## People
<For each person mentioned: name, role, what they did/said. Format: **Name** \u2014 role \u2014 action>

## Entities
<Every named thing: repos, branches, files, APIs, tools, services, tables, features, bugs.
Format: **entity** (type) \u2014 what was done with it, its current state>

## Decisions & Reasoning
<Every decision made and WHY.>

## Key Facts
<Bullet list of atomic facts that could answer future questions.>

## Files Modified
<bullet list: path (new/modified/deleted) \u2014 what changed>

## Open Questions / TODO
<Anything unresolved, blocked, or explicitly deferred>

IMPORTANT: Be exhaustive. Extract EVERY entity, decision, and fact.
PRIVACY: Never include absolute filesystem paths in the summary.
LENGTH LIMIT: Keep the total summary under 4000 characters.`;
// Convenience alias; makeWikiLogger's log() does not use `this`, so the
// bare-function extraction is safe.
var wikiLog = wikiLogger.log;
|
|
475
|
+
/**
 * Locate the codex CLI via `which`; falls back to the bare name "codex"
 * (resolved through PATH at spawn time) when lookup fails.
 */
function findCodexBin() {
  try {
    const resolved = execSync("which codex 2>/dev/null", { encoding: "utf-8" });
    return resolved.trim();
  } catch {
    return "codex";
  }
}
|
|
482
|
+
/**
 * Launch a detached background worker that summarizes the session into the
 * wiki. Writes a one-shot JSON config into a fresh temp directory, then
 * spawns `nohup node wiki-worker.js <config>` fully detached so this hook
 * can exit immediately without waiting on the summary.
 */
function spawnCodexWikiWorker(opts) {
  const { config, sessionId, cwd, bundleDir, reason } = opts;
  const projectName = cwd.split("/").pop() || "unknown";
  const tmpDir = join5(tmpdir2(), `deeplake-wiki-${sessionId}-${Date.now()}`);
  mkdirSync3(tmpDir, { recursive: true });
  // Everything the worker needs, serialized once so it runs without env access.
  const workerConfig = {
    apiUrl: config.apiUrl,
    token: config.token,
    orgId: config.orgId,
    workspaceId: config.workspaceId,
    memoryTable: config.tableName,
    sessionsTable: config.sessionsTableName,
    sessionId,
    userName: config.userName,
    project: projectName,
    tmpDir,
    codexBin: findCodexBin(),
    wikiLog: WIKI_LOG,
    hooksDir: join5(HOME, ".codex", "hooks"),
    promptTemplate: WIKI_PROMPT_TEMPLATE
  };
  const configFile = join5(tmpDir, "config.json");
  writeFileSync2(configFile, JSON.stringify(workerConfig));
  wikiLog(`${reason}: spawning summary worker for ${sessionId}`);
  const workerPath = join5(bundleDir, "wiki-worker.js");
  const child = spawn("nohup", ["node", workerPath, configFile], {
    detached: true,
    stdio: ["ignore", "ignore", "ignore"]
  });
  child.unref();
  wikiLog(`${reason}: spawned summary worker for ${sessionId}`);
}
|
|
512
|
+
/** Directory containing this bundle file, for locating sibling worker scripts. */
function bundleDirFromImportMeta(importMetaUrl) {
  const filePath = fileURLToPath(importMetaUrl);
  return dirname(filePath);
}
|
|
515
|
+
|
|
516
|
+
// dist/src/hooks/summary-state.js
|
|
517
|
+
import { readFileSync as readFileSync3, writeFileSync as writeFileSync3, writeSync, mkdirSync as mkdirSync4, renameSync, existsSync as existsSync3, unlinkSync, openSync, closeSync } from "node:fs";
|
|
518
|
+
import { homedir as homedir4 } from "node:os";
|
|
519
|
+
import { join as join6 } from "node:path";
|
|
520
|
+
// Debug logger + on-disk state directory for per-session summary locks.
var dlog = (msg) => log("summary-state", msg);
var STATE_DIR = join6(homedir4(), ".claude", "hooks", "summary-state");
// Shared Int32 buffer suitable for Atomics-based waiting; appears unused in
// this bundle — TODO confirm before removing.
var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4));
/** Lock-file path for one session. */
function lockPath(sessionId) {
  return join6(STATE_DIR, sessionId + ".lock");
}
|
|
526
|
+
// Try to take the per-session summary lock. The lock is a file whose content
// is the epoch-ms timestamp of acquisition; "wx" (exclusive create) makes the
// final step atomic. A lock older than maxAgeMs (default 10 min) is treated
// as stale — presumably an abandoned worker — and is taken over.
// Returns true when this process now holds the lock, false otherwise.
function tryAcquireLock(sessionId, maxAgeMs = 10 * 60 * 1e3) {
  mkdirSync4(STATE_DIR, { recursive: true });
  const p = lockPath(sessionId);
  if (existsSync3(p)) {
    try {
      // Lock content is the acquisition timestamp; a fresh lock wins.
      const ageMs = Date.now() - parseInt(readFileSync3(p, "utf-8"), 10);
      if (Number.isFinite(ageMs) && ageMs < maxAgeMs)
        return false;
    } catch (readErr) {
      // Unreadable lock: fall through and treat it as stale.
      dlog(`lock file unreadable for ${sessionId}, treating as stale: ${readErr.message}`);
    }
    try {
      unlinkSync(p);
    } catch (unlinkErr) {
      // Someone else may have just re-acquired; back off rather than fight.
      dlog(`could not unlink stale lock for ${sessionId}: ${unlinkErr.message}`);
      return false;
    }
  }
  try {
    // "wx" fails with EEXIST if another process created the file first —
    // this is the atomic acquisition step.
    const fd = openSync(p, "wx");
    try {
      writeSync(fd, String(Date.now()));
    } finally {
      closeSync(fd);
    }
    return true;
  } catch (e) {
    if (e.code === "EEXIST")
      return false;
    throw e;
  }
}
|
|
558
|
+
/** Best-effort removal of a session's lock file; a missing file is fine. */
function releaseLock(sessionId) {
  try {
    unlinkSync(lockPath(sessionId));
  } catch (e) {
    if (e?.code === "ENOENT") {
      return; // already gone — nothing to do
    }
    dlog(`releaseLock unlink failed for ${sessionId}: ${e.message}`);
  }
}
|
|
567
|
+
|
|
568
|
+
// dist/src/utils/session-path.js
|
|
569
|
+
/**
 * Server-side JSONL path for a session:
 * /sessions/<user>/<user>_<org>_<workspace>_<sessionId>.jsonl
 * Workspace falls back to "default" when the config omits it.
 */
function buildSessionPath(config, sessionId) {
  const { userName, orgName } = config;
  const workspace = config.workspaceId ?? "default";
  return `/sessions/${userName}/${userName}_${orgName}_${workspace}_${sessionId}.jsonl`;
}
|
|
573
|
+
|
|
574
|
+
// dist/src/hooks/codex/stop.js
|
|
575
|
+
// Entry point of the codex Stop hook: capture the final assistant message as a
// session event row, then (unless capture is disabled) kick off the detached
// wiki summary worker guarded by a per-session lock.
var log3 = (msg) => log("codex-stop", msg);
// Capture is on unless explicitly disabled with HIVEMIND_CAPTURE=false.
var CAPTURE = process.env.HIVEMIND_CAPTURE !== "false";
async function main() {
  // Guard against recursion: the wiki worker runs codex itself, which would
  // fire this hook again.
  if (process.env.HIVEMIND_WIKI_WORKER === "1")
    return;
  const input = await readStdin();
  const sessionId = input.session_id;
  if (!sessionId)
    return;
  const config = loadConfig();
  if (!config) {
    log3("no config");
    return;
  }
  if (CAPTURE) {
    // Best-effort capture: any failure is logged and the hook continues.
    try {
      const sessionsTable = config.sessionsTableName;
      const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, sessionsTable);
      const ts = (/* @__PURE__ */ new Date()).toISOString();
      let lastAssistantMessage = "";
      if (input.transcript_path) {
        try {
          const transcriptPath = input.transcript_path;
          if (existsSync4(transcriptPath)) {
            // Scan the JSONL transcript backwards for the most recent
            // assistant message; unparseable lines are skipped.
            const transcript = readFileSync4(transcriptPath, "utf-8");
            const lines = transcript.trim().split("\n").reverse();
            for (const line2 of lines) {
              try {
                const entry2 = JSON.parse(line2);
                const msg = entry2.payload ?? entry2;
                if (msg.role === "assistant" && msg.content) {
                  // content may be a plain string or an array of typed blocks.
                  const content = typeof msg.content === "string" ? msg.content : Array.isArray(msg.content) ? msg.content.filter((b) => b.type === "output_text" || b.type === "text").map((b) => b.text).join("\n") : "";
                  if (content) {
                    // Cap at 4000 chars to bound row size.
                    lastAssistantMessage = content.slice(0, 4e3);
                    break;
                  }
                }
              } catch {
              }
            }
            if (lastAssistantMessage)
              log3(`extracted assistant message from transcript (${lastAssistantMessage.length} chars)`);
          }
        } catch (e) {
          log3(`transcript read failed: ${e.message}`);
        }
      }
      // Event row stored as a JSONB `message` in the sessions table.
      const entry = {
        id: crypto.randomUUID(),
        session_id: sessionId,
        transcript_path: input.transcript_path,
        cwd: input.cwd,
        hook_event_name: input.hook_event_name,
        model: input.model,
        timestamp: ts,
        type: lastAssistantMessage ? "assistant_message" : "assistant_stop",
        content: lastAssistantMessage
      };
      const line = JSON.stringify(entry);
      const sessionPath = buildSessionPath(config, sessionId);
      const projectName = (input.cwd ?? "").split("/").pop() || "unknown";
      const filename = sessionPath.split("/").pop() ?? "";
      const jsonForSql = sqlStr(line);
      const insertSql = `INSERT INTO "${sessionsTable}" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ('${crypto.randomUUID()}', '${sqlStr(sessionPath)}', '${sqlStr(filename)}', '${jsonForSql}'::jsonb, '${sqlStr(config.userName)}', ${Buffer.byteLength(line, "utf-8")}, '${sqlStr(projectName)}', 'Stop', 'codex', '${ts}', '${ts}')`;
      await api.query(insertSql);
      log3("stop event captured");
    } catch (e) {
      log3(`capture failed: ${e.message}`);
    }
  }
  if (!CAPTURE)
    return;
  // One summary worker per session at a time; the lock is stale-safe.
  // NOTE(review): on successful spawn the lock is not released here —
  // presumably the worker (or the 10-minute stale timeout) releases it; confirm.
  if (!tryAcquireLock(sessionId)) {
    wikiLog(`Stop: periodic worker already running for ${sessionId}, skipping`);
    return;
  }
  wikiLog(`Stop: triggering summary for ${sessionId}`);
  try {
    spawnCodexWikiWorker({
      config,
      sessionId,
      cwd: input.cwd ?? "",
      bundleDir: bundleDirFromImportMeta(import.meta.url),
      reason: "Stop"
    });
  } catch (e) {
    log3(`spawn failed: ${e.message}`);
    // Release the lock so the next Stop event can retry the summary.
    try {
      releaseLock(sessionId);
    } catch (releaseErr) {
      log3(`releaseLock after spawn failure also failed: ${releaseErr.message}`);
    }
    throw e;
  }
}
// Hooks must never block codex: log the failure and exit 0 regardless.
main().catch((e) => {
  log3(`fatal: ${e.message}`);
  process.exit(0);
});
|