@deeplake/hivemind 0.6.48 → 0.7.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/marketplace.json +2 -2
- package/.claude-plugin/plugin.json +1 -1
- package/README.md +244 -20
- package/bundle/cli.js +1369 -112
- package/codex/bundle/capture.js +546 -96
- package/codex/bundle/commands/auth-login.js +290 -81
- package/codex/bundle/embeddings/embed-daemon.js +243 -0
- package/codex/bundle/pre-tool-use.js +666 -111
- package/codex/bundle/session-start-setup.js +231 -64
- package/codex/bundle/session-start.js +52 -13
- package/codex/bundle/shell/deeplake-shell.js +716 -119
- package/codex/bundle/skilify-worker.js +907 -0
- package/codex/bundle/stop.js +819 -79
- package/codex/bundle/wiki-worker.js +312 -11
- package/cursor/bundle/capture.js +1116 -64
- package/cursor/bundle/commands/auth-login.js +290 -81
- package/cursor/bundle/embeddings/embed-daemon.js +243 -0
- package/cursor/bundle/pre-tool-use.js +598 -77
- package/cursor/bundle/session-end.js +520 -2
- package/cursor/bundle/session-start.js +257 -65
- package/cursor/bundle/shell/deeplake-shell.js +716 -119
- package/cursor/bundle/skilify-worker.js +907 -0
- package/cursor/bundle/wiki-worker.js +571 -0
- package/hermes/bundle/capture.js +1119 -65
- package/hermes/bundle/commands/auth-login.js +290 -81
- package/hermes/bundle/embeddings/embed-daemon.js +243 -0
- package/hermes/bundle/pre-tool-use.js +597 -76
- package/hermes/bundle/session-end.js +522 -1
- package/hermes/bundle/session-start.js +260 -65
- package/hermes/bundle/shell/deeplake-shell.js +716 -119
- package/hermes/bundle/skilify-worker.js +907 -0
- package/hermes/bundle/wiki-worker.js +572 -0
- package/mcp/bundle/server.js +290 -75
- package/openclaw/dist/chunks/auth-creds-AEKS6D3P.js +14 -0
- package/openclaw/dist/chunks/chunk-SRCBBT4H.js +37 -0
- package/openclaw/dist/chunks/config-ZLH6JFJS.js +34 -0
- package/openclaw/dist/chunks/index-marker-store-PGT5CW6T.js +33 -0
- package/openclaw/dist/chunks/setup-config-C35UK4LP.js +114 -0
- package/openclaw/dist/index.js +929 -710
- package/openclaw/dist/skilify-worker.js +907 -0
- package/openclaw/openclaw.plugin.json +1 -1
- package/openclaw/package.json +1 -1
- package/openclaw/skills/SKILL.md +19 -0
- package/package.json +7 -1
- package/pi/extension-source/hivemind.ts +603 -22
package/cursor/bundle/capture.js
CHANGED
|
@@ -1,4 +1,56 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
4
|
+
var __esm = (fn, res) => function __init() {
|
|
5
|
+
return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
|
|
6
|
+
};
|
|
7
|
+
var __export = (target, all) => {
|
|
8
|
+
for (var name in all)
|
|
9
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
10
|
+
};
|
|
11
|
+
|
|
12
|
+
// dist/src/index-marker-store.js
|
|
13
|
+
var index_marker_store_exports = {};
|
|
14
|
+
__export(index_marker_store_exports, {
|
|
15
|
+
buildIndexMarkerPath: () => buildIndexMarkerPath,
|
|
16
|
+
getIndexMarkerDir: () => getIndexMarkerDir,
|
|
17
|
+
hasFreshIndexMarker: () => hasFreshIndexMarker,
|
|
18
|
+
writeIndexMarker: () => writeIndexMarker
|
|
19
|
+
});
|
|
20
|
+
import { existsSync as existsSync2, mkdirSync, readFileSync as readFileSync2, writeFileSync } from "node:fs";
|
|
21
|
+
import { join as join3 } from "node:path";
|
|
22
|
+
import { tmpdir } from "node:os";
|
|
23
|
+
function getIndexMarkerDir() {
|
|
24
|
+
return process.env.HIVEMIND_INDEX_MARKER_DIR ?? join3(tmpdir(), "hivemind-deeplake-indexes");
|
|
25
|
+
}
|
|
26
|
+
function buildIndexMarkerPath(workspaceId, orgId, table, suffix) {
|
|
27
|
+
const markerKey = [workspaceId, orgId, table, suffix].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_");
|
|
28
|
+
return join3(getIndexMarkerDir(), `${markerKey}.json`);
|
|
29
|
+
}
|
|
30
|
+
function hasFreshIndexMarker(markerPath) {
|
|
31
|
+
if (!existsSync2(markerPath))
|
|
32
|
+
return false;
|
|
33
|
+
try {
|
|
34
|
+
const raw = JSON.parse(readFileSync2(markerPath, "utf-8"));
|
|
35
|
+
const updatedAt = raw.updatedAt ? new Date(raw.updatedAt).getTime() : NaN;
|
|
36
|
+
if (!Number.isFinite(updatedAt) || Date.now() - updatedAt > INDEX_MARKER_TTL_MS)
|
|
37
|
+
return false;
|
|
38
|
+
return true;
|
|
39
|
+
} catch {
|
|
40
|
+
return false;
|
|
41
|
+
}
|
|
42
|
+
}
|
|
43
|
+
function writeIndexMarker(markerPath) {
|
|
44
|
+
mkdirSync(getIndexMarkerDir(), { recursive: true });
|
|
45
|
+
writeFileSync(markerPath, JSON.stringify({ updatedAt: (/* @__PURE__ */ new Date()).toISOString() }), "utf-8");
|
|
46
|
+
}
|
|
47
|
+
var INDEX_MARKER_TTL_MS;
|
|
48
|
+
var init_index_marker_store = __esm({
|
|
49
|
+
"dist/src/index-marker-store.js"() {
|
|
50
|
+
"use strict";
|
|
51
|
+
INDEX_MARKER_TTL_MS = Number(process.env.HIVEMIND_INDEX_MARKER_TTL_MS ?? 6 * 60 * 6e4);
|
|
52
|
+
}
|
|
53
|
+
});
|
|
2
54
|
|
|
3
55
|
// dist/src/utils/stdin.js
|
|
4
56
|
function readStdin() {
|
|
@@ -45,15 +97,13 @@ function loadConfig() {
|
|
|
45
97
|
apiUrl: process.env.HIVEMIND_API_URL ?? creds?.apiUrl ?? "https://api.deeplake.ai",
|
|
46
98
|
tableName: process.env.HIVEMIND_TABLE ?? "memory",
|
|
47
99
|
sessionsTableName: process.env.HIVEMIND_SESSIONS_TABLE ?? "sessions",
|
|
100
|
+
skillsTableName: process.env.HIVEMIND_SKILLS_TABLE ?? "skills",
|
|
48
101
|
memoryPath: process.env.HIVEMIND_MEMORY_PATH ?? join(home, ".deeplake", "memory")
|
|
49
102
|
};
|
|
50
103
|
}
|
|
51
104
|
|
|
52
105
|
// dist/src/deeplake-api.js
|
|
53
106
|
import { randomUUID } from "node:crypto";
|
|
54
|
-
import { existsSync as existsSync2, mkdirSync, readFileSync as readFileSync2, writeFileSync } from "node:fs";
|
|
55
|
-
import { join as join3 } from "node:path";
|
|
56
|
-
import { tmpdir } from "node:os";
|
|
57
107
|
|
|
58
108
|
// dist/src/utils/debug.js
|
|
59
109
|
import { appendFileSync } from "node:fs";
|
|
@@ -61,6 +111,9 @@ import { join as join2 } from "node:path";
|
|
|
61
111
|
import { homedir as homedir2 } from "node:os";
|
|
62
112
|
var DEBUG = process.env.HIVEMIND_DEBUG === "1";
|
|
63
113
|
var LOG = join2(homedir2(), ".deeplake", "hook-debug.log");
|
|
114
|
+
function utcTimestamp(d = /* @__PURE__ */ new Date()) {
|
|
115
|
+
return d.toISOString().replace("T", " ").slice(0, 19) + " UTC";
|
|
116
|
+
}
|
|
64
117
|
function log(tag, msg) {
|
|
65
118
|
if (!DEBUG)
|
|
66
119
|
return;
|
|
@@ -72,8 +125,33 @@ function log(tag, msg) {
|
|
|
72
125
|
function sqlStr(value) {
|
|
73
126
|
return value.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/\0/g, "").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, "");
|
|
74
127
|
}
|
|
128
|
+
function sqlIdent(name) {
|
|
129
|
+
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
|
|
130
|
+
throw new Error(`Invalid SQL identifier: ${JSON.stringify(name)}`);
|
|
131
|
+
}
|
|
132
|
+
return name;
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
// dist/src/embeddings/columns.js
|
|
136
|
+
var SUMMARY_EMBEDDING_COL = "summary_embedding";
|
|
137
|
+
var MESSAGE_EMBEDDING_COL = "message_embedding";
|
|
138
|
+
|
|
139
|
+
// dist/src/utils/client-header.js
|
|
140
|
+
var DEEPLAKE_CLIENT_HEADER = "X-Deeplake-Client";
|
|
141
|
+
function deeplakeClientValue() {
|
|
142
|
+
return "hivemind";
|
|
143
|
+
}
|
|
144
|
+
function deeplakeClientHeader() {
|
|
145
|
+
return { [DEEPLAKE_CLIENT_HEADER]: deeplakeClientValue() };
|
|
146
|
+
}
|
|
75
147
|
|
|
76
148
|
// dist/src/deeplake-api.js
|
|
149
|
+
var indexMarkerStorePromise = null;
|
|
150
|
+
function getIndexMarkerStore() {
|
|
151
|
+
if (!indexMarkerStorePromise)
|
|
152
|
+
indexMarkerStorePromise = Promise.resolve().then(() => (init_index_marker_store(), index_marker_store_exports));
|
|
153
|
+
return indexMarkerStorePromise;
|
|
154
|
+
}
|
|
77
155
|
var log2 = (msg) => log("sdk", msg);
|
|
78
156
|
function summarizeSql(sql, maxLen = 220) {
|
|
79
157
|
const compact = sql.replace(/\s+/g, " ").trim();
|
|
@@ -93,7 +171,6 @@ var MAX_RETRIES = 3;
|
|
|
93
171
|
var BASE_DELAY_MS = 500;
|
|
94
172
|
var MAX_CONCURRENCY = 5;
|
|
95
173
|
var QUERY_TIMEOUT_MS = Number(process.env.HIVEMIND_QUERY_TIMEOUT_MS ?? 1e4);
|
|
96
|
-
var INDEX_MARKER_TTL_MS = Number(process.env.HIVEMIND_INDEX_MARKER_TTL_MS ?? 6 * 60 * 6e4);
|
|
97
174
|
function sleep(ms) {
|
|
98
175
|
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
99
176
|
}
|
|
@@ -113,9 +190,6 @@ function isTransientHtml403(text) {
|
|
|
113
190
|
const body = text.toLowerCase();
|
|
114
191
|
return body.includes("<html") || body.includes("403 forbidden") || body.includes("cloudflare") || body.includes("nginx");
|
|
115
192
|
}
|
|
116
|
-
function getIndexMarkerDir() {
|
|
117
|
-
return process.env.HIVEMIND_INDEX_MARKER_DIR ?? join3(tmpdir(), "hivemind-deeplake-indexes");
|
|
118
|
-
}
|
|
119
193
|
var Semaphore = class {
|
|
120
194
|
max;
|
|
121
195
|
waiting = [];
|
|
@@ -184,7 +258,8 @@ var DeeplakeApi = class {
|
|
|
184
258
|
headers: {
|
|
185
259
|
Authorization: `Bearer ${this.token}`,
|
|
186
260
|
"Content-Type": "application/json",
|
|
187
|
-
"X-Activeloop-Org-Id": this.orgId
|
|
261
|
+
"X-Activeloop-Org-Id": this.orgId,
|
|
262
|
+
...deeplakeClientHeader()
|
|
188
263
|
},
|
|
189
264
|
signal,
|
|
190
265
|
body: JSON.stringify({ query: sql })
|
|
@@ -211,7 +286,8 @@ var DeeplakeApi = class {
|
|
|
211
286
|
}
|
|
212
287
|
const text = await resp.text().catch(() => "");
|
|
213
288
|
const retryable403 = isSessionInsertQuery(sql) && (resp.status === 401 || resp.status === 403 && (text.length === 0 || isTransientHtml403(text)));
|
|
214
|
-
|
|
289
|
+
const alreadyExists = resp.status === 500 && isDuplicateIndexError(text);
|
|
290
|
+
if (!alreadyExists && attempt < MAX_RETRIES && (RETRYABLE_CODES.has(resp.status) || retryable403)) {
|
|
215
291
|
const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
|
|
216
292
|
log2(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`);
|
|
217
293
|
await sleep(delay);
|
|
@@ -245,7 +321,7 @@ var DeeplakeApi = class {
|
|
|
245
321
|
const lud = row.lastUpdateDate ?? ts;
|
|
246
322
|
const exists = await this.query(`SELECT path FROM "${this.tableName}" WHERE path = '${sqlStr(row.path)}' LIMIT 1`);
|
|
247
323
|
if (exists.length > 0) {
|
|
248
|
-
let setClauses = `summary = E'${sqlStr(row.contentText)}', mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`;
|
|
324
|
+
let setClauses = `summary = E'${sqlStr(row.contentText)}', ${SUMMARY_EMBEDDING_COL} = NULL, mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`;
|
|
249
325
|
if (row.project !== void 0)
|
|
250
326
|
setClauses += `, project = '${sqlStr(row.project)}'`;
|
|
251
327
|
if (row.description !== void 0)
|
|
@@ -253,8 +329,8 @@ var DeeplakeApi = class {
|
|
|
253
329
|
await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(row.path)}'`);
|
|
254
330
|
} else {
|
|
255
331
|
const id = randomUUID();
|
|
256
|
-
let cols =
|
|
257
|
-
let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'${sqlStr(row.contentText)}', '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`;
|
|
332
|
+
let cols = `id, path, filename, summary, ${SUMMARY_EMBEDDING_COL}, mime_type, size_bytes, creation_date, last_update_date`;
|
|
333
|
+
let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'${sqlStr(row.contentText)}', NULL, '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`;
|
|
258
334
|
if (row.project !== void 0) {
|
|
259
335
|
cols += ", project";
|
|
260
336
|
vals += `, '${sqlStr(row.project)}'`;
|
|
@@ -279,48 +355,83 @@ var DeeplakeApi = class {
|
|
|
279
355
|
buildLookupIndexName(table, suffix) {
|
|
280
356
|
return `idx_${table}_${suffix}`.replace(/[^a-zA-Z0-9_]/g, "_");
|
|
281
357
|
}
|
|
282
|
-
getLookupIndexMarkerPath(table, suffix) {
|
|
283
|
-
const markerKey = [
|
|
284
|
-
this.workspaceId,
|
|
285
|
-
this.orgId,
|
|
286
|
-
table,
|
|
287
|
-
suffix
|
|
288
|
-
].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_");
|
|
289
|
-
return join3(getIndexMarkerDir(), `${markerKey}.json`);
|
|
290
|
-
}
|
|
291
|
-
hasFreshLookupIndexMarker(table, suffix) {
|
|
292
|
-
const markerPath = this.getLookupIndexMarkerPath(table, suffix);
|
|
293
|
-
if (!existsSync2(markerPath))
|
|
294
|
-
return false;
|
|
295
|
-
try {
|
|
296
|
-
const raw = JSON.parse(readFileSync2(markerPath, "utf-8"));
|
|
297
|
-
const updatedAt = raw.updatedAt ? new Date(raw.updatedAt).getTime() : NaN;
|
|
298
|
-
if (!Number.isFinite(updatedAt) || Date.now() - updatedAt > INDEX_MARKER_TTL_MS)
|
|
299
|
-
return false;
|
|
300
|
-
return true;
|
|
301
|
-
} catch {
|
|
302
|
-
return false;
|
|
303
|
-
}
|
|
304
|
-
}
|
|
305
|
-
markLookupIndexReady(table, suffix) {
|
|
306
|
-
mkdirSync(getIndexMarkerDir(), { recursive: true });
|
|
307
|
-
writeFileSync(this.getLookupIndexMarkerPath(table, suffix), JSON.stringify({ updatedAt: (/* @__PURE__ */ new Date()).toISOString() }), "utf-8");
|
|
308
|
-
}
|
|
309
358
|
async ensureLookupIndex(table, suffix, columnsSql) {
|
|
310
|
-
|
|
359
|
+
const markers = await getIndexMarkerStore();
|
|
360
|
+
const markerPath = markers.buildIndexMarkerPath(this.workspaceId, this.orgId, table, suffix);
|
|
361
|
+
if (markers.hasFreshIndexMarker(markerPath))
|
|
311
362
|
return;
|
|
312
363
|
const indexName = this.buildLookupIndexName(table, suffix);
|
|
313
364
|
try {
|
|
314
365
|
await this.query(`CREATE INDEX IF NOT EXISTS "${indexName}" ON "${table}" ${columnsSql}`);
|
|
315
|
-
|
|
366
|
+
markers.writeIndexMarker(markerPath);
|
|
316
367
|
} catch (e) {
|
|
317
368
|
if (isDuplicateIndexError(e)) {
|
|
318
|
-
|
|
369
|
+
markers.writeIndexMarker(markerPath);
|
|
319
370
|
return;
|
|
320
371
|
}
|
|
321
372
|
log2(`index "${indexName}" skipped: ${e.message}`);
|
|
322
373
|
}
|
|
323
374
|
}
|
|
375
|
+
/**
|
|
376
|
+
* Ensure a vector column exists on the given table.
|
|
377
|
+
*
|
|
378
|
+
* The previous implementation always issued `ALTER TABLE ADD COLUMN IF NOT
|
|
379
|
+
* EXISTS …` on every SessionStart. On a long-running workspace that's
|
|
380
|
+
* already migrated, every call returns 500 "Column already exists" — noisy
|
|
381
|
+
* in the log and a wasted round-trip. Worse, the very first call after the
|
|
382
|
+
* column is genuinely added triggers Deeplake's post-ALTER `vector::at`
|
|
383
|
+
* window (~30s) during which subsequent INSERTs fail; minimising the
|
|
384
|
+
* number of ALTER calls minimises exposure to that window.
|
|
385
|
+
*
|
|
386
|
+
* New flow:
|
|
387
|
+
* 1. Check the local marker file (mirrors ensureLookupIndex). If fresh,
|
|
388
|
+
* return — zero network calls.
|
|
389
|
+
* 2. SELECT 1 FROM information_schema.columns WHERE table_name = T AND
|
|
390
|
+
* column_name = C. Read-only, idempotent, can't tickle the post-ALTER
|
|
391
|
+
* bug. If the column is present → mark + return.
|
|
392
|
+
* 3. Only if step 2 says the column is missing, fall back to ALTER ADD
|
|
393
|
+
* COLUMN IF NOT EXISTS. Mark on success, also mark if Deeplake reports
|
|
394
|
+
* "already exists" (race: another client added it between our SELECT
|
|
395
|
+
* and ALTER).
|
|
396
|
+
*
|
|
397
|
+
* Marker uses the same dir / TTL as ensureLookupIndex so both schema
|
|
398
|
+
* caches share an opt-out (HIVEMIND_INDEX_MARKER_DIR) and a TTL knob.
|
|
399
|
+
*/
|
|
400
|
+
async ensureEmbeddingColumn(table, column) {
|
|
401
|
+
await this.ensureColumn(table, column, "FLOAT4[]");
|
|
402
|
+
}
|
|
403
|
+
/**
|
|
404
|
+
* Generic marker-gated column migration. Same SELECT-then-ALTER flow as
|
|
405
|
+
* ensureEmbeddingColumn, parameterized by SQL type so it can patch up any
|
|
406
|
+
* column that was added to the schema after the table was originally
|
|
407
|
+
* created. Used today for `summary_embedding`, `message_embedding`, and
|
|
408
|
+
* the `agent` column (added 2026-04-11) — the latter has no fallback if
|
|
409
|
+
* a user upgraded over a pre-2026-04-11 table, so every INSERT fails
|
|
410
|
+
* with `column "agent" does not exist`.
|
|
411
|
+
*/
|
|
412
|
+
async ensureColumn(table, column, sqlType) {
|
|
413
|
+
const markers = await getIndexMarkerStore();
|
|
414
|
+
const markerPath = markers.buildIndexMarkerPath(this.workspaceId, this.orgId, table, `col_${column}`);
|
|
415
|
+
if (markers.hasFreshIndexMarker(markerPath))
|
|
416
|
+
return;
|
|
417
|
+
const colCheck = `SELECT 1 FROM information_schema.columns WHERE table_name = '${sqlStr(table)}' AND column_name = '${sqlStr(column)}' AND table_schema = '${sqlStr(this.workspaceId)}' LIMIT 1`;
|
|
418
|
+
const rows = await this.query(colCheck);
|
|
419
|
+
if (rows.length > 0) {
|
|
420
|
+
markers.writeIndexMarker(markerPath);
|
|
421
|
+
return;
|
|
422
|
+
}
|
|
423
|
+
try {
|
|
424
|
+
await this.query(`ALTER TABLE "${table}" ADD COLUMN ${column} ${sqlType}`);
|
|
425
|
+
} catch (e) {
|
|
426
|
+
const msg = e instanceof Error ? e.message : String(e);
|
|
427
|
+
if (!/already exists/i.test(msg))
|
|
428
|
+
throw e;
|
|
429
|
+
const recheck = await this.query(colCheck);
|
|
430
|
+
if (recheck.length === 0)
|
|
431
|
+
throw e;
|
|
432
|
+
}
|
|
433
|
+
markers.writeIndexMarker(markerPath);
|
|
434
|
+
}
|
|
324
435
|
/** List all tables in the workspace (with retry). */
|
|
325
436
|
async listTables(forceRefresh = false) {
|
|
326
437
|
if (!forceRefresh && this._tablesCache)
|
|
@@ -336,7 +447,8 @@ var DeeplakeApi = class {
|
|
|
336
447
|
const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, {
|
|
337
448
|
headers: {
|
|
338
449
|
Authorization: `Bearer ${this.token}`,
|
|
339
|
-
"X-Activeloop-Org-Id": this.orgId
|
|
450
|
+
"X-Activeloop-Org-Id": this.orgId,
|
|
451
|
+
...deeplakeClientHeader()
|
|
340
452
|
}
|
|
341
453
|
});
|
|
342
454
|
if (resp.ok) {
|
|
@@ -361,29 +473,84 @@ var DeeplakeApi = class {
|
|
|
361
473
|
}
|
|
362
474
|
return { tables: [], cacheable: false };
|
|
363
475
|
}
|
|
476
|
+
/**
|
|
477
|
+
* Run a `CREATE TABLE` with an extra outer retry budget. The base
|
|
478
|
+
* `query()` already retries 3 times on fetch errors (~3.5s total), but a
|
|
479
|
+
* failed CREATE is permanent corruption — every subsequent SELECT against
|
|
480
|
+
* the missing table fails. Wrapping in an outer loop with longer backoff
|
|
481
|
+
* (2s, 5s, then 10s) gives us ~17s of reach across transient network
|
|
482
|
+
* blips before giving up. Failures still propagate; getApi() resets its
|
|
483
|
+
* cache on init failure (openclaw plugin) so the next call retries the
|
|
484
|
+
* whole init flow.
|
|
485
|
+
*/
|
|
486
|
+
async createTableWithRetry(sql, label) {
|
|
487
|
+
const OUTER_BACKOFFS_MS = [2e3, 5e3, 1e4];
|
|
488
|
+
let lastErr = null;
|
|
489
|
+
for (let attempt = 0; attempt <= OUTER_BACKOFFS_MS.length; attempt++) {
|
|
490
|
+
try {
|
|
491
|
+
await this.query(sql);
|
|
492
|
+
return;
|
|
493
|
+
} catch (err) {
|
|
494
|
+
lastErr = err;
|
|
495
|
+
const msg = err instanceof Error ? err.message : String(err);
|
|
496
|
+
log2(`CREATE TABLE "${label}" attempt ${attempt + 1}/${OUTER_BACKOFFS_MS.length + 1} failed: ${msg}`);
|
|
497
|
+
if (attempt < OUTER_BACKOFFS_MS.length) {
|
|
498
|
+
await sleep(OUTER_BACKOFFS_MS[attempt]);
|
|
499
|
+
}
|
|
500
|
+
}
|
|
501
|
+
}
|
|
502
|
+
throw lastErr;
|
|
503
|
+
}
|
|
364
504
|
/** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
|
|
365
505
|
async ensureTable(name) {
|
|
366
|
-
const tbl = name ?? this.tableName;
|
|
506
|
+
const tbl = sqlIdent(name ?? this.tableName);
|
|
367
507
|
const tables = await this.listTables();
|
|
368
508
|
if (!tables.includes(tbl)) {
|
|
369
509
|
log2(`table "${tbl}" not found, creating`);
|
|
370
|
-
await this.
|
|
510
|
+
await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', summary_embedding FLOAT4[], author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`, tbl);
|
|
371
511
|
log2(`table "${tbl}" created`);
|
|
372
512
|
if (!tables.includes(tbl))
|
|
373
513
|
this._tablesCache = [...tables, tbl];
|
|
374
514
|
}
|
|
515
|
+
await this.ensureEmbeddingColumn(tbl, SUMMARY_EMBEDDING_COL);
|
|
516
|
+
await this.ensureColumn(tbl, "agent", "TEXT NOT NULL DEFAULT ''");
|
|
375
517
|
}
|
|
376
518
|
/** Create the sessions table (uses JSONB for message since every row is a JSON event). */
|
|
377
519
|
async ensureSessionsTable(name) {
|
|
520
|
+
const safe = sqlIdent(name);
|
|
378
521
|
const tables = await this.listTables();
|
|
379
|
-
if (!tables.includes(
|
|
380
|
-
log2(`table "${
|
|
381
|
-
await this.
|
|
382
|
-
log2(`table "${
|
|
383
|
-
if (!tables.includes(
|
|
384
|
-
this._tablesCache = [...tables,
|
|
522
|
+
if (!tables.includes(safe)) {
|
|
523
|
+
log2(`table "${safe}" not found, creating`);
|
|
524
|
+
await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, message_embedding FLOAT4[], author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`, safe);
|
|
525
|
+
log2(`table "${safe}" created`);
|
|
526
|
+
if (!tables.includes(safe))
|
|
527
|
+
this._tablesCache = [...tables, safe];
|
|
385
528
|
}
|
|
386
|
-
await this.
|
|
529
|
+
await this.ensureEmbeddingColumn(safe, MESSAGE_EMBEDDING_COL);
|
|
530
|
+
await this.ensureColumn(safe, "agent", "TEXT NOT NULL DEFAULT ''");
|
|
531
|
+
await this.ensureLookupIndex(safe, "path_creation_date", `("path", "creation_date")`);
|
|
532
|
+
}
|
|
533
|
+
/**
|
|
534
|
+
* Create the skills table.
|
|
535
|
+
*
|
|
536
|
+
* One row per skill version. Workers INSERT a fresh row on every KEEP /
|
|
537
|
+
* MERGE rather than UPDATE-ing in place, so the full version history is
|
|
538
|
+
* recoverable. Uniqueness in the *current* state is by (project_key, name)
|
|
539
|
+
* — newer rows shadow older ones at read time (ORDER BY version DESC).
|
|
540
|
+
* This sidesteps the Deeplake UPDATE-coalescing quirk that bit the wiki
|
|
541
|
+
* worker.
|
|
542
|
+
*/
|
|
543
|
+
async ensureSkillsTable(name) {
|
|
544
|
+
const safe = sqlIdent(name);
|
|
545
|
+
const tables = await this.listTables();
|
|
546
|
+
if (!tables.includes(safe)) {
|
|
547
|
+
log2(`table "${safe}" not found, creating`);
|
|
548
|
+
await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', name TEXT NOT NULL DEFAULT '', project TEXT NOT NULL DEFAULT '', project_key TEXT NOT NULL DEFAULT '', local_path TEXT NOT NULL DEFAULT '', install TEXT NOT NULL DEFAULT 'project', source_sessions TEXT NOT NULL DEFAULT '[]', source_agent TEXT NOT NULL DEFAULT '', scope TEXT NOT NULL DEFAULT 'me', author TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', trigger_text TEXT NOT NULL DEFAULT '', body TEXT NOT NULL DEFAULT '', version BIGINT NOT NULL DEFAULT 1, created_at TEXT NOT NULL DEFAULT '', updated_at TEXT NOT NULL DEFAULT '') USING deeplake`, safe);
|
|
549
|
+
log2(`table "${safe}" created`);
|
|
550
|
+
if (!tables.includes(safe))
|
|
551
|
+
this._tablesCache = [...tables, safe];
|
|
552
|
+
}
|
|
553
|
+
await this.ensureLookupIndex(safe, "project_key_name", `("project_key", "name")`);
|
|
387
554
|
}
|
|
388
555
|
};
|
|
389
556
|
|
|
@@ -393,8 +560,849 @@ function buildSessionPath(config, sessionId) {
|
|
|
393
560
|
return `/sessions/${config.userName}/${config.userName}_${config.orgName}_${workspace}_${sessionId}.jsonl`;
|
|
394
561
|
}
|
|
395
562
|
|
|
563
|
+
// dist/src/embeddings/client.js
|
|
564
|
+
import { connect } from "node:net";
|
|
565
|
+
import { spawn } from "node:child_process";
|
|
566
|
+
import { openSync, closeSync, writeSync, unlinkSync, existsSync as existsSync3, readFileSync as readFileSync3 } from "node:fs";
|
|
567
|
+
import { homedir as homedir3 } from "node:os";
|
|
568
|
+
import { join as join4 } from "node:path";
|
|
569
|
+
|
|
570
|
+
// dist/src/embeddings/protocol.js
|
|
571
|
+
var DEFAULT_SOCKET_DIR = "/tmp";
|
|
572
|
+
var DEFAULT_IDLE_TIMEOUT_MS = 10 * 60 * 1e3;
|
|
573
|
+
var DEFAULT_CLIENT_TIMEOUT_MS = 2e3;
|
|
574
|
+
function socketPathFor(uid, dir = DEFAULT_SOCKET_DIR) {
|
|
575
|
+
return `${dir}/hivemind-embed-${uid}.sock`;
|
|
576
|
+
}
|
|
577
|
+
function pidPathFor(uid, dir = DEFAULT_SOCKET_DIR) {
|
|
578
|
+
return `${dir}/hivemind-embed-${uid}.pid`;
|
|
579
|
+
}
|
|
580
|
+
|
|
581
|
+
// dist/src/embeddings/client.js
|
|
582
|
+
var SHARED_DAEMON_PATH = join4(homedir3(), ".hivemind", "embed-deps", "embed-daemon.js");
|
|
583
|
+
var log3 = (m) => log("embed-client", m);
|
|
584
|
+
function getUid() {
|
|
585
|
+
const uid = typeof process.getuid === "function" ? process.getuid() : void 0;
|
|
586
|
+
return uid !== void 0 ? String(uid) : process.env.USER ?? "default";
|
|
587
|
+
}
|
|
588
|
+
var EmbedClient = class {
|
|
589
|
+
socketPath;
|
|
590
|
+
pidPath;
|
|
591
|
+
timeoutMs;
|
|
592
|
+
daemonEntry;
|
|
593
|
+
autoSpawn;
|
|
594
|
+
spawnWaitMs;
|
|
595
|
+
nextId = 0;
|
|
596
|
+
constructor(opts = {}) {
|
|
597
|
+
const uid = getUid();
|
|
598
|
+
const dir = opts.socketDir ?? "/tmp";
|
|
599
|
+
this.socketPath = socketPathFor(uid, dir);
|
|
600
|
+
this.pidPath = pidPathFor(uid, dir);
|
|
601
|
+
this.timeoutMs = opts.timeoutMs ?? DEFAULT_CLIENT_TIMEOUT_MS;
|
|
602
|
+
this.daemonEntry = opts.daemonEntry ?? process.env.HIVEMIND_EMBED_DAEMON ?? (existsSync3(SHARED_DAEMON_PATH) ? SHARED_DAEMON_PATH : void 0);
|
|
603
|
+
this.autoSpawn = opts.autoSpawn ?? true;
|
|
604
|
+
this.spawnWaitMs = opts.spawnWaitMs ?? 5e3;
|
|
605
|
+
}
|
|
606
|
+
/**
|
|
607
|
+
* Returns an embedding vector, or null on timeout/failure. Hooks MUST treat
|
|
608
|
+
* null as "skip embedding column" — never block the write path on us.
|
|
609
|
+
*
|
|
610
|
+
* Fire-and-forget spawn on miss: if the daemon isn't up, this call returns
|
|
611
|
+
* null AND kicks off a background spawn. The next call finds a ready daemon.
|
|
612
|
+
*/
|
|
613
|
+
async embed(text, kind = "document") {
|
|
614
|
+
let sock;
|
|
615
|
+
try {
|
|
616
|
+
sock = await this.connectOnce();
|
|
617
|
+
} catch {
|
|
618
|
+
if (this.autoSpawn)
|
|
619
|
+
this.trySpawnDaemon();
|
|
620
|
+
return null;
|
|
621
|
+
}
|
|
622
|
+
try {
|
|
623
|
+
const id = String(++this.nextId);
|
|
624
|
+
const req = { op: "embed", id, kind, text };
|
|
625
|
+
const resp = await this.sendAndWait(sock, req);
|
|
626
|
+
if (resp.error || !("embedding" in resp) || !resp.embedding) {
|
|
627
|
+
log3(`embed err: ${resp.error ?? "no embedding"}`);
|
|
628
|
+
return null;
|
|
629
|
+
}
|
|
630
|
+
return resp.embedding;
|
|
631
|
+
} catch (e) {
|
|
632
|
+
const err = e instanceof Error ? e.message : String(e);
|
|
633
|
+
log3(`embed failed: ${err}`);
|
|
634
|
+
return null;
|
|
635
|
+
} finally {
|
|
636
|
+
try {
|
|
637
|
+
sock.end();
|
|
638
|
+
} catch {
|
|
639
|
+
}
|
|
640
|
+
}
|
|
641
|
+
}
|
|
642
|
+
/**
|
|
643
|
+
* Wait up to spawnWaitMs for the daemon to accept connections, spawning if
|
|
644
|
+
* necessary. Meant for SessionStart / long-running batches — not the hot path.
|
|
645
|
+
*/
|
|
646
|
+
async warmup() {
|
|
647
|
+
try {
|
|
648
|
+
const s = await this.connectOnce();
|
|
649
|
+
s.end();
|
|
650
|
+
return true;
|
|
651
|
+
} catch {
|
|
652
|
+
if (!this.autoSpawn)
|
|
653
|
+
return false;
|
|
654
|
+
this.trySpawnDaemon();
|
|
655
|
+
try {
|
|
656
|
+
const s = await this.waitForSocket();
|
|
657
|
+
s.end();
|
|
658
|
+
return true;
|
|
659
|
+
} catch {
|
|
660
|
+
return false;
|
|
661
|
+
}
|
|
662
|
+
}
|
|
663
|
+
}
|
|
664
|
+
connectOnce() {
|
|
665
|
+
return new Promise((resolve, reject) => {
|
|
666
|
+
const sock = connect(this.socketPath);
|
|
667
|
+
const to = setTimeout(() => {
|
|
668
|
+
sock.destroy();
|
|
669
|
+
reject(new Error("connect timeout"));
|
|
670
|
+
}, this.timeoutMs);
|
|
671
|
+
sock.once("connect", () => {
|
|
672
|
+
clearTimeout(to);
|
|
673
|
+
resolve(sock);
|
|
674
|
+
});
|
|
675
|
+
sock.once("error", (e) => {
|
|
676
|
+
clearTimeout(to);
|
|
677
|
+
reject(e);
|
|
678
|
+
});
|
|
679
|
+
});
|
|
680
|
+
}
|
|
681
|
+
trySpawnDaemon() {
|
|
682
|
+
let fd;
|
|
683
|
+
try {
|
|
684
|
+
fd = openSync(this.pidPath, "wx", 384);
|
|
685
|
+
writeSync(fd, String(process.pid));
|
|
686
|
+
} catch (e) {
|
|
687
|
+
if (this.isPidFileStale()) {
|
|
688
|
+
try {
|
|
689
|
+
unlinkSync(this.pidPath);
|
|
690
|
+
} catch {
|
|
691
|
+
}
|
|
692
|
+
try {
|
|
693
|
+
fd = openSync(this.pidPath, "wx", 384);
|
|
694
|
+
writeSync(fd, String(process.pid));
|
|
695
|
+
} catch {
|
|
696
|
+
return;
|
|
697
|
+
}
|
|
698
|
+
} else {
|
|
699
|
+
return;
|
|
700
|
+
}
|
|
701
|
+
}
|
|
702
|
+
if (!this.daemonEntry || !existsSync3(this.daemonEntry)) {
|
|
703
|
+
log3(`daemonEntry not configured or missing: ${this.daemonEntry}`);
|
|
704
|
+
try {
|
|
705
|
+
closeSync(fd);
|
|
706
|
+
unlinkSync(this.pidPath);
|
|
707
|
+
} catch {
|
|
708
|
+
}
|
|
709
|
+
return;
|
|
710
|
+
}
|
|
711
|
+
try {
|
|
712
|
+
const child = spawn(process.execPath, [this.daemonEntry], {
|
|
713
|
+
detached: true,
|
|
714
|
+
stdio: "ignore",
|
|
715
|
+
env: process.env
|
|
716
|
+
});
|
|
717
|
+
child.unref();
|
|
718
|
+
log3(`spawned daemon pid=${child.pid}`);
|
|
719
|
+
} finally {
|
|
720
|
+
closeSync(fd);
|
|
721
|
+
}
|
|
722
|
+
}
|
|
723
|
+
isPidFileStale() {
|
|
724
|
+
try {
|
|
725
|
+
const raw = readFileSync3(this.pidPath, "utf-8").trim();
|
|
726
|
+
const pid = Number(raw);
|
|
727
|
+
if (!pid || Number.isNaN(pid))
|
|
728
|
+
return true;
|
|
729
|
+
try {
|
|
730
|
+
process.kill(pid, 0);
|
|
731
|
+
return false;
|
|
732
|
+
} catch {
|
|
733
|
+
return true;
|
|
734
|
+
}
|
|
735
|
+
} catch {
|
|
736
|
+
return true;
|
|
737
|
+
}
|
|
738
|
+
}
|
|
739
|
+
async waitForSocket() {
|
|
740
|
+
const deadline = Date.now() + this.spawnWaitMs;
|
|
741
|
+
let delay = 30;
|
|
742
|
+
while (Date.now() < deadline) {
|
|
743
|
+
await sleep2(delay);
|
|
744
|
+
delay = Math.min(delay * 1.5, 300);
|
|
745
|
+
if (!existsSync3(this.socketPath))
|
|
746
|
+
continue;
|
|
747
|
+
try {
|
|
748
|
+
return await this.connectOnce();
|
|
749
|
+
} catch {
|
|
750
|
+
}
|
|
751
|
+
}
|
|
752
|
+
throw new Error("daemon did not become ready within spawnWaitMs");
|
|
753
|
+
}
|
|
754
|
+
sendAndWait(sock, req) {
|
|
755
|
+
return new Promise((resolve, reject) => {
|
|
756
|
+
let buf = "";
|
|
757
|
+
const to = setTimeout(() => {
|
|
758
|
+
sock.destroy();
|
|
759
|
+
reject(new Error("request timeout"));
|
|
760
|
+
}, this.timeoutMs);
|
|
761
|
+
sock.setEncoding("utf-8");
|
|
762
|
+
sock.on("data", (chunk) => {
|
|
763
|
+
buf += chunk;
|
|
764
|
+
const nl = buf.indexOf("\n");
|
|
765
|
+
if (nl === -1)
|
|
766
|
+
return;
|
|
767
|
+
const line = buf.slice(0, nl);
|
|
768
|
+
clearTimeout(to);
|
|
769
|
+
try {
|
|
770
|
+
resolve(JSON.parse(line));
|
|
771
|
+
} catch (e) {
|
|
772
|
+
reject(e);
|
|
773
|
+
}
|
|
774
|
+
});
|
|
775
|
+
sock.on("error", (e) => {
|
|
776
|
+
clearTimeout(to);
|
|
777
|
+
reject(e);
|
|
778
|
+
});
|
|
779
|
+
sock.on("end", () => {
|
|
780
|
+
clearTimeout(to);
|
|
781
|
+
reject(new Error("connection closed without response"));
|
|
782
|
+
});
|
|
783
|
+
sock.write(JSON.stringify(req) + "\n");
|
|
784
|
+
});
|
|
785
|
+
}
|
|
786
|
+
};
|
|
787
|
+
function sleep2(ms) {
  // Promise-based delay helper used by the daemon-readiness polling loop.
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
|
|
790
|
+
|
|
791
|
+
// dist/src/embeddings/sql.js
|
|
792
|
+
function embeddingSqlLiteral(vec) {
  // Renders a numeric vector as a Postgres float4[] literal. Returns the
  // string "NULL" for a missing/empty vector or any non-finite component,
  // so the value can be safely spliced into an INSERT statement.
  if (!vec || vec.length === 0)
    return "NULL";
  const rendered = [];
  for (const component of vec) {
    if (!Number.isFinite(component))
      return "NULL";
    rendered.push(String(component));
  }
  return `ARRAY[${rendered.join(",")}]::float4[]`;
}
|
|
803
|
+
|
|
804
|
+
// dist/src/embeddings/disable.js
|
|
805
|
+
import { createRequire } from "node:module";
|
|
806
|
+
import { homedir as homedir4 } from "node:os";
|
|
807
|
+
import { join as join5 } from "node:path";
|
|
808
|
+
import { pathToFileURL } from "node:url";
|
|
809
|
+
var cachedStatus = null;
|
|
810
|
+
function defaultResolveTransformers() {
  // Throws unless "@huggingface/transformers" resolves either from this
  // bundle's own module graph or from the shared ~/.hivemind/embed-deps dir.
  const pkg = "@huggingface/transformers";
  try {
    createRequire(import.meta.url).resolve(pkg);
    return;
  } catch {
    // Not installed alongside the bundle; fall through to the shared dir.
  }
  const sharedDir = join5(homedir4(), ".hivemind", "embed-deps");
  createRequire(pathToFileURL(`${sharedDir}/`).href).resolve(pkg);
}
|
|
819
|
+
var _resolve = defaultResolveTransformers;
|
|
820
|
+
function detectStatus() {
  // Embeddings status: explicit env opt-out wins, otherwise probe whether
  // the transformers dependency is resolvable via _resolve().
  if (process.env.HIVEMIND_EMBEDDINGS === "false")
    return "env-disabled";
  try {
    _resolve();
  } catch {
    return "no-transformers";
  }
  return "enabled";
}
|
|
830
|
+
function embeddingsStatus() {
  // Memoizes detectStatus() for the process lifetime.
  if (cachedStatus === null) {
    cachedStatus = detectStatus();
  }
  return cachedStatus;
}
|
|
836
|
+
function embeddingsDisabled() {
  // Embeddings are usable only when the status is exactly "enabled".
  const status = embeddingsStatus();
  return status !== "enabled";
}
|
|
839
|
+
|
|
840
|
+
// dist/src/hooks/cursor/capture.js
|
|
841
|
+
import { fileURLToPath as fileURLToPath3 } from "node:url";
|
|
842
|
+
import { dirname as dirname3, join as join13 } from "node:path";
|
|
843
|
+
|
|
844
|
+
// dist/src/hooks/summary-state.js
|
|
845
|
+
import { readFileSync as readFileSync4, writeFileSync as writeFileSync2, writeSync as writeSync2, mkdirSync as mkdirSync2, renameSync, existsSync as existsSync4, unlinkSync as unlinkSync2, openSync as openSync2, closeSync as closeSync2 } from "node:fs";
|
|
846
|
+
import { homedir as homedir5 } from "node:os";
|
|
847
|
+
import { join as join6 } from "node:path";
|
|
848
|
+
var dlog = (msg) => log("summary-state", msg);
|
|
849
|
+
var STATE_DIR = join6(homedir5(), ".claude", "hooks", "summary-state");
|
|
850
|
+
var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4));
|
|
851
|
+
function statePath(sessionId) {
  // Per-session JSON state file inside the summary-state directory.
  return join6(STATE_DIR, sessionId + ".json");
}
|
|
854
|
+
function lockPath(sessionId) {
  // Per-session worker lock file inside the summary-state directory.
  return join6(STATE_DIR, sessionId + ".lock");
}
|
|
857
|
+
function readState(sessionId) {
  // Returns the parsed session summary state, or null when the file is
  // absent or unparsable.
  const file = statePath(sessionId);
  if (!existsSync4(file))
    return null;
  try {
    const raw = readFileSync4(file, "utf-8");
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
|
|
867
|
+
function writeState(sessionId, state) {
  // Atomic update: serialize to a unique temp file, then rename over the
  // real state file so readers never observe a partial write.
  mkdirSync2(STATE_DIR, { recursive: true });
  const target = statePath(sessionId);
  const tmp = `${target}.${process.pid}.${Date.now()}.tmp`;
  writeFileSync2(tmp, JSON.stringify(state));
  renameSync(tmp, target);
}
|
|
874
|
+
function withRmwLock(sessionId, fn) {
  // Serializes read-modify-write cycles on a session's state file behind an
  // exclusive ".rmw" lock file. If the lock can't be acquired within ~2s it
  // is assumed stale and reclaimed.
  mkdirSync2(STATE_DIR, { recursive: true });
  const rmwLock = statePath(sessionId) + ".rmw";
  const deadline = Date.now() + 2e3;
  let fd = null;
  for (;;) {
    try {
      fd = openSync2(rmwLock, "wx");
      break;
    } catch (e) {
      if (e.code !== "EEXIST")
        throw e;
      if (Date.now() > deadline) {
        dlog(`rmw lock deadline exceeded for ${sessionId}, reclaiming stale lock`);
        try {
          unlinkSync2(rmwLock);
        } catch (unlinkErr) {
          dlog(`stale rmw lock unlink failed for ${sessionId}: ${unlinkErr.message}`);
        }
        continue;
      }
      // Synchronous ~10ms pause without spinning the CPU.
      Atomics.wait(YIELD_BUF, 0, 0, 10);
    }
  }
  try {
    return fn();
  } finally {
    closeSync2(fd);
    try {
      unlinkSync2(rmwLock);
    } catch (unlinkErr) {
      dlog(`rmw lock cleanup failed for ${sessionId}: ${unlinkErr.message}`);
    }
  }
}
|
|
908
|
+
function bumpTotalCount(sessionId) {
  // Atomically increments the per-session message counter (creating fresh
  // state on first use) and returns the updated state.
  return withRmwLock(sessionId, () => {
    const now = Date.now();
    const prev = readState(sessionId);
    const next = prev
      ? { ...prev, totalCount: prev.totalCount + 1 }
      : { lastSummaryAt: now, lastSummaryCount: 0, totalCount: 1 };
    writeState(sessionId, next);
    return next;
  });
}
|
|
917
|
+
function loadTriggerConfig() {
  // Summary-trigger thresholds, tunable via environment; invalid or missing
  // values fall back to 50 messages / 2 hours.
  const rawN = Number(process.env.HIVEMIND_SUMMARY_EVERY_N_MSGS ?? "");
  const rawH = Number(process.env.HIVEMIND_SUMMARY_EVERY_HOURS ?? "");
  const everyNMessages = Number.isInteger(rawN) && rawN > 0 ? rawN : 50;
  const everyHours = Number.isFinite(rawH) && rawH > 0 ? rawH : 2;
  return { everyNMessages, everyHours };
}
|
|
925
|
+
// Minimum message count before the very first summary of a session fires.
var FIRST_SUMMARY_AT = 10;
function shouldTrigger(state, cfg, now = Date.now()) {
  // A summary is due when: the session has never been summarized and has
  // reached FIRST_SUMMARY_AT messages; or enough new messages accumulated;
  // or there is at least one new message and the time budget elapsed.
  const unsummarized = state.totalCount - state.lastSummaryCount;
  if (state.lastSummaryCount === 0 && state.totalCount >= FIRST_SUMMARY_AT)
    return true;
  if (unsummarized >= cfg.everyNMessages)
    return true;
  const elapsedMs = now - state.lastSummaryAt;
  return unsummarized > 0 && elapsedMs >= cfg.everyHours * 3600 * 1e3;
}
|
|
936
|
+
function tryAcquireLock(sessionId, maxAgeMs = 10 * 60 * 1e3) {
  // Cross-process summary-worker mutex. The lock file holds its creation
  // timestamp; a lock older than maxAgeMs (default 10 min) is reclaimed.
  // Returns true iff this process now holds the lock.
  mkdirSync2(STATE_DIR, { recursive: true });
  const lock = lockPath(sessionId);
  if (existsSync4(lock)) {
    try {
      const ageMs = Date.now() - parseInt(readFileSync4(lock, "utf-8"), 10);
      if (Number.isFinite(ageMs) && ageMs < maxAgeMs)
        return false;
    } catch (readErr) {
      dlog(`lock file unreadable for ${sessionId}, treating as stale: ${readErr.message}`);
    }
    try {
      unlinkSync2(lock);
    } catch (unlinkErr) {
      dlog(`could not unlink stale lock for ${sessionId}: ${unlinkErr.message}`);
      return false;
    }
  }
  try {
    const fd = openSync2(lock, "wx");
    try {
      writeSync2(fd, String(Date.now()));
    } finally {
      closeSync2(fd);
    }
    return true;
  } catch (e) {
    // Losing the exclusive-create race is normal; anything else propagates.
    if (e.code === "EEXIST")
      return false;
    throw e;
  }
}
|
|
968
|
+
function releaseLock(sessionId) {
  // Best-effort lock removal; an already-missing file (ENOENT) is fine,
  // anything else is logged for diagnosis.
  try {
    unlinkSync2(lockPath(sessionId));
  } catch (e) {
    if (e?.code === "ENOENT")
      return;
    dlog(`releaseLock unlink failed for ${sessionId}: ${e.message}`);
  }
}
|
|
977
|
+
|
|
978
|
+
// dist/src/hooks/cursor/spawn-wiki-worker.js
|
|
979
|
+
import { spawn as spawn2, execSync } from "node:child_process";
|
|
980
|
+
import { fileURLToPath } from "node:url";
|
|
981
|
+
import { dirname, join as join8 } from "node:path";
|
|
982
|
+
import { writeFileSync as writeFileSync3, mkdirSync as mkdirSync4 } from "node:fs";
|
|
983
|
+
import { homedir as homedir6, tmpdir as tmpdir2 } from "node:os";
|
|
984
|
+
|
|
985
|
+
// dist/src/utils/wiki-log.js
|
|
986
|
+
import { mkdirSync as mkdirSync3, appendFileSync as appendFileSync2 } from "node:fs";
|
|
987
|
+
import { join as join7 } from "node:path";
|
|
988
|
+
function makeWikiLogger(hooksDir, filename = "deeplake-wiki.log") {
  // Builds a { path, log } pair; log() appends a timestamped line to the
  // wiki log file, silently ignoring filesystem failures.
  const path = join7(hooksDir, filename);
  const log = (msg) => {
    try {
      mkdirSync3(hooksDir, { recursive: true });
      appendFileSync2(path, `[${utcTimestamp()}] ${msg}\n`);
    } catch {
    }
  };
  return { path, log };
}
|
|
1002
|
+
|
|
1003
|
+
// dist/src/hooks/cursor/spawn-wiki-worker.js
|
|
1004
|
+
var HOME = homedir6();
|
|
1005
|
+
var wikiLogger = makeWikiLogger(join8(HOME, ".cursor", "hooks"));
|
|
1006
|
+
var WIKI_LOG = wikiLogger.path;
|
|
1007
|
+
var WIKI_PROMPT_TEMPLATE = `You are building a personal wiki from a coding session. Your goal is to extract every piece of knowledge \u2014 entities, decisions, relationships, and facts \u2014 into a structured, searchable wiki entry.
|
|
1008
|
+
|
|
1009
|
+
SESSION JSONL path: __JSONL__
|
|
1010
|
+
SUMMARY FILE to write: __SUMMARY__
|
|
1011
|
+
SESSION ID: __SESSION_ID__
|
|
1012
|
+
PROJECT: __PROJECT__
|
|
1013
|
+
PREVIOUS JSONL OFFSET (lines already processed): __PREV_OFFSET__
|
|
1014
|
+
CURRENT JSONL LINES: __JSONL_LINES__
|
|
1015
|
+
|
|
1016
|
+
Steps:
|
|
1017
|
+
1. Read the session JSONL at the path above.
|
|
1018
|
+
- If PREVIOUS JSONL OFFSET > 0, this is a resumed session. Read the existing summary file first,
|
|
1019
|
+
then focus on lines AFTER the offset for new content. Merge new facts into the existing summary.
|
|
1020
|
+
- If offset is 0, generate from scratch.
|
|
1021
|
+
|
|
1022
|
+
2. Write the summary file at the path above with this EXACT format:
|
|
1023
|
+
|
|
1024
|
+
# Session __SESSION_ID__
|
|
1025
|
+
- **Source**: __JSONL_SERVER_PATH__
|
|
1026
|
+
- **Started**: <extract from JSONL>
|
|
1027
|
+
- **Ended**: <now>
|
|
1028
|
+
- **Project**: __PROJECT__
|
|
1029
|
+
- **JSONL offset**: __JSONL_LINES__
|
|
1030
|
+
|
|
1031
|
+
## What Happened
|
|
1032
|
+
<2-3 dense sentences. What was the goal, what was accomplished, what's left.>
|
|
1033
|
+
|
|
1034
|
+
## People
|
|
1035
|
+
<For each person mentioned: name, role, what they did/said. Format: **Name** \u2014 role \u2014 action>
|
|
1036
|
+
|
|
1037
|
+
## Entities
|
|
1038
|
+
<Every named thing: repos, branches, files, APIs, tools, services, tables, features, bugs.
|
|
1039
|
+
Format: **entity** (type) \u2014 what was done with it, its current state>
|
|
1040
|
+
|
|
1041
|
+
## Decisions & Reasoning
|
|
1042
|
+
<Every decision made and WHY.>
|
|
1043
|
+
|
|
1044
|
+
## Key Facts
|
|
1045
|
+
<Bullet list of atomic facts that could answer future questions.>
|
|
1046
|
+
|
|
1047
|
+
## Files Modified
|
|
1048
|
+
<bullet list: path (new/modified/deleted) \u2014 what changed>
|
|
1049
|
+
|
|
1050
|
+
## Open Questions / TODO
|
|
1051
|
+
<Anything unresolved, blocked, or explicitly deferred>
|
|
1052
|
+
|
|
1053
|
+
IMPORTANT: Be exhaustive. Extract EVERY entity, decision, and fact.
|
|
1054
|
+
PRIVACY: Never include absolute filesystem paths in the summary.
|
|
1055
|
+
LENGTH LIMIT: Keep the total summary under 4000 characters.`;
|
|
1056
|
+
var wikiLog = wikiLogger.log;
|
|
1057
|
+
function findCursorBin() {
  // Prefer the `which`-resolved path; fall back to the bare command name so
  // PATH lookup still happens at spawn time.
  const fallback = "cursor-agent";
  try {
    const resolved = execSync("which cursor-agent 2>/dev/null", { encoding: "utf-8" }).trim();
    return resolved || fallback;
  } catch {
    return fallback;
  }
}
|
|
1064
|
+
function spawnCursorWikiWorker(opts) {
  // Writes a worker config file into a fresh temp dir and launches the
  // detached wiki-worker process against it. Fire-and-forget: the worker is
  // unref'd so this hook can exit immediately.
  const { config, sessionId, cwd, bundleDir, reason } = opts;
  const projectName = cwd.split("/").pop() || "unknown";
  const tmpDir = join8(tmpdir2(), `deeplake-wiki-${sessionId}-${Date.now()}`);
  mkdirSync4(tmpDir, { recursive: true });
  const configFile = join8(tmpDir, "config.json");
  const payload = {
    apiUrl: config.apiUrl,
    token: config.token,
    orgId: config.orgId,
    workspaceId: config.workspaceId,
    memoryTable: config.tableName,
    sessionsTable: config.sessionsTableName,
    sessionId,
    userName: config.userName,
    project: projectName,
    tmpDir,
    cursorBin: findCursorBin(),
    cursorModel: process.env.HIVEMIND_CURSOR_MODEL ?? "auto",
    wikiLog: WIKI_LOG,
    hooksDir: join8(HOME, ".cursor", "hooks"),
    promptTemplate: WIKI_PROMPT_TEMPLATE
  };
  writeFileSync3(configFile, JSON.stringify(payload));
  wikiLog(`${reason}: spawning summary worker for ${sessionId}`);
  const workerPath = join8(bundleDir, "wiki-worker.js");
  const worker = spawn2("nohup", ["node", workerPath, configFile], {
    detached: true,
    stdio: ["ignore", "ignore", "ignore"]
  });
  worker.unref();
  wikiLog(`${reason}: spawned summary worker for ${sessionId}`);
}
|
|
1095
|
+
function bundleDirFromImportMeta(importMetaUrl) {
  // Filesystem directory of the module identified by an import.meta.url.
  const modulePath = fileURLToPath(importMetaUrl);
  return dirname(modulePath);
}
|
|
1098
|
+
|
|
1099
|
+
// dist/src/skilify/spawn-skilify-worker.js
|
|
1100
|
+
import { spawn as spawn3 } from "node:child_process";
|
|
1101
|
+
import { fileURLToPath as fileURLToPath2 } from "node:url";
|
|
1102
|
+
import { dirname as dirname2, join as join10 } from "node:path";
|
|
1103
|
+
import { writeFileSync as writeFileSync4, mkdirSync as mkdirSync5, appendFileSync as appendFileSync3, chmodSync } from "node:fs";
|
|
1104
|
+
import { homedir as homedir8, tmpdir as tmpdir3 } from "node:os";
|
|
1105
|
+
|
|
1106
|
+
// dist/src/skilify/gate-runner.js
|
|
1107
|
+
import { execFileSync } from "node:child_process";
|
|
1108
|
+
import { existsSync as existsSync5 } from "node:fs";
|
|
1109
|
+
import { homedir as homedir7 } from "node:os";
|
|
1110
|
+
import { join as join9 } from "node:path";
|
|
1111
|
+
function findAgentBin(agent) {
  // Resolves an agent's CLI binary via `which`, falling back to that
  // agent's conventional install location. Unknown agents yield undefined.
  const which = (name) => {
    try {
      const out = execFileSync("which", [name], {
        encoding: "utf-8",
        stdio: ["ignore", "pipe", "ignore"]
      });
      return out.trim() || null;
    } catch {
      return null;
    }
  };
  const candidates = {
    claude_code: ["claude", join9(homedir7(), ".claude", "local", "claude")],
    codex: ["codex", "/usr/local/bin/codex"],
    cursor: ["cursor-agent", "/usr/local/bin/cursor-agent"],
    hermes: ["hermes", join9(homedir7(), ".local", "bin", "hermes")],
    pi: ["pi", join9(homedir7(), ".local", "bin", "pi")]
  };
  const entry = candidates[agent];
  if (!entry)
    return;
  const [binName, fallback] = entry;
  return which(binName) ?? fallback;
}
|
|
1136
|
+
|
|
1137
|
+
// dist/src/skilify/spawn-skilify-worker.js
|
|
1138
|
+
var HOME2 = homedir8();
|
|
1139
|
+
var SKILIFY_LOG = join10(HOME2, ".claude", "hooks", "skilify.log");
|
|
1140
|
+
function skilifyLog(msg) {
  // Appends a timestamped line to the skilify log; failures are non-fatal.
  try {
    mkdirSync5(dirname2(SKILIFY_LOG), { recursive: true });
    appendFileSync3(SKILIFY_LOG, `[${utcTimestamp()}] ${msg}\n`);
  } catch {
  }
}
|
|
1148
|
+
function spawnSkilifyWorker(opts) {
  // Materializes a 0o600 config file inside a private (0o700) temp dir and
  // launches the detached skilify worker process against it.
  const { config, cwd, projectKey, project, bundleDir, agent, scopeConfig, currentSessionId, reason } = opts;
  const tmpDir = join10(tmpdir3(), `deeplake-skilify-${projectKey}-${Date.now()}`);
  mkdirSync5(tmpDir, { recursive: true, mode: 448 });
  const gateBin = findAgentBin(agent);
  const configFile = join10(tmpDir, "config.json");
  const payload = {
    apiUrl: config.apiUrl,
    token: config.token,
    orgId: config.orgId,
    workspaceId: config.workspaceId,
    sessionsTable: config.sessionsTableName,
    skillsTable: config.skillsTableName,
    userName: config.userName,
    cwd,
    projectKey,
    project,
    agent,
    scope: scopeConfig.scope,
    team: scopeConfig.team,
    install: scopeConfig.install,
    tmpDir,
    gateBin,
    cursorModel: process.env.HIVEMIND_CURSOR_MODEL,
    hermesProvider: process.env.HIVEMIND_HERMES_PROVIDER,
    hermesModel: process.env.HIVEMIND_HERMES_MODEL,
    piProvider: process.env.HIVEMIND_PI_PROVIDER,
    piModel: process.env.HIVEMIND_PI_MODEL,
    skilifyLog: SKILIFY_LOG,
    currentSessionId
  };
  writeFileSync4(configFile, JSON.stringify(payload), { mode: 384 });
  try {
    // Belt-and-braces: enforce owner-only permissions even if the mode
    // option was ignored (e.g. pre-existing file).
    chmodSync(configFile, 384);
  } catch {
  }
  skilifyLog(`${reason}: spawning skilify worker for project=${project} key=${projectKey}`);
  const workerPath = join10(bundleDir, "skilify-worker.js");
  const worker = spawn3("nohup", ["node", workerPath, configFile], {
    detached: true,
    stdio: ["ignore", "ignore", "ignore"]
  });
  worker.unref();
  skilifyLog(`${reason}: spawned skilify worker for ${projectKey}`);
}
|
|
1191
|
+
|
|
1192
|
+
// dist/src/skilify/state.js
|
|
1193
|
+
import { readFileSync as readFileSync5, writeFileSync as writeFileSync5, writeSync as writeSync3, mkdirSync as mkdirSync6, renameSync as renameSync2, existsSync as existsSync6, unlinkSync as unlinkSync3, openSync as openSync3, closeSync as closeSync3 } from "node:fs";
|
|
1194
|
+
import { execSync as execSync2 } from "node:child_process";
|
|
1195
|
+
import { homedir as homedir9 } from "node:os";
|
|
1196
|
+
import { createHash } from "node:crypto";
|
|
1197
|
+
import { join as join11, basename } from "node:path";
|
|
1198
|
+
var dlog2 = (msg) => log("skilify-state", msg);
|
|
1199
|
+
var STATE_DIR2 = join11(homedir9(), ".deeplake", "state", "skilify");
|
|
1200
|
+
var YIELD_BUF2 = new Int32Array(new SharedArrayBuffer(4));
|
|
1201
|
+
// Stop-counter threshold before a skilify run; env override must be a
// positive integer, otherwise defaults to 20.
var TRIGGER_THRESHOLD = (() => {
  const parsed = Number(process.env.HIVEMIND_SKILIFY_EVERY_N_TURNS ?? "");
  return Number.isInteger(parsed) && parsed > 0 ? parsed : 20;
})();
|
|
1205
|
+
function statePath2(projectKey) {
  // Per-project skilify JSON state file.
  return join11(STATE_DIR2, projectKey + ".json");
}
|
|
1208
|
+
function lockPath2(projectKey) {
  // Per-project skilify worker lock file.
  return join11(STATE_DIR2, projectKey + ".lock");
}
|
|
1211
|
+
function deriveProjectKey(cwd) {
  // Stable 16-hex-char project key: prefer the git origin URL as identity
  // (same key across clones); fall back to the working-directory path.
  const project = basename(cwd) || "unknown";
  let signature = null;
  try {
    const origin = execSync2("git config --get remote.origin.url", {
      cwd,
      encoding: "utf-8",
      stdio: ["ignore", "pipe", "ignore"]
    }).trim();
    signature = origin || null;
  } catch {
    // Not a git repo (or git missing); fall back to the path below.
  }
  const key = createHash("sha1").update(signature ?? cwd).digest("hex").slice(0, 16);
  return { key, project };
}
|
|
1226
|
+
function readState2(projectKey) {
  // Parsed skilify state for the project, or null when absent/corrupt.
  const file = statePath2(projectKey);
  if (!existsSync6(file))
    return null;
  try {
    const raw = readFileSync5(file, "utf-8");
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
|
|
1236
|
+
function writeState2(projectKey, state) {
  // Atomic pretty-printed write: temp file then rename into place.
  mkdirSync6(STATE_DIR2, { recursive: true });
  const target = statePath2(projectKey);
  const tmp = `${target}.${process.pid}.${Date.now()}.tmp`;
  writeFileSync5(tmp, JSON.stringify(state, null, 2));
  renameSync2(tmp, target);
}
|
|
1243
|
+
function withRmwLock2(projectKey, fn) {
  // Serializes read-modify-write cycles on a project's skilify state behind
  // an exclusive ".rmw" lock; locks unreleased after ~2s are reclaimed.
  mkdirSync6(STATE_DIR2, { recursive: true });
  const rmw = lockPath2(projectKey) + ".rmw";
  const deadline = Date.now() + 2e3;
  let fd = null;
  for (;;) {
    try {
      fd = openSync3(rmw, "wx");
      break;
    } catch (e) {
      if (e.code !== "EEXIST")
        throw e;
      if (Date.now() > deadline) {
        dlog2(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`);
        try {
          unlinkSync3(rmw);
        } catch (unlinkErr) {
          dlog2(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`);
        }
        continue;
      }
      // Synchronous ~10ms pause without spinning the CPU.
      Atomics.wait(YIELD_BUF2, 0, 0, 10);
    }
  }
  try {
    return fn();
  } finally {
    closeSync3(fd);
    try {
      unlinkSync3(rmw);
    } catch (unlinkErr) {
      dlog2(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`);
    }
  }
}
|
|
1277
|
+
function bumpStopCounter(cwd) {
  // Atomically increments the project's stop counter (creating initial
  // state on first use) and returns the updated state.
  const { key, project } = deriveProjectKey(cwd);
  return withRmwLock2(key, () => {
    const prev = readState2(key);
    const next = prev
      ? { ...prev, counter: prev.counter + 1, updatedAt: Date.now() }
      : {
          project,
          projectKey: key,
          counter: 1,
          lastUuid: null,
          lastDate: null,
          skillsGenerated: [],
          updatedAt: Date.now()
        };
    writeState2(key, next);
    return next;
  });
}
|
|
1294
|
+
function resetCounter(projectKey) {
  // Zeroes the stop counter under the RMW lock; no-op if no state exists.
  withRmwLock2(projectKey, () => {
    const current = readState2(projectKey);
    if (current) {
      writeState2(projectKey, { ...current, counter: 0, updatedAt: Date.now() });
    }
  });
}
|
|
1302
|
+
function tryAcquireWorkerLock(projectKey, maxAgeMs = 10 * 60 * 1e3) {
  // Cross-process skilify-worker mutex. The lock file stores its creation
  // timestamp; locks older than maxAgeMs (default 10 min) are reclaimed.
  // Returns true iff this process now holds the lock.
  //
  // Fix (consistency with tryAcquireLock): only losing the exclusive-create
  // race (EEXIST) means "lock held"; unexpected filesystem errors were
  // previously swallowed and misreported as a held lock — now they propagate
  // (callers already wrap this in try/catch and log).
  mkdirSync6(STATE_DIR2, { recursive: true });
  const p = lockPath2(projectKey);
  if (existsSync6(p)) {
    try {
      const ageMs = Date.now() - parseInt(readFileSync5(p, "utf-8"), 10);
      if (Number.isFinite(ageMs) && ageMs < maxAgeMs)
        return false;
    } catch (readErr) {
      dlog2(`worker lock unreadable for ${projectKey}, treating as stale: ${readErr.message}`);
    }
    try {
      unlinkSync3(p);
    } catch (unlinkErr) {
      dlog2(`could not unlink stale worker lock for ${projectKey}: ${unlinkErr.message}`);
      return false;
    }
  }
  try {
    const fd = openSync3(p, "wx");
    try {
      writeSync3(fd, String(Date.now()));
    } finally {
      closeSync3(fd);
    }
    return true;
  } catch (e) {
    if (e.code === "EEXIST")
      return false;
    throw e;
  }
}
|
|
1332
|
+
function releaseWorkerLock(projectKey) {
  // Best-effort removal of the worker lock; ENOENT means already released.
  // Fix (consistency with releaseLock): unexpected unlink failures were
  // silently swallowed — now they are logged so a wedged lock is diagnosable.
  const p = lockPath2(projectKey);
  try {
    unlinkSync3(p);
  } catch (e) {
    if (e?.code !== "ENOENT") {
      dlog2(`releaseWorkerLock unlink failed for ${projectKey}: ${e.message}`);
    }
  }
}
|
|
1339
|
+
|
|
1340
|
+
// dist/src/skilify/scope-config.js
|
|
1341
|
+
import { existsSync as existsSync7, mkdirSync as mkdirSync7, readFileSync as readFileSync6, writeFileSync as writeFileSync6 } from "node:fs";
|
|
1342
|
+
import { homedir as homedir10 } from "node:os";
|
|
1343
|
+
import { join as join12 } from "node:path";
|
|
1344
|
+
var STATE_DIR3 = join12(homedir10(), ".deeplake", "state", "skilify");
|
|
1345
|
+
var CONFIG_PATH = join12(STATE_DIR3, "config.json");
|
|
1346
|
+
var DEFAULT = { scope: "me", team: [], install: "project" };
|
|
1347
|
+
function loadScopeConfig() {
  // Reads and sanitizes the skilify scope config; any read/parse/shape
  // problem falls back to DEFAULT.
  if (!existsSync7(CONFIG_PATH))
    return DEFAULT;
  try {
    const raw = JSON.parse(readFileSync6(CONFIG_PATH, "utf-8"));
    const scope = raw.scope === "team" || raw.scope === "org" ? raw.scope : "me";
    const team = Array.isArray(raw.team) ? raw.team.filter((member) => typeof member === "string") : [];
    const install = raw.install === "global" ? "global" : "project";
    return { scope, team, install };
  } catch {
    return DEFAULT;
  }
}
|
|
1360
|
+
|
|
1361
|
+
// dist/src/skilify/triggers.js
|
|
1362
|
+
function tryStopCounterTrigger(opts) {
  // Bumps the per-project stop counter and, once the threshold is reached,
  // spawns a skilify worker guarded by the cross-process worker lock.
  // Never runs inside a skilify worker itself (recursion guard).
  if (process.env.HIVEMIND_SKILIFY_WORKER === "1")
    return;
  const { config, cwd, bundleDir, agent, sessionId } = opts;
  if (!cwd)
    return;
  try {
    const state = bumpStopCounter(cwd);
    if (state.counter < TRIGGER_THRESHOLD)
      return;
    if (!tryAcquireWorkerLock(state.projectKey)) {
      skilifyLog(`Stop: trigger suppressed (worker lock held) project=${state.project}`);
      return;
    }
    skilifyLog(`Stop: threshold hit (counter=${state.counter}, N=${TRIGGER_THRESHOLD}) project=${state.project} agent=${agent}`);
    resetCounter(state.projectKey);
    try {
      spawnSkilifyWorker({
        config,
        cwd,
        projectKey: state.projectKey,
        project: state.project,
        bundleDir,
        agent,
        scopeConfig: loadScopeConfig(),
        currentSessionId: sessionId,
        reason: "Stop"
      });
    } catch (e) {
      skilifyLog(`Stop spawn failed: ${e?.message ?? e}`);
      try {
        // Spawn failed, so the worker will never release the lock itself.
        releaseWorkerLock(state.projectKey);
      } catch {
      }
    }
  } catch (e) {
    skilifyLog(`Stop trigger error: ${e?.message ?? e}`);
  }
}
|
|
1400
|
+
|
|
396
1401
|
// dist/src/hooks/cursor/capture.js
|
|
397
|
-
var
|
|
1402
|
+
var log4 = (msg) => log("cursor-capture", msg);
|
|
1403
|
+
function resolveEmbedDaemonPath() {
  // embed-daemon.js ships next to this bundle under embeddings/.
  const bundleDir = dirname3(fileURLToPath3(import.meta.url));
  return join13(bundleDir, "embeddings", "embed-daemon.js");
}
|
|
398
1406
|
var CAPTURE = process.env.HIVEMIND_CAPTURE !== "false";
|
|
399
1407
|
function resolveCwd(input) {
|
|
400
1408
|
if (typeof input.cwd === "string" && input.cwd)
|
|
@@ -410,7 +1418,7 @@ async function main() {
|
|
|
410
1418
|
const input = await readStdin();
|
|
411
1419
|
const config = loadConfig();
|
|
412
1420
|
if (!config) {
|
|
413
|
-
|
|
1421
|
+
log4("no config");
|
|
414
1422
|
return;
|
|
415
1423
|
}
|
|
416
1424
|
const sessionId = input.conversation_id ?? `cursor-${Date.now()}`;
|
|
@@ -429,10 +1437,10 @@ async function main() {
|
|
|
429
1437
|
};
|
|
430
1438
|
let entry = null;
|
|
431
1439
|
if (event === "beforeSubmitPrompt" && typeof input.prompt === "string") {
|
|
432
|
-
|
|
1440
|
+
log4(`user session=${sessionId}`);
|
|
433
1441
|
entry = { id: crypto.randomUUID(), ...meta, type: "user_message", content: input.prompt };
|
|
434
1442
|
} else if (event === "postToolUse" && typeof input.tool_name === "string") {
|
|
435
|
-
|
|
1443
|
+
log4(`tool=${input.tool_name} session=${sessionId}`);
|
|
436
1444
|
entry = {
|
|
437
1445
|
id: crypto.randomUUID(),
|
|
438
1446
|
...meta,
|
|
@@ -444,10 +1452,10 @@ async function main() {
|
|
|
444
1452
|
tool_response: typeof input.tool_output === "string" ? input.tool_output : JSON.stringify(input.tool_output)
|
|
445
1453
|
};
|
|
446
1454
|
} else if (event === "afterAgentResponse" && typeof input.text === "string") {
|
|
447
|
-
|
|
1455
|
+
log4(`assistant session=${sessionId}`);
|
|
448
1456
|
entry = { id: crypto.randomUUID(), ...meta, type: "assistant_message", content: input.text };
|
|
449
1457
|
} else if (event === "stop") {
|
|
450
|
-
|
|
1458
|
+
log4(`stop session=${sessionId} status=${input.status ?? "unknown"}`);
|
|
451
1459
|
entry = {
|
|
452
1460
|
id: crypto.randomUUID(),
|
|
453
1461
|
...meta,
|
|
@@ -456,30 +1464,74 @@ async function main() {
|
|
|
456
1464
|
loop_count: input.loop_count
|
|
457
1465
|
};
|
|
458
1466
|
} else {
|
|
459
|
-
|
|
1467
|
+
log4(`unknown event: ${event}, skipping`);
|
|
460
1468
|
return;
|
|
461
1469
|
}
|
|
462
1470
|
const sessionPath = buildSessionPath(config, sessionId);
|
|
463
1471
|
const line = JSON.stringify(entry);
|
|
464
|
-
|
|
1472
|
+
log4(`writing to ${sessionPath}`);
|
|
465
1473
|
const projectName = cwd.split("/").pop() || "unknown";
|
|
466
1474
|
const filename = sessionPath.split("/").pop() ?? "";
|
|
467
1475
|
const jsonForSql = line.replace(/'/g, "''");
|
|
468
|
-
const
|
|
1476
|
+
const embedding = embeddingsDisabled() ? null : await new EmbedClient({ daemonEntry: resolveEmbedDaemonPath() }).embed(line, "document");
|
|
1477
|
+
const embeddingSql = embeddingSqlLiteral(embedding);
|
|
1478
|
+
const insertSql = `INSERT INTO "${sessionsTable}" (id, path, filename, message, message_embedding, author, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ('${crypto.randomUUID()}', '${sqlStr(sessionPath)}', '${sqlStr(filename)}', '${jsonForSql}'::jsonb, ${embeddingSql}, '${sqlStr(config.userName)}', ${Buffer.byteLength(line, "utf-8")}, '${sqlStr(projectName)}', '${sqlStr(event)}', 'cursor', '${ts}', '${ts}')`;
|
|
469
1479
|
try {
|
|
470
1480
|
await api.query(insertSql);
|
|
471
1481
|
} catch (e) {
|
|
472
1482
|
if (e.message?.includes("permission denied") || e.message?.includes("does not exist")) {
|
|
473
|
-
|
|
1483
|
+
log4("table missing, creating and retrying");
|
|
474
1484
|
await api.ensureSessionsTable(sessionsTable);
|
|
475
1485
|
await api.query(insertSql);
|
|
476
1486
|
} else {
|
|
477
1487
|
throw e;
|
|
478
1488
|
}
|
|
479
1489
|
}
|
|
480
|
-
|
|
1490
|
+
log4("capture ok \u2192 cloud");
|
|
1491
|
+
maybeTriggerPeriodicSummary(sessionId, cwd, config);
|
|
1492
|
+
if (event === "afterAgentResponse" && process.env.HIVEMIND_WIKI_WORKER !== "1" && process.env.HIVEMIND_SKILIFY_WORKER !== "1") {
|
|
1493
|
+
tryStopCounterTrigger({
|
|
1494
|
+
config,
|
|
1495
|
+
cwd,
|
|
1496
|
+
bundleDir: bundleDirFromImportMeta(import.meta.url),
|
|
1497
|
+
agent: "cursor",
|
|
1498
|
+
sessionId
|
|
1499
|
+
});
|
|
1500
|
+
}
|
|
1501
|
+
}
|
|
1502
|
+
function maybeTriggerPeriodicSummary(sessionId, cwd, config) {
  // Bumps the session message counter and, when shouldTrigger() says a
  // summary is due, spawns the wiki worker (guarded by the session lock).
  // Never runs inside a wiki worker itself (recursion guard).
  if (process.env.HIVEMIND_WIKI_WORKER === "1")
    return;
  try {
    const state = bumpTotalCount(sessionId);
    const cfg = loadTriggerConfig();
    if (!shouldTrigger(state, cfg))
      return;
    if (!tryAcquireLock(sessionId)) {
      log4(`periodic trigger suppressed (lock held) session=${sessionId}`);
      return;
    }
    wikiLog(`Periodic: threshold hit (total=${state.totalCount}, since=${state.totalCount - state.lastSummaryCount}, N=${cfg.everyNMessages}, hours=${cfg.everyHours})`);
    try {
      spawnCursorWikiWorker({
        config,
        sessionId,
        cwd,
        bundleDir: bundleDirFromImportMeta(import.meta.url),
        reason: "Periodic"
      });
    } catch (e) {
      log4(`periodic spawn failed: ${e.message}`);
      try {
        // Spawn failed, so the worker will never release the lock itself.
        releaseLock(sessionId);
      } catch {
      }
    }
  } catch (e) {
    log4(`periodic trigger error: ${e.message}`);
  }
}
|
|
482
1534
|
main().catch((e) => {
|
|
483
|
-
|
|
1535
|
+
log4(`fatal: ${e.message}`);
|
|
484
1536
|
process.exit(0);
|
|
485
1537
|
});
|