@deeplake/hivemind 0.6.48 → 0.7.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/.claude-plugin/marketplace.json +2 -2
  2. package/.claude-plugin/plugin.json +1 -1
  3. package/README.md +244 -20
  4. package/bundle/cli.js +1369 -112
  5. package/codex/bundle/capture.js +546 -96
  6. package/codex/bundle/commands/auth-login.js +290 -81
  7. package/codex/bundle/embeddings/embed-daemon.js +243 -0
  8. package/codex/bundle/pre-tool-use.js +666 -111
  9. package/codex/bundle/session-start-setup.js +231 -64
  10. package/codex/bundle/session-start.js +52 -13
  11. package/codex/bundle/shell/deeplake-shell.js +716 -119
  12. package/codex/bundle/skilify-worker.js +907 -0
  13. package/codex/bundle/stop.js +819 -79
  14. package/codex/bundle/wiki-worker.js +312 -11
  15. package/cursor/bundle/capture.js +1116 -64
  16. package/cursor/bundle/commands/auth-login.js +290 -81
  17. package/cursor/bundle/embeddings/embed-daemon.js +243 -0
  18. package/cursor/bundle/pre-tool-use.js +598 -77
  19. package/cursor/bundle/session-end.js +520 -2
  20. package/cursor/bundle/session-start.js +257 -65
  21. package/cursor/bundle/shell/deeplake-shell.js +716 -119
  22. package/cursor/bundle/skilify-worker.js +907 -0
  23. package/cursor/bundle/wiki-worker.js +571 -0
  24. package/hermes/bundle/capture.js +1119 -65
  25. package/hermes/bundle/commands/auth-login.js +290 -81
  26. package/hermes/bundle/embeddings/embed-daemon.js +243 -0
  27. package/hermes/bundle/pre-tool-use.js +597 -76
  28. package/hermes/bundle/session-end.js +522 -1
  29. package/hermes/bundle/session-start.js +260 -65
  30. package/hermes/bundle/shell/deeplake-shell.js +716 -119
  31. package/hermes/bundle/skilify-worker.js +907 -0
  32. package/hermes/bundle/wiki-worker.js +572 -0
  33. package/mcp/bundle/server.js +290 -75
  34. package/openclaw/dist/chunks/auth-creds-AEKS6D3P.js +14 -0
  35. package/openclaw/dist/chunks/chunk-SRCBBT4H.js +37 -0
  36. package/openclaw/dist/chunks/config-ZLH6JFJS.js +34 -0
  37. package/openclaw/dist/chunks/index-marker-store-PGT5CW6T.js +33 -0
  38. package/openclaw/dist/chunks/setup-config-C35UK4LP.js +114 -0
  39. package/openclaw/dist/index.js +929 -710
  40. package/openclaw/dist/skilify-worker.js +907 -0
  41. package/openclaw/openclaw.plugin.json +1 -1
  42. package/openclaw/package.json +1 -1
  43. package/openclaw/skills/SKILL.md +19 -0
  44. package/package.json +7 -1
  45. package/pi/extension-source/hivemind.ts +603 -22
@@ -1,3 +1,56 @@
1
// esbuild bundler helpers: __esm wraps a module body so it runs at most
// once; __export wires live re-exports via getter properties.
var __defProp = Object.defineProperty;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __esm = (fn, res) => function __init() {
  // First call invokes the (single) module body, caches its result in
  // `res`, and nulls out `fn` so later calls are no-ops.
  return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
};
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};

// dist/src/index-marker-store.js
// Marker files record "this index/column was confirmed present at time T",
// letting callers skip repeat DDL probes for INDEX_MARKER_TTL_MS.
var index_marker_store_exports = {};
__export(index_marker_store_exports, {
  buildIndexMarkerPath: () => buildIndexMarkerPath,
  getIndexMarkerDir: () => getIndexMarkerDir,
  hasFreshIndexMarker: () => hasFreshIndexMarker,
  writeIndexMarker: () => writeIndexMarker
});
import { existsSync as existsSync2, mkdirSync, readFileSync as readFileSync2, writeFileSync } from "node:fs";
import { join as join3 } from "node:path";
import { tmpdir } from "node:os";
// Directory holding the marker files; overridable via env for tests/opt-out.
function getIndexMarkerDir() {
  return process.env.HIVEMIND_INDEX_MARKER_DIR ?? join3(tmpdir(), "hivemind-deeplake-indexes");
}
// One marker per (workspace, org, table, suffix); filesystem-unsafe
// characters are collapsed to "_".
function buildIndexMarkerPath(workspaceId, orgId, table, suffix) {
  const markerKey = [workspaceId, orgId, table, suffix].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_");
  return join3(getIndexMarkerDir(), `${markerKey}.json`);
}
// True when the marker exists, parses as JSON, and its updatedAt is within
// the TTL. NOTE: reads INDEX_MARKER_TTL_MS, which is only assigned by
// init_index_marker_store() below — callers reach this module through the
// __esm wrapper, which runs that init first.
function hasFreshIndexMarker(markerPath) {
  if (!existsSync2(markerPath))
    return false;
  try {
    const raw = JSON.parse(readFileSync2(markerPath, "utf-8"));
    const updatedAt = raw.updatedAt ? new Date(raw.updatedAt).getTime() : NaN;
    if (!Number.isFinite(updatedAt) || Date.now() - updatedAt > INDEX_MARKER_TTL_MS)
      return false;
    return true;
  } catch {
    // Unreadable or corrupt marker counts as stale.
    return false;
  }
}
// Stamp (or refresh) a marker with the current time.
function writeIndexMarker(markerPath) {
  mkdirSync(getIndexMarkerDir(), { recursive: true });
  writeFileSync(markerPath, JSON.stringify({ updatedAt: (/* @__PURE__ */ new Date()).toISOString() }), "utf-8");
}
var INDEX_MARKER_TTL_MS;
var init_index_marker_store = __esm({
  "dist/src/index-marker-store.js"() {
    "use strict";
    // Default TTL: 6 * 60 * 60000 ms = 6 hours.
    INDEX_MARKER_TTL_MS = Number(process.env.HIVEMIND_INDEX_MARKER_TTL_MS ?? 6 * 60 * 6e4);
  }
});
53
+
1
54
  // dist/src/utils/stdin.js
2
55
  function readStdin() {
3
56
  return new Promise((resolve, reject) => {
@@ -43,15 +96,13 @@ function loadConfig() {
43
96
  apiUrl: process.env.HIVEMIND_API_URL ?? creds?.apiUrl ?? "https://api.deeplake.ai",
44
97
  tableName: process.env.HIVEMIND_TABLE ?? "memory",
45
98
  sessionsTableName: process.env.HIVEMIND_SESSIONS_TABLE ?? "sessions",
99
+ skillsTableName: process.env.HIVEMIND_SKILLS_TABLE ?? "skills",
46
100
  memoryPath: process.env.HIVEMIND_MEMORY_PATH ?? join(home, ".deeplake", "memory")
47
101
  };
48
102
  }
49
103
 
50
104
  // dist/src/deeplake-api.js
51
105
  import { randomUUID } from "node:crypto";
52
- import { existsSync as existsSync2, mkdirSync, readFileSync as readFileSync2, writeFileSync } from "node:fs";
53
- import { join as join3 } from "node:path";
54
- import { tmpdir } from "node:os";
55
106
 
56
107
  // dist/src/utils/debug.js
57
108
  import { appendFileSync } from "node:fs";
@@ -59,6 +110,9 @@ import { join as join2 } from "node:path";
59
110
  import { homedir as homedir2 } from "node:os";
60
111
  var DEBUG = process.env.HIVEMIND_DEBUG === "1";
61
112
  var LOG = join2(homedir2(), ".deeplake", "hook-debug.log");
113
// Format a Date as "YYYY-MM-DD HH:MM:SS UTC" for debug-log lines.
function utcTimestamp(d = /* @__PURE__ */ new Date()) {
  const iso = d.toISOString();
  return `${iso.slice(0, 10)} ${iso.slice(11, 19)} UTC`;
}
62
116
  function log(tag, msg) {
63
117
  if (!DEBUG)
64
118
  return;
@@ -70,8 +124,33 @@ function log(tag, msg) {
70
124
/**
 * Escape a string for use inside a single-quoted SQL literal: doubles
 * backslashes and single quotes, then strips NUL and other non-printable
 * control characters (tab/newline/CR are preserved).
 */
function sqlStr(value) {
  const escaped = value.replaceAll("\\", "\\\\").replaceAll("'", "''");
  return escaped.replace(/[\x00-\x08\x0b\x0c\x0e-\x1f\x7f]/g, "");
}
127
/**
 * Validate that `name` is a safe SQL identifier (letter or underscore
 * followed by letters, digits, underscores). Returns it unchanged, or
 * throws for anything that could break out of an identifier position.
 */
function sqlIdent(name) {
  const IDENT_RE = /^[a-zA-Z_][a-zA-Z0-9_]*$/;
  if (IDENT_RE.test(name))
    return name;
  throw new Error(`Invalid SQL identifier: ${JSON.stringify(name)}`);
}
133
+
134
// dist/src/embeddings/columns.js
// Names of the vector columns added to the memory / sessions tables.
var SUMMARY_EMBEDDING_COL = "summary_embedding";
var MESSAGE_EMBEDDING_COL = "message_embedding";

// dist/src/utils/client-header.js
// Every Deeplake HTTP request is tagged with this header so the backend
// can attribute traffic to the hivemind client.
var DEEPLAKE_CLIENT_HEADER = "X-Deeplake-Client";
function deeplakeClientValue() {
  return "hivemind";
}
function deeplakeClientHeader() {
  const value = deeplakeClientValue();
  return { [DEEPLAKE_CLIENT_HEADER]: value };
}
73
146
 
74
147
  // dist/src/deeplake-api.js
148
// Lazily initialise the index-marker-store chunk exactly once and hand the
// same promise to every caller.
var indexMarkerStorePromise = null;
function getIndexMarkerStore() {
  if (indexMarkerStorePromise === null) {
    indexMarkerStorePromise = Promise.resolve().then(() => {
      init_index_marker_store();
      return index_marker_store_exports;
    });
  }
  return indexMarkerStorePromise;
}
75
154
  var log2 = (msg) => log("sdk", msg);
76
155
  function summarizeSql(sql, maxLen = 220) {
77
156
  const compact = sql.replace(/\s+/g, " ").trim();
@@ -91,7 +170,6 @@ var MAX_RETRIES = 3;
91
170
  var BASE_DELAY_MS = 500;
92
171
  var MAX_CONCURRENCY = 5;
93
172
  var QUERY_TIMEOUT_MS = Number(process.env.HIVEMIND_QUERY_TIMEOUT_MS ?? 1e4);
94
- var INDEX_MARKER_TTL_MS = Number(process.env.HIVEMIND_INDEX_MARKER_TTL_MS ?? 6 * 60 * 6e4);
95
173
  function sleep(ms) {
96
174
  return new Promise((resolve) => setTimeout(resolve, ms));
97
175
  }
@@ -111,9 +189,6 @@ function isTransientHtml403(text) {
111
189
  const body = text.toLowerCase();
112
190
  return body.includes("<html") || body.includes("403 forbidden") || body.includes("cloudflare") || body.includes("nginx");
113
191
  }
114
- function getIndexMarkerDir() {
115
- return process.env.HIVEMIND_INDEX_MARKER_DIR ?? join3(tmpdir(), "hivemind-deeplake-indexes");
116
- }
117
192
  var Semaphore = class {
118
193
  max;
119
194
  waiting = [];
@@ -182,7 +257,8 @@ var DeeplakeApi = class {
182
257
  headers: {
183
258
  Authorization: `Bearer ${this.token}`,
184
259
  "Content-Type": "application/json",
185
- "X-Activeloop-Org-Id": this.orgId
260
+ "X-Activeloop-Org-Id": this.orgId,
261
+ ...deeplakeClientHeader()
186
262
  },
187
263
  signal,
188
264
  body: JSON.stringify({ query: sql })
@@ -209,7 +285,8 @@ var DeeplakeApi = class {
209
285
  }
210
286
  const text = await resp.text().catch(() => "");
211
287
  const retryable403 = isSessionInsertQuery(sql) && (resp.status === 401 || resp.status === 403 && (text.length === 0 || isTransientHtml403(text)));
212
- if (attempt < MAX_RETRIES && (RETRYABLE_CODES.has(resp.status) || retryable403)) {
288
+ const alreadyExists = resp.status === 500 && isDuplicateIndexError(text);
289
+ if (!alreadyExists && attempt < MAX_RETRIES && (RETRYABLE_CODES.has(resp.status) || retryable403)) {
213
290
  const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
214
291
  log2(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`);
215
292
  await sleep(delay);
@@ -243,7 +320,7 @@ var DeeplakeApi = class {
243
320
  const lud = row.lastUpdateDate ?? ts;
244
321
  const exists = await this.query(`SELECT path FROM "${this.tableName}" WHERE path = '${sqlStr(row.path)}' LIMIT 1`);
245
322
  if (exists.length > 0) {
246
- let setClauses = `summary = E'${sqlStr(row.contentText)}', mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`;
323
+ let setClauses = `summary = E'${sqlStr(row.contentText)}', ${SUMMARY_EMBEDDING_COL} = NULL, mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`;
247
324
  if (row.project !== void 0)
248
325
  setClauses += `, project = '${sqlStr(row.project)}'`;
249
326
  if (row.description !== void 0)
@@ -251,8 +328,8 @@ var DeeplakeApi = class {
251
328
  await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(row.path)}'`);
252
329
  } else {
253
330
  const id = randomUUID();
254
- let cols = "id, path, filename, summary, mime_type, size_bytes, creation_date, last_update_date";
255
- let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'${sqlStr(row.contentText)}', '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`;
331
+ let cols = `id, path, filename, summary, ${SUMMARY_EMBEDDING_COL}, mime_type, size_bytes, creation_date, last_update_date`;
332
+ let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'${sqlStr(row.contentText)}', NULL, '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`;
256
333
  if (row.project !== void 0) {
257
334
  cols += ", project";
258
335
  vals += `, '${sqlStr(row.project)}'`;
@@ -277,48 +354,83 @@ var DeeplakeApi = class {
277
354
  buildLookupIndexName(table, suffix) {
278
355
  return `idx_${table}_${suffix}`.replace(/[^a-zA-Z0-9_]/g, "_");
279
356
  }
280
- getLookupIndexMarkerPath(table, suffix) {
281
- const markerKey = [
282
- this.workspaceId,
283
- this.orgId,
284
- table,
285
- suffix
286
- ].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_");
287
- return join3(getIndexMarkerDir(), `${markerKey}.json`);
288
- }
289
- hasFreshLookupIndexMarker(table, suffix) {
290
- const markerPath = this.getLookupIndexMarkerPath(table, suffix);
291
- if (!existsSync2(markerPath))
292
- return false;
293
- try {
294
- const raw = JSON.parse(readFileSync2(markerPath, "utf-8"));
295
- const updatedAt = raw.updatedAt ? new Date(raw.updatedAt).getTime() : NaN;
296
- if (!Number.isFinite(updatedAt) || Date.now() - updatedAt > INDEX_MARKER_TTL_MS)
297
- return false;
298
- return true;
299
- } catch {
300
- return false;
301
- }
302
- }
303
- markLookupIndexReady(table, suffix) {
304
- mkdirSync(getIndexMarkerDir(), { recursive: true });
305
- writeFileSync(this.getLookupIndexMarkerPath(table, suffix), JSON.stringify({ updatedAt: (/* @__PURE__ */ new Date()).toISOString() }), "utf-8");
306
- }
307
357
  /**
   * Create a lookup index on `table` unless a fresh local marker says one
   * was confirmed recently. "Duplicate index" errors are treated as
   * success (another client won the race); any other failure is logged and
   * swallowed — index creation is best-effort.
   */
  async ensureLookupIndex(table, suffix, columnsSql) {
    const markers = await getIndexMarkerStore();
    const markerPath = markers.buildIndexMarkerPath(this.workspaceId, this.orgId, table, suffix);
    if (markers.hasFreshIndexMarker(markerPath))
      return;
    const indexName = this.buildLookupIndexName(table, suffix);
    try {
      await this.query(`CREATE INDEX IF NOT EXISTS "${indexName}" ON "${table}" ${columnsSql}`);
      markers.writeIndexMarker(markerPath);
    } catch (e) {
      if (isDuplicateIndexError(e)) {
        // Concurrent creation by another client counts as success.
        markers.writeIndexMarker(markerPath);
        return;
      }
      // Best-effort: log and continue without the index.
      log2(`index "${indexName}" skipped: ${e.message}`);
    }
  }
374
+ /**
375
+ * Ensure a vector column exists on the given table.
376
+ *
377
+ * The previous implementation always issued `ALTER TABLE ADD COLUMN IF NOT
378
+ * EXISTS …` on every SessionStart. On a long-running workspace that's
379
+ * already migrated, every call returns 500 "Column already exists" — noisy
380
+ * in the log and a wasted round-trip. Worse, the very first call after the
381
+ * column is genuinely added triggers Deeplake's post-ALTER `vector::at`
382
+ * window (~30s) during which subsequent INSERTs fail; minimising the
383
+ * number of ALTER calls minimises exposure to that window.
384
+ *
385
+ * New flow:
386
+ * 1. Check the local marker file (mirrors ensureLookupIndex). If fresh,
387
+ * return — zero network calls.
388
+ * 2. SELECT 1 FROM information_schema.columns WHERE table_name = T AND
389
+ * column_name = C. Read-only, idempotent, can't tickle the post-ALTER
390
+ * bug. If the column is present → mark + return.
391
+ * 3. Only if step 2 says the column is missing, fall back to ALTER ADD
392
+ * COLUMN IF NOT EXISTS. Mark on success, also mark if Deeplake reports
393
+ * "already exists" (race: another client added it between our SELECT
394
+ * and ALTER).
395
+ *
396
+ * Marker uses the same dir / TTL as ensureLookupIndex so both schema
397
+ * caches share an opt-out (HIVEMIND_INDEX_MARKER_DIR) and a TTL knob.
398
+ */
399
+ async ensureEmbeddingColumn(table, column) {
400
+ await this.ensureColumn(table, column, "FLOAT4[]");
401
+ }
402
+ /**
403
+ * Generic marker-gated column migration. Same SELECT-then-ALTER flow as
404
+ * ensureEmbeddingColumn, parameterized by SQL type so it can patch up any
405
+ * column that was added to the schema after the table was originally
406
+ * created. Used today for `summary_embedding`, `message_embedding`, and
407
+ * the `agent` column (added 2026-04-11) — the latter has no fallback if
408
+ * a user upgraded over a pre-2026-04-11 table, so every INSERT fails
409
+ * with `column "agent" does not exist`.
410
+ */
411
+ async ensureColumn(table, column, sqlType) {
412
+ const markers = await getIndexMarkerStore();
413
+ const markerPath = markers.buildIndexMarkerPath(this.workspaceId, this.orgId, table, `col_${column}`);
414
+ if (markers.hasFreshIndexMarker(markerPath))
415
+ return;
416
+ const colCheck = `SELECT 1 FROM information_schema.columns WHERE table_name = '${sqlStr(table)}' AND column_name = '${sqlStr(column)}' AND table_schema = '${sqlStr(this.workspaceId)}' LIMIT 1`;
417
+ const rows = await this.query(colCheck);
418
+ if (rows.length > 0) {
419
+ markers.writeIndexMarker(markerPath);
420
+ return;
421
+ }
422
+ try {
423
+ await this.query(`ALTER TABLE "${table}" ADD COLUMN ${column} ${sqlType}`);
424
+ } catch (e) {
425
+ const msg = e instanceof Error ? e.message : String(e);
426
+ if (!/already exists/i.test(msg))
427
+ throw e;
428
+ const recheck = await this.query(colCheck);
429
+ if (recheck.length === 0)
430
+ throw e;
431
+ }
432
+ markers.writeIndexMarker(markerPath);
433
+ }
322
434
  /** List all tables in the workspace (with retry). */
323
435
  async listTables(forceRefresh = false) {
324
436
  if (!forceRefresh && this._tablesCache)
@@ -334,7 +446,8 @@ var DeeplakeApi = class {
334
446
  const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, {
335
447
  headers: {
336
448
  Authorization: `Bearer ${this.token}`,
337
- "X-Activeloop-Org-Id": this.orgId
449
+ "X-Activeloop-Org-Id": this.orgId,
450
+ ...deeplakeClientHeader()
338
451
  }
339
452
  });
340
453
  if (resp.ok) {
@@ -359,29 +472,84 @@ var DeeplakeApi = class {
359
472
  }
360
473
  return { tables: [], cacheable: false };
361
474
  }
475
+ /**
476
+ * Run a `CREATE TABLE` with an extra outer retry budget. The base
477
+ * `query()` already retries 3 times on fetch errors (~3.5s total), but a
478
+ * failed CREATE is permanent corruption — every subsequent SELECT against
479
+ * the missing table fails. Wrapping in an outer loop with longer backoff
480
+ * (2s, 5s, then 10s) gives us ~17s of reach across transient network
481
+ * blips before giving up. Failures still propagate; getApi() resets its
482
+ * cache on init failure (openclaw plugin) so the next call retries the
483
+ * whole init flow.
484
+ */
485
+ async createTableWithRetry(sql, label) {
486
+ const OUTER_BACKOFFS_MS = [2e3, 5e3, 1e4];
487
+ let lastErr = null;
488
+ for (let attempt = 0; attempt <= OUTER_BACKOFFS_MS.length; attempt++) {
489
+ try {
490
+ await this.query(sql);
491
+ return;
492
+ } catch (err) {
493
+ lastErr = err;
494
+ const msg = err instanceof Error ? err.message : String(err);
495
+ log2(`CREATE TABLE "${label}" attempt ${attempt + 1}/${OUTER_BACKOFFS_MS.length + 1} failed: ${msg}`);
496
+ if (attempt < OUTER_BACKOFFS_MS.length) {
497
+ await sleep(OUTER_BACKOFFS_MS[attempt]);
498
+ }
499
+ }
500
+ }
501
+ throw lastErr;
502
+ }
362
503
  /** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
  async ensureTable(name) {
    // Validate the identifier before interpolating it into SQL.
    const tbl = sqlIdent(name ?? this.tableName);
    const tables = await this.listTables();
    if (!tables.includes(tbl)) {
      log2(`table "${tbl}" not found, creating`);
      await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', summary_embedding FLOAT4[], author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`, tbl);
      log2(`table "${tbl}" created`);
      // NOTE(review): this inner check repeats the enclosing condition and
      // is therefore always true here — harmless, likely a leftover.
      if (!tables.includes(tbl))
        this._tablesCache = [...tables, tbl];
    }
    // Column migrations for tables created by older releases.
    await this.ensureEmbeddingColumn(tbl, SUMMARY_EMBEDDING_COL);
    await this.ensureColumn(tbl, "agent", "TEXT NOT NULL DEFAULT ''");
  }
374
517
  /** Create the sessions table (uses JSONB for message since every row is a JSON event). */
  async ensureSessionsTable(name) {
    // Validate the identifier before interpolating it into SQL.
    const safe = sqlIdent(name);
    const tables = await this.listTables();
    if (!tables.includes(safe)) {
      log2(`table "${safe}" not found, creating`);
      await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, message_embedding FLOAT4[], author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`, safe);
      log2(`table "${safe}" created`);
      // NOTE(review): repeats the enclosing condition — always true here.
      if (!tables.includes(safe))
        this._tablesCache = [...tables, safe];
    }
    // Column migrations for tables created by older releases, then the
    // lookup index used by session reads.
    await this.ensureEmbeddingColumn(safe, MESSAGE_EMBEDDING_COL);
    await this.ensureColumn(safe, "agent", "TEXT NOT NULL DEFAULT ''");
    await this.ensureLookupIndex(safe, "path_creation_date", `("path", "creation_date")`);
  }
532
  /**
   * Create the skills table.
   *
   * One row per skill version. Workers INSERT a fresh row on every KEEP /
   * MERGE rather than UPDATE-ing in place, so the full version history is
   * recoverable. Uniqueness in the *current* state is by (project_key, name)
   * — newer rows shadow older ones at read time (ORDER BY version DESC).
   * This sidesteps the Deeplake UPDATE-coalescing quirk that bit the wiki
   * worker.
   */
  async ensureSkillsTable(name) {
    // Validate the identifier before interpolating it into SQL.
    const safe = sqlIdent(name);
    const tables = await this.listTables();
    if (!tables.includes(safe)) {
      log2(`table "${safe}" not found, creating`);
      await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', name TEXT NOT NULL DEFAULT '', project TEXT NOT NULL DEFAULT '', project_key TEXT NOT NULL DEFAULT '', local_path TEXT NOT NULL DEFAULT '', install TEXT NOT NULL DEFAULT 'project', source_sessions TEXT NOT NULL DEFAULT '[]', source_agent TEXT NOT NULL DEFAULT '', scope TEXT NOT NULL DEFAULT 'me', author TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', trigger_text TEXT NOT NULL DEFAULT '', body TEXT NOT NULL DEFAULT '', version BIGINT NOT NULL DEFAULT 1, created_at TEXT NOT NULL DEFAULT '', updated_at TEXT NOT NULL DEFAULT '') USING deeplake`, safe);
      log2(`table "${safe}" created`);
      // NOTE(review): repeats the enclosing condition — always true here.
      if (!tables.includes(safe))
        this._tablesCache = [...tables, safe];
    }
    // Index backing the (project_key, name) read path.
    await this.ensureLookupIndex(safe, "project_key_name", `("project_key", "name")`);
  }
386
554
  };
387
555
 
@@ -391,8 +559,850 @@ function buildSessionPath(config, sessionId) {
391
559
  return `/sessions/${config.userName}/${config.userName}_${config.orgName}_${workspace}_${sessionId}.jsonl`;
392
560
  }
393
561
 
562
+ // dist/src/embeddings/client.js
563
+ import { connect } from "node:net";
564
+ import { spawn } from "node:child_process";
565
+ import { openSync, closeSync, writeSync, unlinkSync, existsSync as existsSync3, readFileSync as readFileSync3 } from "node:fs";
566
+ import { homedir as homedir3 } from "node:os";
567
+ import { join as join4 } from "node:path";
568
+
569
// dist/src/embeddings/protocol.js
// Shared constants and path helpers for the embed daemon's unix-socket
// protocol. One daemon per OS user, keyed by uid.
var DEFAULT_SOCKET_DIR = "/tmp";
var DEFAULT_IDLE_TIMEOUT_MS = 10 * 60 * 1e3;
var DEFAULT_CLIENT_TIMEOUT_MS = 2e3;
// Common stem shared by the socket and pid files.
function embedBasePath(uid, dir) {
  return `${dir}/hivemind-embed-${uid}`;
}
function socketPathFor(uid, dir = DEFAULT_SOCKET_DIR) {
  return `${embedBasePath(uid, dir)}.sock`;
}
function pidPathFor(uid, dir = DEFAULT_SOCKET_DIR) {
  return `${embedBasePath(uid, dir)}.pid`;
}
579
+
580
+ // dist/src/embeddings/client.js
581
+ var SHARED_DAEMON_PATH = join4(homedir3(), ".hivemind", "embed-deps", "embed-daemon.js");
582
+ var log3 = (m) => log("embed-client", m);
583
// Stable per-user key for socket/pid paths: numeric uid on POSIX platforms,
// falling back to $USER (or "default") where process.getuid is unavailable.
function getUid() {
  if (typeof process.getuid === "function") {
    return String(process.getuid());
  }
  return process.env.USER ?? "default";
}
587
// Client for the per-user embedding daemon. Talks newline-delimited JSON
// over a unix socket; every public entry point degrades to null/false
// rather than throwing, so hook write paths never block on embeddings.
var EmbedClient = class {
  socketPath;
  pidPath;
  timeoutMs;
  daemonEntry;
  autoSpawn;
  spawnWaitMs;
  // Monotonic request id counter (per client instance).
  nextId = 0;
  constructor(opts = {}) {
    const uid = getUid();
    const dir = opts.socketDir ?? "/tmp";
    this.socketPath = socketPathFor(uid, dir);
    this.pidPath = pidPathFor(uid, dir);
    this.timeoutMs = opts.timeoutMs ?? DEFAULT_CLIENT_TIMEOUT_MS;
    // Daemon entry point: explicit option > env override > shared install
    // (if present on disk) > undefined (spawn disabled in practice).
    this.daemonEntry = opts.daemonEntry ?? process.env.HIVEMIND_EMBED_DAEMON ?? (existsSync3(SHARED_DAEMON_PATH) ? SHARED_DAEMON_PATH : void 0);
    this.autoSpawn = opts.autoSpawn ?? true;
    this.spawnWaitMs = opts.spawnWaitMs ?? 5e3;
  }
  /**
   * Returns an embedding vector, or null on timeout/failure. Hooks MUST treat
   * null as "skip embedding column" — never block the write path on us.
   *
   * Fire-and-forget spawn on miss: if the daemon isn't up, this call returns
   * null AND kicks off a background spawn. The next call finds a ready daemon.
   */
  async embed(text, kind = "document") {
    let sock;
    try {
      sock = await this.connectOnce();
    } catch {
      // Daemon not reachable: optionally start one, but don't wait for it.
      if (this.autoSpawn)
        this.trySpawnDaemon();
      return null;
    }
    try {
      const id = String(++this.nextId);
      const req = { op: "embed", id, kind, text };
      const resp = await this.sendAndWait(sock, req);
      if (resp.error || !("embedding" in resp) || !resp.embedding) {
        log3(`embed err: ${resp.error ?? "no embedding"}`);
        return null;
      }
      return resp.embedding;
    } catch (e) {
      const err = e instanceof Error ? e.message : String(e);
      log3(`embed failed: ${err}`);
      return null;
    } finally {
      // One request per connection; always close, ignoring close errors.
      try {
        sock.end();
      } catch {
      }
    }
  }
  /**
   * Wait up to spawnWaitMs for the daemon to accept connections, spawning if
   * necessary. Meant for SessionStart / long-running batches — not the hot path.
   * Returns true once a connection succeeds, false otherwise.
   */
  async warmup() {
    try {
      const s = await this.connectOnce();
      s.end();
      return true;
    } catch {
      if (!this.autoSpawn)
        return false;
      this.trySpawnDaemon();
      try {
        const s = await this.waitForSocket();
        s.end();
        return true;
      } catch {
        return false;
      }
    }
  }
  // Single connection attempt with a hard timeout of timeoutMs.
  connectOnce() {
    return new Promise((resolve, reject) => {
      const sock = connect(this.socketPath);
      const to = setTimeout(() => {
        sock.destroy();
        reject(new Error("connect timeout"));
      }, this.timeoutMs);
      sock.once("connect", () => {
        clearTimeout(to);
        resolve(sock);
      });
      sock.once("error", (e) => {
        clearTimeout(to);
        reject(e);
      });
    });
  }
  // Spawn the daemon at most once across processes, using the pid file as
  // an exclusive-create ("wx", mode 0600) lock. Silent no-op when another
  // live process holds the lock or no daemon entry point is configured.
  trySpawnDaemon() {
    let fd;
    try {
      fd = openSync(this.pidPath, "wx", 384);
      writeSync(fd, String(process.pid));
    } catch (e) {
      // Lock exists. If its owner is dead, steal it; otherwise back off.
      if (this.isPidFileStale()) {
        try {
          unlinkSync(this.pidPath);
        } catch {
        }
        try {
          fd = openSync(this.pidPath, "wx", 384);
          writeSync(fd, String(process.pid));
        } catch {
          // Lost the re-acquire race to another process.
          return;
        }
      } else {
        return;
      }
    }
    if (!this.daemonEntry || !existsSync3(this.daemonEntry)) {
      log3(`daemonEntry not configured or missing: ${this.daemonEntry}`);
      // Release the lock so a later, properly configured client can spawn.
      try {
        closeSync(fd);
        unlinkSync(this.pidPath);
      } catch {
      }
      return;
    }
    try {
      // Detached + unref'd so the daemon outlives this (short-lived) hook.
      const child = spawn(process.execPath, [this.daemonEntry], {
        detached: true,
        stdio: "ignore",
        env: process.env
      });
      child.unref();
      log3(`spawned daemon pid=${child.pid}`);
    } finally {
      // NOTE(review): the pid file is left in place after a spawn —
      // presumably the daemon takes ownership of cleaning it up; confirm
      // against embed-daemon.js.
      closeSync(fd);
    }
  }
  // True when the pid file is unreadable, holds a non-pid, or names a
  // process that no longer exists (signal 0 probe).
  isPidFileStale() {
    try {
      const raw = readFileSync3(this.pidPath, "utf-8").trim();
      const pid = Number(raw);
      if (!pid || Number.isNaN(pid))
        return true;
      try {
        process.kill(pid, 0);
        return false;
      } catch {
        return true;
      }
    } catch {
      return true;
    }
  }
  // Poll (30ms growing to 300ms) until the daemon socket accepts a
  // connection or spawnWaitMs elapses; resolves with the open socket.
  async waitForSocket() {
    const deadline = Date.now() + this.spawnWaitMs;
    let delay = 30;
    while (Date.now() < deadline) {
      await sleep2(delay);
      delay = Math.min(delay * 1.5, 300);
      if (!existsSync3(this.socketPath))
        continue;
      try {
        return await this.connectOnce();
      } catch {
      }
    }
    throw new Error("daemon did not become ready within spawnWaitMs");
  }
  // Write one JSON request line and resolve with the first JSON response
  // line; rejects on timeout, socket error, or early close.
  sendAndWait(sock, req) {
    return new Promise((resolve, reject) => {
      let buf = "";
      const to = setTimeout(() => {
        sock.destroy();
        reject(new Error("request timeout"));
      }, this.timeoutMs);
      sock.setEncoding("utf-8");
      sock.on("data", (chunk) => {
        buf += chunk;
        const nl = buf.indexOf("\n");
        if (nl === -1)
          return;
        const line = buf.slice(0, nl);
        clearTimeout(to);
        try {
          resolve(JSON.parse(line));
        } catch (e) {
          reject(e);
        }
      });
      sock.on("error", (e) => {
        clearTimeout(to);
        reject(e);
      });
      sock.on("end", () => {
        clearTimeout(to);
        reject(new Error("connection closed without response"));
      });
      sock.write(JSON.stringify(req) + "\n");
    });
  }
};
786
// Promise-based delay (local twin of sleep() in the deeplake-api chunk).
function sleep2(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
789
+
790
// dist/src/embeddings/sql.js
// Render a numeric vector as a Deeplake SQL float4[] literal. Returns
// "NULL" for a missing/empty vector or when any element is non-finite
// (NaN/Infinity), so callers can interpolate the result directly.
function embeddingSqlLiteral(vec) {
  if (!vec || vec.length === 0)
    return "NULL";
  for (const v of vec) {
    if (!Number.isFinite(v))
      return "NULL";
  }
  const rendered = Array.from(vec, String).join(",");
  return `ARRAY[${rendered}]::float4[]`;
}
802
+
803
+ // dist/src/embeddings/disable.js
804
+ import { createRequire } from "node:module";
805
+ import { homedir as homedir4 } from "node:os";
806
+ import { join as join5 } from "node:path";
807
+ import { pathToFileURL } from "node:url";
808
// Memoized result of detectStatus(); availability cannot change within one
// process, so it is computed at most once.
var cachedStatus = null;
// Resolve @huggingface/transformers — first from this bundle's own module
// graph, then from the shared ~/.hivemind/embed-deps install. Throws when
// neither location can resolve it.
function defaultResolveTransformers() {
  try {
    createRequire(import.meta.url).resolve("@huggingface/transformers");
    return;
  } catch {
    // Not in the local graph — fall through to the shared install.
  }
  const sharedDir = join5(homedir4(), ".hivemind", "embed-deps");
  createRequire(pathToFileURL(`${sharedDir}/`).href).resolve("@huggingface/transformers");
}
// Indirection over the resolver — presumably so it can be swapped in
// tests; no swap site is visible in this chunk.
var _resolve = defaultResolveTransformers;
// One of "env-disabled" | "enabled" | "no-transformers".
function detectStatus() {
  if (process.env.HIVEMIND_EMBEDDINGS === "false")
    return "env-disabled";
  try {
    _resolve();
    return "enabled";
  } catch {
    return "no-transformers";
  }
}
// Cached wrapper around detectStatus().
function embeddingsStatus() {
  if (cachedStatus !== null)
    return cachedStatus;
  cachedStatus = detectStatus();
  return cachedStatus;
}
// True when embeddings should be skipped (env opt-out or missing deps).
function embeddingsDisabled() {
  return embeddingsStatus() !== "enabled";
}
838
+
394
839
  // dist/src/hooks/hermes/capture.js
395
- var log3 = (msg) => log("hermes-capture", msg);
840
+ import { fileURLToPath as fileURLToPath3 } from "node:url";
841
+ import { dirname as dirname3, join as join13 } from "node:path";
842
+
843
+ // dist/src/hooks/summary-state.js
844
+ import { readFileSync as readFileSync4, writeFileSync as writeFileSync2, writeSync as writeSync2, mkdirSync as mkdirSync2, renameSync, existsSync as existsSync4, unlinkSync as unlinkSync2, openSync as openSync2, closeSync as closeSync2 } from "node:fs";
845
+ import { homedir as homedir5 } from "node:os";
846
+ import { join as join6 } from "node:path";
847
// Prefixed debug logger for this module.
var dlog = (msg) => log("summary-state", msg);
// Per-session summary-trigger state lives under ~/.claude/hooks/summary-state.
var STATE_DIR = join6(homedir5(), ".claude", "hooks", "summary-state");
// Dummy shared buffer: Atomics.wait on it (value never changes) acts as a
// synchronous bounded sleep while spinning on the lock.
var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4));
/** Path of the JSON state file for one session. */
function statePath(sessionId) {
  return join6(STATE_DIR, sessionId + ".json");
}
/** Path of the lock file guarding one session's summary worker. */
function lockPath(sessionId) {
  return join6(STATE_DIR, sessionId + ".lock");
}
856
/**
 * Load the persisted trigger state for a session.
 * Returns null when the file is absent or holds invalid JSON.
 */
function readState(sessionId) {
  const file = statePath(sessionId);
  if (!existsSync4(file)) return null;
  try {
    const raw = readFileSync4(file, "utf-8");
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
866
/**
 * Atomically persist a session's trigger state: serialize to a unique
 * temp file in the same directory, then rename over the target.
 */
function writeState(sessionId, state) {
  mkdirSync2(STATE_DIR, { recursive: true });
  const target = statePath(sessionId);
  const scratch = `${target}.${process.pid}.${Date.now()}.tmp`;
  writeFileSync2(scratch, JSON.stringify(state));
  renameSync(scratch, target);
}
873
/**
 * Run `fn` under a short-lived read-modify-write lock for `sessionId`.
 * The lock is an exclusively-created `.rmw` file next to the state file;
 * contenders spin with a ~10ms blocking sleep. After a 2s deadline the
 * lock is assumed stale, unlinked, and re-acquired on the next loop pass.
 * The lock is always released (close + unlink) in `finally`; returns
 * whatever `fn` returns.
 */
function withRmwLock(sessionId, fn) {
  mkdirSync2(STATE_DIR, { recursive: true });
  const rmwLock = statePath(sessionId) + ".rmw";
  const deadline = Date.now() + 2e3;
  let fd = null;
  while (fd === null) {
    try {
      // "wx": exclusive create — fails with EEXIST while another holder exists.
      fd = openSync2(rmwLock, "wx");
    } catch (e) {
      if (e.code !== "EEXIST")
        throw e;
      if (Date.now() > deadline) {
        dlog(`rmw lock deadline exceeded for ${sessionId}, reclaiming stale lock`);
        try {
          unlinkSync2(rmwLock);
        } catch (unlinkErr) {
          dlog(`stale rmw lock unlink failed for ${sessionId}: ${unlinkErr.message}`);
        }
        // Loop again and race to recreate the lock file.
        continue;
      }
      // Bounded blocking sleep (~10ms); the buffer value never changes,
      // so the wait always times out.
      Atomics.wait(YIELD_BUF, 0, 0, 10);
    }
  }
  try {
    return fn();
  } finally {
    closeSync2(fd);
    try {
      unlinkSync2(rmwLock);
    } catch (unlinkErr) {
      dlog(`rmw lock cleanup failed for ${sessionId}: ${unlinkErr.message}`);
    }
  }
}
907
/**
 * Increment the per-session message counter under the RMW lock and return
 * the updated state. Initializes fresh state on first use.
 */
function bumpTotalCount(sessionId) {
  return withRmwLock(sessionId, () => {
    const now = Date.now();
    const prev = readState(sessionId);
    let next;
    if (prev) {
      next = { ...prev, totalCount: prev.totalCount + 1 };
    } else {
      next = { lastSummaryAt: now, lastSummaryCount: 0, totalCount: 1 };
    }
    writeState(sessionId, next);
    return next;
  });
}
916
/**
 * Read summary-trigger tuning from the environment.
 * Falls back to 50 messages / 2 hours when unset or invalid.
 */
function loadTriggerConfig() {
  const everyN = Number(process.env.HIVEMIND_SUMMARY_EVERY_N_MSGS ?? "");
  const everyH = Number(process.env.HIVEMIND_SUMMARY_EVERY_HOURS ?? "");
  const everyNMessages = Number.isInteger(everyN) && everyN > 0 ? everyN : 50;
  const everyHours = Number.isFinite(everyH) && everyH > 0 ? everyH : 2;
  return { everyNMessages, everyHours };
}
924
// The very first summary fires early, after this many total messages.
var FIRST_SUMMARY_AT = 10;
/**
 * Decide whether a new summary should be generated. Fires when:
 * (a) no summary yet and totalCount >= FIRST_SUMMARY_AT,
 * (b) everyNMessages new messages since the last summary, or
 * (c) at least one new message exists and everyHours have elapsed.
 */
function shouldTrigger(state, cfg, now = Date.now()) {
  const newMessages = state.totalCount - state.lastSummaryCount;
  const neverSummarized = state.lastSummaryCount === 0;
  if (neverSummarized && state.totalCount >= FIRST_SUMMARY_AT) return true;
  if (newMessages >= cfg.everyNMessages) return true;
  const elapsedMs = now - state.lastSummaryAt;
  return newMessages > 0 && elapsedMs >= cfg.everyHours * 3600 * 1e3;
}
935
/**
 * Try to take the per-session summary-worker lock.
 *
 * The lock file contains its creation timestamp (ms). An existing lock
 * younger than `maxAgeMs` (default 10 min) blocks acquisition; an old or
 * unreadable lock is treated as stale, unlinked, and replaced.
 * Returns true only when this call created the lock.
 * NOTE(review): the exists/read/unlink/create sequence is not atomic —
 * two processes can race here; the final "wx" create is what makes the
 * outcome safe (exactly one winner).
 */
function tryAcquireLock(sessionId, maxAgeMs = 10 * 60 * 1e3) {
  mkdirSync2(STATE_DIR, { recursive: true });
  const p = lockPath(sessionId);
  if (existsSync4(p)) {
    try {
      // Lock file body is Date.now() written by its creator.
      const ageMs = Date.now() - parseInt(readFileSync4(p, "utf-8"), 10);
      if (Number.isFinite(ageMs) && ageMs < maxAgeMs)
        return false;
    } catch (readErr) {
      dlog(`lock file unreadable for ${sessionId}, treating as stale: ${readErr.message}`);
    }
    try {
      unlinkSync2(p);
    } catch (unlinkErr) {
      dlog(`could not unlink stale lock for ${sessionId}: ${unlinkErr.message}`);
      return false;
    }
  }
  try {
    // "wx": exclusive create — loses cleanly if another process won the race.
    const fd = openSync2(p, "wx");
    try {
      writeSync2(fd, String(Date.now()));
    } finally {
      closeSync2(fd);
    }
    return true;
  } catch (e) {
    if (e.code === "EEXIST")
      return false;
    throw e;
  }
}
967
/** Remove a session's worker lock; a missing lock (ENOENT) is ignored. */
function releaseLock(sessionId) {
  try {
    unlinkSync2(lockPath(sessionId));
  } catch (e) {
    if (e?.code === "ENOENT") return;
    dlog(`releaseLock unlink failed for ${sessionId}: ${e.message}`);
  }
}
976
+
977
+ // dist/src/hooks/hermes/spawn-wiki-worker.js
978
+ import { spawn as spawn2, execSync } from "node:child_process";
979
+ import { fileURLToPath } from "node:url";
980
+ import { dirname, join as join8 } from "node:path";
981
+ import { writeFileSync as writeFileSync3, mkdirSync as mkdirSync4 } from "node:fs";
982
+ import { homedir as homedir6, tmpdir as tmpdir2 } from "node:os";
983
+
984
+ // dist/src/utils/wiki-log.js
985
+ import { mkdirSync as mkdirSync3, appendFileSync as appendFileSync2 } from "node:fs";
986
+ import { join as join7 } from "node:path";
987
/**
 * Create a tiny append-only logger writing timestamped lines to
 * `<hooksDir>/<filename>`. Logging failures are deliberately swallowed
 * so logging can never break the caller.
 */
function makeWikiLogger(hooksDir, filename = "deeplake-wiki.log") {
  const logFile = join7(hooksDir, filename);
  const log = (msg) => {
    try {
      mkdirSync3(hooksDir, { recursive: true });
      appendFileSync2(logFile, `[${utcTimestamp()}] ${msg}\n`);
    } catch {
    }
  };
  return { path: logFile, log };
}
1001
+
1002
+ // dist/src/hooks/hermes/spawn-wiki-worker.js
1003
// Wiki worker log lives under ~/.hermes/hooks/deeplake-wiki.log; the path
// is also forwarded to the spawned worker via its config file.
var HOME = homedir6();
var wikiLogger = makeWikiLogger(join8(HOME, ".hermes", "hooks"));
var WIKI_LOG = wikiLogger.path;
1006
+ var WIKI_PROMPT_TEMPLATE = `You are building a personal wiki from a coding session. Your goal is to extract every piece of knowledge \u2014 entities, decisions, relationships, and facts \u2014 into a structured, searchable wiki entry.
1007
+
1008
+ SESSION JSONL path: __JSONL__
1009
+ SUMMARY FILE to write: __SUMMARY__
1010
+ SESSION ID: __SESSION_ID__
1011
+ PROJECT: __PROJECT__
1012
+ PREVIOUS JSONL OFFSET (lines already processed): __PREV_OFFSET__
1013
+ CURRENT JSONL LINES: __JSONL_LINES__
1014
+
1015
+ Steps:
1016
+ 1. Read the session JSONL at the path above.
1017
+ - If PREVIOUS JSONL OFFSET > 0, this is a resumed session. Read the existing summary file first,
1018
+ then focus on lines AFTER the offset for new content. Merge new facts into the existing summary.
1019
+ - If offset is 0, generate from scratch.
1020
+
1021
+ 2. Write the summary file at the path above with this EXACT format:
1022
+
1023
+ # Session __SESSION_ID__
1024
+ - **Source**: __JSONL_SERVER_PATH__
1025
+ - **Started**: <extract from JSONL>
1026
+ - **Ended**: <now>
1027
+ - **Project**: __PROJECT__
1028
+ - **JSONL offset**: __JSONL_LINES__
1029
+
1030
+ ## What Happened
1031
+ <2-3 dense sentences. What was the goal, what was accomplished, what's left.>
1032
+
1033
+ ## People
1034
+ <For each person mentioned: name, role, what they did/said. Format: **Name** \u2014 role \u2014 action>
1035
+
1036
+ ## Entities
1037
+ <Every named thing: repos, branches, files, APIs, tools, services, tables, features, bugs.
1038
+ Format: **entity** (type) \u2014 what was done with it, its current state>
1039
+
1040
+ ## Decisions & Reasoning
1041
+ <Every decision made and WHY.>
1042
+
1043
+ ## Key Facts
1044
+ <Bullet list of atomic facts that could answer future questions.>
1045
+
1046
+ ## Files Modified
1047
+ <bullet list: path (new/modified/deleted) \u2014 what changed>
1048
+
1049
+ ## Open Questions / TODO
1050
+ <Anything unresolved, blocked, or explicitly deferred>
1051
+
1052
+ IMPORTANT: Be exhaustive. Extract EVERY entity, decision, and fact.
1053
+ PRIVACY: Never include absolute filesystem paths in the summary.
1054
+ LENGTH LIMIT: Keep the total summary under 4000 characters.`;
1055
// Bound logging function from the module-level wiki logger.
var wikiLog = wikiLogger.log;
1056
/**
 * Locate the hermes binary via `which`; fall back to the bare name so
 * spawn can still resolve it through PATH.
 */
function findHermesBin() {
  let resolved = "";
  try {
    resolved = execSync("which hermes 2>/dev/null", { encoding: "utf-8" }).trim();
  } catch {
    // `which` missing or hermes not installed — use the bare name.
  }
  return resolved || "hermes";
}
1063
/**
 * Spawn the detached wiki-summary worker for a session.
 *
 * Writes the worker's config — including the API token — into a fresh
 * temp directory, then launches `node wiki-worker.js <configFile>` via
 * nohup, detached with stdio ignored, so the hook process can exit.
 *
 * Fix: the config file contains the API token, so the temp dir is now
 * created 0o700 and the file written 0o600 (matching spawnSkilifyWorker,
 * which already protects its token-bearing config this way). Previously
 * both used default, typically world-readable, permissions.
 */
function spawnHermesWikiWorker(opts) {
  const { config, sessionId, cwd, bundleDir, reason } = opts;
  const projectName = cwd.split("/").pop() || "unknown";
  const tmpDir = join8(tmpdir2(), `deeplake-wiki-${sessionId}-${Date.now()}`);
  // 448 === 0o700: owner-only directory for the token-bearing config.
  mkdirSync4(tmpDir, { recursive: true, mode: 448 });
  const configFile = join8(tmpDir, "config.json");
  writeFileSync3(configFile, JSON.stringify({
    apiUrl: config.apiUrl,
    token: config.token,
    orgId: config.orgId,
    workspaceId: config.workspaceId,
    memoryTable: config.tableName,
    sessionsTable: config.sessionsTableName,
    sessionId,
    userName: config.userName,
    project: projectName,
    tmpDir,
    hermesBin: findHermesBin(),
    hermesProvider: process.env.HIVEMIND_HERMES_PROVIDER ?? "openrouter",
    hermesModel: process.env.HIVEMIND_HERMES_MODEL ?? "anthropic/claude-haiku-4-5",
    wikiLog: WIKI_LOG,
    hooksDir: join8(HOME, ".hermes", "hooks"),
    promptTemplate: WIKI_PROMPT_TEMPLATE
  }), { mode: 384 });
  // 384 === 0o600; re-chmod in case the file pre-existed with a wider mode.
  try {
    chmodSync(configFile, 384);
  } catch {
  }
  wikiLog(`${reason}: spawning summary worker for ${sessionId}`);
  const workerPath = join8(bundleDir, "wiki-worker.js");
  spawn2("nohup", ["node", workerPath, configFile], {
    detached: true,
    stdio: ["ignore", "ignore", "ignore"]
  }).unref();
  wikiLog(`${reason}: spawned summary worker for ${sessionId}`);
}
1095
/** Directory containing the current bundle, derived from import.meta.url. */
function bundleDirFromImportMeta(importMetaUrl) {
  const modulePath = fileURLToPath(importMetaUrl);
  return dirname(modulePath);
}
1098
+
1099
+ // dist/src/skilify/spawn-skilify-worker.js
1100
+ import { spawn as spawn3 } from "node:child_process";
1101
+ import { fileURLToPath as fileURLToPath2 } from "node:url";
1102
+ import { dirname as dirname2, join as join10 } from "node:path";
1103
+ import { writeFileSync as writeFileSync4, mkdirSync as mkdirSync5, appendFileSync as appendFileSync3, chmodSync } from "node:fs";
1104
+ import { homedir as homedir8, tmpdir as tmpdir3 } from "node:os";
1105
+
1106
+ // dist/src/skilify/gate-runner.js
1107
+ import { execFileSync } from "node:child_process";
1108
+ import { existsSync as existsSync5 } from "node:fs";
1109
+ import { homedir as homedir7 } from "node:os";
1110
+ import { join as join9 } from "node:path";
1111
/**
 * Resolve the CLI binary for an agent kind, preferring whatever is on
 * PATH (via `which`) and falling back to the agent's conventional
 * install location. Returns undefined for unknown agent kinds.
 */
function findAgentBin(agent) {
  const lookup = (name) => {
    try {
      const stdout = execFileSync("which", [name], {
        encoding: "utf-8",
        stdio: ["ignore", "pipe", "ignore"]
      });
      const trimmed = stdout.trim();
      return trimmed.length > 0 ? trimmed : null;
    } catch {
      return null;
    }
  };
  switch (agent) {
    case "claude_code":
      return lookup("claude") ?? join9(homedir7(), ".claude", "local", "claude");
    case "codex":
      return lookup("codex") ?? "/usr/local/bin/codex";
    case "cursor":
      return lookup("cursor-agent") ?? "/usr/local/bin/cursor-agent";
    case "hermes":
      return lookup("hermes") ?? join9(homedir7(), ".local", "bin", "hermes");
    case "pi":
      return lookup("pi") ?? join9(homedir7(), ".local", "bin", "pi");
  }
}
1136
+
1137
+ // dist/src/skilify/spawn-skilify-worker.js
1138
var HOME2 = homedir8();
// Skilify workers log to the shared Claude hooks directory.
var SKILIFY_LOG = join10(HOME2, ".claude", "hooks", "skilify.log");
/** Append one timestamped line to the skilify log; failures are swallowed. */
function skilifyLog(msg) {
  try {
    mkdirSync5(dirname2(SKILIFY_LOG), { recursive: true });
    appendFileSync3(SKILIFY_LOG, `[${utcTimestamp()}] ${msg}\n`);
  } catch {
  }
}
1148
/**
 * Spawn the detached skilify worker for a project.
 *
 * Serializes everything the worker needs — API endpoint and token, table
 * names, project identity, the resolved agent binary, scope settings and
 * per-agent model overrides — into a config file under a freshly created
 * owner-only temp dir, then launches `node skilify-worker.js <configFile>`
 * via nohup, detached with stdio ignored so this hook can exit.
 */
function spawnSkilifyWorker(opts) {
  const { config, cwd, projectKey, project, bundleDir, agent, scopeConfig, currentSessionId, reason } = opts;
  const tmpDir = join10(tmpdir3(), `deeplake-skilify-${projectKey}-${Date.now()}`);
  // 448 === 0o700: owner-only dir — the config below holds the API token.
  mkdirSync5(tmpDir, { recursive: true, mode: 448 });
  const gateBin = findAgentBin(agent);
  const configFile = join10(tmpDir, "config.json");
  writeFileSync4(configFile, JSON.stringify({
    apiUrl: config.apiUrl,
    token: config.token,
    orgId: config.orgId,
    workspaceId: config.workspaceId,
    sessionsTable: config.sessionsTableName,
    skillsTable: config.skillsTableName,
    userName: config.userName,
    cwd,
    projectKey,
    project,
    agent,
    scope: scopeConfig.scope,
    team: scopeConfig.team,
    install: scopeConfig.install,
    tmpDir,
    gateBin,
    cursorModel: process.env.HIVEMIND_CURSOR_MODEL,
    hermesProvider: process.env.HIVEMIND_HERMES_PROVIDER,
    hermesModel: process.env.HIVEMIND_HERMES_MODEL,
    piProvider: process.env.HIVEMIND_PI_PROVIDER,
    piModel: process.env.HIVEMIND_PI_MODEL,
    skilifyLog: SKILIFY_LOG,
    currentSessionId
  }), { mode: 384 });
  // 384 === 0o600; re-chmod in case the file pre-existed with a wider mode.
  try {
    chmodSync(configFile, 384);
  } catch {
  }
  skilifyLog(`${reason}: spawning skilify worker for project=${project} key=${projectKey}`);
  const workerPath = join10(bundleDir, "skilify-worker.js");
  spawn3("nohup", ["node", workerPath, configFile], {
    detached: true,
    stdio: ["ignore", "ignore", "ignore"]
  }).unref();
  skilifyLog(`${reason}: spawned skilify worker for ${projectKey}`);
}
1191
+
1192
+ // dist/src/skilify/state.js
1193
+ import { readFileSync as readFileSync5, writeFileSync as writeFileSync5, writeSync as writeSync3, mkdirSync as mkdirSync6, renameSync as renameSync2, existsSync as existsSync6, unlinkSync as unlinkSync3, openSync as openSync3, closeSync as closeSync3 } from "node:fs";
1194
+ import { execSync as execSync2 } from "node:child_process";
1195
+ import { homedir as homedir9 } from "node:os";
1196
+ import { createHash } from "node:crypto";
1197
+ import { join as join11, basename } from "node:path";
1198
// Prefixed debug logger for this module.
var dlog2 = (msg) => log("skilify-state", msg);
// Per-project skilify state lives under ~/.deeplake/state/skilify.
var STATE_DIR2 = join11(homedir9(), ".deeplake", "state", "skilify");
// Dummy shared buffer for Atomics.wait-based bounded sleeps.
var YIELD_BUF2 = new Int32Array(new SharedArrayBuffer(4));
// Stop events between skilify runs; env-overridable, defaults to 20.
var TRIGGER_THRESHOLD = (() => {
  const raw = Number(process.env.HIVEMIND_SKILIFY_EVERY_N_TURNS ?? "");
  if (Number.isInteger(raw) && raw > 0) return raw;
  return 20;
})();
/** JSON state file for one project key. */
function statePath2(projectKey) {
  return join11(STATE_DIR2, projectKey + ".json");
}
/** Worker lock file for one project key. */
function lockPath2(projectKey) {
  return join11(STATE_DIR2, projectKey + ".lock");
}
1211
/**
 * Derive a stable identity for a project directory: `key` is the first
 * 16 hex chars of sha1 over the git remote origin URL (preferred) or,
 * outside a git repo, the cwd itself; `project` is the dir basename.
 */
function deriveProjectKey(cwd) {
  const project = basename(cwd) || "unknown";
  let remoteUrl = null;
  try {
    const out = execSync2("git config --get remote.origin.url", {
      cwd,
      encoding: "utf-8",
      stdio: ["ignore", "pipe", "ignore"]
    }).trim();
    if (out) remoteUrl = out;
  } catch {
    // not a git repo / git unavailable — fall back to the path below
  }
  const key = createHash("sha1").update(remoteUrl ?? cwd).digest("hex").slice(0, 16);
  return { key, project };
}
1226
/** Load persisted skilify state for a project key; null if absent/corrupt. */
function readState2(projectKey) {
  const file = statePath2(projectKey);
  if (!existsSync6(file)) return null;
  try {
    return JSON.parse(readFileSync5(file, "utf-8"));
  } catch {
    return null;
  }
}
1236
/** Atomically persist skilify state via temp file + rename (pretty JSON). */
function writeState2(projectKey, state) {
  mkdirSync6(STATE_DIR2, { recursive: true });
  const target = statePath2(projectKey);
  const scratch = `${target}.${process.pid}.${Date.now()}.tmp`;
  writeFileSync5(scratch, JSON.stringify(state, null, 2));
  renameSync2(scratch, target);
}
1243
/**
 * Run `fn` under a short-lived read-modify-write lock for `projectKey`.
 * Same protocol as withRmwLock in summary-state: exclusive `.rmw` file,
 * ~10ms blocking spin, 2s deadline after which the lock is reclaimed as
 * stale. Lock is always released in `finally`; returns fn()'s result.
 */
function withRmwLock2(projectKey, fn) {
  mkdirSync6(STATE_DIR2, { recursive: true });
  const rmw = lockPath2(projectKey) + ".rmw";
  const deadline = Date.now() + 2e3;
  let fd = null;
  while (fd === null) {
    try {
      // "wx": exclusive create — fails with EEXIST while another holder exists.
      fd = openSync3(rmw, "wx");
    } catch (e) {
      if (e.code !== "EEXIST")
        throw e;
      if (Date.now() > deadline) {
        dlog2(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`);
        try {
          unlinkSync3(rmw);
        } catch (unlinkErr) {
          dlog2(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`);
        }
        // Loop again and race to recreate the lock file.
        continue;
      }
      // Bounded blocking sleep (~10ms); the buffer value never changes.
      Atomics.wait(YIELD_BUF2, 0, 0, 10);
    }
  }
  try {
    return fn();
  } finally {
    closeSync3(fd);
    try {
      unlinkSync3(rmw);
    } catch (unlinkErr) {
      dlog2(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`);
    }
  }
}
1277
/**
 * Increment the per-project stop counter under the RMW lock, creating
 * fresh state on first use; returns the updated state.
 */
function bumpStopCounter(cwd) {
  const { key, project } = deriveProjectKey(cwd);
  return withRmwLock2(key, () => {
    const prev = readState2(key);
    let next;
    if (prev) {
      next = { ...prev, counter: prev.counter + 1, updatedAt: Date.now() };
    } else {
      next = {
        project,
        projectKey: key,
        counter: 1,
        lastUuid: null,
        lastDate: null,
        skillsGenerated: [],
        updatedAt: Date.now()
      };
    }
    writeState2(key, next);
    return next;
  });
}
1294
/** Zero the stop counter for a project key (no-op when no state exists). */
function resetCounter(projectKey) {
  withRmwLock2(projectKey, () => {
    const current = readState2(projectKey);
    if (current === null) return;
    writeState2(projectKey, { ...current, counter: 0, updatedAt: Date.now() });
  });
}
1302
/**
 * Try to take the per-project skilify worker lock.
 *
 * Same timestamp-file protocol as tryAcquireLock in summary-state, with
 * one difference: ANY failure to create the lock (not just EEXIST) is
 * reported as "not acquired" rather than thrown.
 * Returns true only when this call created the lock.
 */
function tryAcquireWorkerLock(projectKey, maxAgeMs = 10 * 60 * 1e3) {
  mkdirSync6(STATE_DIR2, { recursive: true });
  const p = lockPath2(projectKey);
  if (existsSync6(p)) {
    try {
      // Lock file body is Date.now() written by its creator.
      const ageMs = Date.now() - parseInt(readFileSync5(p, "utf-8"), 10);
      if (Number.isFinite(ageMs) && ageMs < maxAgeMs)
        return false;
    } catch (readErr) {
      dlog2(`worker lock unreadable for ${projectKey}, treating as stale: ${readErr.message}`);
    }
    try {
      unlinkSync3(p);
    } catch (unlinkErr) {
      dlog2(`could not unlink stale worker lock for ${projectKey}: ${unlinkErr.message}`);
      return false;
    }
  }
  try {
    // "wx": exclusive create — loses cleanly if another process won the race.
    const fd = openSync3(p, "wx");
    try {
      writeSync3(fd, String(Date.now()));
    } finally {
      closeSync3(fd);
    }
    return true;
  } catch {
    return false;
  }
}
1332
/**
 * Remove the worker lock for a project key. A missing lock (ENOENT) is
 * normal and ignored; any other failure is now logged so a stuck lock
 * (e.g. a permissions problem) leaves a trace — previously every error
 * was swallowed silently, unlike the sibling releaseLock in summary-state.
 */
function releaseWorkerLock(projectKey) {
  const p = lockPath2(projectKey);
  try {
    unlinkSync3(p);
  } catch (e) {
    if (e?.code !== "ENOENT") {
      dlog2(`releaseWorkerLock unlink failed for ${projectKey}: ${e.message}`);
    }
  }
}
1339
+
1340
+ // dist/src/skilify/scope-config.js
1341
+ import { existsSync as existsSync7, mkdirSync as mkdirSync7, readFileSync as readFileSync6, writeFileSync as writeFileSync6 } from "node:fs";
1342
+ import { homedir as homedir10 } from "node:os";
1343
+ import { join as join12 } from "node:path";
1344
var STATE_DIR3 = join12(homedir10(), ".deeplake", "state", "skilify");
var CONFIG_PATH = join12(STATE_DIR3, "config.json");
// Fallback when no config file exists or it cannot be parsed.
var DEFAULT = { scope: "me", team: [], install: "project" };
/**
 * Load and sanitize the skilify scope config from CONFIG_PATH.
 * Unknown or invalid fields are coerced to safe defaults.
 */
function loadScopeConfig() {
  if (!existsSync7(CONFIG_PATH)) return DEFAULT;
  try {
    const raw = JSON.parse(readFileSync6(CONFIG_PATH, "utf-8"));
    return {
      scope: raw.scope === "team" || raw.scope === "org" ? raw.scope : "me",
      team: Array.isArray(raw.team) ? raw.team.filter((entry) => typeof entry === "string") : [],
      install: raw.install === "global" ? "global" : "project"
    };
  } catch {
    return DEFAULT;
  }
}
1360
+
1361
+ // dist/src/skilify/triggers.js
1362
/**
 * Per-Stop-event trigger for skill generation.
 * Increments the project's stop counter; once it reaches
 * TRIGGER_THRESHOLD, takes the worker lock, resets the counter, and
 * spawns a detached skilify worker. No-ops inside a skilify worker
 * process (HIVEMIND_SKILIFY_WORKER=1) or when cwd is missing. All errors
 * are logged and swallowed — hooks must never crash the host agent.
 */
function tryStopCounterTrigger(opts) {
  if (process.env.HIVEMIND_SKILIFY_WORKER === "1")
    return;
  if (!opts.cwd)
    return;
  try {
    const state = bumpStopCounter(opts.cwd);
    if (state.counter < TRIGGER_THRESHOLD)
      return;
    if (!tryAcquireWorkerLock(state.projectKey)) {
      skilifyLog(`Stop: trigger suppressed (worker lock held) project=${state.project}`);
      return;
    }
    skilifyLog(`Stop: threshold hit (counter=${state.counter}, N=${TRIGGER_THRESHOLD}) project=${state.project} agent=${opts.agent}`);
    // NOTE(review): counter is reset before the spawn is attempted; if the
    // spawn throws, only the lock is released — the counter stays at 0.
    resetCounter(state.projectKey);
    try {
      spawnSkilifyWorker({
        config: opts.config,
        cwd: opts.cwd,
        projectKey: state.projectKey,
        project: state.project,
        bundleDir: opts.bundleDir,
        agent: opts.agent,
        scopeConfig: loadScopeConfig(),
        currentSessionId: opts.sessionId,
        reason: "Stop"
      });
    } catch (e) {
      skilifyLog(`Stop spawn failed: ${e?.message ?? e}`);
      // Release the lock so a later Stop event can retry.
      try {
        releaseWorkerLock(state.projectKey);
      } catch {
      }
    }
  } catch (e) {
    skilifyLog(`Stop trigger error: ${e?.message ?? e}`);
  }
}
1400
+
1401
+ // dist/src/hooks/hermes/capture.js
1402
// Prefixed debug logger for the hermes capture hook.
var log4 = (msg) => log("hermes-capture", msg);
/** Absolute path of the embed daemon script shipped next to this bundle. */
function resolveEmbedDaemonPath() {
  const bundleDir = dirname3(fileURLToPath3(import.meta.url));
  return join13(bundleDir, "embeddings", "embed-daemon.js");
}
396
1406
  var CAPTURE = process.env.HIVEMIND_CAPTURE !== "false";
397
1407
  function pickString(...candidates) {
398
1408
  for (const c of candidates) {
@@ -407,7 +1417,7 @@ async function main() {
407
1417
  const input = await readStdin();
408
1418
  const config = loadConfig();
409
1419
  if (!config) {
410
- log3("no config");
1420
+ log4("no config");
411
1421
  return;
412
1422
  }
413
1423
  const sessionId = input.session_id ?? `hermes-${Date.now()}`;
@@ -427,14 +1437,14 @@ async function main() {
427
1437
  if (event === "pre_llm_call") {
428
1438
  const prompt = pickString(extra.prompt, extra.user_message, extra.message?.content);
429
1439
  if (!prompt) {
430
- log3(`pre_llm_call: no prompt found in extra`);
1440
+ log4(`pre_llm_call: no prompt found in extra`);
431
1441
  return;
432
1442
  }
433
- log3(`user session=${sessionId}`);
1443
+ log4(`user session=${sessionId}`);
434
1444
  entry = { id: crypto.randomUUID(), ...meta, type: "user_message", content: prompt };
435
1445
  } else if (event === "post_tool_call" && typeof input.tool_name === "string") {
436
1446
  const toolResponse = extra.tool_result ?? extra.tool_output ?? extra.result ?? extra.output;
437
- log3(`tool=${input.tool_name} session=${sessionId}`);
1447
+ log4(`tool=${input.tool_name} session=${sessionId}`);
438
1448
  entry = {
439
1449
  id: crypto.randomUUID(),
440
1450
  ...meta,
@@ -446,36 +1456,80 @@ async function main() {
446
1456
  } else if (event === "post_llm_call") {
447
1457
  const text = pickString(extra.response, extra.assistant_message, extra.message?.content);
448
1458
  if (!text) {
449
- log3(`post_llm_call: no response found in extra`);
1459
+ log4(`post_llm_call: no response found in extra`);
450
1460
  return;
451
1461
  }
452
- log3(`assistant session=${sessionId}`);
1462
+ log4(`assistant session=${sessionId}`);
453
1463
  entry = { id: crypto.randomUUID(), ...meta, type: "assistant_message", content: text };
454
1464
  } else {
455
- log3(`unknown/unhandled event: ${event}, skipping`);
1465
+ log4(`unknown/unhandled event: ${event}, skipping`);
456
1466
  return;
457
1467
  }
458
1468
  const sessionPath = buildSessionPath(config, sessionId);
459
1469
  const line = JSON.stringify(entry);
460
- log3(`writing to ${sessionPath}`);
1470
+ log4(`writing to ${sessionPath}`);
461
1471
  const projectName = cwd.split("/").pop() || "unknown";
462
1472
  const filename = sessionPath.split("/").pop() ?? "";
463
1473
  const jsonForSql = line.replace(/'/g, "''");
464
- const insertSql = `INSERT INTO "${sessionsTable}" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ('${crypto.randomUUID()}', '${sqlStr(sessionPath)}', '${sqlStr(filename)}', '${jsonForSql}'::jsonb, '${sqlStr(config.userName)}', ${Buffer.byteLength(line, "utf-8")}, '${sqlStr(projectName)}', '${sqlStr(event)}', 'hermes', '${ts}', '${ts}')`;
1474
+ const embedding = embeddingsDisabled() ? null : await new EmbedClient({ daemonEntry: resolveEmbedDaemonPath() }).embed(line, "document");
1475
+ const embeddingSql = embeddingSqlLiteral(embedding);
1476
+ const insertSql = `INSERT INTO "${sessionsTable}" (id, path, filename, message, message_embedding, author, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ('${crypto.randomUUID()}', '${sqlStr(sessionPath)}', '${sqlStr(filename)}', '${jsonForSql}'::jsonb, ${embeddingSql}, '${sqlStr(config.userName)}', ${Buffer.byteLength(line, "utf-8")}, '${sqlStr(projectName)}', '${sqlStr(event)}', 'hermes', '${ts}', '${ts}')`;
465
1477
  try {
466
1478
  await api.query(insertSql);
467
1479
  } catch (e) {
468
1480
  if (e.message?.includes("permission denied") || e.message?.includes("does not exist")) {
469
- log3("table missing, creating and retrying");
1481
+ log4("table missing, creating and retrying");
470
1482
  await api.ensureSessionsTable(sessionsTable);
471
1483
  await api.query(insertSql);
472
1484
  } else {
473
1485
  throw e;
474
1486
  }
475
1487
  }
476
- log3("capture ok \u2192 cloud");
1488
+ log4("capture ok \u2192 cloud");
1489
+ maybeTriggerPeriodicSummary(sessionId, cwd, config);
1490
+ if (event === "post_llm_call" && process.env.HIVEMIND_WIKI_WORKER !== "1" && process.env.HIVEMIND_SKILIFY_WORKER !== "1") {
1491
+ tryStopCounterTrigger({
1492
+ config,
1493
+ cwd,
1494
+ bundleDir: bundleDirFromImportMeta(import.meta.url),
1495
+ agent: "hermes",
1496
+ sessionId
1497
+ });
1498
+ }
1499
+ }
1500
/**
 * After each captured message, bump the session's message counter and,
 * if the summary trigger fires (count/time thresholds), spawn a detached
 * wiki-summary worker — guarded by a per-session lock so at most one
 * worker runs at a time. Skipped entirely inside a wiki worker process
 * (HIVEMIND_WIKI_WORKER=1) to avoid recursive spawning. All failures are
 * logged, never thrown — capture must not break the host agent.
 */
function maybeTriggerPeriodicSummary(sessionId, cwd, config) {
  if (process.env.HIVEMIND_WIKI_WORKER === "1")
    return;
  try {
    const state = bumpTotalCount(sessionId);
    const cfg = loadTriggerConfig();
    if (!shouldTrigger(state, cfg))
      return;
    if (!tryAcquireLock(sessionId)) {
      log4(`periodic trigger suppressed (lock held) session=${sessionId}`);
      return;
    }
    wikiLog(`Periodic: threshold hit (total=${state.totalCount}, since=${state.totalCount - state.lastSummaryCount}, N=${cfg.everyNMessages}, hours=${cfg.everyHours})`);
    try {
      spawnHermesWikiWorker({
        config,
        sessionId,
        cwd,
        bundleDir: bundleDirFromImportMeta(import.meta.url),
        reason: "Periodic"
      });
    } catch (e) {
      log4(`periodic spawn failed: ${e.message}`);
      // Release the lock so a later message can retry the spawn.
      try {
        releaseLock(sessionId);
      } catch {
      }
    }
  } catch (e) {
    log4(`periodic trigger error: ${e.message}`);
  }
}
478
1532
  main().catch((e) => {
479
- log3(`fatal: ${e.message}`);
1533
+ log4(`fatal: ${e.message}`);
480
1534
  process.exit(0);
481
1535
  });