@deeplake/hivemind 0.6.48 → 0.7.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/.claude-plugin/marketplace.json +2 -2
  2. package/.claude-plugin/plugin.json +1 -1
  3. package/README.md +244 -20
  4. package/bundle/cli.js +1369 -112
  5. package/codex/bundle/capture.js +546 -96
  6. package/codex/bundle/commands/auth-login.js +290 -81
  7. package/codex/bundle/embeddings/embed-daemon.js +243 -0
  8. package/codex/bundle/pre-tool-use.js +666 -111
  9. package/codex/bundle/session-start-setup.js +231 -64
  10. package/codex/bundle/session-start.js +52 -13
  11. package/codex/bundle/shell/deeplake-shell.js +716 -119
  12. package/codex/bundle/skilify-worker.js +907 -0
  13. package/codex/bundle/stop.js +819 -79
  14. package/codex/bundle/wiki-worker.js +312 -11
  15. package/cursor/bundle/capture.js +1116 -64
  16. package/cursor/bundle/commands/auth-login.js +290 -81
  17. package/cursor/bundle/embeddings/embed-daemon.js +243 -0
  18. package/cursor/bundle/pre-tool-use.js +598 -77
  19. package/cursor/bundle/session-end.js +520 -2
  20. package/cursor/bundle/session-start.js +257 -65
  21. package/cursor/bundle/shell/deeplake-shell.js +716 -119
  22. package/cursor/bundle/skilify-worker.js +907 -0
  23. package/cursor/bundle/wiki-worker.js +571 -0
  24. package/hermes/bundle/capture.js +1119 -65
  25. package/hermes/bundle/commands/auth-login.js +290 -81
  26. package/hermes/bundle/embeddings/embed-daemon.js +243 -0
  27. package/hermes/bundle/pre-tool-use.js +597 -76
  28. package/hermes/bundle/session-end.js +522 -1
  29. package/hermes/bundle/session-start.js +260 -65
  30. package/hermes/bundle/shell/deeplake-shell.js +716 -119
  31. package/hermes/bundle/skilify-worker.js +907 -0
  32. package/hermes/bundle/wiki-worker.js +572 -0
  33. package/mcp/bundle/server.js +290 -75
  34. package/openclaw/dist/chunks/auth-creds-AEKS6D3P.js +14 -0
  35. package/openclaw/dist/chunks/chunk-SRCBBT4H.js +37 -0
  36. package/openclaw/dist/chunks/config-ZLH6JFJS.js +34 -0
  37. package/openclaw/dist/chunks/index-marker-store-PGT5CW6T.js +33 -0
  38. package/openclaw/dist/chunks/setup-config-C35UK4LP.js +114 -0
  39. package/openclaw/dist/index.js +929 -710
  40. package/openclaw/dist/skilify-worker.js +907 -0
  41. package/openclaw/openclaw.plugin.json +1 -1
  42. package/openclaw/package.json +1 -1
  43. package/openclaw/skills/SKILL.md +19 -0
  44. package/package.json +7 -1
  45. package/pi/extension-source/hivemind.ts +603 -22
@@ -21,6 +21,9 @@ import { join } from "node:path";
21
21
  import { homedir } from "node:os";
22
22
  var DEBUG = process.env.HIVEMIND_DEBUG === "1";
23
23
  var LOG = join(homedir(), ".deeplake", "hook-debug.log");
24
// Format a Date as "YYYY-MM-DD HH:MM:SS UTC" (second precision, always UTC).
function utcTimestamp(d = new Date()) {
  const iso = d.toISOString();
  return `${iso.slice(0, 10)} ${iso.slice(11, 19)} UTC`;
}
24
27
  function log(tag, msg) {
25
28
  if (!DEBUG)
26
29
  return;
@@ -28,13 +31,531 @@ function log(tag, msg) {
28
31
  `);
29
32
  }
30
33
 
34
+ // dist/src/config.js
35
+ import { readFileSync, existsSync } from "node:fs";
36
+ import { join as join2 } from "node:path";
37
+ import { homedir as homedir2, userInfo } from "node:os";
38
// Load HiveMind configuration from ~/.deeplake/credentials.json, with
// environment variables taking precedence for every overridable field.
// Returns null when unconfigured (missing token or orgId) or when the
// credentials file exists but is not valid JSON (fail closed).
function loadConfig() {
  const home = homedir2();
  const credPath = join2(home, ".deeplake", "credentials.json");
  let creds = null;
  if (existsSync(credPath)) {
    try {
      creds = JSON.parse(readFileSync(credPath, "utf-8"));
    } catch {
      // Corrupt credentials file: refuse to run half-configured.
      return null;
    }
  }
  const token = process.env.HIVEMIND_TOKEN ?? creds?.token;
  const orgId = process.env.HIVEMIND_ORG_ID ?? creds?.orgId;
  if (!token || !orgId)
    return null;
  // Fix: os.userInfo() throws a SystemError when the process has no passwd
  // entry (common in minimal containers); treat that as an unknown user
  // instead of crashing the hook.
  let osUser;
  try {
    osUser = userInfo().username;
  } catch {
    osUser = void 0;
  }
  return {
    token,
    orgId,
    orgName: creds?.orgName ?? orgId,
    userName: creds?.userName || osUser || "unknown",
    workspaceId: process.env.HIVEMIND_WORKSPACE_ID ?? creds?.workspaceId ?? "default",
    apiUrl: process.env.HIVEMIND_API_URL ?? creds?.apiUrl ?? "https://api.deeplake.ai",
    tableName: process.env.HIVEMIND_TABLE ?? "memory",
    sessionsTableName: process.env.HIVEMIND_SESSIONS_TABLE ?? "sessions",
    skillsTableName: process.env.HIVEMIND_SKILLS_TABLE ?? "skills",
    memoryPath: process.env.HIVEMIND_MEMORY_PATH ?? join2(home, ".deeplake", "memory")
  };
}
66
+
67
+ // dist/src/hooks/summary-state.js
68
+ import { readFileSync as readFileSync2, writeFileSync, writeSync, mkdirSync, renameSync, existsSync as existsSync2, unlinkSync, openSync, closeSync } from "node:fs";
69
+ import { homedir as homedir3 } from "node:os";
70
+ import { join as join3 } from "node:path";
71
// Debug logging and on-disk lock state for per-session summary workers.
var dlog = (msg) => log("summary-state", msg);
var STATE_DIR = join3(homedir3(), ".claude", "hooks", "summary-state");
// Never-notified shared buffer; Atomics.wait on it serves as a portable sleep.
var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4));

// Absolute path of the lock file guarding one session's summary worker.
function lockPath(sessionId) {
  return join3(STATE_DIR, `${sessionId}.lock`);
}

// Try to take the per-session lock. The lock file stores its creation time
// in epoch-ms; an existing lock younger than maxAgeMs wins, while an older
// or unreadable one is treated as stale and reclaimed. Returns true iff the
// lock was acquired by this call.
function tryAcquireLock(sessionId, maxAgeMs = 10 * 60 * 1e3) {
  mkdirSync(STATE_DIR, { recursive: true });
  const lockFile = lockPath(sessionId);
  if (existsSync2(lockFile)) {
    try {
      const stamp = parseInt(readFileSync2(lockFile, "utf-8"), 10);
      const ageMs = Date.now() - stamp;
      if (Number.isFinite(ageMs) && ageMs < maxAgeMs)
        return false;
    } catch (readErr) {
      dlog(`lock file unreadable for ${sessionId}, treating as stale: ${readErr.message}`);
    }
    try {
      unlinkSync(lockFile);
    } catch (unlinkErr) {
      dlog(`could not unlink stale lock for ${sessionId}: ${unlinkErr.message}`);
      return false;
    }
  }
  let fd;
  try {
    // "wx" = exclusive create: loses the race if another process re-created it.
    fd = openSync(lockFile, "wx");
  } catch (e) {
    if (e.code === "EEXIST")
      return false;
    throw e;
  }
  try {
    writeSync(fd, String(Date.now()));
  } finally {
    closeSync(fd);
  }
  return true;
}
109
+
110
+ // dist/src/hooks/hermes/spawn-wiki-worker.js
111
+ import { spawn, execSync } from "node:child_process";
112
+ import { fileURLToPath } from "node:url";
113
+ import { dirname, join as join5 } from "node:path";
114
+ import { writeFileSync as writeFileSync2, mkdirSync as mkdirSync3 } from "node:fs";
115
+ import { homedir as homedir4, tmpdir } from "node:os";
116
+
117
+ // dist/src/utils/wiki-log.js
118
+ import { mkdirSync as mkdirSync2, appendFileSync as appendFileSync2 } from "node:fs";
119
+ import { join as join4 } from "node:path";
120
// Build a best-effort file logger rooted at hooksDir. The returned object
// exposes the resolved log path and a log(msg) that appends one timestamped
// line; all filesystem failures are swallowed so logging never breaks a hook.
function makeWikiLogger(hooksDir, filename = "deeplake-wiki.log") {
  const logPath = join4(hooksDir, filename);
  const append = (msg) => {
    try {
      mkdirSync2(hooksDir, { recursive: true });
      appendFileSync2(logPath, `[${utcTimestamp()}] ${msg}\n`);
    } catch {
      // best-effort only
    }
  };
  return { path: logPath, log: append };
}
134
+
135
+ // dist/src/hooks/hermes/spawn-wiki-worker.js
136
+ // Module-level wiki state shared by the hermes spawn helper below:
+ // logger rooted under ~/.hermes/hooks, plus the worker prompt template.
+ var HOME = homedir4();
137
+ var wikiLogger = makeWikiLogger(join5(HOME, ".hermes", "hooks"));
138
+ var WIKI_LOG = wikiLogger.path;
139
+ // Prompt handed to the wiki worker. The __PLACEHOLDER__ tokens are presumably
+ // substituted by wiki-worker.js before invoking the model — confirm there.
+ // NOTE: everything up to the closing backtick is one template literal; do not
+ // insert text inside it.
+ var WIKI_PROMPT_TEMPLATE = `You are building a personal wiki from a coding session. Your goal is to extract every piece of knowledge \u2014 entities, decisions, relationships, and facts \u2014 into a structured, searchable wiki entry.
140
+
141
+ SESSION JSONL path: __JSONL__
142
+ SUMMARY FILE to write: __SUMMARY__
143
+ SESSION ID: __SESSION_ID__
144
+ PROJECT: __PROJECT__
145
+ PREVIOUS JSONL OFFSET (lines already processed): __PREV_OFFSET__
146
+ CURRENT JSONL LINES: __JSONL_LINES__
147
+
148
+ Steps:
149
+ 1. Read the session JSONL at the path above.
150
+ - If PREVIOUS JSONL OFFSET > 0, this is a resumed session. Read the existing summary file first,
151
+ then focus on lines AFTER the offset for new content. Merge new facts into the existing summary.
152
+ - If offset is 0, generate from scratch.
153
+
154
+ 2. Write the summary file at the path above with this EXACT format:
155
+
156
+ # Session __SESSION_ID__
157
+ - **Source**: __JSONL_SERVER_PATH__
158
+ - **Started**: <extract from JSONL>
159
+ - **Ended**: <now>
160
+ - **Project**: __PROJECT__
161
+ - **JSONL offset**: __JSONL_LINES__
162
+
163
+ ## What Happened
164
+ <2-3 dense sentences. What was the goal, what was accomplished, what's left.>
165
+
166
+ ## People
167
+ <For each person mentioned: name, role, what they did/said. Format: **Name** \u2014 role \u2014 action>
168
+
169
+ ## Entities
170
+ <Every named thing: repos, branches, files, APIs, tools, services, tables, features, bugs.
171
+ Format: **entity** (type) \u2014 what was done with it, its current state>
172
+
173
+ ## Decisions & Reasoning
174
+ <Every decision made and WHY.>
175
+
176
+ ## Key Facts
177
+ <Bullet list of atomic facts that could answer future questions.>
178
+
179
+ ## Files Modified
180
+ <bullet list: path (new/modified/deleted) \u2014 what changed>
181
+
182
+ ## Open Questions / TODO
183
+ <Anything unresolved, blocked, or explicitly deferred>
184
+
185
+ IMPORTANT: Be exhaustive. Extract EVERY entity, decision, and fact.
186
+ PRIVACY: Never include absolute filesystem paths in the summary.
187
+ LENGTH LIMIT: Keep the total summary under 4000 characters.`;
188
+ // Convenience alias so call sites can log without holding the logger object.
+ var wikiLog = wikiLogger.log;
189
// Resolve the hermes CLI path via `which`; fall back to the bare command
// name (resolved through PATH at spawn time) when lookup fails or is empty.
function findHermesBin() {
  let resolved = "";
  try {
    resolved = execSync("which hermes 2>/dev/null", { encoding: "utf-8" }).trim();
  } catch {
    // `which` exited non-zero or is unavailable — use the bare name.
  }
  return resolved || "hermes";
}
196
// Spawn the detached wiki-summary worker for a finished hermes session.
// Writes a one-shot config.json into a private temp dir, then launches
// bundle/wiki-worker.js via nohup so it outlives this hook process.
function spawnHermesWikiWorker(opts) {
  const { config, sessionId, cwd, bundleDir, reason } = opts;
  const projectName = cwd.split("/").pop() || "unknown";
  const tmpDir = join5(tmpdir(), `deeplake-wiki-${sessionId}-${Date.now()}`);
  // Fix: restrict to 0o700 dir / 0o600 file, matching spawnSkilifyWorker —
  // config.json carries the API token and must not be readable by other
  // local users.
  mkdirSync3(tmpDir, { recursive: true, mode: 448 });
  const configFile = join5(tmpDir, "config.json");
  writeFileSync2(configFile, JSON.stringify({
    apiUrl: config.apiUrl,
    token: config.token,
    orgId: config.orgId,
    workspaceId: config.workspaceId,
    memoryTable: config.tableName,
    sessionsTable: config.sessionsTableName,
    sessionId,
    userName: config.userName,
    project: projectName,
    tmpDir,
    hermesBin: findHermesBin(),
    hermesProvider: process.env.HIVEMIND_HERMES_PROVIDER ?? "openrouter",
    hermesModel: process.env.HIVEMIND_HERMES_MODEL ?? "anthropic/claude-haiku-4-5",
    wikiLog: WIKI_LOG,
    hooksDir: join5(HOME, ".hermes", "hooks"),
    promptTemplate: WIKI_PROMPT_TEMPLATE
  }), { mode: 384 });
  wikiLog(`${reason}: spawning summary worker for ${sessionId}`);
  const workerPath = join5(bundleDir, "wiki-worker.js");
  // Detached + nohup + unref: the worker keeps running after this hook exits.
  spawn("nohup", ["node", workerPath, configFile], {
    detached: true,
    stdio: ["ignore", "ignore", "ignore"]
  }).unref();
  wikiLog(`${reason}: spawned summary worker for ${sessionId}`);
}
228
// Resolve the directory containing the current bundle from import.meta.url.
function bundleDirFromImportMeta(importMetaUrl) {
  const filePath = fileURLToPath(importMetaUrl);
  return dirname(filePath);
}
231
+
232
+ // dist/src/skilify/spawn-skilify-worker.js
233
+ import { spawn as spawn2 } from "node:child_process";
234
+ import { fileURLToPath as fileURLToPath2 } from "node:url";
235
+ import { dirname as dirname2, join as join7 } from "node:path";
236
+ import { writeFileSync as writeFileSync3, mkdirSync as mkdirSync4, appendFileSync as appendFileSync3, chmodSync } from "node:fs";
237
+ import { homedir as homedir6, tmpdir as tmpdir2 } from "node:os";
238
+
239
+ // dist/src/skilify/gate-runner.js
240
+ import { execFileSync } from "node:child_process";
241
+ import { existsSync as existsSync3 } from "node:fs";
242
+ import { homedir as homedir5 } from "node:os";
243
+ import { join as join6 } from "node:path";
244
// Locate the CLI binary for a given agent. PATH lookup via `which` wins;
// otherwise each agent has a conventional fallback location. Unknown agent
// identifiers yield undefined, exactly like the original switch.
function findAgentBin(agent) {
  const locate = (name) => {
    try {
      const found = execFileSync("which", [name], {
        encoding: "utf-8",
        stdio: ["ignore", "pipe", "ignore"]
      }).trim();
      return found || null;
    } catch {
      return null;
    }
  };
  const resolvers = {
    claude_code: () => locate("claude") ?? join6(homedir5(), ".claude", "local", "claude"),
    codex: () => locate("codex") ?? "/usr/local/bin/codex",
    cursor: () => locate("cursor-agent") ?? "/usr/local/bin/cursor-agent",
    hermes: () => locate("hermes") ?? join6(homedir5(), ".local", "bin", "hermes"),
    pi: () => locate("pi") ?? join6(homedir5(), ".local", "bin", "pi")
  };
  return resolvers[agent]?.();
}
269
+
270
+ // dist/src/skilify/spawn-skilify-worker.js
271
// Skilify log lives under ~/.claude/hooks regardless of agent.
var HOME2 = homedir6();
var SKILIFY_LOG = join7(HOME2, ".claude", "hooks", "skilify.log");
// Append one timestamped line to the skilify log. Failures are swallowed:
// logging must never break the calling hook.
function skilifyLog(msg) {
  try {
    const dir = dirname2(SKILIFY_LOG);
    mkdirSync4(dir, { recursive: true });
    appendFileSync3(SKILIFY_LOG, `[${utcTimestamp()}] ${msg}\n`);
  } catch {
    // best-effort only
  }
}
281
// Spawn the detached skilify worker for a project. A one-shot config.json
// (containing the API token) is written into a private 0o700 temp dir with
// 0o600 permissions, then bundle/skilify-worker.js is launched via nohup so
// it survives this hook process exiting.
function spawnSkilifyWorker(opts) {
  const { config, cwd, projectKey, project, bundleDir, agent, scopeConfig, currentSessionId, reason } = opts;
  const tmpDir = join7(tmpdir2(), `deeplake-skilify-${projectKey}-${Date.now()}`);
  mkdirSync4(tmpDir, { recursive: true, mode: 448 });
  const gateBin = findAgentBin(agent);
  const configFile = join7(tmpDir, "config.json");
  const payload = {
    apiUrl: config.apiUrl,
    token: config.token,
    orgId: config.orgId,
    workspaceId: config.workspaceId,
    sessionsTable: config.sessionsTableName,
    skillsTable: config.skillsTableName,
    userName: config.userName,
    cwd,
    projectKey,
    project,
    agent,
    scope: scopeConfig.scope,
    team: scopeConfig.team,
    install: scopeConfig.install,
    tmpDir,
    gateBin,
    cursorModel: process.env.HIVEMIND_CURSOR_MODEL,
    hermesProvider: process.env.HIVEMIND_HERMES_PROVIDER,
    hermesModel: process.env.HIVEMIND_HERMES_MODEL,
    piProvider: process.env.HIVEMIND_PI_PROVIDER,
    piModel: process.env.HIVEMIND_PI_MODEL,
    skilifyLog: SKILIFY_LOG,
    currentSessionId
  };
  writeFileSync3(configFile, JSON.stringify(payload), { mode: 384 });
  try {
    // Belt-and-braces: mode on writeFileSync is ignored if the file existed.
    chmodSync(configFile, 384);
  } catch {
  }
  skilifyLog(`${reason}: spawning skilify worker for project=${project} key=${projectKey}`);
  const workerPath = join7(bundleDir, "skilify-worker.js");
  spawn2("nohup", ["node", workerPath, configFile], {
    detached: true,
    stdio: ["ignore", "ignore", "ignore"]
  }).unref();
  skilifyLog(`${reason}: spawned skilify worker for ${projectKey}`);
}
324
+
325
+ // dist/src/skilify/state.js
326
+ import { readFileSync as readFileSync3, writeFileSync as writeFileSync4, writeSync as writeSync2, mkdirSync as mkdirSync5, renameSync as renameSync2, existsSync as existsSync4, unlinkSync as unlinkSync2, openSync as openSync2, closeSync as closeSync2 } from "node:fs";
327
+ import { execSync as execSync2 } from "node:child_process";
328
+ import { homedir as homedir7 } from "node:os";
329
+ import { createHash } from "node:crypto";
330
+ import { join as join8, basename } from "node:path";
331
// Debug logger and on-disk state locations for the skilify turn counter.
var dlog2 = (msg) => log("skilify-state", msg);
var STATE_DIR2 = join8(homedir7(), ".deeplake", "state", "skilify");
// Never-notified shared buffer; Atomics.wait on it serves as a portable sleep.
var YIELD_BUF2 = new Int32Array(new SharedArrayBuffer(4));
// Turns between skilify runs; env-overridable, defaults to 20 when the
// override is missing or not a positive integer.
var TRIGGER_THRESHOLD = (() => {
  const parsed = Number(process.env.HIVEMIND_SKILIFY_EVERY_N_TURNS ?? "");
  if (Number.isInteger(parsed) && parsed > 0)
    return parsed;
  return 20;
})();
// JSON state file for one project.
function statePath(projectKey) {
  return join8(STATE_DIR2, `${projectKey}.json`);
}
// Worker lock file for one project.
function lockPath2(projectKey) {
  return join8(STATE_DIR2, `${projectKey}.lock`);
}
344
// Derive a stable 16-hex-char project key plus a display name for cwd.
// The key hashes the git remote origin URL when available (so clones of the
// same repo share a key), otherwise the absolute path.
function deriveProjectKey(cwd) {
  const project = basename(cwd) || "unknown";
  let signature = null;
  try {
    const origin = execSync2("git config --get remote.origin.url", {
      cwd,
      encoding: "utf-8",
      stdio: ["ignore", "pipe", "ignore"]
    }).trim();
    signature = origin || null;
  } catch {
    // Not a git repo, or git unavailable — fall back to the path.
  }
  const key = createHash("sha1").update(signature ?? cwd).digest("hex").slice(0, 16);
  return { key, project };
}
359
// Read a project's skilify state; null when absent or unparseable.
function readState(projectKey) {
  const file = statePath(projectKey);
  if (!existsSync4(file))
    return null;
  try {
    const raw = readFileSync3(file, "utf-8");
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
369
// Persist a project's skilify state via write-to-temp + rename, which is
// atomic within the same directory on POSIX filesystems.
function writeState(projectKey, state) {
  mkdirSync5(STATE_DIR2, { recursive: true });
  const target = statePath(projectKey);
  const scratch = `${target}.${process.pid}.${Date.now()}.tmp`;
  writeFileSync4(scratch, JSON.stringify(state, null, 2));
  renameSync2(scratch, target);
}
376
+ // Serialize read-modify-write access to one project's skilify state.
+ // A sidecar "<lock>.rmw" file created with O_EXCL ("wx") acts as the mutex;
+ // fn() runs while the lock is held and its return value is passed through.
+ function withRmwLock(projectKey, fn) {
377
+ mkdirSync5(STATE_DIR2, { recursive: true });
378
+ const rmw = lockPath2(projectKey) + ".rmw";
379
+ // Spin for at most ~2s; past the deadline the holder is presumed dead and
+ // its lock is reclaimed.
+ const deadline = Date.now() + 2e3;
380
+ let fd = null;
381
+ while (fd === null) {
382
+ try {
383
+ fd = openSync2(rmw, "wx");
384
+ } catch (e) {
385
+ if (e.code !== "EEXIST")
386
+ throw e;
387
+ if (Date.now() > deadline) {
388
+ dlog2(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`);
389
+ try {
390
+ unlinkSync2(rmw);
391
+ } catch (unlinkErr) {
392
+ dlog2(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`);
393
+ }
394
+ // NOTE(review): the deadline is not reset after reclaiming, so sustained
+ // contention past the deadline unlinks fresh locks on every retry — confirm
+ // this is the intended last-resort behavior.
+ continue;
395
+ }
396
+ // ~10ms synchronous sleep: Atomics.wait on a buffer that is never notified.
+ Atomics.wait(YIELD_BUF2, 0, 0, 10);
397
+ }
398
+ }
399
+ try {
400
+ return fn();
401
+ } finally {
402
+ // Always close and remove our lock, even if fn() throws.
+ closeSync2(fd);
403
+ try {
404
+ unlinkSync2(rmw);
405
+ } catch (unlinkErr) {
406
+ dlog2(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`);
407
+ }
408
+ }
409
+ }
410
// Zero a project's turn counter under the read-modify-write lock.
// No-op when the project has no recorded state yet.
function resetCounter(projectKey) {
  withRmwLock(projectKey, () => {
    const current = readState(projectKey);
    if (!current)
      return;
    writeState(projectKey, { ...current, counter: 0, updatedAt: Date.now() });
  });
}
418
// Try to take the per-project skilify worker lock. The lock file stores its
// creation time in epoch-ms; a lock younger than maxAgeMs blocks acquisition,
// while an older or unreadable one is reclaimed. Unlike the summary-state
// variant, any failure to create the lock yields false rather than throwing.
function tryAcquireWorkerLock(projectKey, maxAgeMs = 10 * 60 * 1e3) {
  mkdirSync5(STATE_DIR2, { recursive: true });
  const lockFile = lockPath2(projectKey);
  if (existsSync4(lockFile)) {
    try {
      const stamp = parseInt(readFileSync3(lockFile, "utf-8"), 10);
      const ageMs = Date.now() - stamp;
      if (Number.isFinite(ageMs) && ageMs < maxAgeMs)
        return false;
    } catch (readErr) {
      dlog2(`worker lock unreadable for ${projectKey}, treating as stale: ${readErr.message}`);
    }
    try {
      unlinkSync2(lockFile);
    } catch (unlinkErr) {
      dlog2(`could not unlink stale worker lock for ${projectKey}: ${unlinkErr.message}`);
      return false;
    }
  }
  try {
    // "wx" = exclusive create; losing the race lands in the catch below.
    const fd = openSync2(lockFile, "wx");
    try {
      writeSync2(fd, String(Date.now()));
    } finally {
      closeSync2(fd);
    }
    return true;
  } catch {
    return false;
  }
}
448
// Remove the per-project worker lock. Best-effort: the lock may already be
// gone (or never created), and that is fine.
function releaseWorkerLock(projectKey) {
  try {
    unlinkSync2(lockPath2(projectKey));
  } catch {
    // nothing to release
  }
}
455
+
456
+ // dist/src/skilify/scope-config.js
457
+ import { existsSync as existsSync5, mkdirSync as mkdirSync6, readFileSync as readFileSync4, writeFileSync as writeFileSync5 } from "node:fs";
458
+ import { homedir as homedir8 } from "node:os";
459
+ import { join as join9 } from "node:path";
460
var STATE_DIR3 = join9(homedir8(), ".deeplake", "state", "skilify");
var CONFIG_PATH = join9(STATE_DIR3, "config.json");
// Canonical defaults; never hand this object (or its team array) to callers.
var DEFAULT = { scope: "me", team: [], install: "project" };
// Fresh copy of the defaults. Fix: loadScopeConfig used to return the shared
// DEFAULT object directly, so one caller mutating its result (e.g. pushing
// into team) silently corrupted the defaults for the whole process.
function defaultScopeConfig() {
  return { scope: DEFAULT.scope, team: [...DEFAULT.team], install: DEFAULT.install };
}
// Load and sanitize the skilify scope config. Unknown/invalid fields fall
// back to safe values; a missing or unparseable file yields the defaults.
function loadScopeConfig() {
  if (!existsSync5(CONFIG_PATH))
    return defaultScopeConfig();
  try {
    const raw = JSON.parse(readFileSync4(CONFIG_PATH, "utf-8"));
    const scope = raw.scope === "team" || raw.scope === "org" ? raw.scope : "me";
    const team = Array.isArray(raw.team) ? raw.team.filter((s) => typeof s === "string") : [];
    const install = raw.install === "global" ? "global" : "project";
    return { scope, team, install };
  } catch {
    return defaultScopeConfig();
  }
}
476
+
477
+ // dist/src/skilify/triggers.js
478
// SessionEnd trigger: force a skilify run for the project regardless of the
// turn counter. No-ops inside worker processes (HIVEMIND_SKILIFY_WORKER=1)
// and when cwd is unknown. All failures are logged, never thrown.
function forceSessionEndTrigger(opts) {
  if (process.env.HIVEMIND_SKILIFY_WORKER === "1")
    return;
  const { config, cwd, bundleDir, agent, sessionId } = opts;
  if (!cwd)
    return;
  try {
    const { key: projectKey, project } = deriveProjectKey(cwd);
    if (!tryAcquireWorkerLock(projectKey)) {
      skilifyLog(`SessionEnd: skilify worker already running for ${projectKey}, skipping`);
      return;
    }
    // Zero the turn counter (when state exists) so the periodic trigger
    // starts counting again from this forced run.
    if (readState(projectKey))
      resetCounter(projectKey);
    skilifyLog(`SessionEnd: spawning skilify worker for project=${project} agent=${agent}`);
    try {
      spawnSkilifyWorker({
        config,
        cwd,
        projectKey,
        project,
        bundleDir,
        agent,
        scopeConfig: loadScopeConfig(),
        currentSessionId: sessionId,
        reason: "SessionEnd"
      });
    } catch (e) {
      skilifyLog(`SessionEnd spawn failed: ${e?.message ?? e}`);
      // The worker will never run, so free the lock for the next attempt.
      try {
        releaseWorkerLock(projectKey);
      } catch {
      }
    }
  } catch (e) {
    skilifyLog(`SessionEnd trigger error: ${e?.message ?? e}`);
  }
}
516
+
31
517
  // dist/src/hooks/hermes/session-end.js
32
518
var log2 = (msg) => log("hermes-session-end", msg);
// Release the per-session summary lock taken by tryAcquireLock().
// Best-effort: the lock may already have been removed.
function releaseSessionLock(sessionId) {
  try {
    unlinkSync(lockPath(sessionId));
  } catch {
  }
}
// SessionEnd entry point: read the hook payload from stdin, then
// (1) spawn the wiki-summary worker and (2) fire the skilify trigger.
async function main() {
  // Guard against re-entrancy from our own spawned workers.
  if (process.env.HIVEMIND_WIKI_WORKER === "1")
    return;
  const input = await readStdin();
  const sessionId = input.session_id ?? "";
  log2(`session=${sessionId || "?"} cwd=${input.cwd ?? "?"}`);
  if (!sessionId)
    return;
  if (!tryAcquireLock(sessionId)) {
    wikiLog(`SessionEnd: periodic worker already running for ${sessionId}, skipping final`);
    return;
  }
  const config = loadConfig();
  if (!config) {
    wikiLog(`SessionEnd: no config, skipping summary`);
    // Fix: no worker was spawned, so nothing will ever clear the lock we just
    // took — release it instead of leaving it to go stale for ~10 minutes.
    releaseSessionLock(sessionId);
    return;
  }
  const cwd = input.cwd ?? process.cwd();
  try {
    spawnHermesWikiWorker({
      config,
      sessionId,
      cwd,
      bundleDir: bundleDirFromImportMeta(import.meta.url),
      reason: "SessionEnd"
    });
  } catch (e) {
    wikiLog(`SessionEnd: wiki spawn failed: ${e?.message ?? e}`);
    // Fix: mirror the skilify trigger, which releases its worker lock when a
    // spawn fails, so a retry is not blocked by a lock no worker will clear.
    releaseSessionLock(sessionId);
  }
  try {
    forceSessionEndTrigger({
      config,
      cwd,
      bundleDir: bundleDirFromImportMeta(import.meta.url),
      agent: "hermes",
      sessionId
    });
  } catch (e) {
    wikiLog(`SessionEnd: skilify trigger failed: ${e?.message ?? e}`);
  }
}
39
560
  main().catch((e) => {
40
561
  log2(`fatal: ${e.message}`);