@deeplake/hivemind 0.7.4 → 0.7.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/marketplace.json +2 -2
- package/.claude-plugin/plugin.json +1 -1
- package/README.md +97 -0
- package/bundle/cli.js +820 -20
- package/codex/bundle/capture.js +40 -10
- package/codex/bundle/commands/auth-login.js +84 -18
- package/codex/bundle/pre-tool-use.js +41 -11
- package/codex/bundle/session-start-setup.js +40 -10
- package/codex/bundle/session-start.js +27 -3
- package/codex/bundle/shell/deeplake-shell.js +41 -11
- package/codex/bundle/skilify-worker.js +907 -0
- package/codex/bundle/stop.js +373 -51
- package/cursor/bundle/capture.js +354 -13
- package/cursor/bundle/commands/auth-login.js +84 -18
- package/cursor/bundle/pre-tool-use.js +40 -10
- package/cursor/bundle/session-end.js +303 -6
- package/cursor/bundle/session-start.js +68 -14
- package/cursor/bundle/shell/deeplake-shell.js +41 -11
- package/cursor/bundle/skilify-worker.js +907 -0
- package/hermes/bundle/capture.js +354 -13
- package/hermes/bundle/commands/auth-login.js +84 -18
- package/hermes/bundle/pre-tool-use.js +40 -10
- package/hermes/bundle/session-end.js +305 -7
- package/hermes/bundle/session-start.js +68 -14
- package/hermes/bundle/shell/deeplake-shell.js +41 -11
- package/hermes/bundle/skilify-worker.js +907 -0
- package/mcp/bundle/server.js +41 -11
- package/openclaw/dist/chunks/{config-G23NI5TV.js → config-ZLH6JFJS.js} +1 -0
- package/openclaw/dist/index.js +185 -16
- package/openclaw/dist/skilify-worker.js +907 -0
- package/openclaw/openclaw.plugin.json +1 -1
- package/openclaw/package.json +2 -2
- package/openclaw/skills/SKILL.md +19 -0
- package/package.json +6 -1
- package/pi/extension-source/hivemind.ts +130 -1
package/openclaw/package.json
CHANGED
package/openclaw/skills/SKILL.md
CHANGED
@@ -45,6 +45,25 @@ Do NOT jump straight to reading raw JSONL files. Always start with `hivemind_ind
 - `/hivemind_update` — shows how to install (ask the agent, or run `openclaw plugins update hivemind` in your terminal)
 - `/hivemind_autoupdate [on|off]` — toggle the agent-facing update nudge (on by default: when a newer version is available, the agent is prompted to install it via `exec` if you ask to update)
 
+## Skill Management (skilify)
+
+Hivemind also mines reusable Claude skills from agent sessions and stores them in a per-org Deeplake table. Openclaw itself doesn't run sessions to mine, but you can pull skills others have already mined for the user. These run in the user's terminal (the openclaw plugin does not register them as `/hivemind_*` commands):
+
+- `hivemind skilify` — show scope/team/install + per-project state
+- `hivemind skilify pull` — sync skills for the current project from the org table
+- `hivemind skilify pull --user <email>` — only that author's skills
+- `hivemind skilify pull --users a,b,c` — multiple authors (CSV)
+- `hivemind skilify pull --all-users` — explicit "no author filter"
+- `hivemind skilify pull --to project|global` — install location (`<cwd>/.claude/skills/` vs `~/.claude/skills/`)
+- `hivemind skilify pull --dry-run` — preview without touching disk
+- `hivemind skilify pull --force` — overwrite local (creates `.bak`)
+- `hivemind skilify pull <skill-name>` — pull only that one skill (combines with `--user`)
+- `hivemind skilify scope <me|team|org>` — set sharing scope for new skills
+- `hivemind skilify install <project|global>` — default install location
+- `hivemind skilify team add|remove|list <name>` — manage team list
+
+If the user asks to "pull skills from X", "share skills with the team", or similar, suggest the matching `hivemind skilify` command. Run `hivemind skilify --help` for the full reference.
+
 ## Limits
 
 Do NOT delegate to subagents when reading Hivemind memory. If a tool call returns empty after 2 attempts, skip it and move on. Report what you found rather than exhaustively retrying.
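The `--to project|global` flag in the SKILL.md section above only selects where pulled skills are written. A minimal TypeScript sketch of that mapping, using a hypothetical helper name (the real resolution lives in the skilify bundle, which this diff does not show):

```ts
import { join } from "node:path";
import { homedir } from "node:os";

// Hypothetical helper, for illustration only: "project" installs under the
// current checkout, "global" under the user's home Claude config.
function resolveSkillInstallDirSketch(to: "project" | "global", cwd: string): string {
  return to === "project"
    ? join(cwd, ".claude", "skills")        // <cwd>/.claude/skills/
    : join(homedir(), ".claude", "skills"); // ~/.claude/skills/
}
```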
package/package.json
CHANGED
@@ -1,8 +1,12 @@
 {
   "name": "@deeplake/hivemind",
-  "version": "0.7.4",
+  "version": "0.7.11",
   "description": "Cloud-backed persistent shared memory for AI agents powered by Deeplake",
   "type": "module",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/activeloopai/hivemind.git"
+  },
   "publishConfig": {
     "access": "public"
   },
@@ -35,6 +39,7 @@
     "typecheck": "tsc --noEmit",
     "dup": "jscpd src",
     "audit:openclaw": "node scripts/audit-openclaw-bundle.mjs",
+    "pack:check": "node scripts/pack-check.mjs",
     "ci": "npm run typecheck && npm run dup && npm test",
     "prepare": "husky",
     "prepublishOnly": "npm run build"
package/pi/extension-source/hivemind.ts
CHANGED
@@ -32,6 +32,7 @@ import { homedir, tmpdir } from "node:os";
 import { join, dirname } from "node:path";
 import { connect } from "node:net";
 import { spawn, execSync } from "node:child_process";
+import { createHash } from "node:crypto";
 
 // ---------- diagnostic logging --------------------------------------------------
 //
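The only change in this hunk is the new `createHash` import; the larger hunk below uses it to derive a stable per-project key. A standalone sketch of that call chain (the cwd value here is purely illustrative):

```ts
import { createHash } from "node:crypto";

// sha1 of the working directory, truncated to 16 hex chars, matching the
// derivation used by deriveSkilifyProjectKey further down in this file.
const exampleCwd = "/home/alice/repos/hivemind"; // illustrative path only
const projectKey = createHash("sha1").update(exampleCwd).digest("hex").slice(0, 16);
console.log(projectKey); // always 16 lowercase hex characters
```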
@@ -198,6 +199,11 @@ function tryEmbedOverSocket(text: string, kind: "document" | "query"): Promise<n
 
 const SUMMARY_STATE_DIR = join(homedir(), ".claude", "hooks", "summary-state");
 const PI_WIKI_WORKER_PATH = join(homedir(), ".pi", "agent", "hivemind", "wiki-worker.js");
+// Skilify worker installed alongside wiki-worker by `hivemind pi install`.
+// Spawned on session_shutdown to mine reusable Claude skills from the just-
+// finished session. Same shared bundle used by CC/Codex/Cursor/Hermes.
+const PI_SKILIFY_WORKER_PATH = join(homedir(), ".pi", "agent", "hivemind", "skilify-worker.js");
+const SKILIFY_STATE_DIR = join(homedir(), ".deeplake", "state", "skilify");
 
 interface SummaryState {
   lastSummaryAt: number;
@@ -392,6 +398,96 @@ function spawnWikiWorker(
   }
 }
 
+// ---------- skilify worker spawn ---------------------------------------------
+//
+// Mirror of src/skilify/spawn-skilify-worker.ts and src/skilify/triggers.ts —
+// inlined here because pi/extension-source/hivemind.ts is shipped as raw .ts
+// with zero non-builtin runtime dependencies (pi compiles + loads it at
+// extension-load time). The shared TypeScript modules under src/skilify/
+// can't be imported from this file.
+//
+// The skilify worker mines the just-finished session for reusable Claude
+// skills, gates each cluster via a model call, and writes SKILL.md files +
+// rows in the org's skills Deeplake table.
+
+/** Stable project key — sha1(cwd) truncated, mirrors src/skilify/state.ts deriveProjectKey. */
+function deriveSkilifyProjectKey(cwd: string): { key: string; project: string } {
+  const project = (cwd ?? "").split("/").pop() || "unknown";
+  // Pi's extension can't easily run `git config` synchronously here; use cwd
+  // as the signature. Two checkouts of the same repo at different paths get
+  // different project_keys, which is acceptable for pi (the other agents
+  // hash the git remote when available; pi falls back to cwd-only).
+  const key = createHash("sha1").update(cwd ?? "").digest("hex").slice(0, 16);
+  return { key, project };
+}
+
+function spawnPiSkilifyWorker(creds: Creds, sessionId: string, cwd: string): void {
+  if (!existsSync(PI_SKILIFY_WORKER_PATH)) {
+    logHm(`spawnPiSkilifyWorker: no worker at ${PI_SKILIFY_WORKER_PATH} — install via 'hivemind pi install' or rebuild`);
+    return;
+  }
+  const { key: projectKey, project } = deriveSkilifyProjectKey(cwd);
+
+  // No spawn-side lock: the worker itself acquires `<projectKey>.lock` via
+  // src/skilify/state.ts:tryAcquireWorkerLock and releases it on exit (with
+  // a 10-min stale-lock fallback). A spawn-side lock here would create a
+  // SECOND lockfile (`<projectKey>.worker.lock`) that nobody releases,
+  // permanently blocking subsequent spawns from the same Pi runtime
+  // instance. Let the worker's own lock be the single source of truth;
+  // back-to-back spawns where a worker is in flight cost only one extra
+  // node cold-start (~50ms) before the worker self-skips on the lock.
+
+  const tmpDir = join(tmpdir(), `deeplake-skilify-${projectKey}-${Date.now()}`);
+  try { mkdirSync(tmpDir, { recursive: true, mode: 0o700 }); }
+  catch (e: any) { logHm(`spawnPiSkilifyWorker: mkdir failed: ${e?.message ?? e}`); return; }
+  const configPath = join(tmpDir, "config.json");
+
+  // Same shape the spawn-skilify-worker.ts module writes for the other agents.
+  // Defaults match scope-config.ts: scope=me, install=project, no team list.
+  // Pi-specific: no per-agent gate binary (`gateBin: null`) — the worker's
+  // gate-runner falls back to its agent dispatch which for `agent: "pi"`
+  // resolves to the `pi --print` invocation we'd want for consistency.
+  const config = {
+    apiUrl: creds.apiUrl,
+    token: creds.token,
+    orgId: creds.orgId,
+    workspaceId: creds.workspaceId,
+    sessionsTable: SESSIONS_TABLE,
+    skillsTable: process.env.HIVEMIND_SKILLS_TABLE || "skills",
+    userName: creds.userName,
+    cwd,
+    projectKey,
+    project,
+    agent: "pi",
+    scope: "me" as const,
+    team: [] as string[],
+    install: "project" as const,
+    tmpDir,
+    gateBin: findPiBin(),
+    cursorModel: process.env.HIVEMIND_CURSOR_MODEL,
+    hermesProvider: process.env.HIVEMIND_HERMES_PROVIDER,
+    hermesModel: process.env.HIVEMIND_HERMES_MODEL,
+    // pi-specific gate args — match wikiWorker config defaults (google + gemini-2.5-flash)
+    piProvider: process.env.HIVEMIND_PI_PROVIDER ?? "google",
+    piModel: process.env.HIVEMIND_PI_MODEL ?? "gemini-2.5-flash",
+    skilifyLog: join(homedir(), ".deeplake", "hivemind-pi-skilify.log"),
+    currentSessionId: sessionId,
+  };
+  try { writeFileSync(configPath, JSON.stringify(config), { mode: 0o600 }); }
+  catch (e: any) { logHm(`spawnPiSkilifyWorker: config write failed: ${e?.message ?? e}`); return; }
+
+  logHm(`spawnPiSkilifyWorker: spawning ${PI_SKILIFY_WORKER_PATH} project=${project} key=${projectKey} session=${sessionId}`);
+  try {
+    spawn(process.execPath, [PI_SKILIFY_WORKER_PATH, configPath], {
+      detached: true,
+      stdio: "ignore",
+      env: { ...process.env, HIVEMIND_SKILIFY_WORKER: "1", HIVEMIND_CAPTURE: "false" },
+    }).unref();
+  } catch (e: any) {
+    logHm(`spawnPiSkilifyWorker: spawn failed: ${e?.message ?? e}`);
+  }
+}
+
 function maybeTriggerPeriodicSummary(creds: Creds, sessionId: string, cwd: string): void {
   if (process.env.HIVEMIND_CAPTURE === "false") return;
   const state = bumpCounter(sessionId);
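The `config` object above is the whole contract handed to the detached worker. A rough TypeScript shape for it, reconstructed from the fields written here rather than from the shared src/skilify/ modules (which are not part of this diff):

```ts
// Reconstructed from the config literal above; names and optionality reflect
// what the pi extension writes, not an authoritative type from src/skilify/.
interface SkilifyWorkerConfigSketch {
  apiUrl: string;
  token: string;
  orgId: string;
  workspaceId: string;
  sessionsTable: string;
  skillsTable: string;            // HIVEMIND_SKILLS_TABLE, defaults to "skills"
  userName: string;
  cwd: string;
  projectKey: string;             // sha1(cwd) truncated to 16 hex chars
  project: string;                // last path segment of cwd
  agent: "pi" | string;           // other agents write their own name
  scope: "me" | "team" | "org";   // pi always writes "me"
  team: string[];
  install: "project" | "global";  // pi always writes "project"
  tmpDir: string;
  gateBin: string | null;         // findPiBin() result
  cursorModel?: string;           // env-derived fields may be undefined
  hermesProvider?: string;
  hermesModel?: string;
  piProvider: string;             // default "google"
  piModel: string;                // default "gemini-2.5-flash"
  skilifyLog: string;
  currentSessionId: string;
}
```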
@@ -541,7 +637,32 @@ Three hivemind tools are registered:
 hivemind_read { path } read full content at a memory path
 hivemind_index { prefix?, limit? } list summary entries
 
-Prefer these tools — one call returns ranked hits across all summaries and sessions in a single SQL query. Different paths under /summaries/<username>/ are different users; do NOT merge or alias them. Fall back to grep on ~/.deeplake/memory/ only if tools are unavailable
+Prefer these tools — one call returns ranked hits across all summaries and sessions in a single SQL query. Different paths under /summaries/<username>/ are different users; do NOT merge or alias them. Fall back to grep on ~/.deeplake/memory/ only if tools are unavailable.
+
+Organization management — each argument is SEPARATE (do NOT quote subcommands together):
+- hivemind login — SSO login
+- hivemind whoami — show current user/org
+- hivemind org list — list organizations
+- hivemind org switch <name-or-id> — switch organization
+- hivemind workspaces — list workspaces
+- hivemind workspace <id> — switch workspace
+- hivemind invite <email> <ADMIN|WRITE|READ> — invite member (ALWAYS ask user which role before inviting)
+- hivemind members — list members
+- hivemind remove <user-id> — remove member
+
+SKILLS (skilify) — mine + share reusable skills across the org. Run these in a terminal (or via shell if available):
+- hivemind skilify — show scope/team/install + per-project state
+- hivemind skilify pull — sync project skills from the org table
+- hivemind skilify pull --user <email> — only that author's skills
+- hivemind skilify pull --users a,b,c — multiple authors (CSV)
+- hivemind skilify pull --all-users — explicit "no author filter"
+- hivemind skilify pull --to project|global — install location
+- hivemind skilify pull --dry-run — preview only
+- hivemind skilify pull --force — overwrite local (creates .bak)
+- hivemind skilify pull <skill-name> — pull only that skill (combines with --user)
+- hivemind skilify scope <me|team|org> — sharing scope for new skills
+- hivemind skilify install <project|global> — default install location
+- hivemind skilify team add|remove|list <name> — manage team list`;
 
 export default function hivemindExtension(pi: ExtensionAPI): void {
   const captureEnabled = process.env.HIVEMIND_CAPTURE !== "false";
@@ -800,6 +921,14 @@ export default function hivemindExtension(pi: ExtensionAPI): void {
     // Always spawn for "final" — but the lock check inside spawnWikiWorker
     // skips if a periodic worker is mid-flight. Non-fatal either way.
     spawnWikiWorker(creds, sessionId, cwd, "final");
+
+    // Also kick off the skilify worker so this session's prompt+answer
+    // pairs become candidates for reusable skills. Lock keyed on
+    // projectKey, not sessionId — multiple sessions in the same project
+    // shouldn't race the gate. Non-fatal: failure here only loses the
+    // mining for this one session, never breaks the wiki summary above.
+    try { spawnPiSkilifyWorker(creds, sessionId, cwd); }
+    catch (e: any) { logHm(`session_shutdown: skilify spawn threw: ${e?.message ?? e}`); }
   });
 
   // Module-load breadcrumb so we know the extension's default export ran at all.