@deeplake/hivemind 0.7.4 → 0.7.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/marketplace.json +2 -2
- package/.claude-plugin/plugin.json +1 -1
- package/README.md +97 -0
- package/bundle/cli.js +820 -20
- package/codex/bundle/capture.js +40 -10
- package/codex/bundle/commands/auth-login.js +84 -18
- package/codex/bundle/pre-tool-use.js +41 -11
- package/codex/bundle/session-start-setup.js +40 -10
- package/codex/bundle/session-start.js +27 -3
- package/codex/bundle/shell/deeplake-shell.js +41 -11
- package/codex/bundle/skilify-worker.js +907 -0
- package/codex/bundle/stop.js +373 -51
- package/cursor/bundle/capture.js +354 -13
- package/cursor/bundle/commands/auth-login.js +84 -18
- package/cursor/bundle/pre-tool-use.js +40 -10
- package/cursor/bundle/session-end.js +303 -6
- package/cursor/bundle/session-start.js +68 -14
- package/cursor/bundle/shell/deeplake-shell.js +41 -11
- package/cursor/bundle/skilify-worker.js +907 -0
- package/hermes/bundle/capture.js +354 -13
- package/hermes/bundle/commands/auth-login.js +84 -18
- package/hermes/bundle/pre-tool-use.js +40 -10
- package/hermes/bundle/session-end.js +305 -7
- package/hermes/bundle/session-start.js +68 -14
- package/hermes/bundle/shell/deeplake-shell.js +41 -11
- package/hermes/bundle/skilify-worker.js +907 -0
- package/mcp/bundle/server.js +41 -11
- package/openclaw/dist/chunks/{config-G23NI5TV.js → config-ZLH6JFJS.js} +1 -0
- package/openclaw/dist/index.js +185 -16
- package/openclaw/dist/skilify-worker.js +907 -0
- package/openclaw/openclaw.plugin.json +1 -1
- package/openclaw/package.json +1 -1
- package/openclaw/skills/SKILL.md +19 -0
- package/package.json +6 -1
- package/pi/extension-source/hivemind.ts +130 -1

```diff
@@ -61,6 +61,7 @@ function loadConfig() {
     apiUrl: process.env.HIVEMIND_API_URL ?? creds?.apiUrl ?? "https://api.deeplake.ai",
     tableName: process.env.HIVEMIND_TABLE ?? "memory",
     sessionsTableName: process.env.HIVEMIND_SESSIONS_TABLE ?? "sessions",
+    skillsTableName: process.env.HIVEMIND_SKILLS_TABLE ?? "skills",
     memoryPath: process.env.HIVEMIND_MEMORY_PATH ?? join2(home, ".deeplake", "memory")
   };
 }
```
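The new `skillsTableName` key follows the same env-first resolution as the neighboring table names. A minimal sketch of that order, standalone rather than inside the real `loadConfig`:

```js
// Env override wins, else the literal default; same pattern as
// HIVEMIND_TABLE and HIVEMIND_SESSIONS_TABLE above.
const skillsTableName = process.env.HIVEMIND_SKILLS_TABLE ?? "skills";
```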
```diff
@@ -229,6 +230,291 @@ function bundleDirFromImportMeta(importMetaUrl) {
   return dirname(fileURLToPath(importMetaUrl));
 }
 
+// dist/src/skilify/spawn-skilify-worker.js
+import { spawn as spawn2 } from "node:child_process";
+import { fileURLToPath as fileURLToPath2 } from "node:url";
+import { dirname as dirname2, join as join7 } from "node:path";
+import { writeFileSync as writeFileSync3, mkdirSync as mkdirSync4, appendFileSync as appendFileSync3, chmodSync } from "node:fs";
+import { homedir as homedir6, tmpdir as tmpdir2 } from "node:os";
+
+// dist/src/skilify/gate-runner.js
+import { execFileSync } from "node:child_process";
+import { existsSync as existsSync3 } from "node:fs";
+import { homedir as homedir5 } from "node:os";
+import { join as join6 } from "node:path";
+function findAgentBin(agent) {
+  const which = (name) => {
+    try {
+      const out = execFileSync("which", [name], {
+        encoding: "utf-8",
+        stdio: ["ignore", "pipe", "ignore"]
+      });
+      return out.trim() || null;
+    } catch {
+      return null;
+    }
+  };
+  switch (agent) {
+    case "claude_code":
+      return which("claude") ?? join6(homedir5(), ".claude", "local", "claude");
+    case "codex":
+      return which("codex") ?? "/usr/local/bin/codex";
+    case "cursor":
+      return which("cursor-agent") ?? "/usr/local/bin/cursor-agent";
+    case "hermes":
+      return which("hermes") ?? join6(homedir5(), ".local", "bin", "hermes");
+    case "pi":
+      return which("pi") ?? join6(homedir5(), ".local", "bin", "pi");
+  }
+}
+
+// dist/src/skilify/spawn-skilify-worker.js
+var HOME2 = homedir6();
+var SKILIFY_LOG = join7(HOME2, ".claude", "hooks", "skilify.log");
+function skilifyLog(msg) {
+  try {
+    mkdirSync4(dirname2(SKILIFY_LOG), { recursive: true });
+    appendFileSync3(SKILIFY_LOG, `[${utcTimestamp()}] ${msg}
+`);
+  } catch {
+  }
+}
+function spawnSkilifyWorker(opts) {
+  const { config, cwd, projectKey, project, bundleDir, agent, scopeConfig, currentSessionId, reason } = opts;
+  const tmpDir = join7(tmpdir2(), `deeplake-skilify-${projectKey}-${Date.now()}`);
+  mkdirSync4(tmpDir, { recursive: true, mode: 448 });
+  const gateBin = findAgentBin(agent);
+  const configFile = join7(tmpDir, "config.json");
+  writeFileSync3(configFile, JSON.stringify({
+    apiUrl: config.apiUrl,
+    token: config.token,
+    orgId: config.orgId,
+    workspaceId: config.workspaceId,
+    sessionsTable: config.sessionsTableName,
+    skillsTable: config.skillsTableName,
+    userName: config.userName,
+    cwd,
+    projectKey,
+    project,
+    agent,
+    scope: scopeConfig.scope,
+    team: scopeConfig.team,
+    install: scopeConfig.install,
+    tmpDir,
+    gateBin,
+    cursorModel: process.env.HIVEMIND_CURSOR_MODEL,
+    hermesProvider: process.env.HIVEMIND_HERMES_PROVIDER,
+    hermesModel: process.env.HIVEMIND_HERMES_MODEL,
+    piProvider: process.env.HIVEMIND_PI_PROVIDER,
+    piModel: process.env.HIVEMIND_PI_MODEL,
+    skilifyLog: SKILIFY_LOG,
+    currentSessionId
+  }), { mode: 384 });
+  try {
+    chmodSync(configFile, 384);
+  } catch {
+  }
+  skilifyLog(`${reason}: spawning skilify worker for project=${project} key=${projectKey}`);
+  const workerPath = join7(bundleDir, "skilify-worker.js");
+  spawn2("nohup", ["node", workerPath, configFile], {
+    detached: true,
+    stdio: ["ignore", "ignore", "ignore"]
+  }).unref();
+  skilifyLog(`${reason}: spawned skilify worker for ${projectKey}`);
+}
+
+// dist/src/skilify/state.js
+import { readFileSync as readFileSync3, writeFileSync as writeFileSync4, writeSync as writeSync2, mkdirSync as mkdirSync5, renameSync as renameSync2, existsSync as existsSync4, unlinkSync as unlinkSync2, openSync as openSync2, closeSync as closeSync2 } from "node:fs";
+import { execSync as execSync2 } from "node:child_process";
+import { homedir as homedir7 } from "node:os";
+import { createHash } from "node:crypto";
+import { join as join8, basename } from "node:path";
+var dlog2 = (msg) => log("skilify-state", msg);
+var STATE_DIR2 = join8(homedir7(), ".deeplake", "state", "skilify");
+var YIELD_BUF2 = new Int32Array(new SharedArrayBuffer(4));
+var TRIGGER_THRESHOLD = (() => {
+  const n = Number(process.env.HIVEMIND_SKILIFY_EVERY_N_TURNS ?? "");
+  return Number.isInteger(n) && n > 0 ? n : 20;
+})();
+function statePath(projectKey) {
+  return join8(STATE_DIR2, `${projectKey}.json`);
+}
+function lockPath2(projectKey) {
+  return join8(STATE_DIR2, `${projectKey}.lock`);
+}
+function deriveProjectKey(cwd) {
+  const project = basename(cwd) || "unknown";
+  let signature = null;
+  try {
+    signature = execSync2("git config --get remote.origin.url", {
+      cwd,
+      encoding: "utf-8",
+      stdio: ["ignore", "pipe", "ignore"]
+    }).trim() || null;
+  } catch {
+  }
+  const input = signature ?? cwd;
+  const key = createHash("sha1").update(input).digest("hex").slice(0, 16);
+  return { key, project };
+}
+function readState(projectKey) {
+  const p = statePath(projectKey);
+  if (!existsSync4(p))
+    return null;
+  try {
+    return JSON.parse(readFileSync3(p, "utf-8"));
+  } catch {
+    return null;
+  }
+}
+function writeState(projectKey, state) {
+  mkdirSync5(STATE_DIR2, { recursive: true });
+  const p = statePath(projectKey);
+  const tmp = `${p}.${process.pid}.${Date.now()}.tmp`;
+  writeFileSync4(tmp, JSON.stringify(state, null, 2));
+  renameSync2(tmp, p);
+}
+function withRmwLock(projectKey, fn) {
+  mkdirSync5(STATE_DIR2, { recursive: true });
+  const rmw = lockPath2(projectKey) + ".rmw";
+  const deadline = Date.now() + 2e3;
+  let fd = null;
+  while (fd === null) {
+    try {
+      fd = openSync2(rmw, "wx");
+    } catch (e) {
+      if (e.code !== "EEXIST")
+        throw e;
+      if (Date.now() > deadline) {
+        dlog2(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`);
+        try {
+          unlinkSync2(rmw);
+        } catch (unlinkErr) {
+          dlog2(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`);
+        }
+        continue;
+      }
+      Atomics.wait(YIELD_BUF2, 0, 0, 10);
+    }
+  }
+  try {
+    return fn();
+  } finally {
+    closeSync2(fd);
+    try {
+      unlinkSync2(rmw);
+    } catch (unlinkErr) {
+      dlog2(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`);
+    }
+  }
+}
+function resetCounter(projectKey) {
+  withRmwLock(projectKey, () => {
+    const s = readState(projectKey);
+    if (!s)
+      return;
+    writeState(projectKey, { ...s, counter: 0, updatedAt: Date.now() });
+  });
+}
+function tryAcquireWorkerLock(projectKey, maxAgeMs = 10 * 60 * 1e3) {
+  mkdirSync5(STATE_DIR2, { recursive: true });
+  const p = lockPath2(projectKey);
+  if (existsSync4(p)) {
+    try {
+      const ageMs = Date.now() - parseInt(readFileSync3(p, "utf-8"), 10);
+      if (Number.isFinite(ageMs) && ageMs < maxAgeMs)
+        return false;
+    } catch (readErr) {
+      dlog2(`worker lock unreadable for ${projectKey}, treating as stale: ${readErr.message}`);
+    }
+    try {
+      unlinkSync2(p);
+    } catch (unlinkErr) {
+      dlog2(`could not unlink stale worker lock for ${projectKey}: ${unlinkErr.message}`);
+      return false;
+    }
+  }
+  try {
+    const fd = openSync2(p, "wx");
+    try {
+      writeSync2(fd, String(Date.now()));
+    } finally {
+      closeSync2(fd);
+    }
+    return true;
+  } catch {
+    return false;
+  }
+}
+function releaseWorkerLock(projectKey) {
+  const p = lockPath2(projectKey);
+  try {
+    unlinkSync2(p);
+  } catch {
+  }
+}
+
+// dist/src/skilify/scope-config.js
+import { existsSync as existsSync5, mkdirSync as mkdirSync6, readFileSync as readFileSync4, writeFileSync as writeFileSync5 } from "node:fs";
+import { homedir as homedir8 } from "node:os";
+import { join as join9 } from "node:path";
+var STATE_DIR3 = join9(homedir8(), ".deeplake", "state", "skilify");
+var CONFIG_PATH = join9(STATE_DIR3, "config.json");
+var DEFAULT = { scope: "me", team: [], install: "project" };
+function loadScopeConfig() {
+  if (!existsSync5(CONFIG_PATH))
+    return DEFAULT;
+  try {
+    const raw = JSON.parse(readFileSync4(CONFIG_PATH, "utf-8"));
+    const scope = raw.scope === "team" || raw.scope === "org" ? raw.scope : "me";
+    const team = Array.isArray(raw.team) ? raw.team.filter((s) => typeof s === "string") : [];
+    const install = raw.install === "global" ? "global" : "project";
+    return { scope, team, install };
+  } catch {
+    return DEFAULT;
+  }
+}
+
+// dist/src/skilify/triggers.js
+function forceSessionEndTrigger(opts) {
+  if (process.env.HIVEMIND_SKILIFY_WORKER === "1")
+    return;
+  if (!opts.cwd)
+    return;
+  try {
+    const { key: projectKey, project } = deriveProjectKey(opts.cwd);
+    if (!tryAcquireWorkerLock(projectKey)) {
+      skilifyLog(`SessionEnd: skilify worker already running for ${projectKey}, skipping`);
+      return;
+    }
+    if (readState(projectKey)) {
+      resetCounter(projectKey);
+    }
+    skilifyLog(`SessionEnd: spawning skilify worker for project=${project} agent=${opts.agent}`);
+    try {
+      spawnSkilifyWorker({
+        config: opts.config,
+        cwd: opts.cwd,
+        projectKey,
+        project,
+        bundleDir: opts.bundleDir,
+        agent: opts.agent,
+        scopeConfig: loadScopeConfig(),
+        currentSessionId: opts.sessionId,
+        reason: "SessionEnd"
+      });
+    } catch (e) {
+      skilifyLog(`SessionEnd spawn failed: ${e?.message ?? e}`);
+      try {
+        releaseWorkerLock(projectKey);
+      } catch {
+      }
+    }
+  } catch (e) {
+    skilifyLog(`SessionEnd trigger error: ${e?.message ?? e}`);
+  }
+}
+
 // dist/src/hooks/cursor/session-end.js
 var log2 = (msg) => log("cursor-session-end", msg);
 async function main() {
```
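Two details in this hunk are easy to misread. `YIELD_BUF2` exists only so the lock-retry loop can sleep synchronously: `Atomics.wait` on a throwaway `SharedArrayBuffer` blocks the current thread for the timeout, because the value at index 0 stays `0` and nothing ever calls `Atomics.notify`. A standalone sketch (the helper name is mine, not the package's):

```js
// Dependency-free synchronous sleep, as used between rmw-lock attempts above.
// Atomics.wait returns "timed-out" after ms: the value at index 0 never
// changes from 0 and no other thread notifies it. Node allows this on the
// main thread, unlike browsers.
const yieldBuf = new Int32Array(new SharedArrayBuffer(4));
function sleepSync(ms) {
  Atomics.wait(yieldBuf, 0, 0, ms);
}
sleepSync(10);
```

The worker launch itself is the usual daemonize recipe: `nohup` plus `detached: true` plus `.unref()` lets the short-lived hook process exit immediately while the skilify worker keeps running in the background.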
```diff
@@ -243,12 +529,12 @@ async function main() {
     wikiLog(`SessionEnd: periodic worker already running for ${sessionId}, skipping final`);
     return;
   }
+  const config = loadConfig();
+  if (!config) {
+    wikiLog(`SessionEnd: no config, skipping summary`);
+    return;
+  }
   try {
-    const config = loadConfig();
-    if (!config) {
-      wikiLog(`SessionEnd: no config, skipping summary`);
-      return;
-    }
     spawnCursorWikiWorker({
       config,
       sessionId,

@@ -257,7 +543,18 @@ async function main() {
       reason: "SessionEnd"
     });
   } catch (e) {
-    wikiLog(`SessionEnd: spawn failed: ${e?.message ?? e}`);
+    wikiLog(`SessionEnd: wiki spawn failed: ${e?.message ?? e}`);
+  }
+  try {
+    forceSessionEndTrigger({
+      config,
+      cwd: process.cwd(),
+      bundleDir: bundleDirFromImportMeta(import.meta.url),
+      agent: "cursor",
+      sessionId
+    });
+  } catch (e) {
+    wikiLog(`SessionEnd: skilify trigger failed: ${e?.message ?? e}`);
   }
 }
 main().catch((e) => {

@@ -54,7 +54,7 @@ var init_index_marker_store = __esm({
 
 // dist/src/hooks/cursor/session-start.js
 import { fileURLToPath } from "node:url";
-import { dirname as dirname2
+import { dirname as dirname2 } from "node:path";
 
 // dist/src/commands/auth.js
 import { execSync } from "node:child_process";

@@ -114,6 +114,7 @@ function loadConfig() {
     apiUrl: process.env.HIVEMIND_API_URL ?? creds?.apiUrl ?? "https://api.deeplake.ai",
     tableName: process.env.HIVEMIND_TABLE ?? "memory",
     sessionsTableName: process.env.HIVEMIND_SESSIONS_TABLE ?? "sessions",
+    skillsTableName: process.env.HIVEMIND_SKILLS_TABLE ?? "skills",
     memoryPath: process.env.HIVEMIND_MEMORY_PATH ?? join2(home, ".deeplake", "memory")
   };
 }

@@ -138,6 +139,12 @@ function log(tag, msg) {
 function sqlStr(value) {
   return value.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/\0/g, "").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, "");
 }
+function sqlIdent(name) {
+  if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
+    throw new Error(`Invalid SQL identifier: ${JSON.stringify(name)}`);
+  }
+  return name;
+}
 
 // dist/src/embeddings/columns.js
 var SUMMARY_EMBEDDING_COL = "summary_embedding";
```
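Note that `sqlIdent` is a whitelist gate rather than an escaper: table names are interpolated directly into DDL strings, so anything outside `[A-Za-z_][A-Za-z0-9_]*` is rejected before it can reach a query. Exercising the same function:

```js
// The gate from the diff, exercised. Unlike sqlStr, it never rewrites input;
// it either returns the name unchanged or throws.
function sqlIdent(name) {
  if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
    throw new Error(`Invalid SQL identifier: ${JSON.stringify(name)}`);
  }
  return name;
}

sqlIdent("sessions");             // "sessions", passes through unchanged
sqlIdent("skills_v2");            // ok: letters, digits, underscore
sqlIdent('x"; DROP TABLE y; --'); // throws: Invalid SQL identifier
```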
```diff
@@ -501,7 +508,7 @@ var DeeplakeApi = class {
   }
   /** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
   async ensureTable(name) {
-    const tbl = name ?? this.tableName;
+    const tbl = sqlIdent(name ?? this.tableName);
     const tables = await this.listTables();
     if (!tables.includes(tbl)) {
       log2(`table "${tbl}" not found, creating`);

@@ -515,17 +522,40 @@ var DeeplakeApi = class {
   }
   /** Create the sessions table (uses JSONB for message since every row is a JSON event). */
   async ensureSessionsTable(name) {
+    const safe = sqlIdent(name);
+    const tables = await this.listTables();
+    if (!tables.includes(safe)) {
+      log2(`table "${safe}" not found, creating`);
+      await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, message_embedding FLOAT4[], author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`, safe);
+      log2(`table "${safe}" created`);
+      if (!tables.includes(safe))
+        this._tablesCache = [...tables, safe];
+    }
+    await this.ensureEmbeddingColumn(safe, MESSAGE_EMBEDDING_COL);
+    await this.ensureColumn(safe, "agent", "TEXT NOT NULL DEFAULT ''");
+    await this.ensureLookupIndex(safe, "path_creation_date", `("path", "creation_date")`);
+  }
+  /**
+   * Create the skills table.
+   *
+   * One row per skill version. Workers INSERT a fresh row on every KEEP /
+   * MERGE rather than UPDATE-ing in place, so the full version history is
+   * recoverable. Uniqueness in the *current* state is by (project_key, name)
+   * — newer rows shadow older ones at read time (ORDER BY version DESC).
+   * This sidesteps the Deeplake UPDATE-coalescing quirk that bit the wiki
+   * worker.
+   */
+  async ensureSkillsTable(name) {
+    const safe = sqlIdent(name);
     const tables = await this.listTables();
-    if (!tables.includes(
-    log2(`table "${
-    await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${
-    log2(`table "${
-    if (!tables.includes(
-    this._tablesCache = [...tables,
+    if (!tables.includes(safe)) {
+      log2(`table "${safe}" not found, creating`);
+      await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', name TEXT NOT NULL DEFAULT '', project TEXT NOT NULL DEFAULT '', project_key TEXT NOT NULL DEFAULT '', local_path TEXT NOT NULL DEFAULT '', install TEXT NOT NULL DEFAULT 'project', source_sessions TEXT NOT NULL DEFAULT '[]', source_agent TEXT NOT NULL DEFAULT '', scope TEXT NOT NULL DEFAULT 'me', author TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', trigger_text TEXT NOT NULL DEFAULT '', body TEXT NOT NULL DEFAULT '', version BIGINT NOT NULL DEFAULT 1, created_at TEXT NOT NULL DEFAULT '', updated_at TEXT NOT NULL DEFAULT '') USING deeplake`, safe);
+      log2(`table "${safe}" created`);
+      if (!tables.includes(safe))
+        this._tablesCache = [...tables, safe];
     }
-    await this.
-    await this.ensureColumn(name, "agent", "TEXT NOT NULL DEFAULT ''");
-    await this.ensureLookupIndex(name, "path_creation_date", `("path", "creation_date")`);
+    await this.ensureLookupIndex(safe, "project_key_name", `("project_key", "name")`);
   }
 };
 
```
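Given the append-only versioning described in the `ensureSkillsTable` doc comment, readers must collapse history themselves: fetch rows ordered by `version DESC` and keep the first row seen per name. A sketch of that read-side dedup (function name and row shape are assumptions, not the package's API):

```js
// Assumes rows for one project_key, already sorted by version DESC.
// The first row seen per skill name is the current version; older rows
// are shadowed but remain in the table as recoverable history.
function currentSkills(rows) {
  const seen = new Set();
  return rows.filter((row) => {
    if (seen.has(row.name)) return false;
    seen.add(row.name);
    return true;
  });
}

const rows = [
  { name: "deploy", version: 3 },
  { name: "deploy", version: 2 },
  { name: "lint", version: 1 },
];
currentSkills(rows); // -> deploy v3, lint v1
```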
```diff
@@ -591,13 +621,37 @@ function getInstalledVersion(bundleDir, pluginManifestDir) {
 // dist/src/hooks/cursor/session-start.js
 var log3 = (msg) => log("cursor-session-start", msg);
 var __bundleDir = dirname2(fileURLToPath(import.meta.url));
-var AUTH_CMD = join6(__bundleDir, "commands", "auth-login.js");
 var context = `DEEPLAKE MEMORY: Persistent memory at ~/.deeplake/memory/ shared across sessions, users, and agents.
 
 Structure: index.md (start here) \u2192 summaries/*.md \u2192 sessions/*.jsonl (last resort). Do NOT jump straight to JSONL.
 Search: use \`grep\` (NOT \`rg\`/ripgrep). Example: grep -ri "keyword" ~/.deeplake/memory/
 IMPORTANT: Only use these bash builtins to interact with ~/.deeplake/memory/: cat, ls, grep, echo, jq, head, tail, sed, awk, wc, sort, find. Do NOT use rg/ripgrep, python, python3, node, curl, or other interpreters \u2014 they may not be installed and the memory filesystem only supports the listed builtins.
-Do NOT spawn subagents to read deeplake memory
+Do NOT spawn subagents to read deeplake memory.
+
+Organization management \u2014 each argument is SEPARATE (do NOT quote subcommands together):
+- hivemind login \u2014 SSO login
+- hivemind whoami \u2014 show current user/org
+- hivemind org list \u2014 list organizations
+- hivemind org switch <name-or-id> \u2014 switch organization
+- hivemind workspaces \u2014 list workspaces
+- hivemind workspace <id> \u2014 switch workspace
+- hivemind invite <email> <ADMIN|WRITE|READ> \u2014 invite member (ALWAYS ask user which role before inviting)
+- hivemind members \u2014 list members
+- hivemind remove <user-id> \u2014 remove member
+
+SKILLS (skilify) \u2014 mine + share reusable skills across the org:
+- hivemind skilify \u2014 show scope/team/install + per-project state
+- hivemind skilify pull \u2014 sync project skills from the org table
+- hivemind skilify pull --user <email> \u2014 only that author's skills
+- hivemind skilify pull --users a,b,c \u2014 multiple authors (CSV)
+- hivemind skilify pull --all-users \u2014 explicit "no author filter"
+- hivemind skilify pull --to project|global \u2014 install location
+- hivemind skilify pull --dry-run \u2014 preview only
+- hivemind skilify pull --force \u2014 overwrite local (creates .bak)
+- hivemind skilify pull <skill-name> \u2014 pull only that skill (combines with --user)
+- hivemind skilify scope <me|team|org> \u2014 sharing scope for new skills
+- hivemind skilify install <project|global> \u2014 default install location
+- hivemind skilify team add|remove|list <name> \u2014 manage team list`;
 function resolveSessionId(input) {
   return input.session_id ?? input.conversation_id ?? `cursor-${Date.now()}`;
 }

@@ -663,7 +717,7 @@ async function main() {
 Hivemind v${current}`;
   const additionalContext = creds?.token ? `${context}
 Logged in to Deeplake as org: ${creds.orgName ?? creds.orgId} (workspace: ${creds.workspaceId ?? "default"})${versionNotice}` : `${context}
-Not logged in to Deeplake. Run:
+Not logged in to Deeplake. Run: hivemind login${versionNotice}`;
   console.log(JSON.stringify({ additional_context: additionalContext }));
 }
 main().catch((e) => {

@@ -66791,6 +66791,7 @@ function loadConfig() {
     apiUrl: process.env.HIVEMIND_API_URL ?? creds?.apiUrl ?? "https://api.deeplake.ai",
     tableName: process.env.HIVEMIND_TABLE ?? "memory",
     sessionsTableName: process.env.HIVEMIND_SESSIONS_TABLE ?? "sessions",
+    skillsTableName: process.env.HIVEMIND_SKILLS_TABLE ?? "skills",
     memoryPath: process.env.HIVEMIND_MEMORY_PATH ?? join4(home, ".deeplake", "memory")
   };
 }

@@ -66818,6 +66819,12 @@ function sqlStr(value) {
 function sqlLike(value) {
   return sqlStr(value).replace(/%/g, "\\%").replace(/_/g, "\\_");
 }
+function sqlIdent(name) {
+  if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
+    throw new Error(`Invalid SQL identifier: ${JSON.stringify(name)}`);
+  }
+  return name;
+}
 
 // dist/src/embeddings/columns.js
 var SUMMARY_EMBEDDING_COL = "summary_embedding";

@@ -67190,7 +67197,7 @@ var DeeplakeApi = class {
   }
   /** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
   async ensureTable(name) {
-    const tbl = name ?? this.tableName;
+    const tbl = sqlIdent(name ?? this.tableName);
     const tables = await this.listTables();
     if (!tables.includes(tbl)) {
       log2(`table "${tbl}" not found, creating`);

@@ -67204,17 +67211,40 @@ var DeeplakeApi = class {
   }
   /** Create the sessions table (uses JSONB for message since every row is a JSON event). */
   async ensureSessionsTable(name) {
+    const safe = sqlIdent(name);
+    const tables = await this.listTables();
+    if (!tables.includes(safe)) {
+      log2(`table "${safe}" not found, creating`);
+      await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, message_embedding FLOAT4[], author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`, safe);
+      log2(`table "${safe}" created`);
+      if (!tables.includes(safe))
+        this._tablesCache = [...tables, safe];
+    }
+    await this.ensureEmbeddingColumn(safe, MESSAGE_EMBEDDING_COL);
+    await this.ensureColumn(safe, "agent", "TEXT NOT NULL DEFAULT ''");
+    await this.ensureLookupIndex(safe, "path_creation_date", `("path", "creation_date")`);
+  }
+  /**
+   * Create the skills table.
+   *
+   * One row per skill version. Workers INSERT a fresh row on every KEEP /
+   * MERGE rather than UPDATE-ing in place, so the full version history is
+   * recoverable. Uniqueness in the *current* state is by (project_key, name)
+   * — newer rows shadow older ones at read time (ORDER BY version DESC).
+   * This sidesteps the Deeplake UPDATE-coalescing quirk that bit the wiki
+   * worker.
+   */
+  async ensureSkillsTable(name) {
+    const safe = sqlIdent(name);
     const tables = await this.listTables();
-    if (!tables.includes(
-    log2(`table "${
-    await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${
-    log2(`table "${
-    if (!tables.includes(
-    this._tablesCache = [...tables,
-    }
-    await this.
-    await this.ensureColumn(name, "agent", "TEXT NOT NULL DEFAULT ''");
-    await this.ensureLookupIndex(name, "path_creation_date", `("path", "creation_date")`);
+    if (!tables.includes(safe)) {
+      log2(`table "${safe}" not found, creating`);
+      await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', name TEXT NOT NULL DEFAULT '', project TEXT NOT NULL DEFAULT '', project_key TEXT NOT NULL DEFAULT '', local_path TEXT NOT NULL DEFAULT '', install TEXT NOT NULL DEFAULT 'project', source_sessions TEXT NOT NULL DEFAULT '[]', source_agent TEXT NOT NULL DEFAULT '', scope TEXT NOT NULL DEFAULT 'me', author TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', trigger_text TEXT NOT NULL DEFAULT '', body TEXT NOT NULL DEFAULT '', version BIGINT NOT NULL DEFAULT 1, created_at TEXT NOT NULL DEFAULT '', updated_at TEXT NOT NULL DEFAULT '') USING deeplake`, safe);
+      log2(`table "${safe}" created`);
+      if (!tables.includes(safe))
+        this._tablesCache = [...tables, safe];
+    }
+    await this.ensureLookupIndex(safe, "project_key_name", `("project_key", "name")`);
   }
 };
 
```