@deeplake/hivemind 0.7.4 → 0.7.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. package/.claude-plugin/marketplace.json +2 -2
  2. package/.claude-plugin/plugin.json +1 -1
  3. package/README.md +97 -0
  4. package/bundle/cli.js +820 -20
  5. package/codex/bundle/capture.js +40 -10
  6. package/codex/bundle/commands/auth-login.js +84 -18
  7. package/codex/bundle/pre-tool-use.js +41 -11
  8. package/codex/bundle/session-start-setup.js +40 -10
  9. package/codex/bundle/session-start.js +27 -3
  10. package/codex/bundle/shell/deeplake-shell.js +41 -11
  11. package/codex/bundle/skilify-worker.js +907 -0
  12. package/codex/bundle/stop.js +373 -51
  13. package/cursor/bundle/capture.js +354 -13
  14. package/cursor/bundle/commands/auth-login.js +84 -18
  15. package/cursor/bundle/pre-tool-use.js +40 -10
  16. package/cursor/bundle/session-end.js +303 -6
  17. package/cursor/bundle/session-start.js +68 -14
  18. package/cursor/bundle/shell/deeplake-shell.js +41 -11
  19. package/cursor/bundle/skilify-worker.js +907 -0
  20. package/hermes/bundle/capture.js +354 -13
  21. package/hermes/bundle/commands/auth-login.js +84 -18
  22. package/hermes/bundle/pre-tool-use.js +40 -10
  23. package/hermes/bundle/session-end.js +305 -7
  24. package/hermes/bundle/session-start.js +68 -14
  25. package/hermes/bundle/shell/deeplake-shell.js +41 -11
  26. package/hermes/bundle/skilify-worker.js +907 -0
  27. package/mcp/bundle/server.js +41 -11
  28. package/openclaw/dist/chunks/{config-G23NI5TV.js → config-ZLH6JFJS.js} +1 -0
  29. package/openclaw/dist/index.js +185 -16
  30. package/openclaw/dist/skilify-worker.js +907 -0
  31. package/openclaw/openclaw.plugin.json +1 -1
  32. package/openclaw/package.json +1 -1
  33. package/openclaw/skills/SKILL.md +19 -0
  34. package/package.json +6 -1
  35. package/pi/extension-source/hivemind.ts +130 -1
@@ -96,6 +96,7 @@ function loadConfig() {
96
96
  apiUrl: process.env.HIVEMIND_API_URL ?? creds?.apiUrl ?? "https://api.deeplake.ai",
97
97
  tableName: process.env.HIVEMIND_TABLE ?? "memory",
98
98
  sessionsTableName: process.env.HIVEMIND_SESSIONS_TABLE ?? "sessions",
99
+ skillsTableName: process.env.HIVEMIND_SKILLS_TABLE ?? "skills",
99
100
  memoryPath: process.env.HIVEMIND_MEMORY_PATH ?? join(home, ".deeplake", "memory")
100
101
  };
101
102
  }
@@ -123,6 +124,12 @@ function log(tag, msg) {
123
124
  function sqlStr(value) {
124
125
  return value.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/\0/g, "").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, "");
125
126
  }
127
+ function sqlIdent(name) {
128
+ if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
129
+ throw new Error(`Invalid SQL identifier: ${JSON.stringify(name)}`);
130
+ }
131
+ return name;
132
+ }
126
133
 
127
134
  // dist/src/embeddings/columns.js
128
135
  var SUMMARY_EMBEDDING_COL = "summary_embedding";
@@ -495,7 +502,7 @@ var DeeplakeApi = class {
495
502
  }
496
503
  /** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
497
504
  async ensureTable(name) {
498
- const tbl = name ?? this.tableName;
505
+ const tbl = sqlIdent(name ?? this.tableName);
499
506
  const tables = await this.listTables();
500
507
  if (!tables.includes(tbl)) {
501
508
  log2(`table "${tbl}" not found, creating`);
@@ -509,17 +516,40 @@ var DeeplakeApi = class {
509
516
  }
510
517
  /** Create the sessions table (uses JSONB for message since every row is a JSON event). */
511
518
  async ensureSessionsTable(name) {
519
+ const safe = sqlIdent(name);
512
520
  const tables = await this.listTables();
513
- if (!tables.includes(name)) {
514
- log2(`table "${name}" not found, creating`);
515
- await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${name}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, message_embedding FLOAT4[], author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`, name);
516
- log2(`table "${name}" created`);
517
- if (!tables.includes(name))
518
- this._tablesCache = [...tables, name];
521
+ if (!tables.includes(safe)) {
522
+ log2(`table "${safe}" not found, creating`);
523
+ await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, message_embedding FLOAT4[], author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`, safe);
524
+ log2(`table "${safe}" created`);
525
+ if (!tables.includes(safe))
526
+ this._tablesCache = [...tables, safe];
519
527
  }
520
- await this.ensureEmbeddingColumn(name, MESSAGE_EMBEDDING_COL);
521
- await this.ensureColumn(name, "agent", "TEXT NOT NULL DEFAULT ''");
522
- await this.ensureLookupIndex(name, "path_creation_date", `("path", "creation_date")`);
528
+ await this.ensureEmbeddingColumn(safe, MESSAGE_EMBEDDING_COL);
529
+ await this.ensureColumn(safe, "agent", "TEXT NOT NULL DEFAULT ''");
530
+ await this.ensureLookupIndex(safe, "path_creation_date", `("path", "creation_date")`);
531
+ }
532
+ /**
533
+ * Create the skills table.
534
+ *
535
+ * One row per skill version. Workers INSERT a fresh row on every KEEP /
536
+ * MERGE rather than UPDATE-ing in place, so the full version history is
537
+ * recoverable. Uniqueness in the *current* state is by (project_key, name)
538
+ * — newer rows shadow older ones at read time (ORDER BY version DESC).
539
+ * This sidesteps the Deeplake UPDATE-coalescing quirk that bit the wiki
540
+ * worker.
541
+ */
542
+ async ensureSkillsTable(name) {
543
+ const safe = sqlIdent(name);
544
+ const tables = await this.listTables();
545
+ if (!tables.includes(safe)) {
546
+ log2(`table "${safe}" not found, creating`);
547
+ await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', name TEXT NOT NULL DEFAULT '', project TEXT NOT NULL DEFAULT '', project_key TEXT NOT NULL DEFAULT '', local_path TEXT NOT NULL DEFAULT '', install TEXT NOT NULL DEFAULT 'project', source_sessions TEXT NOT NULL DEFAULT '[]', source_agent TEXT NOT NULL DEFAULT '', scope TEXT NOT NULL DEFAULT 'me', author TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', trigger_text TEXT NOT NULL DEFAULT '', body TEXT NOT NULL DEFAULT '', version BIGINT NOT NULL DEFAULT 1, created_at TEXT NOT NULL DEFAULT '', updated_at TEXT NOT NULL DEFAULT '') USING deeplake`, safe);
548
+ log2(`table "${safe}" created`);
549
+ if (!tables.includes(safe))
550
+ this._tablesCache = [...tables, safe];
551
+ }
552
+ await this.ensureLookupIndex(safe, "project_key_name", `("project_key", "name")`);
523
553
  }
524
554
  };
525
555
 
@@ -807,8 +837,8 @@ function embeddingsDisabled() {
807
837
  }
808
838
 
809
839
  // dist/src/hooks/hermes/capture.js
810
- import { fileURLToPath as fileURLToPath2 } from "node:url";
811
- import { dirname as dirname2, join as join9 } from "node:path";
840
+ import { fileURLToPath as fileURLToPath3 } from "node:url";
841
+ import { dirname as dirname3, join as join13 } from "node:path";
812
842
 
813
843
  // dist/src/hooks/summary-state.js
814
844
  import { readFileSync as readFileSync4, writeFileSync as writeFileSync2, writeSync as writeSync2, mkdirSync as mkdirSync2, renameSync, existsSync as existsSync4, unlinkSync as unlinkSync2, openSync as openSync2, closeSync as closeSync2 } from "node:fs";
@@ -1066,10 +1096,312 @@ function bundleDirFromImportMeta(importMetaUrl) {
1066
1096
  return dirname(fileURLToPath(importMetaUrl));
1067
1097
  }
1068
1098
 
1099
+ // dist/src/skilify/spawn-skilify-worker.js
1100
+ import { spawn as spawn3 } from "node:child_process";
1101
+ import { fileURLToPath as fileURLToPath2 } from "node:url";
1102
+ import { dirname as dirname2, join as join10 } from "node:path";
1103
+ import { writeFileSync as writeFileSync4, mkdirSync as mkdirSync5, appendFileSync as appendFileSync3, chmodSync } from "node:fs";
1104
+ import { homedir as homedir8, tmpdir as tmpdir3 } from "node:os";
1105
+
1106
+ // dist/src/skilify/gate-runner.js
1107
+ import { execFileSync } from "node:child_process";
1108
+ import { existsSync as existsSync5 } from "node:fs";
1109
+ import { homedir as homedir7 } from "node:os";
1110
+ import { join as join9 } from "node:path";
1111
+ function findAgentBin(agent) {
1112
+ const which = (name) => {
1113
+ try {
1114
+ const out = execFileSync("which", [name], {
1115
+ encoding: "utf-8",
1116
+ stdio: ["ignore", "pipe", "ignore"]
1117
+ });
1118
+ return out.trim() || null;
1119
+ } catch {
1120
+ return null;
1121
+ }
1122
+ };
1123
+ switch (agent) {
1124
+ case "claude_code":
1125
+ return which("claude") ?? join9(homedir7(), ".claude", "local", "claude");
1126
+ case "codex":
1127
+ return which("codex") ?? "/usr/local/bin/codex";
1128
+ case "cursor":
1129
+ return which("cursor-agent") ?? "/usr/local/bin/cursor-agent";
1130
+ case "hermes":
1131
+ return which("hermes") ?? join9(homedir7(), ".local", "bin", "hermes");
1132
+ case "pi":
1133
+ return which("pi") ?? join9(homedir7(), ".local", "bin", "pi");
1134
+ }
1135
+ }
1136
+
1137
+ // dist/src/skilify/spawn-skilify-worker.js
1138
+ var HOME2 = homedir8();
1139
+ var SKILIFY_LOG = join10(HOME2, ".claude", "hooks", "skilify.log");
1140
+ function skilifyLog(msg) {
1141
+ try {
1142
+ mkdirSync5(dirname2(SKILIFY_LOG), { recursive: true });
1143
+ appendFileSync3(SKILIFY_LOG, `[${utcTimestamp()}] ${msg}
1144
+ `);
1145
+ } catch {
1146
+ }
1147
+ }
1148
+ function spawnSkilifyWorker(opts) {
1149
+ const { config, cwd, projectKey, project, bundleDir, agent, scopeConfig, currentSessionId, reason } = opts;
1150
+ const tmpDir = join10(tmpdir3(), `deeplake-skilify-${projectKey}-${Date.now()}`);
1151
+ mkdirSync5(tmpDir, { recursive: true, mode: 448 });
1152
+ const gateBin = findAgentBin(agent);
1153
+ const configFile = join10(tmpDir, "config.json");
1154
+ writeFileSync4(configFile, JSON.stringify({
1155
+ apiUrl: config.apiUrl,
1156
+ token: config.token,
1157
+ orgId: config.orgId,
1158
+ workspaceId: config.workspaceId,
1159
+ sessionsTable: config.sessionsTableName,
1160
+ skillsTable: config.skillsTableName,
1161
+ userName: config.userName,
1162
+ cwd,
1163
+ projectKey,
1164
+ project,
1165
+ agent,
1166
+ scope: scopeConfig.scope,
1167
+ team: scopeConfig.team,
1168
+ install: scopeConfig.install,
1169
+ tmpDir,
1170
+ gateBin,
1171
+ cursorModel: process.env.HIVEMIND_CURSOR_MODEL,
1172
+ hermesProvider: process.env.HIVEMIND_HERMES_PROVIDER,
1173
+ hermesModel: process.env.HIVEMIND_HERMES_MODEL,
1174
+ piProvider: process.env.HIVEMIND_PI_PROVIDER,
1175
+ piModel: process.env.HIVEMIND_PI_MODEL,
1176
+ skilifyLog: SKILIFY_LOG,
1177
+ currentSessionId
1178
+ }), { mode: 384 });
1179
+ try {
1180
+ chmodSync(configFile, 384);
1181
+ } catch {
1182
+ }
1183
+ skilifyLog(`${reason}: spawning skilify worker for project=${project} key=${projectKey}`);
1184
+ const workerPath = join10(bundleDir, "skilify-worker.js");
1185
+ spawn3("nohup", ["node", workerPath, configFile], {
1186
+ detached: true,
1187
+ stdio: ["ignore", "ignore", "ignore"]
1188
+ }).unref();
1189
+ skilifyLog(`${reason}: spawned skilify worker for ${projectKey}`);
1190
+ }
1191
+
1192
+ // dist/src/skilify/state.js
1193
+ import { readFileSync as readFileSync5, writeFileSync as writeFileSync5, writeSync as writeSync3, mkdirSync as mkdirSync6, renameSync as renameSync2, existsSync as existsSync6, unlinkSync as unlinkSync3, openSync as openSync3, closeSync as closeSync3 } from "node:fs";
1194
+ import { execSync as execSync2 } from "node:child_process";
1195
+ import { homedir as homedir9 } from "node:os";
1196
+ import { createHash } from "node:crypto";
1197
+ import { join as join11, basename } from "node:path";
1198
+ var dlog2 = (msg) => log("skilify-state", msg);
1199
+ var STATE_DIR2 = join11(homedir9(), ".deeplake", "state", "skilify");
1200
+ var YIELD_BUF2 = new Int32Array(new SharedArrayBuffer(4));
1201
+ var TRIGGER_THRESHOLD = (() => {
1202
+ const n = Number(process.env.HIVEMIND_SKILIFY_EVERY_N_TURNS ?? "");
1203
+ return Number.isInteger(n) && n > 0 ? n : 20;
1204
+ })();
1205
+ function statePath2(projectKey) {
1206
+ return join11(STATE_DIR2, `${projectKey}.json`);
1207
+ }
1208
+ function lockPath2(projectKey) {
1209
+ return join11(STATE_DIR2, `${projectKey}.lock`);
1210
+ }
1211
+ function deriveProjectKey(cwd) {
1212
+ const project = basename(cwd) || "unknown";
1213
+ let signature = null;
1214
+ try {
1215
+ signature = execSync2("git config --get remote.origin.url", {
1216
+ cwd,
1217
+ encoding: "utf-8",
1218
+ stdio: ["ignore", "pipe", "ignore"]
1219
+ }).trim() || null;
1220
+ } catch {
1221
+ }
1222
+ const input = signature ?? cwd;
1223
+ const key = createHash("sha1").update(input).digest("hex").slice(0, 16);
1224
+ return { key, project };
1225
+ }
1226
+ function readState2(projectKey) {
1227
+ const p = statePath2(projectKey);
1228
+ if (!existsSync6(p))
1229
+ return null;
1230
+ try {
1231
+ return JSON.parse(readFileSync5(p, "utf-8"));
1232
+ } catch {
1233
+ return null;
1234
+ }
1235
+ }
1236
+ function writeState2(projectKey, state) {
1237
+ mkdirSync6(STATE_DIR2, { recursive: true });
1238
+ const p = statePath2(projectKey);
1239
+ const tmp = `${p}.${process.pid}.${Date.now()}.tmp`;
1240
+ writeFileSync5(tmp, JSON.stringify(state, null, 2));
1241
+ renameSync2(tmp, p);
1242
+ }
1243
+ function withRmwLock2(projectKey, fn) {
1244
+ mkdirSync6(STATE_DIR2, { recursive: true });
1245
+ const rmw = lockPath2(projectKey) + ".rmw";
1246
+ const deadline = Date.now() + 2e3;
1247
+ let fd = null;
1248
+ while (fd === null) {
1249
+ try {
1250
+ fd = openSync3(rmw, "wx");
1251
+ } catch (e) {
1252
+ if (e.code !== "EEXIST")
1253
+ throw e;
1254
+ if (Date.now() > deadline) {
1255
+ dlog2(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`);
1256
+ try {
1257
+ unlinkSync3(rmw);
1258
+ } catch (unlinkErr) {
1259
+ dlog2(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`);
1260
+ }
1261
+ continue;
1262
+ }
1263
+ Atomics.wait(YIELD_BUF2, 0, 0, 10);
1264
+ }
1265
+ }
1266
+ try {
1267
+ return fn();
1268
+ } finally {
1269
+ closeSync3(fd);
1270
+ try {
1271
+ unlinkSync3(rmw);
1272
+ } catch (unlinkErr) {
1273
+ dlog2(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`);
1274
+ }
1275
+ }
1276
+ }
1277
+ function bumpStopCounter(cwd) {
1278
+ const { key, project } = deriveProjectKey(cwd);
1279
+ return withRmwLock2(key, () => {
1280
+ const existing = readState2(key);
1281
+ const next = existing ? { ...existing, counter: existing.counter + 1, updatedAt: Date.now() } : {
1282
+ project,
1283
+ projectKey: key,
1284
+ counter: 1,
1285
+ lastUuid: null,
1286
+ lastDate: null,
1287
+ skillsGenerated: [],
1288
+ updatedAt: Date.now()
1289
+ };
1290
+ writeState2(key, next);
1291
+ return next;
1292
+ });
1293
+ }
1294
+ function resetCounter(projectKey) {
1295
+ withRmwLock2(projectKey, () => {
1296
+ const s = readState2(projectKey);
1297
+ if (!s)
1298
+ return;
1299
+ writeState2(projectKey, { ...s, counter: 0, updatedAt: Date.now() });
1300
+ });
1301
+ }
1302
+ function tryAcquireWorkerLock(projectKey, maxAgeMs = 10 * 60 * 1e3) {
1303
+ mkdirSync6(STATE_DIR2, { recursive: true });
1304
+ const p = lockPath2(projectKey);
1305
+ if (existsSync6(p)) {
1306
+ try {
1307
+ const ageMs = Date.now() - parseInt(readFileSync5(p, "utf-8"), 10);
1308
+ if (Number.isFinite(ageMs) && ageMs < maxAgeMs)
1309
+ return false;
1310
+ } catch (readErr) {
1311
+ dlog2(`worker lock unreadable for ${projectKey}, treating as stale: ${readErr.message}`);
1312
+ }
1313
+ try {
1314
+ unlinkSync3(p);
1315
+ } catch (unlinkErr) {
1316
+ dlog2(`could not unlink stale worker lock for ${projectKey}: ${unlinkErr.message}`);
1317
+ return false;
1318
+ }
1319
+ }
1320
+ try {
1321
+ const fd = openSync3(p, "wx");
1322
+ try {
1323
+ writeSync3(fd, String(Date.now()));
1324
+ } finally {
1325
+ closeSync3(fd);
1326
+ }
1327
+ return true;
1328
+ } catch {
1329
+ return false;
1330
+ }
1331
+ }
1332
+ function releaseWorkerLock(projectKey) {
1333
+ const p = lockPath2(projectKey);
1334
+ try {
1335
+ unlinkSync3(p);
1336
+ } catch {
1337
+ }
1338
+ }
1339
+
1340
+ // dist/src/skilify/scope-config.js
1341
+ import { existsSync as existsSync7, mkdirSync as mkdirSync7, readFileSync as readFileSync6, writeFileSync as writeFileSync6 } from "node:fs";
1342
+ import { homedir as homedir10 } from "node:os";
1343
+ import { join as join12 } from "node:path";
1344
+ var STATE_DIR3 = join12(homedir10(), ".deeplake", "state", "skilify");
1345
+ var CONFIG_PATH = join12(STATE_DIR3, "config.json");
1346
+ var DEFAULT = { scope: "me", team: [], install: "project" };
1347
+ function loadScopeConfig() {
1348
+ if (!existsSync7(CONFIG_PATH))
1349
+ return DEFAULT;
1350
+ try {
1351
+ const raw = JSON.parse(readFileSync6(CONFIG_PATH, "utf-8"));
1352
+ const scope = raw.scope === "team" || raw.scope === "org" ? raw.scope : "me";
1353
+ const team = Array.isArray(raw.team) ? raw.team.filter((s) => typeof s === "string") : [];
1354
+ const install = raw.install === "global" ? "global" : "project";
1355
+ return { scope, team, install };
1356
+ } catch {
1357
+ return DEFAULT;
1358
+ }
1359
+ }
1360
+
1361
+ // dist/src/skilify/triggers.js
1362
+ function tryStopCounterTrigger(opts) {
1363
+ if (process.env.HIVEMIND_SKILIFY_WORKER === "1")
1364
+ return;
1365
+ if (!opts.cwd)
1366
+ return;
1367
+ try {
1368
+ const state = bumpStopCounter(opts.cwd);
1369
+ if (state.counter < TRIGGER_THRESHOLD)
1370
+ return;
1371
+ if (!tryAcquireWorkerLock(state.projectKey)) {
1372
+ skilifyLog(`Stop: trigger suppressed (worker lock held) project=${state.project}`);
1373
+ return;
1374
+ }
1375
+ skilifyLog(`Stop: threshold hit (counter=${state.counter}, N=${TRIGGER_THRESHOLD}) project=${state.project} agent=${opts.agent}`);
1376
+ resetCounter(state.projectKey);
1377
+ try {
1378
+ spawnSkilifyWorker({
1379
+ config: opts.config,
1380
+ cwd: opts.cwd,
1381
+ projectKey: state.projectKey,
1382
+ project: state.project,
1383
+ bundleDir: opts.bundleDir,
1384
+ agent: opts.agent,
1385
+ scopeConfig: loadScopeConfig(),
1386
+ currentSessionId: opts.sessionId,
1387
+ reason: "Stop"
1388
+ });
1389
+ } catch (e) {
1390
+ skilifyLog(`Stop spawn failed: ${e?.message ?? e}`);
1391
+ try {
1392
+ releaseWorkerLock(state.projectKey);
1393
+ } catch {
1394
+ }
1395
+ }
1396
+ } catch (e) {
1397
+ skilifyLog(`Stop trigger error: ${e?.message ?? e}`);
1398
+ }
1399
+ }
1400
+
1069
1401
  // dist/src/hooks/hermes/capture.js
1070
1402
  var log4 = (msg) => log("hermes-capture", msg);
1071
1403
  function resolveEmbedDaemonPath() {
1072
- return join9(dirname2(fileURLToPath2(import.meta.url)), "embeddings", "embed-daemon.js");
1404
+ return join13(dirname3(fileURLToPath3(import.meta.url)), "embeddings", "embed-daemon.js");
1073
1405
  }
1074
1406
  var CAPTURE = process.env.HIVEMIND_CAPTURE !== "false";
1075
1407
  function pickString(...candidates) {
@@ -1155,6 +1487,15 @@ async function main() {
1155
1487
  }
1156
1488
  log4("capture ok \u2192 cloud");
1157
1489
  maybeTriggerPeriodicSummary(sessionId, cwd, config);
1490
+ if (event === "post_llm_call" && process.env.HIVEMIND_WIKI_WORKER !== "1" && process.env.HIVEMIND_SKILIFY_WORKER !== "1") {
1491
+ tryStopCounterTrigger({
1492
+ config,
1493
+ cwd,
1494
+ bundleDir: bundleDirFromImportMeta(import.meta.url),
1495
+ agent: "hermes",
1496
+ sessionId
1497
+ });
1498
+ }
1158
1499
  }
1159
1500
  function maybeTriggerPeriodicSummary(sessionId, cwd, config) {
1160
1501
  if (process.env.HIVEMIND_WIKI_WORKER === "1")
@@ -299,6 +299,7 @@ function loadConfig() {
299
299
  apiUrl: process.env.HIVEMIND_API_URL ?? creds?.apiUrl ?? "https://api.deeplake.ai",
300
300
  tableName: process.env.HIVEMIND_TABLE ?? "memory",
301
301
  sessionsTableName: process.env.HIVEMIND_SESSIONS_TABLE ?? "sessions",
302
+ skillsTableName: process.env.HIVEMIND_SKILLS_TABLE ?? "skills",
302
303
  memoryPath: process.env.HIVEMIND_MEMORY_PATH ?? join2(home, ".deeplake", "memory")
303
304
  };
304
305
  }
@@ -323,6 +324,12 @@ function log(tag, msg) {
323
324
  function sqlStr(value) {
324
325
  return value.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/\0/g, "").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, "");
325
326
  }
327
+ function sqlIdent(name) {
328
+ if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
329
+ throw new Error(`Invalid SQL identifier: ${JSON.stringify(name)}`);
330
+ }
331
+ return name;
332
+ }
326
333
 
327
334
  // dist/src/embeddings/columns.js
328
335
  var SUMMARY_EMBEDDING_COL = "summary_embedding";
@@ -686,7 +693,7 @@ var DeeplakeApi = class {
686
693
  }
687
694
  /** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
688
695
  async ensureTable(name) {
689
- const tbl = name ?? this.tableName;
696
+ const tbl = sqlIdent(name ?? this.tableName);
690
697
  const tables = await this.listTables();
691
698
  if (!tables.includes(tbl)) {
692
699
  log2(`table "${tbl}" not found, creating`);
@@ -700,17 +707,40 @@ var DeeplakeApi = class {
700
707
  }
701
708
  /** Create the sessions table (uses JSONB for message since every row is a JSON event). */
702
709
  async ensureSessionsTable(name) {
710
+ const safe = sqlIdent(name);
703
711
  const tables = await this.listTables();
704
- if (!tables.includes(name)) {
705
- log2(`table "${name}" not found, creating`);
706
- await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${name}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, message_embedding FLOAT4[], author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`, name);
707
- log2(`table "${name}" created`);
708
- if (!tables.includes(name))
709
- this._tablesCache = [...tables, name];
712
+ if (!tables.includes(safe)) {
713
+ log2(`table "${safe}" not found, creating`);
714
+ await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, message_embedding FLOAT4[], author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`, safe);
715
+ log2(`table "${safe}" created`);
716
+ if (!tables.includes(safe))
717
+ this._tablesCache = [...tables, safe];
710
718
  }
711
- await this.ensureEmbeddingColumn(name, MESSAGE_EMBEDDING_COL);
712
- await this.ensureColumn(name, "agent", "TEXT NOT NULL DEFAULT ''");
713
- await this.ensureLookupIndex(name, "path_creation_date", `("path", "creation_date")`);
719
+ await this.ensureEmbeddingColumn(safe, MESSAGE_EMBEDDING_COL);
720
+ await this.ensureColumn(safe, "agent", "TEXT NOT NULL DEFAULT ''");
721
+ await this.ensureLookupIndex(safe, "path_creation_date", `("path", "creation_date")`);
722
+ }
723
+ /**
724
+ * Create the skills table.
725
+ *
726
+ * One row per skill version. Workers INSERT a fresh row on every KEEP /
727
+ * MERGE rather than UPDATE-ing in place, so the full version history is
728
+ * recoverable. Uniqueness in the *current* state is by (project_key, name)
729
+ * — newer rows shadow older ones at read time (ORDER BY version DESC).
730
+ * This sidesteps the Deeplake UPDATE-coalescing quirk that bit the wiki
731
+ * worker.
732
+ */
733
+ async ensureSkillsTable(name) {
734
+ const safe = sqlIdent(name);
735
+ const tables = await this.listTables();
736
+ if (!tables.includes(safe)) {
737
+ log2(`table "${safe}" not found, creating`);
738
+ await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', name TEXT NOT NULL DEFAULT '', project TEXT NOT NULL DEFAULT '', project_key TEXT NOT NULL DEFAULT '', local_path TEXT NOT NULL DEFAULT '', install TEXT NOT NULL DEFAULT 'project', source_sessions TEXT NOT NULL DEFAULT '[]', source_agent TEXT NOT NULL DEFAULT '', scope TEXT NOT NULL DEFAULT 'me', author TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', trigger_text TEXT NOT NULL DEFAULT '', body TEXT NOT NULL DEFAULT '', version BIGINT NOT NULL DEFAULT 1, created_at TEXT NOT NULL DEFAULT '', updated_at TEXT NOT NULL DEFAULT '') USING deeplake`, safe);
739
+ log2(`table "${safe}" created`);
740
+ if (!tables.includes(safe))
741
+ this._tablesCache = [...tables, safe];
742
+ }
743
+ await this.ensureLookupIndex(safe, "project_key_name", `("project_key", "name")`);
714
744
  }
715
745
  };
716
746
 
@@ -890,8 +920,24 @@ async function runAuthCommand(args) {
890
920
  console.log(`Org not found: ${target}`);
891
921
  process.exit(1);
892
922
  }
923
+ const prevWs = creds.workspaceId ?? "default";
924
+ const lcPrev = prevWs.toLowerCase();
925
+ const wsList = await listWorkspaces(creds.token, apiUrl, match.id);
926
+ const matchedWs = wsList.find((w) => w.id === prevWs || w.name && w.name.toLowerCase() === lcPrev);
893
927
  await switchOrg(match.id, match.name);
894
928
  console.log(`Switched to org: ${match.name}`);
929
+ if (!matchedWs) {
930
+ if (prevWs !== "default") {
931
+ await switchWorkspace("default");
932
+ console.log(`Workspace '${prevWs}' is not in org '${match.name}'. Reset workspace to 'default'.`);
933
+ if (wsList.length > 0) {
934
+ console.log(`Available workspaces: ${wsList.map((w) => w.name || w.id).join(", ")}`);
935
+ }
936
+ }
937
+ } else if (matchedWs.id !== prevWs) {
938
+ await switchWorkspace(matchedWs.id);
939
+ console.log(`Workspace name '${prevWs}' resolved to id '${matchedWs.id}' in org '${match.name}'.`);
940
+ }
895
941
  } else {
896
942
  console.log("Usage: org list | org switch <name-or-id>");
897
943
  }
@@ -903,7 +949,7 @@ async function runAuthCommand(args) {
903
949
  process.exit(1);
904
950
  }
905
951
  const ws = await listWorkspaces(creds.token, apiUrl, creds.orgId);
906
- ws.forEach((w) => console.log(`${w.id} ${w.name}`));
952
+ ws.forEach((w) => console.log(w.name || w.id));
907
953
  break;
908
954
  }
909
955
  case "workspace": {
@@ -911,14 +957,34 @@ async function runAuthCommand(args) {
911
957
  console.log("Not logged in.");
912
958
  process.exit(1);
913
959
  }
914
- const wsId = args[1];
915
- if (!wsId) {
916
- console.log("Usage: workspace <id>");
917
- process.exit(1);
960
+ const sub = args[1];
961
+ if (sub === "list") {
962
+ const wsList = await listWorkspaces(creds.token, apiUrl, creds.orgId);
963
+ wsList.forEach((w) => console.log(w.name || w.id));
964
+ break;
918
965
  }
919
- await switchWorkspace(wsId);
920
- console.log(`Switched to workspace: ${wsId}`);
921
- break;
966
+ if (sub === "switch") {
967
+ const target = args[2];
968
+ if (!target) {
969
+ console.log("Usage: workspace switch <name-or-id>");
970
+ process.exit(1);
971
+ }
972
+ const wsList = await listWorkspaces(creds.token, apiUrl, creds.orgId);
973
+ const lcTarget = target.toLowerCase();
974
+ const match = wsList.find((w) => w.id === target || w.name && w.name.toLowerCase() === lcTarget);
975
+ if (!match) {
976
+ console.log(`Workspace not found: ${target}`);
977
+ if (wsList.length > 0) {
978
+ console.log(`Available workspaces: ${wsList.map((w) => w.name || w.id).join(", ")}`);
979
+ }
980
+ process.exit(1);
981
+ }
982
+ await switchWorkspace(match.id);
983
+ console.log(`Switched to workspace: ${match.name || match.id}`);
984
+ break;
985
+ }
986
+ console.log("Usage: workspace list | workspace switch <name-or-id>");
987
+ process.exit(1);
922
988
  }
923
989
  case "invite": {
924
990
  if (!creds) {
@@ -96,6 +96,7 @@ function loadConfig() {
96
96
  apiUrl: process.env.HIVEMIND_API_URL ?? creds?.apiUrl ?? "https://api.deeplake.ai",
97
97
  tableName: process.env.HIVEMIND_TABLE ?? "memory",
98
98
  sessionsTableName: process.env.HIVEMIND_SESSIONS_TABLE ?? "sessions",
99
+ skillsTableName: process.env.HIVEMIND_SKILLS_TABLE ?? "skills",
99
100
  memoryPath: process.env.HIVEMIND_MEMORY_PATH ?? join(home, ".deeplake", "memory")
100
101
  };
101
102
  }
@@ -123,6 +124,12 @@ function sqlStr(value) {
123
124
  function sqlLike(value) {
124
125
  return sqlStr(value).replace(/%/g, "\\%").replace(/_/g, "\\_");
125
126
  }
127
+ function sqlIdent(name) {
128
+ if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
129
+ throw new Error(`Invalid SQL identifier: ${JSON.stringify(name)}`);
130
+ }
131
+ return name;
132
+ }
126
133
 
127
134
  // dist/src/embeddings/columns.js
128
135
  var SUMMARY_EMBEDDING_COL = "summary_embedding";
@@ -495,7 +502,7 @@ var DeeplakeApi = class {
495
502
  }
496
503
  /** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
497
504
  async ensureTable(name) {
498
- const tbl = name ?? this.tableName;
505
+ const tbl = sqlIdent(name ?? this.tableName);
499
506
  const tables = await this.listTables();
500
507
  if (!tables.includes(tbl)) {
501
508
  log2(`table "${tbl}" not found, creating`);
@@ -509,17 +516,40 @@ var DeeplakeApi = class {
509
516
  }
510
517
  /** Create the sessions table (uses JSONB for message since every row is a JSON event). */
511
518
  async ensureSessionsTable(name) {
519
+ const safe = sqlIdent(name);
520
+ const tables = await this.listTables();
521
+ if (!tables.includes(safe)) {
522
+ log2(`table "${safe}" not found, creating`);
523
+ await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, message_embedding FLOAT4[], author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`, safe);
524
+ log2(`table "${safe}" created`);
525
+ if (!tables.includes(safe))
526
+ this._tablesCache = [...tables, safe];
527
+ }
528
+ await this.ensureEmbeddingColumn(safe, MESSAGE_EMBEDDING_COL);
529
+ await this.ensureColumn(safe, "agent", "TEXT NOT NULL DEFAULT ''");
530
+ await this.ensureLookupIndex(safe, "path_creation_date", `("path", "creation_date")`);
531
+ }
532
+ /**
533
+ * Create the skills table.
534
+ *
535
+ * One row per skill version. Workers INSERT a fresh row on every KEEP /
536
+ * MERGE rather than UPDATE-ing in place, so the full version history is
537
+ * recoverable. Uniqueness in the *current* state is by (project_key, name)
538
+ * — newer rows shadow older ones at read time (ORDER BY version DESC).
539
+ * This sidesteps the Deeplake UPDATE-coalescing quirk that bit the wiki
540
+ * worker.
541
+ */
542
+ async ensureSkillsTable(name) {
543
+ const safe = sqlIdent(name);
512
544
  const tables = await this.listTables();
513
- if (!tables.includes(name)) {
514
- log2(`table "${name}" not found, creating`);
515
- await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${name}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, message_embedding FLOAT4[], author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`, name);
516
- log2(`table "${name}" created`);
517
- if (!tables.includes(name))
518
- this._tablesCache = [...tables, name];
545
+ if (!tables.includes(safe)) {
546
+ log2(`table "${safe}" not found, creating`);
547
+ await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', name TEXT NOT NULL DEFAULT '', project TEXT NOT NULL DEFAULT '', project_key TEXT NOT NULL DEFAULT '', local_path TEXT NOT NULL DEFAULT '', install TEXT NOT NULL DEFAULT 'project', source_sessions TEXT NOT NULL DEFAULT '[]', source_agent TEXT NOT NULL DEFAULT '', scope TEXT NOT NULL DEFAULT 'me', author TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', trigger_text TEXT NOT NULL DEFAULT '', body TEXT NOT NULL DEFAULT '', version BIGINT NOT NULL DEFAULT 1, created_at TEXT NOT NULL DEFAULT '', updated_at TEXT NOT NULL DEFAULT '') USING deeplake`, safe);
548
+ log2(`table "${safe}" created`);
549
+ if (!tables.includes(safe))
550
+ this._tablesCache = [...tables, safe];
519
551
  }
520
- await this.ensureEmbeddingColumn(name, MESSAGE_EMBEDDING_COL);
521
- await this.ensureColumn(name, "agent", "TEXT NOT NULL DEFAULT ''");
522
- await this.ensureLookupIndex(name, "path_creation_date", `("path", "creation_date")`);
552
+ await this.ensureLookupIndex(safe, "project_key_name", `("project_key", "name")`);
523
553
  }
524
554
  };
525
555