@deeplake/hivemind 0.7.16 → 0.7.18

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,13 +1,74 @@
1
1
  #!/usr/bin/env node
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropNames = Object.getOwnPropertyNames;
4
+ var __esm = (fn, res) => function __init() {
5
+ return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
6
+ };
7
+ var __export = (target, all) => {
8
+ for (var name in all)
9
+ __defProp(target, name, { get: all[name], enumerable: true });
10
+ };
11
+
12
+ // dist/src/index-marker-store.js
13
+ var index_marker_store_exports = {};
14
+ __export(index_marker_store_exports, {
15
+ buildIndexMarkerPath: () => buildIndexMarkerPath,
16
+ getIndexMarkerDir: () => getIndexMarkerDir,
17
+ hasFreshIndexMarker: () => hasFreshIndexMarker,
18
+ writeIndexMarker: () => writeIndexMarker
19
+ });
20
+ import { existsSync as existsSync2, mkdirSync as mkdirSync2, readFileSync as readFileSync4, writeFileSync as writeFileSync2 } from "node:fs";
21
+ import { join as join5 } from "node:path";
22
+ import { tmpdir } from "node:os";
23
+ function getIndexMarkerDir() {
24
+ return process.env.HIVEMIND_INDEX_MARKER_DIR ?? join5(tmpdir(), "hivemind-deeplake-indexes");
25
+ }
26
+ function buildIndexMarkerPath(workspaceId, orgId, table, suffix) {
27
+ const markerKey = [workspaceId, orgId, table, suffix].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_");
28
+ return join5(getIndexMarkerDir(), `${markerKey}.json`);
29
+ }
30
+ function hasFreshIndexMarker(markerPath) {
31
+ if (!existsSync2(markerPath))
32
+ return false;
33
+ try {
34
+ const raw = JSON.parse(readFileSync4(markerPath, "utf-8"));
35
+ const updatedAt = raw.updatedAt ? new Date(raw.updatedAt).getTime() : NaN;
36
+ if (!Number.isFinite(updatedAt) || Date.now() - updatedAt > INDEX_MARKER_TTL_MS)
37
+ return false;
38
+ return true;
39
+ } catch {
40
+ return false;
41
+ }
42
+ }
43
+ function writeIndexMarker(markerPath) {
44
+ mkdirSync2(getIndexMarkerDir(), { recursive: true });
45
+ writeFileSync2(markerPath, JSON.stringify({ updatedAt: (/* @__PURE__ */ new Date()).toISOString() }), "utf-8");
46
+ }
47
+ var INDEX_MARKER_TTL_MS;
48
+ var init_index_marker_store = __esm({
49
+ "dist/src/index-marker-store.js"() {
50
+ "use strict";
51
+ INDEX_MARKER_TTL_MS = Number(process.env.HIVEMIND_INDEX_MARKER_TTL_MS ?? 6 * 60 * 6e4);
52
+ }
53
+ });
2
54
 
3
55
  // dist/src/hooks/codex/session-start.js
4
56
  import { spawn } from "node:child_process";
5
57
  import { fileURLToPath } from "node:url";
6
- import { dirname as dirname2, join as join4 } from "node:path";
58
+ import { dirname as dirname4, join as join11 } from "node:path";
7
59
 
8
60
  // dist/src/commands/auth.js
9
61
  import { execSync } from "node:child_process";
10
62
 
63
+ // dist/src/utils/client-header.js
64
+ var DEEPLAKE_CLIENT_HEADER = "X-Deeplake-Client";
65
+ function deeplakeClientValue() {
66
+ return "hivemind";
67
+ }
68
+ function deeplakeClientHeader() {
69
+ return { [DEEPLAKE_CLIENT_HEADER]: deeplakeClientValue() };
70
+ }
71
+
11
72
  // dist/src/commands/auth-creds.js
12
73
  import { readFileSync, writeFileSync, mkdirSync, unlinkSync } from "node:fs";
13
74
  import { join } from "node:path";
@@ -98,9 +159,1075 @@ function getInstalledVersion(bundleDir, pluginManifestDir) {
98
159
  return null;
99
160
  }
100
161
 
162
+ // dist/src/config.js
163
+ import { readFileSync as readFileSync3, existsSync } from "node:fs";
164
+ import { join as join4 } from "node:path";
165
+ import { homedir as homedir3, userInfo } from "node:os";
166
+ function loadConfig() {
167
+ const home = homedir3();
168
+ const credPath = join4(home, ".deeplake", "credentials.json");
169
+ let creds = null;
170
+ if (existsSync(credPath)) {
171
+ try {
172
+ creds = JSON.parse(readFileSync3(credPath, "utf-8"));
173
+ } catch {
174
+ return null;
175
+ }
176
+ }
177
+ const token = process.env.HIVEMIND_TOKEN ?? creds?.token;
178
+ const orgId = process.env.HIVEMIND_ORG_ID ?? creds?.orgId;
179
+ if (!token || !orgId)
180
+ return null;
181
+ return {
182
+ token,
183
+ orgId,
184
+ orgName: creds?.orgName ?? orgId,
185
+ userName: creds?.userName || userInfo().username || "unknown",
186
+ workspaceId: process.env.HIVEMIND_WORKSPACE_ID ?? creds?.workspaceId ?? "default",
187
+ apiUrl: process.env.HIVEMIND_API_URL ?? creds?.apiUrl ?? "https://api.deeplake.ai",
188
+ tableName: process.env.HIVEMIND_TABLE ?? "memory",
189
+ sessionsTableName: process.env.HIVEMIND_SESSIONS_TABLE ?? "sessions",
190
+ skillsTableName: process.env.HIVEMIND_SKILLS_TABLE ?? "skills",
191
+ memoryPath: process.env.HIVEMIND_MEMORY_PATH ?? join4(home, ".deeplake", "memory")
192
+ };
193
+ }
194
+
195
+ // dist/src/deeplake-api.js
196
+ import { randomUUID } from "node:crypto";
197
+
198
+ // dist/src/utils/sql.js
199
+ function sqlStr(value) {
200
+ return value.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/\0/g, "").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, "");
201
+ }
202
+ function sqlIdent(name) {
203
+ if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
204
+ throw new Error(`Invalid SQL identifier: ${JSON.stringify(name)}`);
205
+ }
206
+ return name;
207
+ }
208
+
209
+ // dist/src/embeddings/columns.js
210
+ var SUMMARY_EMBEDDING_COL = "summary_embedding";
211
+ var MESSAGE_EMBEDDING_COL = "message_embedding";
212
+
213
+ // dist/src/deeplake-api.js
214
+ var indexMarkerStorePromise = null;
215
+ function getIndexMarkerStore() {
216
+ if (!indexMarkerStorePromise)
217
+ indexMarkerStorePromise = Promise.resolve().then(() => (init_index_marker_store(), index_marker_store_exports));
218
+ return indexMarkerStorePromise;
219
+ }
220
+ var log2 = (msg) => log("sdk", msg);
221
+ function summarizeSql(sql, maxLen = 220) {
222
+ const compact = sql.replace(/\s+/g, " ").trim();
223
+ return compact.length > maxLen ? `${compact.slice(0, maxLen)}...` : compact;
224
+ }
225
+ function traceSql(msg) {
226
+ const traceEnabled = process.env.HIVEMIND_TRACE_SQL === "1" || process.env.HIVEMIND_DEBUG === "1";
227
+ if (!traceEnabled)
228
+ return;
229
+ process.stderr.write(`[deeplake-sql] ${msg}
230
+ `);
231
+ if (process.env.HIVEMIND_DEBUG === "1")
232
+ log2(msg);
233
+ }
234
+ var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]);
235
+ var MAX_RETRIES = 3;
236
+ var BASE_DELAY_MS = 500;
237
+ var MAX_CONCURRENCY = 5;
238
+ var QUERY_TIMEOUT_MS = Number(process.env.HIVEMIND_QUERY_TIMEOUT_MS ?? 1e4);
239
+ function sleep(ms) {
240
+ return new Promise((resolve) => setTimeout(resolve, ms));
241
+ }
242
+ function isTimeoutError(error) {
243
+ const name = error instanceof Error ? error.name.toLowerCase() : "";
244
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
245
+ return name.includes("timeout") || name === "aborterror" || message.includes("timeout") || message.includes("timed out");
246
+ }
247
+ function isDuplicateIndexError(error) {
248
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
249
+ return message.includes("duplicate key value violates unique constraint") || message.includes("pg_class_relname_nsp_index") || message.includes("already exists");
250
+ }
251
+ function isSessionInsertQuery(sql) {
252
+ return /^\s*insert\s+into\s+"[^"]+"\s*\(\s*id\s*,\s*path\s*,\s*filename\s*,\s*message\s*,/i.test(sql);
253
+ }
254
+ function isTransientHtml403(text) {
255
+ const body = text.toLowerCase();
256
+ return body.includes("<html") || body.includes("403 forbidden") || body.includes("cloudflare") || body.includes("nginx");
257
+ }
258
+ var Semaphore = class {
259
+ max;
260
+ waiting = [];
261
+ active = 0;
262
+ constructor(max) {
263
+ this.max = max;
264
+ }
265
+ async acquire() {
266
+ if (this.active < this.max) {
267
+ this.active++;
268
+ return;
269
+ }
270
+ await new Promise((resolve) => this.waiting.push(resolve));
271
+ }
272
+ release() {
273
+ this.active--;
274
+ const next = this.waiting.shift();
275
+ if (next) {
276
+ this.active++;
277
+ next();
278
+ }
279
+ }
280
+ };
281
+ var DeeplakeApi = class {
282
+ token;
283
+ apiUrl;
284
+ orgId;
285
+ workspaceId;
286
+ tableName;
287
+ _pendingRows = [];
288
+ _sem = new Semaphore(MAX_CONCURRENCY);
289
+ _tablesCache = null;
290
+ constructor(token, apiUrl, orgId, workspaceId, tableName) {
291
+ this.token = token;
292
+ this.apiUrl = apiUrl;
293
+ this.orgId = orgId;
294
+ this.workspaceId = workspaceId;
295
+ this.tableName = tableName;
296
+ }
297
+ /** Execute SQL with retry on transient errors and bounded concurrency. */
298
+ async query(sql) {
299
+ const startedAt = Date.now();
300
+ const summary = summarizeSql(sql);
301
+ traceSql(`query start: ${summary}`);
302
+ await this._sem.acquire();
303
+ try {
304
+ const rows = await this._queryWithRetry(sql);
305
+ traceSql(`query ok (${Date.now() - startedAt}ms, rows=${rows.length}): ${summary}`);
306
+ return rows;
307
+ } catch (e) {
308
+ const message = e instanceof Error ? e.message : String(e);
309
+ traceSql(`query fail (${Date.now() - startedAt}ms): ${summary} :: ${message}`);
310
+ throw e;
311
+ } finally {
312
+ this._sem.release();
313
+ }
314
+ }
315
+ async _queryWithRetry(sql) {
316
+ let lastError;
317
+ for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
318
+ let resp;
319
+ try {
320
+ const signal = AbortSignal.timeout(QUERY_TIMEOUT_MS);
321
+ resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables/query`, {
322
+ method: "POST",
323
+ headers: {
324
+ Authorization: `Bearer ${this.token}`,
325
+ "Content-Type": "application/json",
326
+ "X-Activeloop-Org-Id": this.orgId,
327
+ ...deeplakeClientHeader()
328
+ },
329
+ signal,
330
+ body: JSON.stringify({ query: sql })
331
+ });
332
+ } catch (e) {
333
+ if (isTimeoutError(e)) {
334
+ lastError = new Error(`Query timeout after ${QUERY_TIMEOUT_MS}ms`);
335
+ throw lastError;
336
+ }
337
+ lastError = e instanceof Error ? e : new Error(String(e));
338
+ if (attempt < MAX_RETRIES) {
339
+ const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
340
+ log2(`query retry ${attempt + 1}/${MAX_RETRIES} (fetch error: ${lastError.message}) in ${delay.toFixed(0)}ms`);
341
+ await sleep(delay);
342
+ continue;
343
+ }
344
+ throw lastError;
345
+ }
346
+ if (resp.ok) {
347
+ const raw = await resp.json();
348
+ if (!raw?.rows || !raw?.columns)
349
+ return [];
350
+ return raw.rows.map((row) => Object.fromEntries(raw.columns.map((col, i) => [col, row[i]])));
351
+ }
352
+ const text = await resp.text().catch(() => "");
353
+ const retryable403 = isSessionInsertQuery(sql) && (resp.status === 401 || resp.status === 403 && (text.length === 0 || isTransientHtml403(text)));
354
+ const alreadyExists = resp.status === 500 && isDuplicateIndexError(text);
355
+ if (!alreadyExists && attempt < MAX_RETRIES && (RETRYABLE_CODES.has(resp.status) || retryable403)) {
356
+ const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
357
+ log2(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`);
358
+ await sleep(delay);
359
+ continue;
360
+ }
361
+ throw new Error(`Query failed: ${resp.status}: ${text.slice(0, 200)}`);
362
+ }
363
+ throw lastError ?? new Error("Query failed: max retries exceeded");
364
+ }
365
+ // ── Writes ──────────────────────────────────────────────────────────────────
366
+ /** Queue rows for writing. Call commit() to flush. */
367
+ appendRows(rows) {
368
+ this._pendingRows.push(...rows);
369
+ }
370
+ /** Flush pending rows via SQL. */
371
+ async commit() {
372
+ if (this._pendingRows.length === 0)
373
+ return;
374
+ const rows = this._pendingRows;
375
+ this._pendingRows = [];
376
+ const CONCURRENCY = 10;
377
+ for (let i = 0; i < rows.length; i += CONCURRENCY) {
378
+ const chunk = rows.slice(i, i + CONCURRENCY);
379
+ await Promise.allSettled(chunk.map((r) => this.upsertRowSql(r)));
380
+ }
381
+ log2(`commit: ${rows.length} rows`);
382
+ }
383
+ async upsertRowSql(row) {
384
+ const ts = (/* @__PURE__ */ new Date()).toISOString();
385
+ const cd = row.creationDate ?? ts;
386
+ const lud = row.lastUpdateDate ?? ts;
387
+ const exists = await this.query(`SELECT path FROM "${this.tableName}" WHERE path = '${sqlStr(row.path)}' LIMIT 1`);
388
+ if (exists.length > 0) {
389
+ let setClauses = `summary = E'${sqlStr(row.contentText)}', ${SUMMARY_EMBEDDING_COL} = NULL, mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`;
390
+ if (row.project !== void 0)
391
+ setClauses += `, project = '${sqlStr(row.project)}'`;
392
+ if (row.description !== void 0)
393
+ setClauses += `, description = '${sqlStr(row.description)}'`;
394
+ await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(row.path)}'`);
395
+ } else {
396
+ const id = randomUUID();
397
+ let cols = `id, path, filename, summary, ${SUMMARY_EMBEDDING_COL}, mime_type, size_bytes, creation_date, last_update_date`;
398
+ let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'${sqlStr(row.contentText)}', NULL, '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`;
399
+ if (row.project !== void 0) {
400
+ cols += ", project";
401
+ vals += `, '${sqlStr(row.project)}'`;
402
+ }
403
+ if (row.description !== void 0) {
404
+ cols += ", description";
405
+ vals += `, '${sqlStr(row.description)}'`;
406
+ }
407
+ await this.query(`INSERT INTO "${this.tableName}" (${cols}) VALUES (${vals})`);
408
+ }
409
+ }
410
+ /** Update specific columns on a row by path. */
411
+ async updateColumns(path, columns) {
412
+ const setClauses = Object.entries(columns).map(([col, val]) => typeof val === "number" ? `${col} = ${val}` : `${col} = '${sqlStr(String(val))}'`).join(", ");
413
+ await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(path)}'`);
414
+ }
415
+ // ── Convenience ─────────────────────────────────────────────────────────────
416
+ /** Create a BM25 search index on a column. */
417
+ async createIndex(column) {
418
+ await this.query(`CREATE INDEX IF NOT EXISTS idx_${sqlStr(column)}_bm25 ON "${this.tableName}" USING deeplake_index ("${column}")`);
419
+ }
420
+ buildLookupIndexName(table, suffix) {
421
+ return `idx_${table}_${suffix}`.replace(/[^a-zA-Z0-9_]/g, "_");
422
+ }
423
+ async ensureLookupIndex(table, suffix, columnsSql) {
424
+ const markers = await getIndexMarkerStore();
425
+ const markerPath = markers.buildIndexMarkerPath(this.workspaceId, this.orgId, table, suffix);
426
+ if (markers.hasFreshIndexMarker(markerPath))
427
+ return;
428
+ const indexName = this.buildLookupIndexName(table, suffix);
429
+ try {
430
+ await this.query(`CREATE INDEX IF NOT EXISTS "${indexName}" ON "${table}" ${columnsSql}`);
431
+ markers.writeIndexMarker(markerPath);
432
+ } catch (e) {
433
+ if (isDuplicateIndexError(e)) {
434
+ markers.writeIndexMarker(markerPath);
435
+ return;
436
+ }
437
+ log2(`index "${indexName}" skipped: ${e.message}`);
438
+ }
439
+ }
440
+ /**
441
+ * Ensure a vector column exists on the given table.
442
+ *
443
+ * The previous implementation always issued `ALTER TABLE ADD COLUMN IF NOT
444
+ * EXISTS …` on every SessionStart. On a long-running workspace that's
445
+ * already migrated, every call returns 500 "Column already exists" — noisy
446
+ * in the log and a wasted round-trip. Worse, the very first call after the
447
+ * column is genuinely added triggers Deeplake's post-ALTER `vector::at`
448
+ * window (~30s) during which subsequent INSERTs fail; minimising the
449
+ * number of ALTER calls minimises exposure to that window.
450
+ *
451
+ * New flow:
452
+ * 1. Check the local marker file (mirrors ensureLookupIndex). If fresh,
453
+ * return — zero network calls.
454
+ * 2. SELECT 1 FROM information_schema.columns WHERE table_name = T AND
455
+ * column_name = C. Read-only, idempotent, can't tickle the post-ALTER
456
+ * bug. If the column is present → mark + return.
457
+ * 3. Only if step 2 says the column is missing, fall back to ALTER ADD
458
+ * COLUMN IF NOT EXISTS. Mark on success, also mark if Deeplake reports
459
+ * "already exists" (race: another client added it between our SELECT
460
+ * and ALTER).
461
+ *
462
+ * Marker uses the same dir / TTL as ensureLookupIndex so both schema
463
+ * caches share an opt-out (HIVEMIND_INDEX_MARKER_DIR) and a TTL knob.
464
+ */
465
+ async ensureEmbeddingColumn(table, column) {
466
+ await this.ensureColumn(table, column, "FLOAT4[]");
467
+ }
468
+ /**
469
+ * Generic marker-gated column migration. Same SELECT-then-ALTER flow as
470
+ * ensureEmbeddingColumn, parameterized by SQL type so it can patch up any
471
+ * column that was added to the schema after the table was originally
472
+ * created. Used today for `summary_embedding`, `message_embedding`, and
473
+ * the `agent` column (added 2026-04-11) — the latter has no fallback if
474
+ * a user upgraded over a pre-2026-04-11 table, so every INSERT fails
475
+ * with `column "agent" does not exist`.
476
+ */
477
+ async ensureColumn(table, column, sqlType) {
478
+ const markers = await getIndexMarkerStore();
479
+ const markerPath = markers.buildIndexMarkerPath(this.workspaceId, this.orgId, table, `col_${column}`);
480
+ if (markers.hasFreshIndexMarker(markerPath))
481
+ return;
482
+ const colCheck = `SELECT 1 FROM information_schema.columns WHERE table_name = '${sqlStr(table)}' AND column_name = '${sqlStr(column)}' AND table_schema = '${sqlStr(this.workspaceId)}' LIMIT 1`;
483
+ const rows = await this.query(colCheck);
484
+ if (rows.length > 0) {
485
+ markers.writeIndexMarker(markerPath);
486
+ return;
487
+ }
488
+ try {
489
+ await this.query(`ALTER TABLE "${table}" ADD COLUMN ${column} ${sqlType}`);
490
+ } catch (e) {
491
+ const msg = e instanceof Error ? e.message : String(e);
492
+ if (!/already exists/i.test(msg))
493
+ throw e;
494
+ const recheck = await this.query(colCheck);
495
+ if (recheck.length === 0)
496
+ throw e;
497
+ }
498
+ markers.writeIndexMarker(markerPath);
499
+ }
500
+ /** List all tables in the workspace (with retry). */
501
+ async listTables(forceRefresh = false) {
502
+ if (!forceRefresh && this._tablesCache)
503
+ return [...this._tablesCache];
504
+ const { tables, cacheable } = await this._fetchTables();
505
+ if (cacheable)
506
+ this._tablesCache = [...tables];
507
+ return tables;
508
+ }
509
+ async _fetchTables() {
510
+ for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
511
+ try {
512
+ const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, {
513
+ headers: {
514
+ Authorization: `Bearer ${this.token}`,
515
+ "X-Activeloop-Org-Id": this.orgId,
516
+ ...deeplakeClientHeader()
517
+ }
518
+ });
519
+ if (resp.ok) {
520
+ const data = await resp.json();
521
+ return {
522
+ tables: (data.tables ?? []).map((t) => t.table_name),
523
+ cacheable: true
524
+ };
525
+ }
526
+ if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
527
+ await sleep(BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200);
528
+ continue;
529
+ }
530
+ return { tables: [], cacheable: false };
531
+ } catch {
532
+ if (attempt < MAX_RETRIES) {
533
+ await sleep(BASE_DELAY_MS * Math.pow(2, attempt));
534
+ continue;
535
+ }
536
+ return { tables: [], cacheable: false };
537
+ }
538
+ }
539
+ return { tables: [], cacheable: false };
540
+ }
541
+ /**
542
+ * Run a `CREATE TABLE` with an extra outer retry budget. The base
543
+ * `query()` already retries 3 times on fetch errors (~3.5s total), but a
544
+ * failed CREATE is permanent corruption — every subsequent SELECT against
545
+ * the missing table fails. Wrapping in an outer loop with longer backoff
546
+ * (2s, 5s, then 10s) gives us ~17s of reach across transient network
547
+ * blips before giving up. Failures still propagate; getApi() resets its
548
+ * cache on init failure (openclaw plugin) so the next call retries the
549
+ * whole init flow.
550
+ */
551
+ async createTableWithRetry(sql, label) {
552
+ const OUTER_BACKOFFS_MS = [2e3, 5e3, 1e4];
553
+ let lastErr = null;
554
+ for (let attempt = 0; attempt <= OUTER_BACKOFFS_MS.length; attempt++) {
555
+ try {
556
+ await this.query(sql);
557
+ return;
558
+ } catch (err) {
559
+ lastErr = err;
560
+ const msg = err instanceof Error ? err.message : String(err);
561
+ log2(`CREATE TABLE "${label}" attempt ${attempt + 1}/${OUTER_BACKOFFS_MS.length + 1} failed: ${msg}`);
562
+ if (attempt < OUTER_BACKOFFS_MS.length) {
563
+ await sleep(OUTER_BACKOFFS_MS[attempt]);
564
+ }
565
+ }
566
+ }
567
+ throw lastErr;
568
+ }
569
+ /** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
570
+ async ensureTable(name) {
571
+ const tbl = sqlIdent(name ?? this.tableName);
572
+ const tables = await this.listTables();
573
+ if (!tables.includes(tbl)) {
574
+ log2(`table "${tbl}" not found, creating`);
575
+ await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', summary_embedding FLOAT4[], author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`, tbl);
576
+ log2(`table "${tbl}" created`);
577
+ if (!tables.includes(tbl))
578
+ this._tablesCache = [...tables, tbl];
579
+ }
580
+ await this.ensureEmbeddingColumn(tbl, SUMMARY_EMBEDDING_COL);
581
+ await this.ensureColumn(tbl, "agent", "TEXT NOT NULL DEFAULT ''");
582
+ }
583
+ /** Create the sessions table (uses JSONB for message since every row is a JSON event). */
584
+ async ensureSessionsTable(name) {
585
+ const safe = sqlIdent(name);
586
+ const tables = await this.listTables();
587
+ if (!tables.includes(safe)) {
588
+ log2(`table "${safe}" not found, creating`);
589
+ await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, message_embedding FLOAT4[], author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`, safe);
590
+ log2(`table "${safe}" created`);
591
+ if (!tables.includes(safe))
592
+ this._tablesCache = [...tables, safe];
593
+ }
594
+ await this.ensureEmbeddingColumn(safe, MESSAGE_EMBEDDING_COL);
595
+ await this.ensureColumn(safe, "agent", "TEXT NOT NULL DEFAULT ''");
596
+ await this.ensureLookupIndex(safe, "path_creation_date", `("path", "creation_date")`);
597
+ }
598
+ /**
599
+ * Create the skills table.
600
+ *
601
+ * One row per skill version. Workers INSERT a fresh row on every KEEP /
602
+ * MERGE rather than UPDATE-ing in place, so the full version history is
603
+ * recoverable. Uniqueness in the *current* state is by (project_key, name)
604
+ * — newer rows shadow older ones at read time (ORDER BY version DESC).
605
+ * This sidesteps the Deeplake UPDATE-coalescing quirk that bit the wiki
606
+ * worker.
607
+ */
608
+ async ensureSkillsTable(name) {
609
+ const safe = sqlIdent(name);
610
+ const tables = await this.listTables();
611
+ if (!tables.includes(safe)) {
612
+ log2(`table "${safe}" not found, creating`);
613
+ await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', name TEXT NOT NULL DEFAULT '', project TEXT NOT NULL DEFAULT '', project_key TEXT NOT NULL DEFAULT '', local_path TEXT NOT NULL DEFAULT '', install TEXT NOT NULL DEFAULT 'project', source_sessions TEXT NOT NULL DEFAULT '[]', source_agent TEXT NOT NULL DEFAULT '', scope TEXT NOT NULL DEFAULT 'me', author TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', trigger_text TEXT NOT NULL DEFAULT '', body TEXT NOT NULL DEFAULT '', version BIGINT NOT NULL DEFAULT 1, created_at TEXT NOT NULL DEFAULT '', updated_at TEXT NOT NULL DEFAULT '') USING deeplake`, safe);
614
+ log2(`table "${safe}" created`);
615
+ if (!tables.includes(safe))
616
+ this._tablesCache = [...tables, safe];
617
+ }
618
+ await this.ensureLookupIndex(safe, "project_key_name", `("project_key", "name")`);
619
+ }
620
+ };
621
+
622
+ // dist/src/skillify/pull.js
623
+ import { existsSync as existsSync7, readFileSync as readFileSync7, writeFileSync as writeFileSync5, mkdirSync as mkdirSync5, renameSync as renameSync3, lstatSync as lstatSync2, readlinkSync, symlinkSync, unlinkSync as unlinkSync3 } from "node:fs";
624
+ import { homedir as homedir8 } from "node:os";
625
+ import { dirname as dirname3, join as join10 } from "node:path";
626
+
627
+ // dist/src/skillify/skill-writer.js
628
+ import { existsSync as existsSync3, mkdirSync as mkdirSync3, readFileSync as readFileSync5, readdirSync, statSync, writeFileSync as writeFileSync3 } from "node:fs";
629
+ import { homedir as homedir4 } from "node:os";
630
+ import { join as join6 } from "node:path";
631
+ function assertValidSkillName(name) {
632
+ if (typeof name !== "string" || name.length === 0) {
633
+ throw new Error(`invalid skill name: empty or non-string`);
634
+ }
635
+ if (name.length > 100) {
636
+ throw new Error(`invalid skill name: too long (${name.length} chars)`);
637
+ }
638
+ if (name.includes("/") || name.includes("\\") || name.includes("..")) {
639
+ throw new Error(`invalid skill name: contains path separator or '..': ${name}`);
640
+ }
641
+ if (!/^[a-z0-9]+(?:-[a-z0-9]+)*$/.test(name)) {
642
+ throw new Error(`invalid skill name: must be kebab-case (lowercase a-z, 0-9, hyphen): ${name}`);
643
+ }
644
+ }
645
+ function parseFrontmatter(text) {
646
+ if (!text.startsWith("---\n") && !text.startsWith("---\r\n"))
647
+ return null;
648
+ const end = text.indexOf("\n---", 4);
649
+ if (end < 0)
650
+ return null;
651
+ const head = text.slice(4, end).trim();
652
+ const body = text.slice(end + 4).replace(/^\r?\n/, "");
653
+ const fm = { source_sessions: [] };
654
+ let mode = "kv";
655
+ for (const raw of head.split(/\r?\n/)) {
656
+ if (mode === "sources") {
657
+ const m2 = raw.match(/^\s+-\s+(.+)$/);
658
+ if (m2) {
659
+ fm.source_sessions.push(m2[1].trim());
660
+ continue;
661
+ }
662
+ mode = "kv";
663
+ }
664
+ if (raw.startsWith("source_sessions:")) {
665
+ mode = "sources";
666
+ continue;
667
+ }
668
+ const m = raw.match(/^([a-zA-Z_]+):\s*(.*)$/);
669
+ if (!m)
670
+ continue;
671
+ const [, k, v] = m;
672
+ let val = v;
673
+ if (v.startsWith('"') && v.endsWith('"')) {
674
+ try {
675
+ val = JSON.parse(v);
676
+ } catch {
677
+ }
678
+ } else if (k === "version") {
679
+ const n = parseInt(v, 10);
680
+ if (Number.isFinite(n))
681
+ val = n;
682
+ }
683
+ fm[k] = val;
684
+ }
685
+ return { fm, body };
686
+ }
687
+
688
+ // dist/src/skillify/manifest.js
689
+ import { existsSync as existsSync5, lstatSync, mkdirSync as mkdirSync4, readFileSync as readFileSync6, renameSync as renameSync2, unlinkSync as unlinkSync2, writeFileSync as writeFileSync4 } from "node:fs";
690
+ import { homedir as homedir6 } from "node:os";
691
+ import { dirname as dirname2, join as join8 } from "node:path";
692
+
693
+ // dist/src/skillify/legacy-migration.js
694
+ import { existsSync as existsSync4, renameSync } from "node:fs";
695
+ import { homedir as homedir5 } from "node:os";
696
+ import { join as join7 } from "node:path";
697
+ var dlog = (msg) => log("skillify-migrate", msg);
698
+ var attempted = false;
699
+ function migrateLegacyStateDir() {
700
+ if (attempted)
701
+ return;
702
+ attempted = true;
703
+ const root = join7(homedir5(), ".deeplake", "state");
704
+ const legacy = join7(root, "skilify");
705
+ const current = join7(root, "skillify");
706
+ if (!existsSync4(legacy))
707
+ return;
708
+ if (existsSync4(current))
709
+ return;
710
+ try {
711
+ renameSync(legacy, current);
712
+ dlog(`migrated ${legacy} -> ${current}`);
713
+ } catch (err) {
714
+ const code = err.code;
715
+ if (code === "EXDEV" || code === "EPERM") {
716
+ dlog(`migration failed (${code}); leaving legacy dir in place`);
717
+ return;
718
+ }
719
+ throw err;
720
+ }
721
+ }
722
+
723
+ // dist/src/skillify/manifest.js
724
+ function emptyManifest() {
725
+ return { version: 1, entries: [] };
726
+ }
727
+ function manifestPath() {
728
+ return join8(homedir6(), ".deeplake", "state", "skillify", "pulled.json");
729
+ }
730
+ function loadManifest(path = manifestPath()) {
731
+ migrateLegacyStateDir();
732
+ if (!existsSync5(path))
733
+ return emptyManifest();
734
+ let raw;
735
+ try {
736
+ raw = readFileSync6(path, "utf-8");
737
+ } catch {
738
+ return emptyManifest();
739
+ }
740
+ try {
741
+ const parsed = JSON.parse(raw);
742
+ if (!parsed || typeof parsed !== "object")
743
+ return emptyManifest();
744
+ if (parsed.version !== 1 || !Array.isArray(parsed.entries))
745
+ return emptyManifest();
746
+ const entries = [];
747
+ for (const e of parsed.entries) {
748
+ if (!e || typeof e !== "object")
749
+ continue;
750
+ if (typeof e.dirName !== "string" || !e.dirName)
751
+ continue;
752
+ if (e.dirName.includes("/") || e.dirName.includes("\\") || e.dirName.includes(".."))
753
+ continue;
754
+ if (typeof e.name !== "string" || !e.name)
755
+ continue;
756
+ if (typeof e.author !== "string")
757
+ continue;
758
+ if (typeof e.installRoot !== "string" || !e.installRoot)
759
+ continue;
760
+ if (e.install !== "global" && e.install !== "project")
761
+ continue;
762
+ const symlinks = Array.isArray(e.symlinks) ? e.symlinks.filter((p) => typeof p === "string" && p.length > 0 && (p.startsWith("/") || /^[A-Za-z]:[\\/]/.test(p)) && // absolute (POSIX or Windows)
763
+ !p.includes("..")) : [];
764
+ entries.push({
765
+ dirName: e.dirName,
766
+ name: e.name,
767
+ author: e.author,
768
+ projectKey: typeof e.projectKey === "string" ? e.projectKey : "",
769
+ remoteVersion: typeof e.remoteVersion === "number" ? e.remoteVersion : 1,
770
+ install: e.install,
771
+ installRoot: e.installRoot,
772
+ pulledAt: typeof e.pulledAt === "string" ? e.pulledAt : (/* @__PURE__ */ new Date()).toISOString(),
773
+ symlinks
774
+ });
775
+ }
776
+ return { version: 1, entries };
777
+ } catch {
778
+ return emptyManifest();
779
+ }
780
+ }
781
+ function saveManifest(m, path = manifestPath()) {
782
+ migrateLegacyStateDir();
783
+ mkdirSync4(dirname2(path), { recursive: true });
784
+ const tmp = `${path}.tmp`;
785
+ writeFileSync4(tmp, JSON.stringify(m, null, 2) + "\n", { mode: 384 });
786
+ renameSync2(tmp, path);
787
+ }
788
+ function recordPull(entry, path = manifestPath()) {
789
+ const m = loadManifest(path);
790
+ const idx = m.entries.findIndex((e) => e.install === entry.install && e.installRoot === entry.installRoot && e.dirName === entry.dirName);
791
+ if (idx >= 0)
792
+ m.entries[idx] = entry;
793
+ else
794
+ m.entries.push(entry);
795
+ saveManifest(m, path);
796
+ }
797
+ function entriesForRoot(m, install, installRoot) {
798
+ return m.entries.filter((e) => e.install === install && e.installRoot === installRoot);
799
+ }
800
+ function unlinkSymlinks(paths) {
801
+ for (const path of paths) {
802
+ let st;
803
+ try {
804
+ st = lstatSync(path);
805
+ } catch {
806
+ continue;
807
+ }
808
+ if (!st.isSymbolicLink())
809
+ continue;
810
+ try {
811
+ unlinkSync2(path);
812
+ } catch {
813
+ }
814
+ }
815
+ }
816
+ function pruneOrphanedEntries(path = manifestPath()) {
817
+ const m = loadManifest(path);
818
+ const live = [];
819
+ let pruned = 0;
820
+ for (const e of m.entries) {
821
+ if (existsSync5(join8(e.installRoot, e.dirName))) {
822
+ live.push(e);
823
+ continue;
824
+ }
825
+ unlinkSymlinks(e.symlinks);
826
+ pruned++;
827
+ }
828
+ if (pruned > 0)
829
+ saveManifest({ version: 1, entries: live }, path);
830
+ return pruned;
831
+ }
832
+
833
+ // dist/src/skillify/agent-roots.js
834
+ import { existsSync as existsSync6 } from "node:fs";
835
+ import { homedir as homedir7 } from "node:os";
836
+ import { join as join9 } from "node:path";
837
+ function resolveDetected(home) {
838
+ const out = [];
839
+ const codexInstalled = existsSync6(join9(home, ".codex"));
840
+ const piInstalled = existsSync6(join9(home, ".pi", "agent"));
841
+ const hermesInstalled = existsSync6(join9(home, ".hermes"));
842
+ if (codexInstalled || piInstalled) {
843
+ out.push(join9(home, ".agents", "skills"));
844
+ }
845
+ if (hermesInstalled) {
846
+ out.push(join9(home, ".hermes", "skills"));
847
+ }
848
+ if (piInstalled) {
849
+ out.push(join9(home, ".pi", "agent", "skills"));
850
+ }
851
+ return out;
852
+ }
853
+ function detectAgentSkillsRoots(canonicalRoot, home = homedir7()) {
854
+ return resolveDetected(home).filter((p) => p !== canonicalRoot);
855
+ }
856
+
857
+ // dist/src/skillify/pull.js
858
+ function assertValidAuthor(author) {
859
+ if (!author)
860
+ throw new Error("author is empty");
861
+ if (author.length > 64)
862
+ throw new Error(`author too long (${author.length}): ${author.slice(0, 32)}\u2026`);
863
+ if (!/^[A-Za-z0-9_.\-@]+$/.test(author)) {
864
+ throw new Error(`author contains invalid characters: ${author}`);
865
+ }
866
+ }
867
+ function esc(s) {
868
+ return s.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, "");
869
+ }
870
+ function buildPullSql(args) {
871
+ const where = [];
872
+ if (args.users.length > 0) {
873
+ const list = args.users.map((u) => `'${esc(u)}'`).join(", ");
874
+ where.push(`author IN (${list})`);
875
+ }
876
+ if (args.skillName) {
877
+ where.push(`name = '${esc(args.skillName)}'`);
878
+ }
879
+ const whereClause = where.length > 0 ? ` WHERE ${where.join(" AND ")}` : "";
880
+ return `SELECT name, project, project_key, body, version, source_agent, scope, author, description, trigger_text, source_sessions, install, created_at, updated_at FROM "${args.tableName}"${whereClause} ORDER BY project_key ASC, name ASC, version DESC`;
881
+ }
882
+ function isMissingTableError(message) {
883
+ if (!message)
884
+ return false;
885
+ return /Table does not exist|relation .* does not exist|no such table/i.test(message);
886
+ }
887
+ function resolvePullDestination(install, cwd) {
888
+ if (install === "global")
889
+ return join10(homedir8(), ".claude", "skills");
890
+ if (!cwd)
891
+ throw new Error("install=project requires a cwd");
892
+ return join10(cwd, ".claude", "skills");
893
+ }
894
+ function fanOutSymlinks(canonicalDir, dirName, agentRoots) {
895
+ const out = [];
896
+ for (const root of agentRoots) {
897
+ const link = join10(root, dirName);
898
+ let existing;
899
+ try {
900
+ existing = lstatSync2(link);
901
+ } catch {
902
+ existing = null;
903
+ }
904
+ if (existing) {
905
+ if (!existing.isSymbolicLink()) {
906
+ continue;
907
+ }
908
+ let current;
909
+ try {
910
+ current = readlinkSync(link);
911
+ } catch {
912
+ current = null;
913
+ }
914
+ if (current === canonicalDir) {
915
+ out.push(link);
916
+ continue;
917
+ }
918
+ try {
919
+ unlinkSync3(link);
920
+ } catch {
921
+ continue;
922
+ }
923
+ }
924
+ try {
925
+ mkdirSync5(dirname3(link), { recursive: true });
926
+ symlinkSync(canonicalDir, link, "dir");
927
+ out.push(link);
928
+ } catch {
929
+ }
930
+ }
931
+ return out;
932
+ }
933
+ function backfillSymlinks(installRoot) {
934
+ const manifest = loadManifest();
935
+ const entries = entriesForRoot(manifest, "global", installRoot);
936
+ if (entries.length === 0)
937
+ return;
938
+ const detected = detectAgentSkillsRoots(installRoot);
939
+ for (const entry of entries) {
940
+ const canonical = join10(entry.installRoot, entry.dirName);
941
+ if (!existsSync7(canonical))
942
+ continue;
943
+ const fresh = fanOutSymlinks(canonical, entry.dirName, detected);
944
+ if (sameSorted(fresh, entry.symlinks))
945
+ continue;
946
+ try {
947
+ recordPull({ ...entry, symlinks: fresh });
948
+ } catch {
949
+ }
950
+ }
951
+ }
952
+ function sameSorted(a, b) {
953
+ if (a.length !== b.length)
954
+ return false;
955
+ const sa = [...a].sort();
956
+ const sb = [...b].sort();
957
+ for (let i = 0; i < sa.length; i++)
958
+ if (sa[i] !== sb[i])
959
+ return false;
960
+ return true;
961
+ }
962
+ function selectLatestPerName(rows) {
963
+ const seen = /* @__PURE__ */ new Set();
964
+ const out = [];
965
+ for (const r of rows) {
966
+ const name = String(r.name ?? "");
967
+ const projectKey = String(r.project_key ?? "");
968
+ if (!name)
969
+ continue;
970
+ const key = `${projectKey}\0${name}`;
971
+ if (seen.has(key))
972
+ continue;
973
+ seen.add(key);
974
+ out.push(r);
975
+ }
976
+ return out;
977
+ }
978
+ function renderSkillFile(row) {
979
+ const sources = parseSourceSessions(row.source_sessions);
980
+ const fm = {
981
+ name: String(row.name ?? ""),
982
+ description: String(row.description ?? ""),
983
+ trigger: typeof row.trigger_text === "string" && row.trigger_text.length > 0 ? String(row.trigger_text) : void 0,
984
+ source_sessions: sources,
985
+ version: Number(row.version ?? 1),
986
+ created_by_agent: String(row.source_agent ?? "unknown"),
987
+ created_at: String(row.created_at ?? (/* @__PURE__ */ new Date()).toISOString()),
988
+ updated_at: String(row.updated_at ?? (/* @__PURE__ */ new Date()).toISOString())
989
+ };
990
+ const body = String(row.body ?? "").trim();
991
+ return `${renderFrontmatter(fm)}
992
+
993
+ ${body}
994
+ `;
995
+ }
996
+ function parseSourceSessions(v) {
997
+ if (Array.isArray(v))
998
+ return v.map(String);
999
+ if (typeof v === "string") {
1000
+ try {
1001
+ const parsed = JSON.parse(v);
1002
+ if (Array.isArray(parsed))
1003
+ return parsed.map(String);
1004
+ } catch {
1005
+ }
1006
+ }
1007
+ return [];
1008
+ }
1009
+ function renderFrontmatter(fm) {
1010
+ const lines = ["---"];
1011
+ lines.push(`name: ${fm.name}`);
1012
+ lines.push(`description: ${JSON.stringify(fm.description)}`);
1013
+ if (fm.trigger)
1014
+ lines.push(`trigger: ${JSON.stringify(fm.trigger)}`);
1015
+ lines.push(`source_sessions:`);
1016
+ for (const s of fm.source_sessions)
1017
+ lines.push(` - ${s}`);
1018
+ lines.push(`version: ${fm.version}`);
1019
+ lines.push(`created_by_agent: ${fm.created_by_agent}`);
1020
+ lines.push(`created_at: ${fm.created_at}`);
1021
+ lines.push(`updated_at: ${fm.updated_at}`);
1022
+ lines.push("---");
1023
+ return lines.join("\n");
1024
+ }
1025
+ function readLocalVersion(path) {
1026
+ if (!existsSync7(path))
1027
+ return null;
1028
+ try {
1029
+ const text = readFileSync7(path, "utf-8");
1030
+ const parsed = parseFrontmatter(text);
1031
+ if (!parsed)
1032
+ return null;
1033
+ const v = parsed.fm.version;
1034
+ return typeof v === "number" ? v : null;
1035
+ } catch {
1036
+ return null;
1037
+ }
1038
+ }
1039
+ function decideAction(args) {
1040
+ const shouldWrite = args.localVersion === null || args.remoteVersion > args.localVersion || args.force;
1041
+ if (!shouldWrite)
1042
+ return "skipped";
1043
+ return args.dryRun ? "dryrun" : "wrote";
1044
+ }
1045
+ async function runPull(opts) {
1046
+ if (!opts.dryRun)
1047
+ pruneOrphanedEntries();
1048
+ const sql = buildPullSql({
1049
+ tableName: opts.tableName,
1050
+ users: opts.users,
1051
+ skillName: opts.skillName
1052
+ });
1053
+ let rows = [];
1054
+ try {
1055
+ rows = await opts.query(sql);
1056
+ } catch (e) {
1057
+ if (isMissingTableError(e?.message))
1058
+ rows = [];
1059
+ else
1060
+ throw e;
1061
+ }
1062
+ const latest = selectLatestPerName(rows);
1063
+ const root = resolvePullDestination(opts.install, opts.cwd);
1064
+ const summary = { scanned: latest.length, wrote: 0, skipped: 0, dryrun: 0, entries: [] };
1065
+ for (const row of latest) {
1066
+ const name = String(row.name ?? "");
1067
+ if (!name)
1068
+ continue;
1069
+ try {
1070
+ assertValidSkillName(name);
1071
+ } catch (e) {
1072
+ summary.entries.push({
1073
+ name,
1074
+ remoteVersion: Number(row.version ?? 1),
1075
+ localVersion: null,
1076
+ action: "skipped",
1077
+ destination: "(invalid name \u2014 skipped)",
1078
+ author: String(row.author ?? ""),
1079
+ sourceAgent: String(row.source_agent ?? "")
1080
+ });
1081
+ summary.skipped++;
1082
+ continue;
1083
+ }
1084
+ const author = String(row.author ?? "");
1085
+ if (!author) {
1086
+ summary.entries.push({
1087
+ name,
1088
+ remoteVersion: Number(row.version ?? 1),
1089
+ localVersion: null,
1090
+ action: "skipped",
1091
+ destination: "(empty author \u2014 skipped)",
1092
+ author: "",
1093
+ sourceAgent: String(row.source_agent ?? "")
1094
+ });
1095
+ summary.skipped++;
1096
+ continue;
1097
+ }
1098
+ let dirName;
1099
+ try {
1100
+ assertValidAuthor(author);
1101
+ dirName = `${name}--${author}`;
1102
+ } catch (e) {
1103
+ summary.entries.push({
1104
+ name,
1105
+ remoteVersion: Number(row.version ?? 1),
1106
+ localVersion: null,
1107
+ action: "skipped",
1108
+ destination: `(invalid author '${author}' \u2014 skipped)`,
1109
+ author,
1110
+ sourceAgent: String(row.source_agent ?? "")
1111
+ });
1112
+ summary.skipped++;
1113
+ continue;
1114
+ }
1115
+ const skillDir = join10(root, dirName);
1116
+ const skillFile = join10(skillDir, "SKILL.md");
1117
+ const remoteVersion = Number(row.version ?? 1);
1118
+ const localVersion = readLocalVersion(skillFile);
1119
+ const action = decideAction({
1120
+ remoteVersion,
1121
+ localVersion,
1122
+ force: opts.force ?? false,
1123
+ dryRun: opts.dryRun ?? false
1124
+ });
1125
+ let manifestError;
1126
+ if (action === "wrote") {
1127
+ mkdirSync5(skillDir, { recursive: true });
1128
+ if (existsSync7(skillFile)) {
1129
+ try {
1130
+ renameSync3(skillFile, `${skillFile}.bak`);
1131
+ } catch {
1132
+ }
1133
+ }
1134
+ writeFileSync5(skillFile, renderSkillFile(row));
1135
+ const symlinks = opts.install === "global" ? fanOutSymlinks(skillDir, dirName, detectAgentSkillsRoots(root)) : [];
1136
+ try {
1137
+ recordPull({
1138
+ dirName,
1139
+ name,
1140
+ author,
1141
+ projectKey: String(row.project_key ?? ""),
1142
+ remoteVersion,
1143
+ install: opts.install,
1144
+ installRoot: root,
1145
+ pulledAt: (/* @__PURE__ */ new Date()).toISOString(),
1146
+ symlinks
1147
+ });
1148
+ } catch (e) {
1149
+ manifestError = e?.message ?? String(e);
1150
+ }
1151
+ }
1152
+ summary.entries.push({
1153
+ name,
1154
+ remoteVersion,
1155
+ localVersion,
1156
+ action,
1157
+ destination: skillFile,
1158
+ author: String(row.author ?? ""),
1159
+ sourceAgent: String(row.source_agent ?? ""),
1160
+ manifestError
1161
+ });
1162
+ if (action === "wrote")
1163
+ summary.wrote++;
1164
+ else if (action === "dryrun")
1165
+ summary.dryrun++;
1166
+ else
1167
+ summary.skipped++;
1168
+ }
1169
+ if (!opts.dryRun && opts.install === "global") {
1170
+ backfillSymlinks(root);
1171
+ }
1172
+ return summary;
1173
+ }
1174
+
1175
// dist/src/skillify/auto-pull.js
// Logger namespaced for the auto-pull feature (delegates to the shared `log`).
var log3 = (msg) => log("skillify-autopull", msg);
// Default cap on how long a background auto-pull may run (5000 ms).
var DEFAULT_TIMEOUT_MS = 5e3;
1178
+ function withTimeout(p, ms) {
1179
+ let timer = null;
1180
+ const timeout = new Promise((_, reject) => {
1181
+ timer = setTimeout(() => reject(new Error(`autopull timeout after ${ms}ms`)), ms);
1182
+ if (typeof timer.unref === "function")
1183
+ timer.unref();
1184
+ });
1185
+ return Promise.race([p, timeout]).finally(() => {
1186
+ if (timer)
1187
+ clearTimeout(timer);
1188
+ });
1189
+ }
1190
+ async function autoPullSkills(deps = {}) {
1191
+ if (process.env.HIVEMIND_AUTOPULL_DISABLED === "1") {
1192
+ log3("disabled via HIVEMIND_AUTOPULL_DISABLED=1");
1193
+ return { pulled: 0, skipped: true, reason: "disabled" };
1194
+ }
1195
+ const loadFn = deps.loadConfigFn ?? loadConfig;
1196
+ const config = loadFn();
1197
+ if (!config) {
1198
+ log3("skipped: not logged in");
1199
+ return { pulled: 0, skipped: true, reason: "not-logged-in" };
1200
+ }
1201
+ let query;
1202
+ if (deps.queryFn) {
1203
+ query = deps.queryFn;
1204
+ } else {
1205
+ const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, config.skillsTableName);
1206
+ query = (sql) => api.query(sql);
1207
+ }
1208
+ const install = deps.install ?? "global";
1209
+ const timeoutMs = deps.timeoutMs ?? DEFAULT_TIMEOUT_MS;
1210
+ try {
1211
+ const summary = await withTimeout(runPull({
1212
+ query,
1213
+ tableName: config.skillsTableName,
1214
+ install,
1215
+ cwd: install === "project" ? deps.cwd ?? process.cwd() : void 0,
1216
+ users: [],
1217
+ dryRun: false,
1218
+ force: false
1219
+ }), timeoutMs);
1220
+ log3(`pulled scanned=${summary.scanned} wrote=${summary.wrote} skipped=${summary.skipped}`);
1221
+ return { pulled: summary.wrote, skipped: false };
1222
+ } catch (e) {
1223
+ log3(`pull failed (swallowed): ${e?.message ?? e}`);
1224
+ return { pulled: 0, skipped: true, reason: "error" };
1225
+ }
1226
+ }
1227
+
101
1228
  // dist/src/hooks/codex/session-start.js
102
- var log2 = (msg) => log("codex-session-start", msg);
103
- var __bundleDir = dirname2(fileURLToPath(import.meta.url));
1229
+ var log4 = (msg) => log("codex-session-start", msg);
1230
+ var __bundleDir = dirname4(fileURLToPath(import.meta.url));
104
1231
  var context = `DEEPLAKE MEMORY: Persistent memory at ~/.deeplake/memory/ shared across sessions, users, and agents.
105
1232
 
106
1233
  Deeplake memory has THREE tiers \u2014 pick the right one for the question:
@@ -130,35 +1257,35 @@ Organization management \u2014 each argument is SEPARATE (do NOT quote subcomman
130
1257
  - hivemind members \u2014 list members
131
1258
  - hivemind remove <user-id> \u2014 remove member
132
1259
 
133
- SKILLS (skilify) \u2014 mine + share reusable skills across the org:
134
- - hivemind skilify \u2014 show scope/team/install + per-project state
135
- - hivemind skilify pull \u2014 sync project skills from the org table
136
- - hivemind skilify pull --user <email> \u2014 only that author's skills
137
- - hivemind skilify pull --users a,b,c \u2014 multiple authors (CSV)
138
- - hivemind skilify pull --all-users \u2014 explicit "no author filter"
139
- - hivemind skilify pull --to project|global \u2014 install location
140
- - hivemind skilify pull --dry-run \u2014 preview only
141
- - hivemind skilify pull --force \u2014 overwrite local (creates .bak)
142
- - hivemind skilify pull <skill-name> \u2014 pull only that skill (combines with --user)
143
- - hivemind skilify unpull \u2014 remove every skill previously installed by pull
144
- - hivemind skilify unpull --user <email> \u2014 remove only that author's pulls
145
- - hivemind skilify unpull --not-mine \u2014 remove all pulls except your own
146
- - hivemind skilify unpull --dry-run \u2014 preview without touching disk
147
- - hivemind skilify scope <me|team|org> \u2014 sharing scope for new skills
148
- - hivemind skilify install <project|global> \u2014 default install location
149
- - hivemind skilify team add|remove|list <name> \u2014 manage team list`;
1260
+ SKILLS (skillify) \u2014 mine + share reusable skills across the org:
1261
+ - hivemind skillify \u2014 show scope/team/install + per-project state
1262
+ - hivemind skillify pull \u2014 sync project skills from the org table
1263
+ - hivemind skillify pull --user <email> \u2014 only that author's skills
1264
+ - hivemind skillify pull --users a,b,c \u2014 multiple authors (CSV)
1265
+ - hivemind skillify pull --all-users \u2014 explicit "no author filter"
1266
+ - hivemind skillify pull --to project|global \u2014 install location
1267
+ - hivemind skillify pull --dry-run \u2014 preview only
1268
+ - hivemind skillify pull --force \u2014 overwrite local (creates .bak)
1269
+ - hivemind skillify pull <skill-name> \u2014 pull only that skill (combines with --user)
1270
+ - hivemind skillify unpull \u2014 remove every skill previously installed by pull
1271
+ - hivemind skillify unpull --user <email> \u2014 remove only that author's pulls
1272
+ - hivemind skillify unpull --not-mine \u2014 remove all pulls except your own
1273
+ - hivemind skillify unpull --dry-run \u2014 preview without touching disk
1274
+ - hivemind skillify scope <me|team|org> \u2014 sharing scope for new skills
1275
+ - hivemind skillify install <project|global> \u2014 default install location
1276
+ - hivemind skillify team add|remove|list <name> \u2014 manage team list`;
150
1277
  async function main() {
151
1278
  if (process.env.HIVEMIND_WIKI_WORKER === "1")
152
1279
  return;
153
1280
  const input = await readStdin();
154
1281
  const creds = loadCredentials();
155
1282
  if (!creds?.token) {
156
- log2("no credentials found \u2014 run auth login to authenticate");
1283
+ log4("no credentials found \u2014 run auth login to authenticate");
157
1284
  } else {
158
- log2(`credentials loaded: org=${creds.orgName ?? creds.orgId}`);
1285
+ log4(`credentials loaded: org=${creds.orgName ?? creds.orgId}`);
159
1286
  }
160
1287
  if (creds?.token) {
161
- const setupScript = join4(__bundleDir, "session-start-setup.js");
1288
+ const setupScript = join11(__bundleDir, "session-start-setup.js");
162
1289
  const child = spawn("node", [setupScript], {
163
1290
  detached: true,
164
1291
  stdio: ["pipe", "ignore", "ignore"],
@@ -167,8 +1294,10 @@ async function main() {
167
1294
  child.stdin?.write(JSON.stringify(input));
168
1295
  child.stdin?.end();
169
1296
  child.unref();
170
- log2("spawned async setup process");
1297
+ log4("spawned async setup process");
171
1298
  }
1299
+ const pullResult = await autoPullSkills();
1300
+ log4(`autopull: pulled=${pullResult.pulled} skipped=${pullResult.skipped}`);
172
1301
  let versionNotice = "";
173
1302
  const current = getInstalledVersion(__bundleDir, ".codex-plugin");
174
1303
  if (current) {
@@ -181,6 +1310,6 @@ Not logged in to Deeplake. Run: hivemind login${versionNotice}`;
181
1310
  console.log(additionalContext);
182
1311
  }
183
1312
  main().catch((e) => {
184
- log2(`fatal: ${e.message}`);
1313
+ log4(`fatal: ${e.message}`);
185
1314
  process.exit(0);
186
1315
  });