@deeplake/hivemind 0.7.15 → 0.7.17

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,13 +1,74 @@
1
1
  #!/usr/bin/env node
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropNames = Object.getOwnPropertyNames;
4
+ var __esm = (fn, res) => function __init() {
5
+ return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
6
+ };
7
+ var __export = (target, all) => {
8
+ for (var name in all)
9
+ __defProp(target, name, { get: all[name], enumerable: true });
10
+ };
11
+
12
+ // dist/src/index-marker-store.js
13
+ var index_marker_store_exports = {};
14
+ __export(index_marker_store_exports, {
15
+ buildIndexMarkerPath: () => buildIndexMarkerPath,
16
+ getIndexMarkerDir: () => getIndexMarkerDir,
17
+ hasFreshIndexMarker: () => hasFreshIndexMarker,
18
+ writeIndexMarker: () => writeIndexMarker
19
+ });
20
+ import { existsSync as existsSync2, mkdirSync as mkdirSync2, readFileSync as readFileSync4, writeFileSync as writeFileSync2 } from "node:fs";
21
+ import { join as join5 } from "node:path";
22
+ import { tmpdir } from "node:os";
23
+ function getIndexMarkerDir() {
24
+ return process.env.HIVEMIND_INDEX_MARKER_DIR ?? join5(tmpdir(), "hivemind-deeplake-indexes");
25
+ }
26
+ function buildIndexMarkerPath(workspaceId, orgId, table, suffix) {
27
+ const markerKey = [workspaceId, orgId, table, suffix].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_");
28
+ return join5(getIndexMarkerDir(), `${markerKey}.json`);
29
+ }
30
+ function hasFreshIndexMarker(markerPath) {
31
+ if (!existsSync2(markerPath))
32
+ return false;
33
+ try {
34
+ const raw = JSON.parse(readFileSync4(markerPath, "utf-8"));
35
+ const updatedAt = raw.updatedAt ? new Date(raw.updatedAt).getTime() : NaN;
36
+ if (!Number.isFinite(updatedAt) || Date.now() - updatedAt > INDEX_MARKER_TTL_MS)
37
+ return false;
38
+ return true;
39
+ } catch {
40
+ return false;
41
+ }
42
+ }
43
+ function writeIndexMarker(markerPath) {
44
+ mkdirSync2(getIndexMarkerDir(), { recursive: true });
45
+ writeFileSync2(markerPath, JSON.stringify({ updatedAt: (/* @__PURE__ */ new Date()).toISOString() }), "utf-8");
46
+ }
47
+ var INDEX_MARKER_TTL_MS;
48
+ var init_index_marker_store = __esm({
49
+ "dist/src/index-marker-store.js"() {
50
+ "use strict";
51
+ INDEX_MARKER_TTL_MS = Number(process.env.HIVEMIND_INDEX_MARKER_TTL_MS ?? 6 * 60 * 6e4);
52
+ }
53
+ });
2
54
 
3
55
  // dist/src/hooks/codex/session-start.js
4
56
  import { spawn } from "node:child_process";
5
57
  import { fileURLToPath } from "node:url";
6
- import { dirname as dirname2, join as join4 } from "node:path";
58
+ import { dirname as dirname4, join as join10 } from "node:path";
7
59
 
8
60
  // dist/src/commands/auth.js
9
61
  import { execSync } from "node:child_process";
10
62
 
63
+ // dist/src/utils/client-header.js
64
+ var DEEPLAKE_CLIENT_HEADER = "X-Deeplake-Client";
65
+ function deeplakeClientValue() {
66
+ return "hivemind";
67
+ }
68
+ function deeplakeClientHeader() {
69
+ return { [DEEPLAKE_CLIENT_HEADER]: deeplakeClientValue() };
70
+ }
71
+
11
72
  // dist/src/commands/auth-creds.js
12
73
  import { readFileSync, writeFileSync, mkdirSync, unlinkSync } from "node:fs";
13
74
  import { join } from "node:path";
@@ -98,9 +159,1041 @@ function getInstalledVersion(bundleDir, pluginManifestDir) {
98
159
  return null;
99
160
  }
100
161
 
162
+ // dist/src/config.js
163
+ import { readFileSync as readFileSync3, existsSync } from "node:fs";
164
+ import { join as join4 } from "node:path";
165
+ import { homedir as homedir3, userInfo } from "node:os";
166
+ function loadConfig() {
167
+ const home = homedir3();
168
+ const credPath = join4(home, ".deeplake", "credentials.json");
169
+ let creds = null;
170
+ if (existsSync(credPath)) {
171
+ try {
172
+ creds = JSON.parse(readFileSync3(credPath, "utf-8"));
173
+ } catch {
174
+ return null;
175
+ }
176
+ }
177
+ const token = process.env.HIVEMIND_TOKEN ?? creds?.token;
178
+ const orgId = process.env.HIVEMIND_ORG_ID ?? creds?.orgId;
179
+ if (!token || !orgId)
180
+ return null;
181
+ return {
182
+ token,
183
+ orgId,
184
+ orgName: creds?.orgName ?? orgId,
185
+ userName: creds?.userName || userInfo().username || "unknown",
186
+ workspaceId: process.env.HIVEMIND_WORKSPACE_ID ?? creds?.workspaceId ?? "default",
187
+ apiUrl: process.env.HIVEMIND_API_URL ?? creds?.apiUrl ?? "https://api.deeplake.ai",
188
+ tableName: process.env.HIVEMIND_TABLE ?? "memory",
189
+ sessionsTableName: process.env.HIVEMIND_SESSIONS_TABLE ?? "sessions",
190
+ skillsTableName: process.env.HIVEMIND_SKILLS_TABLE ?? "skills",
191
+ memoryPath: process.env.HIVEMIND_MEMORY_PATH ?? join4(home, ".deeplake", "memory")
192
+ };
193
+ }
194
+
195
+ // dist/src/deeplake-api.js
196
+ import { randomUUID } from "node:crypto";
197
+
198
+ // dist/src/utils/sql.js
199
+ function sqlStr(value) {
200
+ return value.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/\0/g, "").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, "");
201
+ }
202
+ function sqlIdent(name) {
203
+ if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
204
+ throw new Error(`Invalid SQL identifier: ${JSON.stringify(name)}`);
205
+ }
206
+ return name;
207
+ }
208
+
209
+ // dist/src/embeddings/columns.js
210
+ var SUMMARY_EMBEDDING_COL = "summary_embedding";
211
+ var MESSAGE_EMBEDDING_COL = "message_embedding";
212
+
213
+ // dist/src/deeplake-api.js
214
+ var indexMarkerStorePromise = null;
215
+ function getIndexMarkerStore() {
216
+ if (!indexMarkerStorePromise)
217
+ indexMarkerStorePromise = Promise.resolve().then(() => (init_index_marker_store(), index_marker_store_exports));
218
+ return indexMarkerStorePromise;
219
+ }
220
+ var log2 = (msg) => log("sdk", msg);
221
+ function summarizeSql(sql, maxLen = 220) {
222
+ const compact = sql.replace(/\s+/g, " ").trim();
223
+ return compact.length > maxLen ? `${compact.slice(0, maxLen)}...` : compact;
224
+ }
225
+ function traceSql(msg) {
226
+ const traceEnabled = process.env.HIVEMIND_TRACE_SQL === "1" || process.env.HIVEMIND_DEBUG === "1";
227
+ if (!traceEnabled)
228
+ return;
229
+ process.stderr.write(`[deeplake-sql] ${msg}
230
+ `);
231
+ if (process.env.HIVEMIND_DEBUG === "1")
232
+ log2(msg);
233
+ }
234
+ var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]);
235
+ var MAX_RETRIES = 3;
236
+ var BASE_DELAY_MS = 500;
237
+ var MAX_CONCURRENCY = 5;
238
+ var QUERY_TIMEOUT_MS = Number(process.env.HIVEMIND_QUERY_TIMEOUT_MS ?? 1e4);
239
+ function sleep(ms) {
240
+ return new Promise((resolve) => setTimeout(resolve, ms));
241
+ }
242
+ function isTimeoutError(error) {
243
+ const name = error instanceof Error ? error.name.toLowerCase() : "";
244
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
245
+ return name.includes("timeout") || name === "aborterror" || message.includes("timeout") || message.includes("timed out");
246
+ }
247
+ function isDuplicateIndexError(error) {
248
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
249
+ return message.includes("duplicate key value violates unique constraint") || message.includes("pg_class_relname_nsp_index") || message.includes("already exists");
250
+ }
251
+ function isSessionInsertQuery(sql) {
252
+ return /^\s*insert\s+into\s+"[^"]+"\s*\(\s*id\s*,\s*path\s*,\s*filename\s*,\s*message\s*,/i.test(sql);
253
+ }
254
+ function isTransientHtml403(text) {
255
+ const body = text.toLowerCase();
256
+ return body.includes("<html") || body.includes("403 forbidden") || body.includes("cloudflare") || body.includes("nginx");
257
+ }
258
+ var Semaphore = class {
259
+ max;
260
+ waiting = [];
261
+ active = 0;
262
+ constructor(max) {
263
+ this.max = max;
264
+ }
265
+ async acquire() {
266
+ if (this.active < this.max) {
267
+ this.active++;
268
+ return;
269
+ }
270
+ await new Promise((resolve) => this.waiting.push(resolve));
271
+ }
272
+ release() {
273
+ this.active--;
274
+ const next = this.waiting.shift();
275
+ if (next) {
276
+ this.active++;
277
+ next();
278
+ }
279
+ }
280
+ };
281
+ var DeeplakeApi = class {
282
+ token;
283
+ apiUrl;
284
+ orgId;
285
+ workspaceId;
286
+ tableName;
287
+ _pendingRows = [];
288
+ _sem = new Semaphore(MAX_CONCURRENCY);
289
+ _tablesCache = null;
290
+ constructor(token, apiUrl, orgId, workspaceId, tableName) {
291
+ this.token = token;
292
+ this.apiUrl = apiUrl;
293
+ this.orgId = orgId;
294
+ this.workspaceId = workspaceId;
295
+ this.tableName = tableName;
296
+ }
297
+ /** Execute SQL with retry on transient errors and bounded concurrency. */
298
+ async query(sql) {
299
+ const startedAt = Date.now();
300
+ const summary = summarizeSql(sql);
301
+ traceSql(`query start: ${summary}`);
302
+ await this._sem.acquire();
303
+ try {
304
+ const rows = await this._queryWithRetry(sql);
305
+ traceSql(`query ok (${Date.now() - startedAt}ms, rows=${rows.length}): ${summary}`);
306
+ return rows;
307
+ } catch (e) {
308
+ const message = e instanceof Error ? e.message : String(e);
309
+ traceSql(`query fail (${Date.now() - startedAt}ms): ${summary} :: ${message}`);
310
+ throw e;
311
+ } finally {
312
+ this._sem.release();
313
+ }
314
+ }
315
+ async _queryWithRetry(sql) {
316
+ let lastError;
317
+ for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
318
+ let resp;
319
+ try {
320
+ const signal = AbortSignal.timeout(QUERY_TIMEOUT_MS);
321
+ resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables/query`, {
322
+ method: "POST",
323
+ headers: {
324
+ Authorization: `Bearer ${this.token}`,
325
+ "Content-Type": "application/json",
326
+ "X-Activeloop-Org-Id": this.orgId,
327
+ ...deeplakeClientHeader()
328
+ },
329
+ signal,
330
+ body: JSON.stringify({ query: sql })
331
+ });
332
+ } catch (e) {
333
+ if (isTimeoutError(e)) {
334
+ lastError = new Error(`Query timeout after ${QUERY_TIMEOUT_MS}ms`);
335
+ throw lastError;
336
+ }
337
+ lastError = e instanceof Error ? e : new Error(String(e));
338
+ if (attempt < MAX_RETRIES) {
339
+ const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
340
+ log2(`query retry ${attempt + 1}/${MAX_RETRIES} (fetch error: ${lastError.message}) in ${delay.toFixed(0)}ms`);
341
+ await sleep(delay);
342
+ continue;
343
+ }
344
+ throw lastError;
345
+ }
346
+ if (resp.ok) {
347
+ const raw = await resp.json();
348
+ if (!raw?.rows || !raw?.columns)
349
+ return [];
350
+ return raw.rows.map((row) => Object.fromEntries(raw.columns.map((col, i) => [col, row[i]])));
351
+ }
352
+ const text = await resp.text().catch(() => "");
353
+ const retryable403 = isSessionInsertQuery(sql) && (resp.status === 401 || resp.status === 403 && (text.length === 0 || isTransientHtml403(text)));
354
+ const alreadyExists = resp.status === 500 && isDuplicateIndexError(text);
355
+ if (!alreadyExists && attempt < MAX_RETRIES && (RETRYABLE_CODES.has(resp.status) || retryable403)) {
356
+ const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
357
+ log2(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`);
358
+ await sleep(delay);
359
+ continue;
360
+ }
361
+ throw new Error(`Query failed: ${resp.status}: ${text.slice(0, 200)}`);
362
+ }
363
+ throw lastError ?? new Error("Query failed: max retries exceeded");
364
+ }
365
+ // ── Writes ──────────────────────────────────────────────────────────────────
366
+ /** Queue rows for writing. Call commit() to flush. */
367
+ appendRows(rows) {
368
+ this._pendingRows.push(...rows);
369
+ }
370
+ /** Flush pending rows via SQL. */
371
+ async commit() {
372
+ if (this._pendingRows.length === 0)
373
+ return;
374
+ const rows = this._pendingRows;
375
+ this._pendingRows = [];
376
+ const CONCURRENCY = 10;
377
+ for (let i = 0; i < rows.length; i += CONCURRENCY) {
378
+ const chunk = rows.slice(i, i + CONCURRENCY);
379
+ await Promise.allSettled(chunk.map((r) => this.upsertRowSql(r)));
380
+ }
381
+ log2(`commit: ${rows.length} rows`);
382
+ }
383
+ async upsertRowSql(row) {
384
+ const ts = (/* @__PURE__ */ new Date()).toISOString();
385
+ const cd = row.creationDate ?? ts;
386
+ const lud = row.lastUpdateDate ?? ts;
387
+ const exists = await this.query(`SELECT path FROM "${this.tableName}" WHERE path = '${sqlStr(row.path)}' LIMIT 1`);
388
+ if (exists.length > 0) {
389
+ let setClauses = `summary = E'${sqlStr(row.contentText)}', ${SUMMARY_EMBEDDING_COL} = NULL, mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`;
390
+ if (row.project !== void 0)
391
+ setClauses += `, project = '${sqlStr(row.project)}'`;
392
+ if (row.description !== void 0)
393
+ setClauses += `, description = '${sqlStr(row.description)}'`;
394
+ await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(row.path)}'`);
395
+ } else {
396
+ const id = randomUUID();
397
+ let cols = `id, path, filename, summary, ${SUMMARY_EMBEDDING_COL}, mime_type, size_bytes, creation_date, last_update_date`;
398
+ let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'${sqlStr(row.contentText)}', NULL, '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`;
399
+ if (row.project !== void 0) {
400
+ cols += ", project";
401
+ vals += `, '${sqlStr(row.project)}'`;
402
+ }
403
+ if (row.description !== void 0) {
404
+ cols += ", description";
405
+ vals += `, '${sqlStr(row.description)}'`;
406
+ }
407
+ await this.query(`INSERT INTO "${this.tableName}" (${cols}) VALUES (${vals})`);
408
+ }
409
+ }
410
+ /** Update specific columns on a row by path. */
411
+ async updateColumns(path, columns) {
412
+ const setClauses = Object.entries(columns).map(([col, val]) => typeof val === "number" ? `${col} = ${val}` : `${col} = '${sqlStr(String(val))}'`).join(", ");
413
+ await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(path)}'`);
414
+ }
415
+ // ── Convenience ─────────────────────────────────────────────────────────────
416
+ /** Create a BM25 search index on a column. */
417
+ async createIndex(column) {
418
+ await this.query(`CREATE INDEX IF NOT EXISTS idx_${sqlStr(column)}_bm25 ON "${this.tableName}" USING deeplake_index ("${column}")`);
419
+ }
420
+ buildLookupIndexName(table, suffix) {
421
+ return `idx_${table}_${suffix}`.replace(/[^a-zA-Z0-9_]/g, "_");
422
+ }
423
+ async ensureLookupIndex(table, suffix, columnsSql) {
424
+ const markers = await getIndexMarkerStore();
425
+ const markerPath = markers.buildIndexMarkerPath(this.workspaceId, this.orgId, table, suffix);
426
+ if (markers.hasFreshIndexMarker(markerPath))
427
+ return;
428
+ const indexName = this.buildLookupIndexName(table, suffix);
429
+ try {
430
+ await this.query(`CREATE INDEX IF NOT EXISTS "${indexName}" ON "${table}" ${columnsSql}`);
431
+ markers.writeIndexMarker(markerPath);
432
+ } catch (e) {
433
+ if (isDuplicateIndexError(e)) {
434
+ markers.writeIndexMarker(markerPath);
435
+ return;
436
+ }
437
+ log2(`index "${indexName}" skipped: ${e.message}`);
438
+ }
439
+ }
440
+ /**
441
+ * Ensure a vector column exists on the given table.
442
+ *
443
+ * The previous implementation always issued `ALTER TABLE ADD COLUMN IF NOT
444
+ * EXISTS …` on every SessionStart. On a long-running workspace that's
445
+ * already migrated, every call returns 500 "Column already exists" — noisy
446
+ * in the log and a wasted round-trip. Worse, the very first call after the
447
+ * column is genuinely added triggers Deeplake's post-ALTER `vector::at`
448
+ * window (~30s) during which subsequent INSERTs fail; minimising the
449
+ * number of ALTER calls minimises exposure to that window.
450
+ *
451
+ * New flow:
452
+ * 1. Check the local marker file (mirrors ensureLookupIndex). If fresh,
453
+ * return — zero network calls.
454
+ * 2. SELECT 1 FROM information_schema.columns WHERE table_name = T AND
455
+ * column_name = C. Read-only, idempotent, can't tickle the post-ALTER
456
+ * bug. If the column is present → mark + return.
457
+ * 3. Only if step 2 says the column is missing, fall back to ALTER ADD
458
+ * COLUMN IF NOT EXISTS. Mark on success, also mark if Deeplake reports
459
+ * "already exists" (race: another client added it between our SELECT
460
+ * and ALTER).
461
+ *
462
+ * Marker uses the same dir / TTL as ensureLookupIndex so both schema
463
+ * caches share an opt-out (HIVEMIND_INDEX_MARKER_DIR) and a TTL knob.
464
+ */
465
+ async ensureEmbeddingColumn(table, column) {
466
+ await this.ensureColumn(table, column, "FLOAT4[]");
467
+ }
468
+ /**
469
+ * Generic marker-gated column migration. Same SELECT-then-ALTER flow as
470
+ * ensureEmbeddingColumn, parameterized by SQL type so it can patch up any
471
+ * column that was added to the schema after the table was originally
472
+ * created. Used today for `summary_embedding`, `message_embedding`, and
473
+ * the `agent` column (added 2026-04-11) — the latter has no fallback if
474
+ * a user upgraded over a pre-2026-04-11 table, so every INSERT fails
475
+ * with `column "agent" does not exist`.
476
+ */
477
+ async ensureColumn(table, column, sqlType) {
478
+ const markers = await getIndexMarkerStore();
479
+ const markerPath = markers.buildIndexMarkerPath(this.workspaceId, this.orgId, table, `col_${column}`);
480
+ if (markers.hasFreshIndexMarker(markerPath))
481
+ return;
482
+ const colCheck = `SELECT 1 FROM information_schema.columns WHERE table_name = '${sqlStr(table)}' AND column_name = '${sqlStr(column)}' AND table_schema = '${sqlStr(this.workspaceId)}' LIMIT 1`;
483
+ const rows = await this.query(colCheck);
484
+ if (rows.length > 0) {
485
+ markers.writeIndexMarker(markerPath);
486
+ return;
487
+ }
488
+ try {
489
+ await this.query(`ALTER TABLE "${table}" ADD COLUMN ${column} ${sqlType}`);
490
+ } catch (e) {
491
+ const msg = e instanceof Error ? e.message : String(e);
492
+ if (!/already exists/i.test(msg))
493
+ throw e;
494
+ const recheck = await this.query(colCheck);
495
+ if (recheck.length === 0)
496
+ throw e;
497
+ }
498
+ markers.writeIndexMarker(markerPath);
499
+ }
500
+ /** List all tables in the workspace (with retry). */
501
+ async listTables(forceRefresh = false) {
502
+ if (!forceRefresh && this._tablesCache)
503
+ return [...this._tablesCache];
504
+ const { tables, cacheable } = await this._fetchTables();
505
+ if (cacheable)
506
+ this._tablesCache = [...tables];
507
+ return tables;
508
+ }
509
+ async _fetchTables() {
510
+ for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
511
+ try {
512
+ const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, {
513
+ headers: {
514
+ Authorization: `Bearer ${this.token}`,
515
+ "X-Activeloop-Org-Id": this.orgId,
516
+ ...deeplakeClientHeader()
517
+ }
518
+ });
519
+ if (resp.ok) {
520
+ const data = await resp.json();
521
+ return {
522
+ tables: (data.tables ?? []).map((t) => t.table_name),
523
+ cacheable: true
524
+ };
525
+ }
526
+ if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
527
+ await sleep(BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200);
528
+ continue;
529
+ }
530
+ return { tables: [], cacheable: false };
531
+ } catch {
532
+ if (attempt < MAX_RETRIES) {
533
+ await sleep(BASE_DELAY_MS * Math.pow(2, attempt));
534
+ continue;
535
+ }
536
+ return { tables: [], cacheable: false };
537
+ }
538
+ }
539
+ return { tables: [], cacheable: false };
540
+ }
541
+ /**
542
+ * Run a `CREATE TABLE` with an extra outer retry budget. The base
543
+ * `query()` already retries 3 times on fetch errors (~3.5s total), but a
544
+ * failed CREATE is permanent corruption — every subsequent SELECT against
545
+ * the missing table fails. Wrapping in an outer loop with longer backoff
546
+ * (2s, 5s, then 10s) gives us ~17s of reach across transient network
547
+ * blips before giving up. Failures still propagate; getApi() resets its
548
+ * cache on init failure (openclaw plugin) so the next call retries the
549
+ * whole init flow.
550
+ */
551
+ async createTableWithRetry(sql, label) {
552
+ const OUTER_BACKOFFS_MS = [2e3, 5e3, 1e4];
553
+ let lastErr = null;
554
+ for (let attempt = 0; attempt <= OUTER_BACKOFFS_MS.length; attempt++) {
555
+ try {
556
+ await this.query(sql);
557
+ return;
558
+ } catch (err) {
559
+ lastErr = err;
560
+ const msg = err instanceof Error ? err.message : String(err);
561
+ log2(`CREATE TABLE "${label}" attempt ${attempt + 1}/${OUTER_BACKOFFS_MS.length + 1} failed: ${msg}`);
562
+ if (attempt < OUTER_BACKOFFS_MS.length) {
563
+ await sleep(OUTER_BACKOFFS_MS[attempt]);
564
+ }
565
+ }
566
+ }
567
+ throw lastErr;
568
+ }
569
+ /** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
570
+ async ensureTable(name) {
571
+ const tbl = sqlIdent(name ?? this.tableName);
572
+ const tables = await this.listTables();
573
+ if (!tables.includes(tbl)) {
574
+ log2(`table "${tbl}" not found, creating`);
575
+ await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', summary_embedding FLOAT4[], author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`, tbl);
576
+ log2(`table "${tbl}" created`);
577
+ if (!tables.includes(tbl))
578
+ this._tablesCache = [...tables, tbl];
579
+ }
580
+ await this.ensureEmbeddingColumn(tbl, SUMMARY_EMBEDDING_COL);
581
+ await this.ensureColumn(tbl, "agent", "TEXT NOT NULL DEFAULT ''");
582
+ }
583
+ /** Create the sessions table (uses JSONB for message since every row is a JSON event). */
584
+ async ensureSessionsTable(name) {
585
+ const safe = sqlIdent(name);
586
+ const tables = await this.listTables();
587
+ if (!tables.includes(safe)) {
588
+ log2(`table "${safe}" not found, creating`);
589
+ await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, message_embedding FLOAT4[], author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`, safe);
590
+ log2(`table "${safe}" created`);
591
+ if (!tables.includes(safe))
592
+ this._tablesCache = [...tables, safe];
593
+ }
594
+ await this.ensureEmbeddingColumn(safe, MESSAGE_EMBEDDING_COL);
595
+ await this.ensureColumn(safe, "agent", "TEXT NOT NULL DEFAULT ''");
596
+ await this.ensureLookupIndex(safe, "path_creation_date", `("path", "creation_date")`);
597
+ }
598
+ /**
599
+ * Create the skills table.
600
+ *
601
+ * One row per skill version. Workers INSERT a fresh row on every KEEP /
602
+ * MERGE rather than UPDATE-ing in place, so the full version history is
603
+ * recoverable. Uniqueness in the *current* state is by (project_key, name)
604
+ * — newer rows shadow older ones at read time (ORDER BY version DESC).
605
+ * This sidesteps the Deeplake UPDATE-coalescing quirk that bit the wiki
606
+ * worker.
607
+ */
608
+ async ensureSkillsTable(name) {
609
+ const safe = sqlIdent(name);
610
+ const tables = await this.listTables();
611
+ if (!tables.includes(safe)) {
612
+ log2(`table "${safe}" not found, creating`);
613
+ await this.createTableWithRetry(`CREATE TABLE IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', name TEXT NOT NULL DEFAULT '', project TEXT NOT NULL DEFAULT '', project_key TEXT NOT NULL DEFAULT '', local_path TEXT NOT NULL DEFAULT '', install TEXT NOT NULL DEFAULT 'project', source_sessions TEXT NOT NULL DEFAULT '[]', source_agent TEXT NOT NULL DEFAULT '', scope TEXT NOT NULL DEFAULT 'me', author TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', trigger_text TEXT NOT NULL DEFAULT '', body TEXT NOT NULL DEFAULT '', version BIGINT NOT NULL DEFAULT 1, created_at TEXT NOT NULL DEFAULT '', updated_at TEXT NOT NULL DEFAULT '') USING deeplake`, safe);
614
+ log2(`table "${safe}" created`);
615
+ if (!tables.includes(safe))
616
+ this._tablesCache = [...tables, safe];
617
+ }
618
+ await this.ensureLookupIndex(safe, "project_key_name", `("project_key", "name")`);
619
+ }
620
+ };
621
+
622
+ // dist/src/skilify/pull.js
623
+ import { existsSync as existsSync6, readFileSync as readFileSync7, writeFileSync as writeFileSync5, mkdirSync as mkdirSync5, renameSync as renameSync2, lstatSync as lstatSync2, readlinkSync, symlinkSync, unlinkSync as unlinkSync3 } from "node:fs";
624
+ import { homedir as homedir7 } from "node:os";
625
+ import { dirname as dirname3, join as join9 } from "node:path";
626
+
627
+ // dist/src/skilify/skill-writer.js
628
+ import { existsSync as existsSync3, mkdirSync as mkdirSync3, readFileSync as readFileSync5, readdirSync, statSync, writeFileSync as writeFileSync3 } from "node:fs";
629
+ import { homedir as homedir4 } from "node:os";
630
+ import { join as join6 } from "node:path";
631
+ function assertValidSkillName(name) {
632
+ if (typeof name !== "string" || name.length === 0) {
633
+ throw new Error(`invalid skill name: empty or non-string`);
634
+ }
635
+ if (name.length > 100) {
636
+ throw new Error(`invalid skill name: too long (${name.length} chars)`);
637
+ }
638
+ if (name.includes("/") || name.includes("\\") || name.includes("..")) {
639
+ throw new Error(`invalid skill name: contains path separator or '..': ${name}`);
640
+ }
641
+ if (!/^[a-z0-9]+(?:-[a-z0-9]+)*$/.test(name)) {
642
+ throw new Error(`invalid skill name: must be kebab-case (lowercase a-z, 0-9, hyphen): ${name}`);
643
+ }
644
+ }
645
+ function parseFrontmatter(text) {
646
+ if (!text.startsWith("---\n") && !text.startsWith("---\r\n"))
647
+ return null;
648
+ const end = text.indexOf("\n---", 4);
649
+ if (end < 0)
650
+ return null;
651
+ const head = text.slice(4, end).trim();
652
+ const body = text.slice(end + 4).replace(/^\r?\n/, "");
653
+ const fm = { source_sessions: [] };
654
+ let mode = "kv";
655
+ for (const raw of head.split(/\r?\n/)) {
656
+ if (mode === "sources") {
657
+ const m2 = raw.match(/^\s+-\s+(.+)$/);
658
+ if (m2) {
659
+ fm.source_sessions.push(m2[1].trim());
660
+ continue;
661
+ }
662
+ mode = "kv";
663
+ }
664
+ if (raw.startsWith("source_sessions:")) {
665
+ mode = "sources";
666
+ continue;
667
+ }
668
+ const m = raw.match(/^([a-zA-Z_]+):\s*(.*)$/);
669
+ if (!m)
670
+ continue;
671
+ const [, k, v] = m;
672
+ let val = v;
673
+ if (v.startsWith('"') && v.endsWith('"')) {
674
+ try {
675
+ val = JSON.parse(v);
676
+ } catch {
677
+ }
678
+ } else if (k === "version") {
679
+ const n = parseInt(v, 10);
680
+ if (Number.isFinite(n))
681
+ val = n;
682
+ }
683
+ fm[k] = val;
684
+ }
685
+ return { fm, body };
686
+ }
687
+
688
+ // dist/src/skilify/manifest.js
689
+ import { existsSync as existsSync4, lstatSync, mkdirSync as mkdirSync4, readFileSync as readFileSync6, renameSync, unlinkSync as unlinkSync2, writeFileSync as writeFileSync4 } from "node:fs";
690
+ import { homedir as homedir5 } from "node:os";
691
+ import { dirname as dirname2, join as join7 } from "node:path";
692
+ function emptyManifest() {
693
+ return { version: 1, entries: [] };
694
+ }
695
+ function manifestPath() {
696
+ return join7(homedir5(), ".deeplake", "state", "skilify", "pulled.json");
697
+ }
698
+ function loadManifest(path = manifestPath()) {
699
+ if (!existsSync4(path))
700
+ return emptyManifest();
701
+ let raw;
702
+ try {
703
+ raw = readFileSync6(path, "utf-8");
704
+ } catch {
705
+ return emptyManifest();
706
+ }
707
+ try {
708
+ const parsed = JSON.parse(raw);
709
+ if (!parsed || typeof parsed !== "object")
710
+ return emptyManifest();
711
+ if (parsed.version !== 1 || !Array.isArray(parsed.entries))
712
+ return emptyManifest();
713
+ const entries = [];
714
+ for (const e of parsed.entries) {
715
+ if (!e || typeof e !== "object")
716
+ continue;
717
+ if (typeof e.dirName !== "string" || !e.dirName)
718
+ continue;
719
+ if (e.dirName.includes("/") || e.dirName.includes("\\") || e.dirName.includes(".."))
720
+ continue;
721
+ if (typeof e.name !== "string" || !e.name)
722
+ continue;
723
+ if (typeof e.author !== "string")
724
+ continue;
725
+ if (typeof e.installRoot !== "string" || !e.installRoot)
726
+ continue;
727
+ if (e.install !== "global" && e.install !== "project")
728
+ continue;
729
+ const symlinks = Array.isArray(e.symlinks) ? e.symlinks.filter((p) => typeof p === "string" && p.length > 0 && (p.startsWith("/") || /^[A-Za-z]:[\\/]/.test(p)) && // absolute (POSIX or Windows)
730
+ !p.includes("..")) : [];
731
+ entries.push({
732
+ dirName: e.dirName,
733
+ name: e.name,
734
+ author: e.author,
735
+ projectKey: typeof e.projectKey === "string" ? e.projectKey : "",
736
+ remoteVersion: typeof e.remoteVersion === "number" ? e.remoteVersion : 1,
737
+ install: e.install,
738
+ installRoot: e.installRoot,
739
+ pulledAt: typeof e.pulledAt === "string" ? e.pulledAt : (/* @__PURE__ */ new Date()).toISOString(),
740
+ symlinks
741
+ });
742
+ }
743
+ return { version: 1, entries };
744
+ } catch {
745
+ return emptyManifest();
746
+ }
747
+ }
748
+ function saveManifest(m, path = manifestPath()) {
749
+ mkdirSync4(dirname2(path), { recursive: true });
750
+ const tmp = `${path}.tmp`;
751
+ writeFileSync4(tmp, JSON.stringify(m, null, 2) + "\n", { mode: 384 });
752
+ renameSync(tmp, path);
753
+ }
754
+ function recordPull(entry, path = manifestPath()) {
755
+ const m = loadManifest(path);
756
+ const idx = m.entries.findIndex((e) => e.install === entry.install && e.installRoot === entry.installRoot && e.dirName === entry.dirName);
757
+ if (idx >= 0)
758
+ m.entries[idx] = entry;
759
+ else
760
+ m.entries.push(entry);
761
+ saveManifest(m, path);
762
+ }
763
+ function entriesForRoot(m, install, installRoot) {
764
+ return m.entries.filter((e) => e.install === install && e.installRoot === installRoot);
765
+ }
766
+ function unlinkSymlinks(paths) {
767
+ for (const path of paths) {
768
+ let st;
769
+ try {
770
+ st = lstatSync(path);
771
+ } catch {
772
+ continue;
773
+ }
774
+ if (!st.isSymbolicLink())
775
+ continue;
776
+ try {
777
+ unlinkSync2(path);
778
+ } catch {
779
+ }
780
+ }
781
+ }
782
+ function pruneOrphanedEntries(path = manifestPath()) {
783
+ const m = loadManifest(path);
784
+ const live = [];
785
+ let pruned = 0;
786
+ for (const e of m.entries) {
787
+ if (existsSync4(join7(e.installRoot, e.dirName))) {
788
+ live.push(e);
789
+ continue;
790
+ }
791
+ unlinkSymlinks(e.symlinks);
792
+ pruned++;
793
+ }
794
+ if (pruned > 0)
795
+ saveManifest({ version: 1, entries: live }, path);
796
+ return pruned;
797
+ }
798
+
799
+ // dist/src/skilify/agent-roots.js
800
+ import { existsSync as existsSync5 } from "node:fs";
801
+ import { homedir as homedir6 } from "node:os";
802
+ import { join as join8 } from "node:path";
803
// Map installed agent CLIs (detected via their home-directory footprints)
// to the skills directories they read. Order is significant: the shared
// ".agents/skills" root comes first, then agent-specific roots.
function resolveDetected(home) {
  const roots = [];
  const hasCodex = existsSync5(join8(home, ".codex"));
  const hasPi = existsSync5(join8(home, ".pi", "agent"));
  const hasHermes = existsSync5(join8(home, ".hermes"));
  if (hasCodex || hasPi) {
    roots.push(join8(home, ".agents", "skills"));
  }
  if (hasHermes) {
    roots.push(join8(home, ".hermes", "skills"));
  }
  if (hasPi) {
    roots.push(join8(home, ".pi", "agent", "skills"));
  }
  return roots;
}
// All detected agent skills roots except the canonical Claude root, so
// pulls into the canonical root can be fanned out to them via symlinks.
function detectAgentSkillsRoots(canonicalRoot, home = homedir6()) {
  const detected = resolveDetected(home);
  return detected.filter((root) => root !== canonicalRoot);
}
+
823
+ // dist/src/skilify/pull.js
824
// Validate an author handle before it becomes part of a directory name.
// Throws for empty values, values longer than 64 characters, and any
// character outside [A-Za-z0-9_.-@].
function assertValidAuthor(author) {
  if (!author) {
    throw new Error("author is empty");
  }
  if (author.length > 64) {
    throw new Error(`author too long (${author.length}): ${author.slice(0, 32)}\u2026`);
  }
  const allowed = /^[A-Za-z0-9_.\-@]+$/;
  if (!allowed.test(author)) {
    throw new Error(`author contains invalid characters: ${author}`);
  }
}
// Escape a string for interpolation into a single-quoted SQL literal:
// backslashes are doubled, single quotes are doubled (standard SQL
// escaping), and non-printable control characters are stripped.
// Fix: the stripped range now begins at \x00 — the original began at
// \x01, which let NUL bytes pass through into the generated SQL.
// (Tab \x09, LF \x0a and CR \x0d are intentionally preserved.)
function esc(s) {
  return s
    .replace(/\\/g, "\\\\")
    .replace(/'/g, "''")
    .replace(/[\x00-\x08\x0b\x0c\x0e-\x1f\x7f]/g, "");
}
// Build the SELECT used by `skilify pull`. Optional filters on the
// author list and the skill name; rows are ordered so the newest version
// of each (project_key, name) pair appears first.
function buildPullSql(args) {
  const conditions = [];
  if (args.users.length > 0) {
    const quoted = args.users.map((u) => `'${esc(u)}'`);
    conditions.push(`author IN (${quoted.join(", ")})`);
  }
  if (args.skillName) {
    conditions.push(`name = '${esc(args.skillName)}'`);
  }
  const whereClause = conditions.length > 0 ? ` WHERE ${conditions.join(" AND ")}` : "";
  return `SELECT name, project, project_key, body, version, source_agent, scope, author, description, trigger_text, source_sessions, install, created_at, updated_at FROM "${args.tableName}"${whereClause} ORDER BY project_key ASC, name ASC, version DESC`;
}
// True when a query error message indicates the skills table has not
// been created yet (exact wording varies across SQL backends).
function isMissingTableError(message) {
  if (!message) {
    return false;
  }
  const missingTable = /Table does not exist|relation .* does not exist|no such table/i;
  return missingTable.test(message);
}
// Resolve where pulled skills are written: ~/.claude/skills for a global
// install, <cwd>/.claude/skills for a project install. A project install
// without a cwd is a programming error and throws.
function resolvePullDestination(install, cwd) {
  if (install === "global") {
    return join9(homedir7(), ".claude", "skills");
  }
  if (!cwd) {
    throw new Error("install=project requires a cwd");
  }
  return join9(cwd, ".claude", "skills");
}
// Create (or refresh) a symlink named `dirName` inside each agent root,
// pointing at the canonical skill directory. Returns the list of link
// paths that exist and point at `canonicalDir` after the call. Entirely
// best-effort: collisions with real files/dirs are left untouched, and
// any fs failure simply skips that root.
function fanOutSymlinks(canonicalDir, dirName, agentRoots) {
  const out = [];
  for (const root of agentRoots) {
    const link = join9(root, dirName);
    // lstat (not stat) so we observe the link itself, not its target.
    let existing;
    try {
      existing = lstatSync2(link);
    } catch {
      existing = null; // nothing at this path yet
    }
    if (existing) {
      if (!existing.isSymbolicLink()) {
        // Something real occupies the slot — never clobber user data.
        continue;
      }
      let current;
      try {
        current = readlinkSync(link);
      } catch {
        current = null;
      }
      if (current === canonicalDir) {
        // Link already points at the canonical dir; record and move on.
        out.push(link);
        continue;
      }
      // Stale or broken symlink: remove it so it can be re-created below.
      try {
        unlinkSync3(link);
      } catch {
        continue; // could not remove — skip this root
      }
    }
    try {
      // Ensure the agent root exists, then link ("dir" type for Windows).
      mkdirSync5(dirname3(link), { recursive: true });
      symlinkSync(canonicalDir, link, "dir");
      out.push(link);
    } catch {
      // best-effort: roots we cannot write to are silently skipped
    }
  }
  return out;
}
// Re-create missing or stale symlinks for previously pulled global
// skills (agent CLIs may have been installed after the original pull).
// Manifest entries are updated only when their symlink set changed.
function backfillSymlinks(installRoot) {
  const manifest = loadManifest();
  const entries = entriesForRoot(manifest, "global", installRoot);
  if (entries.length === 0) {
    return;
  }
  const detected = detectAgentSkillsRoots(installRoot);
  for (const entry of entries) {
    const canonical = join9(entry.installRoot, entry.dirName);
    if (!existsSync6(canonical)) {
      continue; // orphaned entry; pruning handles removal
    }
    const fresh = fanOutSymlinks(canonical, entry.dirName, detected);
    if (sameSorted(fresh, entry.symlinks)) {
      continue; // nothing changed — avoid a manifest rewrite
    }
    try {
      recordPull({ ...entry, symlinks: fresh });
    } catch {
      // manifest write failure is non-fatal for backfill
    }
  }
}
// Order-insensitive equality check for two string arrays.
function sameSorted(a, b) {
  if (a.length !== b.length) {
    return false;
  }
  const left = [...a].sort();
  const right = [...b].sort();
  return left.every((value, i) => value === right[i]);
}
// Rows arrive ordered with version DESC, so the first row seen for each
// (project_key, name) pair is its latest version — keep only those.
// Rows with an empty name are dropped entirely.
function selectLatestPerName(rows) {
  const taken = /* @__PURE__ */ new Set();
  const latest = [];
  for (const row of rows) {
    const name = String(row.name ?? "");
    if (!name) {
      continue;
    }
    const projectKey = String(row.project_key ?? "");
    const dedupeKey = `${projectKey}\0${name}`; // NUL-joined composite key
    if (!taken.has(dedupeKey)) {
      taken.add(dedupeKey);
      latest.push(row);
    }
  }
  return latest;
}
// Render a SKILL.md document for one pulled row: frontmatter, a blank
// line, then the trimmed skill body and a trailing newline. Missing
// timestamps default to "now"; a missing version defaults to 1.
function renderSkillFile(row) {
  const nowIso = () => new Date().toISOString();
  const hasTrigger = typeof row.trigger_text === "string" && row.trigger_text.length > 0;
  const fm = {
    name: String(row.name ?? ""),
    description: String(row.description ?? ""),
    trigger: hasTrigger ? String(row.trigger_text) : void 0,
    source_sessions: parseSourceSessions(row.source_sessions),
    version: Number(row.version ?? 1),
    created_by_agent: String(row.source_agent ?? "unknown"),
    created_at: String(row.created_at ?? nowIso()),
    updated_at: String(row.updated_at ?? nowIso())
  };
  const body = String(row.body ?? "").trim();
  return `${renderFrontmatter(fm)}\n\n${body}\n`;
}
// Normalize the source_sessions column to an array of strings. Accepts a
// real array or a JSON-encoded array; any other shape yields [].
function parseSourceSessions(v) {
  if (Array.isArray(v)) {
    return v.map(String);
  }
  if (typeof v !== "string") {
    return [];
  }
  try {
    const decoded = JSON.parse(v);
    return Array.isArray(decoded) ? decoded.map(String) : [];
  } catch {
    return []; // malformed JSON — treat as no sessions
  }
}
// Serialize the frontmatter object between "---" fences. description and
// trigger are JSON-quoted because they may contain arbitrary text; the
// remaining fields are emitted as plain scalars.
function renderFrontmatter(fm) {
  const lines = ["---"];
  lines.push(`name: ${fm.name}`);
  lines.push(`description: ${JSON.stringify(fm.description)}`);
  if (fm.trigger) {
    lines.push(`trigger: ${JSON.stringify(fm.trigger)}`);
  }
  lines.push(`source_sessions:`);
  for (const s of fm.source_sessions) {
    lines.push(` - ${s}`);
  }
  lines.push(
    `version: ${fm.version}`,
    `created_by_agent: ${fm.created_by_agent}`,
    `created_at: ${fm.created_at}`,
    `updated_at: ${fm.updated_at}`,
    "---"
  );
  return lines.join("\n");
}
// Read the installed skill's version from its SKILL.md frontmatter.
// Returns null when the file is missing, unreadable, has no parseable
// frontmatter, or carries a non-numeric version field.
function readLocalVersion(path) {
  if (!existsSync6(path)) {
    return null;
  }
  try {
    const text = readFileSync7(path, "utf-8");
    const parsed = parseFrontmatter(text);
    const version = parsed?.fm.version;
    return typeof version === "number" ? version : null;
  } catch {
    return null; // read/parse failure — treat as "not installed"
  }
}
// Decide what to do with one remote skill:
//   "skipped" — a local copy exists that is at least as new (and no --force)
//   "dryrun"  — a write is warranted but dryRun is set
//   "wrote"   — write it
function decideAction(args) {
  const wantsWrite =
    args.force || args.localVersion === null || args.remoteVersion > args.localVersion;
  if (!wantsWrite) {
    return "skipped";
  }
  return args.dryRun ? "dryrun" : "wrote";
}
// Pull skills from the remote table into the local skills directory.
// Flow: prune orphaned manifest entries (unless dry-run) -> query remote
// rows -> keep the latest version per (project_key, name) -> for each
// row validate name/author, compare versions, and write/skip/dry-run.
// Returns a summary { scanned, wrote, skipped, dryrun, entries } where
// each entry records what happened to one skill. Never throws for
// per-row problems — those become "skipped" entries.
async function runPull(opts) {
  if (!opts.dryRun)
    pruneOrphanedEntries();
  const sql = buildPullSql({
    tableName: opts.tableName,
    users: opts.users,
    skillName: opts.skillName
  });
  let rows = [];
  try {
    rows = await opts.query(sql);
  } catch (e) {
    // A missing table just means nothing has been pushed yet.
    if (isMissingTableError(e?.message))
      rows = [];
    else
      throw e;
  }
  const latest = selectLatestPerName(rows);
  const root = resolvePullDestination(opts.install, opts.cwd);
  const summary = { scanned: latest.length, wrote: 0, skipped: 0, dryrun: 0, entries: [] };
  for (const row of latest) {
    const name = String(row.name ?? "");
    if (!name)
      continue;
    // Reject unsafe skill names (they become directory names below).
    try {
      assertValidSkillName(name);
    } catch (e) {
      summary.entries.push({
        name,
        remoteVersion: Number(row.version ?? 1),
        localVersion: null,
        action: "skipped",
        destination: "(invalid name \u2014 skipped)",
        author: String(row.author ?? ""),
        sourceAgent: String(row.source_agent ?? "")
      });
      summary.skipped++;
      continue;
    }
    const author = String(row.author ?? "");
    if (!author) {
      // Author is required: it disambiguates the install directory.
      summary.entries.push({
        name,
        remoteVersion: Number(row.version ?? 1),
        localVersion: null,
        action: "skipped",
        destination: "(empty author \u2014 skipped)",
        author: "",
        sourceAgent: String(row.source_agent ?? "")
      });
      summary.skipped++;
      continue;
    }
    // Directory name is "<skill>--<author>"; author must be path-safe.
    let dirName;
    try {
      assertValidAuthor(author);
      dirName = `${name}--${author}`;
    } catch (e) {
      summary.entries.push({
        name,
        remoteVersion: Number(row.version ?? 1),
        localVersion: null,
        action: "skipped",
        destination: `(invalid author '${author}' \u2014 skipped)`,
        author,
        sourceAgent: String(row.source_agent ?? "")
      });
      summary.skipped++;
      continue;
    }
    const skillDir = join9(root, dirName);
    const skillFile = join9(skillDir, "SKILL.md");
    const remoteVersion = Number(row.version ?? 1);
    const localVersion = readLocalVersion(skillFile);
    const action = decideAction({
      remoteVersion,
      localVersion,
      force: opts.force ?? false,
      dryRun: opts.dryRun ?? false
    });
    let manifestError;
    if (action === "wrote") {
      mkdirSync5(skillDir, { recursive: true });
      // Keep a one-deep backup of the file we are about to overwrite.
      if (existsSync6(skillFile)) {
        try {
          renameSync2(skillFile, `${skillFile}.bak`);
        } catch {
        }
      }
      writeFileSync5(skillFile, renderSkillFile(row));
      // Global installs fan out symlinks to other agents' skills roots.
      const symlinks = opts.install === "global" ? fanOutSymlinks(skillDir, dirName, detectAgentSkillsRoots(root)) : [];
      // Record the pull in the manifest; a manifest failure must not
      // undo a successful write, so it is reported per-entry instead.
      try {
        recordPull({
          dirName,
          name,
          author,
          projectKey: String(row.project_key ?? ""),
          remoteVersion,
          install: opts.install,
          installRoot: root,
          pulledAt: (/* @__PURE__ */ new Date()).toISOString(),
          symlinks
        });
      } catch (e) {
        manifestError = e?.message ?? String(e);
      }
    }
    summary.entries.push({
      name,
      remoteVersion,
      localVersion,
      action,
      destination: skillFile,
      author: String(row.author ?? ""),
      sourceAgent: String(row.source_agent ?? ""),
      manifestError
    });
    if (action === "wrote")
      summary.wrote++;
    else if (action === "dryrun")
      summary.dryrun++;
    else
      summary.skipped++;
  }
  // Refresh symlinks for earlier pulls too (new agents may have appeared).
  if (!opts.dryRun && opts.install === "global") {
    backfillSymlinks(root);
  }
  return summary;
}
+
1141
+ // dist/src/skilify/auto-pull.js
1142
// Namespaced logger for the autopull feature, and the default cap (5s)
// on how long a session-start autopull may block before it is abandoned.
var log3 = (msg) => log("skilify-autopull", msg);
var DEFAULT_TIMEOUT_MS = 5e3;
+ function withTimeout(p, ms) {
1145
+ let timer = null;
1146
+ const timeout = new Promise((_, reject) => {
1147
+ timer = setTimeout(() => reject(new Error(`autopull timeout after ${ms}ms`)), ms);
1148
+ if (typeof timer.unref === "function")
1149
+ timer.unref();
1150
+ });
1151
+ return Promise.race([p, timeout]).finally(() => {
1152
+ if (timer)
1153
+ clearTimeout(timer);
1154
+ });
1155
+ }
1156
// Session-start hook: best-effort pull of shared skills. Never throws —
// every failure path logs and resolves to { pulled: 0, skipped: true }
// with a reason. Opt out with HIVEMIND_AUTOPULL_DISABLED=1. The `deps`
// object lets tests inject config loading, the query function, install
// kind, cwd, and the timeout.
async function autoPullSkills(deps = {}) {
  if (process.env.HIVEMIND_AUTOPULL_DISABLED === "1") {
    log3("disabled via HIVEMIND_AUTOPULL_DISABLED=1");
    return { pulled: 0, skipped: true, reason: "disabled" };
  }
  const loadFn = deps.loadConfigFn ?? loadConfig;
  const config = loadFn();
  if (!config) {
    log3("skipped: not logged in");
    return { pulled: 0, skipped: true, reason: "not-logged-in" };
  }
  let query = deps.queryFn;
  if (!query) {
    const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, config.skillsTableName);
    query = (sql) => api.query(sql);
  }
  const install = deps.install ?? "global";
  const timeoutMs = deps.timeoutMs ?? DEFAULT_TIMEOUT_MS;
  try {
    const pullPromise = runPull({
      query,
      tableName: config.skillsTableName,
      install,
      cwd: install === "project" ? deps.cwd ?? process.cwd() : void 0,
      users: [],
      dryRun: false,
      force: false
    });
    const summary = await withTimeout(pullPromise, timeoutMs);
    log3(`pulled scanned=${summary.scanned} wrote=${summary.wrote} skipped=${summary.skipped}`);
    return { pulled: summary.wrote, skipped: false };
  } catch (e) {
    log3(`pull failed (swallowed): ${e?.message ?? e}`);
    return { pulled: 0, skipped: true, reason: "error" };
  }
}
+
101
1194
  // dist/src/hooks/codex/session-start.js
102
- var log2 = (msg) => log("codex-session-start", msg);
103
- var __bundleDir = dirname2(fileURLToPath(import.meta.url));
1195
+ var log4 = (msg) => log("codex-session-start", msg);
1196
+ var __bundleDir = dirname4(fileURLToPath(import.meta.url));
104
1197
  var context = `DEEPLAKE MEMORY: Persistent memory at ~/.deeplake/memory/ shared across sessions, users, and agents.
105
1198
 
106
1199
  Deeplake memory has THREE tiers \u2014 pick the right one for the question:
@@ -153,12 +1246,12 @@ async function main() {
153
1246
  const input = await readStdin();
154
1247
  const creds = loadCredentials();
155
1248
  if (!creds?.token) {
156
- log2("no credentials found \u2014 run auth login to authenticate");
1249
+ log4("no credentials found \u2014 run auth login to authenticate");
157
1250
  } else {
158
- log2(`credentials loaded: org=${creds.orgName ?? creds.orgId}`);
1251
+ log4(`credentials loaded: org=${creds.orgName ?? creds.orgId}`);
159
1252
  }
160
1253
  if (creds?.token) {
161
- const setupScript = join4(__bundleDir, "session-start-setup.js");
1254
+ const setupScript = join10(__bundleDir, "session-start-setup.js");
162
1255
  const child = spawn("node", [setupScript], {
163
1256
  detached: true,
164
1257
  stdio: ["pipe", "ignore", "ignore"],
@@ -167,8 +1260,10 @@ async function main() {
167
1260
  child.stdin?.write(JSON.stringify(input));
168
1261
  child.stdin?.end();
169
1262
  child.unref();
170
- log2("spawned async setup process");
1263
+ log4("spawned async setup process");
171
1264
  }
1265
+ const pullResult = await autoPullSkills();
1266
+ log4(`autopull: pulled=${pullResult.pulled} skipped=${pullResult.skipped}`);
172
1267
  let versionNotice = "";
173
1268
  const current = getInstalledVersion(__bundleDir, ".codex-plugin");
174
1269
  if (current) {
@@ -181,6 +1276,6 @@ Not logged in to Deeplake. Run: hivemind login${versionNotice}`;
181
1276
  console.log(additionalContext);
182
1277
  }
183
1278
  main().catch((e) => {
184
- log2(`fatal: ${e.message}`);
1279
+ log4(`fatal: ${e.message}`);
185
1280
  process.exit(0);
186
1281
  });