@deeplake/hivemind 0.6.47

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,585 @@
1
+ #!/usr/bin/env node
2
+
3
+ // dist/src/hooks/codex/session-start-setup.js
4
+ import { fileURLToPath } from "node:url";
5
+ import { dirname as dirname2, join as join7 } from "node:path";
6
+ import { execSync as execSync2 } from "node:child_process";
7
+ import { homedir as homedir4 } from "node:os";
8
+
9
+ // dist/src/commands/auth.js
10
+ import { readFileSync, writeFileSync, existsSync, mkdirSync, unlinkSync } from "node:fs";
11
+ import { join } from "node:path";
12
+ import { homedir } from "node:os";
13
+ import { execSync } from "node:child_process";
14
// On-disk locations for saved credentials (see saveCredentials for modes).
const CONFIG_DIR = join(homedir(), ".deeplake");
const CREDS_PATH = join(CONFIG_DIR, "credentials.json");
16
/**
 * Read saved credentials from ~/.deeplake/credentials.json.
 * @returns {object|null} Parsed credentials, or null when the file is
 *   missing, unreadable, or not valid JSON.
 */
function loadCredentials() {
  try {
    if (!existsSync(CREDS_PATH)) return null;
    const raw = readFileSync(CREDS_PATH, "utf-8");
    return JSON.parse(raw);
  } catch {
    // Treat unreadable/corrupt files the same as "no credentials".
    return null;
  }
}
25
/**
 * Persist credentials to ~/.deeplake/credentials.json, stamping savedAt.
 * Modes 448 (0o700) and 384 (0o600) keep the token owner-only.
 * @param {object} creds - Credential fields to write (merged with savedAt).
 */
function saveCredentials(creds) {
  if (!existsSync(CONFIG_DIR)) {
    mkdirSync(CONFIG_DIR, { recursive: true, mode: 448 });
  }
  const payload = { ...creds, savedAt: new Date().toISOString() };
  writeFileSync(CREDS_PATH, JSON.stringify(payload, null, 2), { mode: 384 });
}
30
+
31
+ // dist/src/config.js
32
+ import { readFileSync as readFileSync2, existsSync as existsSync2 } from "node:fs";
33
+ import { join as join2 } from "node:path";
34
+ import { homedir as homedir2, userInfo } from "node:os";
35
/**
 * Build the runtime configuration from ~/.deeplake/credentials.json with
 * environment-variable overrides (HIVEMIND_*).
 *
 * Fix: previously a corrupt credentials file made this return null outright,
 * even when HIVEMIND_TOKEN / HIVEMIND_ORG_ID were set in the environment.
 * Now a parse failure just leaves `creds` null so the env fallback still works.
 *
 * @returns {object|null} Config object, or null when no token/org is available.
 */
function loadConfig() {
  const home = homedir2();
  const credPath = join2(home, ".deeplake", "credentials.json");
  let creds = null;
  if (existsSync2(credPath)) {
    try {
      creds = JSON.parse(readFileSync2(credPath, "utf-8"));
    } catch {
      // Corrupt file: ignore it and rely on env vars below.
      creds = null;
    }
  }
  // Environment always wins over the saved file.
  const token = process.env.HIVEMIND_TOKEN ?? creds?.token;
  const orgId = process.env.HIVEMIND_ORG_ID ?? creds?.orgId;
  if (!token || !orgId)
    return null;
  return {
    token,
    orgId,
    orgName: creds?.orgName ?? orgId,
    userName: creds?.userName || userInfo().username || "unknown",
    workspaceId: process.env.HIVEMIND_WORKSPACE_ID ?? creds?.workspaceId ?? "default",
    apiUrl: process.env.HIVEMIND_API_URL ?? creds?.apiUrl ?? "https://api.deeplake.ai",
    tableName: process.env.HIVEMIND_TABLE ?? "memory",
    sessionsTableName: process.env.HIVEMIND_SESSIONS_TABLE ?? "sessions",
    memoryPath: process.env.HIVEMIND_MEMORY_PATH ?? join2(home, ".deeplake", "memory")
  };
}
62
+
63
+ // dist/src/deeplake-api.js
64
+ import { randomUUID } from "node:crypto";
65
+ import { existsSync as existsSync3, mkdirSync as mkdirSync2, readFileSync as readFileSync3, writeFileSync as writeFileSync2 } from "node:fs";
66
+ import { join as join4 } from "node:path";
67
+ import { tmpdir } from "node:os";
68
+
69
+ // dist/src/utils/debug.js
70
+ import { appendFileSync } from "node:fs";
71
+ import { join as join3 } from "node:path";
72
+ import { homedir as homedir3 } from "node:os";
73
// Debug logging is opt-in via HIVEMIND_DEBUG=1; lines go to a file in $HOME.
const DEBUG = process.env.HIVEMIND_DEBUG === "1";
const LOG = join3(homedir3(), ".deeplake", "hook-debug.log");
75
/**
 * Format a Date as "YYYY-MM-DD HH:MM:SS UTC" (second precision).
 * @param {Date} [d] - Defaults to now.
 * @returns {string}
 */
function utcTimestamp(d = new Date()) {
  const [date, time] = d.toISOString().split("T");
  return `${date} ${time.slice(0, 8)} UTC`;
}
78
/**
 * Append a timestamped line to the debug log file.
 * No-op unless HIVEMIND_DEBUG=1, so callers never need to guard.
 * @param {string} tag - Component label shown in brackets.
 * @param {string} msg - Message text.
 */
function log(tag, msg) {
  if (!DEBUG) return;
  const line = `${new Date().toISOString()} [${tag}] ${msg}\n`;
  appendFileSync(LOG, line);
}
84
+
85
+ // dist/src/utils/sql.js
86
// dist/src/utils/sql.js
/**
 * Escape a value for embedding in a single-quoted SQL string literal:
 * doubles backslashes and quotes, strips NUL and other control characters
 * (tab/LF/CR are deliberately preserved).
 * @param {string} value
 * @returns {string}
 */
function sqlStr(value) {
  const steps = [
    [/\\/g, "\\\\"], // backslashes first so later replacements survive
    [/'/g, "''"],
    [/\0/g, ""],
    [/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, ""]
  ];
  return steps.reduce((out, [pattern, replacement]) => out.replace(pattern, replacement), value);
}
89
+
90
+ // dist/src/deeplake-api.js
91
// Debug-log helper tagged for the SDK layer.
const log2 = (msg) => log("sdk", msg);

/**
 * Collapse whitespace and cap length so SQL fits on one log line.
 * @param {string} sql
 * @param {number} [maxLen=220] - Truncation threshold; longer text gets "...".
 * @returns {string}
 */
function summarizeSql(sql, maxLen = 220) {
  const compact = sql.replace(/\s+/g, " ").trim();
  if (compact.length <= maxLen) return compact;
  return `${compact.slice(0, maxLen)}...`;
}
96
/**
 * Emit a SQL trace line to stderr when HIVEMIND_TRACE_SQL=1 or
 * HIVEMIND_DEBUG=1; also mirrors into the debug log file under debug mode.
 * @param {string} msg
 */
function traceSql(msg) {
  const debugOn = process.env.HIVEMIND_DEBUG === "1";
  const traceOn = process.env.HIVEMIND_TRACE_SQL === "1" || debugOn;
  if (!traceOn) return;
  process.stderr.write(`[deeplake-sql] ${msg}\n`);
  if (debugOn) log2(msg);
}
105
// HTTP statuses treated as transient and retried with backoff.
const RETRYABLE_CODES = new Set([429, 500, 502, 503, 504]);
const MAX_RETRIES = 3;
const BASE_DELAY_MS = 500;
// Max in-flight queries per DeeplakeApi instance.
const MAX_CONCURRENCY = 5;
// Per-query timeout in ms (default 10s); override via HIVEMIND_QUERY_TIMEOUT_MS.
const QUERY_TIMEOUT_MS = Number(process.env.HIVEMIND_QUERY_TIMEOUT_MS ?? 1e4);
// Freshness window for on-disk "index exists" markers (default 6h).
const INDEX_MARKER_TTL_MS = Number(process.env.HIVEMIND_INDEX_MARKER_TTL_MS ?? 6 * 60 * 6e4);
111
/** Resolve after `ms` milliseconds. */
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
114
/**
 * Heuristic: does this error look like a timeout/abort?
 * Matches on error name ("AbortError", anything containing "timeout")
 * or on message text ("timeout" / "timed out"), case-insensitively.
 * @param {unknown} error
 * @returns {boolean}
 */
function isTimeoutError(error) {
  let name = "";
  let message;
  if (error instanceof Error) {
    name = error.name.toLowerCase();
    message = error.message.toLowerCase();
  } else {
    message = String(error).toLowerCase();
  }
  if (name === "aborterror" || name.includes("timeout")) return true;
  return message.includes("timeout") || message.includes("timed out");
}
119
/**
 * True when the error message indicates the index already exists
 * (Postgres duplicate-key wording or a generic "already exists").
 * @param {unknown} error
 * @returns {boolean}
 */
function isDuplicateIndexError(error) {
  const message = (error instanceof Error ? error.message : String(error)).toLowerCase();
  const needles = [
    "duplicate key value violates unique constraint",
    "pg_class_relname_nsp_index",
    "already exists"
  ];
  return needles.some((needle) => message.includes(needle));
}
123
// Matches INSERTs into the sessions table, whose column list starts
// (id, path, filename, message, ...). Used to widen retry rules for them.
const SESSION_INSERT_RE = /^\s*insert\s+into\s+"[^"]+"\s*\(\s*id\s*,\s*path\s*,\s*filename\s*,\s*message\s*,/i;

/** @param {string} sql @returns {boolean} */
function isSessionInsertQuery(sql) {
  return SESSION_INSERT_RE.test(sql);
}
126
/**
 * True when a 403 body looks like CDN/proxy interference (HTML error page,
 * Cloudflare or nginx wording) rather than a real authorization response.
 * @param {string} text - Response body.
 * @returns {boolean}
 */
function isTransientHtml403(text) {
  const body = text.toLowerCase();
  return ["<html", "403 forbidden", "cloudflare", "nginx"].some((marker) => body.includes(marker));
}
130
/**
 * Directory for on-disk "index already created" marker files.
 * Overridable via HIVEMIND_INDEX_MARKER_DIR; defaults to the OS temp dir.
 * @returns {string}
 */
function getIndexMarkerDir() {
  const override = process.env.HIVEMIND_INDEX_MARKER_DIR;
  return override ?? join4(tmpdir(), "hivemind-deeplake-indexes");
}
133
/**
 * Minimal counting semaphore: at most `max` concurrent holders.
 * acquire() resolves immediately while capacity remains, otherwise parks the
 * caller in a FIFO queue; release() hands the freed slot to the oldest waiter.
 */
const Semaphore = class {
  max;
  waiting = [];
  active = 0;
  constructor(max) {
    this.max = max;
  }
  async acquire() {
    if (this.active >= this.max) {
      // Park until release() wakes us. The releaser re-increments `active`
      // on our behalf (slot transfer), so nothing more to do here.
      await new Promise((resolve) => {
        this.waiting.push(resolve);
      });
      return;
    }
    this.active += 1;
  }
  release() {
    this.active -= 1;
    const wake = this.waiting.shift();
    if (wake !== undefined) {
      this.active += 1; // the slot is transferred, not freed
      wake();
    }
  }
};
156
/**
 * Thin SQL-over-HTTP client for the DeepLake tables API.
 *
 * All reads and writes go through query(): the SQL string is POSTed as JSON,
 * transient failures are retried with exponential backoff plus jitter, and
 * per-instance concurrency is capped by a Semaphore (MAX_CONCURRENCY).
 *
 * NOTE(review): statements are assembled by string interpolation and rely
 * entirely on sqlStr() for escaping; the API appears to take only a raw
 * `query` string (no bound parameters) — confirm before reusing elsewhere.
 */
var DeeplakeApi = class {
  token;        // bearer token for the Authorization header
  apiUrl;       // API base URL, e.g. https://api.deeplake.ai
  orgId;        // sent as X-Activeloop-Org-Id
  workspaceId;  // path segment of every endpoint
  tableName;    // default table targeted by upsertRowSql()/updateColumns()
  _pendingRows = [];                     // rows queued via appendRows(), flushed by commit()
  _sem = new Semaphore(MAX_CONCURRENCY); // bounds in-flight queries for this instance
  _tablesCache = null;                   // memoized table list; null until a cacheable fetch succeeds
  constructor(token, apiUrl, orgId, workspaceId, tableName) {
    this.token = token;
    this.apiUrl = apiUrl;
    this.orgId = orgId;
    this.workspaceId = workspaceId;
    this.tableName = tableName;
  }
  /** Execute SQL with retry on transient errors and bounded concurrency. */
  async query(sql) {
    const startedAt = Date.now();
    const summary = summarizeSql(sql);
    traceSql(`query start: ${summary}`);
    await this._sem.acquire();
    try {
      const rows = await this._queryWithRetry(sql);
      traceSql(`query ok (${Date.now() - startedAt}ms, rows=${rows.length}): ${summary}`);
      return rows;
    } catch (e) {
      const message = e instanceof Error ? e.message : String(e);
      traceSql(`query fail (${Date.now() - startedAt}ms): ${summary} :: ${message}`);
      throw e;
    } finally {
      // Always free the concurrency slot, success or failure.
      this._sem.release();
    }
  }
  // Core request loop: up to MAX_RETRIES + 1 attempts. Timeouts abort
  // immediately (no retry); other fetch errors and RETRYABLE_CODES statuses
  // back off exponentially (BASE_DELAY_MS * 2^attempt + jitter).
  async _queryWithRetry(sql) {
    let lastError;
    for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
      let resp;
      try {
        const signal = AbortSignal.timeout(QUERY_TIMEOUT_MS);
        resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables/query`, {
          method: "POST",
          headers: {
            Authorization: `Bearer ${this.token}`,
            "Content-Type": "application/json",
            "X-Activeloop-Org-Id": this.orgId
          },
          signal,
          body: JSON.stringify({ query: sql })
        });
      } catch (e) {
        if (isTimeoutError(e)) {
          // Deliberate: a timed-out query is not retried.
          lastError = new Error(`Query timeout after ${QUERY_TIMEOUT_MS}ms`);
          throw lastError;
        }
        lastError = e instanceof Error ? e : new Error(String(e));
        if (attempt < MAX_RETRIES) {
          const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
          log2(`query retry ${attempt + 1}/${MAX_RETRIES} (fetch error: ${lastError.message}) in ${delay.toFixed(0)}ms`);
          await sleep(delay);
          continue;
        }
        throw lastError;
      }
      if (resp.ok) {
        // Response shape: { columns: string[], rows: any[][] } — zip into
        // one object per row keyed by column name.
        const raw = await resp.json();
        if (!raw?.rows || !raw?.columns)
          return [];
        return raw.rows.map((row) => Object.fromEntries(raw.columns.map((col, i) => [col, row[i]])));
      }
      const text = await resp.text().catch(() => "");
      // Session-event inserts additionally retry on 401, and on 403 when the
      // body is empty or looks like CDN interference (&& binds tighter than ||).
      const retryable403 = isSessionInsertQuery(sql) && (resp.status === 401 || resp.status === 403 && (text.length === 0 || isTransientHtml403(text)));
      if (attempt < MAX_RETRIES && (RETRYABLE_CODES.has(resp.status) || retryable403)) {
        const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
        log2(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`);
        await sleep(delay);
        continue;
      }
      throw new Error(`Query failed: ${resp.status}: ${text.slice(0, 200)}`);
    }
    throw lastError ?? new Error("Query failed: max retries exceeded");
  }
  // ── Writes ──────────────────────────────────────────────────────────────────
  /** Queue rows for writing. Call commit() to flush. */
  appendRows(rows) {
    this._pendingRows.push(...rows);
  }
  /**
   * Flush pending rows via SQL in chunks of 10 concurrent upserts.
   * NOTE(review): Promise.allSettled means individual row failures are
   * silently dropped — commit() never rejects.
   */
  async commit() {
    if (this._pendingRows.length === 0)
      return;
    const rows = this._pendingRows;
    this._pendingRows = [];
    const CONCURRENCY = 10;
    for (let i = 0; i < rows.length; i += CONCURRENCY) {
      const chunk = rows.slice(i, i + CONCURRENCY);
      await Promise.allSettled(chunk.map((r) => this.upsertRowSql(r)));
    }
    log2(`commit: ${rows.length} rows`);
  }
  /**
   * SELECT-then-INSERT/UPDATE a single row keyed by `path`.
   * NOTE(review): the existence check and the write are separate queries,
   * so concurrent upserts of the same path can race and double-insert.
   */
  async upsertRowSql(row) {
    const ts = (/* @__PURE__ */ new Date()).toISOString();
    const cd = row.creationDate ?? ts;
    const lud = row.lastUpdateDate ?? ts;
    const exists = await this.query(`SELECT path FROM "${this.tableName}" WHERE path = '${sqlStr(row.path)}' LIMIT 1`);
    if (exists.length > 0) {
      // Optional columns (project/description) are only touched when provided.
      let setClauses = `summary = E'${sqlStr(row.contentText)}', mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`;
      if (row.project !== void 0)
        setClauses += `, project = '${sqlStr(row.project)}'`;
      if (row.description !== void 0)
        setClauses += `, description = '${sqlStr(row.description)}'`;
      await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(row.path)}'`);
    } else {
      const id = randomUUID();
      let cols = "id, path, filename, summary, mime_type, size_bytes, creation_date, last_update_date";
      let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'${sqlStr(row.contentText)}', '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`;
      if (row.project !== void 0) {
        cols += ", project";
        vals += `, '${sqlStr(row.project)}'`;
      }
      if (row.description !== void 0) {
        cols += ", description";
        vals += `, '${sqlStr(row.description)}'`;
      }
      await this.query(`INSERT INTO "${this.tableName}" (${cols}) VALUES (${vals})`);
    }
  }
  /** Update specific columns on a row by path. Numbers are emitted unquoted. */
  async updateColumns(path, columns) {
    const setClauses = Object.entries(columns).map(([col, val]) => typeof val === "number" ? `${col} = ${val}` : `${col} = '${sqlStr(String(val))}'`).join(", ");
    await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(path)}'`);
  }
  // ── Convenience ─────────────────────────────────────────────────────────────
  /** Create a BM25 search index on a column. */
  async createIndex(column) {
    await this.query(`CREATE INDEX IF NOT EXISTS idx_${sqlStr(column)}_bm25 ON "${this.tableName}" USING deeplake_index ("${column}")`);
  }
  /** Derive a safe index name (non-alphanumerics collapsed to "_"). */
  buildLookupIndexName(table, suffix) {
    return `idx_${table}_${suffix}`.replace(/[^a-zA-Z0-9_]/g, "_");
  }
  /** Path of the local marker file recording that an index was created. */
  getLookupIndexMarkerPath(table, suffix) {
    // Key includes workspace + org so different tenants don't share markers.
    const markerKey = [
      this.workspaceId,
      this.orgId,
      table,
      suffix
    ].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_");
    return join4(getIndexMarkerDir(), `${markerKey}.json`);
  }
  /** True if a marker exists and is younger than INDEX_MARKER_TTL_MS. */
  hasFreshLookupIndexMarker(table, suffix) {
    const markerPath = this.getLookupIndexMarkerPath(table, suffix);
    if (!existsSync3(markerPath))
      return false;
    try {
      const raw = JSON.parse(readFileSync3(markerPath, "utf-8"));
      const updatedAt = raw.updatedAt ? new Date(raw.updatedAt).getTime() : NaN;
      if (!Number.isFinite(updatedAt) || Date.now() - updatedAt > INDEX_MARKER_TTL_MS)
        return false;
      return true;
    } catch {
      // Unreadable marker: treat as stale.
      return false;
    }
  }
  /** Write/refresh the marker file with the current timestamp. */
  markLookupIndexReady(table, suffix) {
    mkdirSync2(getIndexMarkerDir(), { recursive: true });
    writeFileSync2(this.getLookupIndexMarkerPath(table, suffix), JSON.stringify({ updatedAt: (/* @__PURE__ */ new Date()).toISOString() }), "utf-8");
  }
  /**
   * Idempotently create a lookup index, using the marker file to skip the
   * round-trip when it was created recently. "Already exists" errors also
   * mark success; any other failure is logged and swallowed (best-effort).
   */
  async ensureLookupIndex(table, suffix, columnsSql) {
    if (this.hasFreshLookupIndexMarker(table, suffix))
      return;
    const indexName = this.buildLookupIndexName(table, suffix);
    try {
      await this.query(`CREATE INDEX IF NOT EXISTS "${indexName}" ON "${table}" ${columnsSql}`);
      this.markLookupIndexReady(table, suffix);
    } catch (e) {
      if (isDuplicateIndexError(e)) {
        this.markLookupIndexReady(table, suffix);
        return;
      }
      log2(`index "${indexName}" skipped: ${e.message}`);
    }
  }
  /** List all tables in the workspace (with retry). */
  async listTables(forceRefresh = false) {
    if (!forceRefresh && this._tablesCache)
      return [...this._tablesCache];
    const { tables, cacheable } = await this._fetchTables();
    if (cacheable)
      this._tablesCache = [...tables];
    return tables;
  }
  // GET the table list with the same backoff policy as queries. On persistent
  // failure returns an empty, non-cacheable result instead of throwing.
  async _fetchTables() {
    for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
      try {
        const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, {
          headers: {
            Authorization: `Bearer ${this.token}`,
            "X-Activeloop-Org-Id": this.orgId
          }
        });
        if (resp.ok) {
          const data = await resp.json();
          return {
            tables: (data.tables ?? []).map((t) => t.table_name),
            cacheable: true
          };
        }
        if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
          await sleep(BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200);
          continue;
        }
        return { tables: [], cacheable: false };
      } catch {
        if (attempt < MAX_RETRIES) {
          await sleep(BASE_DELAY_MS * Math.pow(2, attempt));
          continue;
        }
        return { tables: [], cacheable: false };
      }
    }
    return { tables: [], cacheable: false };
  }
  /**
   * Create the memory table if it doesn't already exist.
   * NOTE(review): despite the original comment mentioning column migration,
   * no ALTER/migration is performed here — existing tables are left untouched.
   */
  async ensureTable(name) {
    const tbl = name ?? this.tableName;
    const tables = await this.listTables();
    if (!tables.includes(tbl)) {
      log2(`table "${tbl}" not found, creating`);
      await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
      log2(`table "${tbl}" created`);
      // Always true here (outer guard established it); kept for safety.
      if (!tables.includes(tbl))
        this._tablesCache = [...tables, tbl];
    }
  }
  /** Create the sessions table (uses JSONB for message since every row is a JSON event). */
  async ensureSessionsTable(name) {
    const tables = await this.listTables();
    if (!tables.includes(name)) {
      log2(`table "${name}" not found, creating`);
      await this.query(`CREATE TABLE IF NOT EXISTS "${name}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
      log2(`table "${name}" created`);
      // Always true here (outer guard established it); kept for safety.
      if (!tables.includes(name))
        this._tablesCache = [...tables, name];
    }
    // Sessions are queried by (path, creation_date); keep that index warm.
    await this.ensureLookupIndex(name, "path_creation_date", `("path", "creation_date")`);
  }
};
403
+
404
+ // dist/src/utils/stdin.js
405
/**
 * Read all of stdin as UTF-8 and parse it as JSON (the hook input envelope).
 * @returns {Promise<any>} Parsed JSON payload.
 *   Rejects with a descriptive Error on invalid JSON or a stream error.
 */
function readStdin() {
  return new Promise((resolve, reject) => {
    const chunks = [];
    process.stdin.setEncoding("utf-8");
    process.stdin.on("data", (chunk) => chunks.push(chunk));
    process.stdin.on("error", reject);
    process.stdin.on("end", () => {
      try {
        resolve(JSON.parse(chunks.join("")));
      } catch (err) {
        reject(new Error(`Failed to parse hook input: ${err}`));
      }
    });
  });
}
420
+
421
+ // dist/src/utils/version-check.js
422
+ import { readFileSync as readFileSync4 } from "node:fs";
423
+ import { dirname, join as join5 } from "node:path";
424
// Upstream package.json used to discover the latest released version.
const GITHUB_RAW_PKG = "https://raw.githubusercontent.com/activeloopai/hivemind/main/package.json";

/**
 * Resolve the locally installed version: first try plugin.json next to the
 * bundle, then walk up to five ancestor directories looking for a
 * package.json named "hivemind" or "hivemind-codex".
 * @param {string} bundleDir - Directory containing the running bundle.
 * @param {string} pluginManifestDir - Sibling dir holding plugin.json.
 * @returns {string|null} Version string, or null when none can be found.
 */
function getInstalledVersion(bundleDir, pluginManifestDir) {
  try {
    const manifestPath = join5(bundleDir, "..", pluginManifestDir, "plugin.json");
    const manifest = JSON.parse(readFileSync4(manifestPath, "utf-8"));
    if (manifest.version)
      return manifest.version;
  } catch {
    // No plugin manifest — fall through to the package.json scan.
  }
  let dir = bundleDir;
  for (let depth = 0; depth < 5; depth++) {
    try {
      const pkg = JSON.parse(readFileSync4(join5(dir, "package.json"), "utf-8"));
      const nameMatches = pkg.name === "hivemind" || pkg.name === "hivemind-codex";
      if (nameMatches && pkg.version)
        return pkg.version;
    } catch {
      // Missing/unreadable package.json — keep climbing.
    }
    const parent = dirname(dir);
    if (parent === dir)
      break; // reached filesystem root
    dir = parent;
  }
  return null;
}
449
/**
 * Fetch the latest released version from the GitHub raw package.json.
 * Best-effort: any network failure, timeout, or non-OK status yields null.
 * @param {number} [timeoutMs=3000]
 * @returns {Promise<string|null>}
 */
async function getLatestVersion(timeoutMs = 3e3) {
  try {
    const res = await fetch(GITHUB_RAW_PKG, { signal: AbortSignal.timeout(timeoutMs) });
    if (!res.ok)
      return null;
    const pkg = await res.json();
    return pkg.version ?? null;
  } catch {
    return null;
  }
}
460
/**
 * Return true when `latest` is a strictly newer dotted version than `current`.
 *
 * Fix/generalization: the old implementation destructured exactly three
 * segments, so versions of unequal length (e.g. "1.2.3" vs "1.2") compared
 * through NaN and always returned false. Segments are now compared pairwise
 * with missing/non-numeric parts treated as 0, and a leading "v" is tolerated.
 * Exact three-part comparisons behave as before.
 *
 * @param {string} latest
 * @param {string} current
 * @returns {boolean}
 */
function isNewer(latest, current) {
  const parse = (v) => String(v).replace(/^v/, "").split(".").map((part) => Number.parseInt(part, 10));
  const a = parse(latest);
  const b = parse(current);
  const len = Math.max(a.length, b.length);
  for (let i = 0; i < len; i++) {
    const x = Number.isFinite(a[i]) ? a[i] : 0;
    const y = Number.isFinite(b[i]) ? b[i] : 0;
    if (x !== y)
      return x > y;
  }
  return false; // identical versions are not "newer"
}
466
+
467
+ // dist/src/utils/wiki-log.js
468
+ import { mkdirSync as mkdirSync3, appendFileSync as appendFileSync2 } from "node:fs";
469
+ import { join as join6 } from "node:path";
470
/**
 * Build a best-effort file logger under `hooksDir`.
 * @param {string} hooksDir - Directory to create (recursively) and log into.
 * @param {string} [filename="deeplake-wiki.log"]
 * @returns {{ path: string, log: (msg: string) => void }}
 *   log() never throws — any filesystem error is swallowed so diagnostics
 *   can't break the hook.
 */
function makeWikiLogger(hooksDir, filename = "deeplake-wiki.log") {
  const path = join6(hooksDir, filename);
  const log = (msg) => {
    try {
      mkdirSync3(hooksDir, { recursive: true });
      appendFileSync2(path, `[${utcTimestamp()}] ${msg}\n`);
    } catch {
      // best-effort: ignore logging failures
    }
  };
  return { path, log };
}
484
+
485
+ // dist/src/hooks/codex/session-start-setup.js
486
// Hook wiring: debug-log tag for this hook, the bundle's own directory
// (used by getInstalledVersion for the version check), and a persistent
// wiki log under ~/.codex/hooks.
var log3 = (msg) => log("codex-session-setup", msg);
var __bundleDir = dirname2(fileURLToPath(import.meta.url));
var { log: wikiLog } = makeWikiLogger(join7(homedir4(), ".codex", "hooks"));
489
/**
 * Insert a placeholder session-summary row unless one already exists
 * (an existing row means the session was resumed).
 * @param {object} api - DeeplakeApi instance.
 * @param {string} table - Target table name.
 * @param {string} sessionId
 * @param {string} cwd - Session working directory; its basename becomes the project name.
 * @param {string} userName
 * @param {string} orgName
 * @param {string} workspaceId
 */
async function createPlaceholder(api, table, sessionId, cwd, userName, orgName, workspaceId) {
  const summaryPath = `/summaries/${userName}/${sessionId}.md`;
  const found = await api.query(`SELECT path FROM "${table}" WHERE path = '${sqlStr(summaryPath)}' LIMIT 1`);
  if (found.length > 0) {
    wikiLog(`SessionSetup: summary exists for ${sessionId} (resumed)`);
    return;
  }
  const now = new Date().toISOString();
  const projectName = cwd.split("/").pop() ?? "unknown";
  const sessionSource = `/sessions/${userName}/${userName}_${orgName}_${workspaceId}_${sessionId}.jsonl`;
  // Markdown stub; trailing "" yields a final newline after join.
  const lines = [
    `# Session ${sessionId}`,
    `- **Source**: ${sessionSource}`,
    `- **Started**: ${now}`,
    `- **Project**: ${projectName}`,
    `- **Status**: in-progress`,
    ""
  ];
  const content = lines.join("\n");
  const filename = `${sessionId}.md`;
  await api.query(`INSERT INTO "${table}" (id, path, filename, summary, author, mime_type, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ('${crypto.randomUUID()}', '${sqlStr(summaryPath)}', '${sqlStr(filename)}', E'${sqlStr(content)}', '${sqlStr(userName)}', 'text/markdown', ${Buffer.byteLength(content, "utf-8")}, '${sqlStr(projectName)}', 'in progress', 'codex', '${now}', '${now}')`);
  wikiLog(`SessionSetup: created placeholder for ${sessionId} (${cwd})`);
}
511
/**
 * Session-start hook entry point.
 *
 * Flow: skip when running as the wiki worker subprocess; read the hook input
 * JSON from stdin; bail without credentials; backfill a missing userName into
 * the saved credentials; ensure the memory + sessions tables exist and create
 * a placeholder summary row (unless HIVEMIND_CAPTURE="false"); finally run a
 * best-effort version check / auto-update. Every stage is wrapped so the hook
 * never throws out of main().
 */
async function main() {
  // The wiki worker subprocess re-enters this script; do nothing there.
  if (process.env.HIVEMIND_WIKI_WORKER === "1")
    return;
  const input = await readStdin();
  const creds = loadCredentials();
  if (!creds?.token) {
    log3("no credentials");
    return;
  }
  if (!creds.userName) {
    try {
      const { userInfo: userInfo2 } = await import("node:os");
      creds.userName = userInfo2().username ?? "unknown";
      saveCredentials(creds);
      log3(`backfilled userName: ${creds.userName}`);
    } catch {
      // Best-effort backfill; a missing userName is tolerated downstream.
    }
  }
  // Capture defaults on; only the literal string "false" disables it.
  const captureEnabled = process.env.HIVEMIND_CAPTURE !== "false";
  if (input.session_id) {
    try {
      const config = loadConfig();
      if (config) {
        const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, config.tableName);
        await api.ensureTable();
        await api.ensureSessionsTable(config.sessionsTableName);
        if (captureEnabled) {
          await createPlaceholder(api, config.tableName, input.session_id, input.cwd ?? "", config.userName, config.orgName, config.workspaceId);
        }
        log3("setup complete");
      }
    } catch (e) {
      // Setup failures are logged but never abort the hook.
      log3(`setup failed: ${e.message}`);
      wikiLog(`SessionSetup: failed for ${input.session_id}: ${e.message}`);
    }
  }
  // Auto-update is on unless credentials explicitly set autoupdate:false.
  const autoupdate = creds.autoupdate !== false;
  try {
    const current = getInstalledVersion(__bundleDir, ".codex-plugin");
    if (current) {
      const latest = await getLatestVersion();
      if (latest && isNewer(latest, current)) {
        if (autoupdate) {
          log3(`autoupdate: updating ${current} \u2192 ${latest}`);
          try {
            // The tag is regex-validated (v + three numeric parts) before it
            // is interpolated into the shell command below.
            const tag = `v${latest}`;
            if (!/^v\d+\.\d+\.\d+$/.test(tag))
              throw new Error(`unsafe version tag: ${tag}`);
            // Shell pipeline: locate the install dir (plugin cache, else
            // ~/.codex/hivemind), git-clone the release tag into a temp dir,
            // and copy its codex/ payload over the install dir.
            // NOTE(review): this overwrites the installed plugin from a
            // remote repository at session start — security-sensitive path.
            const findCmd = `INSTALL_DIR=""; CACHE_DIR=$(find ~/.codex/plugins/cache -maxdepth 3 -name "hivemind" -type d 2>/dev/null | head -1); if [ -n "$CACHE_DIR" ]; then INSTALL_DIR=$(ls -1d "$CACHE_DIR"/*/ 2>/dev/null | tail -1); elif [ -d ~/.codex/hivemind ]; then INSTALL_DIR=~/.codex/hivemind; fi; if [ -n "$INSTALL_DIR" ]; then TMPDIR=$(mktemp -d); git clone --depth 1 --branch ${tag} -q https://github.com/activeloopai/hivemind.git "$TMPDIR/hivemind" 2>/dev/null && cp -r "$TMPDIR/hivemind/codex/"* "$INSTALL_DIR/" 2>/dev/null; rm -rf "$TMPDIR"; fi`;
            execSync2(findCmd, { stdio: "ignore", timeout: 6e4 });
            process.stderr.write(`Hivemind auto-updated: ${current} \u2192 ${latest}. Restart Codex to apply.
`);
            log3(`autoupdate succeeded: ${current} \u2192 ${latest} (tag: ${tag})`);
          } catch (e) {
            process.stderr.write(`Hivemind update available: ${current} \u2192 ${latest}. Auto-update failed.
`);
            log3(`autoupdate failed: ${e.message}`);
          }
        } else {
          process.stderr.write(`Hivemind update available: ${current} \u2192 ${latest}.
`);
          log3(`update available (autoupdate off): ${current} \u2192 ${latest}`);
        }
      } else {
        log3(`version up to date: ${current}`);
      }
    }
  } catch (e) {
    log3(`version check failed: ${e.message}`);
  }
}
582
// Top-level entry: a hook must never fail the host session, so any
// unhandled error is logged and the process still exits 0.
main().catch((e) => {
  log3(`fatal: ${e.message}`);
  process.exit(0);
});