@gamaze/hicortex 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,119 @@
1
+ # @gamaze/hicortex — Long-term Memory for OpenClaw Agents
2
+
3
+ Your agents remember past decisions, avoid repeated mistakes, and get smarter every day.
4
+
5
+ Hicortex captures session activity, distills knowledge nightly, scores importance, generates actionable lessons via reflection, and automatically injects them into agent context. No manual intervention needed after install.
6
+
7
+ ## Requirements
8
+
9
+ - OpenClaw gateway (Node.js 18+)
10
+ - LLM provider configured in OpenClaw (auto-detected, 20+ providers supported)
11
+ - ~500MB disk for database + embedding model
12
+
13
+ ## Install
14
+
15
+ ```bash
16
+ openclaw plugins install @gamaze/hicortex
17
+ ```
18
+
19
+ ## Configure
20
+
21
+ No configuration needed for most users. The plugin auto-detects your LLM provider from OpenClaw settings on first startup.
22
+
23
+ Optional config (add to plugin entry in `~/.openclaw/openclaw.json`):
24
+
25
+ | Field | Default | Description |
26
+ |-------|---------|-------------|
27
+ | `licenseKey` | _(none)_ | License key. Free tier (250 memories) without key. |
28
+ | `llmBaseUrl` | _(auto)_ | Override LLM base URL (auto-detected from OC config) |
29
+ | `llmApiKey` | _(auto)_ | Override LLM API key (auto-detected from OC auth) |
30
+ | `llmModel` | _(auto)_ | Override model for scoring and distillation |
31
+ | `reflectModel` | _(auto)_ | Override model for nightly reflection |
32
+ | `consolidateHour` | `2` | Hour (0-23, local time) for nightly consolidation |
33
+ | `dbPath` | _(auto)_ | Custom database path |
34
+
35
+ Restart the gateway after config changes:
36
+
37
+ ```bash
38
+ openclaw gateway restart
39
+ ```
40
+
41
+ ## What Happens Automatically
42
+
43
+ | When | What | How |
44
+ |------|------|-----|
45
+ | Agent start | Recent lessons injected into context | `before_agent_start` hook |
46
+ | Agent end | Conversation captured and distilled | `agent_end` hook + LLM |
47
+ | Nightly | Score importance, reflect, link, decay | In-process consolidation pipeline |
48
+
49
+ ## Agent Tools
50
+
51
+ The plugin registers these tools for agents to use:
52
+
53
+ - **hicortex_search** — Semantic search across all stored knowledge
54
+ - **hicortex_context** — Get recent decisions and project state
55
+ - **hicortex_ingest** — Store a piece of knowledge directly
56
+ - **hicortex_lessons** — Get actionable lessons from reflection
57
+
58
+ Skills: use `/learn` to save an explicit learning.
59
+
60
+ ## Architecture
61
+
62
+ ```
63
+ OpenClaw Gateway (Node.js)
64
+ └── @gamaze/hicortex plugin (TypeScript, in-process)
65
+ ├── before_agent_start → inject lessons into agent context
66
+ ├── agent_end → capture conversation, distill via LLM
67
+ ├── session_end → check if consolidation overdue
68
+ ├── registerTool → hicortex_search, hicortex_context, hicortex_ingest, hicortex_lessons
69
+ └── registerService → DB init, LLM auto-config, nightly consolidation
70
+ ├── SQLite + sqlite-vec + FTS5 (single file, in-process)
71
+ ├── bge-small-en-v1.5 embeddings (ONNX, local CPU)
72
+ └── BM25 + vector search with RRF fusion
73
+ ```
74
+
75
+ No sidecar, no HTTP server, no separate process. Everything runs inside the gateway.
76
+
77
+ ## Pricing
78
+
79
+ | Tier | Price | Memories | Features |
80
+ |------|-------|----------|----------|
81
+ | Free | $0 | 250 | Full features: search, reflection, lessons, linking |
82
+ | Pro | $9/month | Unlimited | Everything in Free, unlimited |
83
+ | Lifetime | $149 | Unlimited | Pro forever |
84
+ | Team | $29/month | Unlimited | Multi-agent shared memory |
85
+
86
+ Get a license key at [hicortex.gamaze.com](https://hicortex.gamaze.com).
87
+
88
+ ## Uninstall
89
+
90
+ ```bash
91
+ openclaw plugins uninstall hicortex
92
+ ```
93
+
94
+ Your memory database is preserved by default. To remove it: delete `~/.openclaw/data/hicortex.db`.
95
+
96
+ ## Development
97
+
98
+ ```bash
99
+ # Local dev install
100
+ openclaw plugins install -l ./packages/openclaw-plugin
101
+
102
+ # Build
103
+ cd packages/openclaw-plugin
104
+ npm install
105
+ npm run build
106
+
107
+ # Test
108
+ npm test
109
+ ```
110
+
111
+ ## Troubleshooting
112
+
113
+ **Tools not visible to agent:** The plugin auto-adds tools to `tools.allow` on startup. If tools still don't appear, check that the gateway was restarted after install.
114
+
115
+ **LLM auto-config failed:** Check the gateway log for `[hicortex] WARNING`. You may need to add `llmBaseUrl` to the plugin config manually for non-standard providers.
116
+
117
+ **No lessons generated:** Reflection requires an LLM. Check that your LLM provider is accessible and has sufficient quota.
118
+
119
+ **First startup slow:** The embedding model (~130MB) downloads on first run. Allow up to 2 minutes.
@@ -0,0 +1,36 @@
1
/**
 * Nightly consolidation pipeline — importance scoring, reflection,
 * link discovery, decay/prune.
 * Ported from hicortex/consolidate/ (stages.py, __init__.py, budget.py).
 */
import type Database from "better-sqlite3";
import type { ConsolidationReport } from "./types.js";
import type { LlmClient } from "./llm.js";
import type { EmbedFn } from "./retrieval.js";
/**
 * Tracks LLM call usage against a fixed per-run budget so a single
 * consolidation run cannot issue unbounded LLM requests.
 */
export declare class BudgetTracker {
    /** Maximum number of LLM calls allowed for the run. */
    maxCalls: number;
    /** Calls consumed so far. */
    callsUsed: number;
    /** Per-stage breakdown of consumed calls (stage name -> count). */
    callsByStage: Record<string, number>;
    constructor(maxCalls: number);
    /** True once the budget is fully consumed. */
    get exhausted(): boolean;
    /** Calls still available (never negative). */
    get remaining(): number;
    /**
     * Reserve `count` calls (default 1) for `stage`. Returns false — and
     * reserves nothing — when the request would exceed the budget.
     */
    use(stage: string, count?: number): boolean;
    /** Snapshot of budget usage for inclusion in a ConsolidationReport. */
    summary(): NonNullable<ConsolidationReport["budget"]>;
}
/**
 * Parse JSON from LLM output, tolerating markdown fences and indexed formats.
 */
export declare function parseJsonLenient<T>(text: string, fallback: T): T;
/**
 * Run the full consolidation pipeline. Returns a structured report.
 */
export declare function runConsolidation(db: Database.Database, llm: LlmClient, embedFn: EmbedFn, dryRun?: boolean): Promise<ConsolidationReport>;
/**
 * Calculate milliseconds until the next occurrence of a given hour (local time).
 */
export declare function msUntilHour(hour: number): number;
/**
 * Schedule the consolidation pipeline to run nightly.
 * Returns a cleanup function to cancel the timer.
 */
export declare function scheduleConsolidation(db: Database.Database, llm: LlmClient, embedFn: EmbedFn, hour?: number): () => void;
@@ -0,0 +1,482 @@
1
"use strict";
/**
 * Nightly consolidation pipeline — importance scoring, reflection,
 * link discovery, decay/prune.
 * Ported from hicortex/consolidate/ (stages.py, __init__.py, budget.py).
 */
// The helpers below (__createBinding / __setModuleDefault / __importStar)
// are standard TypeScript-compiler-emitted CommonJS interop shims used to
// support `import * as ns` of CommonJS modules. They are generated code —
// do not edit by hand.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
40
Object.defineProperty(exports, "__esModule", { value: true });
exports.BudgetTracker = void 0;
exports.parseJsonLenient = parseJsonLenient;
exports.runConsolidation = runConsolidation;
exports.msUntilHour = msUntilHour;
exports.scheduleConsolidation = scheduleConsolidation;
const node_fs_1 = require("node:fs");
const node_path_1 = require("node:path");
const node_os_1 = require("node:os");
const retrieval_js_1 = require("./retrieval.js");
const storage = __importStar(require("./storage.js"));
const prompts_js_1 = require("./prompts.js");
// Marker file holding the ISO timestamp of the last successful consolidation
// run (read by stagePrecheck, advanced by runConsolidation).
// NOTE(review): this lives under ~/.claude/memory while the README documents
// plugin data under ~/.openclaw/data — confirm the intended location.
const LAST_CONSOLIDATED_PATH = (0, node_path_1.join)((0, node_os_1.homedir)(), ".claude", "memory", "last-consolidated.txt");
// Default config constants (matching Python config.py)
const CONSOLIDATE_MAX_LLM_CALLS = 200; // hard cap on LLM calls per nightly run
const CONSOLIDATE_PRUNE_MIN_AGE_DAYS = 90; // only memories older than this are prune candidates
const CONSOLIDATE_LINK_THRESHOLD = 0.55; // minimum similarity (1 - vector distance) to auto-link
57
+ // ---------------------------------------------------------------------------
58
+ // BudgetTracker
59
+ // ---------------------------------------------------------------------------
60
// Guards the number of LLM calls one consolidation run may issue. Callers
// must obtain permission via use() before each batch of LLM requests.
class BudgetTracker {
    maxCalls;
    callsUsed = 0;
    callsByStage = {};
    constructor(maxCalls) {
        this.maxCalls = maxCalls;
    }
    // True once every budgeted call has been consumed.
    get exhausted() {
        return this.callsUsed >= this.maxCalls;
    }
    // Calls still available; clamped so it never goes negative.
    get remaining() {
        return Math.max(0, this.maxCalls - this.callsUsed);
    }
    // Reserve `count` calls for `stage`. When the request would overrun the
    // budget, nothing is recorded and false is returned.
    use(stage, count = 1) {
        const wouldUse = this.callsUsed + count;
        if (wouldUse > this.maxCalls) {
            console.warn(`[hicortex] Budget exhausted: ${this.callsUsed}/${this.maxCalls} used, ` +
                `requested ${count} more (stage: ${stage})`);
            return false;
        }
        this.callsUsed = wouldUse;
        this.callsByStage[stage] = (this.callsByStage[stage] ?? 0) + count;
        return true;
    }
    // Snapshot of usage for the consolidation report (per-stage map is copied
    // so later mutation cannot leak into an already-emitted report).
    summary() {
        return {
            max_calls: this.maxCalls,
            calls_used: this.callsUsed,
            calls_remaining: this.remaining,
            calls_by_stage: { ...this.callsByStage },
        };
    }
}
exports.BudgetTracker = BudgetTracker;
93
+ // ---------------------------------------------------------------------------
94
+ // JSON parsing helper
95
+ // ---------------------------------------------------------------------------
96
/**
 * Parse JSON from LLM output, tolerating markdown fences and indexed formats.
 *
 * Handling order:
 *   1. Strip a surrounding ``` code fence (with optional language tag).
 *   2. Attempt strict JSON.parse.
 *   3. Fall back to the "[0] 0.7\n[1] 0.6" indexed-score format some models
 *      emit, returning an array of floats.
 *   4. Otherwise warn and return `fallback`.
 */
function parseJsonLenient(text, fallback) {
    text = text.trim();
    // Strip markdown code fences
    if (text.startsWith("```")) {
        const lines = text.split("\n");
        const stripped = lines.slice(1);
        if (stripped.length > 0 && stripped[stripped.length - 1].trim() === "```") {
            stripped.pop();
        }
        text = stripped.join("\n").trim();
    }
    try {
        return JSON.parse(text);
    }
    catch {
        // Not strict JSON — fall through to the indexed format below.
    }
    // Handle "[0] 0.7\n[1] 0.6" format. Neither matchAll nor parseFloat can
    // throw here, so no try/catch is needed (the original wrapper was dead code).
    const indexed = [...text.matchAll(/\[\d+\]\s*([\d.]+)/g)];
    if (indexed.length > 0) {
        return indexed.map((m) => parseFloat(m[1]));
    }
    console.warn(`[hicortex] Failed to parse LLM output: ${text.slice(0, 200)}`);
    return fallback;
}
129
+ // ---------------------------------------------------------------------------
130
+ // Stage 1: Pre-check
131
+ // ---------------------------------------------------------------------------
132
// Read the ISO timestamp of the last successful consolidation run.
// Returns "" when the marker file is missing or unreadable.
function readLastConsolidated() {
    let stamp = "";
    try {
        stamp = (0, node_fs_1.readFileSync)(LAST_CONSOLIDATED_PATH, "utf-8").trim();
    }
    catch {
        // A missing marker means "never consolidated".
    }
    return stamp;
}
140
// Stage 1: decide whether consolidation needs to run at all. Pulls memories
// created since the last recorded run; when there are none, the pipeline is
// skipped entirely.
function stagePrecheck(db) {
    const lastTs = readLastConsolidated();
    // Empty marker -> epoch, so every memory counts as "new".
    const lastDt = lastTs !== "" ? lastTs : "1970-01-01T00:00:00.000Z";
    const newMemories = storage.getMemoriesSince(db, lastDt);
    const skip = newMemories.length === 0;
    return {
        skip,
        reason: skip
            ? "No new memories since last consolidation"
            : `${newMemories.length} new memories found`,
        newMemories,
        lastDt,
    };
}
159
+ // ---------------------------------------------------------------------------
160
+ // Stage 2: Importance Scoring
161
+ // ---------------------------------------------------------------------------
162
/**
 * Stage 2: score each memory's importance (stored as base_strength in
 * [0, 1]) by asking the LLM to rate batches of up to 10 memories at once.
 *
 * Budget accounting: one budget unit per batch. When the budget runs out,
 * the remaining memories are counted as skipped and left unscored (a later
 * run picks them up via the "unscored" query).
 *
 * In dry-run mode prompts are still built but no LLM calls, budget usage,
 * or DB writes happen.
 *
 * Returns { scored, failed, skipped_budget } counts for the report.
 */
async function stageImportance(db, memories, llm, budget, dryRun) {
    const batchSize = 10;
    let scored = 0;
    let failed = 0;
    let skippedBudget = 0;
    for (let i = 0; i < memories.length; i += batchSize) {
        if (budget.exhausted) {
            skippedBudget += memories.length - i;
            break;
        }
        const batch = memories.slice(i, i + batchSize);
        // Truncate each memory to 500 chars to keep the prompt bounded.
        const lines = batch.map((mem, idx) => `[${idx}] ${mem.content.slice(0, 500)}`);
        const memoriesBlock = lines.join("\n\n");
        const prompt = (0, prompts_js_1.importanceScoring)(memoriesBlock);
        if (dryRun)
            continue;
        if (!budget.use("importance")) {
            skippedBudget += memories.length - i;
            break;
        }
        try {
            const raw = await llm.completeFast(prompt, 256);
            let scores = parseJsonLenient(raw, null);
            // Defend against malformed LLM output: pad/trim to batch size and
            // default missing entries to a neutral 0.5.
            if (!Array.isArray(scores)) {
                scores = new Array(batch.length).fill(0.5);
            }
            while (scores.length < batch.length)
                scores.push(0.5);
            scores = scores.slice(0, batch.length);
            for (let j = 0; j < batch.length; j++) {
                // Clamp to [0, 1]; non-numeric entries become neutral 0.5.
                // Number()/Math.min/Math.max cannot throw, so the try/catch the
                // original wrapped around this was dead code.
                let scoreVal = Math.max(0, Math.min(1, Number(scores[j])));
                if (Number.isNaN(scoreVal))
                    scoreVal = 0.5;
                try {
                    storage.updateMemory(db, batch[j].id, { base_strength: scoreVal });
                    scored++;
                }
                catch {
                    failed++;
                }
            }
        }
        catch {
            // LLM call failed — count the whole batch as failed.
            failed += batch.length;
        }
    }
    return { scored, failed, skipped_budget: skippedBudget };
}
216
+ // ---------------------------------------------------------------------------
217
+ // Stage 2.5: Reflection
218
+ // ---------------------------------------------------------------------------
219
/**
 * Stage 2.5: reflection — ask the LLM to distill this run's new memories
 * into durable "lesson" memories, each stored with an embedding so it is
 * retrievable like any other memory.
 *
 * Consumes one budget unit for the single reflection call. Per-lesson
 * storage failures are contained so one bad item cannot abort the stage.
 * Returns { lessons_generated, ... } counts/flags for the report.
 */
async function stageReflection(db, memories, llm, budget, embedFn, dryRun) {
    if (memories.length === 0) {
        return { lessons_generated: 0, skipped: true, reason: "no memories" };
    }
    // Build summary — bounded to the first 50 memories, 300 chars each,
    // to keep the reflection prompt a manageable size.
    const lines = memories.slice(0, 50).map((mem) => {
        const project = mem.project ?? "unknown";
        const agent = mem.source_agent ?? "unknown";
        const content = mem.content.slice(0, 300);
        return `[${project}] [${agent}] ${content}`;
    });
    const memoriesBlock = lines.join("\n\n");
    const prompt = (0, prompts_js_1.reflection)(memoriesBlock);
    if (dryRun) {
        return { lessons_generated: 0, skipped: false };
    }
    if (!budget.use("reflection")) {
        return { lessons_generated: 0, skipped: true, reason: "budget_exhausted" };
    }
    try {
        const raw = await llm.completeReflect(prompt, 2048);
        const lessons = parseJsonLenient(raw, []);
        if (!Array.isArray(lessons)) {
            return { lessons_generated: 0, failed: true };
        }
        let generated = 0;
        for (const lessonObj of lessons) {
            // Each lesson must be an object with at least a non-empty "lesson" text.
            if (typeof lessonObj !== "object" || lessonObj === null)
                continue;
            const lo = lessonObj;
            const lessonText = String(lo.lesson ?? "");
            if (!lessonText)
                continue;
            const project = String(lo.project ?? "global");
            const severity = String(lo.severity ?? "important");
            const confidence = String(lo.confidence ?? "medium");
            const sourcePattern = String(lo.source_pattern ?? "");
            // Render the lesson as markdown for storage.
            let content = `## Lesson: ${lessonText}\n\n`;
            content += `**Severity:** ${severity}\n`;
            content += `**Confidence:** ${confidence}\n`;
            if (sourcePattern)
                content += `**Pattern:** ${sourcePattern}\n`;
            content += `**Generated:** ${new Date().toISOString().slice(0, 10)}`;
            // Severity -> initial base_strength; unknown severities fall back
            // to 0.8 at the lookup below.
            const baseStrength = {
                critical: 0.95,
                important: 0.8,
                minor: 0.6,
            };
            try {
                // Check memory cap before storing lesson
                // NOTE(review): getFeatures("") is always called with an empty
                // license key, so the free-tier cap applies here even when a
                // licenseKey is configured — confirm whether the configured key
                // should be threaded through instead.
                const { getFeatures } = await import("./license.js");
                const features = getFeatures("");
                if (features.maxMemories > 0 && storage.countMemories(db) >= features.maxMemories) {
                    console.log(`[hicortex] Free tier limit (${features.maxMemories} memories). ` +
                        `Existing memories and lessons still work. New lessons won't be saved. ` +
                        `Upgrade for unlimited usage: https://hicortex.gamaze.com/`);
                    // Cap reached: stop storing further lessons for this run.
                    break;
                }
                const embedding = await embedFn(content);
                storage.insertMemory(db, content, embedding, {
                    sourceAgent: "hicortex/reflection",
                    project,
                    memoryType: "lesson",
                    baseStrength: baseStrength[severity] ?? 0.8,
                    privacy: "WORK",
                });
                generated++;
            }
            catch {
                // Failed to store lesson — skip it and continue with the rest.
            }
        }
        return { lessons_generated: generated, failed: false };
    }
    catch {
        // The reflection LLM call itself failed.
        return { lessons_generated: 0, failed: true };
    }
}
297
+ // ---------------------------------------------------------------------------
298
+ // Stage 3: Link Discovery (vector similarity auto-link)
299
+ // ---------------------------------------------------------------------------
300
// Stage 3: auto-link each new memory to its nearest neighbors by vector
// similarity. Pairs above CONSOLIDATE_LINK_THRESHOLD are persisted as
// "relates_to" edges; in dry-run mode they are only counted.
async function stageLinks(db, memories, embedFn, dryRun) {
    let autoLinked = 0;
    let failed = 0;
    for (const mem of memories) {
        try {
            const embedding = await embedFn(mem.content);
            // Top 10 neighbors, excluding the memory itself.
            const neighbors = storage.vectorSearch(db, embedding, 10, [mem.id]);
            for (const neighbor of neighbors) {
                // vectorSearch returns a distance; convert to a similarity.
                const similarity = 1.0 - neighbor.distance;
                if (similarity <= CONSOLIDATE_LINK_THRESHOLD)
                    continue;
                if (dryRun) {
                    autoLinked++;
                    continue;
                }
                try {
                    storage.addLink(db, mem.id, neighbor.id, "relates_to", similarity);
                    autoLinked++;
                }
                catch {
                    failed++;
                }
            }
        }
        catch {
            // Embedding or neighbor search failed for this memory.
            failed++;
        }
    }
    return { auto_linked: autoLinked, failed };
}
331
+ // ---------------------------------------------------------------------------
332
+ // Stage 4: Decay & Prune
333
+ // ---------------------------------------------------------------------------
334
// Stage 4: prune memories that are older than the minimum age, unaccessed,
// and whose decayed effective strength has fallen below 0.01. Link counts
// feed the strength computation, so well-connected memories survive longer.
function stageDecayPrune(db, dryRun) {
    const now = new Date();
    const minAgeMs = CONSOLIDATE_PRUNE_MIN_AGE_DAYS * 24 * 60 * 60 * 1000;
    const cutoff = new Date(now.getTime() - minAgeMs);
    const oldUnaccessed = storage.getPruneCandidates(db, cutoff.toISOString());
    const linkCounts = storage.getAllLinkCounts(db);
    let candidates = 0;
    let pruned = 0;
    let failed = 0;
    for (const mem of oldUnaccessed) {
        const eff = (0, retrieval_js_1.effectiveStrength)(mem.base_strength ?? 0.5, mem.last_accessed, now, {
            accessCount: 0,
            linkCount: linkCounts.get(mem.id) ?? 0,
        });
        // Anything still above the floor is kept.
        if (eff >= 0.01)
            continue;
        candidates++;
        if (dryRun)
            continue;
        try {
            storage.deleteMemory(db, mem.id);
            pruned++;
        }
        catch {
            failed++;
        }
    }
    return { candidates, pruned, failed };
}
363
+ // ---------------------------------------------------------------------------
364
+ // Full pipeline
365
+ // ---------------------------------------------------------------------------
366
/**
 * Run the full consolidation pipeline. Returns a structured report.
 *
 * Stage order: precheck -> importance scoring -> reflection -> link
 * discovery -> decay/prune. Any stage error marks the run "failed"; the
 * last-consolidated marker is only advanced after a completed non-dry run.
 */
async function runConsolidation(db, llm, embedFn, dryRun = false) {
    const startedAt = new Date();
    const report = {
        started_at: startedAt.toISOString(),
        dry_run: dryRun,
        status: "completed",
        stages: {},
    };
    // Stage 1: Pre-check — memories created since the last run, plus any
    // older memories that never received an importance score.
    const precheck = stagePrecheck(db);
    const unscored = storage.getUnscoredMemories(db);
    const newIds = new Set(precheck.newMemories.map((m) => m.id));
    const scoreMemories = [
        ...precheck.newMemories,
        ...unscored.filter((m) => !newIds.has(m.id)),
    ];
    const unscoredExtra = scoreMemories.length - precheck.newMemories.length;
    const skip = scoreMemories.length === 0;
    report.stages.precheck = {
        skip,
        reason: skip
            ? precheck.reason
            : `${precheck.newMemories.length} new + ${unscoredExtra} unscored memories`,
        new_memory_count: precheck.newMemories.length,
        unscored_count: unscoredExtra,
    };
    if (skip) {
        report.status = "skipped";
        report.completed_at = new Date().toISOString();
        return report;
    }
    const budget = new BudgetTracker(CONSOLIDATE_MAX_LLM_CALLS);
    try {
        // Stage 2: Importance Scoring
        report.stages.importance = await stageImportance(db, scoreMemories, llm, budget, dryRun);
        // Stage 2.5: Reflection (new memories only)
        report.stages.reflection = await stageReflection(db, precheck.newMemories, llm, budget, embedFn, dryRun);
        // Stage 3: Link Discovery (new memories only)
        report.stages.links = await stageLinks(db, precheck.newMemories, embedFn, dryRun);
        // Stage 4: Decay & Prune
        report.stages.decay_prune = stageDecayPrune(db, dryRun);
    }
    catch (err) {
        report.status = "failed";
        console.error("[hicortex] Consolidation pipeline error:", err);
    }
    // Advance the marker only on a real, successful run so a failed run is
    // retried over the same window next time.
    if (!dryRun && report.status === "completed") {
        try {
            const dir = (0, node_path_1.join)((0, node_os_1.homedir)(), ".claude", "memory");
            (0, node_fs_1.mkdirSync)(dir, { recursive: true });
            (0, node_fs_1.writeFileSync)(LAST_CONSOLIDATED_PATH, new Date().toISOString());
        }
        catch {
            // Non-fatal
        }
    }
    report.budget = budget.summary();
    report.completed_at = new Date().toISOString();
    // Elapsed wall time, rounded to one decimal place.
    report.elapsed_seconds =
        Math.round((Date.now() - startedAt.getTime()) / 100) / 10;
    return report;
}
432
+ // ---------------------------------------------------------------------------
433
+ // Scheduling
434
+ // ---------------------------------------------------------------------------
435
/**
 * Calculate milliseconds until the next occurrence of a given hour (local
 * time). The run is anchored at :30 past the hour; if that moment has
 * already passed today, tomorrow's occurrence is used.
 */
function msUntilHour(hour) {
    const now = new Date();
    const next = new Date(now);
    next.setHours(hour, 30, 0, 0); // :30 past the hour
    const passedToday = next.getTime() <= now.getTime();
    if (passedToday) {
        next.setDate(next.getDate() + 1);
    }
    return next.getTime() - now.getTime();
}
448
const ONE_DAY_MS = 24 * 60 * 60 * 1000;
/**
 * Schedule the consolidation pipeline to run nightly at `hour`:30 local time.
 * Returns a cleanup function to cancel the timer.
 *
 * Each run re-arms a one-shot timer via msUntilHour() instead of the
 * original fixed 24 h setInterval: a fixed interval drifts off the
 * configured local hour across DST transitions, while re-computing the
 * delay keeps runs anchored to local wall-clock time.
 */
function scheduleConsolidation(db, llm, embedFn, hour = 2) {
    let timer = null;
    let cancelled = false;
    const run = () => {
        runConsolidation(db, llm, embedFn)
            .then((report) => {
            console.log(`[hicortex] Consolidation ${report.status} in ${report.elapsed_seconds}s`);
        })
            .catch((err) => {
            console.error("[hicortex] Consolidation failed:", err);
        })
            .finally(armNext);
    };
    // Arm the timer for the next occurrence of hour:30. The delay is clamped
    // to [1 minute, 25 hours] to guard against degenerate values from clock
    // adjustments, which would otherwise cause tight re-run loops.
    const armNext = () => {
        if (cancelled)
            return;
        const delay = Math.min(Math.max(msUntilHour(hour), 60_000), ONE_DAY_MS + 60 * 60 * 1000);
        timer = setTimeout(run, delay);
    };
    const delay = msUntilHour(hour);
    console.log(`[hicortex] Consolidation scheduled in ${Math.round(delay / 60_000)} minutes`);
    timer = setTimeout(run, delay);
    return () => {
        cancelled = true;
        if (timer)
            clearTimeout(timer);
    };
}
package/dist/db.d.ts ADDED
@@ -0,0 +1,19 @@
1
+ /**
2
+ * Database initialization with better-sqlite3 + sqlite-vec.
3
+ * Ported from hicortex/db.py — same schema for migration compatibility.
4
+ */
5
+ import Database from "better-sqlite3";
6
+ /**
7
+ * Initialize the database: load sqlite-vec, enable WAL, create all tables.
8
+ * Returns the open Database instance (caller manages lifetime).
9
+ */
10
+ export declare function initDb(dbPath: string): Database.Database;
11
+ /**
12
+ * Return database statistics.
13
+ */
14
+ export declare function getStats(db: Database.Database, dbPath: string): {
15
+ memories: number;
16
+ links: number;
17
+ db_size_bytes: number;
18
+ by_type: Record<string, number>;
19
+ };