@akashabot/openclaw-memory-offline-core 0.1.1 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3):
  1. package/dist/index.d.ts +106 -0
  2. package/dist/index.js +472 -74
  3. package/package.json +23 -15
package/dist/index.d.ts CHANGED
@@ -18,6 +18,22 @@ export type MemItem = {
18
18
  text: string;
19
19
  tags: string | null;
20
20
  meta: string | null;
21
+ entity_id: string | null;
22
+ process_id: string | null;
23
+ session_id: string | null;
24
+ };
25
+ export type Fact = {
26
+ id: string;
27
+ created_at: number;
28
+ subject: string;
29
+ predicate: string;
30
+ object: string;
31
+ confidence: number;
32
+ source_item_id: string | null;
33
+ entity_id: string | null;
34
+ };
35
+ export type InsertFactInput = Omit<Fact, 'created_at'> & {
36
+ created_at?: number;
21
37
  };
22
38
  export type InsertItemInput = Omit<MemItem, 'created_at'> & {
23
39
  created_at?: number;
@@ -32,6 +48,11 @@ export type HybridResult = LexicalResult & {
32
48
  };
33
49
  export declare function openDb(dbPath: string): Database.Database;
34
50
  export declare function initSchema(db: Database.Database): void;
51
+ /**
52
+ * Run migrations for existing databases that lack Phase 1 columns.
53
+ * Safe to call on every startup - will only add missing columns.
54
+ */
55
+ export declare function runMigrations(db: Database.Database): void;
35
56
  /**
36
57
  * Minimal escaping for FTS5 queries.
37
58
  *
@@ -60,3 +81,88 @@ export declare function hybridSearch(db: Database.Database, cfg: MemConfig, quer
60
81
  candidates?: number;
61
82
  semanticWeight?: number;
62
83
  }): Promise<HybridResult[]>;
84
+ export type FilterOpts = {
85
+ entity_id?: string | null;
86
+ process_id?: string | null;
87
+ session_id?: string | null;
88
+ };
89
+ /**
90
+ * Filter hybrid search results by attribution/session fields.
91
+ */
92
+ export declare function filterResults(results: HybridResult[], opts: FilterOpts): HybridResult[];
93
+ /**
94
+ * Hybrid search with built-in filtering (more efficient than filter + hybridSearch).
95
+ */
96
+ export declare function hybridSearchFiltered(db: Database.Database, cfg: MemConfig, query: string, opts?: {
97
+ topK?: number;
98
+ candidates?: number;
99
+ semanticWeight?: number;
100
+ filter?: FilterOpts;
101
+ }): Promise<HybridResult[]>;
102
+ /**
103
+ * Get all memories for a specific entity (e.g., "what did Loïc tell me?").
104
+ */
105
+ export declare function getMemoriesByEntity(db: Database.Database, entity_id: string, limit?: number): MemItem[];
106
+ /**
107
+ * Get all memories for a specific session/conversation.
108
+ */
109
+ export declare function getMemoriesBySession(db: Database.Database, session_id: string, limit?: number): MemItem[];
110
+ /**
111
+ * Get all memories captured by a specific process/agent.
112
+ */
113
+ export declare function getMemoriesByProcess(db: Database.Database, process_id: string, limit?: number): MemItem[];
114
+ /**
115
+ * List distinct entity_ids in the database.
116
+ */
117
+ export declare function listEntities(db: Database.Database): string[];
118
+ /**
119
+ * List distinct session_ids in the database.
120
+ */
121
+ export declare function listSessions(db: Database.Database): string[];
122
+ /**
123
+ * Insert a new fact into the database.
124
+ */
125
+ export declare function insertFact(db: Database.Database, input: InsertFactInput): Fact;
126
+ /**
127
+ * Get all facts about a specific subject.
128
+ */
129
+ export declare function getFactsBySubject(db: Database.Database, subject: string, limit?: number): Fact[];
130
+ /**
131
+ * Get all facts with a specific predicate.
132
+ */
133
+ export declare function getFactsByPredicate(db: Database.Database, predicate: string, limit?: number): Fact[];
134
+ /**
135
+ * Search facts by subject, predicate, or object (simple LIKE search).
136
+ */
137
+ export declare function searchFacts(db: Database.Database, query: string, limit?: number): Fact[];
138
+ /**
139
+ * Get all facts (optionally filtered by entity_id).
140
+ */
141
+ export declare function getAllFacts(db: Database.Database, entityId?: string, limit?: number): Fact[];
142
+ /**
143
+ * List distinct subjects in the facts table.
144
+ */
145
+ export declare function listSubjects(db: Database.Database): string[];
146
+ /**
147
+ * List distinct predicates in the facts table.
148
+ */
149
+ export declare function listPredicates(db: Database.Database): string[];
150
+ /**
151
+ * Delete a fact by ID.
152
+ */
153
+ export declare function deleteFact(db: Database.Database, id: string): boolean;
154
+ /**
155
+ * Delete all facts derived from a specific memory item.
156
+ */
157
+ export declare function deleteFactsBySourceItem(db: Database.Database, sourceItemId: string): number;
158
+ /**
159
+ * Simple pattern-based fact extraction.
160
+ * Looks for common patterns like "X works at Y", "X prefers Y", etc.
161
+ * Returns an array of potential facts (not inserted yet).
162
+ */
163
+ export declare function extractFactsSimple(text: string, entityId?: string): Array<{
164
+ subject: string;
165
+ predicate: string;
166
+ object: string;
167
+ confidence: number;
168
+ }>;
package/dist/index.js CHANGED
@@ -6,52 +6,103 @@ export function openDb(dbPath) {
6
6
  return db;
7
7
  }
8
8
  export function initSchema(db) {
9
- db.exec(`
10
- CREATE TABLE IF NOT EXISTS items (
11
- id TEXT PRIMARY KEY,
12
- created_at INTEGER NOT NULL,
13
- source TEXT,
14
- source_id TEXT,
15
- title TEXT,
16
- text TEXT NOT NULL,
17
- tags TEXT,
18
- meta TEXT
19
- );
20
-
21
- CREATE VIRTUAL TABLE IF NOT EXISTS items_fts USING fts5(
22
- title,
23
- text,
24
- tags,
25
- content='items',
26
- content_rowid='rowid'
27
- );
28
-
29
- -- Keep the FTS index in sync with items.
30
- CREATE TRIGGER IF NOT EXISTS items_ai AFTER INSERT ON items BEGIN
31
- INSERT INTO items_fts(rowid, title, text, tags)
32
- VALUES (new.rowid, new.title, new.text, new.tags);
33
- END;
34
-
35
- CREATE TRIGGER IF NOT EXISTS items_ad AFTER DELETE ON items BEGIN
36
- INSERT INTO items_fts(items_fts, rowid, title, text, tags)
37
- VALUES('delete', old.rowid, old.title, old.text, old.tags);
38
- END;
39
-
40
- CREATE TRIGGER IF NOT EXISTS items_au AFTER UPDATE ON items BEGIN
41
- INSERT INTO items_fts(items_fts, rowid, title, text, tags)
42
- VALUES('delete', old.rowid, old.title, old.text, old.tags);
43
- INSERT INTO items_fts(rowid, title, text, tags)
44
- VALUES (new.rowid, new.title, new.text, new.tags);
45
- END;
46
-
47
- CREATE TABLE IF NOT EXISTS embeddings (
48
- item_id TEXT PRIMARY KEY,
49
- model TEXT NOT NULL,
50
- dims INTEGER NOT NULL,
51
- vector BLOB NOT NULL,
52
- updated_at INTEGER NOT NULL,
53
- FOREIGN KEY(item_id) REFERENCES items(id)
54
- );
9
+ db.exec(`
10
+ CREATE TABLE IF NOT EXISTS items (
11
+ id TEXT PRIMARY KEY,
12
+ created_at INTEGER NOT NULL,
13
+ source TEXT,
14
+ source_id TEXT,
15
+ title TEXT,
16
+ text TEXT NOT NULL,
17
+ tags TEXT,
18
+ meta TEXT,
19
+ -- Phase 1: Attribution & Session
20
+ entity_id TEXT,
21
+ process_id TEXT,
22
+ session_id TEXT
23
+ );
24
+
25
+ CREATE VIRTUAL TABLE IF NOT EXISTS items_fts USING fts5(
26
+ title,
27
+ text,
28
+ tags,
29
+ content='items',
30
+ content_rowid='rowid'
31
+ );
32
+
33
+ -- Keep the FTS index in sync with items.
34
+ CREATE TRIGGER IF NOT EXISTS items_ai AFTER INSERT ON items BEGIN
35
+ INSERT INTO items_fts(rowid, title, text, tags)
36
+ VALUES (new.rowid, new.title, new.text, new.tags);
37
+ END;
38
+
39
+ CREATE TRIGGER IF NOT EXISTS items_ad AFTER DELETE ON items BEGIN
40
+ INSERT INTO items_fts(items_fts, rowid, title, text, tags)
41
+ VALUES('delete', old.rowid, old.title, old.text, old.tags);
42
+ END;
43
+
44
+ CREATE TRIGGER IF NOT EXISTS items_au AFTER UPDATE ON items BEGIN
45
+ INSERT INTO items_fts(items_fts, rowid, title, text, tags)
46
+ VALUES('delete', old.rowid, old.title, old.text, old.tags);
47
+ INSERT INTO items_fts(rowid, title, text, tags)
48
+ VALUES (new.rowid, new.title, new.text, new.tags);
49
+ END;
50
+
51
+ CREATE TABLE IF NOT EXISTS embeddings (
52
+ item_id TEXT PRIMARY KEY,
53
+ model TEXT NOT NULL,
54
+ dims INTEGER NOT NULL,
55
+ vector BLOB NOT NULL,
56
+ updated_at INTEGER NOT NULL,
57
+ FOREIGN KEY(item_id) REFERENCES items(id)
58
+ );
59
+
60
+ -- Phase 2: Structured Facts Table
61
+ CREATE TABLE IF NOT EXISTS facts (
62
+ id TEXT PRIMARY KEY,
63
+ created_at INTEGER NOT NULL,
64
+ subject TEXT NOT NULL,
65
+ predicate TEXT NOT NULL,
66
+ object TEXT NOT NULL,
67
+ confidence REAL NOT NULL DEFAULT 0.5,
68
+ source_item_id TEXT,
69
+ entity_id TEXT,
70
+ FOREIGN KEY(source_item_id) REFERENCES items(id)
71
+ );
72
+
73
+ -- Indexes for Phase 1: Attribution & Session filtering
74
+ CREATE INDEX IF NOT EXISTS idx_items_entity_id ON items(entity_id);
75
+ CREATE INDEX IF NOT EXISTS idx_items_process_id ON items(process_id);
76
+ CREATE INDEX IF NOT EXISTS idx_items_session_id ON items(session_id);
77
+
78
+ -- Indexes for Phase 2: Facts queries
79
+ CREATE INDEX IF NOT EXISTS idx_facts_subject ON facts(subject);
80
+ CREATE INDEX IF NOT EXISTS idx_facts_predicate ON facts(predicate);
81
+ CREATE INDEX IF NOT EXISTS idx_facts_entity_id ON facts(entity_id);
82
+ `);
83
+ }
84
+ /**
85
+ * Run migrations for existing databases that lack Phase 1 columns.
86
+ * Safe to call on every startup - will only add missing columns.
87
+ */
88
+ export function runMigrations(db) {
89
+ const cols = db.prepare("PRAGMA table_info(items)").all();
90
+ const existing = new Set(cols.map(c => c.name));
91
+ const migrations = [
92
+ { col: 'entity_id', sql: 'ALTER TABLE items ADD COLUMN entity_id TEXT' },
93
+ { col: 'process_id', sql: 'ALTER TABLE items ADD COLUMN process_id TEXT' },
94
+ { col: 'session_id', sql: 'ALTER TABLE items ADD COLUMN session_id TEXT' },
95
+ ];
96
+ for (const m of migrations) {
97
+ if (!existing.has(m.col)) {
98
+ db.exec(m.sql);
99
+ }
100
+ }
101
+ // Ensure indexes exist (safe to repeat)
102
+ db.exec(`
103
+ CREATE INDEX IF NOT EXISTS idx_items_entity_id ON items(entity_id);
104
+ CREATE INDEX IF NOT EXISTS idx_items_process_id ON items(process_id);
105
+ CREATE INDEX IF NOT EXISTS idx_items_session_id ON items(session_id);
55
106
  `);
56
107
  }
57
108
  /**
@@ -88,9 +139,9 @@ export function searchItems(db, query, limit = 10) {
88
139
  }
89
140
  export function insertItem(db, input) {
90
141
  const now = input.created_at ?? Date.now();
91
- const stmt = db.prepare(`
92
- INSERT INTO items (id, created_at, source, source_id, title, text, tags, meta)
93
- VALUES (@id, @created_at, @source, @source_id, @title, @text, @tags, @meta)
142
+ const stmt = db.prepare(`
143
+ INSERT INTO items (id, created_at, source, source_id, title, text, tags, meta, entity_id, process_id, session_id)
144
+ VALUES (@id, @created_at, @source, @source_id, @title, @text, @tags, @meta, @entity_id, @process_id, @session_id)
94
145
  `);
95
146
  stmt.run({
96
147
  id: input.id,
@@ -101,6 +152,9 @@ export function insertItem(db, input) {
101
152
  text: input.text,
102
153
  tags: input.tags ?? null,
103
154
  meta: input.meta ?? null,
155
+ entity_id: input.entity_id ?? null,
156
+ process_id: input.process_id ?? null,
157
+ session_id: input.session_id ?? null,
104
158
  });
105
159
  return {
106
160
  id: input.id,
@@ -111,26 +165,32 @@ export function insertItem(db, input) {
111
165
  text: input.text,
112
166
  tags: input.tags ?? null,
113
167
  meta: input.meta ?? null,
168
+ entity_id: input.entity_id ?? null,
169
+ process_id: input.process_id ?? null,
170
+ session_id: input.session_id ?? null,
114
171
  };
115
172
  }
116
173
  export function lexicalSearch(db, query, limit = 10) {
117
174
  const rows = db
118
- .prepare(`
119
- SELECT
120
- i.id,
121
- i.created_at,
122
- i.source,
123
- i.source_id,
124
- i.title,
125
- i.text,
126
- i.tags,
127
- i.meta,
128
- bm25(items_fts) AS bm25
129
- FROM items_fts
130
- JOIN items i ON i.rowid = items_fts.rowid
131
- WHERE items_fts MATCH ?
132
- ORDER BY bm25 ASC
133
- LIMIT ?
175
+ .prepare(`
176
+ SELECT
177
+ i.id,
178
+ i.created_at,
179
+ i.source,
180
+ i.source_id,
181
+ i.title,
182
+ i.text,
183
+ i.tags,
184
+ i.meta,
185
+ i.entity_id,
186
+ i.process_id,
187
+ i.session_id,
188
+ bm25(items_fts) AS bm25
189
+ FROM items_fts
190
+ JOIN items i ON i.rowid = items_fts.rowid
191
+ WHERE items_fts MATCH ?
192
+ ORDER BY bm25 ASC
193
+ LIMIT ?
134
194
  `)
135
195
  .all(query, limit);
136
196
  return rows.map((r) => ({
@@ -143,6 +203,9 @@ export function lexicalSearch(db, query, limit = 10) {
143
203
  text: r.text,
144
204
  tags: r.tags,
145
205
  meta: r.meta,
206
+ entity_id: r.entity_id,
207
+ process_id: r.process_id,
208
+ session_id: r.session_id,
146
209
  },
147
210
  // bm25: lower is better; flip sign so higher is better
148
211
  lexicalScore: -Number(r.bm25),
@@ -243,12 +306,12 @@ async function getOrCreateItemEmbedding(db, cfg, itemId, text) {
243
306
  try {
244
307
  const emb = await fetchEmbedding(cfg, text);
245
308
  const blob = vectorToBlob(emb.vector);
246
- db.prepare(`INSERT INTO embeddings (item_id, model, dims, vector, updated_at)
247
- VALUES (?, ?, ?, ?, ?)
248
- ON CONFLICT(item_id) DO UPDATE SET
249
- model=excluded.model,
250
- dims=excluded.dims,
251
- vector=excluded.vector,
309
+ db.prepare(`INSERT INTO embeddings (item_id, model, dims, vector, updated_at)
310
+ VALUES (?, ?, ?, ?, ?)
311
+ ON CONFLICT(item_id) DO UPDATE SET
312
+ model=excluded.model,
313
+ dims=excluded.dims,
314
+ vector=excluded.vector,
252
315
  updated_at=excluded.updated_at`).run(itemId, emb.model, emb.dims, blob, Date.now());
253
316
  return emb;
254
317
  }
@@ -263,9 +326,9 @@ export async function hybridSearch(db, cfg, query, opts) {
263
326
  // Candidates: lexical hits + recents (merged).
264
327
  const lexHits = lexicalSearch(db, query, candidates);
265
328
  const recentRows = db
266
- .prepare(`SELECT id, created_at, source, source_id, title, text, tags, meta
267
- FROM items
268
- ORDER BY created_at DESC
329
+ .prepare(`SELECT id, created_at, source, source_id, title, text, tags, meta, entity_id, process_id, session_id
330
+ FROM items
331
+ ORDER BY created_at DESC
269
332
  LIMIT ?`)
270
333
  .all(candidates);
271
334
  const recent = recentRows.map((r) => ({
@@ -278,6 +341,9 @@ export async function hybridSearch(db, cfg, query, opts) {
278
341
  text: r.text,
279
342
  tags: r.tags,
280
343
  meta: r.meta,
344
+ entity_id: r.entity_id,
345
+ process_id: r.process_id,
346
+ session_id: r.session_id,
281
347
  },
282
348
  lexicalScore: 0,
283
349
  }));
@@ -316,3 +382,335 @@ export async function hybridSearch(db, cfg, query, opts) {
316
382
  out.sort((a, b) => b.score - a.score);
317
383
  return out.slice(0, topK);
318
384
  }
385
+ /**
386
+ * Filter hybrid search results by attribution/session fields.
387
+ */
388
+ export function filterResults(results, opts) {
389
+ return results.filter(r => {
390
+ if (opts.entity_id !== undefined && r.item.entity_id !== opts.entity_id)
391
+ return false;
392
+ if (opts.process_id !== undefined && r.item.process_id !== opts.process_id)
393
+ return false;
394
+ if (opts.session_id !== undefined && r.item.session_id !== opts.session_id)
395
+ return false;
396
+ return true;
397
+ });
398
+ }
399
+ /**
400
+ * Hybrid search with built-in filtering (more efficient than filter + hybridSearch).
401
+ */
402
+ export async function hybridSearchFiltered(db, cfg, query, opts) {
403
+ const results = await hybridSearch(db, cfg, query, opts);
404
+ if (!opts?.filter)
405
+ return results;
406
+ return filterResults(results, opts.filter);
407
+ }
408
+ /**
409
+ * Get all memories for a specific entity (e.g., "what did Loïc tell me?").
410
+ */
411
+ export function getMemoriesByEntity(db, entity_id, limit = 50) {
412
+ const rows = db
413
+ .prepare(`SELECT id, created_at, source, source_id, title, text, tags, meta, entity_id, process_id, session_id
414
+ FROM items
415
+ WHERE entity_id = ?
416
+ ORDER BY created_at DESC
417
+ LIMIT ?`)
418
+ .all(entity_id, limit);
419
+ return rows.map(r => ({
420
+ id: r.id,
421
+ created_at: r.created_at,
422
+ source: r.source,
423
+ source_id: r.source_id,
424
+ title: r.title,
425
+ text: r.text,
426
+ tags: r.tags,
427
+ meta: r.meta,
428
+ entity_id: r.entity_id,
429
+ process_id: r.process_id,
430
+ session_id: r.session_id,
431
+ }));
432
+ }
433
+ /**
434
+ * Get all memories for a specific session/conversation.
435
+ */
436
+ export function getMemoriesBySession(db, session_id, limit = 100) {
437
+ const rows = db
438
+ .prepare(`SELECT id, created_at, source, source_id, title, text, tags, meta, entity_id, process_id, session_id
439
+ FROM items
440
+ WHERE session_id = ?
441
+ ORDER BY created_at ASC
442
+ LIMIT ?`)
443
+ .all(session_id, limit);
444
+ return rows.map(r => ({
445
+ id: r.id,
446
+ created_at: r.created_at,
447
+ source: r.source,
448
+ source_id: r.source_id,
449
+ title: r.title,
450
+ text: r.text,
451
+ tags: r.tags,
452
+ meta: r.meta,
453
+ entity_id: r.entity_id,
454
+ process_id: r.process_id,
455
+ session_id: r.session_id,
456
+ }));
457
+ }
458
+ /**
459
+ * Get all memories captured by a specific process/agent.
460
+ */
461
+ export function getMemoriesByProcess(db, process_id, limit = 100) {
462
+ const rows = db
463
+ .prepare(`SELECT id, created_at, source, source_id, title, text, tags, meta, entity_id, process_id, session_id
464
+ FROM items
465
+ WHERE process_id = ?
466
+ ORDER BY created_at DESC
467
+ LIMIT ?`)
468
+ .all(process_id, limit);
469
+ return rows.map(r => ({
470
+ id: r.id,
471
+ created_at: r.created_at,
472
+ source: r.source,
473
+ source_id: r.source_id,
474
+ title: r.title,
475
+ text: r.text,
476
+ tags: r.tags,
477
+ meta: r.meta,
478
+ entity_id: r.entity_id,
479
+ process_id: r.process_id,
480
+ session_id: r.session_id,
481
+ }));
482
+ }
483
+ /**
484
+ * List distinct entity_ids in the database.
485
+ */
486
+ export function listEntities(db) {
487
+ const rows = db
488
+ .prepare(`SELECT DISTINCT entity_id FROM items WHERE entity_id IS NOT NULL ORDER BY entity_id`)
489
+ .all();
490
+ return rows.map(r => r.entity_id);
491
+ }
492
+ /**
493
+ * List distinct session_ids in the database.
494
+ */
495
+ export function listSessions(db) {
496
+ const rows = db
497
+ .prepare(`SELECT DISTINCT session_id FROM items WHERE session_id IS NOT NULL ORDER BY session_id`)
498
+ .all();
499
+ return rows.map(r => r.session_id);
500
+ }
501
+ // ============================================================================
502
+ // Phase 2: Structured Facts
503
+ // ============================================================================
504
+ /**
505
+ * Insert a new fact into the database.
506
+ */
507
+ export function insertFact(db, input) {
508
+ const stmt = db.prepare(`
509
+ INSERT INTO facts (id, created_at, subject, predicate, object, confidence, source_item_id, entity_id)
510
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?)
511
+ `);
512
+ const created_at = input.created_at ?? Date.now();
513
+ stmt.run(input.id, created_at, input.subject, input.predicate, input.object, input.confidence, input.source_item_id ?? null, input.entity_id ?? null);
514
+ return { ...input, created_at };
515
+ }
516
+ /**
517
+ * Get all facts about a specific subject.
518
+ */
519
+ export function getFactsBySubject(db, subject, limit = 100) {
520
+ const rows = db
521
+ .prepare(`SELECT id, created_at, subject, predicate, object, confidence, source_item_id, entity_id
522
+ FROM facts
523
+ WHERE subject = ?
524
+ ORDER BY confidence DESC, created_at DESC
525
+ LIMIT ?`)
526
+ .all(subject, limit);
527
+ return rows.map(r => ({
528
+ id: r.id,
529
+ created_at: r.created_at,
530
+ subject: r.subject,
531
+ predicate: r.predicate,
532
+ object: r.object,
533
+ confidence: r.confidence,
534
+ source_item_id: r.source_item_id,
535
+ entity_id: r.entity_id,
536
+ }));
537
+ }
538
+ /**
539
+ * Get all facts with a specific predicate.
540
+ */
541
+ export function getFactsByPredicate(db, predicate, limit = 100) {
542
+ const rows = db
543
+ .prepare(`SELECT id, created_at, subject, predicate, object, confidence, source_item_id, entity_id
544
+ FROM facts
545
+ WHERE predicate = ?
546
+ ORDER BY confidence DESC, created_at DESC
547
+ LIMIT ?`)
548
+ .all(predicate, limit);
549
+ return rows.map(r => ({
550
+ id: r.id,
551
+ created_at: r.created_at,
552
+ subject: r.subject,
553
+ predicate: r.predicate,
554
+ object: r.object,
555
+ confidence: r.confidence,
556
+ source_item_id: r.source_item_id,
557
+ entity_id: r.entity_id,
558
+ }));
559
+ }
560
+ /**
561
+ * Search facts by subject, predicate, or object (simple LIKE search).
562
+ */
563
+ export function searchFacts(db, query, limit = 50) {
564
+ const pattern = `%${query}%`;
565
+ const rows = db
566
+ .prepare(`SELECT id, created_at, subject, predicate, object, confidence, source_item_id, entity_id
567
+ FROM facts
568
+ WHERE subject LIKE ? OR predicate LIKE ? OR object LIKE ?
569
+ ORDER BY confidence DESC, created_at DESC
570
+ LIMIT ?`)
571
+ .all(pattern, pattern, pattern, limit);
572
+ return rows.map(r => ({
573
+ id: r.id,
574
+ created_at: r.created_at,
575
+ subject: r.subject,
576
+ predicate: r.predicate,
577
+ object: r.object,
578
+ confidence: r.confidence,
579
+ source_item_id: r.source_item_id,
580
+ entity_id: r.entity_id,
581
+ }));
582
+ }
583
+ /**
584
+ * Get all facts (optionally filtered by entity_id).
585
+ */
586
+ export function getAllFacts(db, entityId, limit = 100) {
587
+ let rows;
588
+ if (entityId) {
589
+ rows = db
590
+ .prepare(`SELECT id, created_at, subject, predicate, object, confidence, source_item_id, entity_id
591
+ FROM facts
592
+ WHERE entity_id = ?
593
+ ORDER BY created_at DESC
594
+ LIMIT ?`)
595
+ .all(entityId, limit);
596
+ }
597
+ else {
598
+ rows = db
599
+ .prepare(`SELECT id, created_at, subject, predicate, object, confidence, source_item_id, entity_id
600
+ FROM facts
601
+ ORDER BY created_at DESC
602
+ LIMIT ?`)
603
+ .all(limit);
604
+ }
605
+ return rows.map(r => ({
606
+ id: r.id,
607
+ created_at: r.created_at,
608
+ subject: r.subject,
609
+ predicate: r.predicate,
610
+ object: r.object,
611
+ confidence: r.confidence,
612
+ source_item_id: r.source_item_id,
613
+ entity_id: r.entity_id,
614
+ }));
615
+ }
616
+ /**
617
+ * List distinct subjects in the facts table.
618
+ */
619
+ export function listSubjects(db) {
620
+ const rows = db
621
+ .prepare(`SELECT DISTINCT subject FROM facts ORDER BY subject`)
622
+ .all();
623
+ return rows.map(r => r.subject);
624
+ }
625
+ /**
626
+ * List distinct predicates in the facts table.
627
+ */
628
+ export function listPredicates(db) {
629
+ const rows = db
630
+ .prepare(`SELECT DISTINCT predicate FROM facts ORDER BY predicate`)
631
+ .all();
632
+ return rows.map(r => r.predicate);
633
+ }
634
+ /**
635
+ * Delete a fact by ID.
636
+ */
637
+ export function deleteFact(db, id) {
638
+ const stmt = db.prepare('DELETE FROM facts WHERE id = ?');
639
+ const result = stmt.run(id);
640
+ return (result.changes ?? 0) > 0;
641
+ }
642
+ /**
643
+ * Delete all facts derived from a specific memory item.
644
+ */
645
+ export function deleteFactsBySourceItem(db, sourceItemId) {
646
+ const stmt = db.prepare('DELETE FROM facts WHERE source_item_id = ?');
647
+ const result = stmt.run(sourceItemId);
648
+ return result.changes ?? 0;
649
+ }
650
+ /**
651
+ * Simple pattern-based fact extraction.
652
+ * Looks for common patterns like "X works at Y", "X prefers Y", etc.
653
+ * Returns an array of potential facts (not inserted yet).
654
+ */
655
+ export function extractFactsSimple(text, entityId) {
656
+ const facts = [];
657
+ const lower = text.toLowerCase();
658
+ // Pattern: "X works at Y" / "X travaille chez Y"
659
+ const workPatterns = [
660
+ /(\w+)\s+(?:works at|travaille chez|works for|work at)\s+([\w\s]+?)(?:\.|,|$)/gi,
661
+ ];
662
+ for (const pattern of workPatterns) {
663
+ let match;
664
+ while ((match = pattern.exec(text)) !== null) {
665
+ facts.push({
666
+ subject: match[1].trim(),
667
+ predicate: 'works_at',
668
+ object: match[2].trim(),
669
+ confidence: 0.7,
670
+ });
671
+ }
672
+ }
673
+ // Pattern: "X prefers Y" / "X préfère Y"
674
+ const preferPatterns = [
675
+ /(\w+)\s+(?:prefers?|préfère)\s+([\w\s]+?)(?:\.|,|$)/gi,
676
+ ];
677
+ for (const pattern of preferPatterns) {
678
+ let match;
679
+ while ((match = pattern.exec(text)) !== null) {
680
+ facts.push({
681
+ subject: match[1].trim(),
682
+ predicate: 'prefers',
683
+ object: match[2].trim(),
684
+ confidence: 0.8,
685
+ });
686
+ }
687
+ }
688
+ // Pattern: "X is Y" / "X est Y"
689
+ const isPatterns = [
690
+ /(\w+)\s+(?:is|est)\s+(?:a |an |un |une )?([\w\s]+?)(?:\.|,|$)/gi,
691
+ ];
692
+ for (const pattern of isPatterns) {
693
+ let match;
694
+ while ((match = pattern.exec(text)) !== null) {
695
+ const subject = match[1].trim();
696
+ // Skip common false positives
697
+ if (['it', 'this', 'that', 'ce', 'il', 'elle', 'cette'].includes(subject.toLowerCase()))
698
+ continue;
699
+ facts.push({
700
+ subject,
701
+ predicate: 'is',
702
+ object: match[2].trim(),
703
+ confidence: 0.6,
704
+ });
705
+ }
706
+ }
707
+ // Dedupe by subject+predicate+object
708
+ const seen = new Set();
709
+ return facts.filter(f => {
710
+ const key = `${f.subject}|${f.predicate}|${f.object}`.toLowerCase();
711
+ if (seen.has(key))
712
+ return false;
713
+ seen.add(key);
714
+ return true;
715
+ });
716
+ }
package/package.json CHANGED
@@ -1,15 +1,23 @@
1
- {
2
- "name": "@akashabot/openclaw-memory-offline-core",
3
- "version": "0.1.1",
4
- "type": "module",
5
- "main": "./dist/index.js",
6
- "types": "./dist/index.d.ts",
7
- "files": ["dist"],
8
- "dependencies": {
9
- "better-sqlite3": "^11.8.0",
10
- "node-fetch": "^3.3.2"
11
- },
12
- "devDependencies": {
13
- "@types/better-sqlite3": "^7.6.11"
14
- }
15
- }
1
+ {
2
+ "name": "@akashabot/openclaw-memory-offline-core",
3
+ "version": "0.3.0",
4
+ "type": "module",
5
+ "description": "Core library for OpenClaw Offline Memory (SQLite FTS + optional embeddings)",
6
+ "main": "./dist/index.js",
7
+ "types": "./dist/index.d.ts",
8
+ "files": ["dist"],
9
+ "repository": {
10
+ "type": "git",
11
+ "url": "git+https://github.com/AkashaBot/openclaw-memory-offline-sqlite.git",
12
+ "directory": "packages/core"
13
+ },
14
+ "author": "AkashaBot",
15
+ "license": "MIT",
16
+ "dependencies": {
17
+ "better-sqlite3": "^11.8.0",
18
+ "node-fetch": "^3.3.2"
19
+ },
20
+ "devDependencies": {
21
+ "@types/better-sqlite3": "^7.6.11"
22
+ }
23
+ }