omni-cortex 1.0.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,729 @@
1
+ """Database query functions for reading omni-cortex SQLite databases."""
2
+
3
+ import json
4
+ import sqlite3
5
+ from collections import Counter
6
+ from datetime import datetime, timedelta
7
+ from pathlib import Path
8
+ from typing import Optional
9
+
10
+ from models import Activity, FilterParams, Memory, MemoryStats, MemoryUpdate, Session, TimelineEntry
11
+
12
+
13
def get_connection(db_path: str) -> sqlite3.Connection:
    """Open the database at ``db_path`` read-only.

    A URI connection string (``mode=ro``) makes SQLite reject writes at
    the driver level; rows come back as :class:`sqlite3.Row` so columns
    can be accessed by name.
    """
    connection = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True)
    connection.row_factory = sqlite3.Row
    return connection
18
+
19
+
20
def get_write_connection(db_path: str) -> sqlite3.Connection:
    """Open the database at ``db_path`` for reading and writing.

    Rows are returned as :class:`sqlite3.Row` for name-based access.
    """
    connection = sqlite3.connect(db_path)
    connection.row_factory = sqlite3.Row
    return connection
25
+
26
+
27
def parse_tags(tags_str: Optional[str]) -> list[str]:
    """Decode a JSON-encoded tag list.

    Anything that is empty, malformed JSON, or decodes to a non-list
    yields an empty list rather than raising.
    """
    if tags_str:
        try:
            decoded = json.loads(tags_str)
        except (json.JSONDecodeError, TypeError):
            return []
        if isinstance(decoded, list):
            return decoded
    return []
36
+
37
+
38
def get_memories(db_path: str, filters: FilterParams) -> list[Memory]:
    """Return memories matching ``filters``, sorted and paginated.

    Supports filtering by type, status, an importance range, and a LIKE
    search over content/context.  The sort column is validated against a
    whitelist so the interpolated ORDER BY stays injection-safe.
    """
    clauses: list[str] = []
    params: list = []

    if filters.memory_type:
        clauses.append("type = ?")
        params.append(filters.memory_type)
    if filters.status:
        clauses.append("status = ?")
        params.append(filters.status)
    if filters.min_importance is not None:
        clauses.append("importance_score >= ?")
        params.append(filters.min_importance)
    if filters.max_importance is not None:
        clauses.append("importance_score <= ?")
        params.append(filters.max_importance)
    if filters.search:
        clauses.append("(content LIKE ? OR context LIKE ?)")
        like = f"%{filters.search}%"
        params += [like, like]

    # Only known columns may be interpolated into ORDER BY.
    allowed_columns = {"created_at", "last_accessed", "importance_score", "access_count"}
    order_col = filters.sort_by if filters.sort_by in allowed_columns else "last_accessed"
    direction = "DESC" if filters.sort_order.lower() == "desc" else "ASC"

    sql = "SELECT * FROM memories"
    if clauses:
        sql += " WHERE " + " AND ".join(clauses)
    sql += f" ORDER BY {order_col} {direction} LIMIT ? OFFSET ?"
    params += [filters.limit, filters.offset]

    conn = get_connection(db_path)
    try:
        rows = conn.execute(sql, params).fetchall()
    finally:
        conn.close()

    return [
        Memory(
            id=row["id"],
            content=row["content"],
            context=row["context"],
            type=row["type"],
            status=row["status"] or "fresh",
            importance_score=int(row["importance_score"] or 50),
            access_count=row["access_count"] or 0,
            created_at=datetime.fromisoformat(row["created_at"]),
            last_accessed=datetime.fromisoformat(row["last_accessed"]) if row["last_accessed"] else None,
            tags=parse_tags(row["tags"]),
        )
        for row in rows
    ]
102
+
103
+
104
def get_memory_by_id(db_path: str, memory_id: str) -> Optional[Memory]:
    """Look up a single memory by primary key; ``None`` when absent."""
    conn = get_connection(db_path)
    try:
        row = conn.execute(
            "SELECT * FROM memories WHERE id = ?", (memory_id,)
        ).fetchone()
    finally:
        conn.close()

    if row is None:
        return None

    return Memory(
        id=row["id"],
        content=row["content"],
        context=row["context"],
        type=row["type"],
        status=row["status"] or "fresh",
        importance_score=int(row["importance_score"] or 50),
        access_count=row["access_count"] or 0,
        created_at=datetime.fromisoformat(row["created_at"]),
        last_accessed=datetime.fromisoformat(row["last_accessed"]) if row["last_accessed"] else None,
        tags=parse_tags(row["tags"]),
    )
133
+
134
+
135
def get_memory_stats(db_path: str) -> MemoryStats:
    """Aggregate database-wide memory statistics.

    Collects the total row count, per-type and per-status breakdowns,
    the mean importance score, the summed access count, and the 50 most
    common tags decoded from the JSON ``tags`` column.
    """
    conn = get_connection(db_path)

    total = conn.execute("SELECT COUNT(*) FROM memories").fetchone()[0]

    by_type = {
        row["type"]: row["count"]
        for row in conn.execute(
            "SELECT type, COUNT(*) as count FROM memories GROUP BY type"
        )
    }
    # A NULL status is counted under the default "fresh" bucket.
    by_status = {
        (row["status"] or "fresh"): row["count"]
        for row in conn.execute(
            "SELECT status, COUNT(*) as count FROM memories GROUP BY status"
        )
    }

    # AVG/SUM return NULL on an empty table; coerce to numeric zeros.
    avg_importance = conn.execute(
        "SELECT AVG(importance_score) FROM memories"
    ).fetchone()[0] or 0.0
    total_access = conn.execute(
        "SELECT SUM(access_count) FROM memories"
    ).fetchone()[0] or 0

    tag_counter: Counter = Counter()
    for row in conn.execute(
        "SELECT tags FROM memories WHERE tags IS NOT NULL AND tags != ''"
    ):
        tag_counter.update(parse_tags(row["tags"]))

    conn.close()

    return MemoryStats(
        total_count=total,
        by_type=by_type,
        by_status=by_status,
        avg_importance=round(avg_importance, 1),
        total_access_count=total_access,
        tags=[{"name": name, "count": count} for name, count in tag_counter.most_common(50)],
    )
177
+
178
+
179
def get_activities(
    db_path: str,
    event_type: Optional[str] = None,
    tool_name: Optional[str] = None,
    limit: int = 100,
    offset: int = 0,
) -> list[Activity]:
    """Fetch activity-log rows, newest first, with optional filters."""
    conditions: list[str] = []
    params: list = []
    if event_type:
        conditions.append("event_type = ?")
        params.append(event_type)
    if tool_name:
        conditions.append("tool_name = ?")
        params.append(tool_name)

    sql = "SELECT * FROM activities"
    if conditions:
        sql += " WHERE " + " AND ".join(conditions)
    sql += " ORDER BY timestamp DESC LIMIT ? OFFSET ?"
    params += [limit, offset]

    conn = get_connection(db_path)
    rows = conn.execute(sql, params).fetchall()
    conn.close()

    activities = []
    for row in rows:
        try:
            ts = datetime.fromisoformat(row["timestamp"])
        except ValueError:
            # Best-effort: an unparseable timestamp falls back to "now"
            # rather than dropping the row.
            ts = datetime.now()
        activities.append(
            Activity(
                id=row["id"],
                session_id=row["session_id"],
                event_type=row["event_type"],
                tool_name=row["tool_name"],
                tool_input=row["tool_input"],
                tool_output=row["tool_output"],
                success=bool(row["success"]),
                error_message=row["error_message"],
                duration_ms=row["duration_ms"],
                file_path=row["file_path"],
                timestamp=ts,
            )
        )
    return activities
233
+
234
+
235
def get_timeline(
    db_path: str,
    hours: int = 24,
    include_memories: bool = True,
    include_activities: bool = True,
) -> list[TimelineEntry]:
    """Merge recent memories and activities into one timeline, newest first.

    Only rows from the last ``hours`` hours are included; memory content
    is truncated to 200 characters for display.
    """
    cutoff = (datetime.now() - timedelta(hours=hours)).isoformat()
    conn = get_connection(db_path)
    entries: list[TimelineEntry] = []

    if include_memories:
        for row in conn.execute(
            "SELECT * FROM memories WHERE created_at >= ? ORDER BY created_at DESC",
            (cutoff,),
        ):
            content = row["content"]
            if len(content) > 200:
                content = content[:200] + "..."
            entries.append(
                TimelineEntry(
                    timestamp=datetime.fromisoformat(row["created_at"]),
                    entry_type="memory",
                    data={
                        "id": row["id"],
                        "content": content,
                        "type": row["type"],
                        "importance": row["importance_score"],
                    },
                )
            )

    if include_activities:
        for row in conn.execute(
            "SELECT * FROM activities WHERE timestamp >= ? ORDER BY timestamp DESC",
            (cutoff,),
        ):
            entries.append(
                TimelineEntry(
                    timestamp=datetime.fromisoformat(row["timestamp"]),
                    entry_type="activity",
                    data={
                        "id": row["id"],
                        "event_type": row["event_type"],
                        "tool_name": row["tool_name"],
                        "success": bool(row["success"]),
                        "duration_ms": row["duration_ms"],
                    },
                )
            )

    conn.close()
    # Interleave the two sources by time, newest first.
    entries.sort(key=lambda entry: entry.timestamp, reverse=True)
    return entries
292
+
293
+
294
def get_sessions(db_path: str, limit: int = 20) -> list[Session]:
    """Return the most recently started sessions with activity counts."""
    sql = """
        SELECT s.*, COUNT(a.id) as activity_count
        FROM sessions s
        LEFT JOIN activities a ON s.id = a.session_id
        GROUP BY s.id
        ORDER BY s.started_at DESC
        LIMIT ?
    """
    conn = get_connection(db_path)
    rows = conn.execute(sql, (limit,)).fetchall()
    conn.close()

    return [
        Session(
            id=row["id"],
            project_path=row["project_path"],
            started_at=datetime.fromisoformat(row["started_at"]),
            ended_at=datetime.fromisoformat(row["ended_at"]) if row["ended_at"] else None,
            summary=row["summary"],
            activity_count=row["activity_count"],
        )
        for row in rows
    ]
325
+
326
+
327
def get_all_tags(db_path: str) -> list[dict]:
    """Return every tag with its usage count, most common first.

    Tags live in a JSON-encoded list column; malformed or non-list
    payloads contribute nothing to the counts.
    """
    conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True)  # read-only
    conn.row_factory = sqlite3.Row

    tag_counts: Counter = Counter()
    for row in conn.execute(
        "SELECT tags FROM memories WHERE tags IS NOT NULL AND tags != ''"
    ):
        try:
            decoded = json.loads(row["tags"])
        except (json.JSONDecodeError, TypeError):
            continue
        if isinstance(decoded, list):
            tag_counts.update(decoded)

    conn.close()
    return [{"name": name, "count": count} for name, count in tag_counts.most_common()]
342
+
343
+
344
def get_type_distribution(db_path: str) -> dict[str, int]:
    """Map each memory ``type`` value to how many rows carry it."""
    conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True)  # read-only
    conn.row_factory = sqlite3.Row
    try:
        return {
            row["type"]: row["count"]
            for row in conn.execute(
                "SELECT type, COUNT(*) as count FROM memories GROUP BY type"
            )
        }
    finally:
        conn.close()
353
+
354
+
355
def search_memories(db_path: str, query: str, limit: int = 20) -> list[Memory]:
    """Search memories by free text.

    Prefers the FTS5 index (``memories_fts``, joined on rowid) when it
    exists, treating the query as a quoted phrase; falls back to a LIKE
    scan over content/context when the table is missing or the FTS query
    is rejected.  The LIKE fallback, previously duplicated verbatim in
    two branches, is factored into a single closure.
    """
    conn = get_connection(db_path)

    def _like_search() -> sqlite3.Cursor:
        # Shared fallback: substring match ranked by importance.
        term = f"%{query}%"
        return conn.execute(
            """
            SELECT * FROM memories
            WHERE content LIKE ? OR context LIKE ?
            ORDER BY importance_score DESC
            LIMIT ?
            """,
            (term, term, limit),
        )

    fts_exists = conn.execute(
        "SELECT name FROM sqlite_master WHERE type='table' AND name='memories_fts'"
    ).fetchone()

    if fts_exists:
        # Double internal quotes and wrap the query so FTS5 treats it as
        # a literal phrase instead of operator syntax.
        safe_query = query.replace('"', '""')
        try:
            cursor = conn.execute(
                """
                SELECT m.* FROM memories m
                JOIN memories_fts fts ON m.rowid = fts.rowid
                WHERE memories_fts MATCH ?
                ORDER BY rank
                LIMIT ?
                """,
                (f'"{safe_query}"', limit),
            )
        except sqlite3.OperationalError:
            cursor = _like_search()
    else:
        cursor = _like_search()

    memories = [
        Memory(
            id=row["id"],
            content=row["content"],
            context=row["context"],
            type=row["type"],
            status=row["status"] or "fresh",
            importance_score=int(row["importance_score"] or 50),
            access_count=row["access_count"] or 0,
            created_at=datetime.fromisoformat(row["created_at"]),
            last_accessed=datetime.fromisoformat(row["last_accessed"]) if row["last_accessed"] else None,
            tags=parse_tags(row["tags"]),
        )
        for row in cursor.fetchall()
    ]

    conn.close()
    return memories
426
+
427
+
428
def update_memory(db_path: str, memory_id: str, updates: MemoryUpdate) -> Optional[Memory]:
    """Apply the non-None fields of ``updates`` to one memory row.

    Returns the refreshed record, or ``None`` when no row matched.  When
    no updatable fields are provided the record is returned unchanged.
    """
    conn = get_write_connection(db_path)

    # Declarative column/value pairs; only non-None entries are written.
    field_map = [
        ("content", updates.content),
        ("context", updates.context),
        ("type", updates.memory_type),
        ("status", updates.status),
        ("importance_score", updates.importance_score),
    ]
    assignments = [f"{column} = ?" for column, value in field_map if value is not None]
    values = [value for _, value in field_map if value is not None]
    if updates.tags is not None:
        assignments.append("tags = ?")
        values.append(json.dumps(updates.tags))

    if not assignments:
        conn.close()
        return get_memory_by_id(db_path, memory_id)

    # Touch last_accessed whenever anything changes.
    assignments.append("last_accessed = ?")
    values.append(datetime.now().isoformat())
    values.append(memory_id)

    cursor = conn.execute(
        f"UPDATE memories SET {', '.join(assignments)} WHERE id = ?", values
    )
    conn.commit()
    updated = cursor.rowcount
    conn.close()

    return get_memory_by_id(db_path, memory_id) if updated else None
481
+
482
+
483
def delete_memory(db_path: str, memory_id: str) -> bool:
    """Remove a memory and any relationship rows that reference it.

    Returns ``True`` only when a ``memories`` row was actually deleted.

    NOTE(review): the relationship cleanup filters on source_id/target_id,
    while get_relationships queries source_memory_id/target_memory_id --
    confirm which column names the memory_relationships schema really has.
    """
    conn = sqlite3.connect(db_path)  # writable connection
    conn.row_factory = sqlite3.Row

    conn.execute(
        "DELETE FROM memory_relationships WHERE source_id = ? OR target_id = ?",
        (memory_id, memory_id),
    )
    removed = conn.execute(
        "DELETE FROM memories WHERE id = ?", (memory_id,)
    ).rowcount > 0
    conn.commit()
    conn.close()
    return removed
499
+
500
+
501
+ # --- Stats Functions for Dashboard Charts ---
502
+
503
+
504
def get_activity_heatmap(db_path: str, days: int = 90) -> list[dict]:
    """Daily activity counts for the last ``days`` days (heatmap data)."""
    conn = get_connection(db_path)
    rows = conn.execute(
        """
        SELECT date(timestamp) as date, COUNT(*) as count
        FROM activities
        WHERE timestamp >= date('now', ?)
        GROUP BY date(timestamp)
        ORDER BY date
        """,
        (f"-{days} days",),
    ).fetchall()
    conn.close()
    return [{"date": row["date"], "count": row["count"]} for row in rows]
518
+
519
+
520
def get_tool_usage(db_path: str, limit: int = 10) -> list[dict]:
    """Per-tool usage statistics with success rates.

    Returns up to ``limit`` dicts of the form
    ``{"tool_name", "count", "success_rate"}`` ordered by invocation
    count; ``success_rate`` is the fraction of rows with ``success = 1``
    rounded to two decimals.
    """
    # Connection is opened inline (read-only URI) so this block is
    # self-contained; equivalent to get_connection(db_path).
    conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True)
    conn.row_factory = sqlite3.Row
    query = """
        SELECT
            tool_name,
            COUNT(*) as count,
            SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) * 1.0 / COUNT(*) as success_rate
        FROM activities
        WHERE tool_name IS NOT NULL AND tool_name != ''
        GROUP BY tool_name
        ORDER BY count DESC
        LIMIT ?
    """
    cursor = conn.execute(query, (limit,))
    result = [
        {
            "tool_name": row["tool_name"],
            "count": row["count"],
            # BUG FIX: the previous truthiness guard (`if row["success_rate"]`)
            # treated a genuine 0.0 success rate as missing and reported 1.0.
            # Only a NULL (never possible for grouped rows, but kept defensive)
            # falls back to 1.0 now.
            "success_rate": round(row["success_rate"], 2) if row["success_rate"] is not None else 1.0,
        }
        for row in cursor.fetchall()
    ]
    conn.close()
    return result
545
+
546
+
547
def get_memory_growth(db_path: str, days: int = 30) -> list[dict]:
    """Daily memory-creation counts plus a running cumulative total.

    The cumulative column accumulates only within the requested window,
    not from the beginning of the database.
    """
    sql = """
        WITH daily_counts AS (
            SELECT date(created_at) as date, COUNT(*) as count
            FROM memories
            WHERE created_at >= date('now', ?)
            GROUP BY date(created_at)
        )
        SELECT
            date,
            count,
            SUM(count) OVER (ORDER BY date) as cumulative
        FROM daily_counts
        ORDER BY date
    """
    conn = get_connection(db_path)
    rows = conn.execute(sql, (f"-{days} days",)).fetchall()
    conn.close()
    return [
        {"date": row["date"], "count": row["count"], "cumulative": row["cumulative"]}
        for row in rows
    ]
571
+
572
+
573
def get_recent_sessions(db_path: str, limit: int = 5) -> list[dict]:
    """Most recent sessions as plain dicts with a distinct activity count."""
    sql = """
        SELECT
            s.id,
            s.project_path,
            s.started_at,
            s.ended_at,
            s.summary,
            COUNT(DISTINCT a.id) as activity_count
        FROM sessions s
        LEFT JOIN activities a ON a.session_id = s.id
        GROUP BY s.id
        ORDER BY s.started_at DESC
        LIMIT ?
    """
    conn = get_connection(db_path)
    rows = conn.execute(sql, (limit,)).fetchall()
    conn.close()

    columns = ("id", "project_path", "started_at", "ended_at", "summary", "activity_count")
    return [{name: row[name] for name in columns} for row in rows]
604
+
605
+
606
def bulk_update_memory_status(db_path: str, memory_ids: list[str], status: str) -> int:
    """Set ``status`` on every listed memory; returns rows updated.

    Also bumps ``last_accessed`` to the current time for each touched row.
    """
    if not memory_ids:
        return 0

    conn = sqlite3.connect(db_path)  # writable connection
    conn.row_factory = sqlite3.Row
    marks = ", ".join(["?"] * len(memory_ids))
    cursor = conn.execute(
        f"UPDATE memories SET status = ?, last_accessed = datetime('now') "
        f"WHERE id IN ({marks})",
        [status, *memory_ids],
    )
    conn.commit()
    updated = cursor.rowcount
    conn.close()
    return updated
618
+
619
+
620
def get_memories_needing_review(db_path: str, days_threshold: int = 30, limit: int = 50) -> list[Memory]:
    """Memories untouched for ``days_threshold`` days, or never accessed.

    Never-accessed rows sort first, then the stalest; ties broken by
    importance.  (``NULLS FIRST`` requires SQLite 3.30+.)
    """
    sql = """
        SELECT * FROM memories
        WHERE last_accessed < date('now', ?)
           OR last_accessed IS NULL
        ORDER BY last_accessed ASC NULLS FIRST, importance_score DESC
        LIMIT ?
    """
    conn = get_connection(db_path)
    rows = conn.execute(sql, (f"-{days_threshold} days", limit)).fetchall()
    conn.close()

    return [
        Memory(
            id=row["id"],
            content=row["content"],
            context=row["context"],
            type=row["type"],
            status=row["status"] or "fresh",
            importance_score=int(row["importance_score"] or 50),
            access_count=row["access_count"] or 0,
            created_at=datetime.fromisoformat(row["created_at"]),
            last_accessed=datetime.fromisoformat(row["last_accessed"]) if row["last_accessed"] else None,
            tags=parse_tags(row["tags"]),
        )
        for row in rows
    ]
652
+
653
+
654
def get_relationships(db_path: str, memory_id: Optional[str] = None) -> list[dict]:
    """Fetch memory relationships, optionally restricted to one memory.

    Returns ``[]`` when the ``memory_relationships`` table does not
    exist, and swallows query errors (best-effort, logged to stdout).

    NOTE(review): this queries source_memory_id/target_memory_id while
    delete_memory deletes on source_id/target_id -- confirm which column
    names the actual schema uses.
    """
    conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True)  # read-only
    conn.row_factory = sqlite3.Row

    table_row = conn.execute(
        "SELECT name FROM sqlite_master WHERE type='table' AND name='memory_relationships'"
    ).fetchone()
    if table_row is None:
        conn.close()
        return []

    sql = """
    SELECT
        r.source_memory_id as source_id,
        r.target_memory_id as target_id,
        r.relationship_type,
        r.strength,
        ms.content as source_content,
        ms.type as source_type,
        mt.content as target_content,
        mt.type as target_type
    FROM memory_relationships r
    JOIN memories ms ON r.source_memory_id = ms.id
    JOIN memories mt ON r.target_memory_id = mt.id
    """

    try:
        if memory_id:
            sql += " WHERE r.source_memory_id = ? OR r.target_memory_id = ?"
            cursor = conn.execute(sql, (memory_id, memory_id))
        else:
            cursor = conn.execute(sql)
        rows = [dict(row) for row in cursor.fetchall()]
    except Exception as e:
        # Best-effort: a malformed schema must not break the caller.
        print(f"[Database] Error querying relationships: {e}")
        rows = []
    finally:
        conn.close()

    return rows
697
+
698
+
699
def get_relationship_graph(db_path: str, center_id: Optional[str] = None, depth: int = 2) -> dict:
    """Build ``{"nodes": [...], "edges": [...]}`` for D3 visualization.

    Nodes are deduplicated memories (content clipped to 100 chars);
    edges carry relationship type and strength (defaulting to 1.0).
    ``depth`` is currently unused -- presumably reserved for multi-hop
    traversal; kept for interface compatibility.
    """
    nodes: dict[str, dict] = {}
    edges: list[dict] = []

    for rel in get_relationships(db_path, center_id):
        # Register both endpoints once each.
        for id_key, content_key, type_key in (
            ("source_id", "source_content", "source_type"),
            ("target_id", "target_content", "target_type"),
        ):
            node_id = rel[id_key]
            if node_id not in nodes:
                text = rel[content_key]
                nodes[node_id] = {
                    "id": node_id,
                    "content": text[:100] if text else "",
                    "type": rel[type_key],
                }
        edges.append({
            "source": rel["source_id"],
            "target": rel["target_id"],
            "type": rel["relationship_type"],
            "strength": rel["strength"] or 1.0,
        })

    return {"nodes": list(nodes.values()), "edges": edges}