omni-cortex 1.17.1__py3-none-any.whl → 1.17.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (87) hide show
  1. omni_cortex/__init__.py +3 -0
  2. omni_cortex/_bundled/dashboard/backend/.env.example +12 -0
  3. omni_cortex/_bundled/dashboard/backend/backfill_summaries.py +280 -0
  4. omni_cortex/_bundled/dashboard/backend/chat_service.py +631 -0
  5. omni_cortex/_bundled/dashboard/backend/database.py +1773 -0
  6. omni_cortex/_bundled/dashboard/backend/image_service.py +552 -0
  7. omni_cortex/_bundled/dashboard/backend/logging_config.py +122 -0
  8. omni_cortex/_bundled/dashboard/backend/main.py +1888 -0
  9. omni_cortex/_bundled/dashboard/backend/models.py +472 -0
  10. omni_cortex/_bundled/dashboard/backend/project_config.py +170 -0
  11. omni_cortex/_bundled/dashboard/backend/project_scanner.py +164 -0
  12. omni_cortex/_bundled/dashboard/backend/prompt_security.py +111 -0
  13. omni_cortex/_bundled/dashboard/backend/pyproject.toml +23 -0
  14. omni_cortex/_bundled/dashboard/backend/security.py +104 -0
  15. omni_cortex/_bundled/dashboard/backend/uv.lock +1110 -0
  16. omni_cortex/_bundled/dashboard/backend/websocket_manager.py +104 -0
  17. omni_cortex/_bundled/hooks/post_tool_use.py +497 -0
  18. omni_cortex/_bundled/hooks/pre_tool_use.py +277 -0
  19. omni_cortex/_bundled/hooks/session_utils.py +186 -0
  20. omni_cortex/_bundled/hooks/stop.py +219 -0
  21. omni_cortex/_bundled/hooks/subagent_stop.py +120 -0
  22. omni_cortex/_bundled/hooks/user_prompt.py +220 -0
  23. omni_cortex/categorization/__init__.py +9 -0
  24. omni_cortex/categorization/auto_tags.py +166 -0
  25. omni_cortex/categorization/auto_type.py +165 -0
  26. omni_cortex/config.py +141 -0
  27. omni_cortex/dashboard.py +238 -0
  28. omni_cortex/database/__init__.py +24 -0
  29. omni_cortex/database/connection.py +137 -0
  30. omni_cortex/database/migrations.py +210 -0
  31. omni_cortex/database/schema.py +212 -0
  32. omni_cortex/database/sync.py +421 -0
  33. omni_cortex/decay/__init__.py +7 -0
  34. omni_cortex/decay/importance.py +147 -0
  35. omni_cortex/embeddings/__init__.py +35 -0
  36. omni_cortex/embeddings/local.py +442 -0
  37. omni_cortex/models/__init__.py +20 -0
  38. omni_cortex/models/activity.py +265 -0
  39. omni_cortex/models/agent.py +144 -0
  40. omni_cortex/models/memory.py +395 -0
  41. omni_cortex/models/relationship.py +206 -0
  42. omni_cortex/models/session.py +290 -0
  43. omni_cortex/resources/__init__.py +1 -0
  44. omni_cortex/search/__init__.py +22 -0
  45. omni_cortex/search/hybrid.py +197 -0
  46. omni_cortex/search/keyword.py +204 -0
  47. omni_cortex/search/ranking.py +127 -0
  48. omni_cortex/search/semantic.py +232 -0
  49. omni_cortex/server.py +360 -0
  50. omni_cortex/setup.py +284 -0
  51. omni_cortex/tools/__init__.py +13 -0
  52. omni_cortex/tools/activities.py +453 -0
  53. omni_cortex/tools/memories.py +536 -0
  54. omni_cortex/tools/sessions.py +311 -0
  55. omni_cortex/tools/utilities.py +477 -0
  56. omni_cortex/utils/__init__.py +13 -0
  57. omni_cortex/utils/formatting.py +282 -0
  58. omni_cortex/utils/ids.py +72 -0
  59. omni_cortex/utils/timestamps.py +129 -0
  60. omni_cortex/utils/truncation.py +111 -0
  61. {omni_cortex-1.17.1.dist-info → omni_cortex-1.17.3.dist-info}/METADATA +1 -1
  62. omni_cortex-1.17.3.dist-info/RECORD +86 -0
  63. omni_cortex-1.17.1.dist-info/RECORD +0 -26
  64. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/.env.example +0 -0
  65. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/backfill_summaries.py +0 -0
  66. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/chat_service.py +0 -0
  67. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/database.py +0 -0
  68. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/image_service.py +0 -0
  69. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/logging_config.py +0 -0
  70. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/main.py +0 -0
  71. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/models.py +0 -0
  72. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/project_config.py +0 -0
  73. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/project_scanner.py +0 -0
  74. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/prompt_security.py +0 -0
  75. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/pyproject.toml +0 -0
  76. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/security.py +0 -0
  77. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/uv.lock +0 -0
  78. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/websocket_manager.py +0 -0
  79. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/post_tool_use.py +0 -0
  80. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/pre_tool_use.py +0 -0
  81. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/session_utils.py +0 -0
  82. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/stop.py +0 -0
  83. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/subagent_stop.py +0 -0
  84. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/user_prompt.py +0 -0
  85. {omni_cortex-1.17.1.dist-info → omni_cortex-1.17.3.dist-info}/WHEEL +0 -0
  86. {omni_cortex-1.17.1.dist-info → omni_cortex-1.17.3.dist-info}/entry_points.txt +0 -0
  87. {omni_cortex-1.17.1.dist-info → omni_cortex-1.17.3.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,421 @@
1
+ """Global index synchronization for cross-project memory search.
2
+
3
+ This module handles syncing memories from project-local databases to the
4
+ global database at ~/.omni-cortex/global.db, enabling cross-project search.
5
+ """
6
+
7
+ import json
8
+ import logging
9
+ import sqlite3
10
+ from pathlib import Path
11
+ from typing import Optional
12
+
13
+ from .connection import get_connection, init_database
14
+ from ..config import get_global_db_path, get_project_path, load_config
15
+ from ..utils.timestamps import now_iso
16
+
17
+ logger = logging.getLogger(__name__)
18
+
19
+
20
def sync_memory_to_global(
    memory_id: str,
    content: str,
    memory_type: str,
    tags: list[str],
    context: Optional[str],
    importance_score: float,
    status: str,
    project_path: str,
    created_at: str,
    updated_at: str,
) -> bool:
    """Upsert a single memory into the global cross-project index.

    Args:
        memory_id: The memory ID
        content: Memory content
        memory_type: Memory type
        tags: List of tags
        context: Optional context
        importance_score: Importance score
        status: Memory status
        project_path: Source project path
        created_at: Creation timestamp
        updated_at: Update timestamp

    Returns:
        True if the memory was written to the global index; False when
        global sync is disabled or the write failed.
    """
    if not load_config().global_sync_enabled:
        return False

    try:
        global_conn = init_database(is_global=True)

        # Bind values in the order the INSERT column list expects;
        # access_count and has_embedding are hard-coded 0 in the SQL.
        row = (
            memory_id,
            content,
            memory_type,
            json.dumps(tags),
            context,
            created_at,
            updated_at,
            now_iso(),
            importance_score,
            status,
            project_path,
        )

        # Insert-or-update keyed on id; the conflict branch refreshes the
        # mutable fields but leaves access tracking untouched.
        global_conn.cursor().execute(
            """
            INSERT INTO memories (
                id, content, type, tags, context,
                created_at, updated_at, last_accessed,
                access_count, importance_score, status,
                project_path, has_embedding
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, 0, ?, ?, ?, 0)
            ON CONFLICT(id) DO UPDATE SET
                content = excluded.content,
                type = excluded.type,
                tags = excluded.tags,
                context = excluded.context,
                updated_at = excluded.updated_at,
                importance_score = excluded.importance_score,
                status = excluded.status
            """,
            row,
        )
        global_conn.commit()
        logger.debug(f"Synced memory {memory_id} to global index")
        return True

    except Exception as e:
        # Best-effort: global sync failures must never break the caller.
        logger.warning(f"Failed to sync memory {memory_id} to global: {e}")
        return False
98
+
99
+
100
def delete_memory_from_global(memory_id: str) -> bool:
    """Drop a memory row from the global index, if present.

    Args:
        memory_id: The memory ID to remove

    Returns:
        True only when a row was actually deleted.
    """
    if not load_config().global_sync_enabled:
        return False

    try:
        conn = init_database(is_global=True)
        cur = conn.cursor()
        cur.execute("DELETE FROM memories WHERE id = ?", (memory_id,))
        conn.commit()

        deleted = cur.rowcount > 0
        if deleted:
            logger.debug(f"Removed memory {memory_id} from global index")
        return deleted

    except Exception as e:
        # Best-effort: a failed global delete is logged, not raised.
        logger.warning(f"Failed to remove memory {memory_id} from global: {e}")
        return False
128
+
129
+
130
def search_global_memories(
    query: str,
    type_filter: Optional[str] = None,
    tags_filter: Optional[list[str]] = None,
    project_filter: Optional[str] = None,
    limit: int = 20,
) -> list[dict]:
    """Search memories across all projects via the global FTS5 index.

    Args:
        query: Search query
        type_filter: Filter by memory type
        tags_filter: Filter by tags
        project_filter: Filter by project path (substring match)
        limit: Maximum results

    Returns:
        List of memory dicts with project_path included; empty list on error.
    """
    try:
        global_conn = init_database(is_global=True)
        cursor = global_conn.cursor()

        # Escape FTS5 special characters
        fts_query = _escape_fts_query(query)

        # Build WHERE conditions
        where_conditions = []
        params: list = [fts_query]

        if type_filter:
            where_conditions.append("m.type = ?")
            params.append(type_filter)

        if project_filter:
            where_conditions.append("m.project_path LIKE ?")
            params.append(f"%{project_filter}%")

        where_conditions.append("m.status != 'archived'")

        if tags_filter:
            # tags are stored as a JSON array string; match the quoted tag.
            tag_conditions = []
            for tag in tags_filter:
                tag_conditions.append("m.tags LIKE ?")
                params.append(f'%"{tag}"%')
            where_conditions.append(f"({' OR '.join(tag_conditions)})")

        where_sql = ""
        if where_conditions:
            where_sql = "AND " + " AND ".join(where_conditions)

        params.append(limit)

        try:
            # BUG FIX: the FTS table was previously aliased
            # ("FROM memories_fts fts") while bm25() and MATCH still
            # referenced the unaliased name. In SQLite the original name is
            # out of scope once aliased, so every FTS query raised
            # OperationalError and silently degraded to the LIKE fallback.
            # Referencing the table name directly makes FTS actually run.
            cursor.execute(
                f"""
                SELECT m.*, bm25(memories_fts) as score
                FROM memories_fts
                JOIN memories m ON memories_fts.rowid = m.rowid
                WHERE memories_fts MATCH ?
                {where_sql}
                ORDER BY score
                LIMIT ?
                """,
                params,
            )
        except sqlite3.OperationalError:
            # Fallback to LIKE search if FTS fails (e.g. missing FTS table).
            return _fallback_global_search(
                global_conn, query, type_filter, tags_filter, project_filter, limit
            )

        results = []
        for row in cursor.fetchall():
            tags = row["tags"]
            if tags and isinstance(tags, str):
                try:
                    tags = json.loads(tags)
                except json.JSONDecodeError:
                    tags = []

            results.append({
                "id": row["id"],
                "content": row["content"],
                "type": row["type"],
                "tags": tags,
                "context": row["context"],
                "importance_score": row["importance_score"],
                "status": row["status"],
                "project_path": row["project_path"],
                "created_at": row["created_at"],
                "updated_at": row["updated_at"],
                "score": -row["score"],  # bm25 returns negative scores
            })

        return results

    except Exception as e:
        logger.error(f"Global search failed: {e}")
        return []
230
+
231
+
232
+ def _escape_fts_query(query: str) -> str:
233
+ """Escape special characters for FTS5 query."""
234
+ special_chars = ['"', "'", "(", ")", "*", ":", "^", "-", "+"]
235
+ escaped = query
236
+ for char in special_chars:
237
+ escaped = escaped.replace(char, " ")
238
+
239
+ words = escaped.split()
240
+ if not words:
241
+ return '""'
242
+
243
+ if len(words) == 1:
244
+ return f'"{words[0]}"'
245
+
246
+ return " OR ".join(f'"{word}"' for word in words)
247
+
248
+
249
+ def _fallback_global_search(
250
+ conn: sqlite3.Connection,
251
+ query: str,
252
+ type_filter: Optional[str],
253
+ tags_filter: Optional[list[str]],
254
+ project_filter: Optional[str],
255
+ limit: int,
256
+ ) -> list[dict]:
257
+ """Fallback to LIKE search if FTS5 fails."""
258
+ words = query.lower().split()
259
+ if not words:
260
+ return []
261
+
262
+ where_conditions = []
263
+ params: list = []
264
+
265
+ # Match any word in content or context
266
+ word_conditions = []
267
+ for word in words:
268
+ word_conditions.append("(LOWER(content) LIKE ? OR LOWER(context) LIKE ?)")
269
+ params.extend([f"%{word}%", f"%{word}%"])
270
+ where_conditions.append(f"({' OR '.join(word_conditions)})")
271
+
272
+ if type_filter:
273
+ where_conditions.append("type = ?")
274
+ params.append(type_filter)
275
+
276
+ if project_filter:
277
+ where_conditions.append("project_path LIKE ?")
278
+ params.append(f"%{project_filter}%")
279
+
280
+ where_conditions.append("status != 'archived'")
281
+
282
+ if tags_filter:
283
+ tag_conds = []
284
+ for tag in tags_filter:
285
+ tag_conds.append("tags LIKE ?")
286
+ params.append(f'%"{tag}"%')
287
+ where_conditions.append(f"({' OR '.join(tag_conds)})")
288
+
289
+ params.append(limit)
290
+
291
+ cursor = conn.cursor()
292
+ cursor.execute(
293
+ f"""
294
+ SELECT *
295
+ FROM memories
296
+ WHERE {' AND '.join(where_conditions)}
297
+ ORDER BY importance_score DESC, updated_at DESC
298
+ LIMIT ?
299
+ """,
300
+ params,
301
+ )
302
+
303
+ results = []
304
+ for row in cursor.fetchall():
305
+ tags = row["tags"]
306
+ if tags and isinstance(tags, str):
307
+ try:
308
+ tags = json.loads(tags)
309
+ except json.JSONDecodeError:
310
+ tags = []
311
+
312
+ content = (row["content"] + " " + (row["context"] or "")).lower()
313
+ score = sum(1 for word in words if word in content)
314
+
315
+ results.append({
316
+ "id": row["id"],
317
+ "content": row["content"],
318
+ "type": row["type"],
319
+ "tags": tags,
320
+ "context": row["context"],
321
+ "importance_score": row["importance_score"],
322
+ "status": row["status"],
323
+ "project_path": row["project_path"],
324
+ "created_at": row["created_at"],
325
+ "updated_at": row["updated_at"],
326
+ "score": float(score),
327
+ })
328
+
329
+ return results
330
+
331
+
332
def get_global_stats() -> dict:
    """Summarize the contents of the global index.

    Returns:
        Dict with the total memory count plus per-project and per-type
        breakdowns, or {"error": ...} when the index cannot be read.
    """
    try:
        cursor = init_database(is_global=True).cursor()

        # Overall count first.
        cursor.execute("SELECT COUNT(*) FROM memories")
        total = cursor.fetchone()[0]

        # Breakdown by originating project, largest first.
        cursor.execute("""
            SELECT project_path, COUNT(*) as cnt
            FROM memories
            GROUP BY project_path
            ORDER BY cnt DESC
        """)
        by_project = {row["project_path"]: row["cnt"] for row in cursor.fetchall()}

        # Breakdown by memory type, largest first.
        cursor.execute("""
            SELECT type, COUNT(*) as cnt
            FROM memories
            GROUP BY type
            ORDER BY cnt DESC
        """)
        by_type = {row["type"]: row["cnt"] for row in cursor.fetchall()}

        return {
            "total_memories": total,
            "by_project": by_project,
            "by_type": by_type,
        }

    except Exception as e:
        logger.error(f"Failed to get global stats: {e}")
        return {"error": str(e)}
371
+
372
+
373
def sync_all_project_memories() -> int:
    """Push every non-archived memory of the current project to the global index.

    Returns:
        Number of memories synced
    """
    if not load_config().global_sync_enabled:
        return 0

    try:
        project_conn = init_database()
        project_path = str(get_project_path())

        cursor = project_conn.cursor()
        cursor.execute("SELECT * FROM memories WHERE status != 'archived'")

        synced_count = 0
        for row in cursor.fetchall():
            # Normalize the stored JSON tag string to a list; anything
            # unparseable or empty becomes [].
            raw_tags = row["tags"]
            if raw_tags and isinstance(raw_tags, str):
                try:
                    parsed_tags = json.loads(raw_tags)
                except json.JSONDecodeError:
                    parsed_tags = []
            else:
                parsed_tags = []

            if sync_memory_to_global(
                memory_id=row["id"],
                content=row["content"],
                memory_type=row["type"],
                tags=parsed_tags,
                context=row["context"],
                importance_score=row["importance_score"],
                status=row["status"],
                project_path=project_path,
                created_at=row["created_at"],
                updated_at=row["updated_at"],
            ):
                synced_count += 1

        logger.info(f"Synced {synced_count} memories to global index")
        return synced_count

    except Exception as e:
        logger.error(f"Failed to sync project memories: {e}")
        return 0
@@ -0,0 +1,7 @@
1
+ """Importance decay algorithms."""
2
+
3
+ from .importance import calculate_decayed_importance
4
+
5
+ __all__ = [
6
+ "calculate_decayed_importance",
7
+ ]
@@ -0,0 +1,147 @@
1
+ """Importance decay algorithm for memories."""
2
+
3
+ import math
4
+ from datetime import datetime, timezone
5
+ from typing import Optional
6
+
7
+ from ..utils.timestamps import parse_iso
8
+
9
+
10
def calculate_decayed_importance(
    base_importance: float,
    last_accessed: str,
    access_count: int,
    manual_importance: Optional[int] = None,
    decay_rate: float = 0.5,
) -> float:
    """Compute a memory's current importance after time-based decay.

    A user-set manual importance short-circuits the calculation entirely.
    Otherwise the score decays linearly with whole days since last access,
    gains a logarithmic access-frequency boost, and is clamped to [0, 100].

    Args:
        base_importance: Original importance score (0-100)
        last_accessed: ISO timestamp of last access
        access_count: Number of times memory was accessed
        manual_importance: User-set importance (overrides calculation)
        decay_rate: Points to decay per day (default 0.5)

    Returns:
        Current importance score (0-100)
    """
    # An explicit user choice beats any computed value.
    if manual_importance is not None:
        return float(manual_importance)

    # Whole days elapsed since the memory was last touched.
    idle_days = (datetime.now(timezone.utc) - parse_iso(last_accessed)).days

    # Linear decay, partially restored by a log-scale usage bonus
    # (log1p(10) ~= 2.4, so 10 accesses restore roughly 12 points).
    score = base_importance - idle_days * decay_rate + math.log1p(access_count) * 5.0

    # Clamp into the valid importance range.
    return max(0.0, min(100.0, score))
56
+
57
+
58
def should_mark_for_review(
    last_verified: Optional[str],
    review_days: int = 30,
) -> bool:
    """Return True when a verified memory has aged past the review window.

    Args:
        last_verified: ISO timestamp of last verification, or None
        review_days: Days threshold for review

    Returns:
        True if memory should be reviewed
    """
    # Never-verified memories are handled by created_at-based logic elsewhere.
    if last_verified is None:
        return False

    age_days = (datetime.now(timezone.utc) - parse_iso(last_verified)).days
    return age_days >= review_days
79
+
80
+
81
def get_freshness_status(
    created_at: str,
    last_verified: Optional[str],
    current_status: str,
    review_days: int = 30,
) -> str:
    """Derive a memory's freshness status from its timestamps.

    Args:
        created_at: ISO timestamp of creation
        last_verified: ISO timestamp of last verification
        current_status: Current status value
        review_days: Days threshold for review

    Returns:
        New status: fresh, needs_review, outdated, or archived
    """
    # "archived" and "outdated" are sticky: they only change when the
    # memory is explicitly re-verified or un-archived elsewhere.
    if current_status in ("archived", "outdated"):
        return current_status

    # Age is measured from the last verification, falling back to creation.
    anchor = parse_iso(last_verified or created_at)
    age_days = (datetime.now(timezone.utc) - anchor).days

    # Twice the review window means stale; one window means review due.
    if age_days >= review_days * 2:
        return "outdated"
    if age_days >= review_days:
        return "needs_review"
    return "fresh"
118
+
119
+
120
def apply_decay_to_memory(
    importance_score: float,
    last_accessed: str,
    access_count: int,
    manual_importance: Optional[int],
    decay_rate: float = 0.5,
) -> float:
    """Apply the decay calculation to a memory's stored importance.

    Thin convenience wrapper over calculate_decayed_importance; the stored
    importance_score is treated as the base importance.

    Args:
        importance_score: Current stored importance
        last_accessed: Last access timestamp
        access_count: Access count
        manual_importance: Manual override if any
        decay_rate: Decay rate

    Returns:
        Updated importance score
    """
    # Positional pass-through; parameter order matches the callee exactly.
    return calculate_decayed_importance(
        importance_score,
        last_accessed,
        access_count,
        manual_importance,
        decay_rate,
    )
@@ -0,0 +1,35 @@
1
+ """Embedding generation for semantic search."""
2
+
3
+ from .local import (
4
+ DEFAULT_MODEL_NAME,
5
+ EMBEDDING_DIMENSIONS,
6
+ generate_embedding,
7
+ generate_embeddings_batch,
8
+ generate_and_store_embedding,
9
+ get_embedding,
10
+ get_all_embeddings,
11
+ store_embedding,
12
+ delete_embedding,
13
+ vector_to_blob,
14
+ blob_to_vector,
15
+ get_memories_without_embeddings,
16
+ backfill_embeddings,
17
+ is_model_available,
18
+ )
19
+
20
+ __all__ = [
21
+ "DEFAULT_MODEL_NAME",
22
+ "EMBEDDING_DIMENSIONS",
23
+ "generate_embedding",
24
+ "generate_embeddings_batch",
25
+ "generate_and_store_embedding",
26
+ "get_embedding",
27
+ "get_all_embeddings",
28
+ "store_embedding",
29
+ "delete_embedding",
30
+ "vector_to_blob",
31
+ "blob_to_vector",
32
+ "get_memories_without_embeddings",
33
+ "backfill_embeddings",
34
+ "is_model_available",
35
+ ]