powermem-0.1.0-py3-none-any.whl

This diff shows the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (123)
  1. powermem/__init__.py +103 -0
  2. powermem/agent/__init__.py +35 -0
  3. powermem/agent/abstract/__init__.py +22 -0
  4. powermem/agent/abstract/collaboration.py +259 -0
  5. powermem/agent/abstract/context.py +187 -0
  6. powermem/agent/abstract/manager.py +232 -0
  7. powermem/agent/abstract/permission.py +217 -0
  8. powermem/agent/abstract/privacy.py +267 -0
  9. powermem/agent/abstract/scope.py +199 -0
  10. powermem/agent/agent.py +791 -0
  11. powermem/agent/components/__init__.py +18 -0
  12. powermem/agent/components/collaboration_coordinator.py +645 -0
  13. powermem/agent/components/permission_controller.py +586 -0
  14. powermem/agent/components/privacy_protector.py +767 -0
  15. powermem/agent/components/scope_controller.py +685 -0
  16. powermem/agent/factories/__init__.py +16 -0
  17. powermem/agent/factories/agent_factory.py +266 -0
  18. powermem/agent/factories/config_factory.py +308 -0
  19. powermem/agent/factories/memory_factory.py +229 -0
  20. powermem/agent/implementations/__init__.py +16 -0
  21. powermem/agent/implementations/hybrid.py +728 -0
  22. powermem/agent/implementations/multi_agent.py +1040 -0
  23. powermem/agent/implementations/multi_user.py +1020 -0
  24. powermem/agent/types.py +53 -0
  25. powermem/agent/wrappers/__init__.py +14 -0
  26. powermem/agent/wrappers/agent_memory_wrapper.py +427 -0
  27. powermem/agent/wrappers/compatibility_wrapper.py +520 -0
  28. powermem/config_loader.py +318 -0
  29. powermem/configs.py +249 -0
  30. powermem/core/__init__.py +19 -0
  31. powermem/core/async_memory.py +1493 -0
  32. powermem/core/audit.py +258 -0
  33. powermem/core/base.py +165 -0
  34. powermem/core/memory.py +1567 -0
  35. powermem/core/setup.py +162 -0
  36. powermem/core/telemetry.py +215 -0
  37. powermem/integrations/__init__.py +17 -0
  38. powermem/integrations/embeddings/__init__.py +13 -0
  39. powermem/integrations/embeddings/aws_bedrock.py +100 -0
  40. powermem/integrations/embeddings/azure_openai.py +55 -0
  41. powermem/integrations/embeddings/base.py +31 -0
  42. powermem/integrations/embeddings/config/base.py +132 -0
  43. powermem/integrations/embeddings/configs.py +31 -0
  44. powermem/integrations/embeddings/factory.py +48 -0
  45. powermem/integrations/embeddings/gemini.py +39 -0
  46. powermem/integrations/embeddings/huggingface.py +41 -0
  47. powermem/integrations/embeddings/langchain.py +35 -0
  48. powermem/integrations/embeddings/lmstudio.py +29 -0
  49. powermem/integrations/embeddings/mock.py +11 -0
  50. powermem/integrations/embeddings/ollama.py +53 -0
  51. powermem/integrations/embeddings/openai.py +49 -0
  52. powermem/integrations/embeddings/qwen.py +102 -0
  53. powermem/integrations/embeddings/together.py +31 -0
  54. powermem/integrations/embeddings/vertexai.py +54 -0
  55. powermem/integrations/llm/__init__.py +18 -0
  56. powermem/integrations/llm/anthropic.py +87 -0
  57. powermem/integrations/llm/base.py +132 -0
  58. powermem/integrations/llm/config/anthropic.py +56 -0
  59. powermem/integrations/llm/config/azure.py +56 -0
  60. powermem/integrations/llm/config/base.py +62 -0
  61. powermem/integrations/llm/config/deepseek.py +56 -0
  62. powermem/integrations/llm/config/ollama.py +56 -0
  63. powermem/integrations/llm/config/openai.py +79 -0
  64. powermem/integrations/llm/config/qwen.py +68 -0
  65. powermem/integrations/llm/config/qwen_asr.py +46 -0
  66. powermem/integrations/llm/config/vllm.py +56 -0
  67. powermem/integrations/llm/configs.py +26 -0
  68. powermem/integrations/llm/deepseek.py +106 -0
  69. powermem/integrations/llm/factory.py +118 -0
  70. powermem/integrations/llm/gemini.py +201 -0
  71. powermem/integrations/llm/langchain.py +65 -0
  72. powermem/integrations/llm/ollama.py +106 -0
  73. powermem/integrations/llm/openai.py +166 -0
  74. powermem/integrations/llm/openai_structured.py +80 -0
  75. powermem/integrations/llm/qwen.py +207 -0
  76. powermem/integrations/llm/qwen_asr.py +171 -0
  77. powermem/integrations/llm/vllm.py +106 -0
  78. powermem/integrations/rerank/__init__.py +20 -0
  79. powermem/integrations/rerank/base.py +43 -0
  80. powermem/integrations/rerank/config/__init__.py +7 -0
  81. powermem/integrations/rerank/config/base.py +27 -0
  82. powermem/integrations/rerank/configs.py +23 -0
  83. powermem/integrations/rerank/factory.py +68 -0
  84. powermem/integrations/rerank/qwen.py +159 -0
  85. powermem/intelligence/__init__.py +17 -0
  86. powermem/intelligence/ebbinghaus_algorithm.py +354 -0
  87. powermem/intelligence/importance_evaluator.py +361 -0
  88. powermem/intelligence/intelligent_memory_manager.py +284 -0
  89. powermem/intelligence/manager.py +148 -0
  90. powermem/intelligence/plugin.py +229 -0
  91. powermem/prompts/__init__.py +29 -0
  92. powermem/prompts/graph/graph_prompts.py +217 -0
  93. powermem/prompts/graph/graph_tools_prompts.py +469 -0
  94. powermem/prompts/importance_evaluation.py +246 -0
  95. powermem/prompts/intelligent_memory_prompts.py +163 -0
  96. powermem/prompts/templates.py +193 -0
  97. powermem/storage/__init__.py +14 -0
  98. powermem/storage/adapter.py +896 -0
  99. powermem/storage/base.py +109 -0
  100. powermem/storage/config/base.py +13 -0
  101. powermem/storage/config/oceanbase.py +58 -0
  102. powermem/storage/config/pgvector.py +52 -0
  103. powermem/storage/config/sqlite.py +27 -0
  104. powermem/storage/configs.py +159 -0
  105. powermem/storage/factory.py +59 -0
  106. powermem/storage/migration_manager.py +438 -0
  107. powermem/storage/oceanbase/__init__.py +8 -0
  108. powermem/storage/oceanbase/constants.py +162 -0
  109. powermem/storage/oceanbase/oceanbase.py +1384 -0
  110. powermem/storage/oceanbase/oceanbase_graph.py +1441 -0
  111. powermem/storage/pgvector/__init__.py +7 -0
  112. powermem/storage/pgvector/pgvector.py +420 -0
  113. powermem/storage/sqlite/__init__.py +0 -0
  114. powermem/storage/sqlite/sqlite.py +218 -0
  115. powermem/storage/sqlite/sqlite_vector_store.py +311 -0
  116. powermem/utils/__init__.py +35 -0
  117. powermem/utils/utils.py +605 -0
  118. powermem/version.py +23 -0
  119. powermem-0.1.0.dist-info/METADATA +187 -0
  120. powermem-0.1.0.dist-info/RECORD +123 -0
  121. powermem-0.1.0.dist-info/WHEEL +5 -0
  122. powermem-0.1.0.dist-info/licenses/LICENSE +206 -0
  123. powermem-0.1.0.dist-info/top_level.txt +1 -0
powermem/storage/pgvector/__init__.py
@@ -0,0 +1,7 @@
+ """
+ PostgreSQL storage module initialization
+ """
+
+ from .pgvector import PGVectorStore
+
+ __all__ = ["PGVectorStore"]
powermem/storage/pgvector/pgvector.py
@@ -0,0 +1,420 @@
+ import json
+ import logging
+ from contextlib import contextmanager
+ from typing import Any, List, Optional
+
+ from powermem.storage.base import VectorStoreBase, OutputData
+ from powermem.utils.utils import generate_snowflake_id
+
+ # Try to import psycopg (psycopg3) first, then fall back to psycopg2
+ try:
+     from psycopg.types.json import Json
+     from psycopg_pool import ConnectionPool
+     PSYCOPG_VERSION = 3
+     logger = logging.getLogger(__name__)
+     logger.info("Using psycopg (psycopg3) with ConnectionPool for PostgreSQL connections")
+ except ImportError:
+     try:
+         from psycopg2.extras import Json, execute_values
+         from psycopg2.pool import ThreadedConnectionPool as ConnectionPool
+         PSYCOPG_VERSION = 2
+         logger = logging.getLogger(__name__)
+         logger.info("Using psycopg2 with ThreadedConnectionPool for PostgreSQL connections")
+     except ImportError:
+         raise ImportError(
+             "Neither 'psycopg' nor 'psycopg2' library is available. "
+             "Please install one of them using 'pip install psycopg[pool]' or 'pip install psycopg2'"
+         )
+
+ logger = logging.getLogger(__name__)
+
+ class PGVectorStore(VectorStoreBase):
+     def __init__(
+         self,
+         dbname,
+         collection_name,
+         embedding_model_dims,
+         user,
+         password,
+         host,
+         port,
+         diskann,
+         hnsw,
+         minconn=1,
+         maxconn=5,
+         sslmode=None,
+         connection_string=None,
+         connection_pool=None,
+     ):
+         """
+         Initialize the PGVector database.
+
+         Args:
+             dbname (str): Database name
+             collection_name (str): Collection name
+             embedding_model_dims (int): Dimension of the embedding vector
+             user (str): Database user
+             password (str): Database password
+             host (str, optional): Database host
+             port (int, optional): Database port
+             diskann (bool, optional): Use DiskANN for faster search
+             hnsw (bool, optional): Use HNSW for faster search
+             minconn (int): Minimum number of connections to keep in the connection pool
+             maxconn (int): Maximum number of connections allowed in the connection pool
+             sslmode (str, optional): SSL mode for PostgreSQL connection (e.g., 'require', 'prefer', 'disable')
+             connection_string (str, optional): PostgreSQL connection string (overrides individual connection parameters)
+             connection_pool (Any, optional): psycopg2 connection pool object (overrides connection string and individual parameters)
+         """
+         self.collection_name = collection_name
+         self.use_diskann = diskann
+         self.use_hnsw = hnsw
+         self.embedding_model_dims = embedding_model_dims
+         self.connection_pool = None
+
+         # Connection setup with priority: connection_pool > connection_string > individual parameters
+         if connection_pool is not None:
+             # Use provided connection pool
+             self.connection_pool = connection_pool
+         elif connection_string:
+             if sslmode:
+                 # Append sslmode to connection string if provided
+                 if 'sslmode=' in connection_string:
+                     # Replace existing sslmode
+                     import re
+                     connection_string = re.sub(r'sslmode=[^ ]*', f'sslmode={sslmode}', connection_string)
+                 else:
+                     # Add sslmode to connection string
+                     connection_string = f"{connection_string} sslmode={sslmode}"
+         else:
+             connection_string = f"postgresql://{user}:{password}@{host}:{port}/{dbname}"
+             if sslmode:
+                 connection_string = f"{connection_string} sslmode={sslmode}"
+
+         if self.connection_pool is None:
+             if PSYCOPG_VERSION == 3:
+                 # psycopg3 ConnectionPool
+                 self.connection_pool = ConnectionPool(conninfo=connection_string, min_size=minconn, max_size=maxconn, open=True)
+             else:
+                 # psycopg2 ThreadedConnectionPool
+                 self.connection_pool = ConnectionPool(minconn=minconn, maxconn=maxconn, dsn=connection_string)
+
+         collections = self.list_cols()
+         if collection_name not in collections:
+             self.create_col()
+
+     @contextmanager
+     def _get_cursor(self, commit: bool = False):
+         """
+         Unified context manager to get a cursor from the appropriate pool.
+         Auto-commits or rolls back based on exception, and returns the connection to the pool.
+         """
+         if PSYCOPG_VERSION == 3:
+             # psycopg3 auto-manages commit/rollback and pool return
+             with self.connection_pool.connection() as conn:
+                 with conn.cursor() as cur:
+                     try:
+                         yield cur
+                         if commit:
+                             conn.commit()
+                     except Exception:
+                         conn.rollback()
+                         logger.error("Error in cursor context (psycopg3)", exc_info=True)
+                         raise
+         else:
+             # psycopg2 manual getconn/putconn
+             conn = self.connection_pool.getconn()
+             cur = conn.cursor()
+             try:
+                 yield cur
+                 if commit:
+                     conn.commit()
+             except Exception as exc:
+                 conn.rollback()
+                 logger.error(f"Error occurred: {exc}")
+                 raise exc
+             finally:
+                 cur.close()
+                 self.connection_pool.putconn(conn)
+
+     def create_col(self, name=None, vector_size=None, distance=None) -> None:
+         """
+         Create a new collection (table in PostgreSQL).
+         Will also initialize vector search index if specified.
+         """
+         with self._get_cursor(commit=True) as cur:
+             cur.execute("CREATE EXTENSION IF NOT EXISTS vector")
+             cur.execute(
+                 f"""
+                 CREATE TABLE IF NOT EXISTS {self.collection_name} (
+                     id BIGINT PRIMARY KEY,
+                     vector vector({self.embedding_model_dims}),
+                     payload JSONB
+                 );
+                 """
+             )
+             if self.use_diskann and self.embedding_model_dims < 2000:
+                 cur.execute("SELECT * FROM pg_extension WHERE extname = 'vectorscale'")
+                 if cur.fetchone():
+                     # Create DiskANN index if extension is installed for faster search
+                     cur.execute(
+                         f"""
+                         CREATE INDEX IF NOT EXISTS {self.collection_name}_diskann_idx
+                         ON {self.collection_name}
+                         USING diskann (vector);
+                         """
+                     )
+             elif self.use_hnsw:
+                 cur.execute(
+                     f"""
+                     CREATE INDEX IF NOT EXISTS {self.collection_name}_hnsw_idx
+                     ON {self.collection_name}
+                     USING hnsw (vector vector_cosine_ops)
+                     """
+                 )
+
+     def insert(self, vectors: list[list[float]], payloads=None, ids=None) -> list[int]:
+         """
+         Insert vectors into the collection.
+
+         Args:
+             vectors: List of vectors to insert
+             payloads: List of payload dictionaries
+             ids: Deprecated parameter (ignored), IDs are now generated using Snowflake algorithm
+
+         Returns:
+             List[int]: List of generated Snowflake IDs
+         """
+         logger.info(f"Inserting {len(vectors)} vectors into collection {self.collection_name}")
+         if not vectors:
+             return []
+
+         # Generate Snowflake IDs for each vector
+         generated_ids = [generate_snowflake_id() for _ in range(len(vectors))]
+
+         json_payloads = [json.dumps(payload) for payload in (payloads or [{}] * len(vectors))]
+         # Include the generated Snowflake ID in the data
+         data = [(vector_id, vector, payload) for vector_id, vector, payload in zip(generated_ids, vectors, json_payloads)]
+
+         if PSYCOPG_VERSION == 3:
+             with self._get_cursor(commit=True) as cur:
+                 # Insert with explicit IDs
+                 cur.executemany(
+                     f"INSERT INTO {self.collection_name} (id, vector, payload) VALUES (%s, %s, %s)",
+                     data,
+                 )
+         else:
+             with self._get_cursor(commit=True) as cur:
+                 # psycopg2: use execute_values
+                 execute_values(
+                     cur,
+                     f"INSERT INTO {self.collection_name} (id, vector, payload) VALUES %s",
+                     data,
+                 )
+
+         logger.debug(f"Successfully inserted {len(vectors)} vectors, generated Snowflake IDs: {generated_ids}")
+         return generated_ids
+
+     def search(
+         self,
+         query: str,
+         vectors: list[float],
+         limit: Optional[int] = 5,
+         filters: Optional[dict] = None,
+     ) -> List[OutputData]:
+         """
+         Search for similar vectors.
+
+         Args:
+             query (str): Query.
+             vectors (List[float]): Query vector.
+             limit (int, optional): Number of results to return. Defaults to 5.
+             filters (Dict, optional): Filters to apply to the search. Defaults to None.
+
+         Returns:
+             list: Search results.
+         """
+         filter_conditions = []
+         filter_params = []
+
+         if filters:
+             for k, v in filters.items():
+                 filter_conditions.append("payload->>%s = %s")
+                 filter_params.extend([k, str(v)])
+
+         filter_clause = "WHERE " + " AND ".join(filter_conditions) if filter_conditions else ""
+
+         with self._get_cursor() as cur:
+             cur.execute(
+                 f"""
+                 SELECT id, vector <=> %s::vector AS distance, payload
+                 FROM {self.collection_name}
+                 {filter_clause}
+                 ORDER BY distance
+                 LIMIT %s
+                 """,
+                 (vectors, *filter_params, limit),
+             )
+
+             results = cur.fetchall()
+             return [OutputData(id=r[0], score=float(r[1]), payload=r[2]) for r in results]
+
+     def delete(self, vector_id: int) -> None:
+         """
+         Delete a vector by ID.
+
+         Args:
+             vector_id (int): ID of the vector to delete.
+         """
+         with self._get_cursor(commit=True) as cur:
+             cur.execute(f"DELETE FROM {self.collection_name} WHERE id = %s", (vector_id,))
+
+     def update(
+         self,
+         vector_id: int,
+         vector: Optional[list[float]] = None,
+         payload: Optional[dict] = None,
+     ) -> None:
+         """
+         Update a vector and its payload.
+
+         Args:
+             vector_id (int): ID of the vector to update.
+             vector (List[float], optional): Updated vector.
+             payload (Dict, optional): Updated payload.
+         """
+         with self._get_cursor(commit=True) as cur:
+             if vector:
+                 cur.execute(
+                     f"UPDATE {self.collection_name} SET vector = %s WHERE id = %s",
+                     (vector, vector_id),
+                 )
+             if payload:
+                 # Handle JSON serialization based on psycopg version
+                 if PSYCOPG_VERSION == 3:
+                     # psycopg3 uses psycopg.types.json.Json
+                     cur.execute(
+                         f"UPDATE {self.collection_name} SET payload = %s WHERE id = %s",
+                         (Json(payload), vector_id),
+                     )
+                 else:
+                     # psycopg2 uses psycopg2.extras.Json
+                     cur.execute(
+                         f"UPDATE {self.collection_name} SET payload = %s WHERE id = %s",
+                         (Json(payload), vector_id),
+                     )
+
+
+     def get(self, vector_id: int) -> OutputData | None:
+         """
+         Retrieve a vector by ID.
+
+         Args:
+             vector_id (int): ID of the vector to retrieve.
+
+         Returns:
+             OutputData: Retrieved vector.
+         """
+         with self._get_cursor() as cur:
+             cur.execute(
+                 f"SELECT id, vector, payload FROM {self.collection_name} WHERE id = %s",
+                 (vector_id,),
+             )
+             result = cur.fetchone()
+             if not result:
+                 return None
+             return OutputData(id=result[0], score=None, payload=result[2])
+
+     def list_cols(self) -> List[str]:
+         """
+         List all collections.
+
+         Returns:
+             List[str]: List of collection names.
+         """
+         with self._get_cursor() as cur:
+             cur.execute("SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'")
+             return [row[0] for row in cur.fetchall()]
+
+     def delete_col(self) -> None:
+         """Delete a collection."""
+         with self._get_cursor(commit=True) as cur:
+             cur.execute(f"DROP TABLE IF EXISTS {self.collection_name}")
+
+     def col_info(self) -> dict[str, Any]:
+         """
+         Get information about a collection.
+
+         Returns:
+             Dict[str, Any]: Collection information.
+         """
+         with self._get_cursor() as cur:
+             cur.execute(
+                 f"""
+                 SELECT
+                     table_name,
+                     (SELECT COUNT(*) FROM {self.collection_name}) as row_count,
+                     (SELECT pg_size_pretty(pg_total_relation_size('{self.collection_name}'))) as total_size
+                 FROM information_schema.tables
+                 WHERE table_schema = 'public' AND table_name = %s
+                 """,
+                 (self.collection_name,),
+             )
+             result = cur.fetchone()
+             return {"name": result[0], "count": result[1], "size": result[2]}
+
+     def list(
+         self,
+         filters: Optional[dict] = None,
+         limit: Optional[int] = 100
+     ) -> List[OutputData]:
+         """
+         List all vectors in a collection.
+
+         Args:
+             filters (Dict, optional): Filters to apply to the list.
+             limit (int, optional): Number of vectors to return. Defaults to 100.
+
+         Returns:
+             List[OutputData]: List of vectors.
+         """
+         filter_conditions = []
+         filter_params = []
+
+         if filters:
+             for k, v in filters.items():
+                 filter_conditions.append("payload->>%s = %s")
+                 filter_params.extend([k, str(v)])
+
+         filter_clause = "WHERE " + " AND ".join(filter_conditions) if filter_conditions else ""
+
+         query = f"""
+             SELECT id, vector, payload
+             FROM {self.collection_name}
+             {filter_clause}
+             LIMIT %s
+         """
+
+         with self._get_cursor() as cur:
+             cur.execute(query, (*filter_params, limit))
+             results = cur.fetchall()
+             return [OutputData(id=r[0], score=None, payload=r[2]) for r in results]
+
+     def __del__(self) -> None:
+         """
+         Close the database connection pool when the object is deleted.
+         """
+         try:
+             # Close pool appropriately
+             if PSYCOPG_VERSION == 3:
+                 self.connection_pool.close()
+             else:
+                 self.connection_pool.closeall()
+         except Exception:
+             logger.error("Error closing database connection pool")
+             pass
+
+     def reset(self) -> None:
+         """Reset the index by deleting and recreating it."""
+         logger.warning(f"Resetting index {self.collection_name}...")
+         self.delete_col()
+         self.create_col()
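
For orientation, here is a minimal usage sketch of the PGVectorStore above. It assumes a reachable PostgreSQL server with the pgvector extension available; the credentials, collection name, and 4-dimensional vectors are placeholder values, not defaults shipped with the package:

from powermem.storage.pgvector import PGVectorStore

store = PGVectorStore(
    dbname="memdb",
    collection_name="memories",
    embedding_model_dims=4,
    user="postgres",
    password="secret",  # placeholder credentials
    host="localhost",
    port=5432,
    diskann=False,
    hnsw=True,  # create_col() then builds an HNSW cosine index
)

# insert() ignores caller-supplied IDs and returns generated Snowflake IDs
ids = store.insert(
    vectors=[[0.1, 0.2, 0.3, 0.4]],
    payloads=[{"user_id": "alice", "text": "likes tea"}],
)

# search() orders by cosine distance (vector <=> query) and applies
# payload->>key equality filters
for hit in store.search(query="tea", vectors=[0.1, 0.2, 0.3, 0.4],
                        limit=5, filters={"user_id": "alice"}):
    print(hit.id, hit.score, hit.payload)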
powermem/storage/sqlite/__init__.py (file without changes)
powermem/storage/sqlite/sqlite.py
@@ -0,0 +1,218 @@
+ import logging
+ import sqlite3
+ import threading
+ import uuid
+ from typing import Any, Dict, List, Optional
+
+ logger = logging.getLogger(__name__)
+
+
+ class SQLiteManager:
+     def __init__(self, db_path: str = ":memory:"):
+         self.db_path = db_path
+         self.connection = sqlite3.connect(self.db_path, check_same_thread=False)
+         self._lock = threading.Lock()
+         self._migrate_history_table()
+         self._create_history_table()
+
+     def _migrate_history_table(self) -> None:
+         """
+         If a pre-existing history table had the old group-chat columns,
+         rename it, create the new schema, copy the intersecting data, then
+         drop the old table.
+         """
+         with self._lock:
+             try:
+                 # Start a transaction
+                 self.connection.execute("BEGIN")
+                 cur = self.connection.cursor()
+
+                 cur.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='history'")
+                 if cur.fetchone() is None:
+                     self.connection.execute("COMMIT")
+                     return  # nothing to migrate
+
+                 cur.execute("PRAGMA table_info(history)")
+                 old_cols = {row[1] for row in cur.fetchall()}
+
+                 expected_cols = {
+                     "id",
+                     "memory_id",
+                     "old_memory",
+                     "new_memory",
+                     "event",
+                     "created_at",
+                     "updated_at",
+                     "is_deleted",
+                     "actor_id",
+                     "role",
+                 }
+
+                 if old_cols == expected_cols:
+                     self.connection.execute("COMMIT")
+                     return
+
+                 logger.info("Migrating history table to new schema (no convo columns).")
+
+                 # Clean up any existing history_old table from previous failed migration
+                 cur.execute("DROP TABLE IF EXISTS history_old")
+
+                 # Rename the current history table
+                 cur.execute("ALTER TABLE history RENAME TO history_old")
+
+                 # Create the new history table with updated schema
+                 cur.execute(
+                     """
+                     CREATE TABLE history (
+                         id TEXT PRIMARY KEY,
+                         memory_id TEXT,
+                         old_memory TEXT,
+                         new_memory TEXT,
+                         event TEXT,
+                         created_at DATETIME,
+                         updated_at DATETIME,
+                         is_deleted INTEGER,
+                         actor_id TEXT,
+                         role TEXT
+                     )
+                     """
+                 )
+
+                 # Copy data from old table to new table
+                 intersecting = list(expected_cols & old_cols)
+                 if intersecting:
+                     cols_csv = ", ".join(intersecting)
+                     cur.execute(f"INSERT INTO history ({cols_csv}) SELECT {cols_csv} FROM history_old")
+
+                 # Drop the old table
+                 cur.execute("DROP TABLE history_old")
+
+                 # Commit the transaction
+                 self.connection.execute("COMMIT")
+                 logger.info("History table migration completed successfully.")
+
+             except Exception as e:
+                 # Rollback the transaction on any error
+                 self.connection.execute("ROLLBACK")
+                 logger.error(f"History table migration failed: {e}")
+                 raise
+
+     def _create_history_table(self) -> None:
+         with self._lock:
+             try:
+                 self.connection.execute("BEGIN")
+                 self.connection.execute(
+                     """
+                     CREATE TABLE IF NOT EXISTS history (
+                         id TEXT PRIMARY KEY,
+                         memory_id TEXT,
+                         old_memory TEXT,
+                         new_memory TEXT,
+                         event TEXT,
+                         created_at DATETIME,
+                         updated_at DATETIME,
+                         is_deleted INTEGER,
+                         actor_id TEXT,
+                         role TEXT
+                     )
+                     """
+                 )
+                 self.connection.execute("COMMIT")
+             except Exception as e:
+                 self.connection.execute("ROLLBACK")
+                 logger.error(f"Failed to create history table: {e}")
+                 raise
+
+     def add_history(
+         self,
+         memory_id: str,
+         old_memory: Optional[str],
+         new_memory: Optional[str],
+         event: str,
+         *,
+         created_at: Optional[str] = None,
+         updated_at: Optional[str] = None,
+         is_deleted: int = 0,
+         actor_id: Optional[str] = None,
+         role: Optional[str] = None,
+     ) -> None:
+         with self._lock:
+             try:
+                 self.connection.execute("BEGIN")
+                 self.connection.execute(
+                     """
+                     INSERT INTO history (
+                         id, memory_id, old_memory, new_memory, event,
+                         created_at, updated_at, is_deleted, actor_id, role
+                     )
+                     VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                     """,
+                     (
+                         str(uuid.uuid4()),
+                         memory_id,
+                         old_memory,
+                         new_memory,
+                         event,
+                         created_at,
+                         updated_at,
+                         is_deleted,
+                         actor_id,
+                         role,
+                     ),
+                 )
+                 self.connection.execute("COMMIT")
+             except Exception as e:
+                 self.connection.execute("ROLLBACK")
+                 logger.error(f"Failed to add history record: {e}")
+                 raise
+
+     def get_history(self, memory_id: str) -> List[Dict[str, Any]]:
+         with self._lock:
+             cur = self.connection.execute(
+                 """
+                 SELECT id, memory_id, old_memory, new_memory, event,
+                        created_at, updated_at, is_deleted, actor_id, role
+                 FROM history
+                 WHERE memory_id = ?
+                 ORDER BY created_at ASC, DATETIME(updated_at) ASC
+                 """,
+                 (memory_id,),
+             )
+             rows = cur.fetchall()
+
+             return [
+                 {
+                     "id": r[0],
+                     "memory_id": r[1],
+                     "old_memory": r[2],
+                     "new_memory": r[3],
+                     "event": r[4],
+                     "created_at": r[5],
+                     "updated_at": r[6],
+                     "is_deleted": bool(r[7]),
+                     "actor_id": r[8],
+                     "role": r[9],
+                 }
+                 for r in rows
+             ]
+
+     def reset(self) -> None:
+         """Drop and recreate the history table."""
+         with self._lock:
+             try:
+                 self.connection.execute("BEGIN")
+                 self.connection.execute("DROP TABLE IF EXISTS history")
+                 self.connection.execute("COMMIT")
+             except Exception as e:
+                 self.connection.execute("ROLLBACK")
+                 logger.error(f"Failed to reset history table: {e}")
+                 raise
+         self._create_history_table()  # recreate outside the non-reentrant lock to avoid self-deadlock
+
+     def close(self) -> None:
+         if self.connection:
+             self.connection.close()
+             self.connection = None
+
+     def __del__(self):
+         self.close()
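
A similarly minimal sketch of the history manager; the import path follows the module location in the file list above, and the memory ID, event label, and timestamp are placeholder values:

from powermem.storage.sqlite.sqlite import SQLiteManager

db = SQLiteManager(db_path=":memory:")  # in-memory database for illustration

db.add_history(
    memory_id="mem-1",
    old_memory=None,
    new_memory="User likes tea",
    event="ADD",  # free-form event label; the schema does not constrain it
    created_at="2024-01-01T00:00:00",
)

# Rows come back ordered by created_at, then DATETIME(updated_at)
for record in db.get_history("mem-1"):
    print(record["event"], record["new_memory"], record["is_deleted"])

db.close()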