memorisdk 1.0.2__py3-none-any.whl → 2.0.0__py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.

Potentially problematic release: this version of memorisdk might be problematic.

Files changed (46)
  1. memori/__init__.py +24 -8
  2. memori/agents/conscious_agent.py +252 -414
  3. memori/agents/memory_agent.py +487 -224
  4. memori/agents/retrieval_agent.py +416 -60
  5. memori/config/memory_manager.py +323 -0
  6. memori/core/conversation.py +393 -0
  7. memori/core/database.py +386 -371
  8. memori/core/memory.py +1638 -531
  9. memori/core/providers.py +217 -0
  10. memori/database/adapters/__init__.py +10 -0
  11. memori/database/adapters/mysql_adapter.py +331 -0
  12. memori/database/adapters/postgresql_adapter.py +291 -0
  13. memori/database/adapters/sqlite_adapter.py +229 -0
  14. memori/database/auto_creator.py +320 -0
  15. memori/database/connection_utils.py +207 -0
  16. memori/database/connectors/base_connector.py +283 -0
  17. memori/database/connectors/mysql_connector.py +240 -18
  18. memori/database/connectors/postgres_connector.py +277 -4
  19. memori/database/connectors/sqlite_connector.py +178 -3
  20. memori/database/models.py +400 -0
  21. memori/database/queries/base_queries.py +1 -1
  22. memori/database/queries/memory_queries.py +91 -2
  23. memori/database/query_translator.py +222 -0
  24. memori/database/schema_generators/__init__.py +7 -0
  25. memori/database/schema_generators/mysql_schema_generator.py +215 -0
  26. memori/database/search/__init__.py +8 -0
  27. memori/database/search/mysql_search_adapter.py +255 -0
  28. memori/database/search/sqlite_search_adapter.py +180 -0
  29. memori/database/search_service.py +548 -0
  30. memori/database/sqlalchemy_manager.py +839 -0
  31. memori/integrations/__init__.py +36 -11
  32. memori/integrations/litellm_integration.py +340 -6
  33. memori/integrations/openai_integration.py +506 -240
  34. memori/utils/input_validator.py +395 -0
  35. memori/utils/pydantic_models.py +138 -36
  36. memori/utils/query_builder.py +530 -0
  37. memori/utils/security_audit.py +594 -0
  38. memori/utils/security_integration.py +339 -0
  39. memori/utils/transaction_manager.py +547 -0
  40. {memorisdk-1.0.2.dist-info → memorisdk-2.0.0.dist-info}/METADATA +44 -17
  41. memorisdk-2.0.0.dist-info/RECORD +67 -0
  42. memorisdk-1.0.2.dist-info/RECORD +0 -44
  43. memorisdk-1.0.2.dist-info/entry_points.txt +0 -2
  44. {memorisdk-1.0.2.dist-info → memorisdk-2.0.0.dist-info}/WHEEL +0 -0
  45. {memorisdk-1.0.2.dist-info → memorisdk-2.0.0.dist-info}/licenses/LICENSE +0 -0
  46. {memorisdk-1.0.2.dist-info → memorisdk-2.0.0.dist-info}/top_level.txt +0 -0
memori/database/models.py
@@ -0,0 +1,400 @@
+ """
+ SQLAlchemy models for Memori v2.0
+ Provides cross-database compatibility using SQLAlchemy ORM
+ """
+
+ from datetime import datetime
+ from typing import Any, Dict
+
+ from sqlalchemy import (
+     JSON,
+     Boolean,
+     Column,
+     DateTime,
+     Float,
+     ForeignKey,
+     Index,
+     Integer,
+     String,
+     Text,
+     create_engine,
+ )
+ from sqlalchemy.ext.declarative import declarative_base
+ from sqlalchemy.orm import relationship, sessionmaker
+
+ Base: Any = declarative_base()
+
+
+ class ChatHistory(Base):
+     """Chat history table - stores all conversations"""
+
+     __tablename__ = "chat_history"
+
+     chat_id = Column(String(255), primary_key=True)
+     user_input = Column(Text, nullable=False)
+     ai_output = Column(Text, nullable=False)
+     model = Column(String(255), nullable=False)
+     timestamp = Column(DateTime, nullable=False, default=datetime.utcnow)
+     session_id = Column(String(255), nullable=False)
+     namespace = Column(String(255), nullable=False, default="default")
+     tokens_used = Column(Integer, default=0)
+     metadata_json = Column(JSON)
+
+     # Relationships
+     short_term_memories = relationship(
+         "ShortTermMemory", back_populates="chat", cascade="all, delete-orphan"
+     )
+
+     # Indexes
+     __table_args__ = (
+         Index("idx_chat_namespace_session", "namespace", "session_id"),
+         Index("idx_chat_timestamp", "timestamp"),
+         Index("idx_chat_model", "model"),
+     )
+
+
+ class ShortTermMemory(Base):
+     """Short-term memory table with expiration"""
+
+     __tablename__ = "short_term_memory"
+
+     memory_id = Column(String(255), primary_key=True)
+     chat_id = Column(
+         String(255), ForeignKey("chat_history.chat_id", ondelete="SET NULL")
+     )
+     processed_data = Column(JSON, nullable=False)
+     importance_score = Column(Float, nullable=False, default=0.5)
+     category_primary = Column(String(255), nullable=False)
+     retention_type = Column(String(50), nullable=False, default="short_term")
+     namespace = Column(String(255), nullable=False, default="default")
+     created_at = Column(DateTime, nullable=False, default=datetime.utcnow)
+     expires_at = Column(DateTime)
+     access_count = Column(Integer, default=0)
+     last_accessed = Column(DateTime)
+     searchable_content = Column(Text, nullable=False)
+     summary = Column(Text, nullable=False)
+     is_permanent_context = Column(Boolean, default=False)
+
+     # Relationships
+     chat = relationship("ChatHistory", back_populates="short_term_memories")
+
+     # Indexes
+     __table_args__ = (
+         Index("idx_short_term_namespace", "namespace"),
+         Index("idx_short_term_category", "category_primary"),
+         Index("idx_short_term_importance", "importance_score"),
+         Index("idx_short_term_expires", "expires_at"),
+         Index("idx_short_term_created", "created_at"),
+         Index("idx_short_term_access", "access_count", "last_accessed"),
+         Index("idx_short_term_permanent", "is_permanent_context"),
+         Index(
+             "idx_short_term_namespace_category",
+             "namespace",
+             "category_primary",
+             "importance_score",
+         ),
+     )
+
+
+ class LongTermMemory(Base):
+     """Long-term memory table with enhanced classification"""
+
+     __tablename__ = "long_term_memory"
+
+     memory_id = Column(String(255), primary_key=True)
+     original_chat_id = Column(String(255))
+     processed_data = Column(JSON, nullable=False)
+     importance_score = Column(Float, nullable=False, default=0.5)
+     category_primary = Column(String(255), nullable=False)
+     retention_type = Column(String(50), nullable=False, default="long_term")
+     namespace = Column(String(255), nullable=False, default="default")
+     created_at = Column(DateTime, nullable=False, default=datetime.utcnow)
+     access_count = Column(Integer, default=0)
+     last_accessed = Column(DateTime)
+     searchable_content = Column(Text, nullable=False)
+     summary = Column(Text, nullable=False)
+     novelty_score = Column(Float, default=0.5)
+     relevance_score = Column(Float, default=0.5)
+     actionability_score = Column(Float, default=0.5)
+
+     # Enhanced Classification Fields
+     classification = Column(String(50), nullable=False, default="conversational")
+     memory_importance = Column(String(20), nullable=False, default="medium")
+     topic = Column(String(255))
+     entities_json = Column(JSON)
+     keywords_json = Column(JSON)
+
+     # Conscious Context Flags
+     is_user_context = Column(Boolean, default=False)
+     is_preference = Column(Boolean, default=False)
+     is_skill_knowledge = Column(Boolean, default=False)
+     is_current_project = Column(Boolean, default=False)
+     promotion_eligible = Column(Boolean, default=False)
+
+     # Memory Management
+     duplicate_of = Column(String(255))
+     supersedes_json = Column(JSON)
+     related_memories_json = Column(JSON)
+
+     # Technical Metadata
+     confidence_score = Column(Float, default=0.8)
+     extraction_timestamp = Column(DateTime, nullable=False, default=datetime.utcnow)
+     classification_reason = Column(Text)
+
+     # Processing Status
+     processed_for_duplicates = Column(Boolean, default=False)
+     conscious_processed = Column(Boolean, default=False)
+
+     # Indexes
+     __table_args__ = (
+         Index("idx_long_term_namespace", "namespace"),
+         Index("idx_long_term_category", "category_primary"),
+         Index("idx_long_term_importance", "importance_score"),
+         Index("idx_long_term_created", "created_at"),
+         Index("idx_long_term_access", "access_count", "last_accessed"),
+         Index(
+             "idx_long_term_scores",
+             "novelty_score",
+             "relevance_score",
+             "actionability_score",
+         ),
+         Index("idx_long_term_classification", "classification"),
+         Index("idx_long_term_memory_importance", "memory_importance"),
+         Index("idx_long_term_topic", "topic"),
+         Index(
+             "idx_long_term_conscious_flags",
+             "is_user_context",
+             "is_preference",
+             "is_skill_knowledge",
+             "promotion_eligible",
+         ),
+         Index("idx_long_term_conscious_processed", "conscious_processed"),
+         Index("idx_long_term_duplicates", "processed_for_duplicates"),
+         Index("idx_long_term_confidence", "confidence_score"),
+         Index(
+             "idx_long_term_namespace_category",
+             "namespace",
+             "category_primary",
+             "importance_score",
+         ),
+     )
+
+
+ # Database-specific configurations
+ def configure_mysql_fulltext(engine):
+     """Configure MySQL FULLTEXT indexes"""
+     if engine.dialect.name == "mysql":
+         with engine.connect() as conn:
+             try:
+                 # Create FULLTEXT indexes for MySQL
+                 conn.execute(
+                     "ALTER TABLE short_term_memory ADD FULLTEXT INDEX ft_short_term_search (searchable_content, summary)"
+                 )
+                 conn.execute(
+                     "ALTER TABLE long_term_memory ADD FULLTEXT INDEX ft_long_term_search (searchable_content, summary)"
+                 )
+                 conn.execute(
+                     "ALTER TABLE long_term_memory ADD FULLTEXT INDEX ft_long_term_topic (topic)"
+                 )
+                 conn.commit()
+             except Exception:
+                 # Indexes might already exist
+                 pass
+
+
+ def configure_postgresql_fts(engine):
+     """Configure PostgreSQL full-text search"""
+     if engine.dialect.name == "postgresql":
+         with engine.connect() as conn:
+             try:
+                 # Add tsvector columns for PostgreSQL
+                 conn.execute(
+                     "ALTER TABLE short_term_memory ADD COLUMN IF NOT EXISTS search_vector tsvector"
+                 )
+                 conn.execute(
+                     "ALTER TABLE long_term_memory ADD COLUMN IF NOT EXISTS search_vector tsvector"
+                 )
+
+                 # Create GIN indexes
+                 conn.execute(
+                     "CREATE INDEX IF NOT EXISTS idx_short_term_search_vector ON short_term_memory USING GIN(search_vector)"
+                 )
+                 conn.execute(
+                     "CREATE INDEX IF NOT EXISTS idx_long_term_search_vector ON long_term_memory USING GIN(search_vector)"
+                 )
+
+                 # Create triggers to maintain tsvector
+                 conn.execute(
+                     """
+                     CREATE OR REPLACE FUNCTION update_short_term_search_vector() RETURNS trigger AS $$
+                     BEGIN
+                         NEW.search_vector := to_tsvector('english', COALESCE(NEW.searchable_content, '') || ' ' || COALESCE(NEW.summary, ''));
+                         RETURN NEW;
+                     END
+                     $$ LANGUAGE plpgsql;
+                     """
+                 )
+
+                 conn.execute(
+                     """
+                     CREATE TRIGGER update_short_term_search_vector_trigger
+                     BEFORE INSERT OR UPDATE ON short_term_memory
+                     FOR EACH ROW EXECUTE FUNCTION update_short_term_search_vector();
+                     """
+                 )
+
+                 conn.execute(
+                     """
+                     CREATE OR REPLACE FUNCTION update_long_term_search_vector() RETURNS trigger AS $$
+                     BEGIN
+                         NEW.search_vector := to_tsvector('english', COALESCE(NEW.searchable_content, '') || ' ' || COALESCE(NEW.summary, '') || ' ' || COALESCE(NEW.topic, ''));
+                         RETURN NEW;
+                     END
+                     $$ LANGUAGE plpgsql;
+                     """
+                 )
+
+                 conn.execute(
+                     """
+                     CREATE TRIGGER update_long_term_search_vector_trigger
+                     BEFORE INSERT OR UPDATE ON long_term_memory
+                     FOR EACH ROW EXECUTE FUNCTION update_long_term_search_vector();
+                     """
+                 )
+
+                 conn.commit()
+             except Exception:
+                 # Extensions or functions might already exist
+                 pass
+
+
+ def configure_sqlite_fts(engine):
+     """Configure SQLite FTS5"""
+     if engine.dialect.name == "sqlite":
+         with engine.connect() as conn:
+             try:
+                 # Create FTS5 virtual table for SQLite
+                 conn.execute(
+                     """
+                     CREATE VIRTUAL TABLE IF NOT EXISTS memory_search_fts USING fts5(
+                         memory_id,
+                         memory_type,
+                         namespace,
+                         searchable_content,
+                         summary,
+                         category_primary,
+                         content='',
+                         contentless_delete=1
+                     )
+                     """
+                 )
+
+                 # Create triggers to maintain FTS5 index
+                 conn.execute(
+                     """
+                     CREATE TRIGGER IF NOT EXISTS short_term_memory_fts_insert AFTER INSERT ON short_term_memory
+                     BEGIN
+                         INSERT INTO memory_search_fts(memory_id, memory_type, namespace, searchable_content, summary, category_primary)
+                         VALUES (NEW.memory_id, 'short_term', NEW.namespace, NEW.searchable_content, NEW.summary, NEW.category_primary);
+                     END
+                     """
+                 )
+
+                 conn.execute(
+                     """
+                     CREATE TRIGGER IF NOT EXISTS long_term_memory_fts_insert AFTER INSERT ON long_term_memory
+                     BEGIN
+                         INSERT INTO memory_search_fts(memory_id, memory_type, namespace, searchable_content, summary, category_primary)
+                         VALUES (NEW.memory_id, 'long_term', NEW.namespace, NEW.searchable_content, NEW.summary, NEW.category_primary);
+                     END
+                     """
+                 )
+
+                 conn.execute(
+                     """
+                     CREATE TRIGGER IF NOT EXISTS short_term_memory_fts_delete AFTER DELETE ON short_term_memory
+                     BEGIN
+                         DELETE FROM memory_search_fts WHERE memory_id = OLD.memory_id AND memory_type = 'short_term';
+                     END
+                     """
+                 )
+
+                 conn.execute(
+                     """
+                     CREATE TRIGGER IF NOT EXISTS long_term_memory_fts_delete AFTER DELETE ON long_term_memory
+                     BEGIN
+                         DELETE FROM memory_search_fts WHERE memory_id = OLD.memory_id AND memory_type = 'long_term';
+                     END
+                     """
+                 )
+
+                 conn.commit()
+             except Exception:
+                 # FTS5 might not be available
+                 pass
+
+
+ class DatabaseManager:
+     """SQLAlchemy-based database manager for cross-database compatibility"""
+
+     def __init__(self, database_url: str):
+         self.database_url = database_url
+         self.engine = create_engine(
+             database_url,
+             json_serializer=self._json_serializer,
+             json_deserializer=self._json_deserializer,
+             echo=False,  # Set to True for SQL debugging
+         )
+
+         # Configure database-specific features
+         self._setup_database_features()
+
+         # Create session factory
+         self.SessionLocal = sessionmaker(bind=self.engine)
+
+     def _json_serializer(self, obj):
+         """Custom JSON serializer"""
+         import json
+
+         return json.dumps(obj, default=str, ensure_ascii=False)
+
+     def _json_deserializer(self, value):
+         """Custom JSON deserializer"""
+         import json
+
+         return json.loads(value)
+
+     def _setup_database_features(self):
+         """Setup database-specific features like full-text search"""
+         dialect_name = self.engine.dialect.name
+
+         if dialect_name == "mysql":
+             configure_mysql_fulltext(self.engine)
+         elif dialect_name == "postgresql":
+             configure_postgresql_fts(self.engine)
+         elif dialect_name == "sqlite":
+             configure_sqlite_fts(self.engine)
+
+     def create_tables(self):
+         """Create all tables"""
+         Base.metadata.create_all(bind=self.engine)
+
+         # Setup database-specific search features after table creation
+         self._setup_database_features()
+
+     def get_session(self):
+         """Get database session"""
+         return self.SessionLocal()
+
+     def get_database_info(self) -> Dict[str, Any]:
+         """Get database information"""
+         return {
+             "database_type": self.engine.dialect.name,
+             "database_url": (
+                 self.database_url.split("@")[-1]
+                 if "@" in self.database_url
+                 else self.database_url
+             ),
+             "driver": self.engine.dialect.driver,
+             "server_version": getattr(self.engine.dialect, "server_version_info", None),
+         }
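
Since this file is new in 2.0.0, a minimal usage sketch of the DatabaseManager it introduces may help orient readers. This is only a sketch: the import path assumes the file's location (memori/database/models.py) maps to memori.database.models, and the SQLite URL, IDs, and field values are illustrative, not taken from the package.

# Hedged usage sketch for the new DatabaseManager (illustrative values only).
from datetime import datetime

from memori.database.models import ChatHistory, DatabaseManager

# A SQLite URL keeps the example self-contained; MySQL/PostgreSQL URLs would
# exercise the FULLTEXT / tsvector setup paths instead.
manager = DatabaseManager("sqlite:///memori_example.db")
manager.create_tables()

session = manager.get_session()
try:
    session.add(
        ChatHistory(
            chat_id="chat-001",  # hypothetical IDs and text
            user_input="What is Memori?",
            ai_output="Memori is a memory layer for LLM apps.",
            model="gpt-4o-mini",
            timestamp=datetime.utcnow(),
            session_id="session-001",
            namespace="default",
            tokens_used=42,
            metadata_json={"source": "example"},
        )
    )
    session.commit()
    print(manager.get_database_info())
finally:
    session.close()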
memori/database/queries/base_queries.py
@@ -85,7 +85,7 @@ class SchemaQueries:
  rule_text TEXT NOT NULL,
  rule_type TEXT NOT NULL,
  priority INTEGER DEFAULT 5,
- active BOOLEAN DEFAULT 1,
+ active BOOLEAN DEFAULT TRUE,
  context_conditions TEXT,
  namespace TEXT NOT NULL DEFAULT 'default',
  created_at TIMESTAMP NOT NULL,
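
The one-line change above swaps a SQLite-flavoured integer default for the standard boolean literal. A small self-contained check of why TRUE is the portable spelling (the demo_rules table is purely illustrative and not part of the package):

# SQLite treats BOOLEAN as INTEGER and accepts either literal, but PostgreSQL
# rejects an integer default on a boolean column and MySQL also accepts TRUE,
# so TRUE is the spelling that works across all three supported backends.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE demo_rules (active BOOLEAN DEFAULT TRUE)")
conn.execute("INSERT INTO demo_rules DEFAULT VALUES")
print(conn.execute("SELECT active FROM demo_rules").fetchone())  # (1,) in SQLite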
memori/database/queries/memory_queries.py
@@ -48,8 +48,13 @@ class MemoryQueries(BaseQueries):
  INSERT INTO long_term_memory (
  memory_id, original_chat_id, processed_data, importance_score, category_primary,
  retention_type, namespace, created_at, searchable_content, summary,
- novelty_score, relevance_score, actionability_score
- ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+ novelty_score, relevance_score, actionability_score,
+ classification, memory_importance, topic, entities_json, keywords_json,
+ is_user_context, is_preference, is_skill_knowledge, is_current_project, promotion_eligible,
+ duplicate_of, supersedes_json, related_memories_json,
+ confidence_score, extraction_timestamp, classification_reason,
+ processed_for_duplicates, conscious_processed
+ ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
  """

  INSERT_RULES_MEMORY = """
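
The widened INSERT above now has to keep roughly thirty positional placeholders aligned with the column list by hand. As a side note (a sketch, not how the package necessarily builds its SQL), deriving both halves of the statement from a single column tuple is one way to keep them in sync; the COLUMNS tuple below simply mirrors the column order shown in the diff:

# Hedged sketch: keeping the VALUES placeholders in sync with the column list.
COLUMNS = (
    "memory_id", "original_chat_id", "processed_data", "importance_score",
    "category_primary", "retention_type", "namespace", "created_at",
    "searchable_content", "summary", "novelty_score", "relevance_score",
    "actionability_score", "classification", "memory_importance", "topic",
    "entities_json", "keywords_json", "is_user_context", "is_preference",
    "is_skill_knowledge", "is_current_project", "promotion_eligible",
    "duplicate_of", "supersedes_json", "related_memories_json",
    "confidence_score", "extraction_timestamp", "classification_reason",
    "processed_for_duplicates", "conscious_processed",
)

# Generating the placeholder list from COLUMNS guarantees the counts match,
# which is easy to get wrong when a statement grows this much.
INSERT_LONG_TERM = "INSERT INTO long_term_memory ({}) VALUES ({})".format(
    ", ".join(COLUMNS), ", ".join("?" for _ in COLUMNS)
)
print(len(COLUMNS), "columns ->", INSERT_LONG_TERM.count("?"), "placeholders")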
@@ -176,3 +181,87 @@ class MemoryQueries(BaseQueries):
  ORDER BY created_at DESC
  LIMIT ?
  """
+
+ # Conscious Context Queries
+ SELECT_CONSCIOUS_MEMORIES = """
+ SELECT memory_id, processed_data, summary, classification, importance_score,
+ is_user_context, is_preference, is_skill_knowledge, is_current_project,
+ promotion_eligible, created_at
+ FROM long_term_memory
+ WHERE namespace = ?
+ AND (
+ classification = 'conscious-info'
+ OR promotion_eligible = ?
+ OR is_user_context = ?
+ )
+ ORDER BY importance_score DESC, extraction_timestamp DESC
+ """
+
+ SELECT_UNPROCESSED_CONSCIOUS = """
+ SELECT memory_id, processed_data, classification, is_user_context, promotion_eligible
+ FROM long_term_memory
+ WHERE namespace = ? AND conscious_processed = ?
+ AND (classification = 'conscious-info' OR promotion_eligible = ? OR is_user_context = ?)
+ """
+
+ SELECT_USER_CONTEXT_PROFILE = """
+ SELECT processed_data FROM short_term_memory
+ WHERE namespace = ? AND is_permanent_context = ?
+ AND category_primary = 'user_context'
+ """
+
+ INSERT_USER_CONTEXT_PROFILE = """
+ INSERT OR REPLACE INTO short_term_memory (
+ memory_id, processed_data, importance_score, category_primary,
+ retention_type, namespace, created_at, expires_at,
+ searchable_content, summary, is_permanent_context
+ ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+ """
+
+ MARK_CONSCIOUS_PROCESSED = """
+ UPDATE long_term_memory
+ SET conscious_processed = ?
+ WHERE memory_id = ? AND namespace = ?
+ """
+
+ # Classification and Filtering Queries
+ SELECT_MEMORIES_BY_CLASSIFICATION = """
+ SELECT memory_id, processed_data, importance_score, classification, created_at, summary
+ FROM long_term_memory
+ WHERE namespace = ? AND classification = ?
+ ORDER BY importance_score DESC, created_at DESC
+ LIMIT ?
+ """
+
+ SELECT_MEMORIES_FOR_DEDUPLICATION = """
+ SELECT memory_id, summary, searchable_content, classification, created_at
+ FROM long_term_memory
+ WHERE namespace = :namespace AND processed_for_duplicates = :processed_for_duplicates
+ ORDER BY created_at DESC
+ LIMIT :limit
+ """
+
+ UPDATE_DUPLICATE_STATUS = """
+ UPDATE long_term_memory
+ SET duplicate_of = ?, processed_for_duplicates = ?
+ WHERE memory_id = ? AND namespace = ?
+ """
+
+ SELECT_PROMOTION_ELIGIBLE_MEMORIES = """
+ SELECT memory_id, processed_data, summary, classification
+ FROM long_term_memory
+ WHERE namespace = ? AND promotion_eligible = ?
+ AND conscious_processed = ?
+ """
+
+ # Performance Queries
+ SELECT_MEMORIES_WITH_CONTEXT_FLAGS = """
+ SELECT memory_id, processed_data, classification,
+ is_user_context, is_preference, is_skill_knowledge, is_current_project,
+ confidence_score, created_at
+ FROM long_term_memory
+ WHERE namespace = ?
+ AND (is_user_context = ? OR is_preference = ? OR is_skill_knowledge = ? OR is_current_project = ?)
+ ORDER BY importance_score DESC, created_at DESC
+ LIMIT ?
+ """