memorisdk 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of memorisdk might be problematic. Click here for more details.

Files changed (44)
  1. memoriai/__init__.py +140 -0
  2. memoriai/agents/__init__.py +7 -0
  3. memoriai/agents/conscious_agent.py +506 -0
  4. memoriai/agents/memory_agent.py +322 -0
  5. memoriai/agents/retrieval_agent.py +579 -0
  6. memoriai/config/__init__.py +14 -0
  7. memoriai/config/manager.py +281 -0
  8. memoriai/config/settings.py +287 -0
  9. memoriai/core/__init__.py +6 -0
  10. memoriai/core/database.py +966 -0
  11. memoriai/core/memory.py +1349 -0
  12. memoriai/database/__init__.py +5 -0
  13. memoriai/database/connectors/__init__.py +9 -0
  14. memoriai/database/connectors/mysql_connector.py +159 -0
  15. memoriai/database/connectors/postgres_connector.py +158 -0
  16. memoriai/database/connectors/sqlite_connector.py +148 -0
  17. memoriai/database/queries/__init__.py +15 -0
  18. memoriai/database/queries/base_queries.py +204 -0
  19. memoriai/database/queries/chat_queries.py +157 -0
  20. memoriai/database/queries/entity_queries.py +236 -0
  21. memoriai/database/queries/memory_queries.py +178 -0
  22. memoriai/database/templates/__init__.py +0 -0
  23. memoriai/database/templates/basic_template.py +0 -0
  24. memoriai/database/templates/schemas/__init__.py +0 -0
  25. memoriai/integrations/__init__.py +68 -0
  26. memoriai/integrations/anthropic_integration.py +194 -0
  27. memoriai/integrations/litellm_integration.py +11 -0
  28. memoriai/integrations/openai_integration.py +273 -0
  29. memoriai/scripts/llm_text.py +50 -0
  30. memoriai/tools/__init__.py +5 -0
  31. memoriai/tools/memory_tool.py +544 -0
  32. memoriai/utils/__init__.py +89 -0
  33. memoriai/utils/exceptions.py +418 -0
  34. memoriai/utils/helpers.py +433 -0
  35. memoriai/utils/logging.py +204 -0
  36. memoriai/utils/pydantic_models.py +258 -0
  37. memoriai/utils/schemas.py +0 -0
  38. memoriai/utils/validators.py +339 -0
  39. memorisdk-1.0.0.dist-info/METADATA +386 -0
  40. memorisdk-1.0.0.dist-info/RECORD +44 -0
  41. memorisdk-1.0.0.dist-info/WHEEL +5 -0
  42. memorisdk-1.0.0.dist-info/entry_points.txt +2 -0
  43. memorisdk-1.0.0.dist-info/licenses/LICENSE +203 -0
  44. memorisdk-1.0.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,433 @@
1
+ """
2
+ Utility helper functions for Memoriai
3
+ """
4
+
5
+ import asyncio
6
+ import functools
7
+ import hashlib
8
+ import json
9
+ import uuid
10
+ from datetime import datetime, timedelta
11
+ from pathlib import Path
12
+ from typing import Any, Awaitable, Callable, Dict, List, Optional, TypeVar, Union
13
+
14
+ from .exceptions import MemoriError
15
+
16
+ T = TypeVar("T")
17
+
18
+
19
class StringUtils:
    """String manipulation utilities."""

    # Common English stopwords, hoisted to a class-level frozenset so the
    # set is built once instead of on every extract_keywords() call.
    _STOPWORDS = frozenset({
        "the", "a", "an", "and", "or", "but", "in", "on", "at", "to",
        "for", "of", "with", "by", "from", "up", "about", "into",
        "through", "during", "before", "after", "above", "below",
        "between", "among", "is", "are", "was", "were", "be", "been",
        "being", "have", "has", "had", "do", "does", "did", "will",
        "would", "could", "should", "may", "might", "must", "can",
        "this", "that", "these", "those", "i", "you", "he", "she",
        "it", "we", "they",
    })

    @staticmethod
    def generate_id(prefix: str = "") -> str:
        """Generate a unique ID (uuid4), optionally prefixed."""
        unique_id = str(uuid.uuid4())
        return f"{prefix}{unique_id}" if prefix else unique_id

    @staticmethod
    def truncate_text(text: str, max_length: int = 100, suffix: str = "...") -> str:
        """Truncate text to at most max_length characters, appending suffix.

        Returns text unchanged when it already fits. Bug fix: when
        max_length is not larger than the suffix, a plain hard cut is
        returned instead of the previous nonsensical negative slice
        (text[:max_length - len(suffix)] with a negative index).
        """
        if len(text) <= max_length:
            return text
        if max_length <= len(suffix):
            return text[:max_length]
        return text[: max_length - len(suffix)] + suffix

    @staticmethod
    def sanitize_filename(filename: str) -> str:
        """Sanitize filename for safe file system usage."""
        import re

        # Replace characters invalid on common file systems
        sanitized = re.sub(r'[<>:"/\\|?*]', "_", filename)
        # Strip leading/trailing spaces and dots (problematic on Windows)
        sanitized = sanitized.strip(" .")
        # Never return an empty name
        return sanitized or "unnamed"

    @staticmethod
    def hash_text(text: str, algorithm: str = "sha256") -> str:
        """Return the hex digest of text (UTF-8 encoded).

        Raises ValueError (from hashlib.new) for unknown algorithm names.
        """
        hash_obj = hashlib.new(algorithm)
        hash_obj.update(text.encode("utf-8"))
        return hash_obj.hexdigest()

    @staticmethod
    def extract_keywords(text: str, max_keywords: int = 10) -> List[str]:
        """Extract up to max_keywords keywords from text (simple heuristic).

        Words of 3+ letters are lowercased, stopwords removed, and the
        remainder sorted longest-first. Bug fix: de-duplication now
        preserves first-seen order (dict.fromkeys) so results are
        deterministic; the previous set-based dedupe yielded a
        nondeterministic order among words of equal length.
        """
        import re

        words = re.findall(r"\b[a-zA-Z]{3,}\b", text.lower())

        # dict.fromkeys dedupes while keeping first-seen order
        keywords = [w for w in dict.fromkeys(words) if w not in StringUtils._STOPWORDS]

        # Longer words are often more meaningful; sort is stable so ties
        # keep their original text order
        keywords.sort(key=len, reverse=True)

        return keywords[:max_keywords]
131
+
132
+
133
class DateTimeUtils:
    """Date and time utilities.

    NOTE(review): all helpers operate on naive local datetimes; do not mix
    with timezone-aware values (comparison would raise TypeError).
    """

    @staticmethod
    def now() -> datetime:
        """Get current local datetime (naive)."""
        return datetime.now()

    @staticmethod
    def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str:
        """Format datetime to string."""
        return dt.strftime(format_str)

    @staticmethod
    def parse_datetime(dt_str: str, format_str: str = "%Y-%m-%d %H:%M:%S") -> datetime:
        """Parse datetime from string. Raises ValueError on a format mismatch."""
        return datetime.strptime(dt_str, format_str)

    @staticmethod
    def add_days(dt: datetime, days: int) -> datetime:
        """Add days to datetime."""
        return dt + timedelta(days=days)

    @staticmethod
    def subtract_days(dt: datetime, days: int) -> datetime:
        """Subtract days from datetime."""
        return dt - timedelta(days=days)

    @staticmethod
    def is_expired(dt: datetime, expiry_hours: int = 24) -> bool:
        """Check whether dt is more than expiry_hours in the past."""
        return datetime.now() > dt + timedelta(hours=expiry_hours)

    @staticmethod
    def time_ago_string(dt: datetime) -> str:
        """Generate a human-readable "time ago" string for dt.

        Bug fixes: exact one-hour/one-minute boundaries now report the
        larger unit ('>=' instead of '>'), and a dt in the future no longer
        produces garbage from a negative timedelta (timedelta normalization
        makes .seconds large and positive for negative deltas).
        """
        diff = datetime.now() - dt

        # Future timestamps (clock skew, bad input): treat as "just now"
        if diff.total_seconds() < 0:
            return "just now"

        if diff.days > 0:
            return f"{diff.days} day{'s' if diff.days != 1 else ''} ago"
        elif diff.seconds >= 3600:
            hours = diff.seconds // 3600
            return f"{hours} hour{'s' if hours != 1 else ''} ago"
        elif diff.seconds >= 60:
            minutes = diff.seconds // 60
            return f"{minutes} minute{'s' if minutes != 1 else ''} ago"
        else:
            return "just now"
182
+
183
+
184
class JsonUtils:
    """JSON handling utilities."""

    @staticmethod
    def safe_loads(json_str: str, default: Any = None) -> Any:
        """Safely load a JSON string; return default on empty/invalid input."""
        try:
            return json.loads(json_str) if json_str else default
        except (json.JSONDecodeError, TypeError):
            return default

    @staticmethod
    def safe_dumps(obj: Any, default: Any = None, indent: int = 2) -> str:
        """Safely dump obj to a JSON string, never raising.

        Non-serializable values inside obj are stringified via default=str.
        If serialization still fails (e.g. circular references), the
        `default` fallback value is dumped instead. Bug fix: a falsy
        fallback such as [] or 0 is no longer silently replaced by {}
        (the previous `default or {}` mishandled falsy values), and the
        fallback dump itself is guarded.
        """
        try:
            return json.dumps(obj, default=str, indent=indent, ensure_ascii=False)
        except (TypeError, ValueError):
            fallback = {} if default is None else default
            try:
                return json.dumps(fallback, indent=indent)
            except (TypeError, ValueError):
                # Fallback itself unserializable — emit an empty object
                return json.dumps({}, indent=indent)

    @staticmethod
    def merge_dicts(base: Dict[str, Any], override: Dict[str, Any]) -> Dict[str, Any]:
        """Deep-merge override into a copy of base.

        Nested dicts are merged recursively; any other value in override
        replaces the one in base. Neither input dict is mutated.
        """
        result = base.copy()

        for key, value in override.items():
            if (
                key in result
                and isinstance(result[key], dict)
                and isinstance(value, dict)
            ):
                result[key] = JsonUtils.merge_dicts(result[key], value)
            else:
                result[key] = value

        return result
219
+
220
+
221
class FileUtils:
    """File handling utilities."""

    @staticmethod
    def ensure_directory(path: Union[str, Path]) -> Path:
        """Ensure directory exists (creating parents), return it as a Path."""
        path = Path(path)
        path.mkdir(parents=True, exist_ok=True)
        return path

    @staticmethod
    def safe_read_text(
        file_path: Union[str, Path], encoding: str = "utf-8"
    ) -> Optional[str]:
        """Read a text file, returning None instead of raising on failure.

        Bug fix: catches OSError generally (covers IsADirectoryError,
        interrupted I/O, too-many-open-files, ...) rather than only
        FileNotFoundError/PermissionError, so "safe" really means safe.
        """
        try:
            return Path(file_path).read_text(encoding=encoding)
        except (OSError, UnicodeDecodeError):
            return None

    @staticmethod
    def safe_write_text(
        file_path: Union[str, Path], content: str, encoding: str = "utf-8"
    ) -> bool:
        """Write a text file (creating parent dirs); return True on success.

        Bug fix: catches OSError generally instead of only PermissionError,
        so e.g. writing under a path whose parent is a regular file returns
        False rather than raising.
        """
        try:
            path = Path(file_path)
            path.parent.mkdir(parents=True, exist_ok=True)
            path.write_text(content, encoding=encoding)
            return True
        except (OSError, UnicodeEncodeError):
            return False

    @staticmethod
    def get_file_size(file_path: Union[str, Path]) -> int:
        """Return file size in bytes, or 0 when the file is inaccessible."""
        try:
            return Path(file_path).stat().st_size
        except OSError:
            return 0

    @staticmethod
    def is_file_recent(file_path: Union[str, Path], hours: int = 24) -> bool:
        """Check whether the file was modified within the last `hours` hours."""
        try:
            mtime = datetime.fromtimestamp(Path(file_path).stat().st_mtime)
            return not DateTimeUtils.is_expired(mtime, hours)
        except OSError:
            return False
270
+
271
+
272
class RetryUtils:
    """Retry and error handling utilities."""

    @staticmethod
    def retry_on_exception(
        max_attempts: int = 3,
        delay: float = 1.0,
        backoff: float = 2.0,
        exceptions: tuple = (Exception,),
    ) -> Callable:
        """Decorator factory retrying a sync function on the given exceptions.

        Sleeps delay * backoff**attempt between attempts; once max_attempts
        is exhausted the last exception is re-raised.
        """
        import time  # hoisted: previously imported inside the retry loop

        def decorator(func: Callable[..., T]) -> Callable[..., T]:
            @functools.wraps(func)
            def wrapper(*args, **kwargs) -> T:
                last_exception = None

                for attempt in range(max_attempts):
                    try:
                        return func(*args, **kwargs)
                    except exceptions as e:
                        last_exception = e
                        if attempt < max_attempts - 1:
                            # Exponential backoff before the next attempt
                            time.sleep(delay * (backoff**attempt))

                # All attempts failed: surface the last error
                if last_exception:
                    raise last_exception

                # Unreachable unless max_attempts < 1
                raise MemoriError("Retry attempts exhausted")

            return wrapper

        return decorator

    @staticmethod
    def async_retry_on_exception(
        max_attempts: int = 3,
        delay: float = 1.0,
        backoff: float = 2.0,
        exceptions: tuple = (Exception,),
    ) -> Callable:
        """Decorator factory retrying an async function on the given exceptions.

        Bug fix: this factory was previously declared `async def`, so
        `@RetryUtils.async_retry_on_exception(...)` evaluated to a coroutine
        object and "decorating" with it raised TypeError at call time. A
        decorator factory must be a plain callable returning the decorator;
        only the inner wrapper is async.
        """

        def decorator(func: Callable[..., Awaitable[T]]) -> Callable[..., Awaitable[T]]:
            @functools.wraps(func)
            async def wrapper(*args, **kwargs) -> T:
                last_exception = None

                for attempt in range(max_attempts):
                    try:
                        return await func(*args, **kwargs)
                    except exceptions as e:
                        last_exception = e
                        if attempt < max_attempts - 1:
                            # Non-blocking exponential backoff
                            await asyncio.sleep(delay * (backoff**attempt))

                if last_exception:
                    raise last_exception

                raise MemoriError("Async retry attempts exhausted")

            return wrapper

        return decorator
344
+
345
+
346
class PerformanceUtils:
    """Performance monitoring utilities."""

    @staticmethod
    def time_function(func: Callable[..., T]) -> Callable[..., T]:
        """Decorator logging how long each call of func takes.

        Logs at DEBUG on success and ERROR on failure (the exception is
        re-raised). Improvements: uses time.perf_counter() — the monotonic,
        high-resolution clock intended for interval timing — instead of
        time.time(), and the previously copy-pasted success/failure
        timing+logging paths are collapsed into one.
        """

        @functools.wraps(func)
        def wrapper(*args, **kwargs) -> T:
            import time

            from .logging import get_logger

            logger = get_logger("performance")
            start = time.perf_counter()
            try:
                result = func(*args, **kwargs)
            except Exception as e:
                elapsed = time.perf_counter() - start
                logger.error(f"{func.__name__} failed after {elapsed:.4f} seconds: {e}")
                raise
            elapsed = time.perf_counter() - start
            logger.debug(f"{func.__name__} executed in {elapsed:.4f} seconds")
            return result

        return wrapper

    @staticmethod
    def memory_usage() -> Dict[str, float]:
        """Get current process memory usage via psutil.

        Returns {"error": <message>} (string value) when psutil is missing
        or the query fails, so callers never see an exception.
        """
        try:
            import psutil

            process = psutil.Process()
            memory_info = process.memory_info()

            return {
                "rss_mb": memory_info.rss / 1024 / 1024,  # resident set size
                "vms_mb": memory_info.vms / 1024 / 1024,  # virtual memory size
                "percent": process.memory_percent(),
            }
        except ImportError:
            return {"error": "psutil not available"}
        except Exception as e:
            return {"error": str(e)}
406
+
407
+
408
class AsyncUtils:
    """Async utilities."""

    @staticmethod
    async def run_in_executor(func: Callable[..., T], *args, **kwargs) -> T:
        """Run a synchronous function in the default executor.

        Bug fix: uses asyncio.get_running_loop() — this coroutine only
        executes under a running loop, and get_event_loop() is deprecated
        for this purpose (and could create a stray loop on a non-main
        thread).
        """
        loop = asyncio.get_running_loop()
        # loop.run_in_executor passes positional args only, so keyword
        # args are bound into the callable up front via functools.partial
        return await loop.run_in_executor(
            None, functools.partial(func, **kwargs), *args
        )

    @staticmethod
    async def gather_with_concurrency(limit: int, *tasks) -> List[Any]:
        """Run awaitables concurrently with at most `limit` in flight.

        Results are returned in the order the awaitables were passed
        (asyncio.gather semantics).
        """
        semaphore = asyncio.Semaphore(limit)

        async def sem_task(task):
            # The semaphore caps how many awaitables run at once
            async with semaphore:
                return await task

        return await asyncio.gather(*[sem_task(task) for task in tasks])

    @staticmethod
    def create_task_with_timeout(coro: Awaitable[T], timeout: float) -> asyncio.Task:
        """Schedule coro as a task cancelled after `timeout` seconds.

        Must be called while an event loop is running; awaiting the task
        raises TimeoutError (via asyncio.wait_for) when the timeout elapses.
        """
        return asyncio.create_task(asyncio.wait_for(coro, timeout=timeout))
@@ -0,0 +1,204 @@
1
+ """
2
+ Centralized logging configuration for Memoriai
3
+ """
4
+
5
+ import sys
6
+ from pathlib import Path
7
+ from typing import Any, Dict, Optional
8
+
9
+ from loguru import logger
10
+
11
+ from ..config.settings import LoggingSettings, LogLevel
12
+ from .exceptions import ConfigurationError
13
+
14
+
15
class LoggingManager:
    """Centralized logging management built on loguru.

    All state is class-level, so there is exactly one logging configuration
    per process. setup_logging() installs console and (optionally) file
    sinks derived from a LoggingSettings instance; verbose mode
    aggressively silences the stdlib logging machinery (see
    _disable_other_loggers).
    """

    # Whether setup_logging() has completed at least once
    _initialized = False
    # The settings last applied by setup_logging(), if any
    _current_config: Optional[LoggingSettings] = None

    @classmethod
    def setup_logging(cls, settings: LoggingSettings, verbose: bool = False) -> None:
        """Setup logging configuration.

        Args:
            settings: sink levels, format, file path, rotation/retention.
            verbose: when True, disable all non-loguru loggers and emit full
                DEBUG output to stderr; when False, console output is
                limited to WARNING and above with a simplified format.

        Raises:
            ConfigurationError: if any sink fails to initialize.
        """
        try:
            # Remove default handler if it exists.
            # NOTE(review): logger.remove() only runs on the FIRST call, so
            # calling setup_logging() again stacks additional console sinks;
            # update_log_level() works around this by removing handlers
            # itself before re-invoking — confirm repeated direct calls are
            # intended.
            if not cls._initialized:
                logger.remove()

            if verbose:
                # When verbose mode is enabled, disable all other loggers and show only loguru
                cls._disable_other_loggers()

                # Configure console logging with DEBUG level and full formatting
                logger.add(
                    sys.stderr,
                    level="DEBUG",
                    format=settings.format,
                    colorize=True,
                    backtrace=True,  # full stack traces past the catch point
                    diagnose=True,  # annotate traces with variable values
                )
            else:
                # When verbose is False, minimize loguru output to essential logs only
                logger.add(
                    sys.stderr,
                    level="WARNING",  # Only show warnings and errors
                    format="<level>{level}</level>: {message}",  # Simplified format
                    colorize=False,
                    backtrace=False,
                    diagnose=False,
                )

            # Configure file logging if enabled
            if settings.log_to_file:
                log_path = Path(settings.log_file_path)
                log_path.parent.mkdir(parents=True, exist_ok=True)

                if settings.structured_logging:
                    # JSON structured logging (serialize=True emits one JSON
                    # record per line; the format string becomes the record text)
                    logger.add(
                        log_path,
                        level=settings.level.value,
                        format="{time:YYYY-MM-DD HH:mm:ss.SSS} | {level} | {name}:{function}:{line} | {message}",
                        rotation=settings.log_rotation,
                        retention=settings.log_retention,
                        compression=settings.log_compression,
                        serialize=True,
                    )
                else:
                    # Regular text logging with the user-configured format
                    logger.add(
                        log_path,
                        level=settings.level.value,
                        format=settings.format,
                        rotation=settings.log_rotation,
                        retention=settings.log_retention,
                        compression=settings.log_compression,
                    )

            cls._initialized = True
            cls._current_config = settings
            logger.info("Logging configuration initialized")

        except Exception as e:
            # Wrap any sink/filesystem failure in the package's config error
            raise ConfigurationError(f"Failed to setup logging: {e}") from e

    @classmethod
    def get_logger(cls, name: str) -> "logger":
        """Get a logger instance with the given name.

        The name is attached via logger.bind(), so it is available to sink
        format strings as {extra[name]}.
        """
        return logger.bind(name=name)

    @classmethod
    def update_log_level(cls, level: LogLevel) -> None:
        """Update the logging level by rebuilding all sinks.

        Raises:
            ConfigurationError: if setup_logging() was never called.

        NOTE(review): if _current_config is somehow None here, handlers are
        removed but never re-added, leaving logging silent — confirm that
        state is unreachable.
        """
        if not cls._initialized:
            raise ConfigurationError("Logging not initialized")

        try:
            # Remove existing handlers and recreate with new level
            logger.remove()

            if cls._current_config:
                cls._current_config.level = level
                cls.setup_logging(cls._current_config)

        except Exception as e:
            # Best-effort: failure to reconfigure is logged, not raised
            logger.error(f"Failed to update log level: {e}")

    @classmethod
    def add_custom_handler(cls, handler_config: Dict[str, Any]) -> None:
        """Add a custom logging handler.

        Args:
            handler_config: keyword arguments forwarded verbatim to
                loguru's logger.add() (sink, level, format, ...).
        """
        try:
            logger.add(**handler_config)
            logger.debug(f"Added custom logging handler: {handler_config}")
        except Exception as e:
            # Best-effort: an invalid handler config is logged, not raised
            logger.error(f"Failed to add custom handler: {e}")

    @classmethod
    def is_initialized(cls) -> bool:
        """Check if logging is initialized."""
        return cls._initialized

    @classmethod
    def get_current_config(cls) -> Optional[LoggingSettings]:
        """Get current logging configuration (None before setup_logging)."""
        return cls._current_config

    @classmethod
    def _disable_other_loggers(cls) -> None:
        """Disable all other loggers when verbose mode is enabled.

        WARNING(review): this is deliberately heavy-handed and process-wide:
        it disables the stdlib root logger and every existing logger,
        suppresses the warnings module, and monkeypatches
        logging.basicConfig and logging.getLogger so loggers created LATER
        are also disabled. None of this is reverted by a later
        setup_logging(verbose=False) call — confirm that irreversibility
        is acceptable.
        """
        import logging

        # Set the root logger to CRITICAL and disable it
        root_logger = logging.getLogger()
        root_logger.setLevel(logging.CRITICAL)
        root_logger.disabled = True

        # Remove all handlers from the root logger
        for handler in root_logger.handlers[:]:
            root_logger.removeHandler(handler)

        # Disable common third-party library loggers explicitly (covers
        # libraries that configure their own loggers at import time)
        third_party_loggers = [
            "urllib3",
            "requests",
            "httpx",
            "httpcore",
            "openai",
            "anthropic",
            "litellm",
            "sqlalchemy",
            "alembic",
            "asyncio",
            "concurrent.futures",
            "charset_normalizer",
            "certifi",
            "idna",
        ]

        for logger_name in third_party_loggers:
            lib_logger = logging.getLogger(logger_name)
            lib_logger.disabled = True
            lib_logger.setLevel(logging.CRITICAL)
            # Remove all handlers
            for handler in lib_logger.handlers[:]:
                lib_logger.removeHandler(handler)

        # Set all existing loggers to CRITICAL level and disable them
        # (loggerDict holds every logger created so far in this process)
        for name in list(logging.Logger.manager.loggerDict.keys()):
            existing_logger = logging.getLogger(name)
            existing_logger.setLevel(logging.CRITICAL)
            existing_logger.disabled = True
            # Remove all handlers
            for handler in existing_logger.handlers[:]:
                existing_logger.removeHandler(handler)

        # Also disable warnings from the warnings module
        import warnings

        warnings.filterwarnings("ignore")

        # Override the logging module's basicConfig to prevent new loggers
        # from attaching handlers later
        def disabled_basicConfig(*args, **kwargs):
            pass

        logging.basicConfig = disabled_basicConfig

        # Override the getLogger function so loggers created after this
        # point are disabled immediately on creation
        original_getLogger = logging.getLogger

        def disabled_getLogger(name=None):
            logger_instance = original_getLogger(name)
            logger_instance.disabled = True
            logger_instance.setLevel(logging.CRITICAL)
            for handler in logger_instance.handlers[:]:
                logger_instance.removeHandler(handler)
            return logger_instance

        logging.getLogger = disabled_getLogger
200
+
201
+
202
def get_logger(name: str = "memori") -> "logger":
    """Module-level shortcut for LoggingManager.get_logger()."""
    bound_logger = LoggingManager.get_logger(name)
    return bound_logger