aiecs 1.5.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (302) hide show
  1. aiecs/__init__.py +72 -0
  2. aiecs/__main__.py +41 -0
  3. aiecs/aiecs_client.py +469 -0
  4. aiecs/application/__init__.py +10 -0
  5. aiecs/application/executors/__init__.py +10 -0
  6. aiecs/application/executors/operation_executor.py +363 -0
  7. aiecs/application/knowledge_graph/__init__.py +7 -0
  8. aiecs/application/knowledge_graph/builder/__init__.py +37 -0
  9. aiecs/application/knowledge_graph/builder/document_builder.py +375 -0
  10. aiecs/application/knowledge_graph/builder/graph_builder.py +356 -0
  11. aiecs/application/knowledge_graph/builder/schema_mapping.py +531 -0
  12. aiecs/application/knowledge_graph/builder/structured_pipeline.py +443 -0
  13. aiecs/application/knowledge_graph/builder/text_chunker.py +319 -0
  14. aiecs/application/knowledge_graph/extractors/__init__.py +27 -0
  15. aiecs/application/knowledge_graph/extractors/base.py +100 -0
  16. aiecs/application/knowledge_graph/extractors/llm_entity_extractor.py +327 -0
  17. aiecs/application/knowledge_graph/extractors/llm_relation_extractor.py +349 -0
  18. aiecs/application/knowledge_graph/extractors/ner_entity_extractor.py +244 -0
  19. aiecs/application/knowledge_graph/fusion/__init__.py +23 -0
  20. aiecs/application/knowledge_graph/fusion/entity_deduplicator.py +387 -0
  21. aiecs/application/knowledge_graph/fusion/entity_linker.py +343 -0
  22. aiecs/application/knowledge_graph/fusion/knowledge_fusion.py +580 -0
  23. aiecs/application/knowledge_graph/fusion/relation_deduplicator.py +189 -0
  24. aiecs/application/knowledge_graph/pattern_matching/__init__.py +21 -0
  25. aiecs/application/knowledge_graph/pattern_matching/pattern_matcher.py +344 -0
  26. aiecs/application/knowledge_graph/pattern_matching/query_executor.py +378 -0
  27. aiecs/application/knowledge_graph/profiling/__init__.py +12 -0
  28. aiecs/application/knowledge_graph/profiling/query_plan_visualizer.py +199 -0
  29. aiecs/application/knowledge_graph/profiling/query_profiler.py +223 -0
  30. aiecs/application/knowledge_graph/reasoning/__init__.py +27 -0
  31. aiecs/application/knowledge_graph/reasoning/evidence_synthesis.py +347 -0
  32. aiecs/application/knowledge_graph/reasoning/inference_engine.py +504 -0
  33. aiecs/application/knowledge_graph/reasoning/logic_form_parser.py +167 -0
  34. aiecs/application/knowledge_graph/reasoning/logic_parser/__init__.py +79 -0
  35. aiecs/application/knowledge_graph/reasoning/logic_parser/ast_builder.py +513 -0
  36. aiecs/application/knowledge_graph/reasoning/logic_parser/ast_nodes.py +630 -0
  37. aiecs/application/knowledge_graph/reasoning/logic_parser/ast_validator.py +654 -0
  38. aiecs/application/knowledge_graph/reasoning/logic_parser/error_handler.py +477 -0
  39. aiecs/application/knowledge_graph/reasoning/logic_parser/parser.py +390 -0
  40. aiecs/application/knowledge_graph/reasoning/logic_parser/query_context.py +217 -0
  41. aiecs/application/knowledge_graph/reasoning/logic_query_integration.py +169 -0
  42. aiecs/application/knowledge_graph/reasoning/query_planner.py +872 -0
  43. aiecs/application/knowledge_graph/reasoning/reasoning_engine.py +554 -0
  44. aiecs/application/knowledge_graph/retrieval/__init__.py +19 -0
  45. aiecs/application/knowledge_graph/retrieval/retrieval_strategies.py +596 -0
  46. aiecs/application/knowledge_graph/search/__init__.py +59 -0
  47. aiecs/application/knowledge_graph/search/hybrid_search.py +423 -0
  48. aiecs/application/knowledge_graph/search/reranker.py +295 -0
  49. aiecs/application/knowledge_graph/search/reranker_strategies.py +553 -0
  50. aiecs/application/knowledge_graph/search/text_similarity.py +398 -0
  51. aiecs/application/knowledge_graph/traversal/__init__.py +15 -0
  52. aiecs/application/knowledge_graph/traversal/enhanced_traversal.py +329 -0
  53. aiecs/application/knowledge_graph/traversal/path_scorer.py +269 -0
  54. aiecs/application/knowledge_graph/validators/__init__.py +13 -0
  55. aiecs/application/knowledge_graph/validators/relation_validator.py +189 -0
  56. aiecs/application/knowledge_graph/visualization/__init__.py +11 -0
  57. aiecs/application/knowledge_graph/visualization/graph_visualizer.py +321 -0
  58. aiecs/common/__init__.py +9 -0
  59. aiecs/common/knowledge_graph/__init__.py +17 -0
  60. aiecs/common/knowledge_graph/runnable.py +484 -0
  61. aiecs/config/__init__.py +16 -0
  62. aiecs/config/config.py +498 -0
  63. aiecs/config/graph_config.py +137 -0
  64. aiecs/config/registry.py +23 -0
  65. aiecs/core/__init__.py +46 -0
  66. aiecs/core/interface/__init__.py +34 -0
  67. aiecs/core/interface/execution_interface.py +152 -0
  68. aiecs/core/interface/storage_interface.py +171 -0
  69. aiecs/domain/__init__.py +289 -0
  70. aiecs/domain/agent/__init__.py +189 -0
  71. aiecs/domain/agent/base_agent.py +697 -0
  72. aiecs/domain/agent/exceptions.py +103 -0
  73. aiecs/domain/agent/graph_aware_mixin.py +559 -0
  74. aiecs/domain/agent/hybrid_agent.py +490 -0
  75. aiecs/domain/agent/integration/__init__.py +26 -0
  76. aiecs/domain/agent/integration/context_compressor.py +222 -0
  77. aiecs/domain/agent/integration/context_engine_adapter.py +252 -0
  78. aiecs/domain/agent/integration/retry_policy.py +219 -0
  79. aiecs/domain/agent/integration/role_config.py +213 -0
  80. aiecs/domain/agent/knowledge_aware_agent.py +646 -0
  81. aiecs/domain/agent/lifecycle.py +296 -0
  82. aiecs/domain/agent/llm_agent.py +300 -0
  83. aiecs/domain/agent/memory/__init__.py +12 -0
  84. aiecs/domain/agent/memory/conversation.py +197 -0
  85. aiecs/domain/agent/migration/__init__.py +14 -0
  86. aiecs/domain/agent/migration/conversion.py +160 -0
  87. aiecs/domain/agent/migration/legacy_wrapper.py +90 -0
  88. aiecs/domain/agent/models.py +317 -0
  89. aiecs/domain/agent/observability.py +407 -0
  90. aiecs/domain/agent/persistence.py +289 -0
  91. aiecs/domain/agent/prompts/__init__.py +29 -0
  92. aiecs/domain/agent/prompts/builder.py +161 -0
  93. aiecs/domain/agent/prompts/formatters.py +189 -0
  94. aiecs/domain/agent/prompts/template.py +255 -0
  95. aiecs/domain/agent/registry.py +260 -0
  96. aiecs/domain/agent/tool_agent.py +257 -0
  97. aiecs/domain/agent/tools/__init__.py +12 -0
  98. aiecs/domain/agent/tools/schema_generator.py +221 -0
  99. aiecs/domain/community/__init__.py +155 -0
  100. aiecs/domain/community/agent_adapter.py +477 -0
  101. aiecs/domain/community/analytics.py +481 -0
  102. aiecs/domain/community/collaborative_workflow.py +642 -0
  103. aiecs/domain/community/communication_hub.py +645 -0
  104. aiecs/domain/community/community_builder.py +320 -0
  105. aiecs/domain/community/community_integration.py +800 -0
  106. aiecs/domain/community/community_manager.py +813 -0
  107. aiecs/domain/community/decision_engine.py +879 -0
  108. aiecs/domain/community/exceptions.py +225 -0
  109. aiecs/domain/community/models/__init__.py +33 -0
  110. aiecs/domain/community/models/community_models.py +268 -0
  111. aiecs/domain/community/resource_manager.py +457 -0
  112. aiecs/domain/community/shared_context_manager.py +603 -0
  113. aiecs/domain/context/__init__.py +58 -0
  114. aiecs/domain/context/context_engine.py +989 -0
  115. aiecs/domain/context/conversation_models.py +354 -0
  116. aiecs/domain/context/graph_memory.py +467 -0
  117. aiecs/domain/execution/__init__.py +12 -0
  118. aiecs/domain/execution/model.py +57 -0
  119. aiecs/domain/knowledge_graph/__init__.py +19 -0
  120. aiecs/domain/knowledge_graph/models/__init__.py +52 -0
  121. aiecs/domain/knowledge_graph/models/entity.py +130 -0
  122. aiecs/domain/knowledge_graph/models/evidence.py +194 -0
  123. aiecs/domain/knowledge_graph/models/inference_rule.py +186 -0
  124. aiecs/domain/knowledge_graph/models/path.py +179 -0
  125. aiecs/domain/knowledge_graph/models/path_pattern.py +173 -0
  126. aiecs/domain/knowledge_graph/models/query.py +272 -0
  127. aiecs/domain/knowledge_graph/models/query_plan.py +187 -0
  128. aiecs/domain/knowledge_graph/models/relation.py +136 -0
  129. aiecs/domain/knowledge_graph/schema/__init__.py +23 -0
  130. aiecs/domain/knowledge_graph/schema/entity_type.py +135 -0
  131. aiecs/domain/knowledge_graph/schema/graph_schema.py +271 -0
  132. aiecs/domain/knowledge_graph/schema/property_schema.py +155 -0
  133. aiecs/domain/knowledge_graph/schema/relation_type.py +171 -0
  134. aiecs/domain/knowledge_graph/schema/schema_manager.py +496 -0
  135. aiecs/domain/knowledge_graph/schema/type_enums.py +205 -0
  136. aiecs/domain/task/__init__.py +13 -0
  137. aiecs/domain/task/dsl_processor.py +613 -0
  138. aiecs/domain/task/model.py +62 -0
  139. aiecs/domain/task/task_context.py +268 -0
  140. aiecs/infrastructure/__init__.py +24 -0
  141. aiecs/infrastructure/graph_storage/__init__.py +11 -0
  142. aiecs/infrastructure/graph_storage/base.py +601 -0
  143. aiecs/infrastructure/graph_storage/batch_operations.py +449 -0
  144. aiecs/infrastructure/graph_storage/cache.py +429 -0
  145. aiecs/infrastructure/graph_storage/distributed.py +226 -0
  146. aiecs/infrastructure/graph_storage/error_handling.py +390 -0
  147. aiecs/infrastructure/graph_storage/graceful_degradation.py +306 -0
  148. aiecs/infrastructure/graph_storage/health_checks.py +378 -0
  149. aiecs/infrastructure/graph_storage/in_memory.py +514 -0
  150. aiecs/infrastructure/graph_storage/index_optimization.py +483 -0
  151. aiecs/infrastructure/graph_storage/lazy_loading.py +410 -0
  152. aiecs/infrastructure/graph_storage/metrics.py +357 -0
  153. aiecs/infrastructure/graph_storage/migration.py +413 -0
  154. aiecs/infrastructure/graph_storage/pagination.py +471 -0
  155. aiecs/infrastructure/graph_storage/performance_monitoring.py +466 -0
  156. aiecs/infrastructure/graph_storage/postgres.py +871 -0
  157. aiecs/infrastructure/graph_storage/query_optimizer.py +635 -0
  158. aiecs/infrastructure/graph_storage/schema_cache.py +290 -0
  159. aiecs/infrastructure/graph_storage/sqlite.py +623 -0
  160. aiecs/infrastructure/graph_storage/streaming.py +495 -0
  161. aiecs/infrastructure/messaging/__init__.py +13 -0
  162. aiecs/infrastructure/messaging/celery_task_manager.py +383 -0
  163. aiecs/infrastructure/messaging/websocket_manager.py +298 -0
  164. aiecs/infrastructure/monitoring/__init__.py +34 -0
  165. aiecs/infrastructure/monitoring/executor_metrics.py +174 -0
  166. aiecs/infrastructure/monitoring/global_metrics_manager.py +213 -0
  167. aiecs/infrastructure/monitoring/structured_logger.py +48 -0
  168. aiecs/infrastructure/monitoring/tracing_manager.py +410 -0
  169. aiecs/infrastructure/persistence/__init__.py +24 -0
  170. aiecs/infrastructure/persistence/context_engine_client.py +187 -0
  171. aiecs/infrastructure/persistence/database_manager.py +333 -0
  172. aiecs/infrastructure/persistence/file_storage.py +754 -0
  173. aiecs/infrastructure/persistence/redis_client.py +220 -0
  174. aiecs/llm/__init__.py +86 -0
  175. aiecs/llm/callbacks/__init__.py +11 -0
  176. aiecs/llm/callbacks/custom_callbacks.py +264 -0
  177. aiecs/llm/client_factory.py +420 -0
  178. aiecs/llm/clients/__init__.py +33 -0
  179. aiecs/llm/clients/base_client.py +193 -0
  180. aiecs/llm/clients/googleai_client.py +181 -0
  181. aiecs/llm/clients/openai_client.py +131 -0
  182. aiecs/llm/clients/vertex_client.py +437 -0
  183. aiecs/llm/clients/xai_client.py +184 -0
  184. aiecs/llm/config/__init__.py +51 -0
  185. aiecs/llm/config/config_loader.py +275 -0
  186. aiecs/llm/config/config_validator.py +236 -0
  187. aiecs/llm/config/model_config.py +151 -0
  188. aiecs/llm/utils/__init__.py +10 -0
  189. aiecs/llm/utils/validate_config.py +91 -0
  190. aiecs/main.py +363 -0
  191. aiecs/scripts/__init__.py +3 -0
  192. aiecs/scripts/aid/VERSION_MANAGEMENT.md +97 -0
  193. aiecs/scripts/aid/__init__.py +19 -0
  194. aiecs/scripts/aid/version_manager.py +215 -0
  195. aiecs/scripts/dependance_check/DEPENDENCY_SYSTEM_SUMMARY.md +242 -0
  196. aiecs/scripts/dependance_check/README_DEPENDENCY_CHECKER.md +310 -0
  197. aiecs/scripts/dependance_check/__init__.py +17 -0
  198. aiecs/scripts/dependance_check/dependency_checker.py +938 -0
  199. aiecs/scripts/dependance_check/dependency_fixer.py +391 -0
  200. aiecs/scripts/dependance_check/download_nlp_data.py +396 -0
  201. aiecs/scripts/dependance_check/quick_dependency_check.py +270 -0
  202. aiecs/scripts/dependance_check/setup_nlp_data.sh +217 -0
  203. aiecs/scripts/dependance_patch/__init__.py +7 -0
  204. aiecs/scripts/dependance_patch/fix_weasel/README_WEASEL_PATCH.md +126 -0
  205. aiecs/scripts/dependance_patch/fix_weasel/__init__.py +11 -0
  206. aiecs/scripts/dependance_patch/fix_weasel/fix_weasel_validator.py +128 -0
  207. aiecs/scripts/dependance_patch/fix_weasel/fix_weasel_validator.sh +82 -0
  208. aiecs/scripts/dependance_patch/fix_weasel/patch_weasel_library.sh +188 -0
  209. aiecs/scripts/dependance_patch/fix_weasel/run_weasel_patch.sh +41 -0
  210. aiecs/scripts/tools_develop/README.md +449 -0
  211. aiecs/scripts/tools_develop/TOOL_AUTO_DISCOVERY.md +234 -0
  212. aiecs/scripts/tools_develop/__init__.py +21 -0
  213. aiecs/scripts/tools_develop/check_type_annotations.py +259 -0
  214. aiecs/scripts/tools_develop/validate_tool_schemas.py +422 -0
  215. aiecs/scripts/tools_develop/verify_tools.py +356 -0
  216. aiecs/tasks/__init__.py +1 -0
  217. aiecs/tasks/worker.py +172 -0
  218. aiecs/tools/__init__.py +299 -0
  219. aiecs/tools/apisource/__init__.py +99 -0
  220. aiecs/tools/apisource/intelligence/__init__.py +19 -0
  221. aiecs/tools/apisource/intelligence/data_fusion.py +381 -0
  222. aiecs/tools/apisource/intelligence/query_analyzer.py +413 -0
  223. aiecs/tools/apisource/intelligence/search_enhancer.py +388 -0
  224. aiecs/tools/apisource/monitoring/__init__.py +9 -0
  225. aiecs/tools/apisource/monitoring/metrics.py +303 -0
  226. aiecs/tools/apisource/providers/__init__.py +115 -0
  227. aiecs/tools/apisource/providers/base.py +664 -0
  228. aiecs/tools/apisource/providers/census.py +401 -0
  229. aiecs/tools/apisource/providers/fred.py +564 -0
  230. aiecs/tools/apisource/providers/newsapi.py +412 -0
  231. aiecs/tools/apisource/providers/worldbank.py +357 -0
  232. aiecs/tools/apisource/reliability/__init__.py +12 -0
  233. aiecs/tools/apisource/reliability/error_handler.py +375 -0
  234. aiecs/tools/apisource/reliability/fallback_strategy.py +391 -0
  235. aiecs/tools/apisource/tool.py +850 -0
  236. aiecs/tools/apisource/utils/__init__.py +9 -0
  237. aiecs/tools/apisource/utils/validators.py +338 -0
  238. aiecs/tools/base_tool.py +201 -0
  239. aiecs/tools/docs/__init__.py +121 -0
  240. aiecs/tools/docs/ai_document_orchestrator.py +599 -0
  241. aiecs/tools/docs/ai_document_writer_orchestrator.py +2403 -0
  242. aiecs/tools/docs/content_insertion_tool.py +1333 -0
  243. aiecs/tools/docs/document_creator_tool.py +1317 -0
  244. aiecs/tools/docs/document_layout_tool.py +1166 -0
  245. aiecs/tools/docs/document_parser_tool.py +994 -0
  246. aiecs/tools/docs/document_writer_tool.py +1818 -0
  247. aiecs/tools/knowledge_graph/__init__.py +17 -0
  248. aiecs/tools/knowledge_graph/graph_reasoning_tool.py +734 -0
  249. aiecs/tools/knowledge_graph/graph_search_tool.py +923 -0
  250. aiecs/tools/knowledge_graph/kg_builder_tool.py +476 -0
  251. aiecs/tools/langchain_adapter.py +542 -0
  252. aiecs/tools/schema_generator.py +275 -0
  253. aiecs/tools/search_tool/__init__.py +100 -0
  254. aiecs/tools/search_tool/analyzers.py +589 -0
  255. aiecs/tools/search_tool/cache.py +260 -0
  256. aiecs/tools/search_tool/constants.py +128 -0
  257. aiecs/tools/search_tool/context.py +216 -0
  258. aiecs/tools/search_tool/core.py +749 -0
  259. aiecs/tools/search_tool/deduplicator.py +123 -0
  260. aiecs/tools/search_tool/error_handler.py +271 -0
  261. aiecs/tools/search_tool/metrics.py +371 -0
  262. aiecs/tools/search_tool/rate_limiter.py +178 -0
  263. aiecs/tools/search_tool/schemas.py +277 -0
  264. aiecs/tools/statistics/__init__.py +80 -0
  265. aiecs/tools/statistics/ai_data_analysis_orchestrator.py +643 -0
  266. aiecs/tools/statistics/ai_insight_generator_tool.py +505 -0
  267. aiecs/tools/statistics/ai_report_orchestrator_tool.py +694 -0
  268. aiecs/tools/statistics/data_loader_tool.py +564 -0
  269. aiecs/tools/statistics/data_profiler_tool.py +658 -0
  270. aiecs/tools/statistics/data_transformer_tool.py +573 -0
  271. aiecs/tools/statistics/data_visualizer_tool.py +495 -0
  272. aiecs/tools/statistics/model_trainer_tool.py +487 -0
  273. aiecs/tools/statistics/statistical_analyzer_tool.py +459 -0
  274. aiecs/tools/task_tools/__init__.py +86 -0
  275. aiecs/tools/task_tools/chart_tool.py +732 -0
  276. aiecs/tools/task_tools/classfire_tool.py +922 -0
  277. aiecs/tools/task_tools/image_tool.py +447 -0
  278. aiecs/tools/task_tools/office_tool.py +684 -0
  279. aiecs/tools/task_tools/pandas_tool.py +635 -0
  280. aiecs/tools/task_tools/report_tool.py +635 -0
  281. aiecs/tools/task_tools/research_tool.py +392 -0
  282. aiecs/tools/task_tools/scraper_tool.py +715 -0
  283. aiecs/tools/task_tools/stats_tool.py +688 -0
  284. aiecs/tools/temp_file_manager.py +130 -0
  285. aiecs/tools/tool_executor/__init__.py +37 -0
  286. aiecs/tools/tool_executor/tool_executor.py +881 -0
  287. aiecs/utils/LLM_output_structor.py +445 -0
  288. aiecs/utils/__init__.py +34 -0
  289. aiecs/utils/base_callback.py +47 -0
  290. aiecs/utils/cache_provider.py +695 -0
  291. aiecs/utils/execution_utils.py +184 -0
  292. aiecs/utils/logging.py +1 -0
  293. aiecs/utils/prompt_loader.py +14 -0
  294. aiecs/utils/token_usage_repository.py +323 -0
  295. aiecs/ws/__init__.py +0 -0
  296. aiecs/ws/socket_server.py +52 -0
  297. aiecs-1.5.1.dist-info/METADATA +608 -0
  298. aiecs-1.5.1.dist-info/RECORD +302 -0
  299. aiecs-1.5.1.dist-info/WHEEL +5 -0
  300. aiecs-1.5.1.dist-info/entry_points.txt +10 -0
  301. aiecs-1.5.1.dist-info/licenses/LICENSE +225 -0
  302. aiecs-1.5.1.dist-info/top_level.txt +1 -0
@@ -0,0 +1,989 @@
1
+ """
2
+ ContextEngine: Advanced Context and Session Management Engine
3
+
4
+ This engine extends TaskContext capabilities to provide comprehensive
5
+ session management, conversation tracking, and persistent storage for BaseAIService.
6
+
7
+ Key Features:
8
+ 1. Multi-session management (extends TaskContext from single task to multiple sessions)
9
+ 2. Redis backend storage for persistence and scalability
10
+ 3. Conversation history management with optimization
11
+ 4. Performance metrics and analytics
12
+ 5. Resource and lifecycle management
13
+ 6. Integration with BaseServiceCheckpointer
14
+ """
15
+
16
+ from aiecs.core.interface.storage_interface import (
17
+ IStorageBackend,
18
+ ICheckpointerBackend,
19
+ )
20
+ from aiecs.domain.task.task_context import TaskContext, ContextUpdate
21
+ import json
22
+ import logging
23
+ import uuid
24
+ from datetime import datetime, timedelta
25
+ from typing import Dict, Any, Optional, List
26
+ from dataclasses import dataclass, asdict
27
+
28
+
29
class DateTimeEncoder(json.JSONEncoder):
    """JSON encoder that serializes datetime objects as ISO-8601 strings."""

    def default(self, obj):
        # Anything that is not a datetime falls through to the base class,
        # which raises TypeError for unserializable values.
        if not isinstance(obj, datetime):
            return super().default(obj)
        return obj.isoformat()
36
+
37
+
38
+ # Import TaskContext for base functionality
39
+
40
+ # Import core storage interfaces
41
+
42
+ # Redis client import - use existing infrastructure
43
+ try:
44
+ import redis.asyncio as redis
45
+ from aiecs.infrastructure.persistence.redis_client import get_redis_client
46
+
47
+ REDIS_AVAILABLE = True
48
+ except ImportError:
49
+ redis = None
50
+ get_redis_client = None
51
+ REDIS_AVAILABLE = False
52
+
53
+ logger = logging.getLogger(__name__)
54
+
55
+
56
@dataclass
class SessionMetrics:
    """Per-session performance metrics and lifecycle state."""

    session_id: str
    user_id: str
    created_at: datetime
    last_activity: datetime
    request_count: int = 0
    error_count: int = 0
    total_processing_time: float = 0.0
    status: str = "active"  # active, completed, failed, expired

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-friendly dict (datetimes become ISO strings)."""
        payload = asdict(self)
        payload["created_at"] = self.created_at.isoformat()
        payload["last_activity"] = self.last_activity.isoformat()
        return payload

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "SessionMetrics":
        """Rebuild a SessionMetrics from a dict produced by ``to_dict``."""
        fields = dict(data)
        fields["created_at"] = datetime.fromisoformat(fields["created_at"])
        fields["last_activity"] = datetime.fromisoformat(fields["last_activity"])
        return cls(**fields)
82
+
83
+
84
@dataclass
class ConversationMessage:
    """Structured conversation message."""

    role: str  # user, assistant, system
    content: str
    timestamp: datetime
    # Fix: the default is None, so the annotation must be Optional (it was
    # previously `Dict[str, Any] = None`, which mistypes the field).
    # Serialization normalizes None to {} in to_dict().
    metadata: Optional[Dict[str, Any]] = None

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-friendly dict (timestamp becomes an ISO string)."""
        return {
            "role": self.role,
            "content": self.content,
            "timestamp": self.timestamp.isoformat(),
            "metadata": self.metadata or {},
        }

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "ConversationMessage":
        """Rebuild a ConversationMessage from a dict produced by ``to_dict``."""
        data = data.copy()
        data["timestamp"] = datetime.fromisoformat(data["timestamp"])
        return cls(**data)
106
+
107
+
108
+ class ContextEngine(IStorageBackend, ICheckpointerBackend):
109
+ """
110
+ Advanced Context and Session Management Engine.
111
+
112
+ Implements core storage interfaces to provide comprehensive session management
113
+ with Redis backend storage for BaseAIService and BaseServiceCheckpointer.
114
+
115
+ This implementation follows the middleware's core interface pattern,
116
+ enabling dependency inversion and clean architecture.
117
+ """
118
+
119
+ def __init__(self, use_existing_redis: bool = True):
120
+ """
121
+ Initialize ContextEngine.
122
+
123
+ Args:
124
+ use_existing_redis: Whether to use the existing Redis client from infrastructure
125
+ (已弃用: 现在总是创建独立的 RedisClient 实例以避免事件循环冲突)
126
+ """
127
+ self.use_existing_redis = use_existing_redis
128
+ self.redis_client: Optional[redis.Redis] = None
129
+ self._redis_client_wrapper: Optional[Any] = None # RedisClient 包装器实例
130
+
131
+ # Fallback to memory storage if Redis not available
132
+ self._memory_sessions: Dict[str, SessionMetrics] = {}
133
+ self._memory_conversations: Dict[str, List[ConversationMessage]] = {}
134
+ self._memory_contexts: Dict[str, TaskContext] = {}
135
+ self._memory_checkpoints: Dict[str, Dict[str, Any]] = {}
136
+
137
+ # Configuration
138
+ self.session_ttl = 3600 * 24 # 24 hours default TTL
139
+ self.conversation_limit = 1000 # Max messages per conversation
140
+ self.checkpoint_ttl = 3600 * 24 * 7 # 7 days for checkpoints
141
+
142
+ # Metrics
143
+ self._global_metrics = {
144
+ "total_sessions": 0,
145
+ "active_sessions": 0,
146
+ "total_messages": 0,
147
+ "total_checkpoints": 0,
148
+ }
149
+
150
+ logger.info("ContextEngine initialized")
151
+
152
    async def initialize(self) -> bool:
        """Initialize the Redis connection and validate the setup.

        Returns:
            True when storage is usable (Redis connected, or memory fallback
            because the redis package is absent); False when a Redis
            connection was attempted but failed (memory fallback is used).
        """
        if not REDIS_AVAILABLE:
            logger.warning("Redis not available, using memory storage")
            return True

        try:
            # Fix: create a new RedisClient instance in the *current* event loop.
            #
            # Root cause:
            # - The global RedisClient singleton is created in event loop A at
            #   application startup.
            # - ContextEngine may be initialized in a different event loop B
            #   (e.g. while handling a request).
            # - redis.asyncio connection pools are bound to the loop they were
            #   created in.
            # - Using them across loops raises "Task got Future attached to a
            #   different loop".
            #
            # Solution:
            # - Give each ContextEngine instance its own dedicated RedisClient.
            # - Keep the RedisClient wrapper for architectural consistency.
            # - Initialize it in the current loop so the loops always match.

            from aiecs.infrastructure.persistence.redis_client import (
                RedisClient,
            )

            # Create a dedicated RedisClient instance (in the current event loop).
            self._redis_client_wrapper = RedisClient()
            await self._redis_client_wrapper.initialize()

            # Obtain the underlying redis.Redis client for the existing code paths.
            self.redis_client = await self._redis_client_wrapper.get_client()

            # Test connection
            await self.redis_client.ping()
            logger.info(
                "ContextEngine connected to Redis successfully using RedisClient wrapper in current event loop"
            )
            return True

        except Exception as e:
            logger.error(f"Failed to connect to Redis: {e}")
            logger.warning("Falling back to memory storage")
            self.redis_client = None
            self._redis_client_wrapper = None
            return False
196
+
197
+ async def close(self):
198
+ """Close Redis connection."""
199
+ if hasattr(self, "_redis_client_wrapper") and self._redis_client_wrapper:
200
+ # 使用 RedisClient 包装器的 close 方法
201
+ await self._redis_client_wrapper.close()
202
+ self._redis_client_wrapper = None
203
+ self.redis_client = None
204
+ elif self.redis_client:
205
+ # 兼容性处理:直接关闭 redis 客户端
206
+ await self.redis_client.close()
207
+ self.redis_client = None
208
+
209
+ # ==================== Session Management ====================
210
+
211
+ async def create_session(
212
+ self, session_id: str, user_id: str, metadata: Dict[str, Any] = None
213
+ ) -> SessionMetrics:
214
+ """Create a new session."""
215
+ now = datetime.utcnow()
216
+ session = SessionMetrics(
217
+ session_id=session_id,
218
+ user_id=user_id,
219
+ created_at=now,
220
+ last_activity=now,
221
+ )
222
+
223
+ # Store session
224
+ await self._store_session(session)
225
+
226
+ # Create associated TaskContext
227
+ task_context = TaskContext(
228
+ {
229
+ "user_id": user_id,
230
+ "chat_id": session_id,
231
+ "metadata": metadata or {},
232
+ }
233
+ )
234
+ await self._store_task_context(session_id, task_context)
235
+
236
+ # Update metrics
237
+ self._global_metrics["total_sessions"] += 1
238
+ self._global_metrics["active_sessions"] += 1
239
+
240
+ logger.info(f"Created session {session_id} for user {user_id}")
241
+ return session
242
+
243
+ async def get_session(self, session_id: str) -> Optional[SessionMetrics]:
244
+ """Get session by ID."""
245
+ if self.redis_client:
246
+ try:
247
+ data = await self.redis_client.hget("sessions", session_id)
248
+ if data:
249
+ return SessionMetrics.from_dict(json.loads(data))
250
+ except Exception as e:
251
+ logger.error(f"Failed to get session from Redis: {e}")
252
+
253
+ # Fallback to memory
254
+ return self._memory_sessions.get(session_id)
255
+
256
+ async def update_session(
257
+ self,
258
+ session_id: str,
259
+ updates: Dict[str, Any] = None,
260
+ increment_requests: bool = False,
261
+ add_processing_time: float = 0.0,
262
+ mark_error: bool = False,
263
+ ) -> bool:
264
+ """Update session with activity and metrics."""
265
+ session = await self.get_session(session_id)
266
+ if not session:
267
+ return False
268
+
269
+ # Update activity
270
+ session.last_activity = datetime.utcnow()
271
+
272
+ # Update metrics
273
+ if increment_requests:
274
+ session.request_count += 1
275
+ if add_processing_time > 0:
276
+ session.total_processing_time += add_processing_time
277
+ if mark_error:
278
+ session.error_count += 1
279
+
280
+ # Apply custom updates
281
+ if updates:
282
+ for key, value in updates.items():
283
+ if hasattr(session, key):
284
+ setattr(session, key, value)
285
+
286
+ # Store updated session
287
+ await self._store_session(session)
288
+ return True
289
+
290
+ async def end_session(self, session_id: str, status: str = "completed") -> bool:
291
+ """End a session and update metrics."""
292
+ session = await self.get_session(session_id)
293
+ if not session:
294
+ return False
295
+
296
+ session.status = status
297
+ session.last_activity = datetime.utcnow()
298
+
299
+ # Store final state
300
+ await self._store_session(session)
301
+
302
+ # Update global metrics
303
+ self._global_metrics["active_sessions"] = max(
304
+ 0, self._global_metrics["active_sessions"] - 1
305
+ )
306
+
307
+ logger.info(f"Ended session {session_id} with status: {status}")
308
+ return True
309
+
310
+ async def _store_session(self, session: SessionMetrics):
311
+ """Store session to Redis or memory."""
312
+ if self.redis_client:
313
+ try:
314
+ await self.redis_client.hset(
315
+ "sessions",
316
+ session.session_id,
317
+ json.dumps(session.to_dict(), cls=DateTimeEncoder),
318
+ )
319
+ await self.redis_client.expire("sessions", self.session_ttl)
320
+ return
321
+ except Exception as e:
322
+ logger.error(f"Failed to store session to Redis: {e}")
323
+
324
+ # Fallback to memory
325
+ self._memory_sessions[session.session_id] = session
326
+
327
+ # ==================== Conversation Management ====================
328
+
329
+ async def add_conversation_message(
330
+ self,
331
+ session_id: str,
332
+ role: str,
333
+ content: str,
334
+ metadata: Dict[str, Any] = None,
335
+ ) -> bool:
336
+ """Add message to conversation history."""
337
+ message = ConversationMessage(
338
+ role=role,
339
+ content=content,
340
+ timestamp=datetime.utcnow(),
341
+ metadata=metadata,
342
+ )
343
+
344
+ # Store message
345
+ await self._store_conversation_message(session_id, message)
346
+
347
+ # Update session activity
348
+ await self.update_session(session_id)
349
+
350
+ # Update global metrics
351
+ self._global_metrics["total_messages"] += 1
352
+
353
+ return True
354
+
355
+ async def get_conversation_history(
356
+ self, session_id: str, limit: int = 50
357
+ ) -> List[ConversationMessage]:
358
+ """Get conversation history for a session."""
359
+ if self.redis_client:
360
+ try:
361
+ messages_data = await self.redis_client.lrange(
362
+ f"conversation:{session_id}", -limit, -1
363
+ )
364
+ # Since lpush adds to the beginning, we need to reverse to get
365
+ # chronological order
366
+ messages = [
367
+ ConversationMessage.from_dict(json.loads(msg))
368
+ for msg in reversed(messages_data)
369
+ ]
370
+ return messages
371
+ except Exception as e:
372
+ logger.error(f"Failed to get conversation from Redis: {e}")
373
+
374
+ # Fallback to memory
375
+ messages = self._memory_conversations.get(session_id, [])
376
+ return messages[-limit:] if limit > 0 else messages
377
+
378
+ async def _store_conversation_message(self, session_id: str, message: ConversationMessage):
379
+ """Store conversation message to Redis or memory."""
380
+ if self.redis_client:
381
+ try:
382
+ # Add to list
383
+ await self.redis_client.lpush(
384
+ f"conversation:{session_id}",
385
+ json.dumps(message.to_dict(), cls=DateTimeEncoder),
386
+ )
387
+ # Trim to limit
388
+ await self.redis_client.ltrim(
389
+ f"conversation:{session_id}", -self.conversation_limit, -1
390
+ )
391
+ # Set TTL
392
+ await self.redis_client.expire(f"conversation:{session_id}", self.session_ttl)
393
+ return
394
+ except Exception as e:
395
+ logger.error(f"Failed to store message to Redis: {e}")
396
+
397
+ # Fallback to memory
398
+ if session_id not in self._memory_conversations:
399
+ self._memory_conversations[session_id] = []
400
+
401
+ self._memory_conversations[session_id].append(message)
402
+
403
+ # Trim to limit
404
+ if len(self._memory_conversations[session_id]) > self.conversation_limit:
405
+ self._memory_conversations[session_id] = self._memory_conversations[session_id][
406
+ -self.conversation_limit :
407
+ ]
408
+
409
+ # ==================== TaskContext Integration ====================
410
+
411
+ async def get_task_context(self, session_id: str) -> Optional[TaskContext]:
412
+ """Get TaskContext for a session."""
413
+ if self.redis_client:
414
+ try:
415
+ data = await self.redis_client.hget("task_contexts", session_id)
416
+ if data:
417
+ context_data = json.loads(data)
418
+ # Reconstruct TaskContext from stored data
419
+ return self._reconstruct_task_context(context_data)
420
+ except Exception as e:
421
+ logger.error(f"Failed to get TaskContext from Redis: {e}")
422
+
423
+ # Fallback to memory
424
+ return self._memory_contexts.get(session_id)
425
+
426
    async def _store_task_context(self, session_id: str, context: TaskContext):
        """Store TaskContext to Redis or memory.

        Serializes the context via ``to_dict()`` into the shared
        ``task_contexts`` Redis hash, falling back to the in-process dict
        when Redis is unavailable or errors out.

        Args:
            session_id: Session the context belongs to (hash field name).
            context: TaskContext to persist.
        """
        if self.redis_client:
            try:
                await self.redis_client.hset(
                    "task_contexts",
                    session_id,
                    json.dumps(context.to_dict(), cls=DateTimeEncoder),
                )
                # NOTE(review): EXPIRE applies to the whole "task_contexts"
                # hash, not this field — every store refreshes the TTL for
                # all sessions' contexts at once. Confirm this is intended.
                await self.redis_client.expire("task_contexts", self.session_ttl)
                return
            except Exception as e:
                logger.error(f"Failed to store TaskContext to Redis: {e}")

        # Fallback to memory
        self._memory_contexts[session_id] = context
442
+
443
    def _reconstruct_task_context(self, data: Dict[str, Any]) -> TaskContext:
        """Reconstruct TaskContext from stored data.

        Args:
            data: Dict previously produced by ``TaskContext.to_dict()`` and
                round-tripped through JSON.

        Returns:
            A new TaskContext carrying the stored state and history.
        """
        # Create new TaskContext with stored data
        # NOTE(review): assumes TaskContext accepts the serialized dict as its
        # sole constructor argument — confirm against the TaskContext class.
        context = TaskContext(data)

        # Restore context history (entries are plain dicts after JSON,
        # so each is rebuilt into a ContextUpdate).
        if "context_history" in data:
            context.context_history = [
                ContextUpdate(
                    timestamp=entry["timestamp"],
                    update_type=entry["update_type"],
                    data=entry["data"],
                    metadata=entry["metadata"],
                )
                for entry in data["context_history"]
            ]

        return context
461
+
462
+ # ==================== Checkpoint Management (for BaseServiceCheckpointer)
463
+
464
+ async def store_checkpoint(
465
+ self,
466
+ thread_id: str,
467
+ checkpoint_id: str,
468
+ checkpoint_data: Dict[str, Any],
469
+ metadata: Dict[str, Any] = None,
470
+ ) -> bool:
471
+ """Store checkpoint data for LangGraph workflows."""
472
+ checkpoint = {
473
+ "checkpoint_id": checkpoint_id,
474
+ "thread_id": thread_id,
475
+ "data": checkpoint_data,
476
+ "metadata": metadata or {},
477
+ "created_at": datetime.utcnow().isoformat(),
478
+ }
479
+
480
+ if self.redis_client:
481
+ try:
482
+ # Store checkpoint
483
+ await self.redis_client.hset(
484
+ f"checkpoints:{thread_id}",
485
+ checkpoint_id,
486
+ json.dumps(checkpoint, cls=DateTimeEncoder),
487
+ )
488
+ # Set TTL
489
+ await self.redis_client.expire(f"checkpoints:{thread_id}", self.checkpoint_ttl)
490
+
491
+ # Update global metrics
492
+ self._global_metrics["total_checkpoints"] += 1
493
+ return True
494
+
495
+ except Exception as e:
496
+ logger.error(f"Failed to store checkpoint to Redis: {e}")
497
+
498
+ # Fallback to memory
499
+ key = f"{thread_id}:{checkpoint_id}"
500
+ self._memory_checkpoints[key] = checkpoint
501
+ return True
502
+
503
    async def get_checkpoint(
        self, thread_id: str, checkpoint_id: Optional[str] = None
    ) -> Optional[Dict[str, Any]]:
        """Get checkpoint data. If checkpoint_id is None, get the latest.

        Args:
            thread_id: LangGraph thread to look up.
            checkpoint_id: Specific checkpoint to fetch; None selects the most
                recently created one.

        Returns:
            The stored checkpoint record, or None if nothing matches.
        """
        if self.redis_client:
            try:
                if checkpoint_id:
                    # Get specific checkpoint
                    data = await self.redis_client.hget(f"checkpoints:{thread_id}", checkpoint_id)
                    if data:
                        return json.loads(data)
                else:
                    # Get latest checkpoint
                    checkpoints = await self.redis_client.hgetall(f"checkpoints:{thread_id}")
                    if checkpoints:
                        # Sort by creation time and get latest. created_at is an
                        # ISO-8601 string, so string comparison matches
                        # chronological order.
                        latest = max(
                            checkpoints.values(),
                            key=lambda x: json.loads(x)["created_at"],
                        )
                        return json.loads(latest)
            except Exception as e:
                logger.error(f"Failed to get checkpoint from Redis: {e}")

        # Fallback to memory (also reached when Redis is up but has no
        # matching entry for this thread/checkpoint).
        if checkpoint_id:
            key = f"{thread_id}:{checkpoint_id}"
            return self._memory_checkpoints.get(key)
        else:
            # Get latest from memory; keys are "{thread_id}:{checkpoint_id}".
            thread_checkpoints = {
                k: v for k, v in self._memory_checkpoints.items() if k.startswith(f"{thread_id}:")
            }
            if thread_checkpoints:
                latest_key = max(
                    thread_checkpoints.keys(),
                    key=lambda k: thread_checkpoints[k]["created_at"],
                )
                return thread_checkpoints[latest_key]

        return None
544
+
545
+ async def list_checkpoints(self, thread_id: str, limit: int = 10) -> List[Dict[str, Any]]:
546
+ """List checkpoints for a thread, ordered by creation time (newest first)."""
547
+ if self.redis_client:
548
+ try:
549
+ checkpoints_data = await self.redis_client.hgetall(f"checkpoints:{thread_id}")
550
+ checkpoints = [json.loads(data) for data in checkpoints_data.values()]
551
+ # Sort by creation time (newest first)
552
+ checkpoints.sort(key=lambda x: x["created_at"], reverse=True)
553
+ return checkpoints[:limit]
554
+ except Exception as e:
555
+ logger.error(f"Failed to list checkpoints from Redis: {e}")
556
+
557
+ # Fallback to memory
558
+ thread_checkpoints = [
559
+ v for k, v in self._memory_checkpoints.items() if k.startswith(f"{thread_id}:")
560
+ ]
561
+ thread_checkpoints.sort(key=lambda x: x["created_at"], reverse=True)
562
+ return thread_checkpoints[:limit]
563
+
564
+ # ==================== Cleanup and Maintenance ====================
565
+
566
    async def cleanup_expired_sessions(self, max_idle_hours: int = 24) -> int:
        """Clean up expired sessions and associated data.

        A session counts as expired when its ``last_activity`` is older than
        ``max_idle_hours``. Each expired session's record, conversation
        history, task context and checkpoints are removed via
        ``_cleanup_session_data``.

        Args:
            max_idle_hours: Idle time after which a session is considered expired.

        Returns:
            Number of sessions cleaned up.
        """
        cutoff_time = datetime.utcnow() - timedelta(hours=max_idle_hours)
        cleaned_count = 0

        if self.redis_client:
            try:
                # Get all sessions
                sessions_data = await self.redis_client.hgetall("sessions")
                expired_sessions = []

                for session_id, data in sessions_data.items():
                    # NOTE(review): assumes the Redis client decodes hash
                    # fields/values to str (decode_responses=True) — confirm,
                    # otherwise session_id/data arrive as bytes.
                    session = SessionMetrics.from_dict(json.loads(data))
                    if session.last_activity < cutoff_time:
                        expired_sessions.append(session_id)

                # Clean up expired sessions (collected first so we don't
                # mutate while iterating).
                for session_id in expired_sessions:
                    await self._cleanup_session_data(session_id)
                    cleaned_count += 1

            except Exception as e:
                logger.error(f"Failed to cleanup expired sessions from Redis: {e}")
        else:
            # Memory cleanup
            expired_sessions = [
                session_id
                for session_id, session in self._memory_sessions.items()
                if session.last_activity < cutoff_time
            ]

            for session_id in expired_sessions:
                await self._cleanup_session_data(session_id)
                cleaned_count += 1

        if cleaned_count > 0:
            logger.info(f"Cleaned up {cleaned_count} expired sessions")

        return cleaned_count
605
+
606
+ async def _cleanup_session_data(self, session_id: str):
607
+ """Clean up all data associated with a session."""
608
+ if self.redis_client:
609
+ try:
610
+ # Remove session
611
+ await self.redis_client.hdel("sessions", session_id)
612
+ # Remove conversation
613
+ await self.redis_client.delete(f"conversation:{session_id}")
614
+ # Remove task context
615
+ await self.redis_client.hdel("task_contexts", session_id)
616
+ # Remove checkpoints
617
+ await self.redis_client.delete(f"checkpoints:{session_id}")
618
+ except Exception as e:
619
+ logger.error(f"Failed to cleanup session data from Redis: {e}")
620
+ else:
621
+ # Memory cleanup
622
+ self._memory_sessions.pop(session_id, None)
623
+ self._memory_conversations.pop(session_id, None)
624
+ self._memory_contexts.pop(session_id, None)
625
+
626
+ # Remove checkpoints
627
+ checkpoint_keys = [
628
+ k for k in self._memory_checkpoints.keys() if k.startswith(f"{session_id}:")
629
+ ]
630
+ for key in checkpoint_keys:
631
+ self._memory_checkpoints.pop(key, None)
632
+
633
+ # ==================== Metrics and Health ====================
634
+
635
+ async def get_metrics(self) -> Dict[str, Any]:
636
+ """Get comprehensive metrics."""
637
+ active_sessions_count = 0
638
+
639
+ if self.redis_client:
640
+ try:
641
+ sessions_data = await self.redis_client.hgetall("sessions")
642
+ active_sessions_count = len(
643
+ [s for s in sessions_data.values() if json.loads(s)["status"] == "active"]
644
+ )
645
+ except Exception as e:
646
+ logger.error(f"Failed to get metrics from Redis: {e}")
647
+ else:
648
+ active_sessions_count = len(
649
+ [s for s in self._memory_sessions.values() if s.status == "active"]
650
+ )
651
+
652
+ return {
653
+ **self._global_metrics,
654
+ "active_sessions": active_sessions_count,
655
+ "storage_backend": "redis" if self.redis_client else "memory",
656
+ "redis_connected": self.redis_client is not None,
657
+ "timestamp": datetime.utcnow().isoformat(),
658
+ }
659
+
660
+ async def health_check(self) -> Dict[str, Any]:
661
+ """Perform health check."""
662
+ health = {
663
+ "status": "healthy",
664
+ "storage_backend": "redis" if self.redis_client else "memory",
665
+ "redis_connected": False,
666
+ "issues": [],
667
+ }
668
+
669
+ # Check Redis connection
670
+ if self.redis_client:
671
+ try:
672
+ await self.redis_client.ping()
673
+ health["redis_connected"] = True
674
+ except Exception as e:
675
+ health["issues"].append(f"Redis connection failed: {e}")
676
+ health["status"] = "degraded"
677
+
678
+ # Check memory usage (basic check)
679
+ if not self.redis_client:
680
+ total_memory_items = (
681
+ len(self._memory_sessions)
682
+ + len(self._memory_conversations)
683
+ + len(self._memory_contexts)
684
+ + len(self._memory_checkpoints)
685
+ )
686
+ if total_memory_items > 10000: # Arbitrary threshold
687
+ health["issues"].append(f"High memory usage: {total_memory_items} items")
688
+ health["status"] = "warning"
689
+
690
+ return health
691
+
692
+ # ==================== ICheckpointerBackend Implementation ===============
693
+
694
    async def put_checkpoint(
        self,
        thread_id: str,
        checkpoint_id: str,
        checkpoint_data: Dict[str, Any],
        metadata: Optional[Dict[str, Any]] = None,
    ) -> bool:
        """Store a checkpoint for LangGraph workflows (ICheckpointerBackend interface).

        Thin adapter that delegates to ``store_checkpoint`` — exists so this
        class satisfies the ICheckpointerBackend method name.

        Args:
            thread_id: LangGraph thread the checkpoint belongs to.
            checkpoint_id: Unique identifier for this checkpoint.
            checkpoint_data: Serializable checkpoint payload.
            metadata: Optional extra metadata to store alongside the payload.

        Returns:
            Whatever store_checkpoint returns (success flag).
        """
        return await self.store_checkpoint(thread_id, checkpoint_id, checkpoint_data, metadata)
703
+
704
    async def put_writes(
        self,
        thread_id: str,
        checkpoint_id: str,
        task_id: str,
        writes_data: List[tuple],
    ) -> bool:
        """Store intermediate writes for a checkpoint (ICheckpointerBackend interface).

        Args:
            thread_id: LangGraph thread the checkpoint belongs to.
            checkpoint_id: Checkpoint the writes are associated with.
            task_id: Task that produced the writes.
            writes_data: Intermediate write tuples to persist.

        Returns:
            True (both the Redis path and the memory fallback report success).
        """
        # Memory-fallback key; shares the _memory_checkpoints dict with
        # regular checkpoints but uses a distinct "writes:" prefix.
        writes_key = f"writes:{thread_id}:{checkpoint_id}:{task_id}"
        writes_payload = {
            "thread_id": thread_id,
            "checkpoint_id": checkpoint_id,
            "task_id": task_id,
            # NOTE: json.dumps serializes tuples as JSON arrays, so the Redis
            # copy round-trips as lists while the memory copy keeps tuples.
            "writes": writes_data,
            "created_at": datetime.utcnow().isoformat(),
        }

        if self.redis_client:
            try:
                # One hash per thread; the field is "{checkpoint_id}:{task_id}".
                await self.redis_client.hset(
                    f"checkpoint_writes:{thread_id}",
                    f"{checkpoint_id}:{task_id}",
                    json.dumps(writes_payload, cls=DateTimeEncoder),
                )
                await self.redis_client.expire(
                    f"checkpoint_writes:{thread_id}", self.checkpoint_ttl
                )
                return True
            except Exception as e:
                logger.error(f"Failed to store writes to Redis: {e}")

        # Fallback to memory
        self._memory_checkpoints[writes_key] = writes_payload
        return True
738
+
739
+ async def get_writes(self, thread_id: str, checkpoint_id: str) -> List[tuple]:
740
+ """Get intermediate writes for a checkpoint (ICheckpointerBackend interface)."""
741
+ if self.redis_client:
742
+ try:
743
+ writes_data = await self.redis_client.hgetall(f"checkpoint_writes:{thread_id}")
744
+ writes = []
745
+ for key, data in writes_data.items():
746
+ if key.startswith(f"{checkpoint_id}:"):
747
+ payload = json.loads(data)
748
+ writes.extend(payload.get("writes", []))
749
+ return writes
750
+ except Exception as e:
751
+ logger.error(f"Failed to get writes from Redis: {e}")
752
+
753
+ # Fallback to memory
754
+ writes = []
755
+ writes_prefix = f"writes:{thread_id}:{checkpoint_id}:"
756
+ for key, payload in self._memory_checkpoints.items():
757
+ if key.startswith(writes_prefix):
758
+ writes.extend(payload.get("writes", []))
759
+ return writes
760
+
761
+ # ==================== ITaskContextStorage Implementation ================
762
+
763
+ async def store_task_context(self, session_id: str, context: Any) -> bool:
764
+ """Store TaskContext for a session (ITaskContextStorage interface)."""
765
+ return await self._store_task_context(session_id, context)
766
+
767
+ # ==================== Agent Communication and Conversation Isolation ====
768
+
769
    async def create_conversation_session(
        self,
        session_id: str,
        participants: List[Dict[str, Any]],
        session_type: str,
        metadata: Optional[Dict[str, Any]] = None,
    ) -> str:
        """
        Create an isolated conversation session between participants.

        Args:
            session_id: Base session ID
            participants: List of participant dictionaries with id, type, role
            session_type: Type of conversation ('user_to_mc', 'mc_to_agent', 'agent_to_agent', 'user_to_agent')
            metadata: Additional session metadata

        Returns:
            Generated session key for conversation isolation
        """
        # Imported locally to avoid a module-level import cycle with the
        # conversation models package.
        from .conversation_models import (
            ConversationSession,
            ConversationParticipant,
        )

        # Create participant objects from the raw dicts; missing keys become
        # None (via dict.get), metadata defaults to an empty dict.
        participant_objects = [
            ConversationParticipant(
                participant_id=p.get("id"),
                participant_type=p.get("type"),
                participant_role=p.get("role"),
                metadata=p.get("metadata", {}),
            )
            for p in participants
        ]

        # Create conversation session
        conversation_session = ConversationSession(
            session_id=session_id,
            participants=participant_objects,
            session_type=session_type,
            created_at=datetime.utcnow(),
            last_activity=datetime.utcnow(),
            metadata=metadata or {},
        )

        # Generate unique session key used to isolate this conversation's
        # message history from the base session's.
        session_key = conversation_session.generate_session_key()

        # Store conversation session metadata
        await self._store_conversation_session(session_key, conversation_session)

        logger.info(f"Created conversation session: {session_key} (type: {session_type})")
        return session_key
822
+
823
    async def add_agent_communication_message(
        self,
        session_key: str,
        sender_id: str,
        sender_type: str,
        sender_role: Optional[str],
        recipient_id: str,
        recipient_type: str,
        recipient_role: Optional[str],
        content: str,
        message_type: str = "communication",
        metadata: Optional[Dict[str, Any]] = None,
    ) -> bool:
        """
        Add a message to an agent communication session.

        Args:
            session_key: Isolated session key
            sender_id: ID of the sender
            sender_type: Type of sender ('master_controller', 'agent', 'user')
            sender_role: Role of sender (for agents)
            recipient_id: ID of the recipient
            recipient_type: Type of recipient
            recipient_role: Role of recipient (for agents)
            content: Message content
            message_type: Type of message
            metadata: Additional message metadata

        Returns:
            Success status
        """
        # Imported locally to avoid a module-level import cycle with the
        # conversation models package.
        from .conversation_models import AgentCommunicationMessage

        # Create agent communication message with a fresh UUID.
        message = AgentCommunicationMessage(
            message_id=str(uuid.uuid4()),
            session_key=session_key,
            sender_id=sender_id,
            sender_type=sender_type,
            sender_role=sender_role,
            recipient_id=recipient_id,
            recipient_type=recipient_type,
            recipient_role=recipient_role,
            content=content,
            message_type=message_type,
            timestamp=datetime.utcnow(),
            metadata=metadata or {},
        )

        # Convert to the generic conversation-message shape so the existing
        # storage path (Redis list / memory fallback) can be reused.
        conv_message_dict = message.to_conversation_message_dict()

        # Store using existing conversation message infrastructure
        await self.add_conversation_message(
            session_id=session_key,
            role=conv_message_dict["role"],
            content=conv_message_dict["content"],
            metadata=conv_message_dict["metadata"],
        )

        # Update session activity
        await self._update_conversation_session_activity(session_key)

        logger.debug(f"Added agent communication message to session {session_key}")
        return True
888
+
889
+ async def get_agent_conversation_history(
890
+ self,
891
+ session_key: str,
892
+ limit: int = 50,
893
+ message_types: Optional[List[str]] = None,
894
+ ) -> List[Dict[str, Any]]:
895
+ """
896
+ Get conversation history for an agent communication session.
897
+
898
+ Args:
899
+ session_key: Isolated session key
900
+ limit: Maximum number of messages to retrieve
901
+ message_types: Filter by message types
902
+
903
+ Returns:
904
+ List of conversation messages
905
+ """
906
+ # Get conversation history using existing infrastructure
907
+ messages = await self.get_conversation_history(session_key, limit)
908
+
909
+ # Filter by message types if specified
910
+ if message_types:
911
+ filtered_messages = []
912
+ for msg in messages:
913
+ if hasattr(msg, "to_dict"):
914
+ msg_dict = msg.to_dict()
915
+ else:
916
+ msg_dict = msg
917
+
918
+ msg_metadata = msg_dict.get("metadata", {})
919
+ msg_type = msg_metadata.get("message_type", "communication")
920
+
921
+ if msg_type in message_types:
922
+ filtered_messages.append(msg_dict)
923
+
924
+ return filtered_messages
925
+
926
+ # Convert messages to dict format
927
+ return [msg.to_dict() if hasattr(msg, "to_dict") else msg for msg in messages]
928
+
929
    async def _store_conversation_session(self, session_key: str, conversation_session) -> None:
        """Store conversation session metadata.

        Flattens the ConversationSession (participants, type, timestamps,
        metadata) into a JSON-friendly dict and writes it to the shared
        ``conversation_sessions`` Redis hash, falling back to an in-process
        dict when Redis is unavailable.

        Args:
            session_key: Isolated session key (hash field name).
            conversation_session: ConversationSession instance to persist.
        """
        session_data = {
            "session_id": conversation_session.session_id,
            "participants": [
                {
                    "participant_id": p.participant_id,
                    "participant_type": p.participant_type,
                    "participant_role": p.participant_role,
                    "metadata": p.metadata,
                }
                for p in conversation_session.participants
            ],
            "session_type": conversation_session.session_type,
            "created_at": conversation_session.created_at.isoformat(),
            "last_activity": conversation_session.last_activity.isoformat(),
            "metadata": conversation_session.metadata,
        }

        if self.redis_client:
            try:
                await self.redis_client.hset(
                    "conversation_sessions",
                    session_key,
                    json.dumps(session_data, cls=DateTimeEncoder),
                )
                # NOTE(review): EXPIRE applies to the whole hash, so every
                # store refreshes the TTL for all conversation sessions.
                await self.redis_client.expire("conversation_sessions", self.session_ttl)
                return
            except Exception as e:
                logger.error(f"Failed to store conversation session to Redis: {e}")

        # Fallback to memory (extend memory storage); the dict is created
        # lazily because __init__ is outside this view and may not define it.
        if not hasattr(self, "_memory_conversation_sessions"):
            self._memory_conversation_sessions = {}
        self._memory_conversation_sessions[session_key] = session_data
964
+
965
+ async def _update_conversation_session_activity(self, session_key: str) -> None:
966
+ """Update last activity timestamp for a conversation session."""
967
+ if self.redis_client:
968
+ try:
969
+ session_data = await self.redis_client.hget("conversation_sessions", session_key)
970
+ if session_data:
971
+ session_dict = json.loads(session_data)
972
+ session_dict["last_activity"] = datetime.utcnow().isoformat()
973
+ await self.redis_client.hset(
974
+ "conversation_sessions",
975
+ session_key,
976
+ json.dumps(session_dict, cls=DateTimeEncoder),
977
+ )
978
+ return
979
+ except Exception as e:
980
+ logger.error(f"Failed to update conversation session activity in Redis: {e}")
981
+
982
+ # Fallback to memory
983
+ if (
984
+ hasattr(self, "_memory_conversation_sessions")
985
+ and session_key in self._memory_conversation_sessions
986
+ ):
987
+ self._memory_conversation_sessions[session_key][
988
+ "last_activity"
989
+ ] = datetime.utcnow().isoformat()