aiecs 1.0.1__py3-none-any.whl → 1.7.17__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of aiecs might be problematic. Click here for more details.
- aiecs/__init__.py +13 -16
- aiecs/__main__.py +7 -7
- aiecs/aiecs_client.py +269 -75
- aiecs/application/executors/operation_executor.py +79 -54
- aiecs/application/knowledge_graph/__init__.py +7 -0
- aiecs/application/knowledge_graph/builder/__init__.py +37 -0
- aiecs/application/knowledge_graph/builder/data_quality.py +302 -0
- aiecs/application/knowledge_graph/builder/data_reshaping.py +293 -0
- aiecs/application/knowledge_graph/builder/document_builder.py +369 -0
- aiecs/application/knowledge_graph/builder/graph_builder.py +490 -0
- aiecs/application/knowledge_graph/builder/import_optimizer.py +396 -0
- aiecs/application/knowledge_graph/builder/schema_inference.py +462 -0
- aiecs/application/knowledge_graph/builder/schema_mapping.py +563 -0
- aiecs/application/knowledge_graph/builder/structured_pipeline.py +1384 -0
- aiecs/application/knowledge_graph/builder/text_chunker.py +317 -0
- aiecs/application/knowledge_graph/extractors/__init__.py +27 -0
- aiecs/application/knowledge_graph/extractors/base.py +98 -0
- aiecs/application/knowledge_graph/extractors/llm_entity_extractor.py +422 -0
- aiecs/application/knowledge_graph/extractors/llm_relation_extractor.py +347 -0
- aiecs/application/knowledge_graph/extractors/ner_entity_extractor.py +241 -0
- aiecs/application/knowledge_graph/fusion/__init__.py +78 -0
- aiecs/application/knowledge_graph/fusion/ab_testing.py +395 -0
- aiecs/application/knowledge_graph/fusion/abbreviation_expander.py +327 -0
- aiecs/application/knowledge_graph/fusion/alias_index.py +597 -0
- aiecs/application/knowledge_graph/fusion/alias_matcher.py +384 -0
- aiecs/application/knowledge_graph/fusion/cache_coordinator.py +343 -0
- aiecs/application/knowledge_graph/fusion/entity_deduplicator.py +433 -0
- aiecs/application/knowledge_graph/fusion/entity_linker.py +511 -0
- aiecs/application/knowledge_graph/fusion/evaluation_dataset.py +240 -0
- aiecs/application/knowledge_graph/fusion/knowledge_fusion.py +632 -0
- aiecs/application/knowledge_graph/fusion/matching_config.py +489 -0
- aiecs/application/knowledge_graph/fusion/name_normalizer.py +352 -0
- aiecs/application/knowledge_graph/fusion/relation_deduplicator.py +183 -0
- aiecs/application/knowledge_graph/fusion/semantic_name_matcher.py +464 -0
- aiecs/application/knowledge_graph/fusion/similarity_pipeline.py +534 -0
- aiecs/application/knowledge_graph/pattern_matching/__init__.py +21 -0
- aiecs/application/knowledge_graph/pattern_matching/pattern_matcher.py +342 -0
- aiecs/application/knowledge_graph/pattern_matching/query_executor.py +366 -0
- aiecs/application/knowledge_graph/profiling/__init__.py +12 -0
- aiecs/application/knowledge_graph/profiling/query_plan_visualizer.py +195 -0
- aiecs/application/knowledge_graph/profiling/query_profiler.py +223 -0
- aiecs/application/knowledge_graph/reasoning/__init__.py +27 -0
- aiecs/application/knowledge_graph/reasoning/evidence_synthesis.py +341 -0
- aiecs/application/knowledge_graph/reasoning/inference_engine.py +500 -0
- aiecs/application/knowledge_graph/reasoning/logic_form_parser.py +163 -0
- aiecs/application/knowledge_graph/reasoning/logic_parser/__init__.py +79 -0
- aiecs/application/knowledge_graph/reasoning/logic_parser/ast_builder.py +513 -0
- aiecs/application/knowledge_graph/reasoning/logic_parser/ast_nodes.py +913 -0
- aiecs/application/knowledge_graph/reasoning/logic_parser/ast_validator.py +866 -0
- aiecs/application/knowledge_graph/reasoning/logic_parser/error_handler.py +475 -0
- aiecs/application/knowledge_graph/reasoning/logic_parser/parser.py +396 -0
- aiecs/application/knowledge_graph/reasoning/logic_parser/query_context.py +208 -0
- aiecs/application/knowledge_graph/reasoning/logic_query_integration.py +170 -0
- aiecs/application/knowledge_graph/reasoning/query_planner.py +855 -0
- aiecs/application/knowledge_graph/reasoning/reasoning_engine.py +518 -0
- aiecs/application/knowledge_graph/retrieval/__init__.py +27 -0
- aiecs/application/knowledge_graph/retrieval/query_intent_classifier.py +211 -0
- aiecs/application/knowledge_graph/retrieval/retrieval_strategies.py +592 -0
- aiecs/application/knowledge_graph/retrieval/strategy_types.py +23 -0
- aiecs/application/knowledge_graph/search/__init__.py +59 -0
- aiecs/application/knowledge_graph/search/hybrid_search.py +457 -0
- aiecs/application/knowledge_graph/search/reranker.py +293 -0
- aiecs/application/knowledge_graph/search/reranker_strategies.py +535 -0
- aiecs/application/knowledge_graph/search/text_similarity.py +392 -0
- aiecs/application/knowledge_graph/traversal/__init__.py +15 -0
- aiecs/application/knowledge_graph/traversal/enhanced_traversal.py +305 -0
- aiecs/application/knowledge_graph/traversal/path_scorer.py +271 -0
- aiecs/application/knowledge_graph/validators/__init__.py +13 -0
- aiecs/application/knowledge_graph/validators/relation_validator.py +239 -0
- aiecs/application/knowledge_graph/visualization/__init__.py +11 -0
- aiecs/application/knowledge_graph/visualization/graph_visualizer.py +313 -0
- aiecs/common/__init__.py +9 -0
- aiecs/common/knowledge_graph/__init__.py +17 -0
- aiecs/common/knowledge_graph/runnable.py +471 -0
- aiecs/config/__init__.py +20 -5
- aiecs/config/config.py +762 -31
- aiecs/config/graph_config.py +131 -0
- aiecs/config/tool_config.py +435 -0
- aiecs/core/__init__.py +29 -13
- aiecs/core/interface/__init__.py +2 -2
- aiecs/core/interface/execution_interface.py +22 -22
- aiecs/core/interface/storage_interface.py +37 -88
- aiecs/core/registry/__init__.py +31 -0
- aiecs/core/registry/service_registry.py +92 -0
- aiecs/domain/__init__.py +270 -1
- aiecs/domain/agent/__init__.py +191 -0
- aiecs/domain/agent/base_agent.py +3949 -0
- aiecs/domain/agent/exceptions.py +99 -0
- aiecs/domain/agent/graph_aware_mixin.py +569 -0
- aiecs/domain/agent/hybrid_agent.py +1731 -0
- aiecs/domain/agent/integration/__init__.py +29 -0
- aiecs/domain/agent/integration/context_compressor.py +216 -0
- aiecs/domain/agent/integration/context_engine_adapter.py +587 -0
- aiecs/domain/agent/integration/protocols.py +281 -0
- aiecs/domain/agent/integration/retry_policy.py +218 -0
- aiecs/domain/agent/integration/role_config.py +213 -0
- aiecs/domain/agent/knowledge_aware_agent.py +1892 -0
- aiecs/domain/agent/lifecycle.py +291 -0
- aiecs/domain/agent/llm_agent.py +692 -0
- aiecs/domain/agent/memory/__init__.py +12 -0
- aiecs/domain/agent/memory/conversation.py +1124 -0
- aiecs/domain/agent/migration/__init__.py +14 -0
- aiecs/domain/agent/migration/conversion.py +163 -0
- aiecs/domain/agent/migration/legacy_wrapper.py +86 -0
- aiecs/domain/agent/models.py +894 -0
- aiecs/domain/agent/observability.py +479 -0
- aiecs/domain/agent/persistence.py +449 -0
- aiecs/domain/agent/prompts/__init__.py +29 -0
- aiecs/domain/agent/prompts/builder.py +159 -0
- aiecs/domain/agent/prompts/formatters.py +187 -0
- aiecs/domain/agent/prompts/template.py +255 -0
- aiecs/domain/agent/registry.py +253 -0
- aiecs/domain/agent/tool_agent.py +444 -0
- aiecs/domain/agent/tools/__init__.py +15 -0
- aiecs/domain/agent/tools/schema_generator.py +377 -0
- aiecs/domain/community/__init__.py +155 -0
- aiecs/domain/community/agent_adapter.py +469 -0
- aiecs/domain/community/analytics.py +432 -0
- aiecs/domain/community/collaborative_workflow.py +648 -0
- aiecs/domain/community/communication_hub.py +634 -0
- aiecs/domain/community/community_builder.py +320 -0
- aiecs/domain/community/community_integration.py +796 -0
- aiecs/domain/community/community_manager.py +803 -0
- aiecs/domain/community/decision_engine.py +849 -0
- aiecs/domain/community/exceptions.py +231 -0
- aiecs/domain/community/models/__init__.py +33 -0
- aiecs/domain/community/models/community_models.py +234 -0
- aiecs/domain/community/resource_manager.py +461 -0
- aiecs/domain/community/shared_context_manager.py +589 -0
- aiecs/domain/context/__init__.py +40 -10
- aiecs/domain/context/context_engine.py +1910 -0
- aiecs/domain/context/conversation_models.py +87 -53
- aiecs/domain/context/graph_memory.py +582 -0
- aiecs/domain/execution/model.py +12 -4
- aiecs/domain/knowledge_graph/__init__.py +19 -0
- aiecs/domain/knowledge_graph/models/__init__.py +52 -0
- aiecs/domain/knowledge_graph/models/entity.py +148 -0
- aiecs/domain/knowledge_graph/models/evidence.py +178 -0
- aiecs/domain/knowledge_graph/models/inference_rule.py +184 -0
- aiecs/domain/knowledge_graph/models/path.py +171 -0
- aiecs/domain/knowledge_graph/models/path_pattern.py +171 -0
- aiecs/domain/knowledge_graph/models/query.py +261 -0
- aiecs/domain/knowledge_graph/models/query_plan.py +181 -0
- aiecs/domain/knowledge_graph/models/relation.py +202 -0
- aiecs/domain/knowledge_graph/schema/__init__.py +23 -0
- aiecs/domain/knowledge_graph/schema/entity_type.py +131 -0
- aiecs/domain/knowledge_graph/schema/graph_schema.py +253 -0
- aiecs/domain/knowledge_graph/schema/property_schema.py +143 -0
- aiecs/domain/knowledge_graph/schema/relation_type.py +163 -0
- aiecs/domain/knowledge_graph/schema/schema_manager.py +691 -0
- aiecs/domain/knowledge_graph/schema/type_enums.py +209 -0
- aiecs/domain/task/dsl_processor.py +172 -56
- aiecs/domain/task/model.py +20 -8
- aiecs/domain/task/task_context.py +27 -24
- aiecs/infrastructure/__init__.py +0 -2
- aiecs/infrastructure/graph_storage/__init__.py +11 -0
- aiecs/infrastructure/graph_storage/base.py +837 -0
- aiecs/infrastructure/graph_storage/batch_operations.py +458 -0
- aiecs/infrastructure/graph_storage/cache.py +424 -0
- aiecs/infrastructure/graph_storage/distributed.py +223 -0
- aiecs/infrastructure/graph_storage/error_handling.py +380 -0
- aiecs/infrastructure/graph_storage/graceful_degradation.py +294 -0
- aiecs/infrastructure/graph_storage/health_checks.py +378 -0
- aiecs/infrastructure/graph_storage/in_memory.py +1197 -0
- aiecs/infrastructure/graph_storage/index_optimization.py +446 -0
- aiecs/infrastructure/graph_storage/lazy_loading.py +431 -0
- aiecs/infrastructure/graph_storage/metrics.py +344 -0
- aiecs/infrastructure/graph_storage/migration.py +400 -0
- aiecs/infrastructure/graph_storage/pagination.py +483 -0
- aiecs/infrastructure/graph_storage/performance_monitoring.py +456 -0
- aiecs/infrastructure/graph_storage/postgres.py +1563 -0
- aiecs/infrastructure/graph_storage/property_storage.py +353 -0
- aiecs/infrastructure/graph_storage/protocols.py +76 -0
- aiecs/infrastructure/graph_storage/query_optimizer.py +642 -0
- aiecs/infrastructure/graph_storage/schema_cache.py +290 -0
- aiecs/infrastructure/graph_storage/sqlite.py +1373 -0
- aiecs/infrastructure/graph_storage/streaming.py +487 -0
- aiecs/infrastructure/graph_storage/tenant.py +412 -0
- aiecs/infrastructure/messaging/celery_task_manager.py +92 -54
- aiecs/infrastructure/messaging/websocket_manager.py +51 -35
- aiecs/infrastructure/monitoring/__init__.py +22 -0
- aiecs/infrastructure/monitoring/executor_metrics.py +45 -11
- aiecs/infrastructure/monitoring/global_metrics_manager.py +212 -0
- aiecs/infrastructure/monitoring/structured_logger.py +3 -7
- aiecs/infrastructure/monitoring/tracing_manager.py +63 -35
- aiecs/infrastructure/persistence/__init__.py +14 -1
- aiecs/infrastructure/persistence/context_engine_client.py +184 -0
- aiecs/infrastructure/persistence/database_manager.py +67 -43
- aiecs/infrastructure/persistence/file_storage.py +180 -103
- aiecs/infrastructure/persistence/redis_client.py +74 -21
- aiecs/llm/__init__.py +73 -25
- aiecs/llm/callbacks/__init__.py +11 -0
- aiecs/llm/{custom_callbacks.py → callbacks/custom_callbacks.py} +26 -19
- aiecs/llm/client_factory.py +230 -37
- aiecs/llm/client_resolver.py +155 -0
- aiecs/llm/clients/__init__.py +38 -0
- aiecs/llm/clients/base_client.py +328 -0
- aiecs/llm/clients/google_function_calling_mixin.py +415 -0
- aiecs/llm/clients/googleai_client.py +314 -0
- aiecs/llm/clients/openai_client.py +158 -0
- aiecs/llm/clients/openai_compatible_mixin.py +367 -0
- aiecs/llm/clients/vertex_client.py +1186 -0
- aiecs/llm/clients/xai_client.py +201 -0
- aiecs/llm/config/__init__.py +51 -0
- aiecs/llm/config/config_loader.py +272 -0
- aiecs/llm/config/config_validator.py +206 -0
- aiecs/llm/config/model_config.py +143 -0
- aiecs/llm/protocols.py +149 -0
- aiecs/llm/utils/__init__.py +10 -0
- aiecs/llm/utils/validate_config.py +89 -0
- aiecs/main.py +140 -121
- aiecs/scripts/aid/VERSION_MANAGEMENT.md +138 -0
- aiecs/scripts/aid/__init__.py +19 -0
- aiecs/scripts/aid/module_checker.py +499 -0
- aiecs/scripts/aid/version_manager.py +235 -0
- aiecs/scripts/{DEPENDENCY_SYSTEM_SUMMARY.md → dependance_check/DEPENDENCY_SYSTEM_SUMMARY.md} +1 -0
- aiecs/scripts/{README_DEPENDENCY_CHECKER.md → dependance_check/README_DEPENDENCY_CHECKER.md} +1 -0
- aiecs/scripts/dependance_check/__init__.py +15 -0
- aiecs/scripts/dependance_check/dependency_checker.py +1835 -0
- aiecs/scripts/{dependency_fixer.py → dependance_check/dependency_fixer.py} +192 -90
- aiecs/scripts/{download_nlp_data.py → dependance_check/download_nlp_data.py} +203 -71
- aiecs/scripts/dependance_patch/__init__.py +7 -0
- aiecs/scripts/dependance_patch/fix_weasel/__init__.py +11 -0
- aiecs/scripts/{fix_weasel_validator.py → dependance_patch/fix_weasel/fix_weasel_validator.py} +21 -14
- aiecs/scripts/{patch_weasel_library.sh → dependance_patch/fix_weasel/patch_weasel_library.sh} +1 -1
- aiecs/scripts/knowledge_graph/__init__.py +3 -0
- aiecs/scripts/knowledge_graph/run_threshold_experiments.py +212 -0
- aiecs/scripts/migrations/multi_tenancy/README.md +142 -0
- aiecs/scripts/tools_develop/README.md +671 -0
- aiecs/scripts/tools_develop/README_CONFIG_CHECKER.md +273 -0
- aiecs/scripts/tools_develop/TOOLS_CONFIG_GUIDE.md +1287 -0
- aiecs/scripts/tools_develop/TOOL_AUTO_DISCOVERY.md +234 -0
- aiecs/scripts/tools_develop/__init__.py +21 -0
- aiecs/scripts/tools_develop/check_all_tools_config.py +548 -0
- aiecs/scripts/tools_develop/check_type_annotations.py +257 -0
- aiecs/scripts/tools_develop/pre-commit-schema-coverage.sh +66 -0
- aiecs/scripts/tools_develop/schema_coverage.py +511 -0
- aiecs/scripts/tools_develop/validate_tool_schemas.py +475 -0
- aiecs/scripts/tools_develop/verify_executor_config_fix.py +98 -0
- aiecs/scripts/tools_develop/verify_tools.py +352 -0
- aiecs/tasks/__init__.py +0 -1
- aiecs/tasks/worker.py +115 -47
- aiecs/tools/__init__.py +194 -72
- aiecs/tools/apisource/__init__.py +99 -0
- aiecs/tools/apisource/intelligence/__init__.py +19 -0
- aiecs/tools/apisource/intelligence/data_fusion.py +632 -0
- aiecs/tools/apisource/intelligence/query_analyzer.py +417 -0
- aiecs/tools/apisource/intelligence/search_enhancer.py +385 -0
- aiecs/tools/apisource/monitoring/__init__.py +9 -0
- aiecs/tools/apisource/monitoring/metrics.py +330 -0
- aiecs/tools/apisource/providers/__init__.py +112 -0
- aiecs/tools/apisource/providers/base.py +671 -0
- aiecs/tools/apisource/providers/census.py +397 -0
- aiecs/tools/apisource/providers/fred.py +535 -0
- aiecs/tools/apisource/providers/newsapi.py +409 -0
- aiecs/tools/apisource/providers/worldbank.py +352 -0
- aiecs/tools/apisource/reliability/__init__.py +12 -0
- aiecs/tools/apisource/reliability/error_handler.py +363 -0
- aiecs/tools/apisource/reliability/fallback_strategy.py +376 -0
- aiecs/tools/apisource/tool.py +832 -0
- aiecs/tools/apisource/utils/__init__.py +9 -0
- aiecs/tools/apisource/utils/validators.py +334 -0
- aiecs/tools/base_tool.py +415 -21
- aiecs/tools/docs/__init__.py +121 -0
- aiecs/tools/docs/ai_document_orchestrator.py +607 -0
- aiecs/tools/docs/ai_document_writer_orchestrator.py +2350 -0
- aiecs/tools/docs/content_insertion_tool.py +1320 -0
- aiecs/tools/docs/document_creator_tool.py +1464 -0
- aiecs/tools/docs/document_layout_tool.py +1160 -0
- aiecs/tools/docs/document_parser_tool.py +1016 -0
- aiecs/tools/docs/document_writer_tool.py +2008 -0
- aiecs/tools/knowledge_graph/__init__.py +17 -0
- aiecs/tools/knowledge_graph/graph_reasoning_tool.py +807 -0
- aiecs/tools/knowledge_graph/graph_search_tool.py +944 -0
- aiecs/tools/knowledge_graph/kg_builder_tool.py +524 -0
- aiecs/tools/langchain_adapter.py +300 -138
- aiecs/tools/schema_generator.py +455 -0
- aiecs/tools/search_tool/__init__.py +100 -0
- aiecs/tools/search_tool/analyzers.py +581 -0
- aiecs/tools/search_tool/cache.py +264 -0
- aiecs/tools/search_tool/constants.py +128 -0
- aiecs/tools/search_tool/context.py +224 -0
- aiecs/tools/search_tool/core.py +778 -0
- aiecs/tools/search_tool/deduplicator.py +119 -0
- aiecs/tools/search_tool/error_handler.py +242 -0
- aiecs/tools/search_tool/metrics.py +343 -0
- aiecs/tools/search_tool/rate_limiter.py +172 -0
- aiecs/tools/search_tool/schemas.py +275 -0
- aiecs/tools/statistics/__init__.py +80 -0
- aiecs/tools/statistics/ai_data_analysis_orchestrator.py +646 -0
- aiecs/tools/statistics/ai_insight_generator_tool.py +508 -0
- aiecs/tools/statistics/ai_report_orchestrator_tool.py +684 -0
- aiecs/tools/statistics/data_loader_tool.py +555 -0
- aiecs/tools/statistics/data_profiler_tool.py +638 -0
- aiecs/tools/statistics/data_transformer_tool.py +580 -0
- aiecs/tools/statistics/data_visualizer_tool.py +498 -0
- aiecs/tools/statistics/model_trainer_tool.py +507 -0
- aiecs/tools/statistics/statistical_analyzer_tool.py +472 -0
- aiecs/tools/task_tools/__init__.py +49 -36
- aiecs/tools/task_tools/chart_tool.py +200 -184
- aiecs/tools/task_tools/classfire_tool.py +268 -267
- aiecs/tools/task_tools/image_tool.py +220 -141
- aiecs/tools/task_tools/office_tool.py +226 -146
- aiecs/tools/task_tools/pandas_tool.py +477 -121
- aiecs/tools/task_tools/report_tool.py +390 -142
- aiecs/tools/task_tools/research_tool.py +149 -79
- aiecs/tools/task_tools/scraper_tool.py +339 -145
- aiecs/tools/task_tools/stats_tool.py +448 -209
- aiecs/tools/temp_file_manager.py +26 -24
- aiecs/tools/tool_executor/__init__.py +18 -16
- aiecs/tools/tool_executor/tool_executor.py +364 -52
- aiecs/utils/LLM_output_structor.py +74 -48
- aiecs/utils/__init__.py +14 -3
- aiecs/utils/base_callback.py +0 -3
- aiecs/utils/cache_provider.py +696 -0
- aiecs/utils/execution_utils.py +50 -31
- aiecs/utils/prompt_loader.py +1 -0
- aiecs/utils/token_usage_repository.py +37 -11
- aiecs/ws/socket_server.py +14 -4
- {aiecs-1.0.1.dist-info → aiecs-1.7.17.dist-info}/METADATA +52 -15
- aiecs-1.7.17.dist-info/RECORD +337 -0
- aiecs-1.7.17.dist-info/entry_points.txt +13 -0
- aiecs/config/registry.py +0 -19
- aiecs/domain/context/content_engine.py +0 -982
- aiecs/llm/base_client.py +0 -99
- aiecs/llm/openai_client.py +0 -125
- aiecs/llm/vertex_client.py +0 -186
- aiecs/llm/xai_client.py +0 -184
- aiecs/scripts/dependency_checker.py +0 -857
- aiecs/scripts/quick_dependency_check.py +0 -269
- aiecs/tools/task_tools/search_api.py +0 -7
- aiecs-1.0.1.dist-info/RECORD +0 -90
- aiecs-1.0.1.dist-info/entry_points.txt +0 -7
- /aiecs/scripts/{setup_nlp_data.sh → dependance_check/setup_nlp_data.sh} +0 -0
- /aiecs/scripts/{README_WEASEL_PATCH.md → dependance_patch/fix_weasel/README_WEASEL_PATCH.md} +0 -0
- /aiecs/scripts/{fix_weasel_validator.sh → dependance_patch/fix_weasel/fix_weasel_validator.sh} +0 -0
- /aiecs/scripts/{run_weasel_patch.sh → dependance_patch/fix_weasel/run_weasel_patch.sh} +0 -0
- {aiecs-1.0.1.dist-info → aiecs-1.7.17.dist-info}/WHEEL +0 -0
- {aiecs-1.0.1.dist-info → aiecs-1.7.17.dist-info}/licenses/LICENSE +0 -0
- {aiecs-1.0.1.dist-info → aiecs-1.7.17.dist-info}/top_level.txt +0 -0
|
@@ -1,982 +0,0 @@
|
|
|
1
|
-
"""
|
|
2
|
-
ContextEngine: Advanced Context and Session Management Engine
|
|
3
|
-
|
|
4
|
-
This engine extends TaskContext capabilities to provide comprehensive
|
|
5
|
-
session management, conversation tracking, and persistent storage for BaseAIService.
|
|
6
|
-
|
|
7
|
-
Key Features:
|
|
8
|
-
1. Multi-session management (extends TaskContext from single task to multiple sessions)
|
|
9
|
-
2. Redis backend storage for persistence and scalability
|
|
10
|
-
3. Conversation history management with optimization
|
|
11
|
-
4. Performance metrics and analytics
|
|
12
|
-
5. Resource and lifecycle management
|
|
13
|
-
6. Integration with BaseServiceCheckpointer
|
|
14
|
-
"""
|
|
15
|
-
|
|
16
|
-
import json
|
|
17
|
-
import logging
|
|
18
|
-
import time
|
|
19
|
-
import uuid
|
|
20
|
-
from datetime import datetime, timedelta
|
|
21
|
-
from typing import Dict, Any, Optional, List, AsyncGenerator, Union
|
|
22
|
-
from dataclasses import dataclass, asdict
|
|
23
|
-
from contextlib import asynccontextmanager
|
|
24
|
-
|
|
25
|
-
# Import TaskContext for base functionality
|
|
26
|
-
from app.domain.task.task_context import TaskContext, ContextUpdate
|
|
27
|
-
|
|
28
|
-
# Import core storage interfaces
|
|
29
|
-
from app.core.interface.storage_interface import IStorageBackend, ICheckpointerBackend
|
|
30
|
-
|
|
31
|
-
# Redis client import - use existing infrastructure
|
|
32
|
-
try:
|
|
33
|
-
import redis.asyncio as redis
|
|
34
|
-
from app.infrastructure.persistence.redis_client import get_redis_client
|
|
35
|
-
REDIS_AVAILABLE = True
|
|
36
|
-
except ImportError:
|
|
37
|
-
redis = None
|
|
38
|
-
get_redis_client = None
|
|
39
|
-
REDIS_AVAILABLE = False
|
|
40
|
-
|
|
41
|
-
logger = logging.getLogger(__name__)
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
@dataclass
|
|
45
|
-
class SessionMetrics:
|
|
46
|
-
"""Session-level performance metrics."""
|
|
47
|
-
session_id: str
|
|
48
|
-
user_id: str
|
|
49
|
-
created_at: datetime
|
|
50
|
-
last_activity: datetime
|
|
51
|
-
request_count: int = 0
|
|
52
|
-
error_count: int = 0
|
|
53
|
-
total_processing_time: float = 0.0
|
|
54
|
-
status: str = "active" # active, completed, failed, expired
|
|
55
|
-
|
|
56
|
-
def to_dict(self) -> Dict[str, Any]:
|
|
57
|
-
return {
|
|
58
|
-
**asdict(self),
|
|
59
|
-
"created_at": self.created_at.isoformat(),
|
|
60
|
-
"last_activity": self.last_activity.isoformat()
|
|
61
|
-
}
|
|
62
|
-
|
|
63
|
-
@classmethod
|
|
64
|
-
def from_dict(cls, data: Dict[str, Any]) -> 'SessionMetrics':
|
|
65
|
-
data = data.copy()
|
|
66
|
-
data["created_at"] = datetime.fromisoformat(data["created_at"])
|
|
67
|
-
data["last_activity"] = datetime.fromisoformat(data["last_activity"])
|
|
68
|
-
return cls(**data)
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
@dataclass
|
|
72
|
-
class ConversationMessage:
|
|
73
|
-
"""Structured conversation message."""
|
|
74
|
-
role: str # user, assistant, system
|
|
75
|
-
content: str
|
|
76
|
-
timestamp: datetime
|
|
77
|
-
metadata: Dict[str, Any] = None
|
|
78
|
-
|
|
79
|
-
def to_dict(self) -> Dict[str, Any]:
|
|
80
|
-
return {
|
|
81
|
-
"role": self.role,
|
|
82
|
-
"content": self.content,
|
|
83
|
-
"timestamp": self.timestamp.isoformat(),
|
|
84
|
-
"metadata": self.metadata or {}
|
|
85
|
-
}
|
|
86
|
-
|
|
87
|
-
@classmethod
|
|
88
|
-
def from_dict(cls, data: Dict[str, Any]) -> 'ConversationMessage':
|
|
89
|
-
data = data.copy()
|
|
90
|
-
data["timestamp"] = datetime.fromisoformat(data["timestamp"])
|
|
91
|
-
return cls(**data)
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
class ContextEngine(IStorageBackend, ICheckpointerBackend):
|
|
95
|
-
"""
|
|
96
|
-
Advanced Context and Session Management Engine.
|
|
97
|
-
|
|
98
|
-
Implements core storage interfaces to provide comprehensive session management
|
|
99
|
-
with Redis backend storage for BaseAIService and BaseServiceCheckpointer.
|
|
100
|
-
|
|
101
|
-
This implementation follows the middleware's core interface pattern,
|
|
102
|
-
enabling dependency inversion and clean architecture.
|
|
103
|
-
"""
|
|
104
|
-
|
|
105
|
-
def __init__(self, use_existing_redis: bool = True):
|
|
106
|
-
"""
|
|
107
|
-
Initialize ContextEngine.
|
|
108
|
-
|
|
109
|
-
Args:
|
|
110
|
-
use_existing_redis: Whether to use the existing Redis client from infrastructure
|
|
111
|
-
"""
|
|
112
|
-
self.use_existing_redis = use_existing_redis
|
|
113
|
-
self.redis_client: Optional[redis.Redis] = None
|
|
114
|
-
|
|
115
|
-
# Fallback to memory storage if Redis not available
|
|
116
|
-
self._memory_sessions: Dict[str, SessionMetrics] = {}
|
|
117
|
-
self._memory_conversations: Dict[str, List[ConversationMessage]] = {}
|
|
118
|
-
self._memory_contexts: Dict[str, TaskContext] = {}
|
|
119
|
-
self._memory_checkpoints: Dict[str, Dict[str, Any]] = {}
|
|
120
|
-
|
|
121
|
-
# Configuration
|
|
122
|
-
self.session_ttl = 3600 * 24 # 24 hours default TTL
|
|
123
|
-
self.conversation_limit = 1000 # Max messages per conversation
|
|
124
|
-
self.checkpoint_ttl = 3600 * 24 * 7 # 7 days for checkpoints
|
|
125
|
-
|
|
126
|
-
# Metrics
|
|
127
|
-
self._global_metrics = {
|
|
128
|
-
"total_sessions": 0,
|
|
129
|
-
"active_sessions": 0,
|
|
130
|
-
"total_messages": 0,
|
|
131
|
-
"total_checkpoints": 0
|
|
132
|
-
}
|
|
133
|
-
|
|
134
|
-
logger.info("ContextEngine initialized")
|
|
135
|
-
|
|
136
|
-
async def initialize(self) -> bool:
|
|
137
|
-
"""Initialize Redis connection and validate setup."""
|
|
138
|
-
if not REDIS_AVAILABLE:
|
|
139
|
-
logger.warning("Redis not available, using memory storage")
|
|
140
|
-
return True
|
|
141
|
-
|
|
142
|
-
try:
|
|
143
|
-
if self.use_existing_redis and get_redis_client:
|
|
144
|
-
# First ensure Redis client is initialized
|
|
145
|
-
from app.infrastructure.persistence.redis_client import initialize_redis_client
|
|
146
|
-
try:
|
|
147
|
-
await initialize_redis_client()
|
|
148
|
-
except Exception as init_error:
|
|
149
|
-
logger.warning(f"Failed to initialize Redis client: {init_error}")
|
|
150
|
-
# Fall through to direct connection attempt
|
|
151
|
-
|
|
152
|
-
# Use existing Redis client from infrastructure
|
|
153
|
-
redis_client_instance = await get_redis_client()
|
|
154
|
-
self.redis_client = await redis_client_instance.get_client()
|
|
155
|
-
|
|
156
|
-
# Test connection
|
|
157
|
-
await self.redis_client.ping()
|
|
158
|
-
logger.info("ContextEngine connected to existing Redis client successfully")
|
|
159
|
-
return True
|
|
160
|
-
else:
|
|
161
|
-
# Fallback to direct Redis connection (for testing or standalone use)
|
|
162
|
-
import os
|
|
163
|
-
redis_url = os.getenv('REDIS_URL', 'redis://localhost:6379/0')
|
|
164
|
-
self.redis_client = redis.from_url(
|
|
165
|
-
redis_url,
|
|
166
|
-
decode_responses=True,
|
|
167
|
-
socket_connect_timeout=5,
|
|
168
|
-
socket_timeout=5
|
|
169
|
-
)
|
|
170
|
-
|
|
171
|
-
# Test connection
|
|
172
|
-
await self.redis_client.ping()
|
|
173
|
-
logger.info("ContextEngine connected to Redis directly")
|
|
174
|
-
return True
|
|
175
|
-
|
|
176
|
-
except Exception as e:
|
|
177
|
-
logger.error(f"Failed to connect to Redis: {e}")
|
|
178
|
-
logger.warning("Falling back to memory storage")
|
|
179
|
-
self.redis_client = None
|
|
180
|
-
return False
|
|
181
|
-
|
|
182
|
-
async def close(self):
|
|
183
|
-
"""Close Redis connection."""
|
|
184
|
-
if self.redis_client:
|
|
185
|
-
await self.redis_client.close()
|
|
186
|
-
|
|
187
|
-
# ==================== Session Management ====================
|
|
188
|
-
|
|
189
|
-
async def create_session(
|
|
190
|
-
self,
|
|
191
|
-
session_id: str,
|
|
192
|
-
user_id: str,
|
|
193
|
-
metadata: Dict[str, Any] = None
|
|
194
|
-
) -> SessionMetrics:
|
|
195
|
-
"""Create a new session."""
|
|
196
|
-
now = datetime.utcnow()
|
|
197
|
-
session = SessionMetrics(
|
|
198
|
-
session_id=session_id,
|
|
199
|
-
user_id=user_id,
|
|
200
|
-
created_at=now,
|
|
201
|
-
last_activity=now
|
|
202
|
-
)
|
|
203
|
-
|
|
204
|
-
# Store session
|
|
205
|
-
await self._store_session(session)
|
|
206
|
-
|
|
207
|
-
# Create associated TaskContext
|
|
208
|
-
task_context = TaskContext({
|
|
209
|
-
"user_id": user_id,
|
|
210
|
-
"chat_id": session_id,
|
|
211
|
-
"metadata": metadata or {}
|
|
212
|
-
})
|
|
213
|
-
await self._store_task_context(session_id, task_context)
|
|
214
|
-
|
|
215
|
-
# Update metrics
|
|
216
|
-
self._global_metrics["total_sessions"] += 1
|
|
217
|
-
self._global_metrics["active_sessions"] += 1
|
|
218
|
-
|
|
219
|
-
logger.info(f"Created session {session_id} for user {user_id}")
|
|
220
|
-
return session
|
|
221
|
-
|
|
222
|
-
async def get_session(self, session_id: str) -> Optional[SessionMetrics]:
|
|
223
|
-
"""Get session by ID."""
|
|
224
|
-
if self.redis_client:
|
|
225
|
-
try:
|
|
226
|
-
data = await self.redis_client.hget("sessions", session_id)
|
|
227
|
-
if data:
|
|
228
|
-
return SessionMetrics.from_dict(json.loads(data))
|
|
229
|
-
except Exception as e:
|
|
230
|
-
logger.error(f"Failed to get session from Redis: {e}")
|
|
231
|
-
|
|
232
|
-
# Fallback to memory
|
|
233
|
-
return self._memory_sessions.get(session_id)
|
|
234
|
-
|
|
235
|
-
async def update_session(
|
|
236
|
-
self,
|
|
237
|
-
session_id: str,
|
|
238
|
-
updates: Dict[str, Any] = None,
|
|
239
|
-
increment_requests: bool = False,
|
|
240
|
-
add_processing_time: float = 0.0,
|
|
241
|
-
mark_error: bool = False
|
|
242
|
-
) -> bool:
|
|
243
|
-
"""Update session with activity and metrics."""
|
|
244
|
-
session = await self.get_session(session_id)
|
|
245
|
-
if not session:
|
|
246
|
-
return False
|
|
247
|
-
|
|
248
|
-
# Update activity
|
|
249
|
-
session.last_activity = datetime.utcnow()
|
|
250
|
-
|
|
251
|
-
# Update metrics
|
|
252
|
-
if increment_requests:
|
|
253
|
-
session.request_count += 1
|
|
254
|
-
if add_processing_time > 0:
|
|
255
|
-
session.total_processing_time += add_processing_time
|
|
256
|
-
if mark_error:
|
|
257
|
-
session.error_count += 1
|
|
258
|
-
|
|
259
|
-
# Apply custom updates
|
|
260
|
-
if updates:
|
|
261
|
-
for key, value in updates.items():
|
|
262
|
-
if hasattr(session, key):
|
|
263
|
-
setattr(session, key, value)
|
|
264
|
-
|
|
265
|
-
# Store updated session
|
|
266
|
-
await self._store_session(session)
|
|
267
|
-
return True
|
|
268
|
-
|
|
269
|
-
async def end_session(self, session_id: str, status: str = "completed") -> bool:
|
|
270
|
-
"""End a session and update metrics."""
|
|
271
|
-
session = await self.get_session(session_id)
|
|
272
|
-
if not session:
|
|
273
|
-
return False
|
|
274
|
-
|
|
275
|
-
session.status = status
|
|
276
|
-
session.last_activity = datetime.utcnow()
|
|
277
|
-
|
|
278
|
-
# Store final state
|
|
279
|
-
await self._store_session(session)
|
|
280
|
-
|
|
281
|
-
# Update global metrics
|
|
282
|
-
self._global_metrics["active_sessions"] = max(0, self._global_metrics["active_sessions"] - 1)
|
|
283
|
-
|
|
284
|
-
logger.info(f"Ended session {session_id} with status: {status}")
|
|
285
|
-
return True
|
|
286
|
-
|
|
287
|
-
async def _store_session(self, session: SessionMetrics):
|
|
288
|
-
"""Store session to Redis or memory."""
|
|
289
|
-
if self.redis_client:
|
|
290
|
-
try:
|
|
291
|
-
await self.redis_client.hset(
|
|
292
|
-
"sessions",
|
|
293
|
-
session.session_id,
|
|
294
|
-
json.dumps(session.to_dict())
|
|
295
|
-
)
|
|
296
|
-
await self.redis_client.expire(f"sessions", self.session_ttl)
|
|
297
|
-
return
|
|
298
|
-
except Exception as e:
|
|
299
|
-
logger.error(f"Failed to store session to Redis: {e}")
|
|
300
|
-
|
|
301
|
-
# Fallback to memory
|
|
302
|
-
self._memory_sessions[session.session_id] = session
|
|
303
|
-
|
|
304
|
-
# ==================== Conversation Management ====================
|
|
305
|
-
|
|
306
|
-
async def add_conversation_message(
    self,
    session_id: str,
    role: str,
    content: str,
    metadata: Optional[Dict[str, Any]] = None
) -> bool:
    """Add a message to a session's conversation history.

    Args:
        session_id: Session the message belongs to.
        role: Speaker role (e.g. "user", "assistant").
        content: Message text.
        metadata: Optional extra attributes attached to the message.

    Returns:
        True (storage falls back to memory and does not fail).
    """
    message = ConversationMessage(
        role=role,
        content=content,
        timestamp=datetime.utcnow(),
        # Normalize None to {} for consistency with
        # add_agent_communication_message, which already does `metadata or {}`.
        metadata=metadata or {}
    )

    # Store message
    await self._store_conversation_message(session_id, message)

    # Update session activity
    await self.update_session(session_id)

    # Update global metrics
    self._global_metrics["total_messages"] += 1

    return True
|
|
331
|
-
|
|
332
|
-
async def get_conversation_history(
    self,
    session_id: str,
    limit: int = 50
) -> List[ConversationMessage]:
    """Get the most recent conversation messages in chronological order.

    Args:
        session_id: Session whose history to fetch.
        limit: Maximum number of messages to return (most recent win).

    Returns:
        Up to ``limit`` messages, oldest first.
    """
    if self.redis_client:
        try:
            # Messages are stored with LPUSH, so index 0 is the newest.
            # Take the first `limit` entries (the newest ones) and reverse
            # to restore chronological order.  The previous
            # lrange(-limit, -1) fetched the *oldest* messages, contradicting
            # the memory fallback below.
            messages_data = await self.redis_client.lrange(
                f"conversation:{session_id}",
                0,
                limit - 1
            )
            messages = [
                ConversationMessage.from_dict(json.loads(msg))
                for msg in reversed(messages_data)
            ]
            return messages
        except Exception as e:
            logger.error(f"Failed to get conversation from Redis: {e}")

    # Fallback to memory
    messages = self._memory_conversations.get(session_id, [])
    return messages[-limit:] if limit > 0 else messages
|
|
357
|
-
|
|
358
|
-
async def _store_conversation_message(self, session_id: str, message: ConversationMessage):
|
|
359
|
-
"""Store conversation message to Redis or memory."""
|
|
360
|
-
if self.redis_client:
|
|
361
|
-
try:
|
|
362
|
-
# Add to list
|
|
363
|
-
await self.redis_client.lpush(
|
|
364
|
-
f"conversation:{session_id}",
|
|
365
|
-
json.dumps(message.to_dict())
|
|
366
|
-
)
|
|
367
|
-
# Trim to limit
|
|
368
|
-
await self.redis_client.ltrim(
|
|
369
|
-
f"conversation:{session_id}",
|
|
370
|
-
-self.conversation_limit,
|
|
371
|
-
-1
|
|
372
|
-
)
|
|
373
|
-
# Set TTL
|
|
374
|
-
await self.redis_client.expire(
|
|
375
|
-
f"conversation:{session_id}",
|
|
376
|
-
self.session_ttl
|
|
377
|
-
)
|
|
378
|
-
return
|
|
379
|
-
except Exception as e:
|
|
380
|
-
logger.error(f"Failed to store message to Redis: {e}")
|
|
381
|
-
|
|
382
|
-
# Fallback to memory
|
|
383
|
-
if session_id not in self._memory_conversations:
|
|
384
|
-
self._memory_conversations[session_id] = []
|
|
385
|
-
|
|
386
|
-
self._memory_conversations[session_id].append(message)
|
|
387
|
-
|
|
388
|
-
# Trim to limit
|
|
389
|
-
if len(self._memory_conversations[session_id]) > self.conversation_limit:
|
|
390
|
-
self._memory_conversations[session_id] = self._memory_conversations[session_id][-self.conversation_limit:]
|
|
391
|
-
|
|
392
|
-
# ==================== TaskContext Integration ====================
|
|
393
|
-
|
|
394
|
-
async def get_task_context(self, session_id: str) -> Optional[TaskContext]:
    """Look up the TaskContext associated with a session.

    Tries Redis first; on any failure (or when Redis is not configured)
    falls back to the in-process store.
    """
    if self.redis_client:
        try:
            raw = await self.redis_client.hget("task_contexts", session_id)
            if raw:
                # Rebuild the full TaskContext object from its JSON form.
                return self._reconstruct_task_context(json.loads(raw))
        except Exception as e:
            logger.error(f"Failed to get TaskContext from Redis: {e}")

    # Fallback to memory
    return self._memory_contexts.get(session_id)
|
|
408
|
-
|
|
409
|
-
async def _store_task_context(self, session_id: str, context: TaskContext):
|
|
410
|
-
"""Store TaskContext to Redis or memory."""
|
|
411
|
-
if self.redis_client:
|
|
412
|
-
try:
|
|
413
|
-
await self.redis_client.hset(
|
|
414
|
-
"task_contexts",
|
|
415
|
-
session_id,
|
|
416
|
-
json.dumps(context.to_dict())
|
|
417
|
-
)
|
|
418
|
-
await self.redis_client.expire("task_contexts", self.session_ttl)
|
|
419
|
-
return
|
|
420
|
-
except Exception as e:
|
|
421
|
-
logger.error(f"Failed to store TaskContext to Redis: {e}")
|
|
422
|
-
|
|
423
|
-
# Fallback to memory
|
|
424
|
-
self._memory_contexts[session_id] = context
|
|
425
|
-
|
|
426
|
-
def _reconstruct_task_context(self, data: Dict[str, Any]) -> TaskContext:
    """Rebuild a TaskContext (including its update history) from stored data."""
    context = TaskContext(data)

    # Restore the update history when it was persisted alongside the context.
    if "context_history" in data:
        history = []
        for entry in data["context_history"]:
            history.append(
                ContextUpdate(
                    timestamp=entry["timestamp"],
                    update_type=entry["update_type"],
                    data=entry["data"],
                    metadata=entry["metadata"],
                )
            )
        context.context_history = history

    return context
|
|
444
|
-
|
|
445
|
-
# ==================== Checkpoint Management (for BaseServiceCheckpointer) ====================
|
|
446
|
-
|
|
447
|
-
async def store_checkpoint(
    self,
    thread_id: str,
    checkpoint_id: str,
    checkpoint_data: Dict[str, Any],
    metadata: Optional[Dict[str, Any]] = None
) -> bool:
    """Store checkpoint data for LangGraph workflows.

    Args:
        thread_id: Workflow thread the checkpoint belongs to.
        checkpoint_id: Unique ID of this checkpoint.
        checkpoint_data: Serializable checkpoint payload.
        metadata: Optional extra attributes.

    Returns:
        True (falls back to in-memory storage on Redis failure).
    """
    checkpoint = {
        "checkpoint_id": checkpoint_id,
        "thread_id": thread_id,
        "data": checkpoint_data,
        "metadata": metadata or {},
        "created_at": datetime.utcnow().isoformat()
    }

    stored_in_redis = False
    if self.redis_client:
        try:
            # Store checkpoint
            await self.redis_client.hset(
                f"checkpoints:{thread_id}",
                checkpoint_id,
                json.dumps(checkpoint)
            )
            # Set TTL for the whole thread's checkpoint hash.
            await self.redis_client.expire(
                f"checkpoints:{thread_id}",
                self.checkpoint_ttl
            )
            stored_in_redis = True
        except Exception as e:
            logger.error(f"Failed to store checkpoint to Redis: {e}")

    if not stored_in_redis:
        # Fallback to memory
        self._memory_checkpoints[f"{thread_id}:{checkpoint_id}"] = checkpoint

    # Count the checkpoint regardless of backend; previously the in-memory
    # fallback path was not reflected in total_checkpoints.
    self._global_metrics["total_checkpoints"] += 1
    return True
|
|
488
|
-
|
|
489
|
-
async def get_checkpoint(
    self,
    thread_id: str,
    checkpoint_id: str = None
) -> Optional[Dict[str, Any]]:
    """Fetch a checkpoint by ID, or the most recent one when no ID is given.

    Falls back to in-memory storage when Redis is unavailable or the lookup
    fails; returns None when nothing is found.
    """
    if self.redis_client:
        try:
            if checkpoint_id:
                # Direct lookup of a specific checkpoint.
                raw = await self.redis_client.hget(f"checkpoints:{thread_id}", checkpoint_id)
                if raw:
                    return json.loads(raw)
            else:
                stored = await self.redis_client.hgetall(f"checkpoints:{thread_id}")
                if stored:
                    # Newest checkpoint wins, judged by its ISO timestamp.
                    newest = max(
                        stored.values(),
                        key=lambda raw: json.loads(raw)["created_at"]
                    )
                    return json.loads(newest)
        except Exception as e:
            logger.error(f"Failed to get checkpoint from Redis: {e}")

    # Fallback to memory
    if checkpoint_id:
        return self._memory_checkpoints.get(f"{thread_id}:{checkpoint_id}")

    prefix = f"{thread_id}:"
    candidates = {
        key: cp for key, cp in self._memory_checkpoints.items()
        if key.startswith(prefix)
    }
    if candidates:
        newest_key = max(candidates, key=lambda key: candidates[key]["created_at"])
        return candidates[newest_key]

    return None
|
|
533
|
-
|
|
534
|
-
async def list_checkpoints(
    self,
    thread_id: str,
    limit: int = 10
) -> List[Dict[str, Any]]:
    """List checkpoints for a thread, newest first, capped at `limit`."""
    if self.redis_client:
        try:
            raw_map = await self.redis_client.hgetall(f"checkpoints:{thread_id}")
            # Sort parsed checkpoints by creation time, newest first.
            parsed = sorted(
                (json.loads(raw) for raw in raw_map.values()),
                key=lambda cp: cp["created_at"],
                reverse=True,
            )
            return parsed[:limit]
        except Exception as e:
            logger.error(f"Failed to list checkpoints from Redis: {e}")

    # Fallback to memory
    prefix = f"{thread_id}:"
    found = sorted(
        (cp for key, cp in self._memory_checkpoints.items() if key.startswith(prefix)),
        key=lambda cp: cp["created_at"],
        reverse=True,
    )
    return found[:limit]
|
|
557
|
-
|
|
558
|
-
# ==================== Cleanup and Maintenance ====================
|
|
559
|
-
|
|
560
|
-
async def cleanup_expired_sessions(self, max_idle_hours: int = 24) -> int:
    """Remove sessions idle longer than `max_idle_hours` plus their data.

    Returns:
        Number of sessions cleaned up.
    """
    cutoff = datetime.utcnow() - timedelta(hours=max_idle_hours)
    cleaned = 0

    if self.redis_client:
        try:
            stored = await self.redis_client.hgetall("sessions")
            # Identify sessions whose last activity predates the cutoff.
            stale = [
                sid for sid, raw in stored.items()
                if SessionMetrics.from_dict(json.loads(raw)).last_activity < cutoff
            ]
            for sid in stale:
                await self._cleanup_session_data(sid)
                cleaned += 1
        except Exception as e:
            logger.error(f"Failed to cleanup expired sessions from Redis: {e}")
    else:
        # Memory cleanup
        stale = [
            sid for sid, record in self._memory_sessions.items()
            if record.last_activity < cutoff
        ]
        for sid in stale:
            await self._cleanup_session_data(sid)
            cleaned += 1

    if cleaned > 0:
        logger.info(f"Cleaned up {cleaned} expired sessions")

    return cleaned
|
|
598
|
-
|
|
599
|
-
async def _cleanup_session_data(self, session_id: str):
|
|
600
|
-
"""Clean up all data associated with a session."""
|
|
601
|
-
if self.redis_client:
|
|
602
|
-
try:
|
|
603
|
-
# Remove session
|
|
604
|
-
await self.redis_client.hdel("sessions", session_id)
|
|
605
|
-
# Remove conversation
|
|
606
|
-
await self.redis_client.delete(f"conversation:{session_id}")
|
|
607
|
-
# Remove task context
|
|
608
|
-
await self.redis_client.hdel("task_contexts", session_id)
|
|
609
|
-
# Remove checkpoints
|
|
610
|
-
await self.redis_client.delete(f"checkpoints:{session_id}")
|
|
611
|
-
except Exception as e:
|
|
612
|
-
logger.error(f"Failed to cleanup session data from Redis: {e}")
|
|
613
|
-
else:
|
|
614
|
-
# Memory cleanup
|
|
615
|
-
self._memory_sessions.pop(session_id, None)
|
|
616
|
-
self._memory_conversations.pop(session_id, None)
|
|
617
|
-
self._memory_contexts.pop(session_id, None)
|
|
618
|
-
|
|
619
|
-
# Remove checkpoints
|
|
620
|
-
checkpoint_keys = [
|
|
621
|
-
k for k in self._memory_checkpoints.keys()
|
|
622
|
-
if k.startswith(f"{session_id}:")
|
|
623
|
-
]
|
|
624
|
-
for key in checkpoint_keys:
|
|
625
|
-
self._memory_checkpoints.pop(key, None)
|
|
626
|
-
|
|
627
|
-
# ==================== Metrics and Health ====================
|
|
628
|
-
|
|
629
|
-
async def get_metrics(self) -> Dict[str, Any]:
    """Get comprehensive metrics.

    Recomputes the active-session count from the backing store and merges
    it over the running global counters.
    """
    active = 0

    if self.redis_client:
        try:
            stored = await self.redis_client.hgetall("sessions")
            active = sum(
                1 for raw in stored.values()
                if json.loads(raw)["status"] == "active"
            )
        except Exception as e:
            logger.error(f"Failed to get metrics from Redis: {e}")
    else:
        active = sum(
            1 for sess in self._memory_sessions.values()
            if sess.status == "active"
        )

    return {
        **self._global_metrics,
        "active_sessions": active,
        "storage_backend": "redis" if self.redis_client else "memory",
        "redis_connected": self.redis_client is not None,
        "timestamp": datetime.utcnow().isoformat()
    }
|
|
655
|
-
|
|
656
|
-
async def health_check(self) -> Dict[str, Any]:
    """Perform health check.

    Returns a status report: 'healthy', 'degraded' (Redis unreachable) or
    'warning' (in-memory store growing large).
    """
    report = {
        "status": "healthy",
        "storage_backend": "redis" if self.redis_client else "memory",
        "redis_connected": False,
        "issues": []
    }

    if self.redis_client:
        # Verify the Redis connection is actually alive.
        try:
            await self.redis_client.ping()
            report["redis_connected"] = True
        except Exception as e:
            report["issues"].append(f"Redis connection failed: {e}")
            report["status"] = "degraded"
    else:
        # Memory backend: warn when the in-process stores grow large.
        item_count = (
            len(self._memory_sessions)
            + len(self._memory_conversations)
            + len(self._memory_contexts)
            + len(self._memory_checkpoints)
        )
        if item_count > 10000:  # Arbitrary threshold
            report["issues"].append(f"High memory usage: {item_count} items")
            report["status"] = "warning"

    return report
|
|
687
|
-
|
|
688
|
-
# ==================== ICheckpointerBackend Implementation ====================
|
|
689
|
-
|
|
690
|
-
async def put_checkpoint(
    self,
    thread_id: str,
    checkpoint_id: str,
    checkpoint_data: Dict[str, Any],
    metadata: Dict[str, Any] = None
) -> bool:
    """Store a checkpoint for LangGraph workflows (ICheckpointerBackend interface).

    Thin adapter that delegates to store_checkpoint().
    """
    result = await self.store_checkpoint(thread_id, checkpoint_id, checkpoint_data, metadata)
    return result
|
|
699
|
-
|
|
700
|
-
async def put_writes(
    self,
    thread_id: str,
    checkpoint_id: str,
    task_id: str,
    writes_data: List[tuple]
) -> bool:
    """Store intermediate writes for a checkpoint (ICheckpointerBackend interface)."""
    record = {
        "thread_id": thread_id,
        "checkpoint_id": checkpoint_id,
        "task_id": task_id,
        "writes": writes_data,
        "created_at": datetime.utcnow().isoformat()
    }

    if self.redis_client:
        try:
            hash_key = f"checkpoint_writes:{thread_id}"
            await self.redis_client.hset(
                hash_key,
                f"{checkpoint_id}:{task_id}",
                json.dumps(record)
            )
            await self.redis_client.expire(hash_key, self.checkpoint_ttl)
            return True
        except Exception as e:
            logger.error(f"Failed to store writes to Redis: {e}")

    # Fallback to memory
    self._memory_checkpoints[f"writes:{thread_id}:{checkpoint_id}:{task_id}"] = record
    return True
|
|
735
|
-
|
|
736
|
-
async def get_writes(
    self,
    thread_id: str,
    checkpoint_id: str
) -> List[tuple]:
    """Get intermediate writes for a checkpoint (ICheckpointerBackend interface).

    Returns:
        All pending writes for the checkpoint as a list of tuples.  JSON
        round-trips tuples as lists, so entries coming back from Redis are
        normalized to tuples to honor the declared return type.
    """
    if self.redis_client:
        try:
            writes_data = await self.redis_client.hgetall(f"checkpoint_writes:{thread_id}")
            writes = []
            for key, data in writes_data.items():
                if key.startswith(f"{checkpoint_id}:"):
                    payload = json.loads(data)
                    writes.extend(tuple(w) for w in payload.get("writes", []))
            return writes
        except Exception as e:
            logger.error(f"Failed to get writes from Redis: {e}")

    # Fallback to memory
    writes = []
    writes_prefix = f"writes:{thread_id}:{checkpoint_id}:"
    for key, payload in self._memory_checkpoints.items():
        if key.startswith(writes_prefix):
            writes.extend(tuple(w) for w in payload.get("writes", []))
    return writes
|
|
761
|
-
|
|
762
|
-
# ==================== ITaskContextStorage Implementation ====================
|
|
763
|
-
|
|
764
|
-
async def store_task_context(self, session_id: str, context: Any) -> bool:
    """Store TaskContext for a session (ITaskContextStorage interface).

    Returns:
        True on completion.  The underlying `_store_task_context` helper
        returns None, so the previous `return await ...` always yielded
        None despite the declared bool return type.
    """
    await self._store_task_context(session_id, context)
    return True
|
|
767
|
-
|
|
768
|
-
# ==================== Agent Communication and Conversation Isolation ====================
|
|
769
|
-
|
|
770
|
-
async def create_conversation_session(
    self,
    session_id: str,
    participants: List[Dict[str, Any]],
    session_type: str,
    metadata: Dict[str, Any] = None
) -> str:
    """
    Create an isolated conversation session between participants.

    Args:
        session_id: Base session ID
        participants: List of participant dictionaries with id, type, role
        session_type: Type of conversation ('user_to_mc', 'mc_to_agent', 'agent_to_agent', 'user_to_agent')
        metadata: Additional session metadata

    Returns:
        Generated session key for conversation isolation
    """
    from .conversation_models import ConversationSession, ConversationParticipant

    # Normalize raw participant dicts into typed participant records.
    members = []
    for entry in participants:
        members.append(
            ConversationParticipant(
                participant_id=entry.get('id'),
                participant_type=entry.get('type'),
                participant_role=entry.get('role'),
                metadata=entry.get('metadata', {}),
            )
        )

    conversation_session = ConversationSession(
        session_id=session_id,
        participants=members,
        session_type=session_type,
        created_at=datetime.utcnow(),
        last_activity=datetime.utcnow(),
        metadata=metadata or {},
    )

    # The session key uniquely isolates this conversation's history.
    session_key = conversation_session.generate_session_key()

    # Persist the session metadata alongside the generated key.
    await self._store_conversation_session(session_key, conversation_session)

    logger.info(f"Created conversation session: {session_key} (type: {session_type})")
    return session_key
|
|
820
|
-
|
|
821
|
-
async def add_agent_communication_message(
    self,
    session_key: str,
    sender_id: str,
    sender_type: str,
    sender_role: Optional[str],
    recipient_id: str,
    recipient_type: str,
    recipient_role: Optional[str],
    content: str,
    message_type: str = "communication",
    metadata: Dict[str, Any] = None
) -> bool:
    """
    Add a message to an agent communication session.

    Args:
        session_key: Isolated session key
        sender_id: ID of the sender
        sender_type: Type of sender ('master_controller', 'agent', 'user')
        sender_role: Role of sender (for agents)
        recipient_id: ID of the recipient
        recipient_type: Type of recipient
        recipient_role: Role of recipient (for agents)
        content: Message content
        message_type: Type of message
        metadata: Additional message metadata

    Returns:
        Success status
    """
    from .conversation_models import AgentCommunicationMessage

    # Build the typed agent-to-agent message record.
    message = AgentCommunicationMessage(
        message_id=str(uuid.uuid4()),
        session_key=session_key,
        sender_id=sender_id,
        sender_type=sender_type,
        sender_role=sender_role,
        recipient_id=recipient_id,
        recipient_type=recipient_type,
        recipient_role=recipient_role,
        content=content,
        message_type=message_type,
        timestamp=datetime.utcnow(),
        metadata=metadata or {}
    )

    # Reuse the ordinary conversation pipeline for persistence.
    as_conv = message.to_conversation_message_dict()
    await self.add_conversation_message(
        session_id=session_key,
        role=as_conv["role"],
        content=as_conv["content"],
        metadata=as_conv["metadata"]
    )

    # Bump the conversation session's last-activity timestamp.
    await self._update_conversation_session_activity(session_key)

    logger.debug(f"Added agent communication message to session {session_key}")
    return True
|
|
886
|
-
|
|
887
|
-
async def get_agent_conversation_history(
    self,
    session_key: str,
    limit: int = 50,
    message_types: Optional[List[str]] = None
) -> List[Dict[str, Any]]:
    """
    Get conversation history for an agent communication session.

    Args:
        session_key: Isolated session key
        limit: Maximum number of messages to retrieve
        message_types: Filter by message types

    Returns:
        List of conversation messages
    """
    messages = await self.get_conversation_history(session_key, limit)

    def _as_dict(msg):
        # Accept both ConversationMessage objects and plain dicts.
        return msg.to_dict() if hasattr(msg, 'to_dict') else msg

    if message_types:
        selected = []
        for msg in messages:
            msg_dict = _as_dict(msg)
            msg_type = msg_dict.get('metadata', {}).get('message_type', 'communication')
            if msg_type in message_types:
                selected.append(msg_dict)
        return selected

    return [_as_dict(msg) for msg in messages]
|
|
926
|
-
|
|
927
|
-
async def _store_conversation_session(self, session_key: str, conversation_session) -> None:
|
|
928
|
-
"""Store conversation session metadata."""
|
|
929
|
-
session_data = {
|
|
930
|
-
"session_id": conversation_session.session_id,
|
|
931
|
-
"participants": [
|
|
932
|
-
{
|
|
933
|
-
"participant_id": p.participant_id,
|
|
934
|
-
"participant_type": p.participant_type,
|
|
935
|
-
"participant_role": p.participant_role,
|
|
936
|
-
"metadata": p.metadata
|
|
937
|
-
}
|
|
938
|
-
for p in conversation_session.participants
|
|
939
|
-
],
|
|
940
|
-
"session_type": conversation_session.session_type,
|
|
941
|
-
"created_at": conversation_session.created_at.isoformat(),
|
|
942
|
-
"last_activity": conversation_session.last_activity.isoformat(),
|
|
943
|
-
"metadata": conversation_session.metadata
|
|
944
|
-
}
|
|
945
|
-
|
|
946
|
-
if self.redis_client:
|
|
947
|
-
try:
|
|
948
|
-
await self.redis_client.hset(
|
|
949
|
-
"conversation_sessions",
|
|
950
|
-
session_key,
|
|
951
|
-
json.dumps(session_data)
|
|
952
|
-
)
|
|
953
|
-
await self.redis_client.expire("conversation_sessions", self.session_ttl)
|
|
954
|
-
return
|
|
955
|
-
except Exception as e:
|
|
956
|
-
logger.error(f"Failed to store conversation session to Redis: {e}")
|
|
957
|
-
|
|
958
|
-
# Fallback to memory (extend memory storage)
|
|
959
|
-
if not hasattr(self, '_memory_conversation_sessions'):
|
|
960
|
-
self._memory_conversation_sessions = {}
|
|
961
|
-
self._memory_conversation_sessions[session_key] = session_data
|
|
962
|
-
|
|
963
|
-
async def _update_conversation_session_activity(self, session_key: str) -> None:
|
|
964
|
-
"""Update last activity timestamp for a conversation session."""
|
|
965
|
-
if self.redis_client:
|
|
966
|
-
try:
|
|
967
|
-
session_data = await self.redis_client.hget("conversation_sessions", session_key)
|
|
968
|
-
if session_data:
|
|
969
|
-
session_dict = json.loads(session_data)
|
|
970
|
-
session_dict["last_activity"] = datetime.utcnow().isoformat()
|
|
971
|
-
await self.redis_client.hset(
|
|
972
|
-
"conversation_sessions",
|
|
973
|
-
session_key,
|
|
974
|
-
json.dumps(session_dict)
|
|
975
|
-
)
|
|
976
|
-
return
|
|
977
|
-
except Exception as e:
|
|
978
|
-
logger.error(f"Failed to update conversation session activity in Redis: {e}")
|
|
979
|
-
|
|
980
|
-
# Fallback to memory
|
|
981
|
-
if hasattr(self, '_memory_conversation_sessions') and session_key in self._memory_conversation_sessions:
|
|
982
|
-
self._memory_conversation_sessions[session_key]["last_activity"] = datetime.utcnow().isoformat()
|