aiecs-1.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of aiecs might be problematic.

Files changed (90)
  1. aiecs/__init__.py +75 -0
  2. aiecs/__main__.py +41 -0
  3. aiecs/aiecs_client.py +295 -0
  4. aiecs/application/__init__.py +10 -0
  5. aiecs/application/executors/__init__.py +10 -0
  6. aiecs/application/executors/operation_executor.py +341 -0
  7. aiecs/config/__init__.py +15 -0
  8. aiecs/config/config.py +117 -0
  9. aiecs/config/registry.py +19 -0
  10. aiecs/core/__init__.py +46 -0
  11. aiecs/core/interface/__init__.py +34 -0
  12. aiecs/core/interface/execution_interface.py +150 -0
  13. aiecs/core/interface/storage_interface.py +214 -0
  14. aiecs/domain/__init__.py +20 -0
  15. aiecs/domain/context/__init__.py +28 -0
  16. aiecs/domain/context/content_engine.py +982 -0
  17. aiecs/domain/context/conversation_models.py +306 -0
  18. aiecs/domain/execution/__init__.py +12 -0
  19. aiecs/domain/execution/model.py +49 -0
  20. aiecs/domain/task/__init__.py +13 -0
  21. aiecs/domain/task/dsl_processor.py +460 -0
  22. aiecs/domain/task/model.py +50 -0
  23. aiecs/domain/task/task_context.py +257 -0
  24. aiecs/infrastructure/__init__.py +26 -0
  25. aiecs/infrastructure/messaging/__init__.py +13 -0
  26. aiecs/infrastructure/messaging/celery_task_manager.py +341 -0
  27. aiecs/infrastructure/messaging/websocket_manager.py +289 -0
  28. aiecs/infrastructure/monitoring/__init__.py +12 -0
  29. aiecs/infrastructure/monitoring/executor_metrics.py +138 -0
  30. aiecs/infrastructure/monitoring/structured_logger.py +50 -0
  31. aiecs/infrastructure/monitoring/tracing_manager.py +376 -0
  32. aiecs/infrastructure/persistence/__init__.py +12 -0
  33. aiecs/infrastructure/persistence/database_manager.py +286 -0
  34. aiecs/infrastructure/persistence/file_storage.py +671 -0
  35. aiecs/infrastructure/persistence/redis_client.py +162 -0
  36. aiecs/llm/__init__.py +54 -0
  37. aiecs/llm/base_client.py +99 -0
  38. aiecs/llm/client_factory.py +339 -0
  39. aiecs/llm/custom_callbacks.py +228 -0
  40. aiecs/llm/openai_client.py +125 -0
  41. aiecs/llm/vertex_client.py +186 -0
  42. aiecs/llm/xai_client.py +184 -0
  43. aiecs/main.py +351 -0
  44. aiecs/scripts/DEPENDENCY_SYSTEM_SUMMARY.md +241 -0
  45. aiecs/scripts/README_DEPENDENCY_CHECKER.md +309 -0
  46. aiecs/scripts/README_WEASEL_PATCH.md +126 -0
  47. aiecs/scripts/__init__.py +3 -0
  48. aiecs/scripts/dependency_checker.py +825 -0
  49. aiecs/scripts/dependency_fixer.py +348 -0
  50. aiecs/scripts/download_nlp_data.py +348 -0
  51. aiecs/scripts/fix_weasel_validator.py +121 -0
  52. aiecs/scripts/fix_weasel_validator.sh +82 -0
  53. aiecs/scripts/patch_weasel_library.sh +188 -0
  54. aiecs/scripts/quick_dependency_check.py +269 -0
  55. aiecs/scripts/run_weasel_patch.sh +41 -0
  56. aiecs/scripts/setup_nlp_data.sh +217 -0
  57. aiecs/tasks/__init__.py +2 -0
  58. aiecs/tasks/worker.py +111 -0
  59. aiecs/tools/__init__.py +196 -0
  60. aiecs/tools/base_tool.py +202 -0
  61. aiecs/tools/langchain_adapter.py +361 -0
  62. aiecs/tools/task_tools/__init__.py +82 -0
  63. aiecs/tools/task_tools/chart_tool.py +704 -0
  64. aiecs/tools/task_tools/classfire_tool.py +901 -0
  65. aiecs/tools/task_tools/image_tool.py +397 -0
  66. aiecs/tools/task_tools/office_tool.py +600 -0
  67. aiecs/tools/task_tools/pandas_tool.py +565 -0
  68. aiecs/tools/task_tools/report_tool.py +499 -0
  69. aiecs/tools/task_tools/research_tool.py +363 -0
  70. aiecs/tools/task_tools/scraper_tool.py +548 -0
  71. aiecs/tools/task_tools/search_api.py +7 -0
  72. aiecs/tools/task_tools/stats_tool.py +513 -0
  73. aiecs/tools/temp_file_manager.py +126 -0
  74. aiecs/tools/tool_executor/__init__.py +35 -0
  75. aiecs/tools/tool_executor/tool_executor.py +518 -0
  76. aiecs/utils/LLM_output_structor.py +409 -0
  77. aiecs/utils/__init__.py +23 -0
  78. aiecs/utils/base_callback.py +50 -0
  79. aiecs/utils/execution_utils.py +158 -0
  80. aiecs/utils/logging.py +1 -0
  81. aiecs/utils/prompt_loader.py +13 -0
  82. aiecs/utils/token_usage_repository.py +279 -0
  83. aiecs/ws/__init__.py +0 -0
  84. aiecs/ws/socket_server.py +41 -0
  85. aiecs-1.0.0.dist-info/METADATA +610 -0
  86. aiecs-1.0.0.dist-info/RECORD +90 -0
  87. aiecs-1.0.0.dist-info/WHEEL +5 -0
  88. aiecs-1.0.0.dist-info/entry_points.txt +7 -0
  89. aiecs-1.0.0.dist-info/licenses/LICENSE +225 -0
  90. aiecs-1.0.0.dist-info/top_level.txt +1 -0
aiecs/application/executors/operation_executor.py ADDED
@@ -0,0 +1,341 @@
+ import asyncio
+ import logging
+ from typing import Dict, List, Any, Optional
+ from aiecs.tools import get_tool
+ from aiecs.tools.tool_executor import ToolExecutor
+ from aiecs.utils.execution_utils import ExecutionUtils
+ from aiecs.domain.execution.model import TaskStepResult, TaskStatus, ErrorCode
+
+ logger = logging.getLogger(__name__)
+
+
+ class OperationExecutor:
+     """
+     Core logic for handling operation execution.
+     """
+
+     def __init__(self, tool_executor: ToolExecutor, execution_utils: ExecutionUtils, config: Dict[str, Any]):
+         self.tool_executor = tool_executor
+         self.execution_utils = execution_utils
+         self.config = config
+         self._tool_instances = {}
+         self.semaphore = asyncio.Semaphore(config.get('rate_limit_requests_per_second', 5))
+
+     def _filter_tool_params(self, params: Dict[str, Any]) -> Dict[str, Any]:
+         """
+         Filter out system-related parameters, keeping only the parameters needed by tool methods.
+         """
+         # System-related parameters that should not be passed to tool methods
+         system_params = {'user_id', 'task_id', 'op'}
+         return {k: v for k, v in params.items() if k not in system_params}
+
+     def _filter_tool_call_params(self, params: Dict[str, Any]) -> Dict[str, Any]:
+         """
+         Filter out system-related parameters in tool calls, but keep the 'op' parameter (needed by BaseTool.run()).
+         """
+         # Only filter user and task IDs; keep 'op' for BaseTool.run() to use
+         system_params = {'user_id', 'task_id'}
+         return {k: v for k, v in params.items() if k not in system_params}
+
+     async def execute_operation(self, operation_spec: str, params: Dict[str, Any]) -> Any:
+         """
+         Execute a single operation ('tool_name.operation_name').
+         """
+         if "." not in operation_spec:
+             raise ValueError(f"Invalid operation spec: {operation_spec}, expected 'tool_name.operation_name'")
+
+         tool_name, operation_name = operation_spec.split(".", 1)
+
+         # Get or create tool instance
+         if tool_name not in self._tool_instances:
+             self._tool_instances[tool_name] = get_tool(tool_name)
+
+         tool = self._tool_instances[tool_name]
+         if not hasattr(tool, operation_name):
+             raise ValueError(f"Operation '{operation_name}' not found in tool '{tool_name}'")
+
+         # Filter out system-related parameters
+         tool_params = self._filter_tool_params(params)
+
+         # Use ToolExecutor to execute the operation
+         operation = getattr(tool, operation_name)
+         if asyncio.iscoroutinefunction(operation):
+             return await self.tool_executor.execute_async(tool, operation_name, **tool_params)
+         else:
+             return self.tool_executor.execute(tool, operation_name, **tool_params)
+
+     async def batch_execute_operations(self, operations: List[Dict[str, Any]]) -> List[Any]:
+         """
+         Batch-execute operations with rate limiting.
+         """
+         results = []
+         batch_size = self.config.get('batch_size', 10)
+         rate_limit = self.config.get('rate_limit_requests_per_second', 5)
+
+         for i in range(0, len(operations), batch_size):
+             batch = operations[i:i + batch_size]
+             batch_results = await asyncio.gather(
+                 *[self.execute_operation(op["operation"], op.get("params", {})) for op in batch],
+                 return_exceptions=True
+             )
+             results.extend(batch_results)
+             await asyncio.sleep(1.0 / rate_limit)
+
+         return results
+
+     async def execute_operations_sequence(self, operations: List[Dict[str, Any]], user_id: str, task_id: str,
+                                           stop_on_failure: bool = False, save_callback=None) -> List[TaskStepResult]:
+         """
+         Execute an operations sequence sequentially, optionally stopping on the first failure.
+         """
+         results = []
+
+         for step, op_info in enumerate(operations):
+             operation_spec = op_info.get("operation")
+             params = op_info.get("params", {})
+
+             # Process parameter references
+             processed_params = self._process_param_references(params, results)
+
+             try:
+                 result = await self.execute_operation(operation_spec, processed_params)
+                 step_result = TaskStepResult(
+                     step=operation_spec,
+                     result=result,
+                     completed=True,
+                     message=f"Completed operation {operation_spec}",
+                     status=TaskStatus.COMPLETED.value
+                 )
+             except Exception as e:
+                 step_result = TaskStepResult(
+                     step=operation_spec,
+                     result=None,
+                     completed=False,
+                     message=f"Failed to execute {operation_spec}",
+                     status=TaskStatus.FAILED.value,
+                     error_code=ErrorCode.EXECUTION_ERROR.value,
+                     error_message=str(e)
+                 )
+
+             # Stop early only when the step actually failed
+             if stop_on_failure and not step_result.completed:
+                 if save_callback:
+                     await save_callback(user_id, task_id, step, step_result)
+                 results.append(step_result)
+                 break
+
+             # Save step result
+             if save_callback:
+                 await save_callback(user_id, task_id, step, step_result)
+
+             results.append(step_result)
+
+         return results
+
+     def _process_param_references(self, params: Dict[str, Any], results: List[TaskStepResult]) -> Dict[str, Any]:
+         """
+         Process parameter references such as $result[0] in operation parameters.
+         """
+         processed = {}
+
+         for name, value in params.items():
+             if isinstance(value, str) and value.startswith('$result['):
+                 try:
+                     ref_parts = value[8:].split(']', 1)
+                     idx = int(ref_parts[0])
+
+                     if idx >= len(results):
+                         raise ValueError(f"Referenced result index {idx} out of range")
+
+                     ref_value = results[idx].result
+
+                     # Handle nested attribute access, such as $result[0].data.field
+                     if len(ref_parts) > 1 and ref_parts[1].startswith('.'):
+                         for attr in ref_parts[1][1:].split('.'):
+                             if attr:
+                                 if isinstance(ref_value, dict):
+                                     ref_value = ref_value.get(attr)
+                                 else:
+                                     ref_value = getattr(ref_value, attr)
+
+                     processed[name] = ref_value
+                 except Exception as e:
+                     logger.error(f"Error processing parameter reference {value}: {e}")
+                     processed[name] = value
+             else:
+                 processed[name] = value
+
+         return processed
+
+     async def batch_tool_calls(self, tool_calls: List[Dict], tool_executor_func=None) -> List[Any]:
+         """
+         Execute batched tool calls with rate limiting.
+         """
+         results = []
+         batch_size = self.config.get('batch_size', 10)
+         rate_limit = self.config.get('rate_limit_requests_per_second', 5)
+
+         for i in range(0, len(tool_calls), batch_size):
+             batch = tool_calls[i:i + batch_size]
+             batch_results = await asyncio.gather(
+                 *[self._execute_tool_call(call, tool_executor_func) for call in batch],
+                 return_exceptions=True
+             )
+             results.extend(batch_results)
+             await asyncio.sleep(1.0 / rate_limit)
+
+         return results
+
+     async def _execute_tool_call(self, call: Dict, tool_executor_func=None) -> Any:
+         """
+         Execute a single tool call with rate limiting.
+         """
+         async with self.semaphore:
+             tool_name = call.get("tool")
+             params = call.get("params", {})
+
+             # Use context-aware caching
+             if self.config.get('enable_cache', True):
+                 user_id = params.get("user_id", "anonymous")
+                 task_id = params.get("task_id", "none")
+                 cache_key = self.execution_utils.generate_cache_key("tool_call", user_id, task_id, (), params)
+                 cached_result = self.execution_utils.get_from_cache(cache_key)
+                 if cached_result is not None:
+                     return cached_result
+
+             # Execute the tool call
+             if tool_executor_func:
+                 # Use the provided tool executor function
+                 result = await tool_executor_func(tool_name, params)
+             else:
+                 # Use the internal ToolExecutor
+                 if tool_name not in self._tool_instances:
+                     self._tool_instances[tool_name] = get_tool(tool_name)
+                 tool = self._tool_instances[tool_name]
+
+                 # Filter out system-related parameters (but keep 'op')
+                 tool_params = self._filter_tool_call_params(params)
+                 # Execute through the BaseTool.run method with the filtered parameters
+                 result = await self.tool_executor.execute_async(tool, "run", **tool_params)
+
+             # Cache the result
+             if self.config.get('enable_cache', True):
+                 self.execution_utils.add_to_cache(cache_key, result)
+
+             return result
+
+     def extract_tool_calls(self, description: str, input_data: Dict, context: Dict) -> List[Dict]:
+         """
+         Extract tool calls of the form {{tool_name(param="value")}} from a description.
+         """
+         import re
+
+         tool_calls = []
+         tool_pattern = r'\{\{(\w+)\((.*?)\)\}\}'
+         matches = re.finditer(tool_pattern, description)
+
+         for match in matches:
+             tool_name = match.group(1)
+             params_str = match.group(2)
+             params = {}
+
+             # Parse parameters
+             param_pattern = r'(\w+)=["\'](.*?)["\']'
+             param_matches = re.finditer(param_pattern, params_str)
+
+             for param_match in param_matches:
+                 param_name = param_match.group(1)
+                 param_value = param_match.group(2)
+
+                 # Resolve input data and context references
+                 if param_value.startswith("input."):
+                     key = param_value.split(".", 1)[1]
+                     param_value = input_data.get(key, "")
+                 elif param_value.startswith("context."):
+                     key = param_value.split(".", 1)[1]
+                     param_value = context.get(key, "")
+
+                 params[param_name] = param_value
+
+             tool_calls.append({
+                 "tool": tool_name,
+                 "params": params
+             })
+
+         return tool_calls
+
+     async def execute_parallel_operations(self, operations: List[Dict[str, Any]]) -> List[TaskStepResult]:
+         """
+         Execute multiple operations in parallel.
+         """
+         tasks = []
+
+         for i, op_info in enumerate(operations):
+             operation_spec = op_info.get("operation")
+             params = op_info.get("params", {})
+
+             async def execute_single_op(spec, p, index):
+                 try:
+                     result = await self.execute_operation(spec, p)
+                     return TaskStepResult(
+                         step=f"parallel_{index}_{spec}",
+                         result=result,
+                         completed=True,
+                         message=f"Completed parallel operation {spec}",
+                         status=TaskStatus.COMPLETED.value
+                     )
+                 except Exception as e:
+                     return TaskStepResult(
+                         step=f"parallel_{index}_{spec}",
+                         result=None,
+                         completed=False,
+                         message=f"Failed parallel operation {spec}",
+                         status=TaskStatus.FAILED.value,
+                         error_code=ErrorCode.EXECUTION_ERROR.value,
+                         error_message=str(e)
+                     )
+
+             tasks.append(execute_single_op(operation_spec, params, i))
+
+         results = await asyncio.gather(*tasks, return_exceptions=True)
+
+         # Convert any raised exceptions into failed step results
+         processed_results = []
+         for i, result in enumerate(results):
+             if isinstance(result, Exception):
+                 processed_results.append(TaskStepResult(
+                     step=f"parallel_{i}_error",
+                     result=None,
+                     completed=False,
+                     message="Parallel operation failed with exception",
+                     status=TaskStatus.FAILED.value,
+                     error_code=ErrorCode.EXECUTION_ERROR.value,
+                     error_message=str(result)
+                 ))
+             else:
+                 processed_results.append(result)
+
+         return processed_results
+
+     def get_tool_instance(self, tool_name: str):
+         """Get a cached tool instance, creating it if necessary."""
+         if tool_name not in self._tool_instances:
+             self._tool_instances[tool_name] = get_tool(tool_name)
+         return self._tool_instances[tool_name]
+
+     def clear_tool_cache(self):
+         """Clear the tool instance cache."""
+         self._tool_instances.clear()
+         logger.info("Tool instance cache cleared")
+
+     def get_stats(self) -> Dict[str, Any]:
+         """Get operation executor statistics."""
+         return {
+             "cached_tools": len(self._tool_instances),
+             "tool_names": list(self._tool_instances.keys()),
+             "semaphore_value": self.semaphore._value,
+             "config": {
+                 "batch_size": self.config.get('batch_size', 10),
+                 "rate_limit": self.config.get('rate_limit_requests_per_second', 5),
+                 "enable_cache": self.config.get('enable_cache', True)
+             }
+         }
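The diff above shows the executor's full control flow: spec parsing, tool-instance caching, rate-limited batching, `$result[i]` back-references, and `{{tool(param="value")}}` extraction. As a quick orientation, here is a minimal usage sketch. The no-argument `ToolExecutor()` / `ExecutionUtils()` construction and the `pandas.read_csv` / `stats.describe` / `research` operation names are illustrative assumptions, not verified against the package.

import asyncio
from aiecs.application.executors.operation_executor import OperationExecutor
from aiecs.tools.tool_executor import ToolExecutor
from aiecs.utils.execution_utils import ExecutionUtils

async def main():
    # Assumed no-arg construction; real wiring may need extra configuration.
    executor = OperationExecutor(
        tool_executor=ToolExecutor(),
        execution_utils=ExecutionUtils(),
        config={"batch_size": 10, "rate_limit_requests_per_second": 5, "enable_cache": True},
    )

    # Sequential pipeline: step 1's output feeds step 2 through a $result reference.
    steps = [
        {"operation": "pandas.read_csv", "params": {"path": "data.csv"}},    # hypothetical op
        {"operation": "stats.describe", "params": {"data": "$result[0]"}},   # hypothetical op
    ]
    results = await executor.execute_operations_sequence(
        steps, user_id="u1", task_id="t1", stop_on_failure=True
    )
    for r in results:
        print(r.step, r.status)

    # Template-style tool calls embedded in free text are parsed like this:
    calls = executor.extract_tool_calls(
        'Summarize with {{research(query="input.topic")}}',
        input_data={"topic": "climate"},
        context={},
    )
    # -> [{"tool": "research", "params": {"query": "climate"}}]

asyncio.run(main())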
aiecs/config/__init__.py ADDED
@@ -0,0 +1,15 @@
+ """Configuration module
+
+ Contains application configuration and the service registry.
+ """
+
+ from .config import Settings, get_settings
+ from .registry import register_ai_service, get_ai_service, AI_SERVICE_REGISTRY
+
+ __all__ = [
+     "Settings",
+     "get_settings",
+     "register_ai_service",
+     "get_ai_service",
+     "AI_SERVICE_REGISTRY",
+ ]
aiecs/config/config.py ADDED
@@ -0,0 +1,117 @@
+ from pydantic import Field, ConfigDict
+ from pydantic_settings import BaseSettings
+ from functools import lru_cache
+
+ class Settings(BaseSettings):
+     # LLM provider configuration (optional until used)
+     openai_api_key: str = Field(default="", alias="OPENAI_API_KEY")
+     vertex_project_id: str = Field(default="", alias="VERTEX_PROJECT_ID")
+     vertex_location: str = Field(default="us-central1", alias="VERTEX_LOCATION")
+     google_application_credentials: str = Field(default="", alias="GOOGLE_APPLICATION_CREDENTIALS")
+     xai_api_key: str = Field(default="", alias="XAI_API_KEY")
+     grok_api_key: str = Field(default="", alias="GROK_API_KEY")  # Backward compatibility
+
+     # Infrastructure configuration (with sensible defaults)
+     celery_broker_url: str = Field(default="redis://localhost:6379/0", alias="CELERY_BROKER_URL")
+     cors_allowed_origins: str = Field(default="http://localhost:3000,http://express-gateway:3001", alias="CORS_ALLOWED_ORIGINS")
+
+     # PostgreSQL database configuration (with defaults)
+     db_host: str = Field(default="localhost", alias="DB_HOST")
+     db_user: str = Field(default="postgres", alias="DB_USER")
+     db_password: str = Field(default="", alias="DB_PASSWORD")
+     db_name: str = Field(default="aiecs", alias="DB_NAME")
+     db_port: int = Field(default=5432, alias="DB_PORT")
+     postgres_url: str = Field(default="", alias="POSTGRES_URL")
+
+     # Google Cloud Storage configuration (optional)
+     google_cloud_project_id: str = Field(default="", alias="GOOGLE_CLOUD_PROJECT_ID")
+     google_cloud_storage_bucket: str = Field(default="", alias="GOOGLE_CLOUD_STORAGE_BUCKET")
+
+     # Qdrant configuration (legacy)
+     qdrant_url: str = Field("http://qdrant:6333", alias="QDRANT_URL")
+     qdrant_collection: str = Field("documents", alias="QDRANT_COLLECTION")
+
+     # Vertex AI Vector Search configuration
+     vertex_index_id: str | None = Field(default=None, alias="VERTEX_INDEX_ID")
+     vertex_endpoint_id: str | None = Field(default=None, alias="VERTEX_ENDPOINT_ID")
+     vertex_deployed_index_id: str | None = Field(default=None, alias="VERTEX_DEPLOYED_INDEX_ID")
+
+     # Vector store backend selection: "vertex" by default (Qdrant is deprecated)
+     vector_store_backend: str = Field("vertex", alias="VECTOR_STORE_BACKEND")
+
+     model_config = ConfigDict(env_file=".env", env_file_encoding="utf-8")
+
+     @property
+     def database_config(self) -> dict:
+         """Get database configuration for asyncpg."""
+         return {
+             "host": self.db_host,
+             "user": self.db_user,
+             "password": self.db_password,
+             "database": self.db_name,
+             "port": self.db_port
+         }
+
+     @property
+     def file_storage_config(self) -> dict:
+         """Get file storage configuration for Google Cloud Storage."""
+         return {
+             "gcs_project_id": self.google_cloud_project_id,
+             "gcs_bucket_name": self.google_cloud_storage_bucket,
+             "gcs_credentials_path": self.google_application_credentials,
+             "enable_local_fallback": True,
+             "local_storage_path": "./storage"
+         }
+
+ @lru_cache()
+ def get_settings():
+     return Settings()
+
+
+ def validate_required_settings(operation_type: str = "full") -> bool:
+     """
+     Validate that the required settings are present for a specific operation.
+
+     Args:
+         operation_type: Type of operation to validate for
+             - "basic": Only basic package functionality
+             - "llm": LLM provider functionality
+             - "database": Database operations
+             - "storage": Cloud storage operations
+             - "full": All functionality
+
+     Returns:
+         True if the settings are valid (a ValueError is raised otherwise).
+
+     Raises:
+         ValueError: If required settings are missing for the operation type.
+     """
+     settings = get_settings()
+     missing = []
+
+     if operation_type in ["llm", "full"]:
+         # At least one LLM provider should be configured
+         llm_configs = [
+             ("OpenAI", settings.openai_api_key),
+             ("Vertex AI", settings.vertex_project_id and settings.google_application_credentials),
+             ("xAI", settings.xai_api_key)
+         ]
+
+         if not any(config[1] for config in llm_configs):
+             missing.append("At least one LLM provider (OpenAI, Vertex AI, or xAI)")
+
+     if operation_type in ["database", "full"]:
+         if not settings.db_password:
+             missing.append("DB_PASSWORD")
+
+     if operation_type in ["storage", "full"]:
+         if settings.google_cloud_project_id and not settings.google_cloud_storage_bucket:
+             missing.append("GOOGLE_CLOUD_STORAGE_BUCKET (required when GOOGLE_CLOUD_PROJECT_ID is set)")
+
+     if missing:
+         raise ValueError(
+             f"Missing required settings for {operation_type} operation: {', '.join(missing)}\n"
+             "Please check your .env file or environment variables."
+         )
+
+     return True
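Since `get_settings()` is memoized with `lru_cache`, environment variables must be in place before the first call. A minimal sketch of how these settings are consumed (the environment values are illustrative):

import os
from aiecs.config.config import get_settings, validate_required_settings

# Set before the first get_settings() call: the result is cached afterwards.
os.environ["OPENAI_API_KEY"] = "sk-example"   # illustrative value
os.environ["DB_PASSWORD"] = "secret"          # illustrative value

settings = get_settings()
print(settings.database_config["host"])       # "localhost" unless DB_HOST is set

validate_required_settings("llm")        # passes: one LLM provider is configured
validate_required_settings("database")   # passes: DB_PASSWORD is set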
@@ -0,0 +1,19 @@
1
+ AI_SERVICE_REGISTRY = {}
2
+
3
+ def register_ai_service(mode: str, service: str):
4
+ """
5
+ Decorator for registering a class to the service center, so it can be found and called by (mode, service).
6
+ """
7
+ def decorator(cls):
8
+ AI_SERVICE_REGISTRY[(mode, service)] = cls
9
+ return cls
10
+ return decorator
11
+
12
+ def get_ai_service(mode: str, service: str):
13
+ """
14
+ Find registered service class based on mode and service name.
15
+ """
16
+ key = (mode, service)
17
+ if key not in AI_SERVICE_REGISTRY:
18
+ raise ValueError(f"No registered service for mode '{mode}', service '{service}'")
19
+ return AI_SERVICE_REGISTRY[key]
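The registry maps a `(mode, service)` tuple to a class, not an instance. A small sketch of the intended decorator usage; the mode/service names and the class are made up for illustration:

from aiecs.config.registry import register_ai_service, get_ai_service

@register_ai_service(mode="chat", service="summarizer")   # hypothetical names
class SummarizerService:
    def run(self, text: str) -> str:
        return text[:100]

cls = get_ai_service("chat", "summarizer")   # returns the registered class
service = cls()                              # instantiation is the caller's job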
aiecs/core/__init__.py ADDED
@@ -0,0 +1,46 @@
+ """
+ Core module for the Python middleware application.
+
+ This module provides the core interfaces and abstractions, including:
+ - Execution interfaces
+ - Core abstractions
+ """
+
+ # Core interfaces
+ from .interface.execution_interface import (
+     ExecutionInterface,
+     IToolProvider,
+     IToolExecutor,
+     ICacheProvider,
+     IOperationExecutor
+ )
+
+ from .interface.storage_interface import (
+     ISessionStorage,
+     IConversationStorage,
+     ICheckpointStorage,
+     ITaskContextStorage,
+     IStorageBackend,
+     ICheckpointerBackend
+ )
+
+ __all__ = [
+     # Execution interfaces
+     'ExecutionInterface',
+     'IToolProvider',
+     'IToolExecutor',
+     'ICacheProvider',
+     'IOperationExecutor',
+     # Storage interfaces
+     'ISessionStorage',
+     'IConversationStorage',
+     'ICheckpointStorage',
+     'ITaskContextStorage',
+     'IStorageBackend',
+     'ICheckpointerBackend',
+ ]
+
+ # Version information
+ __version__ = "1.0.0"
+ __author__ = "Python Middleware Team"
+ __description__ = "Core interfaces and abstractions for the middleware architecture"
aiecs/core/interface/__init__.py ADDED
@@ -0,0 +1,34 @@
+ """Core interfaces module"""
+
+ from .execution_interface import (
+     ExecutionInterface,
+     IToolProvider,
+     IToolExecutor,
+     ICacheProvider,
+     IOperationExecutor
+ )
+
+ from .storage_interface import (
+     ISessionStorage,
+     IConversationStorage,
+     ICheckpointStorage,
+     ITaskContextStorage,
+     IStorageBackend,
+     ICheckpointerBackend
+ )
+
+ __all__ = [
+     # Execution interfaces
+     "ExecutionInterface",
+     "IToolProvider",
+     "IToolExecutor",
+     "ICacheProvider",
+     "IOperationExecutor",
+     # Storage interfaces
+     "ISessionStorage",
+     "IConversationStorage",
+     "ICheckpointStorage",
+     "ITaskContextStorage",
+     "IStorageBackend",
+     "ICheckpointerBackend",
+ ]
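Because aiecs/core/__init__.py and this subpackage re-export the same objects from the same underlying modules, both import paths below should resolve to the identical class; a small sanity sketch:

from aiecs.core import ISessionStorage
from aiecs.core.interface import ISessionStorage as ISessionStorage2

assert ISessionStorage is ISessionStorage2   # same class object via either path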