aiecs-1.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of aiecs might be problematic.

Files changed (90)
  1. aiecs/__init__.py +75 -0
  2. aiecs/__main__.py +41 -0
  3. aiecs/aiecs_client.py +295 -0
  4. aiecs/application/__init__.py +10 -0
  5. aiecs/application/executors/__init__.py +10 -0
  6. aiecs/application/executors/operation_executor.py +341 -0
  7. aiecs/config/__init__.py +15 -0
  8. aiecs/config/config.py +117 -0
  9. aiecs/config/registry.py +19 -0
  10. aiecs/core/__init__.py +46 -0
  11. aiecs/core/interface/__init__.py +34 -0
  12. aiecs/core/interface/execution_interface.py +150 -0
  13. aiecs/core/interface/storage_interface.py +214 -0
  14. aiecs/domain/__init__.py +20 -0
  15. aiecs/domain/context/__init__.py +28 -0
  16. aiecs/domain/context/content_engine.py +982 -0
  17. aiecs/domain/context/conversation_models.py +306 -0
  18. aiecs/domain/execution/__init__.py +12 -0
  19. aiecs/domain/execution/model.py +49 -0
  20. aiecs/domain/task/__init__.py +13 -0
  21. aiecs/domain/task/dsl_processor.py +460 -0
  22. aiecs/domain/task/model.py +50 -0
  23. aiecs/domain/task/task_context.py +257 -0
  24. aiecs/infrastructure/__init__.py +26 -0
  25. aiecs/infrastructure/messaging/__init__.py +13 -0
  26. aiecs/infrastructure/messaging/celery_task_manager.py +341 -0
  27. aiecs/infrastructure/messaging/websocket_manager.py +289 -0
  28. aiecs/infrastructure/monitoring/__init__.py +12 -0
  29. aiecs/infrastructure/monitoring/executor_metrics.py +138 -0
  30. aiecs/infrastructure/monitoring/structured_logger.py +50 -0
  31. aiecs/infrastructure/monitoring/tracing_manager.py +376 -0
  32. aiecs/infrastructure/persistence/__init__.py +12 -0
  33. aiecs/infrastructure/persistence/database_manager.py +286 -0
  34. aiecs/infrastructure/persistence/file_storage.py +671 -0
  35. aiecs/infrastructure/persistence/redis_client.py +162 -0
  36. aiecs/llm/__init__.py +54 -0
  37. aiecs/llm/base_client.py +99 -0
  38. aiecs/llm/client_factory.py +339 -0
  39. aiecs/llm/custom_callbacks.py +228 -0
  40. aiecs/llm/openai_client.py +125 -0
  41. aiecs/llm/vertex_client.py +186 -0
  42. aiecs/llm/xai_client.py +184 -0
  43. aiecs/main.py +351 -0
  44. aiecs/scripts/DEPENDENCY_SYSTEM_SUMMARY.md +241 -0
  45. aiecs/scripts/README_DEPENDENCY_CHECKER.md +309 -0
  46. aiecs/scripts/README_WEASEL_PATCH.md +126 -0
  47. aiecs/scripts/__init__.py +3 -0
  48. aiecs/scripts/dependency_checker.py +825 -0
  49. aiecs/scripts/dependency_fixer.py +348 -0
  50. aiecs/scripts/download_nlp_data.py +348 -0
  51. aiecs/scripts/fix_weasel_validator.py +121 -0
  52. aiecs/scripts/fix_weasel_validator.sh +82 -0
  53. aiecs/scripts/patch_weasel_library.sh +188 -0
  54. aiecs/scripts/quick_dependency_check.py +269 -0
  55. aiecs/scripts/run_weasel_patch.sh +41 -0
  56. aiecs/scripts/setup_nlp_data.sh +217 -0
  57. aiecs/tasks/__init__.py +2 -0
  58. aiecs/tasks/worker.py +111 -0
  59. aiecs/tools/__init__.py +196 -0
  60. aiecs/tools/base_tool.py +202 -0
  61. aiecs/tools/langchain_adapter.py +361 -0
  62. aiecs/tools/task_tools/__init__.py +82 -0
  63. aiecs/tools/task_tools/chart_tool.py +704 -0
  64. aiecs/tools/task_tools/classfire_tool.py +901 -0
  65. aiecs/tools/task_tools/image_tool.py +397 -0
  66. aiecs/tools/task_tools/office_tool.py +600 -0
  67. aiecs/tools/task_tools/pandas_tool.py +565 -0
  68. aiecs/tools/task_tools/report_tool.py +499 -0
  69. aiecs/tools/task_tools/research_tool.py +363 -0
  70. aiecs/tools/task_tools/scraper_tool.py +548 -0
  71. aiecs/tools/task_tools/search_api.py +7 -0
  72. aiecs/tools/task_tools/stats_tool.py +513 -0
  73. aiecs/tools/temp_file_manager.py +126 -0
  74. aiecs/tools/tool_executor/__init__.py +35 -0
  75. aiecs/tools/tool_executor/tool_executor.py +518 -0
  76. aiecs/utils/LLM_output_structor.py +409 -0
  77. aiecs/utils/__init__.py +23 -0
  78. aiecs/utils/base_callback.py +50 -0
  79. aiecs/utils/execution_utils.py +158 -0
  80. aiecs/utils/logging.py +1 -0
  81. aiecs/utils/prompt_loader.py +13 -0
  82. aiecs/utils/token_usage_repository.py +279 -0
  83. aiecs/ws/__init__.py +0 -0
  84. aiecs/ws/socket_server.py +41 -0
  85. aiecs-1.0.0.dist-info/METADATA +610 -0
  86. aiecs-1.0.0.dist-info/RECORD +90 -0
  87. aiecs-1.0.0.dist-info/WHEEL +5 -0
  88. aiecs-1.0.0.dist-info/entry_points.txt +7 -0
  89. aiecs-1.0.0.dist-info/licenses/LICENSE +225 -0
  90. aiecs-1.0.0.dist-info/top_level.txt +1 -0
aiecs/domain/task/task_context.py
@@ -0,0 +1,257 @@
+ import time
+ import logging
+ import json
+ import os
+ from typing import Dict, Any, Optional, AsyncGenerator, List, Tuple
+ from contextlib import asynccontextmanager
+ from dataclasses import dataclass
+ from datetime import datetime
+ from pathlib import Path
+ import asyncio
+
+ logger = logging.getLogger(__name__)
+
+ @dataclass
+ class ContextUpdate:
+     """Represents a single update to the context (e.g., message, metadata, or resource)."""
+     timestamp: float
+     update_type: str  # e.g., "message", "metadata", "resource"
+     data: Any  # Content of the update (e.g., message text, metadata dict)
+     metadata: Dict[str, Any]  # Additional metadata (e.g., file paths, model info)
+
+ class TaskContext:
+     """
+     Enhanced context manager for task execution with:
+     - Context history tracking and checkpointing
+     - Resource acquisition and release
+     - Performance tracking
+     - File and model tracking
+     - Persistent storage
+     - Metadata toggles
+     - Enhanced error handling
+     """
+     def __init__(self, data: dict, task_dir: str = "./tasks"):
+         self.user_id = data.get("user_id", "anonymous")
+         self.chat_id = data.get("chat_id", "none")
+         # Ensure metadata includes aiPreference
+         self.metadata = data.get("metadata", {})
+         if "aiPreference" in data:
+             self.metadata["aiPreference"] = data["aiPreference"]
+         self.task_dir = Path(task_dir)
+         self.start_time: Optional[float] = None
+         self.resources: Dict[str, Any] = {}
+         self.context_history: List[ContextUpdate] = []
+         self.file_tracker: Dict[str, Dict[str, Any]] = {}  # Tracks file operations
+         self.model_tracker: List[Dict[str, Any]] = []  # Tracks model usage
+         self.metadata_toggles: Dict[str, bool] = data.get("metadata_toggles", {})
+         self._initialize_persistence()
+
+     def _initialize_persistence(self):
+         """Initialize persistent storage for context history."""
+         try:
+             self.task_dir.mkdir(parents=True, exist_ok=True)
+             history_file = self.task_dir / f"context_history_{self.chat_id}.json"
+             if history_file.exists():
+                 with open(history_file, "r") as f:
+                     raw_history = json.load(f)
+                 self.context_history = [
+                     ContextUpdate(
+                         timestamp=entry["timestamp"],
+                         update_type=entry["update_type"],
+                         data=entry["data"],
+                         metadata=entry["metadata"]
+                     )
+                     for entry in raw_history
+                 ]
+                 logger.debug(f"Loaded context history from {history_file}")
+         except Exception as e:
+             logger.error(f"Failed to initialize context history: {e}")
+
+     async def _save_context_history(self):
+         """Save context history to disk."""
+         try:
+             history_file = self.task_dir / f"context_history_{self.chat_id}.json"
+             serialized_history = [
+                 {
+                     "timestamp": update.timestamp,
+                     "update_type": update.update_type,
+                     "data": update.data,
+                     "metadata": update.metadata
+                 }
+                 for update in self.context_history
+             ]
+             with open(history_file, "w") as f:
+                 json.dump(serialized_history, f, indent=2)
+             logger.debug(f"Saved context history to {history_file}")
+         except Exception as e:
+             logger.error(f"Failed to save context history: {e}")
+
+     def add_context_update(self, update_type: str, data: Any, metadata: Optional[Dict[str, Any]] = None):
+         """Add a context update (e.g., message, metadata change)."""
+         update = ContextUpdate(
+             timestamp=time.time(),
+             update_type=update_type,
+             data=data,
+             metadata=metadata or {}
+         )
+         self.context_history.append(update)
+         logger.debug(f"Added context update: {update_type}")
+
+     def add_resource(self, name: str, resource: Any) -> None:
+         """Add a resource that needs cleanup."""
+         self.resources[name] = resource
+         self.add_context_update("resource", {"name": name}, {"type": type(resource).__name__})
+         logger.debug(f"Added resource: {name}")
+
+     def track_file_operation(self, file_path: str, operation: str, source: str = "task"):
+         """Track a file operation (e.g., read, edit)."""
+         self.file_tracker[file_path] = {
+             "operation": operation,
+             "source": source,
+             "timestamp": time.time(),
+             "state": "active"
+         }
+         self.add_context_update("file_operation", {"path": file_path, "operation": operation}, {"source": source})
+         logger.debug(f"Tracked file operation: {operation} on {file_path}")
+
+     def track_model_usage(self, model_id: str, provider_id: str, mode: str):
+         """Track AI model usage."""
+         model_entry = {
+             "model_id": model_id,
+             "provider_id": provider_id,
+             "mode": mode,
+             "timestamp": time.time()
+         }
+         # Avoid duplicates
+         if not self.model_tracker or self.model_tracker[-1] != model_entry:
+             self.model_tracker.append(model_entry)
+             self.add_context_update("model_usage", model_entry)
+             logger.debug(f"Tracked model usage: {model_id} ({provider_id}, {mode})")
+
+     def optimize_context(self, max_size: int = 1000) -> bool:
+         """Optimize context by removing duplicates and old entries."""
+         original_len = len(self.context_history)
+         deduplicated = {}
+         optimized_history = []
+         total_size = 0
+
+         # Walk newest-first so the most recent copy of a duplicate wins
+         for update in reversed(self.context_history):
+             key = f"{update.update_type}:{json.dumps(update.data, sort_keys=True)}"
+             if key not in deduplicated:
+                 deduplicated[key] = update
+                 data_size = len(str(update.data))
+                 if total_size + data_size <= max_size:
+                     optimized_history.append(update)
+                     total_size += data_size
+
+         self.context_history = list(reversed(optimized_history))
+         removed = original_len - len(self.context_history)
+         if removed > 0:
+             logger.debug(f"Optimized context: removed {removed} duplicate or oversized entries")
+             return True
+         return False
+
+     async def truncate_context_history(self, timestamp: float):
+         """Truncate context history after a given timestamp."""
+         original_len = len(self.context_history)
+         self.context_history = [update for update in self.context_history if update.timestamp <= timestamp]
+         if len(self.context_history) < original_len:
+             await self._save_context_history()
+             logger.debug(f"Truncated context history at timestamp {timestamp}")
+
+     def get_active_metadata(self) -> Dict[str, Any]:
+         """Return metadata filtered by toggles."""
+         return {
+             key: value
+             for key, value in self.metadata.items()
+             if key not in self.metadata_toggles or self.metadata_toggles[key] is not False
+         }
+
+     def to_dict(self) -> Dict[str, Any]:
+         """Convert context to dictionary."""
+         return {
+             "user_id": self.user_id,
+             "chat_id": self.chat_id,
+             "metadata": self.get_active_metadata(),
+             "context_history": [
+                 {
+                     "timestamp": update.timestamp,
+                     "update_type": update.update_type,
+                     "data": update.data,
+                     "metadata": update.metadata
+                 }
+                 for update in self.context_history
+             ],
+             "file_tracker": self.file_tracker,
+             "model_tracker": self.model_tracker
+         }
+
+     def __enter__(self):
+         """Synchronous context entry."""
+         self.start_time = time.time()
+         logger.debug(f"Starting task context for user {self.user_id}, chat {self.chat_id}")
+         return self
+
+     def __exit__(self, exc_type, exc_val, exc_tb):
+         """Synchronous context exit with cleanup."""
+         duration = time.time() - self.start_time
+         logger.debug(f"Completed task context in {duration:.2f}s for user {self.user_id}")
+         for resource_name, resource in self.resources.items():
+             try:
+                 if hasattr(resource, 'close'):
+                     resource.close()
+                     logger.debug(f"Cleaned up resource: {resource_name}")
+             except Exception as e:
+                 logger.error(f"Error cleaning up resource {resource_name}: {e}")
+         if exc_type:
+             logger.error(f"Task context exited with error: {exc_val}")
+         return False
+
+     async def __aenter__(self):
+         """Asynchronous context entry."""
+         self.start_time = time.time()
+         logger.debug(f"Starting async task context for user {self.user_id}, chat {self.chat_id}")
+         return self
+
+     async def __aexit__(self, exc_type, exc_val, exc_tb):
+         """Asynchronous context exit with cleanup."""
+         duration = time.time() - self.start_time
+         logger.debug(f"Completed async task context in {duration:.2f}s for user {self.user_id}")
+         for resource_name, resource in self.resources.items():
+             try:
+                 close = getattr(resource, 'close', None)
+                 if callable(close):
+                     # Await coroutine-function close() methods; call synchronous ones directly
+                     if asyncio.iscoroutinefunction(close):
+                         await close()
+                     else:
+                         close()
+                     logger.debug(f"Cleaned up async resource: {resource_name}")
+             except Exception as e:
+                 logger.error(f"Error cleaning up async resource {resource_name}: {e}")
+         if exc_type:
+             logger.error(f"Async task context exited with error: {exc_val}")
+         await self._save_context_history()
+         return False
+
+
+ def build_context(data: dict) -> dict:
+     """Build a simple context dictionary (for backward compatibility)."""
+     context = TaskContext(data)
+     return context.to_dict()
+
+
+ @asynccontextmanager
+ async def task_context(data: dict, task_dir: str = "./tasks") -> AsyncGenerator[TaskContext, None]:
+     """
+     Async context manager for task execution.
+
+     Usage:
+         async with task_context(request_data, task_dir="/path/to/tasks") as context:
+             context.add_context_update("message", "User input", {"source": "user"})
+             context.track_file_operation("example.py", "read", "tool")
+             result = await service_instance.run(data, context)
+     """
+     context = TaskContext(data, task_dir)
+     try:
+         await context.__aenter__()
+         yield context
+     finally:
+         await context.__aexit__(None, None, None)
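
For orientation, here is a minimal usage sketch of the two entry points defined above (direct TaskContext use and the task_context async helper); the request payload, file name, and model identifiers are hypothetical:

import asyncio
from aiecs.domain.task.task_context import TaskContext, task_context

# Hypothetical request payload; TaskContext reads user_id, chat_id,
# metadata, metadata_toggles, and aiPreference from it.
request_data = {
    "user_id": "u-123",
    "chat_id": "c-456",
    "metadata": {"locale": "en"},
}

# Synchronous usage: resources are cleaned up on exit, but the history
# file is only written by the async exit path.
with TaskContext(request_data, task_dir="./tasks") as ctx:
    ctx.add_context_update("message", "User input", {"source": "user"})

# Async usage: __aexit__ persists ./tasks/context_history_c-456.json.
async def main():
    async with task_context(request_data, task_dir="./tasks") as ctx:
        ctx.track_file_operation("example.py", "read", "tool")
        ctx.track_model_usage("gpt-4", "openai", "chat")

asyncio.run(main())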
aiecs/infrastructure/__init__.py
@@ -0,0 +1,26 @@
+ """Infrastructure layer module
+
+ Contains external system integrations and technical concerns.
+ """
+
+ from .messaging.celery_task_manager import CeleryTaskManager
+ from .messaging.websocket_manager import WebSocketManager, UserConfirmation
+ from .persistence.database_manager import DatabaseManager
+ from .persistence.redis_client import RedisClient
+ from .monitoring.executor_metrics import ExecutorMetrics
+ from .monitoring.tracing_manager import TracingManager
+
+ __all__ = [
+     # Messaging
+     "CeleryTaskManager",
+     "WebSocketManager",
+     "UserConfirmation",
+
+     # Persistence
+     "DatabaseManager",
+     "RedisClient",
+
+     # Monitoring
+     "ExecutorMetrics",
+     "TracingManager",
+ ]
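
Because this __init__ re-exports the layer's classes, callers can use flat imports instead of deep module paths; a small sketch (the broker and backend URLs are assumed local defaults):

# Flat imports via the re-exports above
from aiecs.infrastructure import CeleryTaskManager, RedisClient

manager = CeleryTaskManager({
    "broker_url": "redis://localhost:6379/0",
    "backend_url": "redis://localhost:6379/0",
})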
aiecs/infrastructure/messaging/__init__.py
@@ -0,0 +1,13 @@
+ """Infrastructure messaging module
+
+ Contains messaging and communication infrastructure.
+ """
+
+ from .celery_task_manager import CeleryTaskManager
+ from .websocket_manager import WebSocketManager, UserConfirmation
+
+ __all__ = [
+     "CeleryTaskManager",
+     "WebSocketManager",
+     "UserConfirmation",
+ ]
aiecs/infrastructure/messaging/celery_task_manager.py
@@ -0,0 +1,341 @@
+ import asyncio
+ import logging
+ import time
+ import uuid
+ from typing import Dict, List, Any, Optional
+ from celery import Celery
+ from celery.exceptions import TimeoutError as CeleryTimeoutError
+ from asyncio import TimeoutError as AsyncioTimeoutError
+ # Removed direct import to avoid circular dependency
+ # Tasks are referenced by string names instead
+ from aiecs.domain.execution.model import TaskStatus, ErrorCode
+
+ logger = logging.getLogger(__name__)
+
+
+ class CeleryTaskManager:
+     """
+     Specialized handler for Celery distributed task scheduling and execution
+     """
+
+     def __init__(self, config: Dict[str, Any]):
+         self.config = config
+         self.celery_app = None
+         self._init_celery()
+
+     def _init_celery(self):
+         """Initialize Celery application"""
+         try:
+             self.celery_app = Celery(
+                 'service_executor',
+                 broker=self.config.get('broker_url', 'redis://redis:6379/0'),
+                 backend=self.config.get('backend_url', 'redis://redis:6379/0')
+             )
+
+             # Configure Celery
+             self.celery_app.conf.update(
+                 task_serializer=self.config.get('task_serializer', 'json'),
+                 accept_content=self.config.get('accept_content', ['json']),
+                 result_serializer=self.config.get('result_serializer', 'json'),
+                 timezone=self.config.get('timezone', 'UTC'),
+                 enable_utc=self.config.get('enable_utc', True),
+                 task_queues=self.config.get('task_queues', {
+                     'fast_tasks': {'exchange': 'fast_tasks', 'routing_key': 'fast_tasks'},
+                     'heavy_tasks': {'exchange': 'heavy_tasks', 'routing_key': 'heavy_tasks'}
+                 }),
+                 worker_concurrency=self.config.get('worker_concurrency', {
+                     'fast_worker': 10,
+                     'heavy_worker': 2
+                 })
+             )
+
+             logger.info("Celery application initialized successfully")
+         except Exception as e:
+             logger.error(f"Failed to initialize Celery: {e}")
+             raise
+
+     def execute_celery_task(self, task_name: str, queue: str, user_id: str, task_id: str, step: int,
+                             mode: str, service: str, input_data: Dict[str, Any], context: Dict[str, Any]):
+         """
+         Execute Celery task
+
+         Args:
+             task_name: Task name
+             queue: Queue name ('fast_tasks' or 'heavy_tasks')
+             user_id: User ID
+             task_id: Task ID
+             step: Step number
+             mode: Service mode
+             service: Service name
+             input_data: Input data
+             context: Context information
+
+         Returns:
+             Celery AsyncResult object
+         """
+         logger.info(f"Queueing task {task_name} to {queue} for user {user_id}, task {task_id}, step {step}")
+
+         # Determine Celery task to use based on queue
+         celery_task_name = "aiecs.tasks.worker.execute_task"
+         if queue == "heavy_tasks":
+             celery_task_name = "aiecs.tasks.worker.execute_heavy_task"
+
+         # Send task to Celery
+         return self.celery_app.send_task(
+             celery_task_name,
+             kwargs={
+                 "task_name": task_name,
+                 "user_id": user_id,
+                 "task_id": task_id,
+                 "step": step,
+                 "mode": mode,
+                 "service": service,
+                 "input_data": input_data,
+                 "context": context
+             },
+             queue=queue
+         )
+
+     async def execute_task(self, task_name: str, input_data: Dict[str, Any], context: Dict[str, Any]) -> Any:
+         """
+         Execute a single task using Celery for asynchronous processing
+         """
+         user_id = context.get("user_id", "anonymous")
+         task_id = input_data.get("task_id", str(uuid.uuid4()))
+         step = input_data.get("step", 0)
+         mode = input_data.get("mode", "default")
+         service = input_data.get("service", "default")
+         queue = input_data.get("queue", "fast_tasks")
+         timeout = self.config.get('task_timeout_seconds', 300)
+
+         try:
+             # Use string-based task names to avoid circular imports
+             celery_task_name = "aiecs.tasks.worker.execute_task"
+             if queue == 'heavy_tasks':
+                 celery_task_name = "aiecs.tasks.worker.execute_heavy_task"
+
+             result = self.celery_app.send_task(
+                 celery_task_name,
+                 kwargs={
+                     "task_name": task_name,
+                     "user_id": user_id,
+                     "task_id": task_id,
+                     "step": step,
+                     "mode": mode,
+                     "service": service,
+                     "input_data": input_data,
+                     "context": context
+                 },
+                 queue=queue
+             )
+
+             return result.get(timeout=timeout)
+
+         except CeleryTimeoutError as e:
+             logger.error(f"Timeout executing Celery task {task_name}: {e}")
+             return {
+                 "status": TaskStatus.TIMED_OUT,
+                 "error_code": ErrorCode.TIMEOUT_ERROR,
+                 "error_message": str(e)
+             }
+         except Exception as e:
+             logger.error(f"Error executing Celery task {task_name}: {e}", exc_info=True)
+             return {
+                 "status": TaskStatus.FAILED,
+                 "error_code": ErrorCode.EXECUTION_ERROR,
+                 "error_message": str(e)
+             }
+
+     async def execute_heavy_task(self, task_name: str, input_data: Dict, context: Dict) -> Any:
+         """
+         Execute heavy task
+         """
+         input_data["queue"] = "heavy_tasks"
+         return await self.execute_task(task_name, input_data, context)
+
+     async def execute_dsl_task_step(self, step: Dict, input_data: Dict, context: Dict) -> Dict[str, Any]:
+         """
+         Execute DSL task step
+         """
+         task_name = step.get("task")
+         category = "process"
+
+         if not task_name:
+             return {
+                 "step": "unknown",
+                 "result": None,
+                 "completed": False,
+                 "message": "Invalid DSL step: missing task name",
+                 "status": TaskStatus.FAILED,
+                 "error_code": ErrorCode.VALIDATION_ERROR,
+                 "error_message": "Task name is required"
+             }
+
+         # Determine task type
+         task_type = "fast"
+         try:
+             task_type_result = await self.execute_task(task_name, {"get_task_type": True}, context)
+             if isinstance(task_type_result, dict) and "task_type" in task_type_result:
+                 task_type = task_type_result["task_type"]
+         except Exception:
+             logger.warning(f"Could not determine task type for {task_name}, defaulting to 'fast'")
+
+         queue = "heavy_tasks" if task_type == "heavy" else "fast_tasks"
+         celery_task_name = "aiecs.tasks.worker.execute_heavy_task" if task_type == "heavy" else "aiecs.tasks.worker.execute_task"
+
+         user_id = context.get("user_id", str(uuid.uuid4()))
+         task_id = context.get("task_id", str(uuid.uuid4()))
+         step_num = context.get("step", 0)
+
+         # Send task to Celery
+         celery_task = self.celery_app.send_task(
+             celery_task_name,
+             kwargs={
+                 "task_name": task_name,
+                 "user_id": user_id,
+                 "task_id": task_id,
+                 "step": step_num,
+                 "mode": context.get("mode", "multi_task"),
+                 "service": context.get("service", "summarizer"),
+                 "input_data": input_data,
+                 "context": context
+             },
+             queue=queue
+         )
+
+         try:
+             timeout_seconds = self.config.get('task_timeout_seconds', 300)
+             start_time = time.time()
+
+             # Wait for task completion
+             while not celery_task.ready():
+                 if time.time() - start_time > timeout_seconds:
+                     raise AsyncioTimeoutError(f"Task {task_name} timed out after {timeout_seconds} seconds")
+                 await asyncio.sleep(0.5)
+
+             if celery_task.successful():
+                 result = celery_task.get()
+                 if isinstance(result, dict) and "step" in result:
+                     return result
+                 else:
+                     return {
+                         "step": f"{category}/{task_name}",
+                         "result": result,
+                         "completed": True,
+                         "message": f"Completed task {task_name}",
+                         "status": TaskStatus.COMPLETED
+                     }
+             else:
+                 error = celery_task.get(propagate=False)
+                 status = TaskStatus.TIMED_OUT if isinstance(error, CeleryTimeoutError) else TaskStatus.FAILED
+                 error_code = ErrorCode.TIMEOUT_ERROR if isinstance(error, CeleryTimeoutError) else ErrorCode.EXECUTION_ERROR
+
+                 return {
+                     "step": f"{category}/{task_name}",
+                     "result": None,
+                     "completed": False,
+                     "message": f"Failed to execute task: {error}",
+                     "status": status,
+                     "error_code": error_code,
+                     "error_message": str(error)
+                 }
+
+         except AsyncioTimeoutError as e:
+             return {
+                 "step": f"{category}/{task_name}",
+                 "result": None,
+                 "completed": False,
+                 "message": "Task execution timed out",
+                 "status": TaskStatus.TIMED_OUT,
+                 "error_code": ErrorCode.TIMEOUT_ERROR,
+                 "error_message": str(e)
+             }
+         except Exception as e:
+             return {
+                 "step": f"{category}/{task_name}",
+                 "result": None,
+                 "completed": False,
+                 "message": f"Failed to execute {category}/{task_name}",
+                 "status": TaskStatus.FAILED,
+                 "error_code": ErrorCode.EXECUTION_ERROR,
+                 "error_message": str(e)
+             }
+
+     def get_task_result(self, task_id: str):
+         """Get task result"""
+         try:
+             result = self.celery_app.AsyncResult(task_id)
+             return {
+                 "task_id": task_id,
+                 "status": result.status,
+                 "result": result.result if result.ready() else None,
+                 "successful": result.successful() if result.ready() else None,
+                 "failed": result.failed() if result.ready() else None
+             }
+         except Exception as e:
+             logger.error(f"Error getting task result for {task_id}: {e}")
+             return {
+                 "task_id": task_id,
+                 "status": "ERROR",
+                 "error": str(e)
+             }
+
+     def cancel_task(self, task_id: str):
+         """Cancel task"""
+         try:
+             self.celery_app.control.revoke(task_id, terminate=True)
+             logger.info(f"Task {task_id} cancelled")
+             return True
+         except Exception as e:
+             logger.error(f"Error cancelling task {task_id}: {e}")
+             return False
+
+     async def batch_execute_tasks(self, tasks: List[Dict[str, Any]]) -> List[Any]:
+         """
+         Batch execute tasks
+         """
+         results = []
+         batch_size = self.config.get('batch_size', 10)
+         rate_limit = self.config.get('rate_limit_requests_per_second', 5)
+
+         for i in range(0, len(tasks), batch_size):
+             batch = tasks[i:i + batch_size]
+             batch_results = await asyncio.gather(
+                 *[self.execute_task(
+                     task["task_name"],
+                     task.get("input_data", {}),
+                     task.get("context", {})
+                 ) for task in batch],
+                 return_exceptions=True
+             )
+             results.extend(batch_results)
+             await asyncio.sleep(1.0 / rate_limit)
+
+         return results
+
+     def get_queue_info(self) -> Dict[str, Any]:
+         """Get queue information"""
+         try:
+             inspect = self.celery_app.control.inspect()
+             active_tasks = inspect.active()
+             scheduled_tasks = inspect.scheduled()
+             reserved_tasks = inspect.reserved()
+
+             return {
+                 "active_tasks": active_tasks,
+                 "scheduled_tasks": scheduled_tasks,
+                 "reserved_tasks": reserved_tasks
+             }
+         except Exception as e:
+             logger.error(f"Error getting queue info: {e}")
+             return {"error": str(e)}
+
+     def get_worker_stats(self) -> Dict[str, Any]:
+         """Get worker statistics"""
+         try:
+             inspect = self.celery_app.control.inspect()
+             stats = inspect.stats()
+             return stats or {}
+         except Exception as e:
+             logger.error(f"Error getting worker stats: {e}")
+             return {"error": str(e)}