camel-ai 0.2.67__py3-none-any.whl → 0.2.80a2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- camel/__init__.py +1 -1
- camel/agents/_types.py +6 -2
- camel/agents/_utils.py +38 -0
- camel/agents/chat_agent.py +4014 -410
- camel/agents/mcp_agent.py +30 -27
- camel/agents/repo_agent.py +2 -1
- camel/benchmarks/browsecomp.py +6 -6
- camel/configs/__init__.py +15 -0
- camel/configs/aihubmix_config.py +88 -0
- camel/configs/amd_config.py +70 -0
- camel/configs/cometapi_config.py +104 -0
- camel/configs/minimax_config.py +93 -0
- camel/configs/nebius_config.py +103 -0
- camel/configs/vllm_config.py +2 -0
- camel/data_collectors/alpaca_collector.py +15 -6
- camel/datagen/self_improving_cot.py +1 -1
- camel/datasets/base_generator.py +39 -10
- camel/environments/__init__.py +12 -0
- camel/environments/rlcards_env.py +860 -0
- camel/environments/single_step.py +28 -3
- camel/environments/tic_tac_toe.py +1 -1
- camel/interpreters/__init__.py +2 -0
- camel/interpreters/docker/Dockerfile +4 -16
- camel/interpreters/docker_interpreter.py +3 -2
- camel/interpreters/e2b_interpreter.py +34 -1
- camel/interpreters/internal_python_interpreter.py +51 -2
- camel/interpreters/microsandbox_interpreter.py +395 -0
- camel/loaders/__init__.py +11 -2
- camel/loaders/base_loader.py +85 -0
- camel/loaders/chunkr_reader.py +9 -0
- camel/loaders/firecrawl_reader.py +4 -4
- camel/logger.py +1 -1
- camel/memories/agent_memories.py +84 -1
- camel/memories/base.py +34 -0
- camel/memories/blocks/chat_history_block.py +122 -4
- camel/memories/blocks/vectordb_block.py +8 -1
- camel/memories/context_creators/score_based.py +29 -237
- camel/memories/records.py +88 -8
- camel/messages/base.py +166 -40
- camel/messages/func_message.py +32 -5
- camel/models/__init__.py +10 -0
- camel/models/aihubmix_model.py +83 -0
- camel/models/aiml_model.py +1 -16
- camel/models/amd_model.py +101 -0
- camel/models/anthropic_model.py +117 -18
- camel/models/aws_bedrock_model.py +2 -33
- camel/models/azure_openai_model.py +205 -91
- camel/models/base_audio_model.py +3 -1
- camel/models/base_model.py +189 -24
- camel/models/cohere_model.py +5 -17
- camel/models/cometapi_model.py +83 -0
- camel/models/crynux_model.py +1 -16
- camel/models/deepseek_model.py +6 -16
- camel/models/fish_audio_model.py +6 -0
- camel/models/gemini_model.py +71 -20
- camel/models/groq_model.py +1 -17
- camel/models/internlm_model.py +1 -16
- camel/models/litellm_model.py +49 -32
- camel/models/lmstudio_model.py +1 -17
- camel/models/minimax_model.py +83 -0
- camel/models/mistral_model.py +1 -16
- camel/models/model_factory.py +27 -1
- camel/models/model_manager.py +24 -6
- camel/models/modelscope_model.py +1 -16
- camel/models/moonshot_model.py +185 -19
- camel/models/nebius_model.py +83 -0
- camel/models/nemotron_model.py +0 -5
- camel/models/netmind_model.py +1 -16
- camel/models/novita_model.py +1 -16
- camel/models/nvidia_model.py +1 -16
- camel/models/ollama_model.py +4 -19
- camel/models/openai_compatible_model.py +171 -46
- camel/models/openai_model.py +205 -77
- camel/models/openrouter_model.py +1 -17
- camel/models/ppio_model.py +1 -16
- camel/models/qianfan_model.py +1 -16
- camel/models/qwen_model.py +1 -16
- camel/models/reka_model.py +1 -16
- camel/models/samba_model.py +34 -47
- camel/models/sglang_model.py +64 -31
- camel/models/siliconflow_model.py +1 -16
- camel/models/stub_model.py +0 -4
- camel/models/togetherai_model.py +1 -16
- camel/models/vllm_model.py +1 -16
- camel/models/volcano_model.py +0 -17
- camel/models/watsonx_model.py +1 -16
- camel/models/yi_model.py +1 -16
- camel/models/zhipuai_model.py +60 -16
- camel/parsers/__init__.py +18 -0
- camel/parsers/mcp_tool_call_parser.py +176 -0
- camel/retrievers/auto_retriever.py +1 -0
- camel/runtimes/configs.py +11 -11
- camel/runtimes/daytona_runtime.py +15 -16
- camel/runtimes/docker_runtime.py +6 -6
- camel/runtimes/remote_http_runtime.py +5 -5
- camel/services/agent_openapi_server.py +380 -0
- camel/societies/__init__.py +2 -0
- camel/societies/role_playing.py +26 -28
- camel/societies/workforce/__init__.py +2 -0
- camel/societies/workforce/events.py +122 -0
- camel/societies/workforce/prompts.py +249 -38
- camel/societies/workforce/role_playing_worker.py +82 -20
- camel/societies/workforce/single_agent_worker.py +634 -34
- camel/societies/workforce/structured_output_handler.py +512 -0
- camel/societies/workforce/task_channel.py +169 -23
- camel/societies/workforce/utils.py +176 -9
- camel/societies/workforce/worker.py +77 -23
- camel/societies/workforce/workflow_memory_manager.py +772 -0
- camel/societies/workforce/workforce.py +3168 -478
- camel/societies/workforce/workforce_callback.py +74 -0
- camel/societies/workforce/workforce_logger.py +203 -175
- camel/societies/workforce/workforce_metrics.py +33 -0
- camel/storages/__init__.py +4 -0
- camel/storages/key_value_storages/json.py +15 -2
- camel/storages/key_value_storages/mem0_cloud.py +48 -47
- camel/storages/object_storages/google_cloud.py +1 -1
- camel/storages/vectordb_storages/__init__.py +6 -0
- camel/storages/vectordb_storages/chroma.py +731 -0
- camel/storages/vectordb_storages/oceanbase.py +13 -13
- camel/storages/vectordb_storages/pgvector.py +349 -0
- camel/storages/vectordb_storages/qdrant.py +3 -3
- camel/storages/vectordb_storages/surreal.py +365 -0
- camel/storages/vectordb_storages/tidb.py +8 -6
- camel/tasks/task.py +244 -27
- camel/toolkits/__init__.py +46 -8
- camel/toolkits/aci_toolkit.py +64 -19
- camel/toolkits/arxiv_toolkit.py +6 -6
- camel/toolkits/base.py +63 -5
- camel/toolkits/code_execution.py +28 -1
- camel/toolkits/context_summarizer_toolkit.py +684 -0
- camel/toolkits/craw4ai_toolkit.py +93 -0
- camel/toolkits/dappier_toolkit.py +10 -6
- camel/toolkits/dingtalk.py +1135 -0
- camel/toolkits/edgeone_pages_mcp_toolkit.py +49 -0
- camel/toolkits/excel_toolkit.py +901 -67
- camel/toolkits/file_toolkit.py +1402 -0
- camel/toolkits/function_tool.py +30 -6
- camel/toolkits/github_toolkit.py +107 -20
- camel/toolkits/gmail_toolkit.py +1839 -0
- camel/toolkits/google_calendar_toolkit.py +38 -4
- camel/toolkits/google_drive_mcp_toolkit.py +54 -0
- camel/toolkits/human_toolkit.py +34 -10
- camel/toolkits/hybrid_browser_toolkit/__init__.py +18 -0
- camel/toolkits/hybrid_browser_toolkit/config_loader.py +185 -0
- camel/toolkits/hybrid_browser_toolkit/hybrid_browser_toolkit.py +246 -0
- camel/toolkits/hybrid_browser_toolkit/hybrid_browser_toolkit_ts.py +1973 -0
- camel/toolkits/hybrid_browser_toolkit/installer.py +203 -0
- camel/toolkits/hybrid_browser_toolkit/ts/package-lock.json +3749 -0
- camel/toolkits/hybrid_browser_toolkit/ts/package.json +32 -0
- camel/toolkits/hybrid_browser_toolkit/ts/src/browser-scripts.js +125 -0
- camel/toolkits/hybrid_browser_toolkit/ts/src/browser-session.ts +1815 -0
- camel/toolkits/hybrid_browser_toolkit/ts/src/config-loader.ts +233 -0
- camel/toolkits/hybrid_browser_toolkit/ts/src/hybrid-browser-toolkit.ts +590 -0
- camel/toolkits/hybrid_browser_toolkit/ts/src/index.ts +7 -0
- camel/toolkits/hybrid_browser_toolkit/ts/src/parent-child-filter.ts +226 -0
- camel/toolkits/hybrid_browser_toolkit/ts/src/snapshot-parser.ts +219 -0
- camel/toolkits/hybrid_browser_toolkit/ts/src/som-screenshot-injected.ts +543 -0
- camel/toolkits/hybrid_browser_toolkit/ts/src/types.ts +130 -0
- camel/toolkits/hybrid_browser_toolkit/ts/tsconfig.json +26 -0
- camel/toolkits/hybrid_browser_toolkit/ts/websocket-server.js +319 -0
- camel/toolkits/hybrid_browser_toolkit/ws_wrapper.py +1032 -0
- camel/toolkits/hybrid_browser_toolkit_py/__init__.py +17 -0
- camel/toolkits/hybrid_browser_toolkit_py/actions.py +575 -0
- camel/toolkits/hybrid_browser_toolkit_py/agent.py +311 -0
- camel/toolkits/hybrid_browser_toolkit_py/browser_session.py +787 -0
- camel/toolkits/hybrid_browser_toolkit_py/config_loader.py +490 -0
- camel/toolkits/hybrid_browser_toolkit_py/hybrid_browser_toolkit.py +2390 -0
- camel/toolkits/hybrid_browser_toolkit_py/snapshot.py +233 -0
- camel/toolkits/hybrid_browser_toolkit_py/stealth_script.js +0 -0
- camel/toolkits/hybrid_browser_toolkit_py/unified_analyzer.js +1043 -0
- camel/toolkits/image_generation_toolkit.py +390 -0
- camel/toolkits/jina_reranker_toolkit.py +3 -4
- camel/toolkits/klavis_toolkit.py +5 -1
- camel/toolkits/markitdown_toolkit.py +104 -0
- camel/toolkits/math_toolkit.py +64 -10
- camel/toolkits/mcp_toolkit.py +370 -45
- camel/toolkits/memory_toolkit.py +5 -1
- camel/toolkits/message_agent_toolkit.py +608 -0
- camel/toolkits/message_integration.py +724 -0
- camel/toolkits/minimax_mcp_toolkit.py +195 -0
- camel/toolkits/note_taking_toolkit.py +277 -0
- camel/toolkits/notion_mcp_toolkit.py +224 -0
- camel/toolkits/openbb_toolkit.py +5 -1
- camel/toolkits/origene_mcp_toolkit.py +56 -0
- camel/toolkits/playwright_mcp_toolkit.py +12 -31
- camel/toolkits/pptx_toolkit.py +25 -12
- camel/toolkits/resend_toolkit.py +168 -0
- camel/toolkits/screenshot_toolkit.py +213 -0
- camel/toolkits/search_toolkit.py +437 -142
- camel/toolkits/slack_toolkit.py +104 -50
- camel/toolkits/sympy_toolkit.py +1 -1
- camel/toolkits/task_planning_toolkit.py +3 -3
- camel/toolkits/terminal_toolkit/__init__.py +18 -0
- camel/toolkits/terminal_toolkit/terminal_toolkit.py +957 -0
- camel/toolkits/terminal_toolkit/utils.py +532 -0
- camel/toolkits/thinking_toolkit.py +1 -1
- camel/toolkits/vertex_ai_veo_toolkit.py +590 -0
- camel/toolkits/video_analysis_toolkit.py +106 -26
- camel/toolkits/video_download_toolkit.py +17 -14
- camel/toolkits/web_deploy_toolkit.py +1219 -0
- camel/toolkits/wechat_official_toolkit.py +483 -0
- camel/toolkits/zapier_toolkit.py +5 -1
- camel/types/__init__.py +2 -2
- camel/types/agents/tool_calling_record.py +4 -1
- camel/types/enums.py +316 -40
- camel/types/openai_types.py +2 -2
- camel/types/unified_model_type.py +31 -4
- camel/utils/commons.py +36 -5
- camel/utils/constants.py +3 -0
- camel/utils/context_utils.py +1003 -0
- camel/utils/mcp.py +138 -4
- camel/utils/mcp_client.py +45 -1
- camel/utils/message_summarizer.py +148 -0
- camel/utils/token_counting.py +43 -20
- camel/utils/tool_result.py +44 -0
- {camel_ai-0.2.67.dist-info → camel_ai-0.2.80a2.dist-info}/METADATA +296 -85
- {camel_ai-0.2.67.dist-info → camel_ai-0.2.80a2.dist-info}/RECORD +219 -146
- camel/loaders/pandas_reader.py +0 -368
- camel/toolkits/dalle_toolkit.py +0 -175
- camel/toolkits/file_write_toolkit.py +0 -444
- camel/toolkits/openai_agent_toolkit.py +0 -135
- camel/toolkits/terminal_toolkit.py +0 -1037
- {camel_ai-0.2.67.dist-info → camel_ai-0.2.80a2.dist-info}/WHEEL +0 -0
- {camel_ai-0.2.67.dist-info → camel_ai-0.2.80a2.dist-info}/licenses/LICENSE +0 -0
camel/societies/workforce/workforce_callback.py
ADDED
@@ -0,0 +1,74 @@
+# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+
+from .events import (
+    AllTasksCompletedEvent,
+    TaskAssignedEvent,
+    TaskCompletedEvent,
+    TaskCreatedEvent,
+    TaskDecomposedEvent,
+    TaskFailedEvent,
+    TaskStartedEvent,
+    WorkerCreatedEvent,
+    WorkerDeletedEvent,
+)
+
+
+class WorkforceCallback(ABC):
+    r"""Interface for recording workforce lifecycle events.
+
+    Implementations should persist or stream events as appropriate.
+    """
+
+    @abstractmethod
+    def log_task_created(
+        self,
+        event: TaskCreatedEvent,
+    ) -> None:
+        pass
+
+    @abstractmethod
+    def log_task_decomposed(self, event: TaskDecomposedEvent) -> None:
+        pass
+
+    @abstractmethod
+    def log_task_assigned(self, event: TaskAssignedEvent) -> None:
+        pass
+
+    @abstractmethod
+    def log_task_started(self, event: TaskStartedEvent) -> None:
+        pass
+
+    @abstractmethod
+    def log_task_completed(self, event: TaskCompletedEvent) -> None:
+        pass
+
+    @abstractmethod
+    def log_task_failed(self, event: TaskFailedEvent) -> None:
+        pass
+
+    @abstractmethod
+    def log_worker_created(self, event: WorkerCreatedEvent) -> None:
+        pass
+
+    @abstractmethod
+    def log_worker_deleted(self, event: WorkerDeletedEvent) -> None:
+        pass
+
+    @abstractmethod
+    def log_all_tasks_completed(self, event: AllTasksCompletedEvent) -> None:
+        pass
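The new WorkforceCallback interface above only defines abstract hooks; concrete behaviour is left to implementers such as the updated WorkforceLogger below. As a rough sketch of what a custom implementation could look like (the class name InMemoryCallback and its list-backed storage are illustrative assumptions, not part of this release; only the imports, event types, and method signatures come from the diff):

# Illustrative sketch (not part of the package): a minimal WorkforceCallback
# implementation that keeps every lifecycle event in memory.
from typing import List

from camel.societies.workforce.events import (
    AllTasksCompletedEvent,
    TaskAssignedEvent,
    TaskCompletedEvent,
    TaskCreatedEvent,
    TaskDecomposedEvent,
    TaskFailedEvent,
    TaskStartedEvent,
    WorkerCreatedEvent,
    WorkerDeletedEvent,
)
from camel.societies.workforce.workforce_callback import WorkforceCallback


class InMemoryCallback(WorkforceCallback):
    r"""Hypothetical callback that appends every event to a list."""

    def __init__(self) -> None:
        self.events: List[object] = []

    def _record(self, event: object) -> None:
        self.events.append(event)

    def log_task_created(self, event: TaskCreatedEvent) -> None:
        self._record(event)

    def log_task_decomposed(self, event: TaskDecomposedEvent) -> None:
        self._record(event)

    def log_task_assigned(self, event: TaskAssignedEvent) -> None:
        self._record(event)

    def log_task_started(self, event: TaskStartedEvent) -> None:
        self._record(event)

    def log_task_completed(self, event: TaskCompletedEvent) -> None:
        self._record(event)

    def log_task_failed(self, event: TaskFailedEvent) -> None:
        self._record(event)

    def log_worker_created(self, event: WorkerCreatedEvent) -> None:
        self._record(event)

    def log_worker_deleted(self, event: WorkerDeletedEvent) -> None:
        self._record(event)

    def log_all_tasks_completed(self, event: AllTasksCompletedEvent) -> None:
        self._record(event)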
camel/societies/workforce/workforce_logger.py
CHANGED
@@ -16,12 +16,26 @@ from datetime import datetime, timezone
 from typing import Any, Dict, List, Optional

 from camel.logger import get_logger
+from camel.societies.workforce.events import (
+    AllTasksCompletedEvent,
+    QueueStatusEvent,
+    TaskAssignedEvent,
+    TaskCompletedEvent,
+    TaskCreatedEvent,
+    TaskDecomposedEvent,
+    TaskFailedEvent,
+    TaskStartedEvent,
+    WorkerCreatedEvent,
+    WorkerDeletedEvent,
+)
+from camel.societies.workforce.workforce_callback import WorkforceCallback
+from camel.societies.workforce.workforce_metrics import WorkforceMetrics
 from camel.types.agents import ToolCallingRecord

 logger = get_logger(__name__)


-class WorkforceLogger:
+class WorkforceLogger(WorkforceCallback, WorkforceMetrics):
     r"""Logs events and metrics for a Workforce instance."""

     def __init__(self, workforce_id: str):
@@ -55,197 +69,201 @@ class WorkforceLogger:

     def log_task_created(
         self,
-
-        description: str,
-        parent_task_id: Optional[str] = None,
-        task_type: Optional[str] = None,
-        metadata: Optional[Dict[str, Any]] = None,
+        event: TaskCreatedEvent,
     ) -> None:
         r"""Logs the creation of a new task."""
         self._log_event(
-
-            task_id=task_id,
-            description=description,
-            parent_task_id=parent_task_id,
-            task_type=task_type,
-            metadata=metadata or {},
+            event_type=event.event_type,
+            task_id=event.task_id,
+            description=event.description,
+            parent_task_id=event.parent_task_id,
+            task_type=event.task_type,
+            metadata=event.metadata or {},
         )
-        self._task_hierarchy[task_id] = {
-            'parent': parent_task_id,
+        self._task_hierarchy[event.task_id] = {
+            'parent': event.parent_task_id,
             'children': [],
             'status': 'created',
-            'description': description,
+            'description': event.description,
             'assigned_to': None,
-            **(metadata or {}),
+            **(event.metadata or {}),
         }
-        if
-
+        if (
+            event.parent_task_id
+            and event.parent_task_id in self._task_hierarchy
+        ):
+            self._task_hierarchy[event.parent_task_id]['children'].append(
+                event.task_id
+            )

     def log_task_decomposed(
         self,
-
-        subtask_ids: List[str],
-        metadata: Optional[Dict[str, Any]] = None,
+        event: TaskDecomposedEvent,
     ) -> None:
         r"""Logs the decomposition of a task into subtasks."""
         self._log_event(
-
-            parent_task_id=parent_task_id,
-            subtask_ids=subtask_ids,
-            metadata=metadata or {},
+            event_type=event.event_type,
+            parent_task_id=event.parent_task_id,
+            subtask_ids=event.subtask_ids,
+            metadata=event.metadata or {},
         )
-        if parent_task_id in self._task_hierarchy:
-            self._task_hierarchy[parent_task_id]['status'] = "decomposed"
+        if event.parent_task_id in self._task_hierarchy:
+            self._task_hierarchy[event.parent_task_id]['status'] = "decomposed"

     def log_task_assigned(
         self,
-
-        worker_id: str,
-        queue_time_seconds: Optional[float] = None,
-        dependencies: Optional[List[str]] = None,
-        metadata: Optional[Dict[str, Any]] = None,
+        event: TaskAssignedEvent,
     ) -> None:
         r"""Logs the assignment of a task to a worker."""
         self._log_event(
-
-            task_id=task_id,
-            worker_id=worker_id,
-            queue_time_seconds=queue_time_seconds,
-            dependencies=dependencies or [],
-            metadata=metadata or {},
+            event_type=event.event_type,
+            task_id=event.task_id,
+            worker_id=event.worker_id,
+            queue_time_seconds=event.queue_time_seconds,
+            dependencies=event.dependencies or [],
+            metadata=event.metadata or {},
         )
-        if task_id in self._task_hierarchy:
-            self._task_hierarchy[task_id]['status'] = 'assigned'
-            self._task_hierarchy[task_id]['assigned_to'] =
-
-
-            self.
-
+        if event.task_id in self._task_hierarchy:
+            self._task_hierarchy[event.task_id]['status'] = 'assigned'
+            self._task_hierarchy[event.task_id]['assigned_to'] = (
+                event.worker_id
+            )
+            self._task_hierarchy[event.task_id]['dependencies'] = (
+                event.dependencies or []
+            )
+        if event.worker_id in self._worker_information:
+            self._worker_information[event.worker_id]['current_task_id'] = (
+                event.task_id
+            )
+            self._worker_information[event.worker_id]['status'] = 'busy'

     def log_task_started(
         self,
-
-        worker_id: str,
-        metadata: Optional[Dict[str, Any]] = None,
+        event: TaskStartedEvent,
     ) -> None:
         r"""Logs when a worker starts processing a task."""
         self._log_event(
-
-            task_id=task_id,
-            worker_id=worker_id,
-            metadata=metadata or {},
+            event_type=event.event_type,
+            task_id=event.task_id,
+            worker_id=event.worker_id,
+            metadata=event.metadata or {},
         )
-        if task_id in self._task_hierarchy:
-            self._task_hierarchy[task_id]['status'] = 'processing'
+        if event.task_id in self._task_hierarchy:
+            self._task_hierarchy[event.task_id]['status'] = 'processing'

-    def log_task_completed(
-        self,
-        task_id: str,
-        worker_id: str,
-        result_summary: Optional[str] = None,
-        processing_time_seconds: Optional[float] = None,
-        token_usage: Optional[Dict[str, int]] = None,
-        metadata: Optional[Dict[str, Any]] = None,
-    ) -> None:
+    def log_task_completed(self, event: TaskCompletedEvent) -> None:
         r"""Logs the successful completion of a task."""
         self._log_event(
-
-            task_id=task_id,
-            worker_id=worker_id,
-            result_summary=result_summary,
-            processing_time_seconds=processing_time_seconds,
-            token_usage=token_usage or {},
-            metadata=metadata or {},
+            event_type=event.event_type,
+            task_id=event.task_id,
+            worker_id=event.worker_id,
+            result_summary=event.result_summary,
+            processing_time_seconds=event.processing_time_seconds,
+            token_usage=event.token_usage or {},
+            metadata=event.metadata or {},
         )
-        if task_id in self._task_hierarchy:
-            self._task_hierarchy[task_id]['status'] = 'completed'
-            self._task_hierarchy[task_id]['assigned_to'] = None
+        if event.task_id in self._task_hierarchy:
+            self._task_hierarchy[event.task_id]['status'] = 'completed'
+            self._task_hierarchy[event.task_id]['assigned_to'] = None
             # Store processing time in task hierarchy for display in tree
-            if processing_time_seconds is not None:
-                self._task_hierarchy[task_id][
-
-
+            if event.processing_time_seconds is not None:
+                self._task_hierarchy[event.task_id][
+                    'completion_time_seconds'
+                ] = event.processing_time_seconds
             # Store token usage in task hierarchy for display in tree
-            if token_usage is not None:
-                self._task_hierarchy[task_id]['token_usage'] =
-
-
-
-            self._worker_information[worker_id]['
-
+            if event.token_usage is not None:
+                self._task_hierarchy[event.task_id]['token_usage'] = (
+                    event.token_usage
+                )
+        if event.worker_id in self._worker_information:
+            self._worker_information[event.worker_id]['current_task_id'] = None
+            self._worker_information[event.worker_id]['status'] = 'idle'
+            self._worker_information[event.worker_id]['tasks_completed'] = (
+                self._worker_information[event.worker_id].get(
+                    'tasks_completed', 0
+                )
                 + 1
             )

     def log_task_failed(
         self,
-
-        error_message: str,
-        error_type: str,
-        worker_id: Optional[str] = None,
-        metadata: Optional[Dict[str, Any]] = None,
+        event: TaskFailedEvent,
     ) -> None:
         r"""Logs the failure of a task."""
         self._log_event(
-
-            task_id=task_id,
-            worker_id=worker_id,
-            error_message=error_message,
-
-            metadata=metadata or {},
+            event_type=event.event_type,
+            task_id=event.task_id,
+            worker_id=event.worker_id,
+            error_message=event.error_message,
+            metadata=event.metadata or {},
         )
-        if task_id in self._task_hierarchy:
-            self._task_hierarchy[task_id]['status'] = 'failed'
-            self._task_hierarchy[task_id]['error'] = error_message
-            self._task_hierarchy[task_id]['assigned_to'] = None
-        if worker_id and worker_id in self._worker_information:
-            self._worker_information[worker_id]['current_task_id'] = None
-            self._worker_information[worker_id]['status'] = 'idle'
-            self._worker_information[worker_id]['tasks_failed'] = (
-                self._worker_information[worker_id].get(
+        if event.task_id in self._task_hierarchy:
+            self._task_hierarchy[event.task_id]['status'] = 'failed'
+            self._task_hierarchy[event.task_id]['error'] = event.error_message
+            self._task_hierarchy[event.task_id]['assigned_to'] = None
+        if event.worker_id and event.worker_id in self._worker_information:
+            self._worker_information[event.worker_id]['current_task_id'] = None
+            self._worker_information[event.worker_id]['status'] = 'idle'
+            self._worker_information[event.worker_id]['tasks_failed'] = (
+                self._worker_information[event.worker_id].get(
+                    'tasks_failed', 0
+                )
+                + 1
             )

     def log_worker_created(
         self,
-
-        worker_type: str,
-        role: str,
-        metadata: Optional[Dict[str, Any]] = None,
+        event: WorkerCreatedEvent,
     ) -> None:
         r"""Logs the creation of a new worker."""
         self._log_event(
-
-            worker_id=worker_id,
-            worker_type=worker_type,
-            role=role,
-            metadata=metadata or {},
+            event_type=event.event_type,
+            worker_id=event.worker_id,
+            worker_type=event.worker_type,
+            role=event.role,
+            metadata=event.metadata or {},
         )
-        self._worker_information[worker_id] = {
-            'type': worker_type,
-            'role': role,
+        self._worker_information[event.worker_id] = {
+            'type': event.worker_type,
+            'role': event.role,
             'status': 'idle',
             'current_task_id': None,
             'tasks_completed': 0,
             'tasks_failed': 0,
-            **(metadata or {}),
+            **(event.metadata or {}),
         }

     def log_worker_deleted(
         self,
-
-        reason: Optional[str] = None,
-        metadata: Optional[Dict[str, Any]] = None,
+        event: WorkerDeletedEvent,
     ) -> None:
         r"""Logs the deletion of a worker."""
         self._log_event(
-
-            worker_id=worker_id,
-            reason=reason,
-            metadata=metadata or {},
+            event_type=event.event_type,
+            worker_id=event.worker_id,
+            reason=event.reason,
+            metadata=event.metadata or {},
         )
-        if worker_id in self._worker_information:
-            self._worker_information[worker_id]['status'] = 'deleted'
+        if event.worker_id in self._worker_information:
+            self._worker_information[event.worker_id]['status'] = 'deleted'
             # Or del self._worker_information[worker_id]

+    def log_queue_status(
+        self,
+        event: QueueStatusEvent,
+    ) -> None:
+        r"""Logs the status of a task queue."""
+        self._log_event(
+            event_type=event.event_type,
+            queue_name=event.queue_name,
+            length=event.length,
+            pending_task_ids=event.pending_task_ids or [],
+            metadata=event.metadata or {},
+        )
+
+    def log_all_tasks_completed(self, event: AllTasksCompletedEvent) -> None:
+        pass
+
     def reset_task_data(self) -> None:
         r"""Resets logs and data related to tasks, preserving worker
         information.
@@ -265,22 +283,6 @@ class WorkforceLogger:
             f"{self.workforce_id}"
         )

-    def log_queue_status(
-        self,
-        queue_name: str,
-        length: int,
-        pending_task_ids: Optional[List[str]] = None,
-        metadata: Optional[Dict[str, Any]] = None,
-    ) -> None:
-        r"""Logs the status of a task queue."""
-        self._log_event(
-            'queue_status',
-            queue_name=queue_name,
-            length=length,
-            pending_task_ids=pending_task_ids or [],
-            metadata=metadata or {},
-        )
-
     def dump_to_json(self, file_path: str) -> None:
         r"""Dumps all log entries to a JSON file.

@@ -484,11 +486,9 @@
             'total_tasks_created': 0,
             'total_tasks_completed': 0,
             'total_tasks_failed': 0,
-            'error_types_count': {},
             'worker_utilization': {},
             'current_pending_tasks': 0,
             'total_workforce_running_time_seconds': 0.0,
-            'avg_task_queue_time_seconds': 0.0,
         }

         task_start_times: Dict[str, float] = {}
@@ -499,59 +499,79 @@

         tasks_handled_by_worker: Dict[str, int] = {}

+        # Track unique task final states to avoid double-counting
+        task_final_states: Dict[
+            str, str
+        ] = {}  # task_id -> 'completed' or 'failed'
+
+        # Helper function to check if a task is the main task (has no parent)
+        def is_main_task(task_id: str) -> bool:
+            return (
+                task_id in self._task_hierarchy
+                and self._task_hierarchy[task_id].get('parent') is None
+            )
+
         for entry in self.log_entries:
             event_type = entry['event_type']
             timestamp = datetime.fromisoformat(entry['timestamp'])
+            task_id = entry.get('task_id', '')
+
             if first_timestamp is None or timestamp < first_timestamp:
                 first_timestamp = timestamp
             if last_timestamp is None or timestamp > last_timestamp:
                 last_timestamp = timestamp

             if event_type == 'task_created':
-
-
+                # Exclude main task from total count
+                if not is_main_task(task_id):
+                    kpis['total_tasks_created'] += 1
+                task_creation_timestamps[task_id] = timestamp
             elif event_type == 'task_assigned':
-                task_assignment_timestamps[
+                task_assignment_timestamps[task_id] = timestamp
                 # Queue time tracking has been removed

             elif event_type == 'task_started':
                 # Store start time for processing time calculation
-                task_start_times[
+                task_start_times[task_id] = timestamp.timestamp()

             elif event_type == 'task_completed':
-
-
-
-
-
-
-
+                # Exclude main task from total count
+                if not is_main_task(task_id):
+                    # Track final state - a completed task overwrites any
+                    # previous failed state
+                    task_final_states[task_id] = 'completed'
+                    # Count tasks handled by worker (only for non-main tasks)
+                    if 'worker_id' in entry and entry['worker_id'] is not None:
+                        worker_id = entry['worker_id']
+                        tasks_handled_by_worker[worker_id] = (
+                            tasks_handled_by_worker.get(worker_id, 0) + 1
+                        )

-                if
+                if task_id in task_assignment_timestamps:
                     completion_time = (
-                        timestamp
-                        - task_assignment_timestamps[entry['task_id']]
+                        timestamp - task_assignment_timestamps[task_id]
                     ).total_seconds()
                     # Store completion time in task hierarchy instead of KPIs
                     # array
-                    if
-                    self._task_hierarchy[
+                    if task_id in self._task_hierarchy:
+                        self._task_hierarchy[task_id][
                             'completion_time_seconds'
                         ] = completion_time

             elif event_type == 'task_failed':
-
-
-
-
-
-
-
-
-
-
-
-
+                # Exclude main task from total count
+                if not is_main_task(task_id):
+                    # Only track as failed if not already completed
+                    # (in case of retries, the final completion overwrites
+                    # failed state)
+                    if task_final_states.get(task_id) != 'completed':
+                        task_final_states[task_id] = 'failed'
+                    # Count tasks handled by worker (only for non-main tasks)
+                    if 'worker_id' in entry and entry['worker_id'] is not None:
+                        worker_id = entry['worker_id']
+                        tasks_handled_by_worker[worker_id] = (
+                            tasks_handled_by_worker.get(worker_id, 0) + 1
+                        )
             elif event_type == 'queue_status':
                 pass  # Placeholder for now

@@ -560,6 +580,14 @@
             kpis['total_workforce_running_time_seconds'] = (
                 last_timestamp - first_timestamp
             ).total_seconds()
+
+        # Count unique tasks by final state
+        for _task_id, state in task_final_states.items():
+            if state == 'completed':
+                kpis['total_tasks_completed'] += 1
+            elif state == 'failed':
+                kpis['total_tasks_failed'] += 1
+
         # Calculate worker utilization based on proportion of tasks handled
         total_tasks_processed_for_utilization = (
             kpis['total_tasks_completed'] + kpis['total_tasks_failed']
@@ -605,9 +633,9 @@

         kpis['total_workers_created'] = len(self._worker_information)

-        # Current pending tasks
-        kpis['current_pending_tasks'] = kpis['total_tasks_created'] - (
-
+        # Current pending tasks - tasks created but not yet completed or failed
+        kpis['current_pending_tasks'] = kpis['total_tasks_created'] - len(
+            task_final_states
         )

         return kpis
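The get_kpis() changes above replace per-event counters with a single final state per task id, so a task that fails and is later retried to completion is counted once as completed, and pending work is derived from the task ids that still have no final state. A simplified, standalone sketch of that counting rule (plain dicts standing in for log entries; the main-task exclusion and worker tallies are omitted here):

# Simplified illustration of the new KPI counting rule (not camel code):
# each task id contributes at most one final state, and a later completion
# overwrites an earlier failure, so retried tasks are not double-counted.
events = [
    {'event_type': 'task_created', 'task_id': 't1'},
    {'event_type': 'task_failed', 'task_id': 't1'},     # first attempt fails
    {'event_type': 'task_completed', 'task_id': 't1'},  # retry succeeds
    {'event_type': 'task_created', 'task_id': 't2'},
]

task_final_states = {}  # task_id -> 'completed' or 'failed'
total_created = 0
for entry in events:
    task_id = entry['task_id']
    if entry['event_type'] == 'task_created':
        total_created += 1
    elif entry['event_type'] == 'task_completed':
        task_final_states[task_id] = 'completed'
    elif entry['event_type'] == 'task_failed':
        if task_final_states.get(task_id) != 'completed':
            task_final_states[task_id] = 'failed'

completed = sum(1 for s in task_final_states.values() if s == 'completed')
failed = sum(1 for s in task_final_states.values() if s == 'failed')
pending = total_created - len(task_final_states)
print(completed, failed, pending)  # -> 1 0 1 (t1 completed once, t2 pending)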
camel/societies/workforce/workforce_metrics.py
ADDED
@@ -0,0 +1,33 @@
+# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+from abc import ABC, abstractmethod
+from typing import Any, Dict
+
+
+class WorkforceMetrics(ABC):
+    @abstractmethod
+    def reset_task_data(self) -> None:
+        pass
+
+    @abstractmethod
+    def dump_to_json(self, file_path: str) -> None:
+        pass
+
+    @abstractmethod
+    def get_ascii_tree_representation(self) -> str:
+        pass
+
+    @abstractmethod
+    def get_kpis(self) -> Dict[str, Any]:
+        pass
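WorkforceMetrics above pairs with WorkforceCallback: WorkforceLogger now subclasses both, so a single object can receive lifecycle events and report metrics. A small sanity-check sketch (illustrative only; it exercises nothing beyond what the diff itself declares):

# Illustrative check (not part of the diff): WorkforceLogger implements both
# interfaces introduced in this release.
from camel.societies.workforce.workforce_callback import WorkforceCallback
from camel.societies.workforce.workforce_logger import WorkforceLogger
from camel.societies.workforce.workforce_metrics import WorkforceMetrics

logger = WorkforceLogger(workforce_id="wf-demo")  # signature shown in the diff
assert isinstance(logger, WorkforceCallback)  # event hooks: log_task_*, ...
assert isinstance(logger, WorkforceMetrics)   # reset_task_data, dump_to_json,
                                              # get_ascii_tree_representation,
                                              # get_kpis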
camel/storages/__init__.py
CHANGED
@@ -26,9 +26,11 @@ from .vectordb_storages.base import (
     VectorDBQueryResult,
     VectorRecord,
 )
+from .vectordb_storages.chroma import ChromaStorage
 from .vectordb_storages.faiss import FaissStorage
 from .vectordb_storages.milvus import MilvusStorage
 from .vectordb_storages.oceanbase import OceanBaseStorage
+from .vectordb_storages.pgvector import PgVectorStorage
 from .vectordb_storages.qdrant import QdrantStorage
 from .vectordb_storages.tidb import TiDBStorage
 from .vectordb_storages.weaviate import WeaviateStorage
@@ -52,4 +54,6 @@ __all__ = [
     'Mem0Storage',
     'OceanBaseStorage',
     'WeaviateStorage',
+    'PgVectorStorage',
+    'ChromaStorage',
 ]