autobyteus-1.1.3-py3-none-any.whl → autobyteus-1.1.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- autobyteus/agent/agent.py +1 -1
- autobyteus/agent/bootstrap_steps/system_prompt_processing_step.py +4 -2
- autobyteus/agent/context/agent_config.py +36 -5
- autobyteus/agent/events/worker_event_dispatcher.py +1 -2
- autobyteus/agent/handlers/inter_agent_message_event_handler.py +1 -1
- autobyteus/agent/handlers/llm_user_message_ready_event_handler.py +2 -2
- autobyteus/agent/handlers/tool_result_event_handler.py +48 -20
- autobyteus/agent/handlers/user_input_message_event_handler.py +1 -1
- autobyteus/agent/input_processor/__init__.py +1 -7
- autobyteus/agent/message/context_file_type.py +6 -0
- autobyteus/agent/message/send_message_to.py +68 -99
- autobyteus/agent/phases/discover.py +2 -1
- autobyteus/agent/runtime/agent_worker.py +1 -0
- autobyteus/agent/tool_execution_result_processor/__init__.py +9 -0
- autobyteus/agent/tool_execution_result_processor/base_processor.py +46 -0
- autobyteus/agent/tool_execution_result_processor/processor_definition.py +36 -0
- autobyteus/agent/tool_execution_result_processor/processor_meta.py +36 -0
- autobyteus/agent/tool_execution_result_processor/processor_registry.py +70 -0
- autobyteus/agent/workspace/base_workspace.py +17 -2
- autobyteus/cli/__init__.py +1 -1
- autobyteus/cli/cli_display.py +1 -1
- autobyteus/cli/workflow_tui/__init__.py +4 -0
- autobyteus/cli/workflow_tui/app.py +210 -0
- autobyteus/cli/workflow_tui/state.py +189 -0
- autobyteus/cli/workflow_tui/widgets/__init__.py +6 -0
- autobyteus/cli/workflow_tui/widgets/agent_list_sidebar.py +149 -0
- autobyteus/cli/workflow_tui/widgets/focus_pane.py +335 -0
- autobyteus/cli/workflow_tui/widgets/logo.py +27 -0
- autobyteus/cli/workflow_tui/widgets/renderables.py +70 -0
- autobyteus/cli/workflow_tui/widgets/shared.py +51 -0
- autobyteus/cli/workflow_tui/widgets/status_bar.py +14 -0
- autobyteus/events/event_types.py +3 -0
- autobyteus/llm/api/lmstudio_llm.py +37 -0
- autobyteus/llm/api/openai_compatible_llm.py +20 -3
- autobyteus/llm/llm_factory.py +2 -0
- autobyteus/llm/lmstudio_provider.py +89 -0
- autobyteus/llm/providers.py +1 -0
- autobyteus/llm/token_counter/token_counter_factory.py +2 -0
- autobyteus/tools/__init__.py +2 -0
- autobyteus/tools/ask_user_input.py +2 -1
- autobyteus/tools/bash/bash_executor.py +2 -1
- autobyteus/tools/browser/session_aware/browser_session_aware_navigate_to.py +2 -0
- autobyteus/tools/browser/session_aware/browser_session_aware_web_element_trigger.py +3 -0
- autobyteus/tools/browser/session_aware/browser_session_aware_webpage_reader.py +3 -0
- autobyteus/tools/browser/session_aware/browser_session_aware_webpage_screenshot_taker.py +3 -0
- autobyteus/tools/browser/standalone/google_search_ui.py +2 -0
- autobyteus/tools/browser/standalone/navigate_to.py +2 -0
- autobyteus/tools/browser/standalone/web_page_pdf_generator.py +3 -0
- autobyteus/tools/browser/standalone/webpage_image_downloader.py +3 -0
- autobyteus/tools/browser/standalone/webpage_reader.py +2 -0
- autobyteus/tools/browser/standalone/webpage_screenshot_taker.py +3 -0
- autobyteus/tools/file/file_reader.py +36 -9
- autobyteus/tools/file/file_writer.py +37 -9
- autobyteus/tools/functional_tool.py +5 -4
- autobyteus/tools/image_downloader.py +2 -0
- autobyteus/tools/mcp/tool_registrar.py +3 -1
- autobyteus/tools/pdf_downloader.py +2 -1
- autobyteus/tools/registry/tool_definition.py +12 -8
- autobyteus/tools/registry/tool_registry.py +50 -2
- autobyteus/tools/timer.py +2 -0
- autobyteus/tools/tool_category.py +14 -4
- autobyteus/tools/tool_meta.py +6 -1
- autobyteus/tools/tool_origin.py +10 -0
- autobyteus/workflow/agentic_workflow.py +93 -0
- autobyteus/{agent/workflow → workflow}/base_agentic_workflow.py +19 -27
- autobyteus/workflow/bootstrap_steps/__init__.py +20 -0
- autobyteus/workflow/bootstrap_steps/agent_tool_injection_step.py +34 -0
- autobyteus/workflow/bootstrap_steps/base_workflow_bootstrap_step.py +23 -0
- autobyteus/workflow/bootstrap_steps/coordinator_initialization_step.py +41 -0
- autobyteus/workflow/bootstrap_steps/coordinator_prompt_preparation_step.py +108 -0
- autobyteus/workflow/bootstrap_steps/workflow_bootstrapper.py +50 -0
- autobyteus/workflow/bootstrap_steps/workflow_runtime_queue_initialization_step.py +25 -0
- autobyteus/workflow/context/__init__.py +17 -0
- autobyteus/workflow/context/team_manager.py +147 -0
- autobyteus/workflow/context/workflow_config.py +30 -0
- autobyteus/workflow/context/workflow_context.py +61 -0
- autobyteus/workflow/context/workflow_node_config.py +76 -0
- autobyteus/workflow/context/workflow_runtime_state.py +53 -0
- autobyteus/workflow/events/__init__.py +29 -0
- autobyteus/workflow/events/workflow_event_dispatcher.py +39 -0
- autobyteus/workflow/events/workflow_events.py +53 -0
- autobyteus/workflow/events/workflow_input_event_queue_manager.py +21 -0
- autobyteus/workflow/exceptions.py +8 -0
- autobyteus/workflow/factory/__init__.py +9 -0
- autobyteus/workflow/factory/workflow_factory.py +99 -0
- autobyteus/workflow/handlers/__init__.py +19 -0
- autobyteus/workflow/handlers/base_workflow_event_handler.py +16 -0
- autobyteus/workflow/handlers/inter_agent_message_request_event_handler.py +61 -0
- autobyteus/workflow/handlers/lifecycle_workflow_event_handler.py +27 -0
- autobyteus/workflow/handlers/process_user_message_event_handler.py +46 -0
- autobyteus/workflow/handlers/tool_approval_workflow_event_handler.py +39 -0
- autobyteus/workflow/handlers/workflow_event_handler_registry.py +23 -0
- autobyteus/workflow/phases/__init__.py +11 -0
- autobyteus/workflow/phases/workflow_operational_phase.py +19 -0
- autobyteus/workflow/phases/workflow_phase_manager.py +48 -0
- autobyteus/workflow/runtime/__init__.py +13 -0
- autobyteus/workflow/runtime/workflow_runtime.py +82 -0
- autobyteus/workflow/runtime/workflow_worker.py +117 -0
- autobyteus/workflow/shutdown_steps/__init__.py +17 -0
- autobyteus/workflow/shutdown_steps/agent_team_shutdown_step.py +42 -0
- autobyteus/workflow/shutdown_steps/base_workflow_shutdown_step.py +16 -0
- autobyteus/workflow/shutdown_steps/bridge_cleanup_step.py +28 -0
- autobyteus/workflow/shutdown_steps/sub_workflow_shutdown_step.py +41 -0
- autobyteus/workflow/shutdown_steps/workflow_shutdown_orchestrator.py +35 -0
- autobyteus/workflow/streaming/__init__.py +26 -0
- autobyteus/workflow/streaming/agent_event_bridge.py +48 -0
- autobyteus/workflow/streaming/agent_event_multiplexer.py +70 -0
- autobyteus/workflow/streaming/workflow_event_bridge.py +50 -0
- autobyteus/workflow/streaming/workflow_event_notifier.py +83 -0
- autobyteus/workflow/streaming/workflow_event_stream.py +33 -0
- autobyteus/workflow/streaming/workflow_stream_event_payloads.py +28 -0
- autobyteus/workflow/streaming/workflow_stream_events.py +45 -0
- autobyteus/workflow/utils/__init__.py +9 -0
- autobyteus/workflow/utils/wait_for_idle.py +46 -0
- autobyteus/workflow/workflow_builder.py +151 -0
- {autobyteus-1.1.3.dist-info → autobyteus-1.1.4.dist-info}/METADATA +16 -14
- {autobyteus-1.1.3.dist-info → autobyteus-1.1.4.dist-info}/RECORD +134 -65
- {autobyteus-1.1.3.dist-info → autobyteus-1.1.4.dist-info}/top_level.txt +1 -0
- examples/__init__.py +1 -0
- examples/discover_phase_transitions.py +104 -0
- examples/run_browser_agent.py +260 -0
- examples/run_google_slides_agent.py +286 -0
- examples/run_mcp_browser_client.py +174 -0
- examples/run_mcp_google_slides_client.py +270 -0
- examples/run_mcp_list_tools.py +189 -0
- examples/run_poem_writer.py +274 -0
- examples/run_sqlite_agent.py +293 -0
- examples/workflow/__init__.py +1 -0
- examples/workflow/run_basic_research_workflow.py +189 -0
- examples/workflow/run_code_review_workflow.py +269 -0
- examples/workflow/run_debate_workflow.py +212 -0
- examples/workflow/run_workflow_with_tui.py +153 -0
- autobyteus/agent/context/agent_phase_manager.py +0 -264
- autobyteus/agent/context/phases.py +0 -49
- autobyteus/agent/group/__init__.py +0 -0
- autobyteus/agent/group/agent_group.py +0 -164
- autobyteus/agent/group/agent_group_context.py +0 -81
- autobyteus/agent/input_processor/content_prefixing_input_processor.py +0 -41
- autobyteus/agent/input_processor/metadata_appending_input_processor.py +0 -34
- autobyteus/agent/input_processor/passthrough_input_processor.py +0 -33
- autobyteus/agent/workflow/__init__.py +0 -11
- autobyteus/agent/workflow/agentic_workflow.py +0 -89
- autobyteus/tools/mcp/registrar.py +0 -202
- autobyteus/workflow/simple_task.py +0 -98
- autobyteus/workflow/task.py +0 -147
- autobyteus/workflow/workflow.py +0 -49
- {autobyteus-1.1.3.dist-info → autobyteus-1.1.4.dist-info}/WHEEL +0 -0
- {autobyteus-1.1.3.dist-info → autobyteus-1.1.4.dist-info}/licenses/LICENSE +0 -0
@@ -1,81 +0,0 @@
-# file: autobyteus/autobyteus/agent/group/agent_group_context.py
-import logging
-from typing import List, Dict, Optional, TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from autobyteus.agent.agent import Agent
-
-logger = logging.getLogger(__name__)
-
-class AgentGroupContext:
-    """
-    Stores contextual information about an agent group, including its ID,
-    member agents, and the designated coordinator. Provides methods to
-    discover agents within the group.
-    """
-    def __init__(self,
-                 group_id: str,
-                 agents: List['Agent'],
-                 coordinator_agent_id: str):
-        """
-        Initializes the AgentGroupContext.
-        """
-        if not group_id or not isinstance(group_id, str):
-            raise ValueError("AgentGroupContext requires a non-empty string 'group_id'.")
-        if not coordinator_agent_id or not isinstance(coordinator_agent_id, str):
-            raise ValueError("AgentGroupContext requires a non-empty string 'coordinator_agent_id'.")
-        if not agents:
-            raise ValueError("AgentGroupContext requires a non-empty list of 'agents'.")
-
-        from autobyteus.agent.agent import Agent as AgentClassRef
-        if not all(isinstance(agent, AgentClassRef) for agent in agents):
-            raise TypeError("All items in 'agents' list must be instances of the 'Agent' class.")
-
-        self.group_id: str = group_id
-        self._agents_by_id: Dict[str, 'Agent'] = {agent.agent_id: agent for agent in agents}
-        self._coordinator_agent_id: str = coordinator_agent_id
-
-        if self._coordinator_agent_id not in self._agents_by_id:
-            logger.error(f"Coordinator agent with ID '{self._coordinator_agent_id}' not found in the provided list of agents for group '{self.group_id}'.")
-
-        logger.info(f"AgentGroupContext initialized for group_id '{self.group_id}'.")
-
-    def get_agent(self, agent_id: str) -> Optional['Agent']:
-        """
-        Retrieves an agent from the group by its unique agent_id.
-        """
-        return self._agents_by_id.get(agent_id)
-
-    def get_agents_by_role(self, role_name: str) -> List['Agent']:
-        """
-        Retrieves all agents within the group that match the specified role name.
-        """
-        if not isinstance(role_name, str):
-            logger.warning(f"Attempted to get_agents_by_role with non-string role_name: {role_name} in group '{self.group_id}'.")
-            return []
-
-        matching_agents: List['Agent'] = [
-            agent for agent in self._agents_by_id.values()
-            if agent.context and agent.context.config and agent.context.config.role == role_name
-        ]
-
-        if not matching_agents:
-            logger.debug(f"No agents found with role '{role_name}' in group '{self.group_id}'.")
-        return matching_agents
-
-    def get_coordinator_agent(self) -> Optional['Agent']:
-        """
-        Retrieves the designated coordinator agent for this group.
-        """
-        return self.get_agent(self._coordinator_agent_id)
-
-    def get_all_agents(self) -> List['Agent']:
-        """
-        Retrieves all agents currently part of this group.
-        """
-        return list(self._agents_by_id.values())
-
-    def __repr__(self) -> str:
-        return (f"<AgentGroupContext group_id='{self.group_id}', "
-                f"num_agents={len(self._agents_by_id)}, "
-                f"coordinator_id='{self._coordinator_agent_id}'>")
@@ -1,41 +0,0 @@
-# file: autobyteus/autobyteus/agent/input_processor/content_prefixing_input_processor.py
-import logging
-from typing import TYPE_CHECKING
-
-from .base_user_input_processor import BaseAgentUserInputMessageProcessor
-
-if TYPE_CHECKING:
-    from autobyteus.agent.message.agent_input_user_message import AgentInputUserMessage
-    from autobyteus.agent.context import AgentContext # Composite AgentContext
-    from autobyteus.agent.events import UserMessageReceivedEvent
-
-logger = logging.getLogger(__name__)
-
-class ContentPrefixingInputProcessor(BaseAgentUserInputMessageProcessor):
-    """
-    A processor that adds a predefined prefix to the message content.
-    The prefix is defined by the agent's custom_data (in AgentRuntimeState) or a default.
-    Example prefix key in custom_data: "content_prefix"
-    """
-    DEFAULT_PREFIX = "[Processed Message] "
-
-    async def process(self,
-                      message: 'AgentInputUserMessage',
-                      context: 'AgentContext',
-                      triggering_event: 'UserMessageReceivedEvent') -> 'AgentInputUserMessage':
-        """
-        Handles the message by prefixing its content.
-        The 'triggering_event' parameter is ignored by this processor.
-        """
-        agent_id = context.agent_id # Convenience property
-        logger.debug(f"Agent '{agent_id}': ContentPrefixingInputProcessor processing message.")
-
-        # Access custom_data via convenience property (or context.state.custom_data)
-        prefix = context.custom_data.get("content_prefix", self.DEFAULT_PREFIX)
-        if not isinstance(prefix, str):
-            logger.warning(f"Agent '{agent_id}': 'content_prefix' in custom_data is not a string. Using default prefix. Found: {type(prefix)}")
-            prefix = self.DEFAULT_PREFIX
-
-        message.content = prefix + message.content
-        logger.info(f"Agent '{agent_id}': Prefixed message content with '{prefix}'.")
-        return message
@@ -1,34 +0,0 @@
-# file: autobyteus/autobyteus/agent/input_processor/metadata_appending_input_processor.py
-import logging
-from typing import TYPE_CHECKING
-
-from .base_user_input_processor import BaseAgentUserInputMessageProcessor
-
-if TYPE_CHECKING:
-    from autobyteus.agent.message.agent_input_user_message import AgentInputUserMessage
-    from autobyteus.agent.context import AgentContext # Composite AgentContext
-    from autobyteus.agent.events import UserMessageReceivedEvent
-
-logger = logging.getLogger(__name__)
-
-class MetadataAppendingInputProcessor(BaseAgentUserInputMessageProcessor):
-    """
-    A processor that appends fixed metadata to the message.
-    Example: Appends agent_id and config_name to metadata.
-    """
-    async def process(self,
-                      message: 'AgentInputUserMessage',
-                      context: 'AgentContext',
-                      triggering_event: 'UserMessageReceivedEvent') -> 'AgentInputUserMessage':
-        """
-        Handles the message by appending metadata.
-        The 'triggering_event' parameter is ignored by this processor.
-        """
-        agent_id = context.agent_id
-        config_name = context.config.name
-
-        logger.debug(f"Agent '{agent_id}': MetadataAppendingInputProcessor processing message.")
-        message.metadata["processed_by_agent_id"] = agent_id
-        message.metadata["processed_with_config_name"] = config_name
-        logger.info(f"Agent '{agent_id}': Appended 'processed_by_agent_id' and 'processed_with_config_name' to message metadata.")
-        return message
@@ -1,33 +0,0 @@
-# file: autobyteus/autobyteus/agent/input_processor/passthrough_input_processor.py
-import logging
-from typing import TYPE_CHECKING
-
-from .base_user_input_processor import BaseAgentUserInputMessageProcessor
-
-if TYPE_CHECKING:
-    from autobyteus.agent.message.agent_input_user_message import AgentInputUserMessage
-    from autobyteus.agent.context import AgentContext # Composite AgentContext
-    from autobyteus.agent.events import UserMessageReceivedEvent
-
-logger = logging.getLogger(__name__)
-
-class PassthroughInputProcessor(BaseAgentUserInputMessageProcessor):
-    """
-    A processor that returns the message unchanged.
-    Can be used as a default or for testing.
-    """
-    @classmethod
-    def get_name(cls) -> str:
-        return "PassthroughInputProcessor"
-
-    async def process(self,
-                      message: 'AgentInputUserMessage',
-                      context: 'AgentContext',
-                      triggering_event: 'UserMessageReceivedEvent') -> 'AgentInputUserMessage':
-        """
-        Handles the message by returning it without modification.
-        The 'triggering_event' parameter is ignored by this processor.
-        """
-        agent_id = context.agent_id # Convenience property
-        logger.debug(f"Agent '{agent_id}': PassthroughInputProcessor received message, returning as is.")
-        return message
@@ -1,11 +0,0 @@
-# file: autobyteus/autobyteus/agent/workflow/__init__.py
-"""
-Components for defining and running agentic workflows.
-"""
-from .agentic_workflow import AgenticWorkflow
-from .base_agentic_workflow import BaseAgenticWorkflow
-
-__all__ = [
-    "AgenticWorkflow",
-    "BaseAgenticWorkflow",
-]
@@ -1,89 +0,0 @@
-# file: autobyteus/autobyteus/agent/workflow/agentic_workflow.py
-import logging
-import uuid
-from typing import List, Dict, Optional, Any, cast
-
-from autobyteus.agent.context.agent_config import AgentConfig
-from autobyteus.agent.group.agent_group import AgentGroup
-
-logger = logging.getLogger(__name__)
-
-class AgenticWorkflow:
-    """
-    A concrete class for defining and running multi-agent workflows declaratively.
-    It internally manages an AgentGroup and provides a user-friendly interface
-    to process tasks.
-    """
-    def __init__(self,
-                 agent_configs: List[AgentConfig],
-                 coordinator_config_name: str,
-                 workflow_id: Optional[str] = None,
-                 input_param_name: str = "input",
-                 ):
-        """
-        Initializes the AgenticWorkflow.
-
-        Args:
-            agent_configs: List of pre-made AgentConfig instances for the agents in this workflow.
-            coordinator_config_name: Name of the agent config to be used as coordinator.
-            workflow_id: Optional. A unique ID for this workflow instance. Auto-generated if None.
-            input_param_name: The key to use in `process(**kwargs)` to find the initial
-                              input string for the coordinator. Defaults to "input".
-        """
-        self.workflow_id: str = workflow_id or f"workflow_{uuid.uuid4()}"
-        self._input_param_name: str = input_param_name
-
-        logger.info(f"Initializing AgenticWorkflow '{self.workflow_id}'. "
-                    f"Input parameter name for process(): '{self._input_param_name}'.")
-
-        # The AgentGroup is now initialized directly with the user-provided configs.
-        self.agent_group: AgentGroup = AgentGroup(
-            agent_configs=agent_configs,
-            coordinator_config_name=coordinator_config_name,
-            group_id=f"group_for_{self.workflow_id}",
-        )
-        logger.info(f"AgenticWorkflow '{self.workflow_id}' successfully instantiated internal AgentGroup '{self.agent_group.group_id}'.")
-
-    async def process(self, **kwargs: Any) -> Any:
-        logger.info(f"AgenticWorkflow '{self.workflow_id}' received process request with kwargs: {list(kwargs.keys())}")
-
-        initial_input_content = kwargs.get(self._input_param_name)
-        if initial_input_content is None:
-            raise ValueError(f"Required input parameter '{self._input_param_name}' not found in process() arguments.")
-        if not isinstance(initial_input_content, str):
-            raise ValueError(f"Input parameter '{self._input_param_name}' must be a string, "
-                             f"got {type(initial_input_content).__name__}.")
-
-        user_id: Optional[str] = cast(Optional[str], kwargs.get("user_id")) if isinstance(kwargs.get("user_id"), str) else None
-
-        logger.debug(f"AgenticWorkflow '{self.workflow_id}': Extracted initial input for coordinator: '{initial_input_content[:100]}...'")
-
-        result = await self.agent_group.process_task_for_coordinator(
-            initial_input_content=initial_input_content,
-            user_id=user_id
-        )
-
-        return result
-
-
-    async def start(self) -> None:
-        logger.info(f"AgenticWorkflow '{self.workflow_id}' received start() request. Delegating to AgentGroup.")
-        await self.agent_group.start()
-
-    async def stop(self, timeout: float = 10.0) -> None:
-        logger.info(f"AgenticWorkflow '{self.workflow_id}' received stop() request. Delegating to AgentGroup.")
-        await self.agent_group.stop(timeout)
-
-    @property
-    def is_running(self) -> bool:
-        return self.agent_group.is_running
-
-    @property
-    def group_id(self) -> str:
-        return self.agent_group.group_id
-
-    def __repr__(self) -> str:
-        return (f"<AgenticWorkflow workflow_id='{self.workflow_id}', "
-                f"group_id='{self.agent_group.group_id}', "
-                f"coordinator='{self.agent_group.coordinator_config_name}', "
-                f"is_running={self.is_running}>")
@@ -1,202 +0,0 @@
-# file: autobyteus/autobyteus/tools/mcp/registrar.py
-import logging
-from typing import Any, Dict, List, Optional, Union
-
-# Consolidated imports from the autobyteus.autobyteus.mcp package public API
-from .config_service import McpConfigService
-from .factory import McpToolFactory
-from .schema_mapper import McpSchemaMapper
-from .types import BaseMcpConfig, McpTransportType
-from .server import StdioManagedMcpServer, HttpManagedMcpServer, BaseManagedMcpServer
-
-from autobyteus.tools.registry import ToolRegistry, ToolDefinition
-from autobyteus.tools.tool_category import ToolCategory
-from autobyteus.utils.singleton import SingletonMeta
-from mcp import types as mcp_types
-
-
-logger = logging.getLogger(__name__)
-
-class McpToolRegistrar(metaclass=SingletonMeta):
-    """
-    Orchestrates the discovery of remote MCP tools and their registration
-    with the AutoByteUs ToolRegistry using a stateful, server-centric architecture.
-    """
-    def __init__(self):
-        """
-        Initializes the McpToolRegistrar singleton.
-        """
-        self._config_service: McpConfigService = McpConfigService()
-        self._tool_registry: ToolRegistry = ToolRegistry()
-        self._registered_tools_by_server: Dict[str, List[ToolDefinition]] = {}
-        logger.info("McpToolRegistrar initialized.")
-
-    def _create_server_instance_for_discovery(self, server_config: BaseMcpConfig) -> BaseManagedMcpServer:
-        """Creates a server instance based on transport type."""
-        if server_config.transport_type == McpTransportType.STDIO:
-            return StdioManagedMcpServer(server_config)
-        elif server_config.transport_type == McpTransportType.STREAMABLE_HTTP:
-            return HttpManagedMcpServer(server_config)
-        else:
-            raise NotImplementedError(f"Discovery not implemented for transport type: {server_config.transport_type}")
-
-    async def _fetch_tools_from_server(self, server_config: BaseMcpConfig) -> List[mcp_types.Tool]:
-        """
-        Creates a temporary server instance to perform a single, one-shot
-        tool discovery, ensuring resources are properly closed.
-        """
-        discovery_server = self._create_server_instance_for_discovery(server_config)
-        try:
-            # The list_remote_tools method implicitly handles the connection.
-            remote_tools = await discovery_server.list_remote_tools()
-            return remote_tools
-        finally:
-            # The finally block guarantees the temporary server connection is closed.
-            await discovery_server.close()
-
-    def _create_tool_definition_from_remote(
-        self,
-        remote_tool: mcp_types.Tool,
-        server_config: BaseMcpConfig,
-        schema_mapper: McpSchemaMapper
-    ) -> ToolDefinition:
-        """
-        Maps a single remote tool from an MCP server to an AutoByteUs ToolDefinition.
-        """
-        if hasattr(remote_tool, 'model_dump_json'):
-            logger.debug(f"Processing remote tool from server '{server_config.server_id}':\n{remote_tool.model_dump_json(indent=2)}")
-
-        actual_arg_schema = schema_mapper.map_to_autobyteus_schema(remote_tool.inputSchema)
-        actual_desc = remote_tool.description
-
-        registered_name = remote_tool.name
-        if server_config.tool_name_prefix:
-            registered_name = f"{server_config.tool_name_prefix.rstrip('_')}_{remote_tool.name}"
-
-        # The factory now only needs key identifiers, not live objects.
-        tool_factory = McpToolFactory(
-            server_id=server_config.server_id,
-            remote_tool_name=remote_tool.name,
-            registered_tool_name=registered_name,
-            tool_description=actual_desc,
-            tool_argument_schema=actual_arg_schema
-        )
-
-        return ToolDefinition(
-            name=registered_name,
-            description=actual_desc,
-            argument_schema=actual_arg_schema,
-            category=ToolCategory.MCP,
-            custom_factory=tool_factory.create_tool,
-            config_schema=None,
-            tool_class=None
-        )
-
-    async def discover_and_register_tools(self, mcp_config: Optional[Union[BaseMcpConfig, Dict[str, Any]]] = None) -> List[ToolDefinition]:
-        """
-        Discovers tools from MCP servers and registers them.
-        This process uses a helper to manage short-lived server instances for discovery.
-        """
-        configs_to_process: List[BaseMcpConfig]
-
-        if mcp_config:
-            if isinstance(mcp_config, dict):
-                configs_to_process = [self._config_service.load_config(mcp_config)]
-            elif isinstance(mcp_config, BaseMcpConfig):
-                configs_to_process = [self._config_service.add_config(mcp_config)]
-            else:
-                raise TypeError(f"mcp_config must be a BaseMcpConfig object or a dictionary, not {type(mcp_config)}.")
-            logger.info(f"Starting targeted MCP tool discovery for server: {configs_to_process[0].server_id}")
-            self.unregister_tools_from_server(configs_to_process[0].server_id)
-        else:
-            logger.info("Starting full MCP tool discovery. Unregistering all existing MCP tools first.")
-            all_server_ids = list(self._registered_tools_by_server.keys())
-            for server_id in all_server_ids:
-                self.unregister_tools_from_server(server_id)
-            self._registered_tools_by_server.clear()
-            configs_to_process = self._config_service.get_all_configs()
-
-        if not configs_to_process:
-            logger.info("No MCP server configurations to process. Skipping discovery.")
-            return []
-
-        schema_mapper = McpSchemaMapper()
-        registered_tool_definitions: List[ToolDefinition] = []
-        for server_config in configs_to_process:
-            if not server_config.enabled:
-                logger.info(f"MCP server '{server_config.server_id}' is disabled. Skipping.")
-                continue
-
-            logger.info(f"Discovering tools from MCP server: '{server_config.server_id}' ({server_config.transport_type.value})")
-
-            try:
-                # The helper abstracts away the connect/close lifecycle for discovery.
-                remote_tools = await self._fetch_tools_from_server(server_config)
-                logger.info(f"Discovered {len(remote_tools)} tools from server '{server_config.server_id}'.")
-
-                for remote_tool in remote_tools:
-                    try:
-                        tool_def = self._create_tool_definition_from_remote(remote_tool, server_config, schema_mapper)
-                        self._tool_registry.register_tool(tool_def)
-                        self._registered_tools_by_server.setdefault(server_config.server_id, []).append(tool_def)
-                        registered_tool_definitions.append(tool_def)
-                        logger.info(f"Successfully registered MCP tool '{remote_tool.name}' from server '{server_config.server_id}' as '{tool_def.name}'.")
-                    except Exception as e_tool:
-                        logger.error(f"Failed to process or register remote tool '{remote_tool.name}': {e_tool}", exc_info=True)
-
-            except Exception as e_server:
-                logger.error(f"Failed to discover tools from MCP server '{server_config.server_id}': {e_server}", exc_info=True)
-
-        logger.info(f"MCP tool discovery and registration process completed. Total tools registered: {len(registered_tool_definitions)}.")
-        return registered_tool_definitions
-
-    async def list_remote_tools(self, mcp_config: Union[BaseMcpConfig, Dict[str, Any]]) -> List[ToolDefinition]:
-        """
-        Previews tools from a remote MCP server without registering them.
-        This is a stateless "dry-run" or "preview" operation.
-        """
-        validated_config: BaseMcpConfig
-        if isinstance(mcp_config, dict):
-            validated_config = McpConfigService.parse_mcp_config_dict(mcp_config)
-        elif isinstance(mcp_config, BaseMcpConfig):
-            validated_config = mcp_config
-        else:
-            raise TypeError(f"mcp_config must be a BaseMcpConfig object or a dictionary, not {type(mcp_config)}.")
-
-        logger.info(f"Previewing tools from MCP server: '{validated_config.server_id}' ({validated_config.transport_type.value})")
-
-        schema_mapper = McpSchemaMapper()
-        tool_definitions: List[ToolDefinition] = []
-
-        try:
-            # Use the same helper to fetch tools, abstracting away the connection lifecycle.
-            remote_tools = await self._fetch_tools_from_server(validated_config)
-            logger.info(f"Discovered {len(remote_tools)} tools from server '{validated_config.server_id}' for preview.")
-
-            for remote_tool in remote_tools:
-                try:
-                    tool_def = self._create_tool_definition_from_remote(remote_tool, validated_config, schema_mapper)
-                    tool_definitions.append(tool_def)
-                except Exception as e_tool:
-                    logger.error(f"Failed to map remote tool '{remote_tool.name}' from server '{validated_config.server_id}' during preview: {e_tool}", exc_info=True)
-
-        except Exception as e_server:
-            logger.error(f"Failed to discover tools for preview from MCP server '{validated_config.server_id}': {e_server}", exc_info=True)
-            raise
-
-        logger.info(f"MCP tool preview completed. Found {len(tool_definitions)} tools.")
-        return tool_definitions
-
-    def unregister_tools_from_server(self, server_id: str) -> bool:
-        if not self.is_server_registered(server_id):
-            logger.info(f"No tools found for server ID '{server_id}'. Nothing to unregister.")
-            return False
-        tools_to_unregister = self._registered_tools_by_server.pop(server_id, [])
-        logger.info(f"Unregistering {len(tools_to_unregister)} tools from server ID: '{server_id}'...")
-        for tool_def in tools_to_unregister:
-            self._tool_registry.unregister_tool(tool_def.name)
-        logger.info(f"Successfully unregistered all tools and removed server '{server_id}' from registrar tracking.")
-        return True
-
-    def is_server_registered(self, server_id: str) -> bool:
-        return server_id in self._registered_tools_by_server
@@ -1,98 +0,0 @@
-import asyncio
-import logging
-from typing import Optional, Callable, Any, List, Union
-
-from autobyteus.agent.agent import Agent
-from autobyteus.events.event_types import EventType
-from autobyteus.llm.models import LLMModel
-from autobyteus.llm.llm_factory import LLMFactory
-from autobyteus.conversation.user_message import UserMessage
-
-logger = logging.getLogger(__name__)
-
-class SimpleTask:
-    """
-    A simplified task execution class for running single-instruction tasks
-    with minimal configuration and built-in result handling.
-    """
-
-    def __init__(
-        self,
-        name: str,
-        instruction: str,
-        llm_model: LLMModel,
-        input_data: Optional[Union[str, List[str]]] = None,
-        output_parser: Optional[Callable[[str], Any]] = None,
-    ):
-        """
-        Initialize a SimpleTask.
-
-        Args:
-            name (str): Name of the task
-            instruction (str): Task instruction/prompt
-            llm_model (LLMModel): LLM model to use
-            input_data (Optional[Union[str, List[str]]], optional): Input data or file paths. Defaults to None.
-            output_parser (Optional[Callable], optional): Function to parse the output. Defaults to None.
-        """
-        self.name = name
-        self.instruction = instruction
-        self.llm_model = llm_model
-        self.input_data = input_data if isinstance(input_data, list) else ([input_data] if input_data else [])
-        self.output_parser = output_parser
-
-        logger.info(f"Initialized task '{self.name}' with model {self.llm_model.value} and {len(self.input_data)} inputs")
-
-    async def execute(self) -> Any:
-        """
-        Execute the task and return the result.
-
-        Returns:
-            The result of the task execution, parsed if output_parser is provided
-        """
-        try:
-            llm = LLMFactory.create_llm(self.llm_model)
-
-            user_message = UserMessage(
-                content=self.instruction,
-                file_paths=self.input_data
-            )
-
-            agent = Agent(
-                role=self.name,
-                llm=llm,
-                initial_user_message=user_message
-            )
-
-            result_queue = asyncio.Queue()
-
-            async def handle_response(*args, **kwargs):
-                response = kwargs.get('response')
-                if response:
-                    await result_queue.put(response)
-
-            agent.subscribe(EventType.ASSISTANT_RESPONSE, handle_response, agent.agent_id)
-
-            try:
-                agent.start()
-                result = await asyncio.wait_for(
-                    result_queue.get(),
-                    timeout=30.0
-                )
-
-                # Only parse if output_parser is provided
-                if self.output_parser:
-                    return self.output_parser(result)
-                return result
-
-            except asyncio.TimeoutError:
-                logger.error(f"Task '{self.name}' timed out")
-                raise TimeoutError(f"Task '{self.name}' execution timed out")
-
-            finally:
-                agent.unsubscribe("ASSISTANT_RESPONSE", handle_response, agent.agent_id)
-                agent.stop()
-                await agent.cleanup()
-
-        except Exception as e:
-            logger.error(f"Error executing task '{self.name}': {str(e)}")
-            raise