aip-agents-binary 0.5.7 (py3-none-macosx_13_0_arm64.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aip_agents/__init__.py +65 -0
- aip_agents/__init__.pyi +19 -0
- aip_agents/a2a/__init__.pyi +3 -0
- aip_agents/a2a/server/__init__.pyi +4 -0
- aip_agents/a2a/server/base_executor.pyi +73 -0
- aip_agents/a2a/server/google_adk_executor.pyi +51 -0
- aip_agents/a2a/server/langflow_executor.pyi +43 -0
- aip_agents/a2a/server/langgraph_executor.pyi +47 -0
- aip_agents/a2a/types.pyi +132 -0
- aip_agents/agent/__init__.pyi +9 -0
- aip_agents/agent/base_agent.pyi +221 -0
- aip_agents/agent/base_langgraph_agent.pyi +232 -0
- aip_agents/agent/google_adk_agent.pyi +141 -0
- aip_agents/agent/google_adk_constants.pyi +3 -0
- aip_agents/agent/hitl/__init__.pyi +6 -0
- aip_agents/agent/hitl/config.pyi +15 -0
- aip_agents/agent/hitl/langgraph_hitl_mixin.pyi +42 -0
- aip_agents/agent/hitl/manager.pyi +199 -0
- aip_agents/agent/hitl/models.pyi +3 -0
- aip_agents/agent/hitl/prompt/__init__.pyi +4 -0
- aip_agents/agent/hitl/prompt/base.pyi +24 -0
- aip_agents/agent/hitl/prompt/deferred.pyi +30 -0
- aip_agents/agent/interface.pyi +81 -0
- aip_agents/agent/interfaces.pyi +44 -0
- aip_agents/agent/langflow_agent.pyi +133 -0
- aip_agents/agent/langgraph_memory_enhancer_agent.pyi +49 -0
- aip_agents/agent/langgraph_react_agent.pyi +126 -0
- aip_agents/agent/system_instruction_context.pyi +13 -0
- aip_agents/clients/__init__.pyi +4 -0
- aip_agents/clients/langflow/__init__.pyi +4 -0
- aip_agents/clients/langflow/client.pyi +140 -0
- aip_agents/clients/langflow/types.pyi +7 -0
- aip_agents/constants.py +23 -0
- aip_agents/constants.pyi +7 -0
- aip_agents/examples/__init__.pyi +0 -0
- aip_agents/examples/compare_streaming_client.pyi +48 -0
- aip_agents/examples/compare_streaming_server.pyi +18 -0
- aip_agents/examples/demo_memory_recall.pyi +58 -0
- aip_agents/examples/hello_world_a2a_google_adk_client.pyi +9 -0
- aip_agents/examples/hello_world_a2a_google_adk_client_agent.pyi +9 -0
- aip_agents/examples/hello_world_a2a_google_adk_client_streaming.pyi +9 -0
- aip_agents/examples/hello_world_a2a_google_adk_server.pyi +15 -0
- aip_agents/examples/hello_world_a2a_langchain_client.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langchain_client_agent.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langchain_client_lm_invoker.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langchain_client_streaming.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langchain_reference_client_streaming.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langchain_reference_server.pyi +15 -0
- aip_agents/examples/hello_world_a2a_langchain_server.pyi +15 -0
- aip_agents/examples/hello_world_a2a_langchain_server_lm_invoker.pyi +15 -0
- aip_agents/examples/hello_world_a2a_langflow_client.pyi +9 -0
- aip_agents/examples/hello_world_a2a_langflow_server.pyi +14 -0
- aip_agents/examples/hello_world_a2a_langgraph_artifact_client.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langgraph_artifact_client_streaming.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langgraph_artifact_server.pyi +16 -0
- aip_agents/examples/hello_world_a2a_langgraph_client.pyi +9 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_agent.pyi +9 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_agent_lm_invoker.pyi +2 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_streaming.pyi +9 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_streaming_lm_invoker.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_streaming_tool_streaming.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langgraph_server.pyi +14 -0
- aip_agents/examples/hello_world_a2a_langgraph_server_lm_invoker.pyi +15 -0
- aip_agents/examples/hello_world_a2a_langgraph_server_tool_streaming.pyi +15 -0
- aip_agents/examples/hello_world_a2a_mcp_langgraph.pyi +48 -0
- aip_agents/examples/hello_world_a2a_three_level_agent_hierarchy_client.pyi +48 -0
- aip_agents/examples/hello_world_a2a_three_level_agent_hierarchy_server.pyi +45 -0
- aip_agents/examples/hello_world_a2a_with_metadata_langchain_client.pyi +5 -0
- aip_agents/examples/hello_world_a2a_with_metadata_langchain_server_lm_invoker.pyi +15 -0
- aip_agents/examples/hello_world_google_adk.pyi +5 -0
- aip_agents/examples/hello_world_google_adk_mcp_http.pyi +5 -0
- aip_agents/examples/hello_world_google_adk_mcp_http_stream.pyi +5 -0
- aip_agents/examples/hello_world_google_adk_mcp_sse.pyi +5 -0
- aip_agents/examples/hello_world_google_adk_mcp_sse_stream.pyi +5 -0
- aip_agents/examples/hello_world_google_adk_mcp_stdio.pyi +5 -0
- aip_agents/examples/hello_world_google_adk_mcp_stdio_stream.pyi +5 -0
- aip_agents/examples/hello_world_google_adk_stream.pyi +5 -0
- aip_agents/examples/hello_world_langchain.pyi +5 -0
- aip_agents/examples/hello_world_langchain_lm_invoker.pyi +2 -0
- aip_agents/examples/hello_world_langchain_mcp_http.pyi +5 -0
- aip_agents/examples/hello_world_langchain_mcp_http_interactive.pyi +16 -0
- aip_agents/examples/hello_world_langchain_mcp_http_stream.pyi +5 -0
- aip_agents/examples/hello_world_langchain_mcp_multi_server.pyi +18 -0
- aip_agents/examples/hello_world_langchain_mcp_sse.pyi +5 -0
- aip_agents/examples/hello_world_langchain_mcp_sse_stream.pyi +5 -0
- aip_agents/examples/hello_world_langchain_mcp_stdio.pyi +5 -0
- aip_agents/examples/hello_world_langchain_mcp_stdio_stream.pyi +5 -0
- aip_agents/examples/hello_world_langchain_stream.pyi +5 -0
- aip_agents/examples/hello_world_langchain_stream_lm_invoker.pyi +5 -0
- aip_agents/examples/hello_world_langflow_agent.pyi +35 -0
- aip_agents/examples/hello_world_langgraph.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_bosa_twitter.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_mcp_http.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_mcp_http_stream.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_mcp_sse.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_mcp_sse_stream.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_mcp_stdio.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_mcp_stdio_stream.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_stream.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_stream_lm_invoker.pyi +5 -0
- aip_agents/examples/hello_world_model_switch_cli.pyi +30 -0
- aip_agents/examples/hello_world_multi_agent_adk.pyi +6 -0
- aip_agents/examples/hello_world_multi_agent_langchain.pyi +5 -0
- aip_agents/examples/hello_world_multi_agent_langgraph.pyi +5 -0
- aip_agents/examples/hello_world_multi_agent_langgraph_lm_invoker.pyi +5 -0
- aip_agents/examples/hello_world_pii_logger.pyi +5 -0
- aip_agents/examples/hello_world_sentry.pyi +21 -0
- aip_agents/examples/hello_world_step_limits.pyi +17 -0
- aip_agents/examples/hello_world_stock_a2a_server.pyi +17 -0
- aip_agents/examples/hello_world_tool_output_client.pyi +5 -0
- aip_agents/examples/hello_world_tool_output_server.pyi +19 -0
- aip_agents/examples/hitl_demo.pyi +67 -0
- aip_agents/examples/pii_demo_langgraph_client.pyi +5 -0
- aip_agents/examples/pii_demo_langgraph_server.pyi +20 -0
- aip_agents/examples/pii_demo_multi_agent_client.pyi +5 -0
- aip_agents/examples/pii_demo_multi_agent_server.pyi +40 -0
- aip_agents/examples/todolist_planning_a2a_langchain_client.pyi +5 -0
- aip_agents/examples/todolist_planning_a2a_langgraph_server.pyi +19 -0
- aip_agents/examples/tools/__init__.pyi +9 -0
- aip_agents/examples/tools/adk_arithmetic_tools.pyi +24 -0
- aip_agents/examples/tools/adk_weather_tool.pyi +18 -0
- aip_agents/examples/tools/data_generator_tool.pyi +15 -0
- aip_agents/examples/tools/data_visualization_tool.pyi +19 -0
- aip_agents/examples/tools/image_artifact_tool.pyi +26 -0
- aip_agents/examples/tools/langchain_arithmetic_tools.pyi +17 -0
- aip_agents/examples/tools/langchain_currency_exchange_tool.pyi +20 -0
- aip_agents/examples/tools/langchain_graph_artifact_tool.pyi +25 -0
- aip_agents/examples/tools/langchain_weather_tool.pyi +19 -0
- aip_agents/examples/tools/langgraph_streaming_tool.pyi +43 -0
- aip_agents/examples/tools/mock_retrieval_tool.pyi +13 -0
- aip_agents/examples/tools/pii_demo_tools.pyi +54 -0
- aip_agents/examples/tools/random_chart_tool.pyi +20 -0
- aip_agents/examples/tools/serper_tool.pyi +16 -0
- aip_agents/examples/tools/stock_tools.pyi +36 -0
- aip_agents/examples/tools/table_generator_tool.pyi +22 -0
- aip_agents/examples/tools/time_tool.pyi +15 -0
- aip_agents/examples/tools/weather_forecast_tool.pyi +14 -0
- aip_agents/mcp/__init__.pyi +0 -0
- aip_agents/mcp/client/__init__.pyi +5 -0
- aip_agents/mcp/client/base_mcp_client.pyi +148 -0
- aip_agents/mcp/client/connection_manager.pyi +48 -0
- aip_agents/mcp/client/google_adk/__init__.pyi +3 -0
- aip_agents/mcp/client/google_adk/client.pyi +75 -0
- aip_agents/mcp/client/langchain/__init__.pyi +3 -0
- aip_agents/mcp/client/langchain/client.pyi +48 -0
- aip_agents/mcp/client/persistent_session.pyi +113 -0
- aip_agents/mcp/client/session_pool.pyi +101 -0
- aip_agents/mcp/client/transports.pyi +123 -0
- aip_agents/mcp/utils/__init__.pyi +0 -0
- aip_agents/mcp/utils/config_validator.pyi +82 -0
- aip_agents/memory/__init__.pyi +5 -0
- aip_agents/memory/adapters/__init__.pyi +4 -0
- aip_agents/memory/adapters/base_adapter.pyi +150 -0
- aip_agents/memory/adapters/mem0.pyi +22 -0
- aip_agents/memory/base.pyi +60 -0
- aip_agents/memory/constants.pyi +25 -0
- aip_agents/memory/factory.pyi +24 -0
- aip_agents/memory/guidance.pyi +3 -0
- aip_agents/memory/simple_memory.pyi +23 -0
- aip_agents/middleware/__init__.pyi +5 -0
- aip_agents/middleware/base.pyi +71 -0
- aip_agents/middleware/manager.pyi +80 -0
- aip_agents/middleware/todolist.pyi +125 -0
- aip_agents/schema/__init__.pyi +9 -0
- aip_agents/schema/a2a.pyi +40 -0
- aip_agents/schema/agent.pyi +65 -0
- aip_agents/schema/hitl.pyi +89 -0
- aip_agents/schema/langgraph.pyi +28 -0
- aip_agents/schema/model_id.pyi +54 -0
- aip_agents/schema/step_limit.pyi +63 -0
- aip_agents/schema/storage.pyi +21 -0
- aip_agents/sentry/__init__.pyi +3 -0
- aip_agents/sentry/sentry.pyi +48 -0
- aip_agents/storage/__init__.pyi +8 -0
- aip_agents/storage/base.pyi +58 -0
- aip_agents/storage/clients/__init__.pyi +3 -0
- aip_agents/storage/clients/minio_client.pyi +137 -0
- aip_agents/storage/config.pyi +29 -0
- aip_agents/storage/providers/__init__.pyi +5 -0
- aip_agents/storage/providers/base.pyi +88 -0
- aip_agents/storage/providers/memory.pyi +79 -0
- aip_agents/storage/providers/object_storage.pyi +98 -0
- aip_agents/tools/__init__.pyi +4 -0
- aip_agents/tools/bosa_tools.pyi +37 -0
- aip_agents/tools/browser_use/__init__.pyi +14 -0
- aip_agents/tools/browser_use/action_parser.pyi +18 -0
- aip_agents/tools/browser_use/browser_use_tool.pyi +50 -0
- aip_agents/tools/browser_use/llm_config.pyi +52 -0
- aip_agents/tools/browser_use/minio_storage.pyi +109 -0
- aip_agents/tools/browser_use/schemas.pyi +32 -0
- aip_agents/tools/browser_use/session.pyi +4 -0
- aip_agents/tools/browser_use/session_errors.pyi +53 -0
- aip_agents/tools/browser_use/steel_session_recording.pyi +63 -0
- aip_agents/tools/browser_use/streaming.pyi +81 -0
- aip_agents/tools/browser_use/structured_data_parser.pyi +86 -0
- aip_agents/tools/browser_use/structured_data_recovery.pyi +43 -0
- aip_agents/tools/browser_use/types.pyi +45 -0
- aip_agents/tools/code_sandbox/__init__.pyi +3 -0
- aip_agents/tools/code_sandbox/constant.pyi +4 -0
- aip_agents/tools/code_sandbox/e2b_cloud_sandbox_extended.pyi +86 -0
- aip_agents/tools/code_sandbox/e2b_sandbox_tool.pyi +29 -0
- aip_agents/tools/constants.pyi +135 -0
- aip_agents/tools/document_loader/__init__.pyi +7 -0
- aip_agents/tools/document_loader/base_reader.pyi +62 -0
- aip_agents/tools/document_loader/docx_reader_tool.pyi +6 -0
- aip_agents/tools/document_loader/excel_reader_tool.pyi +26 -0
- aip_agents/tools/document_loader/pdf_reader_tool.pyi +11 -0
- aip_agents/tools/document_loader/pdf_splitter.pyi +18 -0
- aip_agents/tools/gl_connector/__init__.pyi +3 -0
- aip_agents/tools/gl_connector/tool.pyi +74 -0
- aip_agents/tools/memory_search/__init__.pyi +5 -0
- aip_agents/tools/memory_search/base.pyi +69 -0
- aip_agents/tools/memory_search/mem0.pyi +19 -0
- aip_agents/tools/memory_search/schema.pyi +15 -0
- aip_agents/tools/memory_search_tool.pyi +3 -0
- aip_agents/tools/tool_config_injector.pyi +26 -0
- aip_agents/tools/web_search/__init__.pyi +3 -0
- aip_agents/tools/web_search/serper_tool.pyi +19 -0
- aip_agents/types/__init__.pyi +36 -0
- aip_agents/types/a2a_events.pyi +3 -0
- aip_agents/utils/__init__.pyi +11 -0
- aip_agents/utils/a2a_connector.pyi +146 -0
- aip_agents/utils/artifact_helpers.pyi +203 -0
- aip_agents/utils/constants.pyi +10 -0
- aip_agents/utils/datetime/__init__.pyi +4 -0
- aip_agents/utils/datetime/normalization.pyi +95 -0
- aip_agents/utils/datetime/timezone.pyi +48 -0
- aip_agents/utils/env_loader.pyi +10 -0
- aip_agents/utils/event_handler_registry.pyi +23 -0
- aip_agents/utils/file_prompt_utils.pyi +21 -0
- aip_agents/utils/final_response_builder.pyi +34 -0
- aip_agents/utils/formatter_llm_client.pyi +71 -0
- aip_agents/utils/langgraph/__init__.pyi +3 -0
- aip_agents/utils/langgraph/converter.pyi +49 -0
- aip_agents/utils/langgraph/tool_managers/__init__.pyi +5 -0
- aip_agents/utils/langgraph/tool_managers/a2a_tool_manager.pyi +35 -0
- aip_agents/utils/langgraph/tool_managers/base_tool_manager.pyi +48 -0
- aip_agents/utils/langgraph/tool_managers/delegation_tool_manager.pyi +56 -0
- aip_agents/utils/langgraph/tool_output_management.pyi +292 -0
- aip_agents/utils/logger.pyi +60 -0
- aip_agents/utils/metadata/__init__.pyi +5 -0
- aip_agents/utils/metadata/activity_metadata_helper.pyi +25 -0
- aip_agents/utils/metadata/activity_narrative/__init__.pyi +7 -0
- aip_agents/utils/metadata/activity_narrative/builder.pyi +35 -0
- aip_agents/utils/metadata/activity_narrative/constants.pyi +10 -0
- aip_agents/utils/metadata/activity_narrative/context.pyi +32 -0
- aip_agents/utils/metadata/activity_narrative/formatters.pyi +48 -0
- aip_agents/utils/metadata/activity_narrative/utils.pyi +12 -0
- aip_agents/utils/metadata/schemas/__init__.pyi +4 -0
- aip_agents/utils/metadata/schemas/activity_schema.pyi +18 -0
- aip_agents/utils/metadata/schemas/thinking_schema.pyi +20 -0
- aip_agents/utils/metadata/thinking_metadata_helper.pyi +4 -0
- aip_agents/utils/metadata_helper.pyi +117 -0
- aip_agents/utils/name_preprocessor/__init__.pyi +6 -0
- aip_agents/utils/name_preprocessor/base_name_preprocessor.pyi +52 -0
- aip_agents/utils/name_preprocessor/google_name_preprocessor.pyi +38 -0
- aip_agents/utils/name_preprocessor/name_preprocessor.pyi +41 -0
- aip_agents/utils/name_preprocessor/openai_name_preprocessor.pyi +34 -0
- aip_agents/utils/pii/__init__.pyi +5 -0
- aip_agents/utils/pii/pii_handler.pyi +96 -0
- aip_agents/utils/pii/pii_helper.pyi +78 -0
- aip_agents/utils/pii/uuid_deanonymizer_mapping.pyi +73 -0
- aip_agents/utils/reference_helper.pyi +81 -0
- aip_agents/utils/sse_chunk_transformer.pyi +166 -0
- aip_agents/utils/step_limit_manager.pyi +112 -0
- aip_agents/utils/token_usage_helper.pyi +60 -0
- aip_agents_binary-0.5.7.dist-info/METADATA +690 -0
- aip_agents_binary-0.5.7.dist-info/RECORD +270 -0
- aip_agents_binary-0.5.7.dist-info/WHEEL +5 -0
- aip_agents_binary-0.5.7.dist-info/top_level.txt +1 -0

aip_agents/agent/hitl/prompt/base.pyi
@@ -0,0 +1,24 @@
+from abc import ABC, abstractmethod
+from aip_agents.agent.hitl.manager import ApprovalManager as ApprovalManager
+from aip_agents.schema.hitl import ApprovalDecision as ApprovalDecision, ApprovalRequest as ApprovalRequest
+
+class BasePromptHandler(ABC):
+    """Abstract base class for prompt handlers used in HITL flows."""
+    def attach_manager(self, manager: ApprovalManager) -> None:
+        """Optionally attach the ``ApprovalManager`` coordinating approvals.
+
+        Args:
+            manager (ApprovalManager): The approval manager instance to attach.
+        """
+    @abstractmethod
+    async def prompt_for_decision(self, request: ApprovalRequest, timeout_seconds: int, context_keys: list[str] | None = None) -> ApprovalDecision:
+        """Collect and return a decision for the given approval request.
+
+        Args:
+            request (ApprovalRequest): The approval request to prompt for.
+            timeout_seconds (int): Maximum time to wait for a decision in seconds.
+            context_keys (list[str] | None, optional): Optional keys for additional context. Defaults to None.
+
+        Returns:
+            ApprovalDecision: The decision made for the approval request.
+        """

aip_agents/agent/hitl/prompt/deferred.pyi
@@ -0,0 +1,30 @@
+from aip_agents.agent.hitl.manager import ApprovalManager as ApprovalManager
+from aip_agents.agent.hitl.prompt.base import BasePromptHandler as BasePromptHandler
+from aip_agents.schema.hitl import ApprovalDecision as ApprovalDecision, ApprovalDecisionType as ApprovalDecisionType, ApprovalRequest as ApprovalRequest
+from collections.abc import Callable
+
+class DeferredPromptHandler(BasePromptHandler):
+    """Prompt handler that defers tool execution until an external decision is received."""
+    def __init__(self, notify: Callable[[ApprovalRequest], None] | None = None) -> None:
+        """Initialize the deferred prompt handler.
+
+        Args:
+            notify: Optional callback function to notify when an approval request is made.
+        """
+    def attach_manager(self, manager: ApprovalManager) -> None:
+        """Attach the ApprovalManager orchestrating approvals.
+
+        Args:
+            manager: The ApprovalManager instance to attach for handling approval decisions.
+        """
+    async def prompt_for_decision(self, request: ApprovalRequest, timeout_seconds: int, context_keys: list[str] | None = None) -> ApprovalDecision:
+        """Register a waiter and return a pending decision sentinel.
+
+        Args:
+            request: The approval request containing the tool call details and context.
+            timeout_seconds: Number of seconds to wait for approval before timing out.
+            context_keys: Optional list of context keys to include in the approval request.
+
+        Returns:
+            ApprovalDecision with PENDING status and registered waiter for external resolution.
+        """
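
A minimal sketch of wiring the deferred handler into an approval flow, using only the constructor shown above; the callback body is an illustrative assumption, not part of the package:

from aip_agents.agent.hitl.prompt.deferred import DeferredPromptHandler
from aip_agents.schema.hitl import ApprovalRequest

def on_approval_requested(request: ApprovalRequest) -> None:
    # How the pending request is surfaced (UI, queue, webhook, ...) is up to the
    # application; printing is just a stand-in.
    print(f"Approval requested: {request}")

# The handler defers tool execution and returns a PENDING decision; the attached
# ApprovalManager (see aip_agents.agent.hitl.manager) later resolves the waiter.
handler = DeferredPromptHandler(notify=on_approval_requested)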

aip_agents/agent/interface.pyi
@@ -0,0 +1,81 @@
+from a2a.types import AgentCard as AgentCard
+from abc import ABC, abstractmethod
+from aip_agents.schema.agent import BaseAgentConfig as BaseAgentConfig
+from collections.abc import AsyncGenerator
+from gllm_inference.lm_invoker.lm_invoker import BaseLMInvoker
+from typing import Any
+
+class AgentInterface(ABC):
+    """A general and minimal interface for agent implementations.
+
+    Defines core execution methods (`__init__`, `run`, `arun`, `arun_stream`).
+    Concrete subclasses must implement all abstract methods.
+    """
+    name: str
+    instruction: str
+    description: str | None
+    mcp_config: dict[str, Any]
+    lm_invoker: BaseLMInvoker | None
+    config: BaseAgentConfig | None
+    def __init__(self, name: str, instruction: str, description: str | None = None, lm_invoker: BaseLMInvoker | None = None, config: BaseAgentConfig | None = None, **kwargs: Any) -> None:
+        """Initializes the agent.
+
+        Args:
+            name: The name of the agent.
+            instruction: The core directive or system prompt for the agent.
+            description: Human-readable description. Defaults to instruction if not provided.
+            lm_invoker: The language model invoker to use for LLM interactions. Defaults to None.
+            config: Additional configuration for the agent.
+            **kwargs: Additional keyword arguments for concrete implementations.
+        """
+    @abstractmethod
+    def run(self, query: str, **kwargs: Any) -> dict[str, Any]:
+        """Synchronously runs the agent.
+
+        Args:
+            query: The input query for the agent.
+            **kwargs: Additional keyword arguments for execution.
+
+        Returns:
+            Dict containing at least {'output': ...}.
+        """
+    @abstractmethod
+    async def arun(self, query: str, **kwargs: Any) -> dict[str, Any]:
+        """Asynchronously runs the agent.
+
+        Args:
+            query: The input query for the agent.
+            **kwargs: Additional keyword arguments for execution.
+
+        Returns:
+            Dict containing at least {'output': ...}.
+        """
+    @abstractmethod
+    async def arun_stream(self, query: str, **kwargs: Any) -> AsyncGenerator[str | dict[str, Any], None]:
+        """Asynchronously streams the agent's response.
+
+        Args:
+            query: The input query.
+            **kwargs: Extra parameters for execution.
+
+        Yields:
+            Chunks of output (strings or dicts).
+        """
+    @abstractmethod
+    def add_mcp_server(self, mcp_config: dict[str, dict[str, Any]]) -> None:
+        """Adds a new MCP server configuration.
+
+        Args:
+            mcp_config: Dictionary containing server name as key and its configuration as value.
+
+        Raises:
+            ValueError: If mcp_config is empty or None, or if any server configuration is invalid.
+            KeyError: If any server name already exists in the configuration.
+        """
+    @abstractmethod
+    def register_a2a_agents(self, agents: list[AgentCard]):
+        """Registers A2A agents from a list of AgentCards.
+
+        Args:
+            agents: A list of AgentCard instances.
+        """
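
As a rough sketch of what this contract demands of implementers, the toy echo agent below fills every abstract method with a trivial body. It assumes the non-abstract base __init__ accepts the signature shown above; the dict-with-'output' shape follows the run/arun docstrings:

from collections.abc import AsyncGenerator
from typing import Any

from a2a.types import AgentCard
from aip_agents.agent.interface import AgentInterface

class EchoAgent(AgentInterface):
    """Toy implementation that echoes the query back; illustrative only."""

    def run(self, query: str, **kwargs: Any) -> dict[str, Any]:
        return {"output": f"echo: {query}"}

    async def arun(self, query: str, **kwargs: Any) -> dict[str, Any]:
        return {"output": f"echo: {query}"}

    async def arun_stream(self, query: str, **kwargs: Any) -> AsyncGenerator[str | dict[str, Any], None]:
        # Yield the response in two chunks to mimic streaming.
        yield "echo: "
        yield {"output": query}

    def add_mcp_server(self, mcp_config: dict[str, dict[str, Any]]) -> None:
        if not mcp_config:
            raise ValueError("mcp_config must not be empty")

    def register_a2a_agents(self, agents: list[AgentCard]) -> None:
        # A real implementation would store the cards and expose them as delegation targets.
        self._a2a_agents = list(agents)

agent = EchoAgent(name="echo", instruction="Echo the user's query back.")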

aip_agents/agent/interfaces.pyi
@@ -0,0 +1,44 @@
+from collections.abc import AsyncIterator
+from typing import Any, Protocol
+
+class LangGraphAgentProtocol(Protocol):
+    """Minimal interface required by LangGraphA2AExecutor."""
+    name: str
+    async def arun_a2a_stream(self, query: str, **kwargs: Any) -> AsyncIterator[dict[str, Any]]:
+        """Stream A2A-compatible chunks for the given query.
+
+        Args:
+            query (str): The query to execute and stream.
+            **kwargs (Any): Additional keyword arguments for execution.
+
+        Yields:
+            dict[str, Any]: A2A-compatible streaming chunks.
+        """
+
+class LangflowAgentProtocol(Protocol):
+    """Minimal interface required by LangflowA2AExecutor."""
+    name: str
+    async def arun_a2a_stream(self, query: str, **kwargs: Any) -> AsyncIterator[dict[str, Any]]:
+        """Stream A2A-compatible chunks for the given query.
+
+        Args:
+            query (str): The query to execute and stream.
+            **kwargs (Any): Additional keyword arguments for execution.
+
+        Yields:
+            dict[str, Any]: A2A-compatible streaming chunks.
+        """
+
+class GoogleADKAgentProtocol(Protocol):
+    """Minimal interface required by GoogleADKExecutor."""
+    name: str
+    async def arun_a2a_stream(self, query: str, **kwargs: Any) -> AsyncIterator[dict[str, Any]]:
+        """Stream A2A-compatible chunks for the given query.
+
+        Args:
+            query (str): The query to execute and stream.
+            **kwargs (Any): Additional keyword arguments for execution.
+
+        Yields:
+            dict[str, Any]: A2A-compatible streaming chunks.
+        """
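
Because these are typing.Protocol classes, any object with a name attribute and a matching arun_a2a_stream method satisfies them structurally; no inheritance from aip_agents base classes is required. A small illustration (the class and its chunk payloads are hypothetical):

from collections.abc import AsyncIterator
from typing import Any

from aip_agents.agent.interfaces import LangGraphAgentProtocol

class MyStreamingAgent:
    """Not derived from any aip_agents class, yet protocol-compatible."""

    name = "my-streaming-agent"

    async def arun_a2a_stream(self, query: str, **kwargs: Any) -> AsyncIterator[dict[str, Any]]:
        # Hypothetical chunk shapes; the real executors define the exact keys.
        yield {"event_type": "working", "content": f"processing {query}"}
        yield {"event_type": "completed", "content": "done", "final": True}

def agent_name(agent: LangGraphAgentProtocol) -> str:
    # Static type checkers accept MyStreamingAgent here via structural typing.
    return agent.name

print(agent_name(MyStreamingAgent()))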

aip_agents/agent/langflow_agent.pyi
@@ -0,0 +1,133 @@
+from _typeshed import Incomplete
+from a2a.types import AgentCard as AgentCard
+from aip_agents.agent.base_agent import BaseAgent as BaseAgent
+from aip_agents.clients.langflow import LangflowApiClient as LangflowApiClient
+from aip_agents.clients.langflow.types import LangflowEventType as LangflowEventType
+from aip_agents.schema.agent import LangflowAgentConfig as LangflowAgentConfig
+from aip_agents.types import A2AEvent as A2AEvent, A2AStreamEventType as A2AStreamEventType
+from aip_agents.utils.logger import get_logger as get_logger
+from aip_agents.utils.sse_chunk_transformer import SSEChunkTransformer as SSEChunkTransformer
+from collections.abc import AsyncGenerator
+from typing import Any
+
+logger: Incomplete
+
+class LangflowAgent(BaseAgent):
+    """Langflow agent implementation for executing Langflow flows.
+
+    This agent integrates with Langflow APIs to execute flows while providing
+    full compatibility with the SDK's agent framework, including:
+    - Synchronous and asynchronous execution
+    - Regular and A2A streaming support
+    - Session management for conversation continuity
+    - Error handling and retry logic
+    - Credential management through BaseAgent
+
+    The agent builds on BaseAgent to gain shared A2A utilities while focusing on
+    Langflow-specific execution logic.
+    """
+    langflow_config: Incomplete
+    flow_id: Incomplete
+    api_client: Incomplete
+    def __init__(self, name: str, flow_id: str, description: str | None = None, base_url: str | None = None, api_key: str | None = None, config: LangflowAgentConfig | dict[str, Any] | None = None, **kwargs: Any) -> None:
+        """Initialize the LangflowAgent.
+
+        Args:
+            name: The name of the agent.
+            flow_id: The unique identifier of the Langflow flow to execute.
+            description: Human-readable description.
+            base_url: The base URL of the Langflow API server.
+            api_key: The API key for Langflow authentication.
+            config: Langflow-specific configuration or dict.
+            **kwargs: Additional keyword arguments passed to BaseAgent.
+        """
+    def run(self, query: str, **kwargs: Any) -> dict[str, Any]:
+        """Synchronously run the Langflow agent.
+
+        Args:
+            query: The input query for the agent.
+            **kwargs: Additional keyword arguments.
+
+        Returns:
+            Dictionary containing the agent's response.
+        """
+    async def arun(self, query: str, **kwargs: Any) -> dict[str, Any]:
+        """Asynchronously run the Langflow agent.
+
+        Args:
+            query: The input query for the agent.
+            **kwargs: Additional keyword arguments.
+
+        Returns:
+            Dictionary containing the agent's response and metadata.
+        """
+    async def arun_stream(self, query: str, **kwargs: Any) -> AsyncGenerator[str | dict[str, Any], None]:
+        """Asynchronously stream the Langflow agent's response.
+
+        Args:
+            query: The input query for the agent.
+            **kwargs: Additional keyword arguments.
+
+        Yields:
+            Chunks of output (strings or dicts) from the streaming response.
+        """
+    async def arun_a2a_stream(self, query: str, **kwargs: Any) -> AsyncGenerator[dict[str, Any], None]:
+        """Asynchronously stream the agent's response in A2A format.
+
+        This method converts Langflow streaming events into A2A-compatible events
+        following the patterns established by BaseLangGraphAgent.
+
+        Args:
+            query: The input query for the agent.
+            **kwargs: Additional keyword arguments.
+
+        Yields:
+            A2A-compatible event dictionaries with semantic event types.
+        """
+    async def arun_sse_stream(self, query: str, task_id: str | None = None, context_id: str | None = None, **kwargs: Any) -> AsyncGenerator[dict[str, Any], None]:
+        '''Stream agent response as SSE-compatible chunks.
+
+        This method wraps arun_a2a_stream and transforms output to the normalized
+        dict format matching A2AConnector.astream_to_agent output, enabling direct
+        streaming without A2A server overhead.
+
+        Args:
+            query: The input query for the agent.
+            task_id: Optional task identifier for the stream.
+            context_id: Optional context identifier for the stream.
+            **kwargs: Additional arguments passed to arun_a2a_stream.
+
+        Yields:
+            SSEChunk dicts with normalized structure:
+            - status: "success" | "error"
+            - task_state: "working" | "completed" | "failed" | "canceled"
+            - content: Text content or None
+            - event_type: Always string (never enum)
+            - final: True for terminal events
+            - metadata: Normalized metadata dict
+            - artifacts: Only present when non-empty
+        '''
+    def register_a2a_agents(self, agents: list[AgentCard]) -> None:
+        """Register A2A agents (not supported for Langflow agents).
+
+        Args:
+            agents: List of AgentCard instances.
+
+        Raises:
+            NotImplementedError: Langflow agents don't support A2A agent registration.
+        """
+    def add_mcp_server(self, mcp_config: dict[str, dict[str, Any]]) -> None:
+        """Add MCP server configuration (not supported for Langflow agents).
+
+        Args:
+            mcp_config: MCP server configuration.
+
+        Raises:
+            NotImplementedError: Langflow agents don't support MCP servers.
+        """
+    async def health_check(self) -> bool:
+        """Check if the Langflow API is accessible.
+
+        Returns:
+            True if the API is accessible, False otherwise.
+        """
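
A hedged usage sketch based only on the constructor and method signatures above; the flow ID, base URL, and API key are placeholders, and the exact shape of the streamed event dicts may differ from the comments:

import asyncio

from aip_agents.agent.langflow_agent import LangflowAgent

async def main() -> None:
    agent = LangflowAgent(
        name="support-flow",
        flow_id="00000000-0000-0000-0000-000000000000",  # placeholder flow ID
        base_url="http://localhost:7860",  # placeholder Langflow server
        api_key="sk-...",  # placeholder credential
    )

    if not await agent.health_check():
        raise RuntimeError("Langflow API is not reachable")

    # Non-streaming call: returns a dict with the flow's response.
    result = await agent.arun("Summarize my last ticket")
    print(result)

    # A2A-style streaming: yields event dicts with semantic event types.
    async for event in agent.arun_a2a_stream("Summarize my last ticket"):
        print(event)

asyncio.run(main())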

aip_agents/agent/langgraph_memory_enhancer_agent.pyi
@@ -0,0 +1,49 @@
+from _typeshed import Incomplete
+from aip_agents.agent.langgraph_react_agent import LangGraphReactAgent as LangGraphReactAgent
+from aip_agents.agent.system_instruction_context import get_current_date_context as get_current_date_context
+from aip_agents.memory.guidance import MEM0_MEMORY_RECALL_GUIDANCE as MEM0_MEMORY_RECALL_GUIDANCE
+from aip_agents.tools.memory_search_tool import LongTermMemorySearchTool as LongTermMemorySearchTool, MEMORY_SEARCH_TOOL_NAME as MEMORY_SEARCH_TOOL_NAME, Mem0SearchTool as Mem0SearchTool
+from aip_agents.utils.logger import get_logger as get_logger
+from langgraph.graph import StateGraph
+from langgraph.graph.state import CompiledStateGraph
+
+logger: Incomplete
+
+class LangGraphMemoryEnhancerAgent(LangGraphReactAgent):
+    """Simplified mini-agent for automatic memory retrieval and query enhancement.
+
+    This agent has a simple 2-node LangGraph (agent + tools) and uses existing memory
+    infrastructure to enhance user queries with relevant context. It acts as a
+    preprocessing layer that automatically attempts memory retrieval for every query.
+
+    Key features:
+    - Uses runtime `memory_user_id` provided via call arguments (no static storage)
+    - Uses simplified instruction reusing existing guidance
+    - Standard 2-node LangGraph pattern (agent -> tools -> agent)
+    - Automatically enhances queries with memory context when available
+    - Returns original query unchanged if no relevant memories found
+    """
+    def __init__(self, memory, **kwargs) -> None:
+        """Initialize the LangGraphMemoryEnhancerAgent with memory backend and configuration.
+
+        Args:
+            memory: Memory backend instance (Mem0Memory or compatible)
+            **kwargs: Additional arguments passed to BaseLangGraphAgent, including:
+                - memory_agent_id: Fallback user ID for memory operations
+                - model: LLM model to use for memory decisions
+                - Other BaseLangGraphAgent parameters
+        """
+    def define_graph(self, graph_builder: StateGraph) -> CompiledStateGraph:
+        """Define the 3-node memory recall LangGraph for this agent.
+
+        This creates a streamlined ReAct-inspired structure that reuses
+        `LangGraphReactAgent` helpers for robust LM invocation, token usage tracking,
+        error handling, and tool execution.
+
+        Args:
+            graph_builder: LangGraph `StateGraph` builder instance used to register nodes and
+                edges for compilation.
+
+        Returns:
+            CompiledStateGraph: The compiled memory recall graph ready for execution.
+        """
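
Construction requires a Mem0-compatible memory backend, which this diff does not define, so the sketch below leaves it as a parameter; memory_agent_id comes from the docstring above, while forwarding name through **kwargs to the base agent is an assumption:

from aip_agents.agent.langgraph_memory_enhancer_agent import LangGraphMemoryEnhancerAgent

def build_memory_enhancer(memory_backend):
    """memory_backend: a Mem0Memory-compatible instance (see aip_agents.memory)."""
    return LangGraphMemoryEnhancerAgent(
        memory=memory_backend,
        name="memory-enhancer",  # assumed to be forwarded to BaseLangGraphAgent
        memory_agent_id="user-123",  # fallback user ID for memory operations (per docstring)
    )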

aip_agents/agent/langgraph_react_agent.pyi
@@ -0,0 +1,126 @@
+from _typeshed import Incomplete
+from aip_agents.agent.base_langgraph_agent import BaseLangGraphAgent as BaseLangGraphAgent
+from aip_agents.agent.hitl.langgraph_hitl_mixin import LangGraphHitLMixin as LangGraphHitLMixin
+from aip_agents.agent.hitl.manager import TOOL_EXECUTION_BLOCKING_DECISIONS as TOOL_EXECUTION_BLOCKING_DECISIONS
+from aip_agents.middleware.base import AgentMiddleware as AgentMiddleware, ModelRequest as ModelRequest
+from aip_agents.middleware.manager import MiddlewareManager as MiddlewareManager
+from aip_agents.middleware.todolist import TodoList as TodoList, TodoListMiddleware as TodoListMiddleware
+from aip_agents.schema.a2a import A2AStreamEventType as A2AStreamEventType
+from aip_agents.schema.hitl import ApprovalDecision as ApprovalDecision, HitlMetadata as HitlMetadata
+from aip_agents.schema.langgraph import ToolCallResult as ToolCallResult, ToolStorageParams as ToolStorageParams
+from aip_agents.schema.step_limit import MaxStepsExceededError as MaxStepsExceededError, StepLimitConfig as StepLimitConfig
+from aip_agents.tools.memory_search_tool import MEMORY_SEARCH_TOOL_NAME as MEMORY_SEARCH_TOOL_NAME
+from aip_agents.tools.tool_config_injector import TOOL_CONFIGS_KEY as TOOL_CONFIGS_KEY
+from aip_agents.utils import add_references_chunks as add_references_chunks
+from aip_agents.utils.langgraph import convert_langchain_messages_to_gllm_messages as convert_langchain_messages_to_gllm_messages, convert_lm_output_to_langchain_message as convert_lm_output_to_langchain_message
+from aip_agents.utils.langgraph.tool_output_management import StoreOutputParams as StoreOutputParams, ToolOutputManager as ToolOutputManager, ToolReferenceError as ToolReferenceError, ToolReferenceResolver as ToolReferenceResolver
+from aip_agents.utils.logger import get_logger as get_logger
+from aip_agents.utils.metadata.activity_metadata_helper import create_tool_activity_info as create_tool_activity_info
+from aip_agents.utils.metadata_helper import Kind as Kind, MetadataFieldKeys as MetadataFieldKeys, Status as Status
+from aip_agents.utils.pii import ToolPIIHandler as ToolPIIHandler, add_pii_mappings as add_pii_mappings, normalize_enable_pii as normalize_enable_pii
+from aip_agents.utils.reference_helper import extract_references_from_tool as extract_references_from_tool
+from aip_agents.utils.step_limit_manager import StepLimitManager as StepLimitManager
+from aip_agents.utils.token_usage_helper import TOTAL_USAGE_KEY as TOTAL_USAGE_KEY, USAGE_METADATA_KEY as USAGE_METADATA_KEY, add_usage_metadata as add_usage_metadata, extract_and_update_token_usage_from_ai_message as extract_and_update_token_usage_from_ai_message, extract_token_usage_from_tool_output as extract_token_usage_from_tool_output
+from collections.abc import Awaitable as Awaitable, Sequence
+from dataclasses import dataclass
+from gllm_core.event import EventEmitter
+from gllm_core.schema import Chunk as Chunk
+from langchain_core.language_models import BaseChatModel
+from langchain_core.messages import BaseMessage as BaseMessage
+from langchain_core.messages.ai import UsageMetadata
+from langchain_core.tools import BaseTool
+from langgraph.graph import StateGraph
+from langgraph.graph.message import add_messages as add_messages
+from langgraph.graph.state import CompiledStateGraph
+from langgraph.managed import IsLastStep, RemainingSteps
+from typing import Annotated, Any
+from typing_extensions import TypedDict
+
+logger: Incomplete
+DEFAULT_INSTRUCTION: str
+TOOL_RUN_STREAMING_METHOD: str
+TOOL_OUTPUT_MANAGER_KEY: str
+CALL_ID_KEY: str
+
+@dataclass
+class ToolCallContext:
+    """Context information for executing a single tool call."""
+    config: dict[str, Any] | None
+    state: dict[str, Any]
+    pending_artifacts: list[dict[str, Any]]
+    hitl_decision: ApprovalDecision | None = ...
+
+class ReactAgentState(TypedDict):
+    """State schema for the ReAct agent.
+
+    Includes messages, step tracking, optional event emission support, artifacts, references,
+    metadata, tool output management, and deep agents middleware state (todos, filesystem).
+    """
+    messages: Annotated[Sequence[BaseMessage], add_messages]
+    is_last_step: IsLastStep
+    remaining_steps: RemainingSteps
+    event_emitter: EventEmitter | None
+    artifacts: list[dict[str, Any]] | None
+    references: Annotated[list[Chunk], add_references_chunks]
+    metadata: dict[str, Any] | None
+    tool_output_manager: ToolOutputManager | None
+    total_usage: Annotated[UsageMetadata | None, add_usage_metadata]
+    pii_mapping: Annotated[dict[str, str] | None, add_pii_mappings]
+    thread_id: str
+    todos: TodoList | None
+    current_step: int
+    delegation_depth: int
+    delegation_chain: list[str]
+    step_limit_config: StepLimitConfig | None
+
+class LangGraphReactAgent(LangGraphHitLMixin, BaseLangGraphAgent):
+    """A ReAct agent template built on LangGraph.
+
+    This agent can use either:
+    - An LMInvoker (if self.lm_invoker is set by BaseAgent)
+    - A LangChain BaseChatModel (if self.model is set by BaseAgent)
+
+    The graph structure follows the standard ReAct pattern:
+    agent -> tools -> agent (loop) -> END
+    """
+    tool_output_manager: Incomplete
+    step_limit_config: Incomplete
+    def __init__(self, name: str, instruction: str = ..., model: BaseChatModel | str | Any | None = None, tools: Sequence[BaseTool] | None = None, agents: Sequence[Any] | None = None, description: str | None = None, thread_id_key: str = 'thread_id', event_emitter: EventEmitter | None = None, tool_output_manager: ToolOutputManager | None = None, planning: bool = False, middlewares: Sequence[AgentMiddleware] | None = None, step_limit_config: StepLimitConfig | None = None, **kwargs: Any) -> None:
+        """Initialize the LangGraph ReAct Agent.
+
+        Args:
+            name: The name of the agent.
+            instruction: The system instruction for the agent.
+            model: The model to use (lm_invoker, LangChain model, string, etc.).
+            tools: Sequence of LangChain tools available to the agent.
+            agents: Optional sequence of sub-agents for delegation (coordinator mode).
+            description: Human-readable description of the agent.
+            thread_id_key: Key for thread ID in configuration.
+            event_emitter: Optional event emitter for streaming updates.
+            tool_output_manager: Optional ToolOutputManager instance for tool output management.
+                When provided, enables tool output storage, reference resolution, and sharing capabilities.
+                This enables multi-agent workflows where agents can access each other's tool outputs.
+                If None, tool output management is disabled for this agent.
+            planning: Enable planning capabilities with TodoListMiddleware. Defaults to False.
+            middlewares: Optional sequence of custom middleware to COMPOSE (not override) with built-in middleware.
+                Execution order: [TodoListMiddleware (if planning=True),
+                ...custom middlewares in order provided]
+                All middleware hooks execute - this extends capabilities, never replaces them.
+            enable_pii: Optional toggle to enable PII handling for tool inputs and outputs.
+            step_limit_config: Optional configuration for step limits and delegation depth.
+            **kwargs: Additional keyword arguments passed to BaseLangGraphAgent.
+        """
+    def define_graph(self, graph_builder: StateGraph) -> CompiledStateGraph:
+        """Define the ReAct agent graph structure.
+
+        Args:
+            graph_builder: The StateGraph builder to define the graph structure.
+
+        Returns:
+            Compiled LangGraph ready for execution.
+        """
+
+class LangGraphAgent(LangGraphReactAgent):
+    """Alias for LangGraphReactAgent."""
+class LangChainAgent(LangGraphReactAgent):
+    """Alias for LangGraphReactAgent."""
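
A minimal construction sketch using only parameters listed in the __init__ docstring; the model string and empty tool list are placeholders, and reading result["output"] follows the AgentInterface contract rather than anything specific to this file:

from aip_agents.agent.langgraph_react_agent import LangGraphReactAgent

agent = LangGraphReactAgent(
    name="research-assistant",
    instruction="Answer questions concisely.",
    model="gpt-4o-mini",  # placeholder; a LangChain BaseChatModel instance also works
    tools=[],  # LangChain tools would normally go here
    planning=True,  # enables the built-in TodoListMiddleware
)

# Synchronous entry point; per the agent interface the result dict contains 'output'.
result = agent.run("What is the capital of France?")
print(result["output"])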

aip_agents/agent/system_instruction_context.pyi
@@ -0,0 +1,13 @@
+from aip_agents.utils.constants import DefaultTimezone as DefaultTimezone
+
+__all__ = ['get_current_date_context', 'DefaultTimezone']
+
+def get_current_date_context(timezone: str = ...) -> str:
+    """Generate current date context for system prompts.
+
+    Args:
+        timezone: IANA timezone name for date formatting.
+
+    Returns:
+        Formatted date context string for inclusion in system prompts.
+    """
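
A quick illustration of splicing the helper into a system prompt, as its docstring suggests; the timezone value and prompt text are arbitrary examples:

from aip_agents.agent.system_instruction_context import get_current_date_context

date_context = get_current_date_context(timezone="Asia/Jakarta")
system_prompt = f"You are a scheduling assistant.\n\n{date_context}"
print(system_prompt)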
|
@@ -0,0 +1,140 @@
|
|
|
1
|
+
from _typeshed import Incomplete
|
|
2
|
+
from aip_agents.clients.langflow.types import LangflowEventType as LangflowEventType
|
|
3
|
+
from aip_agents.schema.agent import HttpxClientOptions as HttpxClientOptions
|
|
4
|
+
from aip_agents.utils.logger import get_logger as get_logger
|
|
5
|
+
from collections.abc import AsyncGenerator
|
|
6
|
+
from typing import Any
|
|
7
|
+
|
|
8
|
+
logger: Incomplete
|
|
9
|
+
DEFAULT_LANGFLOW_BASE_URL: Incomplete
|
|
10
|
+
MAX_PAGE_SIZE: int
|
|
11
|
+
|
|
12
|
+
class LangflowApiClient:
|
|
13
|
+
"""HTTP client for Langflow API with streaming and non-streaming support.
|
|
14
|
+
|
|
15
|
+
This client handles all communication with Langflow APIs, including:
|
|
16
|
+
- Non-streaming execution
|
|
17
|
+
- Server-Sent Events (SSE) streaming
|
|
18
|
+
- Session management for conversation continuity
|
|
19
|
+
- Error handling and retries
|
|
20
|
+
- Credential management
|
|
21
|
+
"""
|
|
22
|
+
flow_id: Incomplete
|
|
23
|
+
base_url: Incomplete
|
|
24
|
+
api_key: Incomplete
|
|
25
|
+
sessions: dict[str, str]
|
|
26
|
+
client_kwargs: Incomplete
|
|
27
|
+
def __init__(self, flow_id: str | None = None, base_url: str | None = None, api_key: str | None = None, httpx_client_options: HttpxClientOptions = None) -> None:
|
|
28
|
+
"""Initialize the Langflow API client.
|
|
29
|
+
|
|
30
|
+
Args:
|
|
31
|
+
flow_id: The unique identifier of the Langflow flow to execute.
|
|
32
|
+
base_url: The base URL of the Langflow API server.
|
|
33
|
+
api_key: The API key for Langflow authentication.
|
|
34
|
+
httpx_client_options: HTTP client configuration options for httpx, including timeout.
|
|
35
|
+
"""
|
|
36
|
+
def get_or_create_session(self, thread_id: str | None = None) -> str:
|
|
37
|
+
"""Get existing session ID or create a new one.
|
|
38
|
+
|
|
39
|
+
Args:
|
|
40
|
+
thread_id: Optional thread ID for session mapping.
|
|
41
|
+
|
|
42
|
+
Returns:
|
|
43
|
+
Session ID for the conversation.
|
|
44
|
+
"""
|
|
45
|
+
async def call_flow(self, input_value: str, session_id: str | None = None, flow_id: str | None = None, **_: Any) -> dict[str, Any]:
|
|
46
|
+
"""Execute Langflow flow without streaming.
|
|
47
|
+
|
|
48
|
+
Args:
|
|
49
|
+
input_value: The user input to send to the flow.
|
|
50
|
+
session_id: Optional session ID for conversation continuity.
|
|
51
|
+
flow_id: Optional flow ID to execute. If None, uses the instance flow_id.
|
|
52
|
+
**_: Additional keyword arguments.
|
|
53
|
+
|
|
54
|
+
Returns:
|
|
55
|
+
The response from the flow execution.
|
|
56
|
+
|
|
57
|
+
Raises:
|
|
58
|
+
httpx.HTTPError: If the HTTP request fails.
|
|
59
|
+
ValueError: If the response cannot be parsed.
|
|
60
|
+
"""
|
|
61
|
+
async def stream_flow(self, input_value: str, session_id: str | None = None, flow_id: str | None = None, **_: Any) -> AsyncGenerator[dict[str, Any], None]:
|
|
62
|
+
"""Execute Langflow flow with streaming.
|
|
63
|
+
|
|
64
|
+
Args:
|
|
65
|
+
input_value: The user input to send to the flow.
|
|
66
|
+
session_id: Optional session ID for conversation continuity.
|
|
67
|
+
flow_id: Optional flow ID to execute. If None, uses the instance flow_id.
|
|
68
|
+
**_: Additional keyword arguments.
|
|
69
|
+
|
|
70
|
+
Yields:
|
|
71
|
+
Parsed streaming events from the Langflow API.
|
|
72
|
+
|
|
73
|
+
Raises:
|
|
74
|
+
httpx.HTTPError: If the HTTP request fails.
|
|
75
|
+
ValueError: If streaming events cannot be parsed.
|
|
76
|
+
"""
|
|
77
|
+
def parse_stream_event(self, event_data: dict[str, Any]) -> dict[str, Any] | None:
|
|
78
|
+
"""Parse a single streaming event from Langflow.
|
|
79
|
+
|
|
80
|
+
Args:
|
|
81
|
+
event_data: Raw event data from Langflow streaming response.
|
|
82
|
+
|
|
83
|
+
Returns:
|
|
84
|
+
Parsed event dictionary or None if event should be skipped.
|
|
85
|
+
"""
|
|
86
|
+
def clear_session(self, thread_id: str) -> None:
|
|
87
|
+
"""Clear session for a specific thread.
|
|
88
|
+
|
|
89
|
+
Args:
|
|
90
|
+
thread_id: Thread ID to clear session for.
|
|
91
|
+
"""
|
|
92
|
+
def clear_all_sessions(self) -> None:
|
|
93
|
+
"""Clear all stored sessions."""
|
|
94
|
+
async def get_flows(self, project_id: str | None = None, remove_example_flows: bool = False, components_only: bool = False, header_flows: bool = False, get_all: bool = True, page: int = 1, size: int = 50) -> list[dict[str, Any]]:
|
|
95
|
+
"""Retrieve flows from Langflow API with full control over parameters.
|
|
96
|
+
|
|
97
|
+
Based on the official API docs: https://docs.langflow.org/api-flows
|
|
98
|
+
Uses the exact parameter format from the documentation.
|
|
99
|
+
|
|
100
|
+
Args:
|
|
101
|
+
project_id: Optional project ID to filter flows.
|
|
102
|
+
remove_example_flows: Whether to exclude example flows. Defaults to False.
|
|
103
|
+
components_only: Whether to return only components. Defaults to False.
|
|
104
|
+
header_flows: Whether to return only flow headers. Defaults to False.
|
|
105
|
+
get_all: Whether to return all flows (ignores pagination). Defaults to True.
|
|
106
|
+
page: Page number for pagination (ignored if get_all=True). Defaults to 1.
|
|
107
|
+
size: Number of flows per page (ignored if get_all=True). Defaults to 50.
|
|
108
|
+
|
|
109
|
+
Returns:
|
|
110
|
+
List of flows or flow headers from the Langflow API.
|
|
111
|
+
|
|
112
|
+
Raises:
|
|
113
|
+
httpx.HTTPError: If the HTTP request fails.
|
|
114
|
+
ValueError: If the response cannot be parsed or invalid parameters provided.
|
|
115
|
+
"""
|
|
116
|
+
async def get_all_flows(self, project_id: str | None = None, remove_example_flows: bool = False, components_only: bool = False, header_flows: bool = False) -> list[dict[str, Any]]:
|
|
117
|
+
"""Convenience method to get ALL flows using the backend's get_all=true feature.
|
|
118
|
+
|
|
119
|
+
This method is a simple wrapper around get_flows() with get_all=True,
|
|
120
|
+
which uses the Langflow backend's ability to return all flows in one request.
|
|
121
|
+
|
|
122
|
+
Args:
|
|
123
|
+
project_id: Optional project ID to filter flows.
|
|
124
|
+
remove_example_flows: Whether to exclude example flows. Defaults to False.
|
|
125
|
+
components_only: Whether to return only components. Defaults to False.
|
|
126
|
+
header_flows: Whether to return only flow headers. Defaults to False.
|
|
127
|
+
|
|
128
|
+
Returns:
|
|
129
|
+
List of all flows from the Langflow API.
|
|
130
|
+
|
|
131
|
+
Raises:
|
|
132
|
+
httpx.HTTPError: If the HTTP request fails.
|
|
133
|
+
ValueError: If the response cannot be parsed.
|
|
134
|
+
"""
|
|
135
|
+
async def health_check(self) -> bool:
|
|
136
|
+
"""Check if the Langflow API is accessible.
|
|
137
|
+
|
|
138
|
+
Returns:
|
|
139
|
+
True if the API is accessible, False otherwise.
|
|
140
|
+
"""
|
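
Finally, a hedged sketch of driving the low-level client directly with the methods stubbed above; the base URL, API key, and flow ID are placeholders:

import asyncio

from aip_agents.clients.langflow import LangflowApiClient

async def main() -> None:
    client = LangflowApiClient(
        flow_id="00000000-0000-0000-0000-000000000000",  # placeholder
        base_url="http://localhost:7860",  # placeholder
        api_key="sk-...",  # placeholder
    )

    if not await client.health_check():
        raise RuntimeError("Langflow API is not reachable")

    session_id = client.get_or_create_session(thread_id="thread-1")

    # One-shot execution.
    response = await client.call_flow("Hello!", session_id=session_id)
    print(response)

    # SSE streaming: yields parsed event dicts.
    async for event in client.stream_flow("Hello again!", session_id=session_id):
        print(event)

    client.clear_session("thread-1")

asyncio.run(main())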