aip-agents-binary 0.5.20 (py3-none-manylinux_2_31_x86_64.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aip_agents/__init__.py +65 -0
- aip_agents/__init__.pyi +19 -0
- aip_agents/a2a/__init__.py +19 -0
- aip_agents/a2a/__init__.pyi +3 -0
- aip_agents/a2a/server/__init__.py +10 -0
- aip_agents/a2a/server/__init__.pyi +4 -0
- aip_agents/a2a/server/base_executor.py +1086 -0
- aip_agents/a2a/server/base_executor.pyi +73 -0
- aip_agents/a2a/server/google_adk_executor.py +198 -0
- aip_agents/a2a/server/google_adk_executor.pyi +51 -0
- aip_agents/a2a/server/langflow_executor.py +180 -0
- aip_agents/a2a/server/langflow_executor.pyi +43 -0
- aip_agents/a2a/server/langgraph_executor.py +270 -0
- aip_agents/a2a/server/langgraph_executor.pyi +47 -0
- aip_agents/a2a/types.py +232 -0
- aip_agents/a2a/types.pyi +132 -0
- aip_agents/agent/__init__.py +27 -0
- aip_agents/agent/__init__.pyi +9 -0
- aip_agents/agent/base_agent.py +970 -0
- aip_agents/agent/base_agent.pyi +221 -0
- aip_agents/agent/base_langgraph_agent.py +2942 -0
- aip_agents/agent/base_langgraph_agent.pyi +232 -0
- aip_agents/agent/google_adk_agent.py +926 -0
- aip_agents/agent/google_adk_agent.pyi +141 -0
- aip_agents/agent/google_adk_constants.py +6 -0
- aip_agents/agent/google_adk_constants.pyi +3 -0
- aip_agents/agent/hitl/__init__.py +24 -0
- aip_agents/agent/hitl/__init__.pyi +6 -0
- aip_agents/agent/hitl/config.py +28 -0
- aip_agents/agent/hitl/config.pyi +15 -0
- aip_agents/agent/hitl/langgraph_hitl_mixin.py +515 -0
- aip_agents/agent/hitl/langgraph_hitl_mixin.pyi +42 -0
- aip_agents/agent/hitl/manager.py +532 -0
- aip_agents/agent/hitl/manager.pyi +200 -0
- aip_agents/agent/hitl/models.py +18 -0
- aip_agents/agent/hitl/models.pyi +3 -0
- aip_agents/agent/hitl/prompt/__init__.py +9 -0
- aip_agents/agent/hitl/prompt/__init__.pyi +4 -0
- aip_agents/agent/hitl/prompt/base.py +42 -0
- aip_agents/agent/hitl/prompt/base.pyi +24 -0
- aip_agents/agent/hitl/prompt/deferred.py +73 -0
- aip_agents/agent/hitl/prompt/deferred.pyi +30 -0
- aip_agents/agent/hitl/registry.py +149 -0
- aip_agents/agent/hitl/registry.pyi +101 -0
- aip_agents/agent/interface.py +138 -0
- aip_agents/agent/interface.pyi +81 -0
- aip_agents/agent/interfaces.py +65 -0
- aip_agents/agent/interfaces.pyi +44 -0
- aip_agents/agent/langflow_agent.py +464 -0
- aip_agents/agent/langflow_agent.pyi +133 -0
- aip_agents/agent/langgraph_memory_enhancer_agent.py +433 -0
- aip_agents/agent/langgraph_memory_enhancer_agent.pyi +49 -0
- aip_agents/agent/langgraph_react_agent.py +2514 -0
- aip_agents/agent/langgraph_react_agent.pyi +126 -0
- aip_agents/agent/system_instruction_context.py +34 -0
- aip_agents/agent/system_instruction_context.pyi +13 -0
- aip_agents/clients/__init__.py +10 -0
- aip_agents/clients/__init__.pyi +4 -0
- aip_agents/clients/langflow/__init__.py +10 -0
- aip_agents/clients/langflow/__init__.pyi +4 -0
- aip_agents/clients/langflow/client.py +477 -0
- aip_agents/clients/langflow/client.pyi +140 -0
- aip_agents/clients/langflow/types.py +18 -0
- aip_agents/clients/langflow/types.pyi +7 -0
- aip_agents/constants.py +23 -0
- aip_agents/constants.pyi +7 -0
- aip_agents/credentials/manager.py +132 -0
- aip_agents/examples/__init__.py +5 -0
- aip_agents/examples/__init__.pyi +0 -0
- aip_agents/examples/compare_streaming_client.py +783 -0
- aip_agents/examples/compare_streaming_client.pyi +48 -0
- aip_agents/examples/compare_streaming_server.py +142 -0
- aip_agents/examples/compare_streaming_server.pyi +18 -0
- aip_agents/examples/demo_memory_recall.py +401 -0
- aip_agents/examples/demo_memory_recall.pyi +58 -0
- aip_agents/examples/hello_world_a2a_google_adk_client.py +49 -0
- aip_agents/examples/hello_world_a2a_google_adk_client.pyi +9 -0
- aip_agents/examples/hello_world_a2a_google_adk_client_agent.py +48 -0
- aip_agents/examples/hello_world_a2a_google_adk_client_agent.pyi +9 -0
- aip_agents/examples/hello_world_a2a_google_adk_client_streaming.py +60 -0
- aip_agents/examples/hello_world_a2a_google_adk_client_streaming.pyi +9 -0
- aip_agents/examples/hello_world_a2a_google_adk_server.py +79 -0
- aip_agents/examples/hello_world_a2a_google_adk_server.pyi +15 -0
- aip_agents/examples/hello_world_a2a_langchain_client.py +39 -0
- aip_agents/examples/hello_world_a2a_langchain_client.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langchain_client_agent.py +39 -0
- aip_agents/examples/hello_world_a2a_langchain_client_agent.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langchain_client_lm_invoker.py +37 -0
- aip_agents/examples/hello_world_a2a_langchain_client_lm_invoker.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langchain_client_streaming.py +41 -0
- aip_agents/examples/hello_world_a2a_langchain_client_streaming.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langchain_reference_client_streaming.py +60 -0
- aip_agents/examples/hello_world_a2a_langchain_reference_client_streaming.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langchain_reference_server.py +105 -0
- aip_agents/examples/hello_world_a2a_langchain_reference_server.pyi +15 -0
- aip_agents/examples/hello_world_a2a_langchain_server.py +79 -0
- aip_agents/examples/hello_world_a2a_langchain_server.pyi +15 -0
- aip_agents/examples/hello_world_a2a_langchain_server_lm_invoker.py +78 -0
- aip_agents/examples/hello_world_a2a_langchain_server_lm_invoker.pyi +15 -0
- aip_agents/examples/hello_world_a2a_langflow_client.py +83 -0
- aip_agents/examples/hello_world_a2a_langflow_client.pyi +9 -0
- aip_agents/examples/hello_world_a2a_langflow_server.py +82 -0
- aip_agents/examples/hello_world_a2a_langflow_server.pyi +14 -0
- aip_agents/examples/hello_world_a2a_langgraph_artifact_client.py +73 -0
- aip_agents/examples/hello_world_a2a_langgraph_artifact_client.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langgraph_artifact_client_streaming.py +76 -0
- aip_agents/examples/hello_world_a2a_langgraph_artifact_client_streaming.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langgraph_artifact_server.py +92 -0
- aip_agents/examples/hello_world_a2a_langgraph_artifact_server.pyi +16 -0
- aip_agents/examples/hello_world_a2a_langgraph_client.py +54 -0
- aip_agents/examples/hello_world_a2a_langgraph_client.pyi +9 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_agent.py +54 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_agent.pyi +9 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_agent_lm_invoker.py +32 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_agent_lm_invoker.pyi +2 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_streaming.py +50 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_streaming.pyi +9 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_streaming_lm_invoker.py +44 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_streaming_lm_invoker.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_streaming_tool_streaming.py +92 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_streaming_tool_streaming.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langgraph_server.py +84 -0
- aip_agents/examples/hello_world_a2a_langgraph_server.pyi +14 -0
- aip_agents/examples/hello_world_a2a_langgraph_server_lm_invoker.py +79 -0
- aip_agents/examples/hello_world_a2a_langgraph_server_lm_invoker.pyi +15 -0
- aip_agents/examples/hello_world_a2a_langgraph_server_tool_streaming.py +132 -0
- aip_agents/examples/hello_world_a2a_langgraph_server_tool_streaming.pyi +15 -0
- aip_agents/examples/hello_world_a2a_mcp_langgraph.py +196 -0
- aip_agents/examples/hello_world_a2a_mcp_langgraph.pyi +48 -0
- aip_agents/examples/hello_world_a2a_three_level_agent_hierarchy_client.py +244 -0
- aip_agents/examples/hello_world_a2a_three_level_agent_hierarchy_client.pyi +48 -0
- aip_agents/examples/hello_world_a2a_three_level_agent_hierarchy_server.py +251 -0
- aip_agents/examples/hello_world_a2a_three_level_agent_hierarchy_server.pyi +45 -0
- aip_agents/examples/hello_world_a2a_with_metadata_langchain_client.py +57 -0
- aip_agents/examples/hello_world_a2a_with_metadata_langchain_client.pyi +5 -0
- aip_agents/examples/hello_world_a2a_with_metadata_langchain_server_lm_invoker.py +80 -0
- aip_agents/examples/hello_world_a2a_with_metadata_langchain_server_lm_invoker.pyi +15 -0
- aip_agents/examples/hello_world_google_adk.py +41 -0
- aip_agents/examples/hello_world_google_adk.pyi +5 -0
- aip_agents/examples/hello_world_google_adk_mcp_http.py +34 -0
- aip_agents/examples/hello_world_google_adk_mcp_http.pyi +5 -0
- aip_agents/examples/hello_world_google_adk_mcp_http_stream.py +40 -0
- aip_agents/examples/hello_world_google_adk_mcp_http_stream.pyi +5 -0
- aip_agents/examples/hello_world_google_adk_mcp_sse.py +44 -0
- aip_agents/examples/hello_world_google_adk_mcp_sse.pyi +5 -0
- aip_agents/examples/hello_world_google_adk_mcp_sse_stream.py +48 -0
- aip_agents/examples/hello_world_google_adk_mcp_sse_stream.pyi +5 -0
- aip_agents/examples/hello_world_google_adk_mcp_stdio.py +44 -0
- aip_agents/examples/hello_world_google_adk_mcp_stdio.pyi +5 -0
- aip_agents/examples/hello_world_google_adk_mcp_stdio_stream.py +48 -0
- aip_agents/examples/hello_world_google_adk_mcp_stdio_stream.pyi +5 -0
- aip_agents/examples/hello_world_google_adk_stream.py +44 -0
- aip_agents/examples/hello_world_google_adk_stream.pyi +5 -0
- aip_agents/examples/hello_world_langchain.py +28 -0
- aip_agents/examples/hello_world_langchain.pyi +5 -0
- aip_agents/examples/hello_world_langchain_lm_invoker.py +15 -0
- aip_agents/examples/hello_world_langchain_lm_invoker.pyi +2 -0
- aip_agents/examples/hello_world_langchain_mcp_http.py +34 -0
- aip_agents/examples/hello_world_langchain_mcp_http.pyi +5 -0
- aip_agents/examples/hello_world_langchain_mcp_http_interactive.py +130 -0
- aip_agents/examples/hello_world_langchain_mcp_http_interactive.pyi +16 -0
- aip_agents/examples/hello_world_langchain_mcp_http_stream.py +42 -0
- aip_agents/examples/hello_world_langchain_mcp_http_stream.pyi +5 -0
- aip_agents/examples/hello_world_langchain_mcp_multi_server.py +155 -0
- aip_agents/examples/hello_world_langchain_mcp_multi_server.pyi +18 -0
- aip_agents/examples/hello_world_langchain_mcp_sse.py +34 -0
- aip_agents/examples/hello_world_langchain_mcp_sse.pyi +5 -0
- aip_agents/examples/hello_world_langchain_mcp_sse_stream.py +40 -0
- aip_agents/examples/hello_world_langchain_mcp_sse_stream.pyi +5 -0
- aip_agents/examples/hello_world_langchain_mcp_stdio.py +30 -0
- aip_agents/examples/hello_world_langchain_mcp_stdio.pyi +5 -0
- aip_agents/examples/hello_world_langchain_mcp_stdio_stream.py +41 -0
- aip_agents/examples/hello_world_langchain_mcp_stdio_stream.pyi +5 -0
- aip_agents/examples/hello_world_langchain_stream.py +36 -0
- aip_agents/examples/hello_world_langchain_stream.pyi +5 -0
- aip_agents/examples/hello_world_langchain_stream_lm_invoker.py +39 -0
- aip_agents/examples/hello_world_langchain_stream_lm_invoker.pyi +5 -0
- aip_agents/examples/hello_world_langflow_agent.py +163 -0
- aip_agents/examples/hello_world_langflow_agent.pyi +35 -0
- aip_agents/examples/hello_world_langgraph.py +39 -0
- aip_agents/examples/hello_world_langgraph.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_bosa_twitter.py +41 -0
- aip_agents/examples/hello_world_langgraph_bosa_twitter.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_mcp_http.py +31 -0
- aip_agents/examples/hello_world_langgraph_mcp_http.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_mcp_http_stream.py +34 -0
- aip_agents/examples/hello_world_langgraph_mcp_http_stream.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_mcp_sse.py +35 -0
- aip_agents/examples/hello_world_langgraph_mcp_sse.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_mcp_sse_stream.py +50 -0
- aip_agents/examples/hello_world_langgraph_mcp_sse_stream.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_mcp_stdio.py +35 -0
- aip_agents/examples/hello_world_langgraph_mcp_stdio.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_mcp_stdio_stream.py +50 -0
- aip_agents/examples/hello_world_langgraph_mcp_stdio_stream.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_stream.py +43 -0
- aip_agents/examples/hello_world_langgraph_stream.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_stream_lm_invoker.py +37 -0
- aip_agents/examples/hello_world_langgraph_stream_lm_invoker.pyi +5 -0
- aip_agents/examples/hello_world_model_switch_cli.py +210 -0
- aip_agents/examples/hello_world_model_switch_cli.pyi +30 -0
- aip_agents/examples/hello_world_multi_agent_adk.py +75 -0
- aip_agents/examples/hello_world_multi_agent_adk.pyi +6 -0
- aip_agents/examples/hello_world_multi_agent_langchain.py +54 -0
- aip_agents/examples/hello_world_multi_agent_langchain.pyi +5 -0
- aip_agents/examples/hello_world_multi_agent_langgraph.py +66 -0
- aip_agents/examples/hello_world_multi_agent_langgraph.pyi +5 -0
- aip_agents/examples/hello_world_multi_agent_langgraph_lm_invoker.py +69 -0
- aip_agents/examples/hello_world_multi_agent_langgraph_lm_invoker.pyi +5 -0
- aip_agents/examples/hello_world_pii_logger.py +21 -0
- aip_agents/examples/hello_world_pii_logger.pyi +5 -0
- aip_agents/examples/hello_world_sentry.py +133 -0
- aip_agents/examples/hello_world_sentry.pyi +21 -0
- aip_agents/examples/hello_world_step_limits.py +273 -0
- aip_agents/examples/hello_world_step_limits.pyi +17 -0
- aip_agents/examples/hello_world_stock_a2a_server.py +103 -0
- aip_agents/examples/hello_world_stock_a2a_server.pyi +17 -0
- aip_agents/examples/hello_world_tool_output_client.py +46 -0
- aip_agents/examples/hello_world_tool_output_client.pyi +5 -0
- aip_agents/examples/hello_world_tool_output_server.py +114 -0
- aip_agents/examples/hello_world_tool_output_server.pyi +19 -0
- aip_agents/examples/hitl_demo.py +724 -0
- aip_agents/examples/hitl_demo.pyi +67 -0
- aip_agents/examples/mcp_configs/configs.py +63 -0
- aip_agents/examples/mcp_servers/common.py +76 -0
- aip_agents/examples/mcp_servers/mcp_name.py +29 -0
- aip_agents/examples/mcp_servers/mcp_server_http.py +19 -0
- aip_agents/examples/mcp_servers/mcp_server_sse.py +19 -0
- aip_agents/examples/mcp_servers/mcp_server_stdio.py +19 -0
- aip_agents/examples/mcp_servers/mcp_time.py +10 -0
- aip_agents/examples/pii_demo_langgraph_client.py +69 -0
- aip_agents/examples/pii_demo_langgraph_client.pyi +5 -0
- aip_agents/examples/pii_demo_langgraph_server.py +126 -0
- aip_agents/examples/pii_demo_langgraph_server.pyi +20 -0
- aip_agents/examples/pii_demo_multi_agent_client.py +80 -0
- aip_agents/examples/pii_demo_multi_agent_client.pyi +5 -0
- aip_agents/examples/pii_demo_multi_agent_server.py +247 -0
- aip_agents/examples/pii_demo_multi_agent_server.pyi +40 -0
- aip_agents/examples/todolist_planning_a2a_langchain_client.py +70 -0
- aip_agents/examples/todolist_planning_a2a_langchain_client.pyi +5 -0
- aip_agents/examples/todolist_planning_a2a_langgraph_server.py +88 -0
- aip_agents/examples/todolist_planning_a2a_langgraph_server.pyi +19 -0
- aip_agents/examples/tools/__init__.py +27 -0
- aip_agents/examples/tools/__init__.pyi +9 -0
- aip_agents/examples/tools/adk_arithmetic_tools.py +36 -0
- aip_agents/examples/tools/adk_arithmetic_tools.pyi +24 -0
- aip_agents/examples/tools/adk_weather_tool.py +60 -0
- aip_agents/examples/tools/adk_weather_tool.pyi +18 -0
- aip_agents/examples/tools/data_generator_tool.py +103 -0
- aip_agents/examples/tools/data_generator_tool.pyi +15 -0
- aip_agents/examples/tools/data_visualization_tool.py +312 -0
- aip_agents/examples/tools/data_visualization_tool.pyi +19 -0
- aip_agents/examples/tools/image_artifact_tool.py +136 -0
- aip_agents/examples/tools/image_artifact_tool.pyi +26 -0
- aip_agents/examples/tools/langchain_arithmetic_tools.py +26 -0
- aip_agents/examples/tools/langchain_arithmetic_tools.pyi +17 -0
- aip_agents/examples/tools/langchain_currency_exchange_tool.py +88 -0
- aip_agents/examples/tools/langchain_currency_exchange_tool.pyi +20 -0
- aip_agents/examples/tools/langchain_graph_artifact_tool.py +172 -0
- aip_agents/examples/tools/langchain_graph_artifact_tool.pyi +25 -0
- aip_agents/examples/tools/langchain_weather_tool.py +48 -0
- aip_agents/examples/tools/langchain_weather_tool.pyi +19 -0
- aip_agents/examples/tools/langgraph_streaming_tool.py +130 -0
- aip_agents/examples/tools/langgraph_streaming_tool.pyi +43 -0
- aip_agents/examples/tools/mock_retrieval_tool.py +56 -0
- aip_agents/examples/tools/mock_retrieval_tool.pyi +13 -0
- aip_agents/examples/tools/pii_demo_tools.py +189 -0
- aip_agents/examples/tools/pii_demo_tools.pyi +54 -0
- aip_agents/examples/tools/random_chart_tool.py +142 -0
- aip_agents/examples/tools/random_chart_tool.pyi +20 -0
- aip_agents/examples/tools/serper_tool.py +202 -0
- aip_agents/examples/tools/serper_tool.pyi +16 -0
- aip_agents/examples/tools/stock_tools.py +82 -0
- aip_agents/examples/tools/stock_tools.pyi +36 -0
- aip_agents/examples/tools/table_generator_tool.py +167 -0
- aip_agents/examples/tools/table_generator_tool.pyi +22 -0
- aip_agents/examples/tools/time_tool.py +82 -0
- aip_agents/examples/tools/time_tool.pyi +15 -0
- aip_agents/examples/tools/weather_forecast_tool.py +38 -0
- aip_agents/examples/tools/weather_forecast_tool.pyi +14 -0
- aip_agents/executor/agent_executor.py +473 -0
- aip_agents/executor/base.py +48 -0
- aip_agents/mcp/__init__.py +1 -0
- aip_agents/mcp/__init__.pyi +0 -0
- aip_agents/mcp/client/__init__.py +14 -0
- aip_agents/mcp/client/__init__.pyi +5 -0
- aip_agents/mcp/client/base_mcp_client.py +369 -0
- aip_agents/mcp/client/base_mcp_client.pyi +148 -0
- aip_agents/mcp/client/connection_manager.py +193 -0
- aip_agents/mcp/client/connection_manager.pyi +48 -0
- aip_agents/mcp/client/google_adk/__init__.py +11 -0
- aip_agents/mcp/client/google_adk/__init__.pyi +3 -0
- aip_agents/mcp/client/google_adk/client.py +381 -0
- aip_agents/mcp/client/google_adk/client.pyi +75 -0
- aip_agents/mcp/client/langchain/__init__.py +11 -0
- aip_agents/mcp/client/langchain/__init__.pyi +3 -0
- aip_agents/mcp/client/langchain/client.py +265 -0
- aip_agents/mcp/client/langchain/client.pyi +48 -0
- aip_agents/mcp/client/persistent_session.py +359 -0
- aip_agents/mcp/client/persistent_session.pyi +113 -0
- aip_agents/mcp/client/session_pool.py +351 -0
- aip_agents/mcp/client/session_pool.pyi +101 -0
- aip_agents/mcp/client/transports.py +215 -0
- aip_agents/mcp/client/transports.pyi +123 -0
- aip_agents/mcp/utils/__init__.py +7 -0
- aip_agents/mcp/utils/__init__.pyi +0 -0
- aip_agents/mcp/utils/config_validator.py +139 -0
- aip_agents/mcp/utils/config_validator.pyi +82 -0
- aip_agents/memory/__init__.py +14 -0
- aip_agents/memory/__init__.pyi +5 -0
- aip_agents/memory/adapters/__init__.py +10 -0
- aip_agents/memory/adapters/__init__.pyi +4 -0
- aip_agents/memory/adapters/base_adapter.py +717 -0
- aip_agents/memory/adapters/base_adapter.pyi +150 -0
- aip_agents/memory/adapters/mem0.py +84 -0
- aip_agents/memory/adapters/mem0.pyi +22 -0
- aip_agents/memory/base.py +84 -0
- aip_agents/memory/base.pyi +60 -0
- aip_agents/memory/constants.py +49 -0
- aip_agents/memory/constants.pyi +25 -0
- aip_agents/memory/factory.py +86 -0
- aip_agents/memory/factory.pyi +24 -0
- aip_agents/memory/guidance.py +20 -0
- aip_agents/memory/guidance.pyi +3 -0
- aip_agents/memory/simple_memory.py +47 -0
- aip_agents/memory/simple_memory.pyi +23 -0
- aip_agents/middleware/__init__.py +17 -0
- aip_agents/middleware/__init__.pyi +5 -0
- aip_agents/middleware/base.py +88 -0
- aip_agents/middleware/base.pyi +71 -0
- aip_agents/middleware/manager.py +128 -0
- aip_agents/middleware/manager.pyi +80 -0
- aip_agents/middleware/todolist.py +274 -0
- aip_agents/middleware/todolist.pyi +125 -0
- aip_agents/schema/__init__.py +69 -0
- aip_agents/schema/__init__.pyi +9 -0
- aip_agents/schema/a2a.py +56 -0
- aip_agents/schema/a2a.pyi +40 -0
- aip_agents/schema/agent.py +111 -0
- aip_agents/schema/agent.pyi +65 -0
- aip_agents/schema/hitl.py +157 -0
- aip_agents/schema/hitl.pyi +89 -0
- aip_agents/schema/langgraph.py +37 -0
- aip_agents/schema/langgraph.pyi +28 -0
- aip_agents/schema/model_id.py +97 -0
- aip_agents/schema/model_id.pyi +54 -0
- aip_agents/schema/step_limit.py +108 -0
- aip_agents/schema/step_limit.pyi +63 -0
- aip_agents/schema/storage.py +40 -0
- aip_agents/schema/storage.pyi +21 -0
- aip_agents/sentry/__init__.py +11 -0
- aip_agents/sentry/__init__.pyi +3 -0
- aip_agents/sentry/sentry.py +151 -0
- aip_agents/sentry/sentry.pyi +48 -0
- aip_agents/storage/__init__.py +41 -0
- aip_agents/storage/__init__.pyi +8 -0
- aip_agents/storage/base.py +85 -0
- aip_agents/storage/base.pyi +58 -0
- aip_agents/storage/clients/__init__.py +12 -0
- aip_agents/storage/clients/__init__.pyi +3 -0
- aip_agents/storage/clients/minio_client.py +318 -0
- aip_agents/storage/clients/minio_client.pyi +137 -0
- aip_agents/storage/config.py +62 -0
- aip_agents/storage/config.pyi +29 -0
- aip_agents/storage/providers/__init__.py +15 -0
- aip_agents/storage/providers/__init__.pyi +5 -0
- aip_agents/storage/providers/base.py +106 -0
- aip_agents/storage/providers/base.pyi +88 -0
- aip_agents/storage/providers/memory.py +114 -0
- aip_agents/storage/providers/memory.pyi +79 -0
- aip_agents/storage/providers/object_storage.py +214 -0
- aip_agents/storage/providers/object_storage.pyi +98 -0
- aip_agents/tools/__init__.py +33 -0
- aip_agents/tools/__init__.pyi +13 -0
- aip_agents/tools/bosa_tools.py +105 -0
- aip_agents/tools/bosa_tools.pyi +37 -0
- aip_agents/tools/browser_use/__init__.py +82 -0
- aip_agents/tools/browser_use/__init__.pyi +14 -0
- aip_agents/tools/browser_use/action_parser.py +103 -0
- aip_agents/tools/browser_use/action_parser.pyi +18 -0
- aip_agents/tools/browser_use/browser_use_tool.py +1112 -0
- aip_agents/tools/browser_use/browser_use_tool.pyi +50 -0
- aip_agents/tools/browser_use/llm_config.py +120 -0
- aip_agents/tools/browser_use/llm_config.pyi +52 -0
- aip_agents/tools/browser_use/minio_storage.py +198 -0
- aip_agents/tools/browser_use/minio_storage.pyi +109 -0
- aip_agents/tools/browser_use/schemas.py +119 -0
- aip_agents/tools/browser_use/schemas.pyi +32 -0
- aip_agents/tools/browser_use/session.py +76 -0
- aip_agents/tools/browser_use/session.pyi +4 -0
- aip_agents/tools/browser_use/session_errors.py +132 -0
- aip_agents/tools/browser_use/session_errors.pyi +53 -0
- aip_agents/tools/browser_use/steel_session_recording.py +317 -0
- aip_agents/tools/browser_use/steel_session_recording.pyi +63 -0
- aip_agents/tools/browser_use/streaming.py +813 -0
- aip_agents/tools/browser_use/streaming.pyi +81 -0
- aip_agents/tools/browser_use/structured_data_parser.py +257 -0
- aip_agents/tools/browser_use/structured_data_parser.pyi +86 -0
- aip_agents/tools/browser_use/structured_data_recovery.py +204 -0
- aip_agents/tools/browser_use/structured_data_recovery.pyi +43 -0
- aip_agents/tools/browser_use/types.py +78 -0
- aip_agents/tools/browser_use/types.pyi +45 -0
- aip_agents/tools/code_sandbox/__init__.py +26 -0
- aip_agents/tools/code_sandbox/__init__.pyi +3 -0
- aip_agents/tools/code_sandbox/constant.py +13 -0
- aip_agents/tools/code_sandbox/constant.pyi +4 -0
- aip_agents/tools/code_sandbox/e2b_cloud_sandbox_extended.py +257 -0
- aip_agents/tools/code_sandbox/e2b_cloud_sandbox_extended.pyi +86 -0
- aip_agents/tools/code_sandbox/e2b_sandbox_tool.py +411 -0
- aip_agents/tools/code_sandbox/e2b_sandbox_tool.pyi +29 -0
- aip_agents/tools/constants.py +165 -0
- aip_agents/tools/constants.pyi +135 -0
- aip_agents/tools/document_loader/__init__.py +44 -0
- aip_agents/tools/document_loader/__init__.pyi +7 -0
- aip_agents/tools/document_loader/base_reader.py +302 -0
- aip_agents/tools/document_loader/base_reader.pyi +75 -0
- aip_agents/tools/document_loader/docx_reader_tool.py +68 -0
- aip_agents/tools/document_loader/docx_reader_tool.pyi +10 -0
- aip_agents/tools/document_loader/excel_reader_tool.py +171 -0
- aip_agents/tools/document_loader/excel_reader_tool.pyi +26 -0
- aip_agents/tools/document_loader/pdf_reader_tool.py +79 -0
- aip_agents/tools/document_loader/pdf_reader_tool.pyi +11 -0
- aip_agents/tools/document_loader/pdf_splitter.py +169 -0
- aip_agents/tools/document_loader/pdf_splitter.pyi +18 -0
- aip_agents/tools/gl_connector/__init__.py +5 -0
- aip_agents/tools/gl_connector/__init__.pyi +3 -0
- aip_agents/tools/gl_connector/tool.py +351 -0
- aip_agents/tools/gl_connector/tool.pyi +74 -0
- aip_agents/tools/memory_search/__init__.py +22 -0
- aip_agents/tools/memory_search/__init__.pyi +5 -0
- aip_agents/tools/memory_search/base.py +200 -0
- aip_agents/tools/memory_search/base.pyi +69 -0
- aip_agents/tools/memory_search/mem0.py +258 -0
- aip_agents/tools/memory_search/mem0.pyi +19 -0
- aip_agents/tools/memory_search/schema.py +48 -0
- aip_agents/tools/memory_search/schema.pyi +15 -0
- aip_agents/tools/memory_search_tool.py +26 -0
- aip_agents/tools/memory_search_tool.pyi +3 -0
- aip_agents/tools/time_tool.py +117 -0
- aip_agents/tools/time_tool.pyi +16 -0
- aip_agents/tools/tool_config_injector.py +300 -0
- aip_agents/tools/tool_config_injector.pyi +26 -0
- aip_agents/tools/web_search/__init__.py +15 -0
- aip_agents/tools/web_search/__init__.pyi +3 -0
- aip_agents/tools/web_search/serper_tool.py +187 -0
- aip_agents/tools/web_search/serper_tool.pyi +19 -0
- aip_agents/types/__init__.py +70 -0
- aip_agents/types/__init__.pyi +36 -0
- aip_agents/types/a2a_events.py +13 -0
- aip_agents/types/a2a_events.pyi +3 -0
- aip_agents/utils/__init__.py +79 -0
- aip_agents/utils/__init__.pyi +11 -0
- aip_agents/utils/a2a_connector.py +1757 -0
- aip_agents/utils/a2a_connector.pyi +146 -0
- aip_agents/utils/artifact_helpers.py +502 -0
- aip_agents/utils/artifact_helpers.pyi +203 -0
- aip_agents/utils/constants.py +22 -0
- aip_agents/utils/constants.pyi +10 -0
- aip_agents/utils/datetime/__init__.py +34 -0
- aip_agents/utils/datetime/__init__.pyi +4 -0
- aip_agents/utils/datetime/normalization.py +231 -0
- aip_agents/utils/datetime/normalization.pyi +95 -0
- aip_agents/utils/datetime/timezone.py +206 -0
- aip_agents/utils/datetime/timezone.pyi +48 -0
- aip_agents/utils/env_loader.py +27 -0
- aip_agents/utils/env_loader.pyi +10 -0
- aip_agents/utils/event_handler_registry.py +58 -0
- aip_agents/utils/event_handler_registry.pyi +23 -0
- aip_agents/utils/file_prompt_utils.py +176 -0
- aip_agents/utils/file_prompt_utils.pyi +21 -0
- aip_agents/utils/final_response_builder.py +211 -0
- aip_agents/utils/final_response_builder.pyi +34 -0
- aip_agents/utils/formatter_llm_client.py +231 -0
- aip_agents/utils/formatter_llm_client.pyi +71 -0
- aip_agents/utils/langgraph/__init__.py +19 -0
- aip_agents/utils/langgraph/__init__.pyi +3 -0
- aip_agents/utils/langgraph/converter.py +128 -0
- aip_agents/utils/langgraph/converter.pyi +49 -0
- aip_agents/utils/langgraph/tool_managers/__init__.py +15 -0
- aip_agents/utils/langgraph/tool_managers/__init__.pyi +5 -0
- aip_agents/utils/langgraph/tool_managers/a2a_tool_manager.py +99 -0
- aip_agents/utils/langgraph/tool_managers/a2a_tool_manager.pyi +35 -0
- aip_agents/utils/langgraph/tool_managers/base_tool_manager.py +66 -0
- aip_agents/utils/langgraph/tool_managers/base_tool_manager.pyi +48 -0
- aip_agents/utils/langgraph/tool_managers/delegation_tool_manager.py +1071 -0
- aip_agents/utils/langgraph/tool_managers/delegation_tool_manager.pyi +56 -0
- aip_agents/utils/langgraph/tool_output_management.py +967 -0
- aip_agents/utils/langgraph/tool_output_management.pyi +292 -0
- aip_agents/utils/logger.py +195 -0
- aip_agents/utils/logger.pyi +60 -0
- aip_agents/utils/metadata/__init__.py +27 -0
- aip_agents/utils/metadata/__init__.pyi +5 -0
- aip_agents/utils/metadata/activity_metadata_helper.py +407 -0
- aip_agents/utils/metadata/activity_metadata_helper.pyi +25 -0
- aip_agents/utils/metadata/activity_narrative/__init__.py +35 -0
- aip_agents/utils/metadata/activity_narrative/__init__.pyi +7 -0
- aip_agents/utils/metadata/activity_narrative/builder.py +817 -0
- aip_agents/utils/metadata/activity_narrative/builder.pyi +35 -0
- aip_agents/utils/metadata/activity_narrative/constants.py +51 -0
- aip_agents/utils/metadata/activity_narrative/constants.pyi +10 -0
- aip_agents/utils/metadata/activity_narrative/context.py +49 -0
- aip_agents/utils/metadata/activity_narrative/context.pyi +32 -0
- aip_agents/utils/metadata/activity_narrative/formatters.py +230 -0
- aip_agents/utils/metadata/activity_narrative/formatters.pyi +48 -0
- aip_agents/utils/metadata/activity_narrative/utils.py +35 -0
- aip_agents/utils/metadata/activity_narrative/utils.pyi +12 -0
- aip_agents/utils/metadata/schemas/__init__.py +16 -0
- aip_agents/utils/metadata/schemas/__init__.pyi +4 -0
- aip_agents/utils/metadata/schemas/activity_schema.py +29 -0
- aip_agents/utils/metadata/schemas/activity_schema.pyi +18 -0
- aip_agents/utils/metadata/schemas/thinking_schema.py +31 -0
- aip_agents/utils/metadata/schemas/thinking_schema.pyi +20 -0
- aip_agents/utils/metadata/thinking_metadata_helper.py +38 -0
- aip_agents/utils/metadata/thinking_metadata_helper.pyi +4 -0
- aip_agents/utils/metadata_helper.py +358 -0
- aip_agents/utils/metadata_helper.pyi +117 -0
- aip_agents/utils/name_preprocessor/__init__.py +17 -0
- aip_agents/utils/name_preprocessor/__init__.pyi +6 -0
- aip_agents/utils/name_preprocessor/base_name_preprocessor.py +73 -0
- aip_agents/utils/name_preprocessor/base_name_preprocessor.pyi +52 -0
- aip_agents/utils/name_preprocessor/google_name_preprocessor.py +100 -0
- aip_agents/utils/name_preprocessor/google_name_preprocessor.pyi +38 -0
- aip_agents/utils/name_preprocessor/name_preprocessor.py +87 -0
- aip_agents/utils/name_preprocessor/name_preprocessor.pyi +41 -0
- aip_agents/utils/name_preprocessor/openai_name_preprocessor.py +48 -0
- aip_agents/utils/name_preprocessor/openai_name_preprocessor.pyi +34 -0
- aip_agents/utils/pii/__init__.py +25 -0
- aip_agents/utils/pii/__init__.pyi +5 -0
- aip_agents/utils/pii/pii_handler.py +397 -0
- aip_agents/utils/pii/pii_handler.pyi +96 -0
- aip_agents/utils/pii/pii_helper.py +207 -0
- aip_agents/utils/pii/pii_helper.pyi +78 -0
- aip_agents/utils/pii/uuid_deanonymizer_mapping.py +195 -0
- aip_agents/utils/pii/uuid_deanonymizer_mapping.pyi +73 -0
- aip_agents/utils/reference_helper.py +273 -0
- aip_agents/utils/reference_helper.pyi +81 -0
- aip_agents/utils/sse_chunk_transformer.py +831 -0
- aip_agents/utils/sse_chunk_transformer.pyi +166 -0
- aip_agents/utils/step_limit_manager.py +265 -0
- aip_agents/utils/step_limit_manager.pyi +112 -0
- aip_agents/utils/token_usage_helper.py +156 -0
- aip_agents/utils/token_usage_helper.pyi +60 -0
- aip_agents_binary-0.5.20.dist-info/METADATA +681 -0
- aip_agents_binary-0.5.20.dist-info/RECORD +546 -0
- aip_agents_binary-0.5.20.dist-info/WHEEL +5 -0
- aip_agents_binary-0.5.20.dist-info/top_level.txt +1 -0
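The examples listed above ship as importable modules inside the wheel, so one low-effort way to exercise the package is to run one of them directly after installation. The snippet below is a sketch and not part of the package: it assumes the distribution installs as aip-agents-binary, that OPENAI_API_KEY is set in the environment, and it reuses langgraph_stream_example() from hello_world_langgraph_stream.py, whose added lines appear in the diff further down.

# Sketch only (not shipped in the wheel): run a bundled example programmatically.
# Assumes `pip install aip-agents-binary==0.5.20` and OPENAI_API_KEY in the environment.
import asyncio

from aip_agents.examples.hello_world_langgraph_stream import langgraph_stream_example

asyncio.run(langgraph_stream_example())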
@@ -0,0 +1,35 @@
+"""Example showing LangGraph agent with MCP tools integration.
+
+Authors:
+    Fachriza Dian Adhiatma (fachriza.d.adhiatma@gdplabs.id)
+    Putu Ravindra Wiguna (putu.r.wiguna@gdplabs.id)
+"""
+
+import asyncio
+
+from langchain_openai import ChatOpenAI
+
+from aip_agents.agent import LangGraphAgent
+from aip_agents.examples.mcp_configs.configs import mcp_config_sse
+
+
+async def main():
+    """Demonstrates the LangGraphAgent with MCP tools via SSE transport."""
+    langgraph_agent = LangGraphAgent(
+        name="langgraph_mcp_example",
+        instruction="""You are a helpful assistant that can provide weather forecasts.
+        For weather, specify the day in lowercase (e.g., 'monday').""",
+        model=ChatOpenAI(model="gpt-4.1", temperature=0),
+        tools=[],
+    )
+    langgraph_agent.add_mcp_server(mcp_config_sse)
+
+    query = "What's the weather forecast for monday?"  # Uses MCP weather tool
+
+    print(f"\nQuery: {query}")
+    response = await langgraph_agent.arun(query=query)
+    print(f"Response: {response['output']}")
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
@@ -0,0 +1,50 @@
+"""Example showing LangGraph agent with MCP tools integration and streaming capabilities.
+
+Authors:
+    Fachriza Dian Adhiatma (fachriza.d.adhiatma@gdplabs.id)
+    Putu Ravindra Wiguna (putu.r.wiguna@gdplabs.id)
+"""
+
+import asyncio
+
+from langchain_openai import ChatOpenAI
+
+from aip_agents.agent import LangGraphAgent
+from aip_agents.examples.mcp_configs.configs import mcp_config_sse
+
+
+async def main():
+    """Demonstrates the LangGraphAgent with MCP tools via SSE transport and streaming capabilities."""
+    langgraph_agent = LangGraphAgent(
+        name="langgraph_mcp_stream_example",
+        instruction="""You are a helpful assistant that can provide weather forecasts.
+        For weather, specify the day in lowercase (e.g., 'monday').""",
+        model=ChatOpenAI(model="gpt-4.1", temperature=0),
+        tools=[],
+    )
+    langgraph_agent.add_mcp_server(mcp_config_sse)
+
+    query = "What's the weather forecast for monday?"  # Uses MCP weather tool
+
+    stream_thread_id = "langgraph_mcp_stream_example"
+
+    print(f"\nQuery: {query}")
+    print("Streaming response:")
+
+    full_response = ""
+    async for chunk in langgraph_agent.arun_stream(
+        query=query, configurable={"configurable": {"thread_id": stream_thread_id}}
+    ):
+        if isinstance(chunk, str):
+            print(chunk, end="", flush=True)
+            full_response += chunk
+        elif isinstance(chunk, dict) and "messages" in chunk:
+            print("\n(Stream finished with final state object)")
+        elif isinstance(chunk, dict):
+            pass
+
+    print(f"\nFull response collected: {full_response}")
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
@@ -0,0 +1,5 @@
+from aip_agents.agent import LangGraphAgent as LangGraphAgent
+from aip_agents.examples.mcp_configs.configs import mcp_config_sse as mcp_config_sse
+
+async def main() -> None:
+    """Demonstrates the LangGraphAgent with MCP tools via SSE transport and streaming capabilities."""
@@ -0,0 +1,35 @@
+"""Example showing LangGraph agent with MCP tools integration using stdio transport.
+
+Authors:
+    Fachriza Dian Adhiatma (fachriza.d.adhiatma@gdplabs.id)
+    Putu Ravindra Wiguna (putu.r.wiguna@gdplabs.id)
+"""
+
+import asyncio
+
+from langchain_openai import ChatOpenAI
+
+from aip_agents.agent import LangGraphAgent
+from aip_agents.examples.mcp_configs.configs import mcp_config_stdio
+
+
+async def main():
+    """Demonstrates the LangGraphAgent with MCP tools via stdio transport."""
+    langgraph_agent = LangGraphAgent(
+        name="langgraph_mcp_example",
+        instruction="""You are a helpful assistant that can provide weather forecasts.
+        For weather, specify the day in lowercase (e.g., 'monday').""",
+        model=ChatOpenAI(model="gpt-4.1", temperature=0),
+        tools=[],
+    )
+    langgraph_agent.add_mcp_server(mcp_config_stdio)
+
+    query = "What's the weather forecast for monday?"  # Uses MCP weather tool
+
+    print(f"\nQuery: {query}")
+    response = await langgraph_agent.arun(query=query)
+    print(f"Response: {response['output']}")
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
@@ -0,0 +1,50 @@
+"""Example showing LangGraph agent with MCP tools integration and streaming capabilities using stdio transport.
+
+Authors:
+    Fachriza Dian Adhiatma (fachriza.d.adhiatma@gdplabs.id)
+    Putu Ravindra Wiguna (putu.r.wiguna@gdplabs.id)
+"""
+
+import asyncio
+
+from langchain_openai import ChatOpenAI
+
+from aip_agents.agent import LangGraphAgent
+from aip_agents.examples.mcp_configs.configs import mcp_config_stdio
+
+
+async def main():
+    """Demonstrates the LangGraphAgent with MCP tools via stdio transport and streaming."""
+    langgraph_agent = LangGraphAgent(
+        name="langgraph_mcp_stream_example",
+        instruction="""You are a helpful assistant that can provide weather forecasts.
+        For weather, specify the day in lowercase (e.g., 'monday').""",
+        model=ChatOpenAI(model="gpt-4.1", temperature=0),
+        tools=[],
+    )
+    langgraph_agent.add_mcp_server(mcp_config_stdio)
+
+    query = "What's the weather forecast for monday?"  # Uses MCP weather tool
+
+    stream_thread_id = "langgraph_mcp_stream_example"
+
+    print(f"\nQuery: {query}")
+    print("Streaming response:")
+
+    full_response = ""
+    async for chunk in langgraph_agent.arun_stream(
+        query=query, configurable={"configurable": {"thread_id": stream_thread_id}}
+    ):
+        if isinstance(chunk, str):
+            print(chunk, end="", flush=True)
+            full_response += chunk
+        elif isinstance(chunk, dict) and "messages" in chunk:
+            print("\n(Stream finished with final state object)")
+        elif isinstance(chunk, dict):
+            pass
+
+    print(f"\nFull response collected: {full_response}")
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
@@ -0,0 +1,5 @@
+from aip_agents.agent import LangGraphAgent as LangGraphAgent
+from aip_agents.examples.mcp_configs.configs import mcp_config_stdio as mcp_config_stdio
+
+async def main() -> None:
+    """Demonstrates the LangGraphAgent with MCP tools via stdio transport and streaming."""
@@ -0,0 +1,43 @@
+"""Minimal LangGraph agent example demonstrating streaming capabilities."""
+
+import asyncio
+
+from langchain_openai import ChatOpenAI
+
+from aip_agents.agent import LangGraphAgent
+from aip_agents.examples.tools.langchain_arithmetic_tools import add_numbers
+
+
+async def langgraph_stream_example():
+    """Demonstrates the LangGraphAgent's arun_stream method."""
+    model = ChatOpenAI(model="gpt-4.1", temperature=0)
+    tools = [add_numbers]
+    agent_name = "LangGraphArithmeticStreamAgent"
+
+    langgraph_agent = LangGraphAgent(
+        name=agent_name,
+        instruction="""You are a helpful assistant that can add two numbers using the add_numbers tool
+        and stream the results.""",
+        model=model,
+        tools=tools,
+    )
+
+    # Use the same query as in the non-streaming LangGraph example
+    query = "What is the sum of 23 and 47? And then add 10 to that, then add 5 more."
+    print(f"--- Agent: {agent_name} ---")
+    print(f"Query: {query}")
+
+    print("\nRunning arun_stream...")
+    stream_thread_id = "lgraph_arith_stream_example"
+    async for chunk in langgraph_agent.arun_stream(
+        query=query, configurable={"configurable": {"thread_id": stream_thread_id}}
+    ):
+        if isinstance(chunk, str):
+            print(chunk, end="", flush=True)  # AI message parts, print live
+
+    print("\n\n--- End of LangGraph Stream Example ---")
+
+
+if __name__ == "__main__":
+    # OPENAI_API_KEY should be set in the environment.
+    asyncio.run(langgraph_stream_example())
@@ -0,0 +1,37 @@
+"""Minimal LangChain agent example demonstrating streaming capabilities.
+
+Authors:
+    Christian Trisno Sen Long Chen (christian.t.s.l.chen@gdplabs.id)
+"""
+
+import asyncio
+
+import dotenv
+
+from aip_agents.agent import LangChainAgent
+from aip_agents.examples.tools.langchain_arithmetic_tools import add_numbers
+
+dotenv.load_dotenv(override=True)
+
+
+async def main():
+    """Demonstrates the LangChainAgent's arun_stream method with async execution."""
+    agent = LangChainAgent(
+        name="LangChainStreamingCalculator",
+        instruction="You are a helpful calculator assistant that can add numbers. "
+        "When asked to add numbers, use the add_numbers tool. "
+        "Explain your steps clearly for streaming demonstration.",
+        model="openai/gpt-4.1",
+        tools=[add_numbers],
+    )
+
+    # Stream the response chunks
+    async for chunk in agent.arun_stream(
+        query="What is the sum of 23 and 47? And then add 10 to that, then add 5 more."
+    ):
+        if isinstance(chunk, str):
+            print(chunk, end="", flush=True)
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
@@ -0,0 +1,210 @@
+"""Hello World Model Switch CLI.
+
+This example demonstrates the enhanced credential system for various LLM providers.
+
+New Auto-Detection Features:
+- Dictionary credentials (e.g., AWS Bedrock)
+- File path credentials (e.g., Google service account JSON)
+- API key strings (e.g., OpenAI, Google API keys)
+
+The system automatically detects credential types - no manual type specification needed!
+
+Setup Environment Variables:
+- BEDROCK_ACCESS_KEY_ID: Your AWS access key for Bedrock
+- BEDROCK_SECRET_ACCESS_KEY: Your AWS secret key for Bedrock
+- GOOGLE_VERTEX_AI_CREDENTIAL_PATH: Path to Google service account JSON file
+- DEEPINFRA_API_KEY: API key for DeepInfra models
+
+Authors:
+    Putu R Wiguna (putu.r.wiguna@gdplabs.id)
+    Christian Trisno Sen Long Chen (christian.t.s.l.chen@gdplabs.id)
+"""
+
+import os
+
+from dotenv import load_dotenv
+from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
+
+from aip_agents.agent import LangChainAgent
+from aip_agents.schema.agent import AgentConfig
+
+load_dotenv()
+
+# You can adjust these model ids to match your backend's supported models
+DEEPINFRA_URL = "https://api.deepinfra.com/v1/openai"
+MODEL_IDS = [
+    "openai/gpt-4.1",
+    "google/gemini-2.5-flash",
+    "bedrock/us.anthropic.claude-sonnet-4-20250514-v1:0",  # Amazon Bedrock example
+    "google/gemini-2.5-pro",  # Google Gemini with service account file
+    "openai-compatible/https://api.deepinfra.com/v1/openai:Qwen/Qwen3-30B-A3B",
+    "openai-compatible/https://api.deepinfra.com/v1/openai:deepseek-ai/DeepSeek-V3",
+    "openai-compatible/https://api.deepinfra.com/v1/openai:deepseek-ai/DeepSeek-R1-0528",
+    # our vllm
+    "openai-compatible/https://ai-agent-vllm.obrol.id/v1/:Qwen/Qwen3-32B-AWQ",  # ensure to turn on vllm-server
+    "openai-compatible/Qwen/Qwen3-30B-A3B",
+    # Azure OpenAI example
+    "azure-openai/https://glair-genai-benchmark.openai.azure.com:glair-benchmark-gpt-4o-mini",
+]
+
+MODEL_CONFIGS = {
+    # Amazon Bedrock - Dictionary credentials (auto-detected)
+    "bedrock/us.anthropic.claude-sonnet-4-20250514-v1:0": AgentConfig(
+        lm_credentials={
+            "access_key_id": os.getenv("BEDROCK_ACCESS_KEY_ID", "your-aws-access-key-id"),
+            "secret_access_key": os.getenv("BEDROCK_SECRET_ACCESS_KEY", "your-aws-secret-access-key"),
+        },
+        lm_hyperparameters={"temperature": 0.7, "maxTokens": 1000},
+    ),
+    # Google Gemini with service account file - File path (auto-detected)
+    "google/gemini-2.5-pro": AgentConfig(
+        lm_credentials=os.getenv("GOOGLE_VERTEX_AI_CREDENTIAL_PATH", "/path/to/service-account.json"),
+        lm_hyperparameters={"temperature": 0.7},
+    ),
+    # Legacy configurations using lm_api_key (still supported)
+    "openai-compatible/https://api.deepinfra.com/v1/openai:Qwen/Qwen3-30B-A3B": AgentConfig(
+        lm_base_url=DEEPINFRA_URL,
+        lm_api_key=os.getenv("DEEPINFRA_API_KEY"),  # Legacy field still works
+    ),
+    "openai-compatible/https://api.deepinfra.com/v1/openai:deepseek-ai/DeepSeek-V3": AgentConfig(
+        lm_base_url=DEEPINFRA_URL,
+        lm_api_key=os.getenv("DEEPINFRA_API_KEY"),
+    ),
+    "openai-compatible/https://api.deepinfra.com/v1/openai:deepseek-ai/DeepSeek-R1-0528": AgentConfig(
+        lm_base_url=DEEPINFRA_URL,
+        lm_api_key=os.getenv("DEEPINFRA_API_KEY"),
+    ),
+    "openai-compatible/https://ai-agent-vllm.obrol.id/v1/:Qwen/Qwen3-32B-AWQ": AgentConfig(
+        lm_base_url="https://ai-agent-vllm.obrol.id/v1/",
+        lm_hyperparameters={"temperature": 1.0},
+    ),
+    "openai-compatible/Qwen/Qwen3-30B-A3B": AgentConfig(
+        lm_base_url=DEEPINFRA_URL,
+        lm_api_key=os.getenv("DEEPINFRA_API_KEY"),
+    ),
+    "openai-compatible/Qwen/Qwen3-32B-AWQ": AgentConfig(
+        lm_base_url="https://ai-agent-vllm.obrol.id/v1/",
+        lm_hyperparameters={"temperature": 1.0},
+    ),
+    "openai/gpt-4.1": AgentConfig(
+        lm_hyperparameters={"temperature": 1.0},
+    ),
+    "azure-openai/https://glair-genai-benchmark.openai.azure.com:glair-benchmark-gpt-4o-mini": AgentConfig(
+        lm_base_url="https://glair-genai-benchmark.openai.azure.com",
+    ),
+}
+
+
+def make_agent(model_id):
+    """Makes an agent with the given model id.
+
+    Args:
+        model_id: The model identifier to use for the agent.
+
+    Returns:
+        LangChainAgent: The configured agent instance.
+    """
+    config = MODEL_CONFIGS.get(model_id)
+    agent = LangChainAgent(
+        name=f"HelloWorldAgent-{model_id}",
+        instruction="You are a helpful assistant.",
+        model=model_id,
+        config=config,
+    )
+    return agent
+
+
+def print_help():
+    """Prints available commands and their descriptions."""
+    help_text = (
+        "\nAvailable commands:\n"
+        "  /help    Show this help message\n"
+        "  /switch  Switch to another model\n"
+        "  /exit    Quit the chat\n"
+        "Type anything else to chat with the assistant.\n"
+    )
+    print(help_text)
+
+
+def handle_switch_model(current_model):
+    """Handles model switching. Returns (new_model, new_agent).
+
+    Args:
+        current_model: The currently active model identifier.
+
+    Returns:
+        tuple: (new_model, new_agent) where new_model is the selected model ID and new_agent is the configured agent instance.
+    """
+    print(f"Current model: {current_model}")
+    print("Available models:")
+    for idx, m in enumerate(MODEL_IDS):
+        print(f"  [{idx + 1}] {m}")
+    try:
+        selection = input("Select model number: ").strip()
+        idx = int(selection) - 1
+        if idx < 0 or idx >= len(MODEL_IDS):
+            print("Invalid selection. No switch performed.")
+            return current_model, make_agent(current_model)
+        new_model = MODEL_IDS[idx]
+    except Exception:
+        print("Invalid input. No switch performed.")
+        return current_model, make_agent(current_model)
+    print(f"Switched to model: {new_model}")
+    return new_model, make_agent(new_model)
+
+
+def main():
+    """Runs the Hello World Model Switch CLI."""
+    print("Welcome to the Hello World Model Switch CLI!")
+    print("🎯 Demonstrating Enhanced Credential Auto-Detection:")
+    print("   • AWS Bedrock with dictionary credentials")
+    print("   • Google Gemini with API key")
+    print("   • Google Gemini with service account file")
+    print("   • Legacy lm_api_key support still works!")
+    print()
+    print(f"Available models: {MODEL_IDS}")
+    current_model = MODEL_IDS[0]
+    agent = make_agent(current_model)
+    print(f"Loaded model: {current_model}")
+    print("Type your message, or type /switch to change model, or /exit to quit or /help to show available commands.")
+
+    # Conversation history as LangChain message objects
+    messages = [SystemMessage(content="You are a helpful assistant.")]
+
+    while True:
+        user_input = input("You: ").strip()
+        if user_input.lower() == "/help":
+            print_help()
+            continue
+        if user_input.lower() == "/exit":
+            print("Goodbye!")
+            break
+        elif user_input.lower() == "/switch":
+            current_model, agent = handle_switch_model(current_model)
+            continue
+        elif user_input == "":
+            continue
+        # Add user message to history
+        messages.append(HumanMessage(content=user_input))
+        # Run the agent synchronously with history
+        try:
+            response = agent.run(query=user_input, messages=messages[:-1])
+            # Extract AI message (should be last in response state)
+            output = response.get("output")
+            ai_message = None
+            # Try to find the returned messages in the response (standard pattern)
+            if "messages" in response and response["messages"]:
+                # Use the last message as the AI reply
+                ai_message = response["messages"][-1]
+            elif output:
+                ai_message = AIMessage(content=output)
+            else:
+                ai_message = AIMessage(content=str(response))
+            messages.append(ai_message)
+            print(f"Agent ({current_model}): {ai_message.content}")
+        except Exception as e:
+            print(f"Error: {e}")
+
+
+if __name__ == "__main__":
+    main()
@@ -0,0 +1,30 @@
+from _typeshed import Incomplete
+from aip_agents.agent import LangChainAgent as LangChainAgent
+from aip_agents.schema.agent import AgentConfig as AgentConfig
+
+DEEPINFRA_URL: str
+MODEL_IDS: Incomplete
+MODEL_CONFIGS: Incomplete
+
+def make_agent(model_id):
+    """Makes an agent with the given model id.
+
+    Args:
+        model_id: The model identifier to use for the agent.
+
+    Returns:
+        LangChainAgent: The configured agent instance.
+    """
+def print_help() -> None:
+    """Prints available commands and their descriptions."""
+def handle_switch_model(current_model):
+    """Handles model switching. Returns (new_model, new_agent).
+
+    Args:
+        current_model: The currently active model identifier.
+
+    Returns:
+        tuple: (new_model, new_agent) where new_model is the selected model ID and new_agent is the configured agent instance.
+    """
+def main() -> None:
+    """Runs the Hello World Model Switch CLI."""
@@ -0,0 +1,75 @@
+"""Multi-agent example using Google ADK with a coordinator agent.
+
+This example demonstrates a coordinator agent that can delegate tasks to specialized agents.
+"""
+
+import asyncio
+
+import nest_asyncio
+
+from aip_agents.agent.google_adk_agent import GoogleADKAgent
+from aip_agents.examples.tools.adk_arithmetic_tools import sum_numbers
+from aip_agents.examples.tools.adk_weather_tool import get_weather
+
+# Apply nest_asyncio to allow nested event loops
+nest_asyncio.apply()
+
+
+async def multi_agent_example():
+    """Demonstrates multi-agent coordination with GoogleADKAgent."""
+    # Create specialized agents
+    weather_agent = GoogleADKAgent(
+        name="WeatherAgent",
+        instruction=(
+            "You are a weather expert. You must use the weather_tool "
+            "to find weather information for a given city. "
+            "Always include the city name in your response."
+        ),
+        model="gemini-2.0-flash",
+        tools=[get_weather],  # Use the get_weather function directly
+        max_iterations=3,
+    )
+
+    math_agent = GoogleADKAgent(
+        name="MathAgent",
+        instruction=(
+            "You are a math expert. You must use the sum_numbers tool to perform addition. "
+            "The tool takes two integer arguments: 'a' and 'b'. "
+            "For example, to add 5 and 7, you would call sum_numbers(a=5, b=7). "
+            "Always state the numbers you're adding in your response."
+        ),
+        model="gemini-2.0-flash",
+        tools=[sum_numbers],
+        max_iterations=3,
+    )
+
+    # Create coordinator agent with access to specialized agents
+    coordinator_agent = GoogleADKAgent(
+        name="CoordinatorAgent",
+        instruction=(
+            "You are a helpful assistant that coordinates between specialized agents.\n"
+            "When asked about weather, delegate to WeatherAgent.\n"
+            "When asked to do math, delegate to MathAgent.\n"
+            "If asked multiple questions, break them down and handle each one separately.\n"
+            "Always be concise and helpful in your responses."
+        ),
+        model="gemini-2.0-flash",
+        agents=[weather_agent, math_agent],
+        max_iterations=3,
+    )
+
+    # Test weather query
+    weather_query = "What is the weather in Tokyo?"
+    print(f"\n--- Running query 1: {weather_query} ---")
+    weather_response = await coordinator_agent.arun(query=weather_query)
+    print(f"Weather Response: {weather_response.get('output')}")
+
+    # Test math query
+    math_query = "What is 5 + 7?"
+    print(f"\n--- Running query 2: {math_query} ---")
+    math_response = await coordinator_agent.arun(query=math_query)
+    print(f"Math Response: {math_response.get('output')}")
+
+
+if __name__ == "__main__":
+    asyncio.run(multi_agent_example())
@@ -0,0 +1,6 @@
+from aip_agents.agent.google_adk_agent import GoogleADKAgent as GoogleADKAgent
+from aip_agents.examples.tools.adk_arithmetic_tools import sum_numbers as sum_numbers
+from aip_agents.examples.tools.adk_weather_tool import get_weather as get_weather
+
+async def multi_agent_example() -> None:
+    """Demonstrates multi-agent coordination with GoogleADKAgent."""
@@ -0,0 +1,54 @@
+"""Example demonstrating a multi-agent setup with a Coordinator Agent.
+
+This example showcases:
+1. How to define multiple specialized agents (Weather Agent, Math Agent).
+2. How to set up a Coordinator Agent that can delegate to these specialized agents.
+3. How the Coordinator Agent uses dynamically created tools to call sub-agents.
+4. How the Coordinator Agent can delegate tasks to the appropriate sub-agents.
+
+Authors:
+    Raymond Christopher (raymond.christopher@gdplabs.id)
+    Putu Ravindra Wiguna (putu.r.wiguna@gdplabs.id)
+"""
+
+import asyncio
+
+from langchain_openai import ChatOpenAI
+
+from aip_agents.agent import LangChainAgent
+from aip_agents.examples.tools import weather_tool_langchain as weather_tool
+from aip_agents.examples.tools.langchain_arithmetic_tools import add_numbers
+
+
+async def main():
+    """Main function demonstrating the multi-agent setup with LangChainAgent."""
+    model = ChatOpenAI(model="gpt-4.1", temperature=0)
+
+    weather_agent = LangChainAgent(
+        name="Weather Agent",
+        instruction="You are a weather expert. You must use the get_weather tool to find weather information.",
+        model=model,
+        tools=[weather_tool],
+    )
+
+    math_agent = LangChainAgent(
+        name="Math Agent",
+        instruction="You are a math expert. Use the 'add_numbers' tool to add two numbers.",
+        model=model,
+        tools=[add_numbers],
+    )
+
+    coordinator_agent = LangChainAgent(
+        name="Coordinator Agent",
+        instruction="Delegate each query to suitable agent (Weather Agent or Math Agent) and combine their results.",
+        model=model,
+        agents=[weather_agent, math_agent],
+    )
+
+    query = "What is the weather in Tokyo and what is 5 + 7?"
+    response = await coordinator_agent.arun(query=query)
+    print(f"Response: {response.get('output')}")
+
+
+if __name__ == "__main__":
+    asyncio.run(main())