azure-ai-agentserver-langgraph 1.0.0b2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- azure_ai_agentserver_langgraph-1.0.0b2/CHANGELOG.md +19 -0
- azure_ai_agentserver_langgraph-1.0.0b2/LICENSE +21 -0
- azure_ai_agentserver_langgraph-1.0.0b2/MANIFEST.in +9 -0
- azure_ai_agentserver_langgraph-1.0.0b2/PKG-INFO +90 -0
- azure_ai_agentserver_langgraph-1.0.0b2/README.md +64 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure/__init__.py +1 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure/ai/__init__.py +1 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure/ai/agentserver/__init__.py +1 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure/ai/agentserver/langgraph/__init__.py +21 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure/ai/agentserver/langgraph/_version.py +9 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure/ai/agentserver/langgraph/langgraph.py +165 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure/ai/agentserver/langgraph/models/__init__.py +15 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure/ai/agentserver/langgraph/models/langgraph_request_converter.py +155 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure/ai/agentserver/langgraph/models/langgraph_response_converter.py +136 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure/ai/agentserver/langgraph/models/langgraph_state_converter.py +143 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure/ai/agentserver/langgraph/models/langgraph_stream_response_converter.py +74 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure/ai/agentserver/langgraph/models/response_event_generators/__init__.py +11 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure/ai/agentserver/langgraph/models/response_event_generators/item_content_helpers.py +61 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure/ai/agentserver/langgraph/models/response_event_generators/item_resource_helpers.py +114 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure/ai/agentserver/langgraph/models/response_event_generators/response_content_part_event_generator.py +154 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure/ai/agentserver/langgraph/models/response_event_generators/response_event_generator.py +92 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure/ai/agentserver/langgraph/models/response_event_generators/response_function_call_argument_event_generator.py +126 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure/ai/agentserver/langgraph/models/response_event_generators/response_output_item_event_generator.py +163 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure/ai/agentserver/langgraph/models/response_event_generators/response_output_text_event_generator.py +112 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure/ai/agentserver/langgraph/models/response_event_generators/response_stream_event_generator.py +138 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure/ai/agentserver/langgraph/models/utils.py +68 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure/ai/agentserver/langgraph/py.typed +0 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure_ai_agentserver_langgraph.egg-info/PKG-INFO +90 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure_ai_agentserver_langgraph.egg-info/SOURCES.txt +56 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure_ai_agentserver_langgraph.egg-info/dependency_links.txt +1 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure_ai_agentserver_langgraph.egg-info/requires.txt +6 -0
- azure_ai_agentserver_langgraph-1.0.0b2/azure_ai_agentserver_langgraph.egg-info/top_level.txt +1 -0
- azure_ai_agentserver_langgraph-1.0.0b2/pyproject.toml +70 -0
- azure_ai_agentserver_langgraph-1.0.0b2/samples/agent_calculator/README.md +84 -0
- azure_ai_agentserver_langgraph-1.0.0b2/samples/agent_calculator/langgraph_agent_calculator.py +142 -0
- azure_ai_agentserver_langgraph-1.0.0b2/samples/agentic_rag/__init__.py +0 -0
- azure_ai_agentserver_langgraph-1.0.0b2/samples/agentic_rag/edges/__init__.py +0 -0
- azure_ai_agentserver_langgraph-1.0.0b2/samples/agentic_rag/edges/grade_documents.py +52 -0
- azure_ai_agentserver_langgraph-1.0.0b2/samples/agentic_rag/nodes/__init__.py +0 -0
- azure_ai_agentserver_langgraph-1.0.0b2/samples/agentic_rag/nodes/generate_answer.py +27 -0
- azure_ai_agentserver_langgraph-1.0.0b2/samples/agentic_rag/nodes/generate_query_or_respond.py +25 -0
- azure_ai_agentserver_langgraph-1.0.0b2/samples/agentic_rag/nodes/rewrite_question.py +27 -0
- azure_ai_agentserver_langgraph-1.0.0b2/samples/agentic_rag/tools/__init__.py +0 -0
- azure_ai_agentserver_langgraph-1.0.0b2/samples/agentic_rag/tools/retriever_tool.py +42 -0
- azure_ai_agentserver_langgraph-1.0.0b2/samples/agentic_rag/workflow.py +48 -0
- azure_ai_agentserver_langgraph-1.0.0b2/samples/custom_state/README.md +51 -0
- azure_ai_agentserver_langgraph-1.0.0b2/samples/custom_state/main.py +293 -0
- azure_ai_agentserver_langgraph-1.0.0b2/samples/mcp_apikey/README.md +126 -0
- azure_ai_agentserver_langgraph-1.0.0b2/samples/mcp_apikey/mcp_apikey.py +66 -0
- azure_ai_agentserver_langgraph-1.0.0b2/samples/mcp_simple/README.md +121 -0
- azure_ai_agentserver_langgraph-1.0.0b2/samples/mcp_simple/mcp_simple.py +56 -0
- azure_ai_agentserver_langgraph-1.0.0b2/samples/simple_agent_with_redis_checkpointer/README.md +68 -0
- azure_ai_agentserver_langgraph-1.0.0b2/samples/simple_agent_with_redis_checkpointer/main.py +72 -0
- azure_ai_agentserver_langgraph-1.0.0b2/samples/simple_react_agent/main.py +53 -0
- azure_ai_agentserver_langgraph-1.0.0b2/setup.cfg +4 -0
- azure_ai_agentserver_langgraph-1.0.0b2/tests/__init__.py +1 -0
- azure_ai_agentserver_langgraph-1.0.0b2/tests/conftest.py +10 -0
- azure_ai_agentserver_langgraph-1.0.0b2/tests/unit_tests/test_langgraph_request_converter.py +121 -0
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
# Release History
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
## 1.0.0b2 (2025-11-10)
|
|
5
|
+
|
|
6
|
+
### Bugs Fixed
|
|
7
|
+
|
|
8
|
+
- Fixed Id generator format.
|
|
9
|
+
|
|
10
|
+
- Improved stream mode error message.
|
|
11
|
+
|
|
12
|
+
- Updated application insights related configuration environment variables.
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
## 1.0.0b1 (2025-11-07)
|
|
16
|
+
|
|
17
|
+
### Features Added
|
|
18
|
+
|
|
19
|
+
First version
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
Copyright (c) Microsoft Corporation.
|
|
2
|
+
|
|
3
|
+
MIT License
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
include *.md
|
|
2
|
+
include LICENSE
|
|
3
|
+
recursive-include tests *.py
|
|
4
|
+
recursive-include samples *.py *.md
|
|
5
|
+
recursive-include doc *.rst *.md
|
|
6
|
+
include azure/__init__.py
|
|
7
|
+
include azure/ai/__init__.py
|
|
8
|
+
include azure/ai/agentserver/__init__.py
|
|
9
|
+
include azure/ai/agentserver/langgraph/py.typed
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: azure-ai-agentserver-langgraph
|
|
3
|
+
Version: 1.0.0b2
|
|
4
|
+
Summary: LangGraph adapter for Azure AI Agent Server
|
|
5
|
+
Author-email: Microsoft Corporation <azpysdkhelp@microsoft.com>
|
|
6
|
+
License-Expression: MIT
|
|
7
|
+
Project-URL: repository, https://github.com/Azure/azure-sdk-for-python
|
|
8
|
+
Keywords: azure,azure sdk
|
|
9
|
+
Classifier: Programming Language :: Python
|
|
10
|
+
Classifier: Programming Language :: Python :: 3 :: Only
|
|
11
|
+
Classifier: Programming Language :: Python :: 3
|
|
12
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
13
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
14
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
15
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
16
|
+
Requires-Python: >=3.10
|
|
17
|
+
Description-Content-Type: text/markdown
|
|
18
|
+
License-File: LICENSE
|
|
19
|
+
Requires-Dist: azure-ai-agentserver-core
|
|
20
|
+
Requires-Dist: langchain>0.3.5
|
|
21
|
+
Requires-Dist: langchain-openai>0.3.10
|
|
22
|
+
Requires-Dist: langchain-azure-ai[opentelemetry]>=0.1.4
|
|
23
|
+
Requires-Dist: langgraph>0.5.0
|
|
24
|
+
Requires-Dist: opentelemetry-exporter-otlp-proto-http
|
|
25
|
+
Dynamic: license-file
|
|
26
|
+
|
|
27
|
+
# Azure AI Agent Server Adapter for LangGraph Python
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
## Getting started
|
|
31
|
+
|
|
32
|
+
```bash
|
|
33
|
+
pip install azure-ai-agentserver-langgraph
|
|
34
|
+
```
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
## Key concepts
|
|
38
|
+
|
|
39
|
+
Azure AI Agent Server wraps your LangGraph agent and hosts it in the cloud.
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
## Examples
|
|
43
|
+
|
|
44
|
+
```python
|
|
45
|
+
# your existing agent
|
|
46
|
+
from my_langgraph_agent import my_awesome_agent
|
|
47
|
+
|
|
48
|
+
# langgraph utils
|
|
49
|
+
from azure.ai.agentserver.langgraph import from_langgraph
|
|
50
|
+
|
|
51
|
+
if __name__ == "__main__":
|
|
52
|
+
# with this simple line, your agent will be hosted on http://localhost:8088
|
|
53
|
+
from_langgraph(my_awesome_agent).run()
|
|
54
|
+
|
|
55
|
+
```
|
|
56
|
+
|
|
57
|
+
**Note**
|
|
58
|
+
If your LangGraph agent does not use LangGraph's builtin [MessagesState](https://langchain-ai.github.io/langgraph/concepts/low_level/?h=messagesstate#messagesstate), you should implement your own `LanggraphStateConverter` and provide it to `from_langgraph`.
|
|
59
|
+
|
|
60
|
+
Reference this [example](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/agentserver/azure-ai-agentserver-langgraph/samples/custom_state/main.py) for more details.
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
## Troubleshooting
|
|
64
|
+
|
|
65
|
+
First run your agent with azure-ai-agentserver-langgraph locally.
|
|
66
|
+
|
|
67
|
+
If it works locally but fails in the cloud, check your logs in the Application Insights resource connected to your Azure AI Foundry Project.
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
## Next steps
|
|
71
|
+
|
|
72
|
+
Please visit [Samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/agentserver/azure-ai-agentserver-langgraph/samples) folder. There are several samples for you to build your agent with azure-ai-agentserver-* packages
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
## Contributing
|
|
76
|
+
|
|
77
|
+
This project welcomes contributions and suggestions. Most contributions require
|
|
78
|
+
you to agree to a Contributor License Agreement (CLA) declaring that you have
|
|
79
|
+
the right to, and actually do, grant us the rights to use your contribution.
|
|
80
|
+
For details, visit https://cla.microsoft.com.
|
|
81
|
+
|
|
82
|
+
When you submit a pull request, a CLA-bot will automatically determine whether
|
|
83
|
+
you need to provide a CLA and decorate the PR appropriately (e.g., label,
|
|
84
|
+
comment). Simply follow the instructions provided by the bot. You will only
|
|
85
|
+
need to do this once across all repos using our CLA.
|
|
86
|
+
|
|
87
|
+
This project has adopted the
|
|
88
|
+
[Microsoft Open Source Code of Conduct][code_of_conduct]. For more information,
|
|
89
|
+
see the Code of Conduct FAQ or contact opencode@microsoft.com with any
|
|
90
|
+
additional questions or comments.
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
# Azure AI Agent Server Adapter for LangGraph Python
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
## Getting started
|
|
5
|
+
|
|
6
|
+
```bash
|
|
7
|
+
pip install azure-ai-agentserver-langgraph
|
|
8
|
+
```
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
## Key concepts
|
|
12
|
+
|
|
13
|
+
Azure AI Agent Server wraps your LangGraph agent and hosts it in the cloud.
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
## Examples
|
|
17
|
+
|
|
18
|
+
```python
|
|
19
|
+
# your existing agent
|
|
20
|
+
from my_langgraph_agent import my_awesome_agent
|
|
21
|
+
|
|
22
|
+
# langgraph utils
|
|
23
|
+
from azure.ai.agentserver.langgraph import from_langgraph
|
|
24
|
+
|
|
25
|
+
if __name__ == "__main__":
|
|
26
|
+
# with this simple line, your agent will be hosted on http://localhost:8088
|
|
27
|
+
from_langgraph(my_awesome_agent).run()
|
|
28
|
+
|
|
29
|
+
```
|
|
30
|
+
|
|
31
|
+
**Note**
|
|
32
|
+
If your LangGraph agent does not use LangGraph's builtin [MessagesState](https://langchain-ai.github.io/langgraph/concepts/low_level/?h=messagesstate#messagesstate), you should implement your own `LanggraphStateConverter` and provide it to `from_langgraph`.
|
|
33
|
+
|
|
34
|
+
Reference this [example](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/agentserver/azure-ai-agentserver-langgraph/samples/custom_state/main.py) for more details.
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
## Troubleshooting
|
|
38
|
+
|
|
39
|
+
First run your agent with azure-ai-agentserver-langgraph locally.
|
|
40
|
+
|
|
41
|
+
If it works locally but fails in the cloud, check your logs in the Application Insights resource connected to your Azure AI Foundry Project.
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
## Next steps
|
|
45
|
+
|
|
46
|
+
Please visit [Samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/agentserver/azure-ai-agentserver-langgraph/samples) folder. There are several samples for you to build your agent with azure-ai-agentserver-* packages
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
## Contributing
|
|
50
|
+
|
|
51
|
+
This project welcomes contributions and suggestions. Most contributions require
|
|
52
|
+
you to agree to a Contributor License Agreement (CLA) declaring that you have
|
|
53
|
+
the right to, and actually do, grant us the rights to use your contribution.
|
|
54
|
+
For details, visit https://cla.microsoft.com.
|
|
55
|
+
|
|
56
|
+
When you submit a pull request, a CLA-bot will automatically determine whether
|
|
57
|
+
you need to provide a CLA and decorate the PR appropriately (e.g., label,
|
|
58
|
+
comment). Simply follow the instructions provided by the bot. You will only
|
|
59
|
+
need to do this once across all repos using our CLA.
|
|
60
|
+
|
|
61
|
+
This project has adopted the
|
|
62
|
+
[Microsoft Open Source Code of Conduct][code_of_conduct]. For more information,
|
|
63
|
+
see the Code of Conduct FAQ or contact opencode@microsoft.com with any
|
|
64
|
+
additional questions or comments.
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
# ---------------------------------------------------------
|
|
2
|
+
# Copyright (c) Microsoft Corporation. All rights reserved.
|
|
3
|
+
# ---------------------------------------------------------
|
|
4
|
+
__path__ = __import__("pkgutil").extend_path(__path__, __name__)
|
|
5
|
+
|
|
6
|
+
from typing import TYPE_CHECKING, Optional
|
|
7
|
+
|
|
8
|
+
from ._version import VERSION
|
|
9
|
+
|
|
10
|
+
if TYPE_CHECKING: # pragma: no cover
|
|
11
|
+
from . import models
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def from_langgraph(agent, state_converter: Optional["models.LanggraphStateConverter"] = None):
    """Wrap a compiled LangGraph graph in a :class:`LangGraphAdapter` server adapter.

    :param agent: The compiled LangGraph graph (``CompiledStateGraph``) to host.
    :param state_converter: Optional custom converter between agent-server
        requests/responses and the graph's state. Required when the graph does
        not use LangGraph's builtin ``MessagesState``.
    :type state_converter: Optional[models.LanggraphStateConverter]
    :return: An adapter whose ``run()`` hosts the agent.
    """
    # Imported lazily so importing this package stays cheap and avoids
    # pulling in langgraph/langchain until an adapter is actually created.
    from .langgraph import LangGraphAdapter

    return LangGraphAdapter(agent, state_converter=state_converter)
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
__all__ = ["from_langgraph"]
|
|
21
|
+
__version__ = VERSION
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
# coding=utf-8
|
|
2
|
+
# --------------------------------------------------------------------------
|
|
3
|
+
# Copyright (c) Microsoft Corporation. All rights reserved.
|
|
4
|
+
# Licensed under the MIT License. See License.txt in the project root for license information.
|
|
5
|
+
# Code generated by Microsoft (R) Python Code Generator.
|
|
6
|
+
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
|
|
7
|
+
# --------------------------------------------------------------------------
|
|
8
|
+
|
|
9
|
+
VERSION = "1.0.0b2"
|
|
@@ -0,0 +1,165 @@
|
|
|
1
|
+
# ---------------------------------------------------------
|
|
2
|
+
# Copyright (c) Microsoft Corporation. All rights reserved.
|
|
3
|
+
# ---------------------------------------------------------
|
|
4
|
+
# pylint: disable=logging-fstring-interpolation,broad-exception-caught
|
|
5
|
+
# mypy: disable-error-code="assignment,arg-type"
|
|
6
|
+
import os
|
|
7
|
+
import re
|
|
8
|
+
from typing import Optional
|
|
9
|
+
|
|
10
|
+
from langchain_core.runnables import RunnableConfig
|
|
11
|
+
from langgraph.graph.state import CompiledStateGraph
|
|
12
|
+
|
|
13
|
+
from azure.ai.agentserver.core.constants import Constants
|
|
14
|
+
from azure.ai.agentserver.core.logger import get_logger
|
|
15
|
+
from azure.ai.agentserver.core.server.base import FoundryCBAgent
|
|
16
|
+
from azure.ai.agentserver.core.server.common.agent_run_context import AgentRunContext
|
|
17
|
+
|
|
18
|
+
from .models import (
|
|
19
|
+
LanggraphMessageStateConverter,
|
|
20
|
+
LanggraphStateConverter,
|
|
21
|
+
)
|
|
22
|
+
from .models.utils import is_state_schema_valid
|
|
23
|
+
|
|
24
|
+
logger = get_logger()
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class LangGraphAdapter(FoundryCBAgent):
    """
    Adapter for LangGraph Agent.

    Bridges a compiled LangGraph ``CompiledStateGraph`` to the Foundry
    agent-server contract: converts incoming requests into graph state,
    invokes the graph (streaming or not), and converts results back.
    """

    def __init__(self, graph: CompiledStateGraph, state_converter: Optional[LanggraphStateConverter] = None):
        """
        Initialize the LangGraphAdapter with a CompiledStateGraph.

        :param graph: The LangGraph StateGraph to adapt.
        :type graph: CompiledStateGraph
        :param state_converter: custom state converter. Required if graph state is not MessagesState.
        :type state_converter: Optional[LanggraphStateConverter]

        :raises ValueError: If no converter is supplied and the graph's state
            schema is not compatible with the default MessagesState converter.
        """
        super().__init__()
        self.graph = graph
        # Populated later by init_tracing_internal when App Insights is configured.
        self.azure_ai_tracer = None
        if not state_converter:
            # Fall back to the MessagesState converter only when the graph's
            # schema actually supports it; otherwise fail fast at construction.
            if is_state_schema_valid(self.graph.builder.state_schema):
                self.state_converter = LanggraphMessageStateConverter()
            else:
                raise ValueError("state_converter is required for non-MessagesState graph.")
        else:
            self.state_converter = state_converter

    async def agent_run(self, context: AgentRunContext):
        """
        Entry point for a single agent run; dispatches to the streaming or
        non-streaming path based on the request.

        :param context: The context for the agent run.
        :type context: AgentRunContext
        :return: A response dict (non-streaming) or an async generator of
            stream events (streaming).
        """
        input_data = self.state_converter.request_to_state(context)
        logger.debug(f"Converted input data: {input_data}")
        if not context.stream:
            response = await self.agent_run_non_stream(input_data, context)
            return response
        # Note: not awaited — returns the async generator for the caller to iterate.
        return self.agent_run_astream(input_data, context)

    def init_tracing_internal(self, exporter_endpoint=None, app_insights_conn_str=None):
        """
        Configure tracing: enable LangSmith OTel passthrough and, when an
        Application Insights connection string is available, attach the
        Azure AI OpenTelemetry tracer callback.

        :param exporter_endpoint: OTLP exporter endpoint (unused here; handled
            by the base class via setup_otlp_exporter).
        :param app_insights_conn_str: Application Insights connection string.
        """
        # set env vars for langsmith
        os.environ["LANGSMITH_OTEL_ENABLED"] = "true"
        os.environ["LANGSMITH_TRACING"] = "true"
        os.environ["LANGSMITH_OTEL_ONLY"] = "true"
        if app_insights_conn_str:
            # setup azure ai telemetry callbacks
            try:
                from langchain_azure_ai.callbacks.tracers import AzureAIOpenTelemetryTracer

                self.azure_ai_tracer = AzureAIOpenTelemetryTracer(
                    connection_string=app_insights_conn_str,
                    enable_content_recording=True,
                    name=self.get_agent_identifier(),
                )
                logger.info("AzureAIOpenTelemetryTracer initialized successfully.")
            except Exception as e:
                # Tracing is best-effort: a missing/broken optional dependency
                # must not prevent the agent from serving requests.
                logger.error(f"Failed to import AzureAIOpenTelemetryTracer, ignore: {e}")

    def setup_otlp_exporter(self, endpoint, provider):
        """Normalize the endpoint to the OTLP HTTP traces path before delegating."""
        endpoint = self.format_otlp_endpoint(endpoint)
        return super().setup_otlp_exporter(endpoint, provider)

    def get_trace_attributes(self):
        """Extend base trace attributes with this adapter's service namespace."""
        attrs = super().get_trace_attributes()
        attrs["service.namespace"] = "azure.ai.agentserver.langgraph"
        return attrs

    async def agent_run_non_stream(self, input_data: dict, context: AgentRunContext):
        """
        Run the agent with non-streaming response.

        :param input_data: The input data to run the agent with.
        :type input_data: dict
        :param context: The context for the agent run.
        :type context: AgentRunContext

        :return: The response of the agent run.
        :rtype: dict
        """

        try:
            config = self.create_runnable_config(context)
            stream_mode = self.state_converter.get_stream_mode(context)
            result = await self.graph.ainvoke(input_data, config=config, stream_mode=stream_mode)
            output = self.state_converter.state_to_response(result, context)
            return output
        except Exception as e:
            logger.error(f"Error during agent run: {e}")
            # Bare raise preserves the original traceback for upstream handlers.
            raise

    async def agent_run_astream(self, input_data: dict, context: AgentRunContext):
        """
        Run the agent with streaming response.

        :param input_data: The input data to run the agent with.
        :type input_data: dict
        :param context: The context for the agent run.
        :type context: AgentRunContext

        :return: An async generator yielding the response stream events.
        :rtype: AsyncGenerator[dict]
        """
        try:
            logger.info(f"Starting streaming agent run {context.response_id}")
            config = self.create_runnable_config(context)
            stream_mode = self.state_converter.get_stream_mode(context)
            stream = self.graph.astream(input=input_data, config=config, stream_mode=stream_mode)
            async for result in self.state_converter.state_to_response_stream(stream, context):
                yield result
        except Exception as e:
            logger.error(f"Error during streaming agent run: {e}")
            # Bare raise preserves the original traceback for upstream handlers.
            raise

    def create_runnable_config(self, context: AgentRunContext) -> RunnableConfig:
        """
        Create a RunnableConfig from the converted request data.

        :param context: The context for the agent run.
        :type context: AgentRunContext

        :return: The RunnableConfig for the agent run.
        :rtype: RunnableConfig
        """
        config = RunnableConfig(
            configurable={
                # conversation_id keys the checkpointer's thread state.
                "thread_id": context.conversation_id,
            },
            callbacks=[self.azure_ai_tracer] if self.azure_ai_tracer else None,
        )
        return config

    def format_otlp_endpoint(self, endpoint: str) -> str:
        """
        Reduce an endpoint URL to scheme+authority and append the OTLP HTTP
        traces path (``/v1/traces``). Returned unchanged if not an http(s) URL.

        :param endpoint: The raw exporter endpoint.
        :return: The normalized OTLP traces endpoint.
        :rtype: str
        """
        m = re.match(r"^(https?://[^/]+)", endpoint)
        if m:
            return f"{m.group(1)}/v1/traces"
        return endpoint

    def get_agent_identifier(self) -> str:
        """
        Resolve this agent's identifier for telemetry: the AGENT_NAME env var,
        falling back to AGENT_ID, then a fixed default.

        :return: The agent identifier.
        :rtype: str
        """
        agent_name = os.getenv(Constants.AGENT_NAME)
        if agent_name:
            return agent_name
        agent_id = os.getenv(Constants.AGENT_ID)
        if agent_id:
            return agent_id
        return "HostedAgent-LangGraph"
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
# ---------------------------------------------------------
|
|
2
|
+
# Copyright (c) Microsoft Corporation. All rights reserved.
|
|
3
|
+
# ---------------------------------------------------------
|
|
4
|
+
from .langgraph_request_converter import LangGraphRequestConverter
|
|
5
|
+
from .langgraph_response_converter import LangGraphResponseConverter
|
|
6
|
+
from .langgraph_state_converter import LanggraphMessageStateConverter, LanggraphStateConverter
|
|
7
|
+
from .langgraph_stream_response_converter import LangGraphStreamResponseConverter
|
|
8
|
+
|
|
9
|
+
__all__ = [
|
|
10
|
+
"LangGraphRequestConverter",
|
|
11
|
+
"LangGraphResponseConverter",
|
|
12
|
+
"LangGraphStreamResponseConverter",
|
|
13
|
+
"LanggraphStateConverter",
|
|
14
|
+
"LanggraphMessageStateConverter",
|
|
15
|
+
]
|
|
@@ -0,0 +1,155 @@
|
|
|
1
|
+
# ---------------------------------------------------------
|
|
2
|
+
# Copyright (c) Microsoft Corporation. All rights reserved.
|
|
3
|
+
# ---------------------------------------------------------
|
|
4
|
+
# pylint: disable=logging-fstring-interpolation
|
|
5
|
+
# mypy: ignore-errors
|
|
6
|
+
import json
|
|
7
|
+
from typing import Dict, List
|
|
8
|
+
|
|
9
|
+
from langchain_core.messages import (
|
|
10
|
+
AIMessage,
|
|
11
|
+
AnyMessage,
|
|
12
|
+
HumanMessage,
|
|
13
|
+
SystemMessage,
|
|
14
|
+
ToolMessage,
|
|
15
|
+
)
|
|
16
|
+
from langchain_core.messages.tool import ToolCall
|
|
17
|
+
|
|
18
|
+
from azure.ai.agentserver.core.logger import get_logger
|
|
19
|
+
from azure.ai.agentserver.core.models import CreateResponse, openai as openai_models, projects as project_models
|
|
20
|
+
|
|
21
|
+
logger = get_logger()
|
|
22
|
+
|
|
23
|
+
# Maps agent-server message roles to the langchain message class used to
# represent them in LangGraph state. Tool-role messages are handled
# separately via function_call_output items, so no TOOL entry here.
role_mapping = {
    project_models.ResponsesMessageRole.USER: HumanMessage,
    project_models.ResponsesMessageRole.SYSTEM: SystemMessage,
    project_models.ResponsesMessageRole.ASSISTANT: AIMessage,
}

# Maps agent-server item-content type tags to langchain content-part type
# tags. Input and output variants of the same modality collapse to one tag
# (e.g. both *_TEXT become "text"). Types without an entry pass through
# unchanged (see convert_OpenAIItemContent's .get fallback).
item_content_type_mapping = {
    project_models.ItemContentType.INPUT_TEXT: "text",
    project_models.ItemContentType.INPUT_AUDIO: "audio",
    project_models.ItemContentType.INPUT_IMAGE: "image",
    project_models.ItemContentType.INPUT_FILE: "file",
    project_models.ItemContentType.OUTPUT_TEXT: "text",
    project_models.ItemContentType.OUTPUT_AUDIO: "audio",
    # project_models.ItemContentType.REFUSAL: "refusal",
}
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
class LangGraphRequestConverter:
    """Converts an agent-server ``CreateResponse`` request into LangGraph input
    (a ``{"messages": [...]}`` dict of langchain messages)."""

    def __init__(self, data: CreateResponse):
        """
        :param data: The incoming request payload to convert.
        :type data: CreateResponse
        """
        self.data: CreateResponse = data

    def convert(self) -> dict:
        """
        Convert the request into LangGraph input.

        :return: A dict with a ``messages`` list of langchain messages.
        :rtype: dict
        :raises ValueError: If the request input is neither a string nor a list.
        """
        # Convert the CreateRunRequest input to a format suitable for LangGraph
        langgraph_input = {"messages": []}

        instructions = self.data.get("instructions")
        if instructions and isinstance(instructions, str):
            # Instructions become a leading system message.
            langgraph_input["messages"].append(SystemMessage(content=instructions))

        # Renamed from `input` to avoid shadowing the builtin.
        input_data = self.data.get("input")
        if isinstance(input_data, str):
            langgraph_input["messages"].append(HumanMessage(content=input_data))
        elif isinstance(input_data, list):
            for inner in input_data:
                message = self.convert_input(inner)
                langgraph_input["messages"].append(message)
        else:
            raise ValueError(f"Unsupported input type: {type(input_data)}, {input_data}")
        return langgraph_input

    def convert_input(self, item: openai_models.ResponseInputItemParam) -> AnyMessage:
        """
        Convert ResponseInputItemParam to a LangGraph message

        :param item: The ResponseInputItemParam to convert from request.
        :type item: openai_models.ResponseInputItemParam

        :return: The converted LangGraph message.
        :rtype: AnyMessage
        :raises ValueError: If the item type is not message/function_call/function_call_output.
        """
        item_type = item.get("type", project_models.ItemType.MESSAGE)
        if item_type == project_models.ItemType.MESSAGE:
            # this is a message
            return self.convert_message(item)
        if item_type == project_models.ItemType.FUNCTION_CALL:
            return self.convert_function_call(item)
        if item_type == project_models.ItemType.FUNCTION_CALL_OUTPUT:
            return self.convert_function_call_output(item)
        raise ValueError(f"Unsupported OpenAIItemParam type: {item_type}, {item}")

    def convert_message(self, message: dict) -> AnyMessage:
        """
        Convert a message dict to a LangGraph message

        :param message: The message dict to convert.
        :type message: dict

        :return: The converted LangGraph message.
        :rtype: AnyMessage
        :raises ValueError: If the message has no content or an unsupported content type.
        """
        content = message.get("content")
        role = message.get("role", project_models.ResponsesMessageRole.USER)
        if not content:
            raise ValueError(f"Message missing content: {message}")
        if isinstance(content, str):
            return role_mapping[role](content=content)
        if isinstance(content, list):
            return role_mapping[role](content=self.convert_OpenAIItemContentList(content))
        raise ValueError(f"Unsupported ResponseMessagesItemParam content type: {type(content)}, {content}")

    def convert_function_call(self, item: dict) -> AnyMessage:
        """
        Convert a function_call item into an AIMessage carrying a ToolCall.

        :param item: The function call item dict.
        :type item: dict
        :return: An AIMessage with a single tool call and empty content.
        :rtype: AnyMessage
        :raises ValueError: If the item is malformed or its arguments are not valid JSON.
        """
        try:
            item = openai_models.ResponseFunctionToolCallParam(**item)
            argument = item.get("arguments", None)
            # Arguments arrive as a JSON string; tool calls need a dict.
            args = json.loads(argument) if argument else {}
        except json.JSONDecodeError as e:
            raise ValueError(f"Invalid JSON in function call arguments: {item}") from e
        except Exception as e:
            raise ValueError(f"Invalid function call item: {item}") from e
        return AIMessage(tool_calls=[ToolCall(id=item.get("call_id"), name=item.get("name"), args=args)], content="")

    def convert_function_call_output(self, item: dict) -> ToolMessage:
        """
        Convert a function_call_output item into a ToolMessage.

        :param item: The function call output item dict.
        :type item: dict
        :return: The converted ToolMessage, keyed to the originating call id.
        :rtype: ToolMessage
        :raises ValueError: If the item is malformed or its output type is unsupported.
        """
        try:
            item = openai_models.response_input_item_param.FunctionCallOutput(**item)  # pylint: disable=no-member
        except Exception as e:
            raise ValueError(f"Invalid function call output item: {item}") from e

        output = item.get("output", None)
        if isinstance(output, str):
            return ToolMessage(content=output, tool_call_id=item.get("call_id"))
        if isinstance(output, list):
            return ToolMessage(content=self.convert_OpenAIItemContentList(output), tool_call_id=item.get("call_id"))
        raise ValueError(f"Unsupported function call output type: {type(output)}, {output}")

    def convert_OpenAIItemContentList(self, content: List[Dict]) -> List[Dict]:
        """
        Convert ItemContent to a list format

        :param content: The list of ItemContent to convert.
        :type content: List[Dict]

        :return: The converted list of ItemContent.
        :rtype: List[Dict]
        """
        return [self.convert_OpenAIItemContent(item) for item in content]

    def convert_OpenAIItemContent(self, content: Dict) -> Dict:
        """
        Convert ItemContent to a dict format

        :param content: The ItemContent to convert.
        :type content: Dict

        :return: The converted ItemContent.
        :rtype: Dict
        """
        # Shallow copy so the caller's dict is not mutated; unknown types
        # fall through unchanged via the .get default.
        res = content.copy()
        content_type = content.get("type")
        res["type"] = item_content_type_mapping.get(content_type, content_type)
        return res
|