langgraph-api 0.0.31__tar.gz → 0.0.33__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of langgraph-api has been flagged as potentially problematic.
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/PKG-INFO +2 -2
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/api/__init__.py +6 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/api/assistants.py +7 -1
- langgraph_api-0.0.33/langgraph_api/api/mcp.py +467 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/api/threads.py +10 -1
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/asyncio.py +21 -2
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/cli.py +9 -7
- langgraph_api-0.0.33/langgraph_api/command.py +29 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/config.py +5 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/graph.py +24 -8
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/remote.py +72 -51
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/lifespan.py +10 -1
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/logging.py +11 -10
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/metadata.py +1 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/models/run.py +11 -1
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/stream.py +5 -30
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/validation.py +13 -1
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/worker.py +2 -2
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_storage/ops.py +75 -1
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/openapi.json +102 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/pyproject.toml +3 -3
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/LICENSE +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/README.md +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/__init__.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/api/meta.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/api/openapi.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/api/runs.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/api/store.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/api/ui.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/auth/__init__.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/auth/custom.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/auth/langsmith/__init__.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/auth/langsmith/backend.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/auth/langsmith/client.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/auth/middleware.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/auth/noop.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/auth/studio_user.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/cron_scheduler.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/errors.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/http.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/.gitignore +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/base.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/build.mts +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/client.mts +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/errors.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/global.d.ts +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/package.json +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/schema.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/src/graph.mts +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/src/hooks.mjs +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/src/parser/parser.mts +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/src/parser/parser.worker.mjs +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/src/schema/types.mts +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/src/schema/types.template.mts +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/src/utils/importMap.mts +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/src/utils/pythonSchemas.mts +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/src/utils/serde.mts +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/sse.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/tests/api.test.mts +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/tests/compose-postgres.yml +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/tests/graphs/.gitignore +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/tests/graphs/agent.css +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/tests/graphs/agent.mts +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/tests/graphs/agent.ui.tsx +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/tests/graphs/delay.mts +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/tests/graphs/error.mts +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/tests/graphs/langgraph.json +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/tests/graphs/nested.mts +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/tests/graphs/package.json +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/tests/graphs/weather.mts +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/tests/graphs/yarn.lock +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/tests/parser.test.mts +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/tests/utils.mts +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/js/yarn.lock +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/middleware/__init__.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/middleware/http_logger.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/middleware/private_network.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/models/__init__.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/patch.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/queue_entrypoint.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/route.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/schema.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/serde.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/server.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/sse.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/state.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/utils.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/webhook.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_license/__init__.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_license/middleware.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_license/validation.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_storage/__init__.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_storage/checkpoint.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_storage/database.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_storage/inmem_stream.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_storage/queue.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_storage/retry.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_storage/store.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_storage/ttl_dict.py +0 -0
- {langgraph_api-0.0.31 → langgraph_api-0.0.33}/logging.json +0 -0
{langgraph_api-0.0.31 → langgraph_api-0.0.33}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: langgraph-api
-Version: 0.0.31
+Version: 0.0.33
 Summary:
 License: Elastic-2.0
 Author: Nuno Campos
@@ -16,7 +16,7 @@ Requires-Dist: jsonschema-rs (>=0.20.0,<0.30)
 Requires-Dist: langchain-core (>=0.2.38,<0.4.0)
 Requires-Dist: langgraph (>=0.2.56,<0.4.0)
 Requires-Dist: langgraph-checkpoint (>=2.0.21,<3.0)
-Requires-Dist: langgraph-sdk (>=0.1.
+Requires-Dist: langgraph-sdk (>=0.1.58,<0.2.0)
 Requires-Dist: langsmith (>=0.1.63,<0.4.0)
 Requires-Dist: orjson (>=3.9.7)
 Requires-Dist: pyjwt (>=2.9.0,<3.0.0)
{langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/api/__init__.py

@@ -10,6 +10,7 @@ from starlette.responses import HTMLResponse, JSONResponse, Response
 from starlette.routing import BaseRoute, Mount, Route

 from langgraph_api.api.assistants import assistants_routes
+from langgraph_api.api.mcp import mcp_routes
 from langgraph_api.api.meta import meta_info, meta_metrics
 from langgraph_api.api.openapi import get_openapi_spec
 from langgraph_api.api.runs import runs_routes
@@ -66,6 +67,11 @@ if HTTP_CONFIG:
         protected_routes.extend(store_routes)
     if not HTTP_CONFIG.get("disable_ui"):
         protected_routes.extend(ui_routes)
+    # Default for disabling MCP. Until we can verify that the protocol is working
+    # correctly. This is dependent on the release of an official MCP client
+    # implementation.
+    if not HTTP_CONFIG.get("disable_mcp", True):
+        protected_routes.extend(mcp_routes)
 else:
     protected_routes.extend(assistants_routes)
     protected_routes.extend(runs_routes)
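Note (not part of the diff): because the lookup is HTTP_CONFIG.get("disable_mcp", True), the new /mcp routes are only mounted when the config explicitly sets disable_mcp to False. A rough probe for a locally running server, assuming the common dev address http://localhost:2024 and no custom auth middleware in front of the API:

# Sketch only: checks whether the /mcp routes from this release are mounted.
import httpx

resp = httpx.get("http://localhost:2024/mcp")
if resp.status_code == 404:
    # Route not registered: disable_mcp was left at its default of True.
    print("MCP routes are not mounted")
elif resp.status_code == 405:
    # handle_get_request() in langgraph_api/api/mcp.py returns 405 for GET,
    # so a 405 means the routes are mounted; clients should POST JSON-RPC.
    print("MCP routes are mounted")
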
{langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/api/assistants.py

@@ -87,7 +87,13 @@ def _graph_schemas(graph: Pregel) -> dict:
             f"Failed to get output schema for graph {graph.name} with error: `{str(e)}`"
         )
         output_schema = None
-
+    try:
+        state_schema = _state_jsonschema(graph)
+    except Exception as e:
+        logger.warning(
+            f"Failed to get state schema for graph {graph.name} with error: `{str(e)}`"
+        )
+        state_schema = None
     try:
         config_schema = _get_configurable_jsonschema(graph)
     except Exception as e:
langgraph_api-0.0.33/langgraph_api/api/mcp.py

@@ -0,0 +1,467 @@
+"""Implement MCP endpoint for Streamable HTTP protocol.
+
+The current version of the RFC can be found here:
+
+https://github.com/modelcontextprotocol/specification/blob/0f4924b07447073cbe1e29fbe64e42d379b52b04/docs/specification/draft/basic/transports.md#streamable-http
+
+Tools specification:
+
+https://github.com/modelcontextprotocol/specification/blob/0f4924b07447073cbe1e29fbe64e42d379b52b04/docs/specification/draft/server/tools.md
+
+Message format:
+
+https://github.com/modelcontextprotocol/specification/blob/0f4924b07447073cbe1e29fbe64e42d379b52b04/docs/specification/draft/basic/messages.md
+
+Error handling with tools:
+
+https://github.com/modelcontextprotocol/specification/blob/0f4924b07447073cbe1e29fbe64e42d379b52b04/docs/specification/draft/server/tools.md#error-handling
+
+Streamable HTTP is a protocol that allows for the use of HTTP as transport.
+
+The protocol supports both stateless and stateful interactions, and allows
+the server to respond via either Application/JSON or text/event-stream.
+
+LangGraph's implementation is currently stateless and only uses Application/JSON.
+
+1. Adding stateful sessions: A stateful session would in theory allow agents used
+   as tools to remember past interactions. We likely do not want to map a session
+   to a thread ID as a single session may involve more than one tool call.
+   We would need to map a session to a collection of threads.
+
+2. text/event-stream (SSE): Should be simple to add we'd want to make sure
+   we know what information we want to stream; e.g., progress notifications or
+   custom notifications.
+
+In addition, the server could support resumability by allowing clients to specify
+a Last-Event-ID in the request headers.
+"""
+
+import functools
+import json
+from typing import Any, NotRequired, cast
+
+from langgraph_sdk.client import LangGraphClient, get_client
+from starlette.responses import JSONResponse, Response
+from structlog import getLogger
+from typing_extensions import TypedDict
+
+from langgraph_api.route import ApiRequest, ApiRoute
+
+logger = getLogger(__name__)
+
+
+class JsonRpcErrorObject(TypedDict):
+    code: int
+    message: str
+    data: NotRequired[Any]
+
+
+class JsonRpcRequest(TypedDict):
+    jsonrpc: str  # Must be "2.0"
+    id: str | int
+    method: str
+    params: NotRequired[dict[str, Any]]
+
+
+class JsonRpcResponse(TypedDict):
+    jsonrpc: str  # Must be "2.0"
+    id: str | int
+    result: NotRequired[dict[str, Any]]
+    error: NotRequired[JsonRpcErrorObject]
+
+
+class JsonRpcNotification(TypedDict):
+    jsonrpc: str  # Must be "2.0"
+    method: str
+    params: NotRequired[dict[str, Any]]
+
+
+@functools.lru_cache(maxsize=1)
+def _client() -> LangGraphClient:
+    """Get a client for local operations."""
+    return get_client(url=None)
+
+
+# Workaround assistant name not exposed in the Assistants.search API
+MAX_ASSISTANTS = 1000
+DEFAULT_PAGE_SIZE = 100
+
+# JSON-RPC error codes: https://www.jsonrpc.org/specification#error_object
+ERROR_CODE_INVALID_PARAMS = -32602
+ERROR_CODE_METHOD_NOT_FOUND = -32601
+
+
+async def handle_mcp_endpoint(request: ApiRequest) -> Response:
+    """MCP endpoint handler the implements the Streamable HTTP protocol.
+
+    The handler is expected to support the following methods:
+
+    - POST: Process a JSON-RPC request
+    - DELETE: Terminate a session
+
+    We currently do not support:
+    - /GET (initiates a streaming session)
+      This endpoint can be used to RESUME a previously interrupted session.
+    - text/event-stream (streaming) response from the server.
+
+    Support for these can be added, we just need to determine what information
+    from the agent we want to stream.
+
+    One possibility is to map "custom" stream mode to server side notifications.
+
+    Args:
+        request: The incoming request object
+
+    Returns:
+        The response to the request
+    """
+    # Route request based on HTTP method
+    if request.method == "DELETE":
+        return handle_delete_request()
+    elif request.method == "GET":
+        return handle_get_request()
+    elif request.method == "POST":
+        return await handle_post_request(request)
+    else:
+        # Method not allowed
+        return Response(status_code=405)
+
+
+def handle_delete_request() -> Response:
+    """Handle HTTP DELETE requests for session termination.
+
+    Returns:
+        Response with appropriate status code
+    """
+    return Response(status_code=404)
+
+
+def handle_get_request() -> Response:
+    """Handle HTTP GET requests for streaming (not currently supported).
+
+    Returns:
+        Method not allowed response
+    """
+    # Does not support streaming at the moment
+    return Response(status_code=405)
+
+
+async def handle_post_request(request: ApiRequest) -> Response:
+    """Handle HTTP POST requests for JSON-RPC messaging.
+
+    Args:
+        request: The incoming request object
+
+    Returns:
+        Response to the JSON-RPC message
+    """
+    body = await request.body()
+
+    # Validate JSON
+    try:
+        message = json.loads(body)
+    except json.JSONDecodeError:
+        return create_error_response("Invalid JSON", 400)
+
+    # Validate Accept header
+    if not is_valid_accept_header(request):
+        return create_error_response(
+            "Accept header must include application/json or text/event-stream", 400
+        )
+
+    # Validate message format
+    if not isinstance(message, dict):
+        return create_error_response("Invalid message format.", 400)
+
+    # Determine message type and route to appropriate handler
+    id_ = message.get("id")
+    method = message.get("method")
+
+    # Check for required jsonrpc field
+    if message.get("jsonrpc") != "2.0":
+        return create_error_response(
+            "Invalid JSON-RPC message. Missing or invalid jsonrpc version.", 400
+        )
+
+    if id_ and method:
+        # JSON-RPC request
+        return await handle_jsonrpc_request(request, cast(JsonRpcRequest, message))
+    elif id_:
+        # JSON-RPC response
+        return handle_jsonrpc_response(cast(JsonRpcResponse, message))
+    elif method:
+        # JSON-RPC notification
+        return handle_jsonrpc_notification(cast(JsonRpcNotification, message))
+    else:
+        # Invalid message format
+        return create_error_response(
+            "Invalid message format. A message is to be either a JSON-RPC "
+            "request, response, or notification."
+            "Please see the Messages section of the Streamable HTTP RFC "
+            "for more information.",
+            400,
+        )
+
+
+def is_valid_accept_header(request: ApiRequest) -> bool:
+    """Check if the Accept header contains supported content types.
+
+    Args:
+        request: The incoming request
+
+    Returns:
+        True if header contains application/json or text/event-stream
+    """
+    accept_header = request.headers.get("Accept", "")
+    accepts_json = "application/json" in accept_header
+    accepts_sse = "text/event-stream" in accept_header
+    return accepts_json or accepts_sse
+
+
+def create_error_response(message: str, status_code: int) -> Response:
+    """Create a JSON error response.
+
+    Args:
+        message: The error message
+        status_code: The HTTP status code
+
+    Returns:
+        JSON response with error details
+    """
+    return Response(
+        content=json.dumps({"error": message}),
+        status_code=status_code,
+        media_type="application/json",
+    )
+
+
+async def handle_jsonrpc_request(
+    request: ApiRequest,
+    message: JsonRpcRequest,
+) -> Response:
+    """Handle JSON-RPC requests (messages with both id and method).
+
+    Args:
+        request: The incoming request object
+        message: The parsed JSON-RPC message
+
+    Returns:
+        Response to the request
+    """
+    method = message["method"]
+    params = message.get("params", {})
+
+    if method == "initialize":
+        result_or_error = handle_initialize_request(message)
+    elif method == "tools/list":
+        result_or_error = await handle_tools_list(request, params)
+    elif method == "tools/call":
+        result_or_error = await handle_tools_call(request, params)
+    else:
+        result_or_error = {
+            "error": {
+                "code": ERROR_CODE_METHOD_NOT_FOUND,
+                "message": f"Method not found: {method}",
+            }
+        }
+
+    # Process the result or error output
+    exists = {"error", "result"} - set(result_or_error.keys())
+    if len(exists) != 1:
+        raise AssertionError(
+            "Internal server error. Invalid response in MCP protocol implementation."
+        )
+
+    return JSONResponse(
+        {
+            "jsonrpc": "2.0",
+            "id": message["id"],
+            **result_or_error,
+        }
+    )
+
+
+def handle_initialize_request(message: JsonRpcRequest) -> dict[str, Any]:
+    """Handle initialize requests to create a new session.
+
+    Args:
+        message: The JSON-RPC request message
+
+    Returns:
+        Response with new session details
+    """
+    return {
+        "result": {
+            # We do not return a session ID right now.
+            "capabilities": {
+                "tools": {
+                    # We do not support subscriptions currently
+                    "listChanged": False,
+                },
+            },
+        },
+    }
+
+
+def handle_jsonrpc_response(message: JsonRpcResponse) -> Response:
+    """Handle JSON-RPC responses (messages with id but no method).
+
+    Args:
+        message: The parsed JSON-RPC response message
+
+    Returns:
+        Acknowledgement response
+    """
+    # For any responses, we just acknowledge receipt
+    return Response(status_code=202)
+
+
+def handle_jsonrpc_notification(message: JsonRpcNotification) -> Response:
+    """Handle JSON-RPC notifications (messages with method but no id).
+
+    Args:
+        message: The parsed JSON-RPC message
+
+    Returns:
+        Response to the notification
+    """
+    return Response(status_code=202)
+
+
+async def handle_tools_list(
+    request: ApiRequest, params: dict[str, Any]
+) -> dict[str, Any]:
+    """Handle tools/list request to get available assistants as tools.
+
+    Args:
+        request: The incoming request object. Used for propagating any headers
+            for authentication purposes.
+        params: The parameters for the tools/list request
+
+    Returns:
+        Dictionary containing list of available tools
+    """
+    client = _client()
+
+    try:
+        cursor = params.get("cursor", 0)
+        cursor = int(cursor)
+    except ValueError:
+        cursor = 0
+
+    # Get assistants from the API
+    # For now set a large limit to get all assistants
+    assistants = await client.assistants.search(offset=cursor, limit=DEFAULT_PAGE_SIZE)
+
+    if len(assistants) == DEFAULT_PAGE_SIZE:
+        next_cursor = cursor + DEFAULT_PAGE_SIZE
+    else:
+        next_cursor = None
+
+    # Format assistants as tools for MCP
+    tools = []
+    seen_names = set()
+    for assistant in assistants:
+        id_ = assistant.get("assistant_id")
+        name = assistant["name"]
+
+        if name in seen_names:
+            await logger.awarning(f"Duplicate assistant name found {name}", name=name)
+        else:
+            seen_names.add(name)
+
+        schemas = await client.assistants.get_schemas(id_)
+        tools.append(
+            {
+                "name": name,
+                "inputSchema": schemas.get("input_schema", {}),
+                "description": "",
+            },
+        )
+    return {"result": {"tools": tools, "nextCursor": next_cursor}}
+
+
+async def handle_tools_call(
+    request: ApiRequest, params: dict[str, Any]
+) -> dict[str, Any]:
+    """Handle tools/call request to execute an assistant.
+
+    Args:
+        request: The incoming request
+        params: The parameters for the tool call
+
+    Returns:
+        The result of the tool execution
+    """
+    client = _client()
+
+    tool_name = params.get("name")
+
+    if not tool_name:
+        return {
+            "jsonrpc": "2.0",
+            "id": 3,
+            "error": {
+                "code": ERROR_CODE_INVALID_PARAMS,
+                "message": f"Unknown tool: {tool_name}",
+            },
+        }
+
+    arguments = params.get("arguments", {})
+    assistants = await client.assistants.search(limit=MAX_ASSISTANTS)
+    matching_assistant = [
+        assistant for assistant in assistants if assistant["name"] == tool_name
+    ]
+
+    num_assistants = len(matching_assistant)
+
+    if num_assistants == 0:
+        return {
+            "jsonrpc": "2.0",
+            "id": 3,
+            "error": {
+                "code": ERROR_CODE_INVALID_PARAMS,
+                "message": f"Unknown tool: {tool_name}",
+            },
+        }
+    elif num_assistants > 1:
+        return {
+            "jsonrpc": "2.0",
+            "id": 3,
+            "error": {
+                "code": ERROR_CODE_INVALID_PARAMS,
+                "message": "Multiple tools found with the same name.",
+            },
+        }
+    else:
+        tool_name = matching_assistant[0]["assistant_id"]
+
+    value = await client.runs.wait(
+        thread_id=None, assistant_id=tool_name, input=arguments, raise_error=False
+    )
+
+    if "__error__" in value:
+        # This is a run-time error in the tool.
+        return {
+            "result": {
+                "isError": True,
+                "content": [
+                    {"type": "text", "value": value["__error__"]["error"]},
+                ],
+            }
+        }
+
+    # All good, return the result
+    return {
+        "result": {
+            "content": [
+                {"type": "text", "value": repr(value)},
+            ]
+        }
+    }
+
+
+# Define routes for the MCP endpoint
+mcp_routes = [
+    ApiRoute("/mcp", handle_mcp_endpoint, methods=["GET", "POST", "DELETE"]),
+]
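The module above is server-side only; this release does not ship an MCP client. As a rough illustration (not part of the diff) of the request shape the handler accepts — JSON-RPC 2.0 over POST, with an Accept header that includes application/json — a tools/list followed by a tools/call could look like the sketch below. The base URL and the assistant name "agent" are assumptions for illustration.

# Sketch only: drive the new /mcp endpoint with plain JSON-RPC over HTTP.
import httpx

BASE = "http://localhost:2024"
HEADERS = {"Accept": "application/json", "Content-Type": "application/json"}

with httpx.Client(base_url=BASE, headers=HEADERS) as client:
    # tools/list: assistants are exposed as MCP tools, paginated by "cursor".
    listed = client.post(
        "/mcp",
        json={"jsonrpc": "2.0", "id": 1, "method": "tools/list", "params": {"cursor": 0}},
    ).json()
    print([t["name"] for t in listed["result"]["tools"]])

    # tools/call: "name" selects the assistant, "arguments" become the run input.
    called = client.post(
        "/mcp",
        json={
            "jsonrpc": "2.0",
            "id": 2,
            "method": "tools/call",
            "params": {"name": "agent", "arguments": {"messages": [{"role": "user", "content": "hi"}]}},
        },
    ).json()
    # Per handle_tools_call, results come back as {"content": [{"type": "text", "value": ...}]}.
    print(called["result"]["content"][0]["value"])
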
{langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/api/threads.py

@@ -28,12 +28,21 @@ async def create_thread(
     if thread_id := payload.get("thread_id"):
         validate_uuid(thread_id, "Invalid thread ID: must be a UUID")
     async with connect() as conn:
+        thread_id = thread_id or str(uuid4())
         iter = await Threads.put(
             conn,
-            thread_id
+            thread_id,
             metadata=payload.get("metadata"),
             if_exists=payload.get("if_exists") or "raise",
         )
+
+        if supersteps := payload.get("supersteps"):
+            await Threads.State.bulk(
+                conn,
+                config={"configurable": {"thread_id": thread_id}},
+                supersteps=supersteps,
+            )
+
         return ApiResponse(await fetchone(iter, not_found_code=409))


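For context (not part of the diff): create_thread now generates a thread_id when none is supplied and applies an optional "supersteps" list via Threads.State.bulk. A minimal sketch of the payload keys the handler reads, assuming the standard POST /threads route; the shape of each item in "supersteps" is not shown in this excerpt, so it is left commented out.

# Sketch only: payload keys accepted by the updated create_thread handler.
import uuid
import httpx

payload = {
    "thread_id": str(uuid.uuid4()),   # optional; the server now generates one if omitted
    "metadata": {"owner": "example"},
    "if_exists": "raise",
    # "supersteps": [...],            # optional pre-seeded history, applied via Threads.State.bulk
}
resp = httpx.post("http://localhost:2024/threads", json=payload)
resp.raise_for_status()
print(resp.json()["thread_id"])
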
{langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/asyncio.py

@@ -1,4 +1,5 @@
 import asyncio
+import concurrent.futures
 from collections.abc import AsyncIterator, Coroutine
 from contextlib import AbstractAsyncContextManager
 from functools import partial
@@ -10,6 +11,13 @@ T = TypeVar("T")

 logger = structlog.stdlib.get_logger(__name__)

+_MAIN_LOOP: asyncio.AbstractEventLoop | None = None
+
+
+def set_event_loop(loop: asyncio.AbstractEventLoop) -> None:
+    global _MAIN_LOOP
+    _MAIN_LOOP = loop
+

 async def sleep_if_not_done(delay: float, done: asyncio.Event) -> None:
     try:
@@ -76,9 +84,10 @@ PENDING_TASKS = set()


 def _create_task_done_callback(
-    ignore_exceptions: tuple[Exception, ...],
+    ignore_exceptions: tuple[Exception, ...],
+    task: asyncio.Task | asyncio.Future,
 ) -> None:
-    PENDING_TASKS.
+    PENDING_TASKS.discard(task)
     try:
         if exc := task.exception():
             if not isinstance(exc, ignore_exceptions):
@@ -97,6 +106,16 @@ def create_task(
     return task


+def run_coroutine_threadsafe(
+    coro: Coroutine[Any, Any, T], ignore_exceptions: tuple[type[Exception], ...] = ()
+) -> concurrent.futures.Future[T | None]:
+    if _MAIN_LOOP is None:
+        raise RuntimeError("No event loop set")
+    future = asyncio.run_coroutine_threadsafe(coro, _MAIN_LOOP)
+    future.add_done_callback(partial(_create_task_done_callback, ignore_exceptions))
+    return future
+
+
 class SimpleTaskGroup(AbstractAsyncContextManager["SimpleTaskGroup"]):
     """An async task group that can be configured to wait and/or cancel tasks on exit.

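A small usage sketch (not part of the diff) for the new helpers: the application registers its main loop at startup with set_event_loop, and code running in a plain worker thread can then schedule coroutines onto that loop with run_coroutine_threadsafe. Names other than the two new helpers are illustrative.

# Sketch only, assuming langgraph_api is importable in the environment.
import asyncio
import threading

from langgraph_api.asyncio import run_coroutine_threadsafe, set_event_loop


async def ping() -> str:
    await asyncio.sleep(0.1)
    return "pong"


def worker() -> None:
    # Runs in a plain thread; the coroutine executes on the registered main loop
    # and the result is exposed via a concurrent.futures.Future.
    future = run_coroutine_threadsafe(ping())
    print(future.result(timeout=5))


async def main() -> None:
    set_event_loop(asyncio.get_running_loop())
    t = threading.Thread(target=worker)
    t.start()
    await asyncio.to_thread(t.join)


asyncio.run(main())
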
{langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/cli.py

@@ -129,6 +129,7 @@ def run_server(
     store: typing.Optional["StoreConfig"] = None,
     auth: AuthConfig | None = None,
     http: typing.Optional["HttpConfig"] = None,
+    studio_url: str | None = None,
     **kwargs: typing.Any,
 ):
     """Run the LangGraph API server."""
@@ -188,15 +189,16 @@ def run_server(
         LANGSMITH_LANGGRAPH_API_VARIANT="local_dev",
         LANGGRAPH_AUTH=json.dumps(auth) if auth else None,
         LANGGRAPH_HTTP=json.dumps(http) if http else None,
+        LANGGRAPH_API_URL=local_url,
         # See https://developer.chrome.com/blog/private-network-access-update-2024-03
         ALLOW_PRIVATE_NETWORK="true",
         **(env_vars or {}),
     ):
-        studio_origin = _get_ls_origin() or "https://smith.langchain.com"
-
+        studio_origin = studio_url or _get_ls_origin() or "https://smith.langchain.com"
+        full_studio_url = f"{studio_origin}/studio/?baseUrl={local_url}"

         def _open_browser():
-            nonlocal studio_origin,
+            nonlocal studio_origin, full_studio_url
             import time
             import urllib.request
             import webbrowser
@@ -218,7 +220,7 @@ def run_server(
             try:
                 org_id = org_id_future.result(timeout=3.0)
                 if org_id:
-
+                    full_studio_url = f"{studio_origin}/studio/?baseUrl={local_url}&organizationId={org_id}"
             except TimeoutError as e:
                 thread_logger.debug(
                     f"Failed to get organization ID: {str(e)}"
@@ -230,8 +232,8 @@ def run_server(
                 thread_logger.info(
                     "🎨 Opening Studio in your browser..."
                 )
-                thread_logger.info("URL: " +
-                webbrowser.open(
+                thread_logger.info("URL: " + full_studio_url)
+                webbrowser.open(full_studio_url)
                 return
             except urllib.error.URLError:
                 pass
@@ -246,7 +248,7 @@ def run_server(
 ╩═╝┴ ┴┘└┘└─┘╚═╝┴└─┴ ┴┴ ┴ ┴

 - 🚀 API: \033[36m{local_url}\033[0m
-- 🎨 Studio UI: \033[36m{
+- 🎨 Studio UI: \033[36m{full_studio_url}\033[0m
 - 📚 API Docs: \033[36m{local_url}/docs\033[0m

 This in-memory server is designed for development and testing.
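For reference (not part of the diff), the precedence introduced by the new studio_url parameter can be read off the added lines: an explicit studio_url wins, then the LangSmith origin from _get_ls_origin(), then the public default. A standalone rendering of that logic, with _get_ls_origin() omitted and build_studio_url a hypothetical name used only for illustration:

# Sketch only: mirrors how run_server now assembles the Studio link.
def build_studio_url(local_url: str, studio_url: str | None = None, org_id: str | None = None) -> str:
    # studio_url takes precedence over the default Studio origin.
    studio_origin = studio_url or "https://smith.langchain.com"
    full_studio_url = f"{studio_origin}/studio/?baseUrl={local_url}"
    if org_id:
        # The organization ID is appended only if it could be resolved in time.
        full_studio_url = f"{studio_origin}/studio/?baseUrl={local_url}&organizationId={org_id}"
    return full_studio_url


print(build_studio_url("http://127.0.0.1:2024", studio_url="https://eu.smith.langchain.com"))
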
langgraph_api-0.0.33/langgraph_api/command.py

@@ -0,0 +1,29 @@
+from langgraph.types import Command, Send
+
+from langgraph_api.schema import RunCommand
+
+
+def map_cmd(cmd: RunCommand) -> Command:
+    goto = cmd.get("goto")
+    if goto is not None and not isinstance(goto, list):
+        goto = [cmd.get("goto")]
+
+    update = cmd.get("update")
+    if isinstance(update, tuple | list) and all(
+        isinstance(t, tuple | list) and len(t) == 2 and isinstance(t[0], str)
+        for t in update
+    ):
+        update = [tuple(t) for t in update]
+
+    return Command(
+        update=update,
+        goto=(
+            [
+                it if isinstance(it, str) else Send(it["node"], it["input"])
+                for it in goto
+            ]
+            if goto
+            else None
+        ),
+        resume=cmd.get("resume"),
+    )
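An illustrative call (not part of the diff) showing how the new map_cmd helper translates an API-level command payload into a langgraph Command; string goto entries pass through while dict entries become Send objects.

# Sketch only: node names and inputs are made up for illustration.
from langgraph_api.command import map_cmd

cmd = map_cmd(
    {
        "goto": [{"node": "tools", "input": {"query": "weather"}}, "summarize"],
        "update": {"messages": []},
        "resume": None,
    }
)
# cmd.goto is [Send("tools", {"query": "weather"}), "summarize"];
# cmd.update passes through unchanged and cmd.resume is None.
print(cmd)
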
{langgraph_api-0.0.31 → langgraph_api-0.0.33}/langgraph_api/config.py

@@ -32,6 +32,11 @@ class HttpConfig(TypedDict, total=False):
     disable_meta: bool
     """Disable /ok, /info, /metrics, and /docs routes"""
     cors: CorsConfig | None
+    """CORS configuration"""
+    disable_ui: bool
+    """Disable /ui routes"""
+    disable_mcp: bool
+    """Disable /mcp routes"""


 class IndexConfig(TypedDict, total=False):
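A sketch (not part of the diff) of an HttpConfig that opts in to the new MCP routes: since the route registration in langgraph_api/api/__init__.py reads HTTP_CONFIG.get("disable_mcp", True), the key must be explicitly set to False for /mcp to be mounted.

# Sketch only: HttpConfig is a total=False TypedDict, so unused keys may be omitted.
from langgraph_api.config import HttpConfig

http_config: HttpConfig = {
    "disable_meta": False,
    "disable_ui": False,
    "disable_mcp": False,  # must be explicitly False to mount the /mcp routes
    "cors": None,
}
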