langgraph-api 0.2.72__py3-none-any.whl → 0.2.75__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- langgraph_api/__init__.py +1 -1
- langgraph_api/api/assistants.py +2 -0
- langgraph_api/asyncio.py +5 -2
- langgraph_api/cron_scheduler.py +1 -1
- langgraph_api/graph.py +4 -3
- langgraph_api/logging.py +1 -6
- langgraph_api/state.py +7 -1
- langgraph_api/store.py +1 -1
- langgraph_api/stream.py +3 -3
- langgraph_api/utils/__init__.py +129 -0
- langgraph_api/utils/config.py +140 -0
- langgraph_api/utils/future.py +220 -0
- langgraph_api/utils.py +0 -129
- {langgraph_api-0.2.72.dist-info → langgraph_api-0.2.75.dist-info}/METADATA +1 -1
- {langgraph_api-0.2.72.dist-info → langgraph_api-0.2.75.dist-info}/RECORD +18 -15
- {langgraph_api-0.2.72.dist-info → langgraph_api-0.2.75.dist-info}/WHEEL +0 -0
- {langgraph_api-0.2.72.dist-info → langgraph_api-0.2.75.dist-info}/entry_points.txt +0 -0
- {langgraph_api-0.2.72.dist-info → langgraph_api-0.2.75.dist-info}/licenses/LICENSE +0 -0
langgraph_api/__init__.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.2.72"
+__version__ = "0.2.75"
langgraph_api/api/assistants.py
CHANGED
langgraph_api/asyncio.py
CHANGED
@@ -6,7 +6,6 @@ from functools import partial
 from typing import Any, Generic, TypeVar

 import structlog
-from langgraph.utils.future import chain_future

 T = TypeVar("T")

@@ -130,11 +129,15 @@

 def call_soon_in_main_loop(coro: Coroutine[Any, Any, T]) -> asyncio.Future[T]:
     """Run a coroutine in the main event loop."""
+    from langgraph_api.utils import future as lg_future
+
     if _MAIN_LOOP is None:
         raise RuntimeError("No event loop set")
     main_loop_fut = asyncio.ensure_future(coro, loop=_MAIN_LOOP)
     this_loop_fut = asyncio.get_running_loop().create_future()
-    _MAIN_LOOP.call_soon_threadsafe(chain_future, main_loop_fut, this_loop_fut)
+    _MAIN_LOOP.call_soon_threadsafe(
+        lg_future.chain_future, main_loop_fut, this_loop_fut
+    )
     return this_loop_fut

langgraph_api/cron_scheduler.py
CHANGED
@@ -2,10 +2,10 @@ import asyncio
 from random import random

 import structlog
-from langchain_core.runnables.config import run_in_executor

 from langgraph_api.models.run import create_valid_run
 from langgraph_api.utils import next_cron_date
+from langgraph_api.utils.config import run_in_executor
 from langgraph_api.worker import set_auth_ctx_for_run
 from langgraph_runtime.database import connect
 from langgraph_runtime.ops import Crons
langgraph_api/graph.py
CHANGED
@@ -14,19 +14,18 @@ from uuid import UUID, uuid5

 import orjson
 import structlog
-from langchain_core.runnables.config import run_in_executor, var_child_runnable_config
 from langgraph.checkpoint.base import BaseCheckpointSaver
 from langgraph.constants import CONFIG_KEY_CHECKPOINTER, CONFIG_KEY_STORE
 from langgraph.graph import StateGraph
 from langgraph.pregel import Pregel
 from langgraph.store.base import BaseStore
-from langgraph.utils.config import ensure_config
 from starlette.exceptions import HTTPException

 from langgraph_api import asyncio as lg_asyncio
 from langgraph_api import config
 from langgraph_api.js.base import BaseRemotePregel, is_js_path
 from langgraph_api.schema import Config
+from langgraph_api.utils.config import run_in_executor, var_child_runnable_config

 if TYPE_CHECKING:
     from langchain_core.embeddings import Embeddings
@@ -123,10 +122,12 @@ async def get_graph(
     store: BaseStore | None = None,
 ) -> AsyncIterator[Pregel]:
     """Return the runnable."""
+    from langgraph_api.utils import config as lg_config
+
     assert_graph_exists(graph_id)
     value = GRAPHS[graph_id]
     if graph_id in FACTORY_ACCEPTS_CONFIG:
-        config = ensure_config(config)
+        config = lg_config.ensure_config(config)
     if store is not None and not config["configurable"].get(CONFIG_KEY_STORE):
         config["configurable"][CONFIG_KEY_STORE] = store
     if checkpointer is not None and not config["configurable"].get(
langgraph_api/logging.py
CHANGED
@@ -15,7 +15,6 @@ log_env = Config()
 LOG_JSON = log_env("LOG_JSON", cast=bool, default=False)
 LOG_COLOR = log_env("LOG_COLOR", cast=bool, default=True)
 LOG_LEVEL = log_env("LOG_LEVEL", cast=str, default="INFO")
-LOG_DICT_TRACEBACKS = log_env("LOG_DICT_TRACEBACKS", cast=bool, default=True)

 logging.getLogger().setLevel(LOG_LEVEL.upper())
 logging.getLogger("psycopg").setLevel(logging.WARNING)
@@ -104,11 +103,7 @@ shared_processors = [
     AddApiVersion(),
     structlog.processors.TimeStamper(fmt="iso", utc=True),
     structlog.processors.StackInfoRenderer(),
-    (
-        structlog.processors.dict_tracebacks
-        if LOG_JSON and LOG_DICT_TRACEBACKS
-        else structlog.processors.format_exc_info
-    ),
+    structlog.processors.format_exc_info,
     structlog.processors.UnicodeDecoder(),
     AddLoggingContext(),
 ]
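
The second hunk removes the LOG_DICT_TRACEBACKS toggle, so exceptions are now always rendered by structlog's format_exc_info processor rather than dict_tracebacks. A minimal standalone sketch of that processor's behavior follows; it is not the service's actual logging setup, and the JSONRenderer and sample logger call are illustrative assumptions.

```python
import structlog

# Same processor choice the diff settles on: exceptions become a flat
# "exception" string field instead of dict_tracebacks' structured dicts.
structlog.configure(
    processors=[
        structlog.processors.TimeStamper(fmt="iso", utc=True),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        structlog.processors.JSONRenderer(),
    ]
)

log = structlog.get_logger()
try:
    1 / 0
except ZeroDivisionError:
    # Prints JSON like: {"event": "stream failed", "exception": "Traceback ...", ...}
    log.exception("stream failed")
```
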
langgraph_api/state.py
CHANGED
@@ -1,8 +1,14 @@
-from langchain_core.runnables.config import RunnableConfig
+from __future__ import annotations
+
+import typing
+
 from langgraph.types import StateSnapshot

 from langgraph_api.schema import Checkpoint, ThreadState

+if typing.TYPE_CHECKING:
+    from langchain_core.runnables.config import RunnableConfig
+

 def runnable_config_to_checkpoint(
     config: RunnableConfig | None,
langgraph_api/store.py
CHANGED
@@ -8,12 +8,12 @@ from random import choice
 from typing import Any

 import structlog
-from langchain_core.runnables.config import run_in_executor
 from langgraph.graph import StateGraph
 from langgraph.pregel import Pregel
 from langgraph.store.base import BaseStore

 from langgraph_api import config
+from langgraph_api.utils.config import run_in_executor

 logger = structlog.stdlib.get_logger(__name__)

langgraph_api/stream.py
CHANGED
@@ -7,12 +7,12 @@ import langgraph.version
 import langsmith
 import structlog
 from langchain_core.messages import (
+    # TODO: Remove explicit dependency
     BaseMessage,
     BaseMessageChunk,
     convert_to_messages,
     message_chunk_to_message,
 )
-from langchain_core.runnables.config import run_in_executor
 from langgraph.errors import (
     EmptyChannelError,
     EmptyInputError,
@@ -33,6 +33,7 @@ from langgraph_api.js.base import BaseRemotePregel
 from langgraph_api.metadata import HOST, PLAN, USER_API_URL, incr_nodes
 from langgraph_api.schema import Run, StreamMode
 from langgraph_api.serde import json_dumpb
+from langgraph_api.utils.config import run_in_executor
 from langgraph_runtime.checkpoint import Checkpointer
 from langgraph_runtime.ops import Runs

@@ -320,7 +321,6 @@ async def consume(stream: AnyStream, run_id: str, resumable: bool = False) -> No
             resumable=resumable,
         )
     except Exception as e:
-        g = e
         if isinstance(e, ExceptionGroup):
             e = e.exceptions[0]
         await Runs.Stream.publish(
@@ -329,7 +329,7 @@ async def consume(stream: AnyStream, run_id: str, resumable: bool = False) -> No
             await run_in_executor(None, json_dumpb, e),
             resumable=resumable,
         )
-        raise e
+        raise e


 def get_feedback_urls(run_id: str, feedback_keys: list[str]) -> dict[str, str]:
langgraph_api/utils/__init__.py
ADDED
@@ -0,0 +1,129 @@
+import contextvars
+import uuid
+from collections.abc import AsyncIterator
+from contextlib import asynccontextmanager
+from datetime import datetime
+from typing import Any, Protocol, TypeAlias, TypeVar
+
+import structlog
+from langgraph_sdk import Auth
+from starlette.authentication import AuthCredentials, BaseUser
+from starlette.exceptions import HTTPException
+from starlette.schemas import BaseSchemaGenerator
+
+from langgraph_api.auth.custom import SimpleUser
+
+logger = structlog.stdlib.get_logger(__name__)
+
+
+T = TypeVar("T")
+Row: TypeAlias = dict[str, Any]
+AuthContext = contextvars.ContextVar[Auth.types.BaseAuthContext | None](
+    "AuthContext", default=None
+)
+
+
+@asynccontextmanager
+async def with_user(
+    user: BaseUser | None = None, auth: AuthCredentials | list[str] | None = None
+):
+    current = get_auth_ctx()
+    set_auth_ctx(user, auth)
+    yield
+    if current is None:
+        return
+    set_auth_ctx(current.user, AuthCredentials(scopes=current.permissions))
+
+
+def set_auth_ctx(
+    user: BaseUser | None, auth: AuthCredentials | list[str] | None
+) -> None:
+    if user is None and auth is None:
+        AuthContext.set(None)
+    else:
+        AuthContext.set(
+            Auth.types.BaseAuthContext(
+                permissions=(
+                    auth.scopes if isinstance(auth, AuthCredentials) else (auth or [])
+                ),
+                user=user or SimpleUser(""),
+            )
+        )
+
+
+def get_auth_ctx() -> Auth.types.BaseAuthContext | None:
+    return AuthContext.get()
+
+
+class AsyncCursorProto(Protocol):
+    async def fetchone(self) -> Row: ...
+
+    async def fetchall(self) -> list[Row]: ...
+
+    async def __aiter__(self) -> AsyncIterator[Row]:
+        yield ...
+
+
+class AsyncPipelineProto(Protocol):
+    async def sync(self) -> None: ...
+
+
+class AsyncConnectionProto(Protocol):
+    @asynccontextmanager
+    async def pipeline(self) -> AsyncIterator[AsyncPipelineProto]:
+        yield ...
+
+    async def execute(self, query: str, *args, **kwargs) -> AsyncCursorProto: ...
+
+
+async def fetchone(
+    it: AsyncIterator[T],
+    *,
+    not_found_code: int = 404,
+    not_found_detail: str | None = None,
+) -> T:
+    """Fetch the first row from an async iterator."""
+    try:
+        return await anext(it)
+    except StopAsyncIteration:
+        raise HTTPException(
+            status_code=not_found_code, detail=not_found_detail
+        ) from None
+
+
+def validate_uuid(uuid_str: str, invalid_uuid_detail: str | None) -> uuid.UUID:
+    try:
+        return uuid.UUID(uuid_str)
+    except ValueError:
+        raise HTTPException(status_code=422, detail=invalid_uuid_detail) from None
+
+
+def next_cron_date(schedule: str, base_time: datetime) -> datetime:
+    import croniter
+
+    cron_iter = croniter.croniter(schedule, base_time)
+    return cron_iter.get_next(datetime)
+
+
+class SchemaGenerator(BaseSchemaGenerator):
+    def __init__(self, base_schema: dict[str, Any]) -> None:
+        self.base_schema = base_schema
+
+    def get_schema(self, routes: list) -> dict[str, Any]:
+        schema = dict(self.base_schema)
+        schema.setdefault("paths", {})
+        endpoints_info = self.get_endpoints(routes)
+
+        for endpoint in endpoints_info:
+            try:
+                parsed = self.parse_docstring(endpoint.func)
+            except AssertionError:
+                logger.warning("Could not parse docstrings for route %s", endpoint.path)
+                parsed = {}
+
+            if endpoint.path not in schema["paths"]:
+                schema["paths"][endpoint.path] = {}
+
+            schema["paths"][endpoint.path][endpoint.http_method] = parsed
+
+        return schema
langgraph_api/utils/config.py
ADDED
@@ -0,0 +1,140 @@
+from __future__ import annotations
+
+import asyncio
+import functools
+import typing
+from collections import ChainMap
+from concurrent.futures import Executor
+from contextvars import copy_context
+from os import getenv
+from typing import Any, ParamSpec, TypeVar, cast
+
+from langgraph.constants import CONF
+
+if typing.TYPE_CHECKING:
+    from langchain_core.runnables import RunnableConfig
+
+try:
+    from langchain_core.runnables.config import (
+        var_child_runnable_config,
+    )
+except ImportError:
+    var_child_runnable_config = None
+
+CONFIG_KEYS = [
+    "tags",
+    "metadata",
+    "callbacks",
+    "run_name",
+    "max_concurrency",
+    "recursion_limit",
+    "configurable",
+    "run_id",
+]
+
+COPIABLE_KEYS = [
+    "tags",
+    "metadata",
+    "callbacks",
+    "configurable",
+]
+
+DEFAULT_RECURSION_LIMIT = int(getenv("LANGGRAPH_DEFAULT_RECURSION_LIMIT", "25"))
+
+T = TypeVar("T")
+P = ParamSpec("P")
+
+
+def _is_not_empty(value: Any) -> bool:
+    if isinstance(value, list | tuple | dict):
+        return len(value) > 0
+    else:
+        return value is not None
+
+
+def ensure_config(*configs: RunnableConfig | None) -> RunnableConfig:
+    """Return a config with all keys, merging any provided configs.
+
+    Args:
+        *configs: Configs to merge before ensuring defaults.
+
+    Returns:
+        RunnableConfig: The merged and ensured config.
+    """
+    empty = dict(
+        tags=[],
+        metadata=ChainMap(),
+        callbacks=None,
+        recursion_limit=DEFAULT_RECURSION_LIMIT,
+        configurable={},
+    )
+    if var_child_runnable_config is not None and (
+        var_config := var_child_runnable_config.get()
+    ):
+        empty.update(
+            {
+                k: v.copy() if k in COPIABLE_KEYS else v  # type: ignore[attr-defined]
+                for k, v in var_config.items()
+                if _is_not_empty(v)
+            },
+        )
+    for config in configs:
+        if config is None:
+            continue
+        for k, v in config.items():
+            if _is_not_empty(v) and k in CONFIG_KEYS:
+                if k == CONF:
+                    empty[k] = cast(dict, v).copy()
+                else:
+                    empty[k] = v  # type: ignore[literal-required]
+        for k, v in config.items():
+            if _is_not_empty(v) and k not in CONFIG_KEYS:
+                empty[CONF][k] = v
+    for key, value in empty[CONF].items():
+        if (
+            not key.startswith("__")
+            and isinstance(value, str | int | float | bool)
+            and key not in empty["metadata"]
+        ):
+            empty["metadata"][key] = value
+    return empty
+
+
+async def run_in_executor(
+    executor_or_config: Executor | RunnableConfig | None,
+    func: typing.Callable[P, T],
+    *args: P.args,
+    **kwargs: P.kwargs,
+) -> T:
+    """Run a function in an executor.
+
+    Args:
+        executor_or_config: The executor or config to run in.
+        func (Callable[P, Output]): The function.
+        *args (Any): The positional arguments to the function.
+        **kwargs (Any): The keyword arguments to the function.
+
+    Returns:
+        Output: The output of the function.
+
+    Raises:
+        RuntimeError: If the function raises a StopIteration.
+    """
+
+    def wrapper() -> T:
+        try:
+            return func(*args, **kwargs)
+        except StopIteration as exc:
+            # StopIteration can't be set on an asyncio.Future
+            # it raises a TypeError and leaves the Future pending forever
+            # so we need to convert it to a RuntimeError
+            raise RuntimeError from exc
+
+    if executor_or_config is None or isinstance(executor_or_config, dict):
+        # Use default executor with context copied from current context
+        return await asyncio.get_running_loop().run_in_executor(
+            None,
+            functools.partial(copy_context().run, wrapper),
+        )
+
+    return await asyncio.get_running_loop().run_in_executor(executor_or_config, wrapper)
langgraph_api/utils/future.py
ADDED
@@ -0,0 +1,220 @@
+from __future__ import annotations
+
+import asyncio
+import concurrent.futures
+import contextvars
+import inspect
+import sys
+import types
+from collections.abc import Awaitable, Coroutine, Generator
+from typing import TypeVar, cast
+
+T = TypeVar("T")
+AnyFuture = asyncio.Future | concurrent.futures.Future
+
+CONTEXT_NOT_SUPPORTED = sys.version_info < (3, 11)
+EAGER_NOT_SUPPORTED = sys.version_info < (3, 12)
+
+
+def _get_loop(fut: asyncio.Future) -> asyncio.AbstractEventLoop:
+    # Tries to call Future.get_loop() if it's available.
+    # Otherwise fallbacks to using the old '_loop' property.
+    try:
+        get_loop = fut.get_loop
+    except AttributeError:
+        pass
+    else:
+        return get_loop()
+    return fut._loop
+
+
+def _convert_future_exc(exc: BaseException) -> BaseException:
+    exc_class = type(exc)
+    if exc_class is concurrent.futures.CancelledError:
+        return asyncio.CancelledError(*exc.args)
+    elif exc_class is concurrent.futures.TimeoutError:
+        return TimeoutError(*exc.args)
+    elif exc_class is concurrent.futures.InvalidStateError:
+        return asyncio.InvalidStateError(*exc.args)
+    else:
+        return exc
+
+
+def _set_concurrent_future_state(
+    concurrent: concurrent.futures.Future,
+    source: AnyFuture,
+) -> None:
+    """Copy state from a future to a concurrent.futures.Future."""
+    assert source.done()
+    if source.cancelled():
+        concurrent.cancel()
+    if not concurrent.set_running_or_notify_cancel():
+        return
+    exception = source.exception()
+    if exception is not None:
+        concurrent.set_exception(_convert_future_exc(exception))
+    else:
+        result = source.result()
+        concurrent.set_result(result)
+
+
+def _copy_future_state(source: AnyFuture, dest: asyncio.Future) -> None:
+    """Internal helper to copy state from another Future.
+
+    The other Future may be a concurrent.futures.Future.
+    """
+    if dest.done():
+        return
+    assert source.done()
+    if dest.cancelled():
+        return
+    if source.cancelled():
+        dest.cancel()
+    else:
+        exception = source.exception()
+        if exception is not None:
+            dest.set_exception(_convert_future_exc(exception))
+        else:
+            result = source.result()
+            dest.set_result(result)
+
+
+def _chain_future(source: AnyFuture, destination: AnyFuture) -> None:
+    """Chain two futures so that when one completes, so does the other.
+
+    The result (or exception) of source will be copied to destination.
+    If destination is cancelled, source gets cancelled too.
+    Compatible with both asyncio.Future and concurrent.futures.Future.
+    """
+    if not asyncio.isfuture(source) and not isinstance(
+        source, concurrent.futures.Future
+    ):
+        raise TypeError("A future is required for source argument")
+    if not asyncio.isfuture(destination) and not isinstance(
+        destination, concurrent.futures.Future
+    ):
+        raise TypeError("A future is required for destination argument")
+    source_loop = _get_loop(source) if asyncio.isfuture(source) else None
+    dest_loop = _get_loop(destination) if asyncio.isfuture(destination) else None
+
+    def _set_state(future: AnyFuture, other: AnyFuture) -> None:
+        if asyncio.isfuture(future):
+            _copy_future_state(other, future)
+        else:
+            _set_concurrent_future_state(future, other)
+
+    def _call_check_cancel(destination: AnyFuture) -> None:
+        if destination.cancelled():
+            if source_loop is None or source_loop is dest_loop:
+                source.cancel()
+            else:
+                source_loop.call_soon_threadsafe(source.cancel)
+
+    def _call_set_state(source: AnyFuture) -> None:
+        if destination.cancelled() and dest_loop is not None and dest_loop.is_closed():
+            return
+        if dest_loop is None or dest_loop is source_loop:
+            _set_state(destination, source)
+        else:
+            if dest_loop.is_closed():
+                return
+            dest_loop.call_soon_threadsafe(_set_state, destination, source)
+
+    destination.add_done_callback(_call_check_cancel)
+    source.add_done_callback(_call_set_state)
+
+
+def chain_future(source: AnyFuture, destination: AnyFuture) -> AnyFuture:
+    # adapted from asyncio.run_coroutine_threadsafe
+    try:
+        _chain_future(source, destination)
+        return destination
+    except (SystemExit, KeyboardInterrupt):
+        raise
+    except BaseException as exc:
+        if isinstance(destination, concurrent.futures.Future):
+            if destination.set_running_or_notify_cancel():
+                destination.set_exception(exc)
+        else:
+            destination.set_exception(exc)
+        raise
+
+
+def _ensure_future(
+    coro_or_future: Coroutine[None, None, T] | Awaitable[T],
+    *,
+    loop: asyncio.AbstractEventLoop,
+    name: str | None = None,
+    context: contextvars.Context | None = None,
+    lazy: bool = True,
+) -> asyncio.Task[T]:
+    called_wrap_awaitable = False
+    if not asyncio.iscoroutine(coro_or_future):
+        if inspect.isawaitable(coro_or_future):
+            coro_or_future = cast(
+                Coroutine[None, None, T], _wrap_awaitable(coro_or_future)
+            )
+            called_wrap_awaitable = True
+        else:
+            raise TypeError(
+                "An asyncio.Future, a coroutine or an awaitable is required."
+                f" Got {type(coro_or_future).__name__} instead."
+            )
+
+    try:
+        if CONTEXT_NOT_SUPPORTED:
+            return loop.create_task(coro_or_future, name=name)
+        elif EAGER_NOT_SUPPORTED or lazy:
+            return loop.create_task(coro_or_future, name=name, context=context)
+        else:
+            return asyncio.eager_task_factory(
+                loop, coro_or_future, name=name, context=context
+            )
+    except RuntimeError:
+        if not called_wrap_awaitable:
+            coro_or_future.close()
+        raise
+
+
+@types.coroutine
+def _wrap_awaitable(awaitable: Awaitable[T]) -> Generator[None, None, T]:
+    """Helper for asyncio.ensure_future().
+
+    Wraps awaitable (an object with __await__) into a coroutine
+    that will later be wrapped in a Task by ensure_future().
+    """
+    return (yield from awaitable.__await__())
+
+
+def run_coroutine_threadsafe(
+    coro: Coroutine[None, None, T],
+    loop: asyncio.AbstractEventLoop,
+    *,
+    lazy: bool,
+    name: str | None = None,
+    context: contextvars.Context | None = None,
+) -> asyncio.Future[T]:
+    """Submit a coroutine object to a given event loop.
+
+    Return an asyncio.Future to access the result.
+    """
+
+    if asyncio._get_running_loop() is loop:
+        return _ensure_future(coro, loop=loop, name=name, context=context, lazy=lazy)
+    else:
+        future: asyncio.Future[T] = asyncio.Future(loop=loop)
+
+        def callback() -> None:
+            try:
+                chain_future(
+                    _ensure_future(coro, loop=loop, name=name, context=context),
+                    future,
+                )
+            except (SystemExit, KeyboardInterrupt):
+                raise
+            except BaseException as exc:
+                future.set_exception(exc)
+                raise
+
+        loop.call_soon_threadsafe(callback, context=context)
+        return future
langgraph_api/utils.py
CHANGED
@@ -1,129 +0,0 @@
-import contextvars
-import uuid
-from collections.abc import AsyncIterator
-from contextlib import asynccontextmanager
-from datetime import datetime
-from typing import Any, Protocol, TypeAlias, TypeVar
-
-import structlog
-from langgraph_sdk import Auth
-from starlette.authentication import AuthCredentials, BaseUser
-from starlette.exceptions import HTTPException
-from starlette.schemas import BaseSchemaGenerator
-
-from langgraph_api.auth.custom import SimpleUser
-
-logger = structlog.stdlib.get_logger(__name__)
-
-
-T = TypeVar("T")
-Row: TypeAlias = dict[str, Any]
-AuthContext = contextvars.ContextVar[Auth.types.BaseAuthContext | None](
-    "AuthContext", default=None
-)
-
-
-@asynccontextmanager
-async def with_user(
-    user: BaseUser | None = None, auth: AuthCredentials | list[str] | None = None
-):
-    current = get_auth_ctx()
-    set_auth_ctx(user, auth)
-    yield
-    if current is None:
-        return
-    set_auth_ctx(current.user, AuthCredentials(scopes=current.permissions))
-
-
-def set_auth_ctx(
-    user: BaseUser | None, auth: AuthCredentials | list[str] | None
-) -> None:
-    if user is None and auth is None:
-        AuthContext.set(None)
-    else:
-        AuthContext.set(
-            Auth.types.BaseAuthContext(
-                permissions=(
-                    auth.scopes if isinstance(auth, AuthCredentials) else (auth or [])
-                ),
-                user=user or SimpleUser(""),
-            )
-        )
-
-
-def get_auth_ctx() -> Auth.types.BaseAuthContext | None:
-    return AuthContext.get()
-
-
-class AsyncCursorProto(Protocol):
-    async def fetchone(self) -> Row: ...
-
-    async def fetchall(self) -> list[Row]: ...
-
-    async def __aiter__(self) -> AsyncIterator[Row]:
-        yield ...
-
-
-class AsyncPipelineProto(Protocol):
-    async def sync(self) -> None: ...
-
-
-class AsyncConnectionProto(Protocol):
-    @asynccontextmanager
-    async def pipeline(self) -> AsyncIterator[AsyncPipelineProto]:
-        yield ...
-
-    async def execute(self, query: str, *args, **kwargs) -> AsyncCursorProto: ...
-
-
-async def fetchone(
-    it: AsyncIterator[T],
-    *,
-    not_found_code: int = 404,
-    not_found_detail: str | None = None,
-) -> T:
-    """Fetch the first row from an async iterator."""
-    try:
-        return await anext(it)
-    except StopAsyncIteration:
-        raise HTTPException(
-            status_code=not_found_code, detail=not_found_detail
-        ) from None
-
-
-def validate_uuid(uuid_str: str, invalid_uuid_detail: str | None) -> uuid.UUID:
-    try:
-        return uuid.UUID(uuid_str)
-    except ValueError:
-        raise HTTPException(status_code=422, detail=invalid_uuid_detail) from None
-
-
-def next_cron_date(schedule: str, base_time: datetime) -> datetime:
-    import croniter
-
-    cron_iter = croniter.croniter(schedule, base_time)
-    return cron_iter.get_next(datetime)
-
-
-class SchemaGenerator(BaseSchemaGenerator):
-    def __init__(self, base_schema: dict[str, Any]) -> None:
-        self.base_schema = base_schema
-
-    def get_schema(self, routes: list) -> dict[str, Any]:
-        schema = dict(self.base_schema)
-        schema.setdefault("paths", {})
-        endpoints_info = self.get_endpoints(routes)
-
-        for endpoint in endpoints_info:
-            try:
-                parsed = self.parse_docstring(endpoint.func)
-            except AssertionError:
-                logger.warning("Could not parse docstrings for route %s", endpoint.path)
-                parsed = {}
-
-            if endpoint.path not in schema["paths"]:
-                schema["paths"][endpoint.path] = {}
-
-            schema["paths"][endpoint.path][endpoint.http_method] = parsed
-
-        return schema
{langgraph_api-0.2.72.dist-info → langgraph_api-0.2.75.dist-info}/RECORD
CHANGED
@@ -1,14 +1,14 @@
-langgraph_api/__init__.py,sha256=
+langgraph_api/__init__.py,sha256=fGfmYDwqJaQwj-AKSkwGYRZdqQNoadp6S3aayGVqQ9E,23
 langgraph_api/asgi_transport.py,sha256=eqifhHxNnxvI7jJqrY1_8RjL4Fp9NdN4prEub2FWBt8,5091
-langgraph_api/asyncio.py,sha256=
+langgraph_api/asyncio.py,sha256=qrYEqPRrqtGq7E7KjcMC-ALyN79HkRnmp9rM2TAw9L8,9404
 langgraph_api/cli.py,sha256=13mKb-WT7fGx_yqcbWITPB9ICEHCrPzIP1ddZ5RbXbY,16015
 langgraph_api/command.py,sha256=3O9v3i0OPa96ARyJ_oJbLXkfO8rPgDhLCswgO9koTFA,768
 langgraph_api/config.py,sha256=jmtO5LXubet2Hl7S2tF2BRyB_q2yqCzFqCwN7n6za4Y,11727
-langgraph_api/cron_scheduler.py,sha256=
+langgraph_api/cron_scheduler.py,sha256=CiwZ-U4gDOdG9zl9dlr7mH50USUgNB2Fvb8YTKVRBN4,2625
 langgraph_api/errors.py,sha256=zlnl3xXIwVG0oGNKKpXf1an9Rn_SBDHSyhe53hU6aLw,1858
-langgraph_api/graph.py,sha256=
+langgraph_api/graph.py,sha256=pw_3jVZNe0stO5-Y8kLUuC8EJ5tFqdLu9fLpwUz4Hc4,23574
 langgraph_api/http.py,sha256=gYbxxjY8aLnsXeJymcJ7G7Nj_yToOGpPYQqmZ1_ggfA,5240
-langgraph_api/logging.py,sha256=
+langgraph_api/logging.py,sha256=LL2LNuMYFrqDhG_KbyKy9AoAPghcdlFj2T50zMyPddk,4182
 langgraph_api/metadata.py,sha256=Gx0b6YszLRjdWLDVN8OcVgC_YYQG_nQitPfUfgQx1w8,4648
 langgraph_api/patch.py,sha256=Dgs0PXHytekX4SUL6KsjjN0hHcOtGLvv1GRGbh6PswU,1408
 langgraph_api/queue_entrypoint.py,sha256=hC8j-A4cUxibusiiPJBlK0mkmChNZxNcXn5GVwL0yic,4889
@@ -17,16 +17,16 @@ langgraph_api/schema.py,sha256=a6it0h9ku4jrTXiW9MhnGok_wignyQ4cXBra67FiryM,5678
 langgraph_api/serde.py,sha256=8fQXg7T7RVUqj_jgOoSOJrWVpQDW0qJKjAjSsEhPHo4,4803
 langgraph_api/server.py,sha256=Z_VL-kIphybTRDWBIqHMfRhgCmAFyTRqAGlgnHQF0Zg,6973
 langgraph_api/sse.py,sha256=SLdtZmTdh5D8fbWrQjuY9HYLd2dg8Rmi6ZMmFMVc2iE,4204
-langgraph_api/state.py,sha256=
-langgraph_api/store.py,sha256=
-langgraph_api/stream.py,sha256=
+langgraph_api/state.py,sha256=NLl5YgLKppHJ7zfF0bXjsroXmIGCZez0IlDAKNGVy0g,2365
+langgraph_api/store.py,sha256=srRI0fQXNFo_RSUs4apucr4BEp_KrIseJksZXs32MlQ,4635
+langgraph_api/stream.py,sha256=EorM9BD7oiCvkRXlMqnOkBd9P1X3mEagS_oHp-_9aRQ,13669
 langgraph_api/thread_ttl.py,sha256=-Ox8NFHqUH3wGNdEKMIfAXUubY5WGifIgCaJ7npqLgw,1762
-langgraph_api/utils.py,sha256=
+langgraph_api/utils.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langgraph_api/validation.py,sha256=zMuKmwUEBjBgFMwAaeLZmatwGVijKv2sOYtYg7gfRtc,4950
 langgraph_api/webhook.py,sha256=1ncwO0rIZcj-Df9sxSnFEzd1gP1bfS4okeZQS8NSRoE,1382
 langgraph_api/worker.py,sha256=fL0pNEW9FaldEREq_4L-ivkxTr2R_rEpmVMi-zRx__U,14226
 langgraph_api/api/__init__.py,sha256=YVzpbn5IQotvuuLG9fhS9QMrxXfP4s4EpEMG0n4q3Nw,5625
-langgraph_api/api/assistants.py,sha256=
+langgraph_api/api/assistants.py,sha256=w7nXjEknDVHSuP228S8ZLh4bG0nRGnSwVP9pECQOK90,16247
 langgraph_api/api/mcp.py,sha256=RvRYgANqRzNQzSmgjNkq4RlKTtoEJYil04ot9lsmEtE,14352
 langgraph_api/api/meta.py,sha256=MU9Ehdo2M8oaxGVBXVQFNRP6qSTXyrsGXFcndRlnvIE,3924
 langgraph_api/api/openapi.py,sha256=362m6Ny8wOwZ6HrDK9JAVUzPkyLYWKeV1E71hPOaA0U,11278
@@ -72,6 +72,9 @@ langgraph_api/middleware/request_id.py,sha256=SDj3Yi3WvTbFQ2ewrPQBjAV8sYReOJGeIi
 langgraph_api/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langgraph_api/models/run.py,sha256=j1s9KRfFXgjKUudB9z7IVJ34Klo85PPeaVFtmWHhEdo,14514
 langgraph_api/tunneling/cloudflare.py,sha256=iKb6tj-VWPlDchHFjuQyep2Dpb-w2NGfJKt-WJG9LH0,3650
+langgraph_api/utils/__init__.py,sha256=92mSti9GfGdMRRWyESKQW5yV-75Z9icGHnIrBYvdypU,3619
+langgraph_api/utils/config.py,sha256=gONI0UsoSpuR72D9lSGAmpr-_iSMDFdD4M_tiXXjmNk,3936
+langgraph_api/utils/future.py,sha256=CGhUb_Ht4_CnTuXc2kI8evEn1gnMKYN0ce9ZyUkW5G4,7251
 langgraph_license/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langgraph_license/validation.py,sha256=ZKraAVJArAABKqrmHN-EN18ncoNUmRm500Yt1Sc7tUA,537
 langgraph_runtime/__init__.py,sha256=O4GgSmu33c-Pr8Xzxj_brcK5vkm70iNTcyxEjICFZxA,1075
@@ -86,8 +89,8 @@ langgraph_runtime/store.py,sha256=7mowndlsIroGHv3NpTSOZDJR0lCuaYMBoTnTrewjslw,11
 LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
 logging.json,sha256=3RNjSADZmDq38eHePMm1CbP6qZ71AmpBtLwCmKU9Zgo,379
 openapi.json,sha256=NVgY0hdCu6v5anNus9I-CVYKKcrDLKd1qhILy-kQpq8,142590
-langgraph_api-0.2.
-langgraph_api-0.2.
-langgraph_api-0.2.
-langgraph_api-0.2.
-langgraph_api-0.2.
+langgraph_api-0.2.75.dist-info/METADATA,sha256=2WD4F3joubu25ijCCW5czUAgFBv0-50WZVHhKiuYW48,3891
+langgraph_api-0.2.75.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+langgraph_api-0.2.75.dist-info/entry_points.txt,sha256=hGedv8n7cgi41PypMfinwS_HfCwA7xJIfS0jAp8htV8,78
+langgraph_api-0.2.75.dist-info/licenses/LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
+langgraph_api-0.2.75.dist-info/RECORD,,
{langgraph_api-0.2.72.dist-info → langgraph_api-0.2.75.dist-info}/WHEEL
File without changes
{langgraph_api-0.2.72.dist-info → langgraph_api-0.2.75.dist-info}/entry_points.txt
File without changes
{langgraph_api-0.2.72.dist-info → langgraph_api-0.2.75.dist-info}/licenses/LICENSE
File without changes