langgraph-api 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of langgraph-api might be problematic. Click here for more details.
- LICENSE +93 -0
- langgraph_api/__init__.py +0 -0
- langgraph_api/api/__init__.py +63 -0
- langgraph_api/api/assistants.py +326 -0
- langgraph_api/api/meta.py +71 -0
- langgraph_api/api/openapi.py +32 -0
- langgraph_api/api/runs.py +463 -0
- langgraph_api/api/store.py +116 -0
- langgraph_api/api/threads.py +263 -0
- langgraph_api/asyncio.py +201 -0
- langgraph_api/auth/__init__.py +0 -0
- langgraph_api/auth/langsmith/__init__.py +0 -0
- langgraph_api/auth/langsmith/backend.py +67 -0
- langgraph_api/auth/langsmith/client.py +145 -0
- langgraph_api/auth/middleware.py +41 -0
- langgraph_api/auth/noop.py +14 -0
- langgraph_api/cli.py +209 -0
- langgraph_api/config.py +70 -0
- langgraph_api/cron_scheduler.py +60 -0
- langgraph_api/errors.py +52 -0
- langgraph_api/graph.py +314 -0
- langgraph_api/http.py +168 -0
- langgraph_api/http_logger.py +89 -0
- langgraph_api/js/.gitignore +2 -0
- langgraph_api/js/build.mts +49 -0
- langgraph_api/js/client.mts +849 -0
- langgraph_api/js/global.d.ts +6 -0
- langgraph_api/js/package.json +33 -0
- langgraph_api/js/remote.py +673 -0
- langgraph_api/js/server_sent_events.py +126 -0
- langgraph_api/js/src/graph.mts +88 -0
- langgraph_api/js/src/hooks.mjs +12 -0
- langgraph_api/js/src/parser/parser.mts +443 -0
- langgraph_api/js/src/parser/parser.worker.mjs +12 -0
- langgraph_api/js/src/schema/types.mts +2136 -0
- langgraph_api/js/src/schema/types.template.mts +74 -0
- langgraph_api/js/src/utils/importMap.mts +85 -0
- langgraph_api/js/src/utils/pythonSchemas.mts +28 -0
- langgraph_api/js/src/utils/serde.mts +21 -0
- langgraph_api/js/tests/api.test.mts +1566 -0
- langgraph_api/js/tests/compose-postgres.yml +56 -0
- langgraph_api/js/tests/graphs/.gitignore +1 -0
- langgraph_api/js/tests/graphs/agent.mts +127 -0
- langgraph_api/js/tests/graphs/error.mts +17 -0
- langgraph_api/js/tests/graphs/langgraph.json +8 -0
- langgraph_api/js/tests/graphs/nested.mts +44 -0
- langgraph_api/js/tests/graphs/package.json +7 -0
- langgraph_api/js/tests/graphs/weather.mts +57 -0
- langgraph_api/js/tests/graphs/yarn.lock +159 -0
- langgraph_api/js/tests/parser.test.mts +870 -0
- langgraph_api/js/tests/utils.mts +17 -0
- langgraph_api/js/yarn.lock +1340 -0
- langgraph_api/lifespan.py +41 -0
- langgraph_api/logging.py +121 -0
- langgraph_api/metadata.py +101 -0
- langgraph_api/models/__init__.py +0 -0
- langgraph_api/models/run.py +229 -0
- langgraph_api/patch.py +42 -0
- langgraph_api/queue.py +245 -0
- langgraph_api/route.py +118 -0
- langgraph_api/schema.py +190 -0
- langgraph_api/serde.py +124 -0
- langgraph_api/server.py +48 -0
- langgraph_api/sse.py +118 -0
- langgraph_api/state.py +67 -0
- langgraph_api/stream.py +289 -0
- langgraph_api/utils.py +60 -0
- langgraph_api/validation.py +141 -0
- langgraph_api-0.0.1.dist-info/LICENSE +93 -0
- langgraph_api-0.0.1.dist-info/METADATA +26 -0
- langgraph_api-0.0.1.dist-info/RECORD +86 -0
- langgraph_api-0.0.1.dist-info/WHEEL +4 -0
- langgraph_api-0.0.1.dist-info/entry_points.txt +3 -0
- langgraph_license/__init__.py +0 -0
- langgraph_license/middleware.py +21 -0
- langgraph_license/validation.py +11 -0
- langgraph_storage/__init__.py +0 -0
- langgraph_storage/checkpoint.py +94 -0
- langgraph_storage/database.py +190 -0
- langgraph_storage/ops.py +1523 -0
- langgraph_storage/queue.py +108 -0
- langgraph_storage/retry.py +27 -0
- langgraph_storage/store.py +28 -0
- langgraph_storage/ttl_dict.py +54 -0
- logging.json +22 -0
- openapi.json +4304 -0
|
@@ -0,0 +1,108 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import logging
|
|
3
|
+
from collections import defaultdict
|
|
4
|
+
from dataclasses import dataclass
|
|
5
|
+
from uuid import UUID
|
|
6
|
+
|
|
7
|
+
# Module-level logger named after this module (standard logging convention).
logger = logging.getLogger(__name__)
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
@dataclass
class Message:
    """A pub/sub payload routed through the in-memory stream manager."""

    # Routing topic as raw bytes, e.g. b"run:<run_id>:control"
    # (see stop_queue, which builds exactly that topic).
    topic: bytes
    # Opaque payload bytes, serialized elsewhere; e.g. b"done" on shutdown.
    data: bytes
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class ContextQueue(asyncio.Queue):
|
|
17
|
+
"""Queue that supports async context manager protocol"""
|
|
18
|
+
|
|
19
|
+
async def __aenter__(self):
|
|
20
|
+
return self
|
|
21
|
+
|
|
22
|
+
async def __aexit__(
|
|
23
|
+
self,
|
|
24
|
+
exc_type: type[BaseException] | None,
|
|
25
|
+
exc_val: BaseException | None,
|
|
26
|
+
exc_tb: object | None,
|
|
27
|
+
) -> None:
|
|
28
|
+
# Clear the queue
|
|
29
|
+
while not self.empty():
|
|
30
|
+
try:
|
|
31
|
+
self.get_nowait()
|
|
32
|
+
except asyncio.QueueEmpty:
|
|
33
|
+
break
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class StreamManager:
    """Fan-out of run events to any number of in-process subscriber queues.

    ``queues`` maps a run id to the list of live subscriber queues;
    ``control_queues`` retains every control message seen for a run so
    that late subscribers still receive them (see ``add_queue``).
    """

    def __init__(self):
        self.queues = defaultdict(list)  # Dict[UUID, List[asyncio.Queue]]
        # Control messages kept for replay to subscribers that attach late.
        # NOTE(review): grows unboundedly per run until stop_queue() clears it.
        self.control_queues = defaultdict(list)

    def get_queues(self, run_id: UUID) -> list[asyncio.Queue]:
        """Return the live subscriber queues for ``run_id`` (may be empty)."""
        return self.queues[run_id]

    async def put(self, run_id: UUID, message: Message) -> None:
        """Deliver ``message`` to every subscriber queue of ``run_id``.

        Control messages are additionally recorded for replay to future
        subscribers. Delivery failures are logged, never raised.
        """
        topic = message.topic.decode()
        if "control" in topic:
            self.control_queues[run_id].append(message)
        # .get() (not the defaultdict index) avoids creating an empty entry
        # for runs that have no subscribers.
        queues = self.queues.get(run_id, [])
        coros = [queue.put(message) for queue in queues]
        results = await asyncio.gather(*coros, return_exceptions=True)
        for result in results:
            if isinstance(result, Exception):
                # We are not inside an ``except`` block here, so calling
                # logger.exception() would log a bogus "NoneType: None"
                # traceback; pass the exception explicitly instead.
                logger.error(
                    "Failed to put message in queue: %s", result, exc_info=result
                )

    async def add_queue(self, run_id: UUID) -> asyncio.Queue:
        """Register and return a new subscriber queue for ``run_id``.

        Any control messages already recorded for the run are replayed
        into the new queue first, so late subscribers don't miss them.
        """
        queue = ContextQueue()
        self.queues[run_id].append(queue)
        for control_msg in self.control_queues[run_id]:
            try:
                await queue.put(control_msg)
            except Exception:
                logger.exception(
                    f"Failed to put control message in queue: {control_msg}"
                )

        return queue

    async def remove_queue(self, run_id: UUID, queue: asyncio.Queue):
        """Detach ``queue`` from ``run_id``; drop the run entry once empty.

        Idempotent: removing a queue that was never (or already) attached
        is a no-op rather than a ValueError.
        """
        if run_id in self.queues:
            if queue in self.queues[run_id]:
                self.queues[run_id].remove(queue)
            if not self.queues[run_id]:
                del self.queues[run_id]
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
# Global instance: process-wide singleton, replaced wholesale by
# start_queue() and read via get_stream_manager().
stream_manager = StreamManager()
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
async def start_queue() -> None:
    """Initialize the queue system.

    The in-memory implementation simply swaps in a brand-new
    ``StreamManager`` so no queues or control messages from a previous
    run survive into this one.
    """
    global stream_manager
    stream_manager = StreamManager()
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
async def stop_queue() -> None:
    """Clean up the queue system.

    Sends a terminal 'done' control message to every active subscriber
    queue (so consumers blocked on get() can observe shutdown), then
    clears all queues and stored control messages.
    """
    global stream_manager

    # Send 'done' message to all active queues before clearing
    for run_id in list(stream_manager.queues.keys()):
        control_message = Message(topic=f"run:{run_id}:control".encode(), data=b"done")
        for queue in stream_manager.queues[run_id]:
            try:
                await queue.put(control_message)
            except Exception:
                # RuntimeError is already an Exception subclass, so a single
                # clause suffices; errors are ignored during shutdown.
                pass

    # Clear all stored data
    stream_manager.queues.clear()
    stream_manager.control_queues.clear()
|
|
104
|
+
|
|
105
|
+
|
|
106
|
+
def get_stream_manager() -> StreamManager:
    """Accessor for the module-level ``StreamManager`` singleton.

    Reads the current global on every call, so callers observe the
    instance installed by the most recent ``start_queue()``.
    """
    manager = stream_manager
    return manager
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import functools
|
|
3
|
+
from collections.abc import Callable
|
|
4
|
+
from typing import ParamSpec, TypeVar
|
|
5
|
+
|
|
6
|
+
P = ParamSpec("P")
|
|
7
|
+
T = TypeVar("T")
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
RETRIABLE_EXCEPTIONS: tuple[type[BaseException], ...] = ()
|
|
11
|
+
OVERLOADED_EXCEPTIONS: tuple[type[BaseException], ...] = ()
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def retry_db(func: Callable[P, T]) -> Callable[P, T]:
|
|
15
|
+
attempts = 3
|
|
16
|
+
|
|
17
|
+
@functools.wraps(func)
|
|
18
|
+
async def wrapper(*args, **kwargs):
|
|
19
|
+
for i in range(attempts):
|
|
20
|
+
if i == attempts - 1:
|
|
21
|
+
return await func(*args, **kwargs)
|
|
22
|
+
try:
|
|
23
|
+
return await func(*args, **kwargs)
|
|
24
|
+
except RETRIABLE_EXCEPTIONS:
|
|
25
|
+
await asyncio.sleep(0.01)
|
|
26
|
+
|
|
27
|
+
return wrapper
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
import os
|
|
2
|
+
|
|
3
|
+
from langgraph.checkpoint.memory import PersistentDict
|
|
4
|
+
from langgraph.store.memory import InMemoryStore
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class DiskBackedInMemStore(InMemoryStore):
    """InMemoryStore whose backing dict is persisted to disk.

    NOTE(review): this swaps out the parent's ``_data`` attribute after
    ``super().__init__`` — assumes InMemoryStore keeps its records in a
    plain-dict-compatible ``_data`` mapping; confirm against the
    installed langgraph version.
    """

    def __init__(self, *args, filename=None, **kwargs):
        super().__init__(*args, **kwargs)
        # Path of the on-disk pickle file backing this store.
        self.filename = filename
        # Replace the in-memory mapping with one persisted to ``filename``.
        self._data = PersistentDict(dict, filename=self.filename)
        try:
            self._data.load()
        except FileNotFoundError:
            # First run: no persisted file yet, start empty.
            pass

    def close(self):
        # Close the backing file (presumably flushing pending writes —
        # see PersistentDict); the store must not be used afterwards.
        self._data.close()
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
# Directory and file used for on-disk persistence of the process store.
_STORE_DIR = ".langgraph_api"
_STORE_FILE = os.path.join(_STORE_DIR, "store.pckl")
# makedirs with exist_ok avoids the TOCTOU race between an existence
# check and mkdir when several processes start concurrently.
os.makedirs(_STORE_DIR, exist_ok=True)
# Process-wide singleton store instance handed out by Store().
STORE = DiskBackedInMemStore(filename=_STORE_FILE)
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def Store(*args, **kwargs):
    """Factory shim returning the process-wide ``STORE`` singleton.

    Arguments are accepted only for signature compatibility with real
    store factories and are ignored.
    """
    del args, kwargs  # intentionally unused
    return STORE
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
# TODO: Make this class thread-safe
class TTLDict:
    """Dict-like mapping whose entries expire after a per-key timeout.

    Expiry is driven by ``asyncio`` tasks, so ``set`` must be called
    while an event loop is running. Not thread-safe (see TODO).
    """

    def __init__(self, timeout=10):
        self._store = {}     # key -> value
        self._timeouts = {}  # key -> TTL (seconds) chosen for that key
        self._tasks = {}     # key -> pending expiry task
        self._timeout = timeout  # default TTL in seconds

    async def _remove_key_after_timeout(self, key):
        """Sleep for the key's TTL, then drop it if still present."""
        await asyncio.sleep(self._timeouts[key])
        if key in self._store:
            del self._store[key]
            del self._timeouts[key]
        # Drop the bookkeeping entry for this (now finished) task; the
        # original left it behind, leaking one entry per expired key.
        self._tasks.pop(key, None)

    def set(self, key, value, timeout=None):
        """Store ``value`` under ``key``, expiring after ``timeout`` seconds.

        Falls back to the instance default when ``timeout`` is None.
        Re-setting a key restarts its expiry clock.
        """
        self._store[key] = value
        key_timeout = timeout if timeout is not None else self._timeout
        self._timeouts[key] = key_timeout

        # Cancel the previous expiry task so the clock restarts.
        if key in self._tasks:
            self._tasks[key].cancel()

        # Schedule the removal of the key.
        self._tasks[key] = asyncio.create_task(self._remove_key_after_timeout(key))

    def get(self, key, default=None):
        """Return the value for ``key``, or ``default`` if absent/expired."""
        return self._store.get(key, default)

    def __contains__(self, key):
        return key in self._store

    def remove(self, key):
        """Eagerly remove ``key`` and cancel its pending expiry task."""
        if key in self._store:
            del self._store[key]
            del self._timeouts[key]
        if key in self._tasks:
            self._tasks[key].cancel()
            del self._tasks[key]

    def keys(self):
        return self._store.keys()

    def values(self):
        return self._store.values()

    def items(self):
        return self._store.items()

    def __repr__(self):
        return f"{self.__class__.__name__}({self._store})"
|
logging.json
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
{
|
|
2
|
+
"version": 1,
|
|
3
|
+
"incremental": false,
|
|
4
|
+
"disable_existing_loggers": false,
|
|
5
|
+
"formatters": {
|
|
6
|
+
"simple": {
|
|
7
|
+
"class": "langgraph_api.logging.Formatter"
|
|
8
|
+
}
|
|
9
|
+
},
|
|
10
|
+
"handlers": {
|
|
11
|
+
"console": {
|
|
12
|
+
"class": "logging.StreamHandler",
|
|
13
|
+
"formatter": "simple",
|
|
14
|
+
"stream": "ext://sys.stdout"
|
|
15
|
+
}
|
|
16
|
+
},
|
|
17
|
+
"root": {
|
|
18
|
+
"handlers": [
|
|
19
|
+
"console"
|
|
20
|
+
]
|
|
21
|
+
}
|
|
22
|
+
}
|