langgraph-api 0.0.14__py3-none-any.whl → 0.0.16__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of langgraph-api might be problematic.
- langgraph_api/api/__init__.py +2 -1
- langgraph_api/api/assistants.py +4 -4
- langgraph_api/api/store.py +67 -15
- langgraph_api/asyncio.py +5 -0
- langgraph_api/auth/custom.py +20 -5
- langgraph_api/config.py +1 -0
- langgraph_api/graph.py +6 -13
- langgraph_api/js/base.py +9 -0
- langgraph_api/js/build.mts +2 -0
- langgraph_api/js/client.mts +383 -409
- langgraph_api/js/client.new.mts +856 -0
- langgraph_api/js/errors.py +11 -0
- langgraph_api/js/package.json +3 -1
- langgraph_api/js/remote.py +16 -673
- langgraph_api/js/remote_new.py +693 -0
- langgraph_api/js/remote_old.py +665 -0
- langgraph_api/js/schema.py +29 -0
- langgraph_api/js/src/utils/serde.mts +7 -0
- langgraph_api/js/tests/api.test.mts +125 -8
- langgraph_api/js/tests/compose-postgres.yml +2 -1
- langgraph_api/js/tests/graphs/agent.mts +2 -0
- langgraph_api/js/tests/graphs/delay.mts +30 -0
- langgraph_api/js/tests/graphs/langgraph.json +2 -1
- langgraph_api/js/yarn.lock +870 -18
- langgraph_api/models/run.py +1 -0
- langgraph_api/queue.py +129 -31
- langgraph_api/route.py +8 -3
- langgraph_api/schema.py +1 -1
- langgraph_api/stream.py +12 -5
- langgraph_api/utils.py +11 -5
- {langgraph_api-0.0.14.dist-info → langgraph_api-0.0.16.dist-info}/METADATA +3 -3
- {langgraph_api-0.0.14.dist-info → langgraph_api-0.0.16.dist-info}/RECORD +37 -30
- langgraph_storage/ops.py +9 -2
- openapi.json +5 -5
- {langgraph_api-0.0.14.dist-info → langgraph_api-0.0.16.dist-info}/LICENSE +0 -0
- {langgraph_api-0.0.14.dist-info → langgraph_api-0.0.16.dist-info}/WHEEL +0 -0
- {langgraph_api-0.0.14.dist-info → langgraph_api-0.0.16.dist-info}/entry_points.txt +0 -0
langgraph_api/js/tests/api.test.mts

@@ -447,7 +447,7 @@ describe("runs", () => {
     await sql`DELETE FROM store`;
   });

-  it.
+  it.concurrent("list runs", async () => {
     const assistant = await client.assistants.create({ graphId: "agent" });
     const thread = await client.threads.create();
     await client.runs.wait(thread.thread_id, assistant.assistant_id, {
@@ -753,13 +753,13 @@ describe("runs", () => {
     messages = findLast(chunks, (i) => i.event === "values")?.data.messages;

     const threadAfterInterrupt = await client.threads.get(thread.thread_id);
-    expect(threadAfterInterrupt.status).toBe("
+    expect(threadAfterInterrupt.status).toBe("interrupted");

     expect(messages.at(-1)).not.toBeNull();
-    expect(messages.at(-1)?.content).toBe("
+    expect(messages.at(-1)?.content).toBe("begin");

     const state = await client.threads.getState(thread.thread_id);
-    expect(state.next).toEqual([]);
+    expect(state.next).toEqual(["tool"]);

     // continue after interrupt
     chunks = await gatherIterator(
@@ -817,6 +817,7 @@ describe("runs", () => {
     });

     const modifiedThread = await client.threads.get(thread.thread_id);
+    expect(modifiedThread.status).toBe("interrupted");
     expect(modifiedThread.metadata?.modified).toBe(true);

     const stateAfterModify = await client.threads.getState<AgentState>(
@@ -836,22 +837,42 @@ describe("runs", () => {
       })
     );

+    const threadAfterContinue = await client.threads.get(thread.thread_id);
+    expect(threadAfterContinue.status).toBe("idle");
+
     expect(chunks.filter((i) => i.event === "error").length).toBe(0);
     messages = findLast(chunks, (i) => i.event === "values")?.data.messages;

-    expect(messages.length).toBe(
-    expect(messages[
+    expect(messages.length).toBe(4);
+    expect(messages[2].content).toBe(`tool_call__modified`);
     expect(messages.at(-1)?.content).toBe("end");

     // get the history
     const history = await client.threads.getHistory<AgentState>(
       thread.thread_id
     );
-    expect(history.length).toBe(
+    expect(history.length).toBe(6);
     expect(history[0].next.length).toBe(0);
-    expect(history[0].values.messages.length).toBe(
+    expect(history[0].values.messages.length).toBe(4);
     expect(history.at(-1)?.next).toEqual(["__start__"]);
   });
+
+  it.concurrent("interrupt before", async () => {
+    const assistant = await client.assistants.create({ graphId: "agent" });
+    let thread = await client.threads.create();
+    const input = {
+      messages: [{ type: "human", content: "foo", id: "initial-message" }],
+    };
+
+    await client.runs.wait(thread.thread_id, assistant.assistant_id, {
+      input,
+      interruptBefore: ["agent"],
+      config: globalConfig,
+    });
+
+    thread = await client.threads.get(thread.thread_id);
+    expect(thread.status).toBe("interrupted");
+  });
 });

 describe("shared state", () => {
@@ -1650,3 +1671,99 @@ describe("errors", () => {
     expect(runState.status).toEqual("error");
   });
 });
+
+describe("long running tasks", () => {
+  it.concurrent.for([1000, 8000, 12000])(
+    "long running task with %dms delay",
+    { timeout: 15_000 },
+    async (delay) => {
+      const assistant = await client.assistants.create({ graphId: "delay" });
+      const thread = await client.threads.create();
+
+      const run = await client.runs.create(
+        thread.thread_id,
+        assistant.assistant_id,
+        {
+          input: { messages: [], delay },
+          config: globalConfig,
+        }
+      );
+
+      await client.runs.join(thread.thread_id, run.run_id);
+
+      const runState = await client.runs.get(thread.thread_id, run.run_id);
+      expect(runState.status).toEqual("success");
+
+      const runResult = await client.threads.getState<{
+        messages: BaseMessageLike[];
+        delay: number;
+      }>(thread.thread_id);
+
+      expect(runResult.values.messages).toMatchObject([
+        { content: `finished after ${delay}ms` },
+      ]);
+    }
+  );
+});
+
+// Not implemented in JS yet
+describe.skip("command update state", () => {
+  it("updates state via commands", async () => {
+    const assistant = await client.assistants.create({ graphId: "agent" });
+    const thread = await client.threads.create();
+
+    const input = { messages: [{ role: "human", content: "foo" }] };
+
+    // dict-based updates
+    await client.runs.wait(thread.thread_id, assistant.assistant_id, {
+      input,
+      config: globalConfig,
+    });
+    let stream = await gatherIterator(
+      client.runs.stream(thread.thread_id, assistant.assistant_id, {
+        command: { update: { keyOne: "value3", keyTwo: "value4" } },
+        config: globalConfig,
+      })
+    );
+    expect(stream.filter((chunk) => chunk.event === "error")).toEqual([]);
+
+    let state = await client.threads.getState<{
+      keyOne: string;
+      keyTwo: string;
+    }>(thread.thread_id);
+
+    expect(state.values).toMatchObject({
+      keyOne: "value3",
+      keyTwo: "value4",
+    });
+
+    // list-based updates
+    await client.runs.wait(thread.thread_id, assistant.assistant_id, {
+      input,
+      config: globalConfig,
+    });
+    stream = await gatherIterator(
+      client.runs.stream(thread.thread_id, assistant.assistant_id, {
+        command: {
+          update: [
+            ["keyOne", "value1"],
+            ["keyTwo", "value2"],
+          ],
+        },
+        config: globalConfig,
+      })
+    );
+
+    expect(stream.filter((chunk) => chunk.event === "error")).toEqual([]);
+
+    state = await client.threads.getState<{
+      keyOne: string;
+      keyTwo: string;
+    }>(thread.thread_id);
+
+    expect(state.values).toMatchObject({
+      keyOne: "value1",
+      keyTwo: "value2",
+    });
+  });
+});
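For reference, the new "long running tasks" suite boils down to a create / join / inspect flow against the runs API. The sketch below restates that flow as a standalone helper using the same SDK calls the tests rely on; the apiUrl value and the "delay" graph registration are assumptions borrowed from the test fixtures, not something this package ships.

```ts
import { Client } from "@langchain/langgraph-sdk";

// Assumed local deployment URL; point this at your own langgraph-api server.
const client = new Client({ apiUrl: "http://localhost:9123" });

// Enqueue a run on the hypothetical "delay" graph, block until it finishes,
// then return its final status and thread state.
async function runDelayGraph(delayMs: number) {
  const assistant = await client.assistants.create({ graphId: "delay" });
  const thread = await client.threads.create();

  const run = await client.runs.create(thread.thread_id, assistant.assistant_id, {
    input: { messages: [], delay: delayMs },
  });

  // join() resolves once the background run has completed.
  await client.runs.join(thread.thread_id, run.run_id);

  const runState = await client.runs.get(thread.thread_id, run.run_id);
  const state = await client.threads.getState(thread.thread_id);
  return { status: runState.status, values: state.values };
}
```

Usage would be e.g. `await runDelayGraph(1000)`, mirroring the shortest case in the test matrix.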
langgraph_api/js/tests/compose-postgres.yml

@@ -34,7 +34,7 @@ services:
         ADD . /deps/graphs
         WORKDIR /deps/graphs
         RUN yarn install --frozen-lockfile
-        ENV LANGSERVE_GRAPHS='{"agent":"./agent.mts:graph", "nested": "./nested.mts:graph", "weather": "./weather.mts:graph", "error": "./error.mts:graph"}'
+        ENV LANGSERVE_GRAPHS='{"agent":"./agent.mts:graph", "nested": "./nested.mts:graph", "weather": "./weather.mts:graph", "error": "./error.mts:graph", "delay": "./delay.mts:graph"}'
         ENV LANGGRAPH_CONFIG='{"agent": {"configurable": {"model_name": "openai"}}}'
         RUN tsx /api/langgraph_api/js/build.mts
     depends_on:
@@ -54,3 +54,4 @@ services:
       DATABASE_URI: postgres://postgres:postgres@langgraph-postgres:5432/postgres?sslmode=disable
       N_JOBS_PER_WORKER: "2"
       LANGGRAPH_CLOUD_LICENSE_KEY: ${LANGGRAPH_CLOUD_LICENSE_KEY}
+      FF_JS_ZEROMQ_ENABLED: ${FF_JS_ZEROMQ_ENABLED}
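The compose change also threads a new FF_JS_ZEROMQ_ENABLED variable into the test container, which lines up with the remote.py split into remote_old.py / remote_new.py in the file list. This diff does not show how the flag is parsed; the snippet below is only a hypothetical sketch of reading such a boolean env flag on the Node side, not the package's actual wiring.

```ts
// Hypothetical helper; the real flag handling lives inside langgraph-api and may differ.
function isZeroMqEnabled(): boolean {
  const raw = process.env.FF_JS_ZEROMQ_ENABLED ?? "";
  return ["1", "true", "yes"].includes(raw.trim().toLowerCase());
}

// Example: choose between the old and new transport paths behind the flag.
const transport = isZeroMqEnabled() ? "zeromq" : "legacy";
console.log(`JS remote transport: ${transport}`);
```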
langgraph_api/js/tests/graphs/agent.mts

@@ -18,6 +18,8 @@ const GraphAnnotationOutput = Annotation.Root({
   }),
   sharedStateValue: Annotation<string | null>(),
   interrupt: Annotation<boolean>(),
+  keyOne: Annotation<string | null>(),
+  keyTwo: Annotation<string | null>(),
 });

 const GraphAnnotationInput = Annotation.Root({
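These two channels back the (currently skipped) "command update state" tests above. As a rough illustration of how plain Annotation<T>() channels behave, here is a minimal graph with the same keyOne / keyTwo channels; the graph and node names are invented for the example.

```ts
import { Annotation, StateGraph, START, END } from "@langchain/langgraph";

// Two last-value channels, mirroring the ones added to the test graph.
const KeyState = Annotation.Root({
  keyOne: Annotation<string | null>(),
  keyTwo: Annotation<string | null>(),
});

// A node's return value is merged into the state channel by channel.
const setKeys = async () => ({ keyOne: "value1", keyTwo: "value2" });

export const keysGraph = new StateGraph(KeyState)
  .addNode("set_keys", setKeys)
  .addEdge(START, "set_keys")
  .addEdge("set_keys", END)
  .compile();

// Invoking the graph yields the last value written to each channel:
// await keysGraph.invoke({ keyOne: null, keyTwo: null })
//   -> { keyOne: "value1", keyTwo: "value2" }
```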
langgraph_api/js/tests/graphs/delay.mts (new file)

@@ -0,0 +1,30 @@
+import {
+  MessagesAnnotation,
+  StateGraph,
+  END,
+  START,
+  Annotation,
+} from "@langchain/langgraph";
+
+const StateSchema = Annotation.Root({
+  ...MessagesAnnotation.spec,
+  delay: Annotation<number>(),
+});
+
+const longRunning = async (
+  state: typeof StateSchema.State
+): Promise<typeof StateSchema.Update> => {
+  if (state.delay === -1) {
+    while (true) {
+      // hang the event loop
+    }
+  }
+  await new Promise((resolve) => setTimeout(resolve, state.delay));
+  return { messages: [`finished after ${state.delay}ms`] };
+};
+
+export const graph = new StateGraph(StateSchema)
+  .addNode("long_running", longRunning)
+  .addEdge(START, "long_running")
+  .addEdge("long_running", END)
+  .compile();
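Because delay.mts exports a compiled graph, it can also be smoke-tested directly, without going through the API server. A hypothetical local check (assuming an ESM runner such as tsx that resolves the .mts import) could look like:

```ts
// Invoke the delay graph in-process; 250 ms is an arbitrary example value.
import { graph } from "./delay.mts";

const result = await graph.invoke({ messages: [], delay: 250 });

// The long_running node appends a single message once the delay elapses,
// so the last message's content should be "finished after 250ms".
console.log(result.messages.at(-1)?.content);
```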