langgraph-api 0.0.1 (langgraph_api-0.0.1-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of langgraph-api might be problematic.
- LICENSE +93 -0
- langgraph_api/__init__.py +0 -0
- langgraph_api/api/__init__.py +63 -0
- langgraph_api/api/assistants.py +326 -0
- langgraph_api/api/meta.py +71 -0
- langgraph_api/api/openapi.py +32 -0
- langgraph_api/api/runs.py +463 -0
- langgraph_api/api/store.py +116 -0
- langgraph_api/api/threads.py +263 -0
- langgraph_api/asyncio.py +201 -0
- langgraph_api/auth/__init__.py +0 -0
- langgraph_api/auth/langsmith/__init__.py +0 -0
- langgraph_api/auth/langsmith/backend.py +67 -0
- langgraph_api/auth/langsmith/client.py +145 -0
- langgraph_api/auth/middleware.py +41 -0
- langgraph_api/auth/noop.py +14 -0
- langgraph_api/cli.py +209 -0
- langgraph_api/config.py +70 -0
- langgraph_api/cron_scheduler.py +60 -0
- langgraph_api/errors.py +52 -0
- langgraph_api/graph.py +314 -0
- langgraph_api/http.py +168 -0
- langgraph_api/http_logger.py +89 -0
- langgraph_api/js/.gitignore +2 -0
- langgraph_api/js/build.mts +49 -0
- langgraph_api/js/client.mts +849 -0
- langgraph_api/js/global.d.ts +6 -0
- langgraph_api/js/package.json +33 -0
- langgraph_api/js/remote.py +673 -0
- langgraph_api/js/server_sent_events.py +126 -0
- langgraph_api/js/src/graph.mts +88 -0
- langgraph_api/js/src/hooks.mjs +12 -0
- langgraph_api/js/src/parser/parser.mts +443 -0
- langgraph_api/js/src/parser/parser.worker.mjs +12 -0
- langgraph_api/js/src/schema/types.mts +2136 -0
- langgraph_api/js/src/schema/types.template.mts +74 -0
- langgraph_api/js/src/utils/importMap.mts +85 -0
- langgraph_api/js/src/utils/pythonSchemas.mts +28 -0
- langgraph_api/js/src/utils/serde.mts +21 -0
- langgraph_api/js/tests/api.test.mts +1566 -0
- langgraph_api/js/tests/compose-postgres.yml +56 -0
- langgraph_api/js/tests/graphs/.gitignore +1 -0
- langgraph_api/js/tests/graphs/agent.mts +127 -0
- langgraph_api/js/tests/graphs/error.mts +17 -0
- langgraph_api/js/tests/graphs/langgraph.json +8 -0
- langgraph_api/js/tests/graphs/nested.mts +44 -0
- langgraph_api/js/tests/graphs/package.json +7 -0
- langgraph_api/js/tests/graphs/weather.mts +57 -0
- langgraph_api/js/tests/graphs/yarn.lock +159 -0
- langgraph_api/js/tests/parser.test.mts +870 -0
- langgraph_api/js/tests/utils.mts +17 -0
- langgraph_api/js/yarn.lock +1340 -0
- langgraph_api/lifespan.py +41 -0
- langgraph_api/logging.py +121 -0
- langgraph_api/metadata.py +101 -0
- langgraph_api/models/__init__.py +0 -0
- langgraph_api/models/run.py +229 -0
- langgraph_api/patch.py +42 -0
- langgraph_api/queue.py +245 -0
- langgraph_api/route.py +118 -0
- langgraph_api/schema.py +190 -0
- langgraph_api/serde.py +124 -0
- langgraph_api/server.py +48 -0
- langgraph_api/sse.py +118 -0
- langgraph_api/state.py +67 -0
- langgraph_api/stream.py +289 -0
- langgraph_api/utils.py +60 -0
- langgraph_api/validation.py +141 -0
- langgraph_api-0.0.1.dist-info/LICENSE +93 -0
- langgraph_api-0.0.1.dist-info/METADATA +26 -0
- langgraph_api-0.0.1.dist-info/RECORD +86 -0
- langgraph_api-0.0.1.dist-info/WHEEL +4 -0
- langgraph_api-0.0.1.dist-info/entry_points.txt +3 -0
- langgraph_license/__init__.py +0 -0
- langgraph_license/middleware.py +21 -0
- langgraph_license/validation.py +11 -0
- langgraph_storage/__init__.py +0 -0
- langgraph_storage/checkpoint.py +94 -0
- langgraph_storage/database.py +190 -0
- langgraph_storage/ops.py +1523 -0
- langgraph_storage/queue.py +108 -0
- langgraph_storage/retry.py +27 -0
- langgraph_storage/store.py +28 -0
- langgraph_storage/ttl_dict.py +54 -0
- logging.json +22 -0
- openapi.json +4304 -0
langgraph_api/js/tests/api.test.mts
@@ -0,0 +1,1566 @@
import { describe, expect, it, beforeEach, beforeAll } from "vitest";
import { Client } from "@langchain/langgraph-sdk";

import postgres from "postgres";
import { findLast, gatherIterator } from "./utils.mts";
import {
  BaseMessageFields,
  BaseMessageLike,
  MessageType,
} from "@langchain/core/messages";
import { randomUUID } from "crypto";

const sql = postgres(
  process.env.POSTGRES_URI ??
    "postgres://postgres:postgres@127.0.0.1:5433/postgres?sslmode=disable"
);

const client = new Client({
  // apiUrl: process.env["LANGGRAPH_ENDPOINT"] ?? "http://localhost:9123",
  apiUrl: "http://localhost:9123",
});

// Passed to all invocation requests as the graph now requires this field to be present
// in `configurable` due to a new `SharedValue` field requiring it.
const globalConfig = {
  configurable: {
    user_id: "123",
  },
};

// TODO: this is not exported anywhere in JS
// we should support only the flattened one
type BaseMessage = {
  type: MessageType | "user" | "assistant" | "placeholder";
} & BaseMessageFields;

interface AgentState {
  messages: Array<BaseMessage>;
  sharedStateValue?: string | null;
}

beforeAll(async () => {
  await sql`DELETE FROM thread`;
  await sql`DELETE FROM store`;
  await sql`DELETE FROM assistant WHERE metadata->>'created_by' is null OR metadata->>'created_by' != 'system'`;
});

describe("assistants", () => {
  it("create read update delete", async () => {
    const graphId = "agent";
    const config = { configurable: { model_name: "gpt" } };

    let res = await client.assistants.create({ graphId, config });
    expect(res).toMatchObject({ graph_id: graphId, config });

    const metadata = { name: "woof" };
    await client.assistants.update(res.assistant_id, { graphId, metadata });

    res = await client.assistants.get(res.assistant_id);
    expect(res).toMatchObject({ graph_id: graphId, config, metadata });

    await client.assistants.delete(res.assistant_id);
    expect(() => client.assistants.get(res.assistant_id)).rejects.toThrow(
      "HTTP 404: Not Found"
    );
  });

  it("schemas", async () => {
    const graphId = "agent";
    const config = { configurable: { model: "openai" } };

    let res = await client.assistants.create({ graphId, config });
    expect(res).toMatchObject({ graph_id: graphId, config });

    res = await client.assistants.get(res.assistant_id);
    expect(res).toMatchObject({ graph_id: graphId, config });

    const graph = await client.assistants.getGraph(res.assistant_id);
    expect(graph).toMatchObject({
      nodes: expect.arrayContaining([
        { id: "__start__", type: "unknown", data: "__start__" },
        { id: "__end__", type: "unknown", data: "__end__" },
        { id: "agent", type: "unknown", data: "agent" },
        { id: "tool", type: "unknown", data: "tool" },
      ]),
      edges: expect.arrayContaining([
        { source: "tool", target: "agent" },
        { source: "agent", target: "tool", conditional: true },
        { source: "__start__", target: "agent" },
        { source: "agent", target: "__end__", conditional: true },
      ]),
    });

    const schemas = await client.assistants.getSchemas(res.assistant_id);

    expect(schemas.input_schema).not.toBe(null);
    expect(schemas.output_schema).not.toBe(null);
    expect(schemas.config_schema).toMatchObject({
      type: "object",
      properties: { model_name: { type: "string" } },
      $schema: "http://json-schema.org/draft-07/schema#",
    });

    expect(schemas.state_schema).toMatchObject({
      type: "object",
      properties: {
        messages: {
          type: "array",
          items: {
            $ref: "#/definitions/BaseMessage",
          },
        },
      },
      definitions: {
        BaseMessage: {
          oneOf: [
            { $ref: "#/definitions/BaseMessageChunk" },
            { $ref: "#/definitions/ToolMessage" },
            { $ref: "#/definitions/AIMessage" },
            { $ref: "#/definitions/ChatMessage" },
            { $ref: "#/definitions/FunctionMessage" },
            { $ref: "#/definitions/HumanMessage" },
            { $ref: "#/definitions/SystemMessage" },
            { $ref: "#/definitions/RemoveMessage" },
          ],
        },
      },
      $schema: "http://json-schema.org/draft-07/schema#",
    });

    await client.assistants.delete(res.assistant_id);
    expect(() => client.assistants.get(res.assistant_id)).rejects.toThrow(
      "HTTP 404: Not Found"
    );
  });

  it("list assistants", async () => {
    let search = await client.assistants.search();
    // Greater than or equal because the vitest retries can cause multiple assistants to be created
    expect(search.length).toBeGreaterThanOrEqual(1);

    const graphid = "agent";
    const create = await client.assistants.create({ graphId: "agent" });

    search = await client.assistants.search();
    expect(search.length).toBeGreaterThanOrEqual(2);

    search = await client.assistants.search({ graphId: graphid });
    expect(search.length).toBeGreaterThanOrEqual(2);
    expect(search.every((i) => i.graph_id === graphid)).toBe(true);

    search = await client.assistants.search({
      metadata: { created_by: "system" },
    });
    expect(search.length).toBeGreaterThanOrEqual(1);
    expect(search.every((i) => i.assistant_id !== create.assistant_id)).toBe(
      true
    );
  });

  it("config from env", async () => {
    let search = await client.assistants.search({
      graphId: "agent",
      metadata: { created_by: "system" },
    });

    expect(search.length).toBe(1);
    expect(search[0].config).toMatchObject({
      configurable: { model_name: "openai" },
    });
  });
});

describe("threads crud", () => {
  beforeEach(async () => {
    await sql`DELETE FROM thread`;
  });

  it("create, read, update, delete thread", async () => {
    const metadata = { name: "test_thread" };

    const threadOne = await client.threads.create({ metadata });
    expect(threadOne.metadata).toEqual(metadata);

    let get = await client.threads.get(threadOne.thread_id);
    expect(get.thread_id).toBe(threadOne.thread_id);
    expect(get.metadata).toEqual(metadata);

    await client.threads.update(threadOne.thread_id, {
      metadata: { modified: true },
    });

    get = await client.threads.get(threadOne.thread_id);
    expect(get.metadata).toEqual({ ...metadata, modified: true });

    const threadTwo = await client.threads.create({
      metadata: { name: "another_thread" },
    });
    let search = await client.threads.search();
    expect(search.length).toBe(2);
    expect(search[0].thread_id).toBe(threadTwo.thread_id);
    expect(search[1].thread_id).toBe(threadOne.thread_id);

    search = await client.threads.search({ metadata: { modified: true } });
    expect(search.length).toBe(1);
    expect(search[0].thread_id).toBe(threadOne.thread_id);

    await client.threads.delete(threadOne.thread_id);
    search = await client.threads.search();

    expect(search.length).toBe(1);
    expect(search[0].thread_id).toBe(threadTwo.thread_id);
  });

  it("list threads", async () => {
    let search = await client.threads.search();
    expect(search.length).toBe(0);

    // test adding a single thread w/o metadata
    const createThreadResponse = await client.threads.create();
    search = await client.threads.search();

    expect(search.length).toBe(1);
    expect(createThreadResponse.thread_id).toBe(search[0].thread_id);

    // test adding a thread w/ metadata
    const metadata = { name: "test_thread" };
    const create = await client.threads.create({ metadata });

    search = await client.threads.search();
    expect(search.length).toBe(2);
    expect(create.thread_id).toBe(search[0].thread_id);

    // test filtering on metadata
    search = await client.threads.search({ metadata });
    expect(search.length).toBe(1);
    expect(create.thread_id).toBe(search[0].thread_id);

    // test pagination
    search = await client.threads.search({ offset: 1, limit: 1 });
    expect(search.length).toBe(1);
    expect(createThreadResponse.thread_id).toBe(search[0].thread_id);
  });
});

describe("threads copy", () => {
  it.concurrent("copy", async () => {
    const assistantId = "agent";
    const thread = await client.threads.create();
    const input = { messages: [{ type: "human", content: "foo" }] };
    await client.runs.wait(thread.thread_id, assistantId, {
      input,
      config: globalConfig,
    });

    const threadState = await client.threads.getState(thread.thread_id);

    const copiedThread = await client.threads.copy(thread.thread_id);
    const copiedThreadState = await client.threads.getState(
      copiedThread.thread_id
    );

    // check copied thread state matches expected output
    const expectedThreadMetadata = {
      ...threadState.metadata,
      thread_id: copiedThread.thread_id,
    };
    const expectedThreadState = {
      ...threadState,
      checkpoint: {
        ...threadState.checkpoint,
        thread_id: copiedThread.thread_id,
      },
      parent_checkpoint: {
        ...threadState.parent_checkpoint,
        thread_id: copiedThread.thread_id,
      },
      metadata: expectedThreadMetadata,
      checkpoint_id: copiedThreadState.checkpoint.checkpoint_id,
      parent_checkpoint_id: copiedThreadState.parent_checkpoint?.checkpoint_id,
    };
    expect(copiedThreadState).toEqual(expectedThreadState);

    // check checkpoints in DB
    const existingCheckpoints = await sql`
      SELECT * FROM checkpoints WHERE thread_id = ${thread.thread_id}
    `;
    const copiedCheckpoints = await sql`
      SELECT * FROM checkpoints WHERE thread_id = ${copiedThread.thread_id}
    `;

    expect(existingCheckpoints.length).toBe(copiedCheckpoints.length);
    for (let i = 0; i < existingCheckpoints.length; i++) {
      const existing = existingCheckpoints[i];
      const copied = copiedCheckpoints[i];
      delete existing.thread_id;
      delete existing.metadata.thread_id;
      delete copied.thread_id;
      delete copied.metadata.thread_id;
      expect(existing).toEqual(copied);
    }

    // check checkpoint blobs in DB
    const existingCheckpointBlobs = await sql`
      SELECT * FROM checkpoint_blobs WHERE thread_id = ${thread.thread_id} ORDER BY channel, version
    `;
    const copiedCheckpointBlobs = await sql`
      SELECT * FROM checkpoint_blobs WHERE thread_id = ${copiedThread.thread_id} ORDER BY channel, version
    `;

    expect(existingCheckpointBlobs.length).toBe(copiedCheckpointBlobs.length);
    for (let i = 0; i < existingCheckpointBlobs.length; i++) {
      const existing = existingCheckpointBlobs[i];
      const copied = copiedCheckpointBlobs[i];
      delete existing.thread_id;
      delete copied.thread_id;
      expect(existing).toEqual(copied);
    }
  });

  it.concurrent("copy runs", async () => {
    const assistantId = "agent";
    const thread = await client.threads.create();

    const input = { messages: [{ type: "human", content: "foo" }] };
    await client.runs.wait(thread.thread_id, assistantId, {
      input,
      config: globalConfig,
    });
    const originalThreadState = await client.threads.getState(thread.thread_id);

    const copiedThread = await client.threads.copy(thread.thread_id);
    const newInput = { messages: [{ type: "human", content: "bar" }] };
    await client.runs.wait(copiedThread.thread_id, assistantId, {
      input: newInput,
      config: globalConfig,
    });

    // test that copied thread has original as well as new values
    const copiedThreadState = await client.threads.getState<AgentState>(
      copiedThread.thread_id
    );

    const copiedThreadStateMessages = copiedThreadState.values.messages.map(
      (m) => m.content
    );
    expect(copiedThreadStateMessages).toEqual([
      // original messages
      "foo",
      "begin",
      "tool_call__begin",
      "end",
      // new messages
      "bar",
      "begin",
      "tool_call__begin",
      "end",
    ]);

    // test that the new run on the copied thread doesn't affect the original one
    const currentOriginalThreadState = await client.threads.getState(
      thread.thread_id
    );
    expect(currentOriginalThreadState).toEqual(originalThreadState);
  });

  it.concurrent("get thread history", async () => {
    const assistant = await client.assistants.create({ graphId: "agent" });
    const thread = await client.threads.create();
    const input = { messages: [{ type: "human", content: "foo" }] };

    const emptyHistory = await client.threads.getHistory(thread.thread_id);
    expect(emptyHistory.length).toBe(0);

    await client.runs.wait(thread.thread_id, assistant.assistant_id, {
      input,
      config: globalConfig,
    });

    const history = await client.threads.getHistory<AgentState>(
      thread.thread_id
    );
    expect(history.length).toBe(5);
    expect(history[0].values.messages.length).toBe(4);
    expect(history[0].next.length).toBe(0);
    expect(history.at(-1)?.next).toEqual(["__start__"]);

    const runMetadata = { run_metadata: "run_metadata" };
    const inputBar = { messages: [{ type: "human", content: "bar" }] };
    await client.runs.wait(thread.thread_id, assistant.assistant_id, {
      input: inputBar,
      metadata: runMetadata,
      config: globalConfig,
    });

    const fullHistory = await client.threads.getHistory<AgentState>(
      thread.thread_id
    );
    const filteredHistory = await client.threads.getHistory<AgentState>(
      thread.thread_id,
      { metadata: runMetadata }
    );

    expect(fullHistory.length).toBe(10);
    expect(fullHistory.at(-1)?.values.messages.length).toBe(0);

    expect(filteredHistory.length).toBe(5);
    expect(filteredHistory.at(-1)?.values.messages.length).toBe(4);
  });

  it.concurrent("copy update", async () => {
    const assistantId = "agent";
    const thread = await client.threads.create();
    const input = {
      messages: [{ type: "human", content: "foo", id: "initial-message" }],
    };
    await client.runs.wait(thread.thread_id, assistantId, {
      input,
      config: globalConfig,
    });

    const originalState = await client.threads.getState(thread.thread_id);
    const copyThread = await client.threads.copy(thread.thread_id);

    // update state on a copied thread
    const update = { type: "human", content: "bar", id: "initial-message" };
    await client.threads.updateState(copyThread.thread_id, {
      values: { messages: [update] },
    });

    const copiedThreadState = await client.threads.getState<AgentState>(
      copyThread.thread_id
    );
    expect(copiedThreadState.values.messages[0].content).toBe("bar");

    // test that updating the copied thread doesn't affect the original one
    const currentOriginalThreadState = await client.threads.getState(
      thread.thread_id
    );
    expect(currentOriginalThreadState).toEqual(originalState);
  });
});

describe("runs", () => {
  beforeAll(async () => {
    await sql`DELETE FROM thread`;
    await sql`DELETE FROM store`;
  });

  it.concurrent("stream values", async () => {
    const assistant = await client.assistants.create({ graphId: "agent" });
    const thread = await client.threads.create();
    const input = {
      messages: [{ type: "human", content: "foo", id: "initial-message" }],
    };
    const stream = client.runs.stream(
      thread.thread_id,
      assistant.assistant_id,
      { input, streamMode: "values", config: globalConfig }
    );

    let runId: string | null = null;
    let previousMessageIds = [];
    const seenEventTypes = new Set();

    let chunk: any;
    for await (chunk of stream) {
      seenEventTypes.add(chunk.event);

      if (chunk.event === "metadata") {
        runId = chunk.data.run_id;
      }

      if (chunk.event === "values") {
        const messageIds = chunk.data.messages.map((message) => message.id);
        expect(messageIds.slice(0, -1)).toEqual(previousMessageIds);
        previousMessageIds = messageIds;
      }
    }

    expect(chunk.event).toBe("values");
    expect(seenEventTypes).toEqual(new Set(["metadata", "values"]));

    expect(runId).not.toBeNull();
    const run = await client.runs.get(thread.thread_id, runId as string);
    expect(run.status).toBe("success");

    let cur = await sql`SELECT * FROM checkpoints WHERE run_id is null`;

    expect(cur).toHaveLength(0);

    cur = await sql`SELECT * FROM checkpoints WHERE run_id = ${run.run_id}`;
    expect(cur.length).toBeGreaterThan(1);
  });

  it.concurrent("wait error", async () => {
    const assistant = await client.assistants.create({ graphId: "agent" });
    const thread = await client.threads.create();
    const input = {
      messages: [{ type: "human", content: "foo", id: "initial-message" }],
    };

    await expect(
      client.runs.wait(thread.thread_id, assistant.assistant_id, {
        input,
        config: { ...globalConfig, recursion_limit: 1 },
      })
    ).rejects.toThrowError(/GraphRecursionError/);
    const threadUpdated = await client.threads.get(thread.thread_id);
    expect(threadUpdated.status).toBe("error");
  });

  it.concurrent("wait", async () => {
    const assistant = await client.assistants.create({ graphId: "agent" });
    const thread = await client.threads.create();
    const input = {
      messages: [{ type: "human", content: "foo", id: "initial-message" }],
    };
    const values = await client.runs.wait(
      thread.thread_id,
      assistant.assistant_id,
      { input, config: globalConfig }
    );

    expect(Array.isArray((values as any).messages)).toBe(true);
    const threadUpdated = await client.threads.get(thread.thread_id);
    expect(threadUpdated.status).toBe("idle");
  });

  it.concurrent("stream updates", async () => {
    const assistant = await client.assistants.create({ graphId: "agent" });
    const thread = await client.threads.create();
    const input = {
      messages: [{ type: "human", content: "foo", id: "initial-message" }],
    };
    const stream = client.runs.stream(
      thread.thread_id,
      assistant.assistant_id,
      { input, streamMode: "updates", config: globalConfig }
    );

    let runId: string | null = null;
    const seenEventTypes = new Set();
    const seenNodes: string[] = [];

    let chunk: any;
    for await (chunk of stream) {
      seenEventTypes.add(chunk.event);

      if (chunk.event === "metadata") {
        runId = chunk.data.run_id;
      }

      if (chunk.event === "updates") {
        const node = Object.keys(chunk.data)[0];
        seenNodes.push(node);
      }
    }

    expect(seenNodes).toEqual(["agent", "tool", "agent"]);

    expect(chunk.event).toBe("updates");
    expect(seenEventTypes).toEqual(new Set(["metadata", "updates"]));

    expect(runId).not.toBeNull();
    const run = await client.runs.get(thread.thread_id, runId as string);
    expect(run.status).toBe("success");
  });

  it.concurrent("stream events", async () => {
    const assistant = await client.assistants.create({ graphId: "agent" });
    const thread = await client.threads.create();
    const input = {
      messages: [{ type: "human", content: "foo", id: "initial-message" }],
    };
    const stream = client.runs.stream(
      thread.thread_id,
      assistant.assistant_id,
      { input, streamMode: "events", config: globalConfig }
    );

    const events = await gatherIterator(stream);
    expect(new Set(events.map((i) => i.event))).toEqual(
      new Set(["metadata", "events"])
    );

    expect(
      new Set(
        events.filter((i) => i.event === "events").map((i) => i.data.event)
      )
    ).toEqual(
      new Set([
        "on_chain_start",
        "on_chain_end",
        "on_chat_model_end",
        "on_chat_model_start",
        "on_chat_model_stream",
      ])
    );
  });

  it.concurrent("stream messages", async () => {
    const assistant = await client.assistants.create({ graphId: "agent" });
    const thread = await client.threads.create();
    const input = {
      messages: [{ type: "human", content: "foo", id: "initial-message" }],
    };
    const stream = client.runs.stream(
      thread.thread_id,
      assistant.assistant_id,
      { input, streamMode: "messages", config: globalConfig }
    );

    let runId: string | null = null;
    const seenEventTypes = new Set();
    const messageIdToContent: Record<string, string> = {};
    let lastMessage: any = null;

    let chunk: any;
    for await (chunk of stream) {
      seenEventTypes.add(chunk.event);

      if (chunk.event === "metadata") {
        runId = chunk.data.run_id;
      }

      if (chunk.event === "messages/partial") {
        const message = chunk.data[0];
        messageIdToContent[message.id] = message.content;
      }

      if (chunk.event === "messages/complete") {
        const message = chunk.data[0];
        expect(message.content).not.toBeNull();
        if (message.type === "ai") {
          expect(message.content).toBe(messageIdToContent[message.id]);
        }
        lastMessage = message;
      }
    }

    expect(lastMessage).not.toBeNull();
    expect(lastMessage.content).toBe("end");

    expect(chunk.event).toBe("messages/complete");
    expect(seenEventTypes).toEqual(
      new Set([
        "metadata",
        "messages/metadata",
        "messages/partial",
        "messages/complete",
      ])
    );

    expect(runId).not.toBeNull();
    const run = await client.runs.get(thread.thread_id, runId as string);
    expect(run.status).toBe("success");
  });

  it.concurrent("stream mixed modes", async () => {
    const assistant = await client.assistants.create({ graphId: "agent" });
    const thread = await client.threads.create();
    const input = {
      messages: [{ type: "human", content: "foo", id: "initial-message" }],
    };
    const stream = await client.runs.stream(
      thread.thread_id,
      assistant.assistant_id,
      { input, streamMode: ["messages", "values"], config: globalConfig }
    );

    const chunks = await gatherIterator(stream);
    expect(chunks.at(-1)?.event).toBe("messages/complete");
    expect(chunks.filter((i) => i.event === "error").length).toBe(0);

    const messages: BaseMessage[] = findLast(
      chunks,
      (i) => i.event === "values"
    )?.data.messages;

    expect(messages.length).toBe(4);
    expect(messages.at(-1)?.content).toBe("end");

    const runId = findLast(chunks, (i) => i.event === "metadata")?.data.run_id;
    expect(runId).not.toBeNull();

    const seenEventTypes = new Set(chunks.map((i) => i.event));
    expect(seenEventTypes).toEqual(
      new Set([
        "metadata",
        "messages/metadata",
        "messages/partial",
        "messages/complete",
        "values",
      ])
    );

    const run = await client.runs.get(thread.thread_id, runId);
    expect(run.status).toBe("success");
  });

  it.concurrent(
    "human in the loop - no modification",
    { retry: 0 },
    async () => {
      const assistant = await client.assistants.create({ graphId: "agent" });
      const thread = await client.threads.create();
      const input = {
        messages: [{ type: "human", content: "foo", id: "initial-message" }],
      };
      let messages: BaseMessage[] = [];

      // (1) interrupt and then continue running, no modification
      // run until the interrupt
      let chunks = await gatherIterator(
        client.runs.stream(thread.thread_id, assistant.assistant_id, {
          input,
          interruptBefore: ["tool"],
          config: globalConfig,
        })
      );

      expect(chunks.filter((i) => i.event === "error").length).toBe(0);
      messages = findLast(chunks, (i) => i.event === "values")?.data.messages;

      const threadAfterInterrupt = await client.threads.get(thread.thread_id);
      expect(threadAfterInterrupt.status).toBe("idle");

      expect(messages.at(-1)).not.toBeNull();
      expect(messages.at(-1)?.content).toBe("end");

      const state = await client.threads.getState(thread.thread_id);
      expect(state.next).toEqual([]);

      // continue after interrupt
      chunks = await gatherIterator(
        client.runs.stream(thread.thread_id, assistant.assistant_id, {
          input: null,
          config: globalConfig,
        })
      );

      expect(chunks.filter((i) => i.event === "error").length).toBe(0);
      messages = findLast(chunks, (i) => i.event === "values")?.data.messages;

      expect(messages.length).toBe(4);
      expect(messages[2].content).toBe("tool_call__begin");
      expect(messages.at(-1)?.content).toBe("end");

      const threadAfterContinue = await client.threads.get(thread.thread_id);
      expect(threadAfterContinue.status).toBe("idle");
    }
  );

  it.concurrent("human in the loop - modification", async () => {
    // (2) interrupt, modify the message and then continue running
    const assistant = await client.assistants.create({ graphId: "agent" });
    const thread = await client.threads.create();
    const input = {
      messages: [{ type: "human", content: "foo", id: "initial-message" }],
    };
    let messages: BaseMessage[] = [];

    // run until the interrupt
    let chunks = await gatherIterator(
      client.runs.stream(thread.thread_id, assistant.assistant_id, {
        input,
        interruptBefore: ["tool"],
        config: globalConfig,
      })
    );

    expect(chunks.filter((i) => i.event === "error").length).toBe(0);

    // edit the last message
    const lastMessage = findLast(
      chunks,
      (i) => i.event === "values"
    )?.data.messages.at(-1);
    lastMessage.content = "modified";

    // update state
    await client.threads.updateState<AgentState>(thread.thread_id, {
      values: { messages: [lastMessage] },
    });
    await client.threads.update(thread.thread_id, {
      metadata: { modified: true },
    });

    const modifiedThread = await client.threads.get(thread.thread_id);
    expect(modifiedThread.metadata?.modified).toBe(true);

    const stateAfterModify = await client.threads.getState<AgentState>(
      thread.thread_id
    );
    expect(stateAfterModify.values.messages.at(-1)?.content).toBe("modified");
    expect(stateAfterModify.next).toEqual(["tool"]);
    expect(stateAfterModify.tasks).toMatchObject([
      { id: expect.any(String), name: "tool", error: null, interrupts: [] },
    ]);

    // continue after interrupt
    chunks = await gatherIterator(
      client.runs.stream(thread.thread_id, assistant.assistant_id, {
        input: null,
        config: globalConfig,
      })
    );

    expect(chunks.filter((i) => i.event === "error").length).toBe(0);
    messages = findLast(chunks, (i) => i.event === "values")?.data.messages;

    expect(messages.length).toBe(8);
    expect(messages[4].content).toBe(`tool_call__modified`);
    expect(messages.at(-1)?.content).toBe("end");

    // get the history
    const history = await client.threads.getHistory<AgentState>(
      thread.thread_id
    );
    expect(history.length).toBe(10);
    expect(history[0].next.length).toBe(0);
    expect(history[0].values.messages.length).toBe(8);
    expect(history.at(-1)?.next).toEqual(["__start__"]);
  });
});

describe("shared state", () => {
  beforeEach(async () => {
    await sql`DELETE FROM store`;
  });

  it("should share state between runs with the same thread ID", async () => {
    const assistant = await client.assistants.create({ graphId: "agent" });
    const thread = await client.threads.create();

    const input = {
      messages: [
        { type: "human", content: "should_end", id: "initial-message" },
      ],
    };
    const config = { configurable: { user_id: "start_user_id" } };

    // First run
    const res1 = (await client.runs.wait(
      thread.thread_id,
      assistant.assistant_id,
      { input, config }
    )) as Awaited<Record<string, any>>;
    expect(res1.sharedStateValue).toBe(null);

    // Second run with the same thread ID & config
    const res2 = (await client.runs.wait(
      thread.thread_id,
      assistant.assistant_id,
      { input, config }
    )) as Awaited<Record<string, any>>;
    expect(res2.sharedStateValue).toBe(config.configurable.user_id);
  });

  it("should not share state between runs with different thread IDs", async () => {
    const assistant = await client.assistants.create({ graphId: "agent" });
    const thread = await client.threads.create();

    const input = {
      messages: [{ type: "human", content: "foo", id: "initial-message" }],
    };

    // Run with the default `globalConfig`
    const config1 = { configurable: { user_id: "start_user_id" } };
    const res1 = (await client.runs.wait(
      thread.thread_id,
      assistant.assistant_id,
      { input, config: config1 }
    )) as Awaited<Record<string, any>>;

    // Run with the same thread id but a new config
    const config2 = { configurable: { user_id: "new_user_id" } };
    const res2 = (await client.runs.wait(
      thread.thread_id,
      assistant.assistant_id,
      { input, config: config2 }
    )) as Awaited<Record<string, any>>;

    expect(res1.sharedStateValue).toBe(config1.configurable.user_id);
    // Null on first iteration since the shared value is set in the second iteration
    expect(res2.sharedStateValue).toBe(config2.configurable.user_id);
    expect(res1.sharedStateValue).not.toBe(res2.sharedStateValue);
  });

  it("should be able to set and return data from store in config", async () => {
    const assistant = await client.assistants.create({ graphId: "agent" });
    const thread = await client.threads.create();

    const input = {
      messages: [{ type: "human", content: "foo", id: "initial-message" }],
    };
    const config = {
      configurable: {
        user_id: "start_user_id",
      },
    };

    // Run with the default `globalConfig`
    const res1 = (await client.runs.wait(
      thread.thread_id,
      assistant.assistant_id,
      { input, config }
    )) as Awaited<Record<string, any>>;
    expect(res1.sharedStateFromStoreConfig).toBeDefined();
    expect(res1.sharedStateFromStoreConfig.id).toBeDefined();
    expect(res1.sharedStateFromStoreConfig.id).toBe(
      config.configurable.user_id
    );
  });

  it("Should be able to use the store client to fetch values", async () => {
    const assistant = await client.assistants.create({ graphId: "agent" });
    const thread = await client.threads.create();

    const input = {
      messages: [{ type: "human", content: "foo", id: "initial-message" }],
    };
    const config = {
      configurable: {
        user_id: "start_user_id",
      },
    };

    // For shared state
    const namespace = ["sharedState", "data"];
    const key = "user_id";

    // Run with the default `globalConfig`
    const res1 = (await client.runs.wait(
      thread.thread_id,
      assistant.assistant_id,
      { input, config }
    )) as Awaited<Record<string, any>>;
    expect(res1.sharedStateFromStoreConfig).toBeDefined();
    expect(res1.sharedStateFromStoreConfig.id).toBeDefined();
    expect(res1.sharedStateFromStoreConfig.id).toBe(
      config.configurable.user_id
    );

    // Fetch data from store client
    const storeRes = await client.store.getItem(namespace, key);
    expect(storeRes).toBeDefined();
    expect(storeRes?.value).toBeDefined();
    expect(storeRes?.value).toEqual({ id: config.configurable.user_id });
  });
});

describe("StoreClient", () => {
  beforeEach(async () => {
    await sql`DELETE FROM store`;
  });

  it("Should be able to use the store client methods", async () => {
    const assistant = await client.assistants.create({ graphId: "agent" });
    const thread = await client.threads.create();

    const input = {
      messages: [
        {
          type: "human",
          content: "___check_state_value",
          id: "initial-message",
        },
      ],
    };
    const config = {
      configurable: {
        user_id: "start_user_id",
      },
    };

    // For shared state
    const namespace = ["inputtedState", "data"];
    const key = "my_key";

    // Set the value
    await client.store.putItem(namespace, key, { isTrue: true });

    // Invoke the graph and ensure the value is set
    // When the graph is invoked with this input, it will route to
    // a special node that throws an error if the value is not set.
    await client.runs.wait(thread.thread_id, assistant.assistant_id, {
      input,
      config,
    });

    // Verify it can be fetched
    const storeRes = await client.store.getItem(namespace, key);
    expect(storeRes).toBeDefined();
    expect(storeRes?.value).toBeDefined();
    expect(storeRes?.value).toEqual({ isTrue: true });

    await client.store.deleteItem(namespace, key);
    const storeResAfterDelete = await client.store.getItem(namespace, key);
    expect(storeResAfterDelete).toBe(null);
  });

  it("Can put, search, list, get and delete", async () => {
    const namespace = ["allMethods", "data"];
    const key = randomUUID();
    const value = { foo: "bar" };

    // Try searching when no values are present.
    const searchRes = await client.store.searchItems(namespace);
    expect(searchRes.items).toBeDefined();
    expect(searchRes.items.length).toBe(0);

    // Try listing when no values are present.
    const listRes = await client.store.listNamespaces();
    expect(listRes.namespaces).toBeDefined();
    expect(listRes.namespaces.length).toBe(0);

    // Put an item
    await client.store.putItem(namespace, key, value);

    // Get the item
    const getRes = await client.store.getItem(namespace, key);
    expect(getRes).toBeDefined();
    expect(getRes?.value).toEqual(value);

    const searchResAfterPut = await client.store.searchItems(namespace);
    expect(searchResAfterPut.items).toBeDefined();
    expect(searchResAfterPut.items.length).toBe(1);
    expect(searchResAfterPut.items[0].key).toBe(key);
    expect(searchResAfterPut.items[0].value).toEqual(value);
    expect(searchResAfterPut.items[0].createdAt).toBeDefined();
    expect(searchResAfterPut.items[0].updatedAt).toBeDefined();
    expect(
      new Date(searchResAfterPut.items[0].createdAt).getTime()
    ).toBeLessThanOrEqual(Date.now());
    expect(
      new Date(searchResAfterPut.items[0].updatedAt).getTime()
    ).toBeLessThanOrEqual(Date.now());

    const updatedValue = { foo: "baz" };
    await client.store.putItem(namespace, key, updatedValue);

    const getResAfterUpdate = await client.store.getItem(namespace, key);
    expect(getResAfterUpdate).toBeDefined();
    expect(getResAfterUpdate?.value).toEqual(updatedValue);

    const searchResAfterUpdate = await client.store.searchItems(namespace);
    expect(searchResAfterUpdate.items).toBeDefined();
    expect(searchResAfterUpdate.items.length).toBe(1);
    expect(searchResAfterUpdate.items[0].key).toBe(key);
    expect(searchResAfterUpdate.items[0].value).toEqual(updatedValue);

    expect(
      new Date(searchResAfterUpdate.items[0].updatedAt).getTime()
    ).toBeGreaterThan(new Date(searchResAfterPut.items[0].updatedAt).getTime());

    const listResAfterPut = await client.store.listNamespaces();
    expect(listResAfterPut.namespaces).toBeDefined();
    expect(listResAfterPut.namespaces.length).toBe(1);
    expect(listResAfterPut.namespaces[0]).toEqual(namespace);

    await client.store.deleteItem(namespace, key);

    const getResAfterDelete = await client.store.getItem(namespace, key);
    expect(getResAfterDelete).toBeNull();

    const searchResAfterDelete = await client.store.searchItems(namespace);
    expect(searchResAfterDelete.items).toBeDefined();
    expect(searchResAfterDelete.items.length).toBe(0);
  });
});

describe("subgraphs", () => {
  it.concurrent("get subgraphs", async () => {
    const assistant = await client.assistants.create({ graphId: "nested" });

    expect(
      Object.keys(await client.assistants.getSubgraphs(assistant.assistant_id))
    ).toEqual(["gp_two"]);

    const subgraphs = await client.assistants.getSubgraphs(
      assistant.assistant_id,
      { recurse: true }
    );

    expect(Object.keys(subgraphs)).toEqual(["gp_two", "gp_two|p_two"]);
    expect(subgraphs).toMatchObject({
      gp_two: {
        state: {
          type: "object",
          properties: {
            parent: {
              type: "string",
              enum: ["parent_one", "parent_two"],
            },
            messages: { type: "array" },
          },
        },
      },
      "gp_two|p_two": {
        state: {
          type: "object",
          properties: {
            child: {
              type: "string",
              enum: ["child_one", "child_two"],
            },
            messages: { type: "array" },
          },
        },
      },
    });
  });

  // (1) interrupt and then continue running, no modification
  it.concurrent("human in the loop - no modification", async () => {
    const assistant = await client.assistants.create({ graphId: "weather" });

    const thread = await client.threads.create();
    const input = {
      messages: [{ role: "human", content: "SF", id: "initial-message" }],
    };

    // run until the interrupt
    let lastMessageBeforeInterrupt: { content?: string } | null = null;
    let chunks = await gatherIterator(
      client.runs.stream(thread.thread_id, assistant.assistant_id, {
        input,
        interruptBefore: ["tool"],
      })
    );

    for (const chunk of chunks) {
      if (chunk.event === "values") {
        lastMessageBeforeInterrupt =
          chunk.data.messages[chunk.data.messages.length - 1];
      }

      if (chunk.event === "error") {
        throw new Error(chunk.data);
      }
    }

    expect(lastMessageBeforeInterrupt?.content).toBe("SF");
    expect(chunks).toEqual([
      {
        event: "metadata",
        data: { run_id: expect.any(String), attempt: 1 },
      },
      {
        event: "values",
        data: {
          messages: [
            {
              content: "SF",
              additional_kwargs: {},
              response_metadata: {},
              type: "human",
              id: "initial-message",
            },
          ],
        },
      },
      {
        event: "values",
        data: {
          messages: [
            {
              content: "SF",
              additional_kwargs: {},
              response_metadata: {},
              type: "human",
              id: "initial-message",
            },
          ],
          route: "weather",
        },
      },
    ]);

    let state = await client.threads.getState(thread.thread_id);
    expect(state.next).toEqual(["weather_graph"]);
    expect(state.tasks).toEqual([
      {
        id: expect.any(String),
        name: "weather_graph",
        path: ["__pregel_pull", "weather_graph"],
        error: null,
        interrupts: [],
        checkpoint: {
          checkpoint_ns: expect.stringMatching(/^weather_graph:/),
          thread_id: expect.any(String),
        },
        state: null,
        result: null,
      },
    ]);

    const stateRecursive = await client.threads.getState(
      thread.thread_id,
      undefined,
      { subgraphs: true }
    );

    expect(stateRecursive.next).toEqual(["weather_graph"]);
    expect(stateRecursive.tasks).toEqual([
      {
        id: expect.any(String),
        name: "weather_graph",
        path: ["__pregel_pull", "weather_graph"],
        error: null,
        interrupts: [],
        checkpoint: null,
        result: null,
        state: {
          values: {
            city: "San Francisco",
            messages: [
              {
                content: "SF",
                additional_kwargs: {},
                response_metadata: {},
                type: "human",
                id: "initial-message",
              },
            ],
          },
          next: ["weather_node"],
          tasks: [
            {
              id: expect.any(String),
              name: "weather_node",
              path: ["__pregel_pull", "weather_node"],
              error: null,
              interrupts: [],
              checkpoint: null,
              state: null,
              result: null,
            },
          ],
          metadata: expect.any(Object),
          created_at: expect.any(String),
          checkpoint: expect.any(Object),
          parent_checkpoint: expect.any(Object),
          checkpoint_id: expect.any(String),
          parent_checkpoint_id: expect.any(String),
        },
      },
    ]);

    const threadAfterInterrupt = await client.threads.get(thread.thread_id);
    expect(threadAfterInterrupt.status).toBe("interrupted");

    // continue after interrupt
    chunks = await gatherIterator(
      client.runs.stream(thread.thread_id, assistant.assistant_id, {
        input: null,
        streamMode: ["values", "updates"],
        streamSubgraphs: true,
      })
    );

    expect(chunks.filter((i) => i.event === "error")).toEqual([]);
    expect(chunks.at(-1)?.event).toBe("values");

    const continueMessages = findLast(chunks, (i) => i.event === "values")?.data
      .messages;

    expect(continueMessages.length).toBe(2);
    expect(continueMessages[0].content).toBe("SF");
    expect(continueMessages[1].content).toBe("It's sunny in San Francisco!");
    expect(chunks).toEqual([
      {
        event: "metadata",
        data: { run_id: expect.any(String), attempt: 1 },
      },
      {
        event: "values",
        data: {
          messages: [
            {
              content: "SF",
              additional_kwargs: {},
              response_metadata: {},
              type: "human",
              id: "initial-message",
            },
          ],
          route: "weather",
        },
      },
      {
        event: expect.stringMatching(/^values\|weather_graph:/),
        data: {
          messages: [
            {
              content: "SF",
              additional_kwargs: {},
              response_metadata: {},
              type: "human",
              id: "initial-message",
            },
          ],
          city: "San Francisco",
        },
      },
      {
        event: expect.stringMatching(/^updates\|weather_graph:/),
        data: {
          weather_node: {
            messages: [
              {
                content: "It's sunny in San Francisco!",
                additional_kwargs: {},
                response_metadata: {},
                type: "ai",
                id: expect.any(String),
                tool_calls: [],
                invalid_tool_calls: [],
              },
            ],
          },
        },
      },
      {
        event: expect.stringMatching(/^values\|weather_graph:/),
        data: {
          messages: [
            {
              content: "SF",
              additional_kwargs: {},
              response_metadata: {},
              type: "human",
              id: "initial-message",
            },
            {
              content: "It's sunny in San Francisco!",
              additional_kwargs: {},
              response_metadata: {},
              type: "ai",
              id: expect.any(String),
              tool_calls: [],
              invalid_tool_calls: [],
            },
          ],
          city: "San Francisco",
        },
      },
      {
        event: "updates",
        data: {
          weather_graph: {
            messages: [
              {
                content: "SF",
                additional_kwargs: {},
                response_metadata: {},
                type: "human",
                id: "initial-message",
              },
              {
                content: "It's sunny in San Francisco!",
                additional_kwargs: {},
                response_metadata: {},
                type: "ai",
                id: expect.any(String),
                tool_calls: [],
                invalid_tool_calls: [],
              },
            ],
          },
        },
      },
      {
        event: "values",
        data: {
          messages: [
            {
              content: "SF",
              additional_kwargs: {},
              response_metadata: {},
              type: "human",
              id: "initial-message",
            },
            {
              content: "It's sunny in San Francisco!",
              additional_kwargs: {},
              response_metadata: {},
              type: "ai",
              id: expect.any(String),
              tool_calls: [],
              invalid_tool_calls: [],
            },
          ],
          route: "weather",
        },
      },
    ]);

    const threadAfterContinue = await client.threads.get(thread.thread_id);
    expect(threadAfterContinue.status).toBe("idle");
  });

  // (2) interrupt, modify the message and then continue running
  it.concurrent("human in the loop - modification", async () => {
    const assistant = await client.assistants.create({ graphId: "weather" });
    const thread = await client.threads.create();
    const input = {
      messages: [{ role: "human", content: "SF", id: "initial-message" }],
    };

    // run until the interrupt (same as before)
    let chunks = await gatherIterator(
      client.runs.stream(thread.thread_id, assistant.assistant_id, { input })
    );
    expect(chunks.filter((i) => i.event === "error")).toEqual([]);

    // get state after interrupt
    const state = await client.threads.getState(thread.thread_id);
    expect(state.next).toEqual(["weather_graph"]);
    expect(state.tasks).toEqual([
      {
        id: expect.any(String),
        name: "weather_graph",
        path: ["__pregel_pull", "weather_graph"],
        error: null,
        interrupts: [],
        checkpoint: {
          checkpoint_ns: expect.stringMatching(/^weather_graph:/),
          thread_id: expect.any(String),
        },
        state: null,
        result: null,
      },
    ]);

    // edit the city in the subgraph state
    await client.threads.updateState(thread.thread_id, {
      values: { city: "LA" },
      checkpoint: state.tasks[0].checkpoint ?? undefined,
    });

    // get inner state after update
    const innerState = await client.threads.getState<{ city: string }>(
      thread.thread_id,
      state.tasks[0].checkpoint ?? undefined
    );

    expect(innerState.values.city).toBe("LA");
    expect(innerState.next).toEqual(["weather_node"]);
    expect(innerState.tasks).toEqual([
      {
|
+
id: expect.any(String),
|
|
1455
|
+
name: "weather_node",
|
|
1456
|
+
path: ["__pregel_pull", "weather_node"],
|
|
1457
|
+
error: null,
|
|
1458
|
+
interrupts: [],
|
|
1459
|
+
checkpoint: null,
|
|
1460
|
+
state: null,
|
|
1461
|
+
result: null,
|
|
1462
|
+
},
|
|
1463
|
+
]);
|
|
1464
|
+
|
|
1465
|
+
// continue after interrupt
|
|
1466
|
+
chunks = await gatherIterator(
|
|
1467
|
+
client.runs.stream(thread.thread_id, assistant.assistant_id, {
|
|
1468
|
+
input: null,
|
|
1469
|
+
})
|
|
1470
|
+
);
|
|
1471
|
+
|
|
1472
|
+
expect(chunks.filter((i) => i.event === "error")).toEqual([]);
|
|
1473
|
+
expect(chunks.at(-1)?.event).toBe("values");
|
|
1474
|
+
|
|
1475
|
+
const continueMessages = findLast(chunks, (i) => i.event === "values")?.data
|
|
1476
|
+
.messages;
|
|
1477
|
+
|
|
1478
|
+
expect(continueMessages.length).toBe(2);
|
|
1479
|
+
expect(continueMessages[0].content).toBe("SF");
|
|
1480
|
+
expect(continueMessages[1].content).toBe("It's sunny in LA!");
|
|
1481
|
+
|
|
1482
|
+
// get the history for the root graph
|
|
1483
|
+
const history = await client.threads.getHistory<{
|
|
1484
|
+
messages: BaseMessageLike[];
|
|
1485
|
+
}>(thread.thread_id);
|
|
1486
|
+
expect(history.length).toBe(4);
|
|
1487
|
+
expect(history[0].next.length).toBe(0);
|
|
1488
|
+
expect(history[0].values.messages.length).toBe(2);
|
|
1489
|
+
expect(history[history.length - 1].next).toEqual(["__start__"]);
|
|
1490
|
+
|
|
1491
|
+
// get inner history
|
|
1492
|
+
const innerHistory = await client.threads.getHistory<{
|
|
1493
|
+
messages: BaseMessageLike[];
|
|
1494
|
+
city: string;
|
|
1495
|
+
}>(thread.thread_id, {
|
|
1496
|
+
checkpoint: state.tasks[0].checkpoint ?? undefined,
|
|
1497
|
+
});
|
|
1498
|
+
expect(innerHistory.length).toBe(5);
|
|
1499
|
+
expect(innerHistory[0].next.length).toBe(0);
|
|
1500
|
+
expect(innerHistory[0].values.messages.length).toBe(2);
|
|
1501
|
+
expect(innerHistory[innerHistory.length - 1].next).toEqual(["__start__"]);
|
|
1502
|
+
});
|
|
1503
|
+
});
|
|
1504
|
+
|
|
1505
|
+
describe("errors", () => {
|
|
1506
|
+
it.concurrent("stream", async () => {
|
|
1507
|
+
const assistant = await client.assistants.create({ graphId: "error" });
|
|
1508
|
+
const thread = await client.threads.create();
|
|
1509
|
+
|
|
1510
|
+
const stream = await gatherIterator(
|
|
1511
|
+
client.runs.stream(thread.thread_id, assistant.assistant_id, {
|
|
1512
|
+
input: { messages: [] },
|
|
1513
|
+
streamMode: ["debug", "events"],
|
|
1514
|
+
})
|
|
1515
|
+
);
|
|
1516
|
+
|
|
1517
|
+
expect(stream.at(-1)).toMatchObject({
|
|
1518
|
+
event: "error",
|
|
1519
|
+
data: {
|
|
1520
|
+
error: "CustomError",
|
|
1521
|
+
message: "Boo!",
|
|
1522
|
+
},
|
|
1523
|
+
});
|
|
1524
|
+
});
|
|
1525
|
+
|
|
1526
|
+
it.concurrent("create + join", async () => {
|
|
1527
|
+
const assistant = await client.assistants.create({ graphId: "error" });
|
|
1528
|
+
const thread = await client.threads.create();
|
|
1529
|
+
|
|
1530
|
+
const run = await client.runs.create(
|
|
1531
|
+
thread.thread_id,
|
|
1532
|
+
assistant.assistant_id,
|
|
1533
|
+
{ input: { messages: [] } }
|
|
1534
|
+
);
|
|
1535
|
+
|
|
1536
|
+
await client.runs.join(thread.thread_id, run.run_id);
|
|
1537
|
+
const runState = await client.runs.get(thread.thread_id, run.run_id);
|
|
1538
|
+
expect(runState.status).toEqual("error");
|
|
1539
|
+
});
|
|
1540
|
+
|
|
1541
|
+
it.concurrent("create + stream join", async () => {
|
|
1542
|
+
const assistant = await client.assistants.create({ graphId: "error" });
|
|
1543
|
+
const thread = await client.threads.create();
|
|
1544
|
+
|
|
1545
|
+
const run = await client.runs.create(
|
|
1546
|
+
thread.thread_id,
|
|
1547
|
+
assistant.assistant_id,
|
|
1548
|
+
{ input: { messages: [] } }
|
|
1549
|
+
);
|
|
1550
|
+
|
|
1551
|
+
const stream = await gatherIterator(
|
|
1552
|
+
client.runs.joinStream(thread.thread_id, run.run_id)
|
|
1553
|
+
);
|
|
1554
|
+
|
|
1555
|
+
expect(stream.at(-1)).toMatchObject({
|
|
1556
|
+
event: "error",
|
|
1557
|
+
data: {
|
|
1558
|
+
error: "CustomError",
|
|
1559
|
+
message: "Boo!",
|
|
1560
|
+
},
|
|
1561
|
+
});
|
|
1562
|
+
|
|
1563
|
+
const runState = await client.runs.get(thread.thread_id, run.run_id);
|
|
1564
|
+
expect(runState.status).toEqual("error");
|
|
1565
|
+
});
|
|
1566
|
+
});
|
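
The assertions above lean on two small helpers, gatherIterator and findLast, which the tests import from the package's own test utilities (langgraph_api/js/tests/utils.mts, not shown in this excerpt). As a rough sketch of the behavior the tests rely on (assumed shapes inferred from usage here, not necessarily the package's actual implementation), they could look like this in TypeScript:

// Sketch only: signatures inferred from usage in api.test.mts above; the real
// helpers ship in langgraph_api/js/tests/utils.mts and may differ.
export async function gatherIterator<T>(
  iter: AsyncIterable<T> | Promise<AsyncIterable<T>>
): Promise<T[]> {
  // Drain an async stream (e.g. client.runs.stream) into an array of chunks.
  const out: T[] = [];
  for await (const chunk of await iter) out.push(chunk);
  return out;
}

export function findLast<T>(
  items: T[],
  predicate: (item: T) => boolean
): T | undefined {
  // Return the last element matching the predicate, e.g. the final "values" chunk.
  for (let i = items.length - 1; i >= 0; i--) {
    if (predicate(items[i])) return items[i];
  }
  return undefined;
}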