langgraph-api 0.1.5__py3-none-any.whl → 0.1.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of langgraph-api might be problematic.

@@ -15,10 +15,8 @@ const sql = postgres(
  "postgres://postgres:postgres@127.0.0.1:5433/postgres?sslmode=disable",
  );

- const client = new Client<any>({
- // apiUrl: process.env["LANGGRAPH_ENDPOINT"] ?? "http://localhost:9123",
- apiUrl: "http://localhost:9123",
- });
+ const API_URL = "http://localhost:9123";
+ const client = new Client<any>({ apiUrl: API_URL });

  // Passed to all invocation requests as the graph now requires this field to be present
  // in `configurable` due to a new `SharedValue` field requiring it.
@@ -821,7 +819,7 @@ describe("runs", () => {
  },
  );

- it.concurrent("human in the loop - modification", {retry: 3}, async () => {
+ it.concurrent("human in the loop - modification", { retry: 3 }, async () => {
  // (2) interrupt, modify the message and then continue running
  const assistant = await client.assistants.create({ graphId: "agent" });
  const thread = await client.threads.create();
@@ -1204,103 +1202,55 @@ describe("subgraphs", () => {
  });

  // (1) interrupt and then continue running, no modification
- it.concurrent("human in the loop - no modification", {retry: 3}, async () => {
- const assistant = await client.assistants.create({ graphId: "weather" });
- const thread = await client.threads.create();
+ it.concurrent(
+ "human in the loop - no modification",
+ { retry: 3 },
+ async () => {
+ const assistant = await client.assistants.create({ graphId: "weather" });
+ const thread = await client.threads.create();

- // run until the interrupt
- let lastMessageBeforeInterrupt: { content?: string } | null = null;
- let chunks = await gatherIterator(
- client.runs.stream(thread.thread_id, assistant.assistant_id, {
- input: {
- messages: [{ role: "human", content: "SF", id: "initial-message" }],
- },
- interruptBefore: ["tool"],
- }),
- );
+ // run until the interrupt
+ let lastMessageBeforeInterrupt: { content?: string } | null = null;
+ let chunks = await gatherIterator(
+ client.runs.stream(thread.thread_id, assistant.assistant_id, {
+ input: {
+ messages: [{ role: "human", content: "SF", id: "initial-message" }],
+ },
+ interruptBefore: ["tool"],
+ }),
+ );

- for (const chunk of chunks) {
- if (chunk.event === "values") {
- lastMessageBeforeInterrupt =
- chunk.data.messages[chunk.data.messages.length - 1];
- }
+ for (const chunk of chunks) {
+ if (chunk.event === "values") {
+ lastMessageBeforeInterrupt =
+ chunk.data.messages[chunk.data.messages.length - 1];
+ }

- if (chunk.event === "error") {
- throw new Error(chunk.data.error);
+ if (chunk.event === "error") {
+ throw new Error(chunk.data.error);
+ }
  }
- }

- expect(lastMessageBeforeInterrupt?.content).toBe("SF");
- expect(chunks).toEqual([
- { event: "metadata", data: { run_id: expect.any(String), attempt: 1 } },
- {
- event: "values",
- data: {
- messages: [
- {
- content: "SF",
- additional_kwargs: {},
- response_metadata: {},
- type: "human",
- id: "initial-message",
- },
- ],
- },
- },
- {
- event: "values",
- data: {
- messages: [
- {
- content: "SF",
- additional_kwargs: {},
- response_metadata: {},
- type: "human",
- id: "initial-message",
- },
- ],
- route: "weather",
- },
- },
- ]);
-
- let state = await client.threads.getState(thread.thread_id);
- expect(state.next).toEqual(["weather_graph"]);
- expect(state.tasks).toEqual([
- {
- id: expect.any(String),
- name: "weather_graph",
- path: ["__pregel_pull", "weather_graph"],
- error: null,
- interrupts: [],
- checkpoint: {
- checkpoint_ns: expect.stringMatching(/^weather_graph:/),
- thread_id: expect.any(String),
+ expect(lastMessageBeforeInterrupt?.content).toBe("SF");
+ expect(chunks).toEqual([
+ { event: "metadata", data: { run_id: expect.any(String), attempt: 1 } },
+ {
+ event: "values",
+ data: {
+ messages: [
+ {
+ content: "SF",
+ additional_kwargs: {},
+ response_metadata: {},
+ type: "human",
+ id: "initial-message",
+ },
+ ],
+ },
  },
- state: null,
- result: null,
- },
- ]);
-
- const stateRecursive = await client.threads.getState(
- thread.thread_id,
- undefined,
- { subgraphs: true },
- );
-
- expect(stateRecursive.next).toEqual(["weather_graph"]);
- expect(stateRecursive.tasks).toEqual([
- {
- id: expect.any(String),
- name: "weather_graph",
- path: ["__pregel_pull", "weather_graph"],
- error: null,
- interrupts: [],
- checkpoint: null,
- result: null,
- state: {
- values: {
- city: "San Francisco",
+ {
+ event: "values",
+ data: {
  messages: [
  {
  content: "SF",
@@ -1310,94 +1260,169 @@ describe("subgraphs", () => {
  id: "initial-message",
  },
  ],
+ route: "weather",
  },
- next: ["weather_node"],
- tasks: [
- {
- id: expect.any(String),
- name: "weather_node",
- path: ["__pregel_pull", "weather_node"],
- error: null,
- interrupts: [],
- checkpoint: null,
- state: null,
- result: null,
+ },
+ ]);
+
+ let state = await client.threads.getState(thread.thread_id);
+ expect(state.next).toEqual(["weather_graph"]);
+ expect(state.tasks).toEqual([
+ {
+ id: expect.any(String),
+ name: "weather_graph",
+ path: ["__pregel_pull", "weather_graph"],
+ error: null,
+ interrupts: [],
+ checkpoint: {
+ checkpoint_ns: expect.stringMatching(/^weather_graph:/),
+ thread_id: expect.any(String),
+ },
+ state: null,
+ result: null,
+ },
+ ]);
+
+ const stateRecursive = await client.threads.getState(
+ thread.thread_id,
+ undefined,
+ { subgraphs: true },
+ );
+
+ expect(stateRecursive.next).toEqual(["weather_graph"]);
+ expect(stateRecursive.tasks).toEqual([
+ {
+ id: expect.any(String),
+ name: "weather_graph",
+ path: ["__pregel_pull", "weather_graph"],
+ error: null,
+ interrupts: [],
+ checkpoint: null,
+ result: null,
+ state: {
+ values: {
+ city: "San Francisco",
+ messages: [
+ {
+ content: "SF",
+ additional_kwargs: {},
+ response_metadata: {},
+ type: "human",
+ id: "initial-message",
+ },
+ ],
  },
- ],
- metadata: expect.any(Object),
- created_at: expect.any(String),
- checkpoint: expect.any(Object),
- parent_checkpoint: expect.any(Object),
- checkpoint_id: expect.any(String),
- parent_checkpoint_id: expect.any(String),
+ next: ["weather_node"],
+ tasks: [
+ {
+ id: expect.any(String),
+ name: "weather_node",
+ path: ["__pregel_pull", "weather_node"],
+ error: null,
+ interrupts: [],
+ checkpoint: null,
+ state: null,
+ result: null,
+ },
+ ],
+ metadata: expect.any(Object),
+ created_at: expect.any(String),
+ checkpoint: expect.any(Object),
+ parent_checkpoint: expect.any(Object),
+ checkpoint_id: expect.any(String),
+ parent_checkpoint_id: expect.any(String),
+ },
  },
- },
- ]);
+ ]);

- const threadAfterInterrupt = await client.threads.get(thread.thread_id);
- expect(threadAfterInterrupt.status).toBe("interrupted");
+ const threadAfterInterrupt = await client.threads.get(thread.thread_id);
+ expect(threadAfterInterrupt.status).toBe("interrupted");

- // continue after interrupt
- const chunksSubgraph = await gatherIterator(
- client.runs.stream(thread.thread_id, assistant.assistant_id, {
- input: null,
- streamMode: ["values", "updates"],
- streamSubgraphs: true,
- }),
- );
+ // continue after interrupt
+ const chunksSubgraph = await gatherIterator(
+ client.runs.stream(thread.thread_id, assistant.assistant_id, {
+ input: null,
+ streamMode: ["values", "updates"],
+ streamSubgraphs: true,
+ }),
+ );

- expect(chunksSubgraph.filter((i) => i.event === "error")).toEqual([]);
- expect(chunksSubgraph.at(-1)?.event).toBe("values");
+ expect(chunksSubgraph.filter((i) => i.event === "error")).toEqual([]);
+ expect(chunksSubgraph.at(-1)?.event).toBe("values");

- type ChunkType = (typeof chunksSubgraph)[number];
- const continueMessages = findLast(
- chunksSubgraph,
- (i): i is ChunkType & { event: "values" } => i.event === "values",
- )?.data.messages;
+ type ChunkType = (typeof chunksSubgraph)[number];
+ const continueMessages = findLast(
+ chunksSubgraph,
+ (i): i is ChunkType & { event: "values" } => i.event === "values",
+ )?.data.messages;

- expect(continueMessages.length).toBe(2);
- expect(continueMessages[0].content).toBe("SF");
- expect(continueMessages[1].content).toBe("It's sunny in San Francisco!");
- expect(chunksSubgraph).toEqual([
- {
- event: "metadata",
- data: { run_id: expect.any(String), attempt: 1 },
- },
- {
- event: "values",
- data: {
- messages: [
- {
- content: "SF",
- additional_kwargs: {},
- response_metadata: {},
- type: "human",
- id: "initial-message",
- },
- ],
- route: "weather",
+ expect(continueMessages.length).toBe(2);
+ expect(continueMessages[0].content).toBe("SF");
+ expect(continueMessages[1].content).toBe("It's sunny in San Francisco!");
+ expect(chunksSubgraph).toEqual([
+ {
+ event: "metadata",
+ data: { run_id: expect.any(String), attempt: 1 },
  },
- },
- {
- event: expect.stringMatching(/^values\|weather_graph:/),
- data: {
- messages: [
- {
- content: "SF",
- additional_kwargs: {},
- response_metadata: {},
- type: "human",
- id: "initial-message",
+ {
+ event: "values",
+ data: {
+ messages: [
+ {
+ content: "SF",
+ additional_kwargs: {},
+ response_metadata: {},
+ type: "human",
+ id: "initial-message",
+ },
+ ],
+ route: "weather",
+ },
+ },
+ {
+ event: expect.stringMatching(/^values\|weather_graph:/),
+ data: {
+ messages: [
+ {
+ content: "SF",
+ additional_kwargs: {},
+ response_metadata: {},
+ type: "human",
+ id: "initial-message",
+ },
+ ],
+ city: "San Francisco",
+ },
+ },
+ {
+ event: expect.stringMatching(/^updates\|weather_graph:/),
+ data: {
+ weather_node: {
+ messages: [
+ {
+ content: "It's sunny in San Francisco!",
+ additional_kwargs: {},
+ response_metadata: {},
+ type: "ai",
+ id: expect.any(String),
+ tool_calls: [],
+ invalid_tool_calls: [],
+ },
+ ],
  },
- ],
- city: "San Francisco",
+ },
  },
- },
- {
- event: expect.stringMatching(/^updates\|weather_graph:/),
- data: {
- weather_node: {
+ {
+ event: expect.stringMatching(/^values\|weather_graph:/),
+ data: {
  messages: [
+ {
+ content: "SF",
+ additional_kwargs: {},
+ response_metadata: {},
+ type: "human",
+ id: "initial-message",
+ },
  {
  content: "It's sunny in San Francisco!",
  additional_kwargs: {},
@@ -1408,37 +1433,37 @@ describe("subgraphs", () => {
  invalid_tool_calls: [],
  },
  ],
+ city: "San Francisco",
  },
  },
- },
- {
- event: expect.stringMatching(/^values\|weather_graph:/),
- data: {
- messages: [
- {
- content: "SF",
- additional_kwargs: {},
- response_metadata: {},
- type: "human",
- id: "initial-message",
- },
- {
- content: "It's sunny in San Francisco!",
- additional_kwargs: {},
- response_metadata: {},
- type: "ai",
- id: expect.any(String),
- tool_calls: [],
- invalid_tool_calls: [],
+ {
+ event: "updates",
+ data: {
+ weather_graph: {
+ messages: [
+ {
+ content: "SF",
+ additional_kwargs: {},
+ response_metadata: {},
+ type: "human",
+ id: "initial-message",
+ },
+ {
+ content: "It's sunny in San Francisco!",
+ additional_kwargs: {},
+ response_metadata: {},
+ type: "ai",
+ id: expect.any(String),
+ tool_calls: [],
+ invalid_tool_calls: [],
+ },
+ ],
  },
- ],
- city: "San Francisco",
+ },
  },
- },
- {
- event: "updates",
- data: {
- weather_graph: {
+ {
+ event: "values",
+ data: {
  messages: [
  {
  content: "SF",
@@ -1457,41 +1482,18 @@ describe("subgraphs", () => {
  invalid_tool_calls: [],
  },
  ],
+ route: "weather",
  },
  },
- },
- {
- event: "values",
- data: {
- messages: [
- {
- content: "SF",
- additional_kwargs: {},
- response_metadata: {},
- type: "human",
- id: "initial-message",
- },
- {
- content: "It's sunny in San Francisco!",
- additional_kwargs: {},
- response_metadata: {},
- type: "ai",
- id: expect.any(String),
- tool_calls: [],
- invalid_tool_calls: [],
- },
- ],
- route: "weather",
- },
- },
- ]);
+ ]);

- const threadAfterContinue = await client.threads.get(thread.thread_id);
- expect(threadAfterContinue.status).toBe("idle");
- });
+ const threadAfterContinue = await client.threads.get(thread.thread_id);
+ expect(threadAfterContinue.status).toBe("idle");
+ },
+ );

  // (2) interrupt, modify the message and then continue running
- it.concurrent("human in the loop - modification", {retry: 3}, async () => {
+ it.concurrent("human in the loop - modification", { retry: 3 }, async () => {
  const assistant = await client.assistants.create({ graphId: "weather" });
  const thread = await client.threads.create();
  const input = {
@@ -1993,3 +1995,88 @@ it("generative ui", async () => {
  client["~ui"].getComponent("non-existent", "none"),
  ).rejects.toThrow();
  });
+
+ it("custom routes", async () => {
+ const fetcher = async (...args: Parameters<typeof fetch>) => {
+ const res = await fetch(...args);
+ if (!res.ok) throw new Error(`${res.status} ${res.statusText}`);
+ return { json: await res.json(), headers: res.headers };
+ };
+
+ let res = await fetcher(new URL("/custom/my-route?aCoolParam=13", API_URL), {
+ headers: { "x-custom-input": "hey" },
+ });
+ expect(res.json).toEqual({ foo: "bar" });
+ expect(res.headers.get("x-custom-output")).toEqual("hey");
+ expect(res.headers.get("x-js-middleware")).toEqual("true");
+
+ res = await fetcher(new URL("/runs/afakeroute", API_URL));
+ expect(res.json).toEqual({ foo: "afakeroute" });
+
+ await expect(() =>
+ fetcher(new URL("/does/not/exist", API_URL)),
+ ).rejects.toThrow("404");
+
+ await expect(() =>
+ fetcher(new URL("/custom/error", API_URL)),
+ ).rejects.toThrow("400");
+
+ await expect(() =>
+ fetcher(new URL("/__langgraph_check", API_URL), { method: "OPTIONS" }),
+ ).rejects.toThrow("404");
+
+ const stream = await fetch(new URL("/custom/streaming", API_URL));
+ const reader = stream.body?.getReader();
+ if (!reader) throw new Error("No reader");
+
+ const chunks: string[] = [];
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) break;
+ chunks.push(new TextDecoder().decode(value));
+ }
+
+ expect(chunks.length).toBeGreaterThanOrEqual(4); // Must actually stream
+ expect(chunks.join("")).toEqual("Count: 0\nCount: 1\nCount: 2\nCount: 3\n");
+
+ const thread = await client.threads.create();
+ await client.runs.wait(thread.thread_id, "agent_simple", {
+ input: { messages: [{ role: "human", content: "foo" }] },
+ webhook: "/custom/webhook",
+ });
+
+ await expect
+ .poll(() => fetcher(new URL("/custom/webhook-payload", API_URL)), {
+ interval: 500,
+ timeout: 3000,
+ })
+ .toMatchObject({ json: { status: "success" } });
+
+ // check if custom middleware is applied even for python routes
+ res = await fetcher(new URL("/info", API_URL));
+ expect(res.headers.get("x-js-middleware")).toEqual("true");
+
+ // ... and if we can intercept a request targeted for Python API
+ res = await fetcher(new URL("/info?interrupt", API_URL));
+ expect(res.json).toEqual({ status: "interrupted" });
+ });
+
+ it("custom routes - mutate request body", async () => {
+ const client = new Client<any>({
+ apiUrl: API_URL,
+ defaultHeaders: {
+ "x-configurable-header": "extra-client",
+ },
+ });
+
+ const thread = await client.threads.create();
+ const res = await client.runs.wait(thread.thread_id, "agent_simple", {
+ input: { messages: [{ role: "human", content: "input" }] },
+ });
+
+ expect(res).toEqual({
+ messages: expect.arrayContaining([
+ expect.objectContaining({ content: "end: extra-client" }),
+ ]),
+ });
+ });
@@ -34,8 +34,9 @@ services:
  ADD . /deps/graphs
  WORKDIR /deps/graphs
  RUN yarn install --frozen-lockfile
- ENV LANGSERVE_GRAPHS='{"agent":"./agent.mts:graph", "nested": "./nested.mts:graph", "weather": "./weather.mts:graph", "error": "./error.mts:graph", "delay": "./delay.mts:graph", "dynamic": "./dynamic.mts:graph", "command": "./command.mts:graph"}'
+ ENV LANGSERVE_GRAPHS='{"agent":"./agent.mts:graph", "nested": "./nested.mts:graph", "weather": "./weather.mts:graph", "error": "./error.mts:graph", "delay": "./delay.mts:graph", "dynamic": "./dynamic.mts:graph", "command": "./command.mts:graph", "agent_simple": "./agent_simple.mts:graph"}'
  ENV LANGGRAPH_CONFIG='{"agent": {"configurable": {"model_name": "openai"}}}'
+ ENV LANGGRAPH_HTTP='{"app": "./http.mts:app"}'
  ENV LANGGRAPH_UI='{"agent": "./agent.ui.tsx"}'
  RUN tsx /api/langgraph_api/js/build.mts
  depends_on:
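
Note: the new LANGGRAPH_HTTP entry points the JS build at a custom HTTP app exported from ./http.mts, which is what the "custom routes" tests above exercise. That file itself is not part of this diff; the following is a minimal hypothetical sketch of what it could contain, under the assumption that the exported app is a Hono instance. The route paths and header names are taken from the tests, while the handler bodies are illustrative guesses only.

// http.mts — hypothetical sketch, not included in this diff
import { Hono } from "hono";

export const app = new Hono();

// Middleware that stamps every response, matching the "x-js-middleware"
// header asserted by the tests (including for routes served elsewhere, like /info).
app.use("*", async (c, next) => {
  await next();
  c.header("x-js-middleware", "true");
});

// Echo a request header back as a response header, as the /custom/my-route test expects.
app.get("/custom/my-route", (c) => {
  c.header("x-custom-output", c.req.header("x-custom-input") ?? "");
  return c.json({ foo: "bar" });
});

// Always fail with 400 so the /custom/error assertion can observe it.
app.get("/custom/error", (c) => c.json({ error: "bad request" }, 400));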
@@ -29,7 +29,7 @@ const AgentState = Annotation.Root({

  async function callModel(
  state: typeof AgentState.State,
- config: LangGraphRunnableConfig
+ config: LangGraphRunnableConfig,
  ): Promise<typeof AgentState.Update> {
  let userId: string | undefined;

@@ -41,6 +41,12 @@ async function callModel(
  userId = user?.identity;
  }

+ if (config.configurable?.["x-configurable-header"] != null) {
+ return {
+ messages: [`end: ${config.configurable?.["x-configurable-header"]}`],
+ };
+ }
+
  const model = getStableModel(config.configurable?.thread_id ?? "$");
  const existing = await config.store?.get([userId ?? "ALL"], "key_one");
  if (!existing) {
@@ -49,16 +55,15 @@ async function callModel(
  }

  const response = await model.invoke(state.messages);
- const result: typeof AgentState.Update = { messages: [response] };
- return result;
+ return { messages: [response] };
  }

  async function callTool(
- message: BaseMessage
+ message: BaseMessage,
  ): Promise<typeof AgentState.Update> {
  const response = new ToolMessage(
  `tool_call__${message.content}`,
- "tool_call_id"
+ "tool_call_id",
  );
  return { messages: [response] };
  }