langgraph-api 0.0.38__py3-none-any.whl → 0.0.39__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release.


This version of langgraph-api might be problematic.

@@ -1,5 +1,5 @@
  import { describe, expect, it, beforeEach, beforeAll } from "vitest";
- import { Client } from "@langchain/langgraph-sdk";
+ import { Client, FeedbackStreamEvent } from "@langchain/langgraph-sdk";

  import postgres from "postgres";
  import { findLast, gatherIterator } from "./utils.mts";
@@ -12,10 +12,10 @@ import { randomUUID } from "crypto";

  const sql = postgres(
  process.env.POSTGRES_URI ??
- "postgres://postgres:postgres@127.0.0.1:5433/postgres?sslmode=disable"
+ "postgres://postgres:postgres@127.0.0.1:5433/postgres?sslmode=disable",
  );

- const client = new Client({
+ const client = new Client<any>({
  // apiUrl: process.env["LANGGRAPH_ENDPOINT"] ?? "http://localhost:9123",
  apiUrl: "http://localhost:9123",
  });
@@ -61,7 +61,7 @@ describe("assistants", () => {

  await client.assistants.delete(res.assistant_id);
  await expect(() => client.assistants.get(res.assistant_id)).rejects.toThrow(
- "HTTP 404: Not Found"
+ "HTTP 404: Not Found",
  );
  });

@@ -130,7 +130,7 @@ describe("assistants", () => {

  await client.assistants.delete(res.assistant_id);
  await expect(() => client.assistants.get(res.assistant_id)).rejects.toThrow(
- "HTTP 404: Not Found"
+ "HTTP 404: Not Found",
  );
  });

@@ -154,7 +154,7 @@ describe("assistants", () => {
  });
  expect(search.length).toBeGreaterThanOrEqual(1);
  expect(search.every((i) => i.assistant_id !== create.assistant_id)).toBe(
- true
+ true,
  );
  });

@@ -257,7 +257,7 @@ describe("threads copy", () => {

  const copiedThread = await client.threads.copy(thread.thread_id);
  const copiedThreadState = await client.threads.getState(
- copiedThread.thread_id
+ copiedThread.thread_id,
  );

  // check copied thread state matches expected output
@@ -338,11 +338,11 @@ describe("threads copy", () => {

  // test that copied thread has original as well as new values
  const copiedThreadState = await client.threads.getState<AgentState>(
- copiedThread.thread_id
+ copiedThread.thread_id,
  );

  const copiedThreadStateMessages = copiedThreadState.values.messages.map(
- (m) => m.content
+ (m) => m.content,
  );
  expect(copiedThreadStateMessages).toEqual([
  // original messages
@@ -359,7 +359,7 @@ describe("threads copy", () => {

  // test that the new run on the copied thread doesn't affect the original one
  const currentOriginalThreadState = await client.threads.getState(
- thread.thread_id
+ thread.thread_id,
  );
  expect(currentOriginalThreadState).toEqual(originalThreadState);
  });
@@ -378,7 +378,7 @@ describe("threads copy", () => {
  });

  const history = await client.threads.getHistory<AgentState>(
- thread.thread_id
+ thread.thread_id,
  );
  expect(history.length).toBe(5);
  expect(history[0].values.messages.length).toBe(4);
@@ -394,11 +394,11 @@ describe("threads copy", () => {
  });

  const fullHistory = await client.threads.getHistory<AgentState>(
- thread.thread_id
+ thread.thread_id,
  );
  const filteredHistory = await client.threads.getHistory<AgentState>(
  thread.thread_id,
- { metadata: runMetadata }
+ { metadata: runMetadata },
  );

  expect(fullHistory.length).toBe(10);
@@ -429,13 +429,13 @@ describe("threads copy", () => {
  });

  const copiedThreadState = await client.threads.getState<AgentState>(
- copyThread.thread_id
+ copyThread.thread_id,
  );
  expect(copiedThreadState.values.messages[0].content).toBe("bar");

  // test that updating the copied thread doesn't affect the original one
  const currentOriginalThreadState = await client.threads.getState(
- thread.thread_id
+ thread.thread_id,
  );
  expect(currentOriginalThreadState).toEqual(originalState);
  });
@@ -462,7 +462,7 @@ describe("runs", () => {
  input: { messages: [{ type: "human", content: "bar" }] },
  config: globalConfig,
  afterSeconds: 10,
- }
+ },
  );

  let runs = await client.runs.list(thread.thread_id);
@@ -486,7 +486,7 @@ describe("runs", () => {
  const stream = client.runs.stream(
  thread.thread_id,
  assistant.assistant_id,
- { input, streamMode: "values", config: globalConfig }
+ { input, streamMode: "values", config: globalConfig },
  );

  let runId: string | null = null;
@@ -523,7 +523,7 @@ describe("runs", () => {
  expect(cur.length).toBeGreaterThan(1);
  });

- it.concurrent("wait error", async () => {
+ it.concurrent("wait error", { retry: 3 }, async () => {
  const assistant = await client.assistants.create({ graphId: "agent" });
  const thread = await client.threads.create();
  const input = {
@@ -534,13 +534,13 @@ describe("runs", () => {
  client.runs.wait(thread.thread_id, assistant.assistant_id, {
  input,
  config: { ...globalConfig, recursion_limit: 1 },
- })
+ }),
  ).rejects.toThrowError(/GraphRecursionError/);
  const threadUpdated = await client.threads.get(thread.thread_id);
  expect(threadUpdated.status).toBe("error");
  });

- it.concurrent("wait", async () => {
+ it.concurrent("wait", { retry: 3 }, async () => {
  const assistant = await client.assistants.create({ graphId: "agent" });
  const thread = await client.threads.create();
  const input = {
@@ -549,7 +549,7 @@ describe("runs", () => {
  const values = await client.runs.wait(
  thread.thread_id,
  assistant.assistant_id,
- { input, config: globalConfig }
+ { input, config: globalConfig },
  );

  expect(Array.isArray((values as any).messages)).toBe(true);
@@ -566,7 +566,7 @@ describe("runs", () => {
  const stream = client.runs.stream(
  thread.thread_id,
  assistant.assistant_id,
- { input, streamMode: "updates", config: globalConfig }
+ { input, streamMode: "updates", config: globalConfig },
  );

  let runId: string | null = null;
@@ -606,18 +606,20 @@ describe("runs", () => {
  const stream = client.runs.stream(
  thread.thread_id,
  assistant.assistant_id,
- { input, streamMode: "events", config: globalConfig }
+ { input, streamMode: "events", config: globalConfig },
  );

  const events = await gatherIterator(stream);
  expect(new Set(events.map((i) => i.event))).toEqual(
- new Set(["metadata", "events"])
+ new Set(["metadata", "events"]),
  );

  expect(
  new Set(
- events.filter((i) => i.event === "events").map((i) => i.data.event)
- )
+ events
+ .filter((i) => i.event === "events")
+ .map((i) => (i.data as any).event),
+ ),
  ).toEqual(
  new Set([
  "on_chain_start",
@@ -625,7 +627,7 @@ describe("runs", () => {
  "on_chat_model_end",
  "on_chat_model_start",
  "on_chat_model_stream",
- ])
+ ]),
  );
  });

@@ -638,7 +640,7 @@ describe("runs", () => {
  const stream = client.runs.stream(
  thread.thread_id,
  assistant.assistant_id,
- { input, streamMode: "messages", config: globalConfig }
+ { input, streamMode: "messages", config: globalConfig },
  );

  let runId: string | null = null;
@@ -679,7 +681,7 @@ describe("runs", () => {
  "messages/metadata",
  "messages/partial",
  "messages/complete",
- ])
+ ]),
  );

  expect(runId).not.toBeNull();
@@ -696,11 +698,14 @@ describe("runs", () => {
  const stream = await client.runs.stream(
  thread.thread_id,
  assistant.assistant_id,
- { input, streamMode: "messages-tuple", config: globalConfig }
+ { input, streamMode: "messages-tuple", config: globalConfig },
  );

  const chunks = await gatherIterator(stream);
- const runId = findLast(chunks, (i) => i.event === "metadata")?.data.run_id;
+ const runId = findLast(
+ chunks,
+ (i): i is FeedbackStreamEvent => i.event === "metadata",
+ )?.data.run_id;
  expect(runId).not.toBeNull();

  const messages = chunks
@@ -730,7 +735,7 @@ describe("runs", () => {
  const stream = await client.runs.stream(
  thread.thread_id,
  assistant.assistant_id,
- { input, streamMode: ["messages", "values"], config: globalConfig }
+ { input, streamMode: ["messages", "values"], config: globalConfig },
  );

  const chunks = await gatherIterator(stream);
@@ -739,7 +744,7 @@ describe("runs", () => {

  const messages: BaseMessage[] = findLast(
  chunks,
- (i) => i.event === "values"
+ (i) => i.event === "values",
  )?.data.messages;

  expect(messages.length).toBe(4);
@@ -756,10 +761,10 @@ describe("runs", () => {
  "messages/partial",
  "messages/complete",
  "values",
- ])
+ ]),
  );

- const run = await client.runs.get(thread.thread_id, runId);
+ const run = await client.runs.get(thread.thread_id, runId!);
  expect(run.status).toBe("success");
  });

@@ -781,7 +786,7 @@ describe("runs", () => {
  input,
  interruptBefore: ["tool"],
  config: globalConfig,
- })
+ }),
  );

  expect(chunks.filter((i) => i.event === "error").length).toBe(0);
@@ -801,7 +806,7 @@ describe("runs", () => {
  client.runs.stream(thread.thread_id, assistant.assistant_id, {
  input: null,
  config: globalConfig,
- })
+ }),
  );

  expect(chunks.filter((i) => i.event === "error").length).toBe(0);
@@ -813,7 +818,7 @@ describe("runs", () => {

  const threadAfterContinue = await client.threads.get(thread.thread_id);
  expect(threadAfterContinue.status).toBe("idle");
- }
+ },
  );

  it.concurrent("human in the loop - modification", async () => {
@@ -831,7 +836,7 @@ describe("runs", () => {
  input,
  interruptBefore: ["tool"],
  config: globalConfig,
- })
+ }),
  );

  expect(chunks.filter((i) => i.event === "error").length).toBe(0);
@@ -839,7 +844,7 @@ describe("runs", () => {
  // edit the last message
  const lastMessage = findLast(
  chunks,
- (i) => i.event === "values"
+ (i) => i.event === "values",
  )?.data.messages.at(-1);
  lastMessage.content = "modified";

@@ -856,7 +861,7 @@ describe("runs", () => {
  expect(modifiedThread.metadata?.modified).toBe(true);

  const stateAfterModify = await client.threads.getState<AgentState>(
- thread.thread_id
+ thread.thread_id,
  );
  expect(stateAfterModify.values.messages.at(-1)?.content).toBe("modified");
  expect(stateAfterModify.next).toEqual(["tool"]);
@@ -869,7 +874,7 @@ describe("runs", () => {
  client.runs.stream(thread.thread_id, assistant.assistant_id, {
  input: null,
  config: globalConfig,
- })
+ }),
  );

  const threadAfterContinue = await client.threads.get(thread.thread_id);
@@ -884,7 +889,7 @@ describe("runs", () => {

  // get the history
  const history = await client.threads.getHistory<AgentState>(
- thread.thread_id
+ thread.thread_id,
  );
  expect(history.length).toBe(6);
  expect(history[0].next.length).toBe(0);
@@ -892,7 +897,7 @@ describe("runs", () => {
  expect(history.at(-1)?.next).toEqual(["__start__"]);
  });

- it.concurrent("interrupt before", async () => {
+ it.concurrent("interrupt before", { retry: 3 }, async () => {
  const assistant = await client.assistants.create({ graphId: "agent" });
  let thread = await client.threads.create();
  const input = {
@@ -930,7 +935,7 @@ describe("shared state", () => {
  const res1 = (await client.runs.wait(
  thread.thread_id,
  assistant.assistant_id,
- { input, config }
+ { input, config },
  )) as Awaited<Record<string, any>>;
  expect(res1.sharedStateValue).toBe(null);

@@ -938,7 +943,7 @@ describe("shared state", () => {
  const res2 = (await client.runs.wait(
  thread.thread_id,
  assistant.assistant_id,
- { input, config }
+ { input, config },
  )) as Awaited<Record<string, any>>;
  expect(res2.sharedStateValue).toBe(config.configurable.user_id);
  });
@@ -956,7 +961,7 @@ describe("shared state", () => {
  const res1 = (await client.runs.wait(
  thread.thread_id,
  assistant.assistant_id,
- { input, config: config1 }
+ { input, config: config1 },
  )) as Awaited<Record<string, any>>;

  // Run with the same thread id but a new config
@@ -964,7 +969,7 @@ describe("shared state", () => {
  const res2 = (await client.runs.wait(
  thread.thread_id,
  assistant.assistant_id,
- { input, config: config2 }
+ { input, config: config2 },
  )) as Awaited<Record<string, any>>;

  expect(res1.sharedStateValue).toBe(config1.configurable.user_id);
@@ -990,12 +995,12 @@ describe("shared state", () => {
  const res1 = (await client.runs.wait(
  thread.thread_id,
  assistant.assistant_id,
- { input, config }
+ { input, config },
  )) as Awaited<Record<string, any>>;
  expect(res1.sharedStateFromStoreConfig).toBeDefined();
  expect(res1.sharedStateFromStoreConfig.id).toBeDefined();
  expect(res1.sharedStateFromStoreConfig.id).toBe(
- config.configurable.user_id
+ config.configurable.user_id,
  );
  });

@@ -1020,12 +1025,12 @@ describe("shared state", () => {
  const res1 = (await client.runs.wait(
  thread.thread_id,
  assistant.assistant_id,
- { input, config }
+ { input, config },
  )) as Awaited<Record<string, any>>;
  expect(res1.sharedStateFromStoreConfig).toBeDefined();
  expect(res1.sharedStateFromStoreConfig.id).toBeDefined();
  expect(res1.sharedStateFromStoreConfig.id).toBe(
- config.configurable.user_id
+ config.configurable.user_id,
  );

  // Fetch data from store client
@@ -1117,10 +1122,10 @@ describe("StoreClient", () => {
  expect(searchResAfterPut.items[0].createdAt).toBeDefined();
  expect(searchResAfterPut.items[0].updatedAt).toBeDefined();
  expect(
- new Date(searchResAfterPut.items[0].createdAt).getTime()
+ new Date(searchResAfterPut.items[0].createdAt).getTime(),
  ).toBeLessThanOrEqual(Date.now());
  expect(
- new Date(searchResAfterPut.items[0].updatedAt).getTime()
+ new Date(searchResAfterPut.items[0].updatedAt).getTime(),
  ).toBeLessThanOrEqual(Date.now());

  const updatedValue = { foo: "baz" };
@@ -1137,7 +1142,7 @@ describe("StoreClient", () => {
  expect(searchResAfterUpdate.items[0].value).toEqual(updatedValue);

  expect(
- new Date(searchResAfterUpdate.items[0].updatedAt).getTime()
+ new Date(searchResAfterUpdate.items[0].updatedAt).getTime(),
  ).toBeGreaterThan(new Date(searchResAfterPut.items[0].updatedAt).getTime());

  const listResAfterPut = await client.store.listNamespaces();
@@ -1161,12 +1166,12 @@ describe("subgraphs", () => {
  const assistant = await client.assistants.create({ graphId: "nested" });

  expect(
- Object.keys(await client.assistants.getSubgraphs(assistant.assistant_id))
+ Object.keys(await client.assistants.getSubgraphs(assistant.assistant_id)),
  ).toEqual(["gp_two"]);

  const subgraphs = await client.assistants.getSubgraphs(
  assistant.assistant_id,
- { recurse: true }
+ { recurse: true },
  );

  expect(Object.keys(subgraphs)).toEqual(["gp_two", "gp_two|p_two"]);
@@ -1201,19 +1206,17 @@ describe("subgraphs", () => {
  // (1) interrupt and then continue running, no modification
  it.concurrent("human in the loop - no modification", async () => {
  const assistant = await client.assistants.create({ graphId: "weather" });
-
  const thread = await client.threads.create();
- const input = {
- messages: [{ role: "human", content: "SF", id: "initial-message" }],
- };

  // run until the interrupt
  let lastMessageBeforeInterrupt: { content?: string } | null = null;
  let chunks = await gatherIterator(
  client.runs.stream(thread.thread_id, assistant.assistant_id, {
- input,
+ input: {
+ messages: [{ role: "human", content: "SF", id: "initial-message" }],
+ },
  interruptBefore: ["tool"],
- })
+ }),
  );

  for (const chunk of chunks) {
@@ -1223,16 +1226,13 @@ describe("subgraphs", () => {
  }

  if (chunk.event === "error") {
- throw new Error(chunk.data);
+ throw new Error(chunk.data.error);
  }
  }

  expect(lastMessageBeforeInterrupt?.content).toBe("SF");
  expect(chunks).toEqual([
- {
- event: "metadata",
- data: { run_id: expect.any(String), attempt: 1 },
- },
+ { event: "metadata", data: { run_id: expect.any(String), attempt: 1 } },
  {
  event: "values",
  data: {
@@ -1285,7 +1285,7 @@ describe("subgraphs", () => {
  const stateRecursive = await client.threads.getState(
  thread.thread_id,
  undefined,
- { subgraphs: true }
+ { subgraphs: true },
  );

  expect(stateRecursive.next).toEqual(["weather_graph"]);
@@ -1338,24 +1338,27 @@ describe("subgraphs", () => {
  expect(threadAfterInterrupt.status).toBe("interrupted");

  // continue after interrupt
- chunks = await gatherIterator(
+ const chunksSubgraph = await gatherIterator(
  client.runs.stream(thread.thread_id, assistant.assistant_id, {
  input: null,
  streamMode: ["values", "updates"],
  streamSubgraphs: true,
- })
+ }),
  );

- expect(chunks.filter((i) => i.event === "error")).toEqual([]);
- expect(chunks.at(-1)?.event).toBe("values");
+ expect(chunksSubgraph.filter((i) => i.event === "error")).toEqual([]);
+ expect(chunksSubgraph.at(-1)?.event).toBe("values");

- const continueMessages = findLast(chunks, (i) => i.event === "values")?.data
- .messages;
+ type ChunkType = (typeof chunksSubgraph)[number];
+ const continueMessages = findLast(
+ chunksSubgraph,
+ (i): i is ChunkType & { event: "values" } => i.event === "values",
+ )?.data.messages;

  expect(continueMessages.length).toBe(2);
  expect(continueMessages[0].content).toBe("SF");
  expect(continueMessages[1].content).toBe("It's sunny in San Francisco!");
- expect(chunks).toEqual([
+ expect(chunksSubgraph).toEqual([
  {
  event: "metadata",
  data: { run_id: expect.any(String), attempt: 1 },
@@ -1497,7 +1500,7 @@ describe("subgraphs", () => {

  // run until the interrupt (same as before)
  let chunks = await gatherIterator(
- client.runs.stream(thread.thread_id, assistant.assistant_id, { input })
+ client.runs.stream(thread.thread_id, assistant.assistant_id, { input }),
  );
  expect(chunks.filter((i) => i.event === "error")).toEqual([]);

@@ -1529,7 +1532,7 @@ describe("subgraphs", () => {
  // get inner state after update
  const innerState = await client.threads.getState<{ city: string }>(
  thread.thread_id,
- state.tasks[0].checkpoint ?? undefined
+ state.tasks[0].checkpoint ?? undefined,
  );

  expect(innerState.values.city).toBe("LA");
@@ -1551,7 +1554,7 @@ describe("subgraphs", () => {
  chunks = await gatherIterator(
  client.runs.stream(thread.thread_id, assistant.assistant_id, {
  input: null,
- })
+ }),
  );

  expect(chunks.filter((i) => i.event === "error")).toEqual([]);
@@ -1586,7 +1589,7 @@ describe("subgraphs", () => {
  expect(innerHistory[innerHistory.length - 1].next).toEqual(["__start__"]);
  });

- it.concurrent("interrupt inside node", async () => {
+ it.concurrent("interrupt inside node", { retry: 3 }, async () => {
  const assistant = await client.assistants.create({ graphId: "agent" });

  let thread = await client.threads.create();
@@ -1636,7 +1639,7 @@ describe("subgraphs", () => {
  const stream = await gatherIterator(
  client.runs.stream(thread.thread_id, assistant.assistant_id, {
  command: { resume: "i want to resume" },
- })
+ }),
  );

  expect(stream.at(-1)?.event).toBe("values");
@@ -1653,7 +1656,7 @@ describe("errors", () => {
  client.runs.stream(thread.thread_id, assistant.assistant_id, {
  input: { messages: [] },
  streamMode: ["debug", "events"],
- })
+ }),
  );

  expect(stream.at(-1)).toMatchObject({
@@ -1672,7 +1675,7 @@ describe("errors", () => {
  const run = await client.runs.create(
  thread.thread_id,
  assistant.assistant_id,
- { input: { messages: [] } }
+ { input: { messages: [] } },
  );

  await client.runs.join(thread.thread_id, run.run_id);
@@ -1687,11 +1690,11 @@ describe("errors", () => {
  const run = await client.runs.create(
  thread.thread_id,
  assistant.assistant_id,
- { input: { messages: [] } }
+ { input: { messages: [] } },
  );

  const stream = await gatherIterator(
- client.runs.joinStream(thread.thread_id, run.run_id)
+ client.runs.joinStream(thread.thread_id, run.run_id),
  );

  expect(stream.at(-1)).toMatchObject({
@@ -1721,7 +1724,7 @@ describe("long running tasks", () => {
  {
  input: { messages: [], delay },
  config: globalConfig,
- }
+ },
  );

  await client.runs.join(thread.thread_id, run.run_id);
@@ -1737,14 +1740,12 @@ describe("long running tasks", () => {
  expect(runResult.values.messages).toMatchObject([
  { content: `finished after ${delay}ms` },
  ]);
- }
+ },
  );
  });

  it("unusual newline termination characters", async () => {
- const thread = await client.threads.create({
- metadata: { graph_id: "agent" },
- });
+ const thread = await client.threads.create({ graphId: "agent" });

  await client.threads.updateState(thread.thread_id, {
  values: {
@@ -1764,7 +1765,7 @@ it("unusual newline termination characters", async () => {
  expect(history.length).toBe(1);
  expect(history[0].values.messages.length).toBe(1);
  expect(history[0].values.messages[0].content).toBe(
- "Page break characters: \n\r\x0b\x0c\x1c\x1d\x1e\x85\u2028\u2029"
+ "Page break characters: \n\r\x0b\x0c\x1c\x1d\x1e\x85\u2028\u2029",
  );
  });

@@ -1785,7 +1786,7 @@ describe.skip("command update state", () => {
  client.runs.stream(thread.thread_id, assistant.assistant_id, {
  command: { update: { keyOne: "value3", keyTwo: "value4" } },
  config: globalConfig,
- })
+ }),
  );
  expect(stream.filter((chunk) => chunk.event === "error")).toEqual([]);

@@ -1813,7 +1814,7 @@ describe.skip("command update state", () => {
  ],
  },
  config: globalConfig,
- })
+ }),
  );

  expect(stream.filter((chunk) => chunk.event === "error")).toEqual([]);
@@ -1829,3 +1830,73 @@ describe.skip("command update state", () => {
  });
  });
  });
+
+ it("dynamic graph", async () => {
+ const defaultAssistant = await client.assistants.create({
+ graphId: "dynamic",
+ });
+
+ let updates = await gatherIterator(
+ client.runs.stream(null, defaultAssistant.assistant_id, {
+ input: { messages: ["input"] },
+ streamMode: ["updates"],
+ }),
+ );
+
+ expect
+ .soft(
+ updates
+ .filter((i) => i.event === "updates")
+ .flatMap((i) => Object.keys(i.data)),
+ )
+ .toEqual(expect.arrayContaining(["default"]));
+
+ updates = await gatherIterator(
+ client.runs.stream(null, defaultAssistant.assistant_id, {
+ input: { messages: ["input"] },
+ config: { configurable: { nodeName: "runtime" } },
+ streamMode: ["updates"],
+ }),
+ );
+
+ expect
+ .soft(
+ updates
+ .filter((i) => i.event === "updates")
+ .flatMap((i) => Object.keys(i.data)),
+ )
+ .toEqual(expect.arrayContaining(["runtime"]));
+
+ const configAssistant = await client.assistants.create({
+ graphId: "dynamic",
+ config: { configurable: { nodeName: "assistant" } },
+ });
+
+ let thread = await client.threads.create({ graphId: "dynamic" });
+ updates = await gatherIterator(
+ client.runs.stream(thread.thread_id, configAssistant.assistant_id, {
+ input: { messages: ["input"], configurable: { nodeName: "assistant" } },
+ streamMode: ["updates"],
+ }),
+ );
+
+ expect
+ .soft(
+ updates
+ .filter((i) => i.event === "updates")
+ .flatMap((i) => Object.keys(i.data)),
+ )
+ .toEqual(expect.arrayContaining(["assistant"]));
+
+ thread = await client.threads.get(thread.thread_id);
+
+ // check if we are properly recreating the graph with the
+ // stored configuration inside a thread
+ await client.threads.updateState(thread.thread_id, {
+ values: { messages: "update" },
+ asNode: "assistant",
+ });
+
+ const state = await client.threads.getState(thread.thread_id);
+ expect(state.values.messages).toEqual(["input", "assistant", "update"]);
+ });