langgraph-api 0.1.0__py3-none-any.whl → 0.1.2__py3-none-any.whl
This diff shows the changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
Potentially problematic release.
This version of langgraph-api might be problematic.
- langgraph_api/__init__.py +1 -1
- langgraph_api/auth/custom.py +25 -4
- langgraph_api/graph.py +4 -12
- langgraph_api/js/__init__.py +0 -0
- langgraph_api/js/base.py +17 -0
- langgraph_api/js/build.mts +3 -3
- langgraph_api/js/client.mts +64 -3
- langgraph_api/js/global.d.ts +1 -0
- langgraph_api/js/package.json +4 -3
- langgraph_api/js/remote.py +93 -2
- langgraph_api/js/src/graph.mts +0 -6
- langgraph_api/js/src/utils/files.mts +4 -0
- langgraph_api/js/tests/api.test.mts +80 -80
- langgraph_api/js/tests/auth.test.mts +648 -0
- langgraph_api/js/tests/compose-postgres.auth.yml +59 -0
- langgraph_api/js/tests/graphs/agent_simple.mts +79 -0
- langgraph_api/js/tests/graphs/auth.mts +106 -0
- langgraph_api/js/tests/graphs/package.json +3 -1
- langgraph_api/js/tests/graphs/yarn.lock +9 -4
- langgraph_api/js/yarn.lock +18 -23
- langgraph_api/stream.py +2 -1
- langgraph_api/worker.py +1 -22
- {langgraph_api-0.1.0.dist-info → langgraph_api-0.1.2.dist-info}/METADATA +1 -2
- {langgraph_api-0.1.0.dist-info → langgraph_api-0.1.2.dist-info}/RECORD +27 -21
- {langgraph_api-0.1.0.dist-info → langgraph_api-0.1.2.dist-info}/LICENSE +0 -0
- {langgraph_api-0.1.0.dist-info → langgraph_api-0.1.2.dist-info}/WHEEL +0 -0
- {langgraph_api-0.1.0.dist-info → langgraph_api-0.1.2.dist-info}/entry_points.txt +0 -0
langgraph_api/js/tests/api.test.mts
@@ -12,7 +12,7 @@ import { randomUUID } from "crypto";
 
 const sql = postgres(
   process.env.POSTGRES_URI ??
-    "postgres://postgres:postgres@127.0.0.1:5433/postgres?sslmode=disable"
+    "postgres://postgres:postgres@127.0.0.1:5433/postgres?sslmode=disable",
 );
 
 const client = new Client<any>({
@@ -61,7 +61,7 @@ describe("assistants", () => {
 
     await client.assistants.delete(res.assistant_id);
     await expect(() => client.assistants.get(res.assistant_id)).rejects.toThrow(
-      "HTTP 404: Not Found"
+      "HTTP 404: Not Found",
     );
   });
 
@@ -130,7 +130,7 @@ describe("assistants", () => {
 
     await client.assistants.delete(res.assistant_id);
     await expect(() => client.assistants.get(res.assistant_id)).rejects.toThrow(
-      "HTTP 404: Not Found"
+      "HTTP 404: Not Found",
     );
   });
 
@@ -154,7 +154,7 @@ describe("assistants", () => {
     });
     expect(search.length).toBeGreaterThanOrEqual(1);
     expect(search.every((i) => i.assistant_id !== create.assistant_id)).toBe(
-      true
+      true,
    );
   });
 
@@ -257,7 +257,7 @@ describe("threads copy", () => {
 
     const copiedThread = await client.threads.copy(thread.thread_id);
     const copiedThreadState = await client.threads.getState(
-      copiedThread.thread_id
+      copiedThread.thread_id,
     );
 
     // check copied thread state matches expected output
@@ -338,11 +338,11 @@ describe("threads copy", () => {
 
     // test that copied thread has original as well as new values
     const copiedThreadState = await client.threads.getState<AgentState>(
-      copiedThread.thread_id
+      copiedThread.thread_id,
     );
 
     const copiedThreadStateMessages = copiedThreadState.values.messages.map(
-      (m) => m.content
+      (m) => m.content,
     );
     expect(copiedThreadStateMessages).toEqual([
       // original messages
@@ -359,7 +359,7 @@ describe("threads copy", () => {
 
     // test that the new run on the copied thread doesn't affect the original one
     const currentOriginalThreadState = await client.threads.getState(
-      thread.thread_id
+      thread.thread_id,
     );
     expect(currentOriginalThreadState).toEqual(originalThreadState);
   });
@@ -378,7 +378,7 @@ describe("threads copy", () => {
     });
 
     const history = await client.threads.getHistory<AgentState>(
-      thread.thread_id
+      thread.thread_id,
     );
     expect(history.length).toBe(5);
     expect(history[0].values.messages.length).toBe(4);
@@ -394,11 +394,11 @@ describe("threads copy", () => {
     });
 
     const fullHistory = await client.threads.getHistory<AgentState>(
-      thread.thread_id
+      thread.thread_id,
     );
     const filteredHistory = await client.threads.getHistory<AgentState>(
       thread.thread_id,
-      { metadata: runMetadata }
+      { metadata: runMetadata },
     );
 
     expect(fullHistory.length).toBe(10);
@@ -429,13 +429,13 @@ describe("threads copy", () => {
     });
 
     const copiedThreadState = await client.threads.getState<AgentState>(
-      copyThread.thread_id
+      copyThread.thread_id,
     );
     expect(copiedThreadState.values.messages[0].content).toBe("bar");
 
     // test that updating the copied thread doesn't affect the original one
     const currentOriginalThreadState = await client.threads.getState(
-      thread.thread_id
+      thread.thread_id,
     );
     expect(currentOriginalThreadState).toEqual(originalState);
   });
@@ -462,7 +462,7 @@ describe("runs", () => {
         input: { messages: [{ type: "human", content: "bar" }] },
         config: globalConfig,
         afterSeconds: 10,
-      }
+      },
     );
 
     let runs = await client.runs.list(thread.thread_id);
@@ -486,7 +486,7 @@ describe("runs", () => {
     const stream = client.runs.stream(
       thread.thread_id,
       assistant.assistant_id,
-      { input, streamMode: "values", config: globalConfig }
+      { input, streamMode: "values", config: globalConfig },
     );
 
     let runId: string | null = null;
@@ -534,7 +534,7 @@ describe("runs", () => {
       client.runs.wait(thread.thread_id, assistant.assistant_id, {
         input,
         config: { ...globalConfig, recursion_limit: 1 },
-      })
+      }),
     ).rejects.toThrowError(/GraphRecursionError/);
     const threadUpdated = await client.threads.get(thread.thread_id);
     expect(threadUpdated.status).toBe("error");
@@ -549,7 +549,7 @@ describe("runs", () => {
     const values = await client.runs.wait(
       thread.thread_id,
       assistant.assistant_id,
-      { input, config: globalConfig }
+      { input, config: globalConfig },
     );
 
     expect(Array.isArray((values as any).messages)).toBe(true);
@@ -566,7 +566,7 @@ describe("runs", () => {
     const stream = client.runs.stream(
       thread.thread_id,
       assistant.assistant_id,
-      { input, streamMode: "updates", config: globalConfig }
+      { input, streamMode: "updates", config: globalConfig },
     );
 
     let runId: string | null = null;
@@ -606,20 +606,20 @@ describe("runs", () => {
     const stream = client.runs.stream(
       thread.thread_id,
       assistant.assistant_id,
-      { input, streamMode: "events", config: globalConfig }
+      { input, streamMode: "events", config: globalConfig },
     );
 
     const events = await gatherIterator(stream);
     expect(new Set(events.map((i) => i.event))).toEqual(
-      new Set(["metadata", "events"])
+      new Set(["metadata", "events"]),
     );
 
     expect(
       new Set(
         events
           .filter((i) => i.event === "events")
-          .map((i) => (i.data as any).event)
-      )
+          .map((i) => (i.data as any).event),
+      ),
     ).toEqual(
       new Set([
         "on_chain_start",
@@ -627,11 +627,11 @@ describe("runs", () => {
         "on_chat_model_end",
         "on_chat_model_start",
         "on_chat_model_stream",
-      ])
+      ]),
     );
   });
 
-  it.concurrent("stream messages", async () => {
+  it.concurrent("stream messages", { retry: 3 }, async () => {
    const assistant = await client.assistants.create({ graphId: "agent" });
    const thread = await client.threads.create();
    const input = {
@@ -640,7 +640,7 @@ describe("runs", () => {
     const stream = client.runs.stream(
       thread.thread_id,
       assistant.assistant_id,
-      { input, streamMode: "messages", config: globalConfig }
+      { input, streamMode: "messages", config: globalConfig },
     );
 
     let runId: string | null = null;
@@ -681,7 +681,7 @@ describe("runs", () => {
         "messages/metadata",
         "messages/partial",
         "messages/complete",
-      ])
+      ]),
     );
 
     expect(runId).not.toBeNull();
@@ -689,7 +689,7 @@ describe("runs", () => {
     expect(run.status).toBe("success");
   });
 
-  it.concurrent("stream messages tuple", async () => {
+  it.concurrent("stream messages tuple", { retry: 3 }, async () => {
    const assistant = await client.assistants.create({ graphId: "agent" });
    const thread = await client.threads.create();
    const input = {
@@ -698,13 +698,13 @@ describe("runs", () => {
     const stream = await client.runs.stream(
       thread.thread_id,
       assistant.assistant_id,
-      { input, streamMode: "messages-tuple", config: globalConfig }
+      { input, streamMode: "messages-tuple", config: globalConfig },
     );
 
     const chunks = await gatherIterator(stream);
     const runId = findLast(
       chunks,
-      (i): i is FeedbackStreamEvent => i.event === "metadata"
+      (i): i is FeedbackStreamEvent => i.event === "metadata",
     )?.data.run_id;
     expect(runId).not.toBeNull();
 
@@ -735,7 +735,7 @@ describe("runs", () => {
     const stream = await client.runs.stream(
       thread.thread_id,
       assistant.assistant_id,
-      { input, streamMode: ["messages", "values"], config: globalConfig }
+      { input, streamMode: ["messages", "values"], config: globalConfig },
     );
 
     const chunks = await gatherIterator(stream);
@@ -744,7 +744,7 @@ describe("runs", () => {
 
     const messages: BaseMessage[] = findLast(
       chunks,
-      (i) => i.event === "values"
+      (i) => i.event === "values",
     )?.data.messages;
 
     expect(messages.length).toBe(4);
@@ -761,7 +761,7 @@ describe("runs", () => {
         "messages/partial",
         "messages/complete",
         "values",
-      ])
+      ]),
     );
 
     const run = await client.runs.get(thread.thread_id, runId!);
@@ -786,7 +786,7 @@ describe("runs", () => {
         input,
         interruptBefore: ["tool"],
         config: globalConfig,
-      })
+      }),
     );
 
     expect(chunks.filter((i) => i.event === "error").length).toBe(0);
@@ -806,7 +806,7 @@ describe("runs", () => {
       client.runs.stream(thread.thread_id, assistant.assistant_id, {
         input: null,
         config: globalConfig,
-      })
+      }),
     );
 
     expect(chunks.filter((i) => i.event === "error").length).toBe(0);
@@ -818,7 +818,7 @@ describe("runs", () => {
 
       const threadAfterContinue = await client.threads.get(thread.thread_id);
       expect(threadAfterContinue.status).toBe("idle");
-    }
+    },
   );
 
   it.concurrent("human in the loop - modification", async () => {
@@ -836,7 +836,7 @@ describe("runs", () => {
         input,
         interruptBefore: ["tool"],
         config: globalConfig,
-      })
+      }),
     );
 
     expect(chunks.filter((i) => i.event === "error").length).toBe(0);
@@ -844,7 +844,7 @@ describe("runs", () => {
     // edit the last message
     const lastMessage = findLast(
       chunks,
-      (i) => i.event === "values"
+      (i) => i.event === "values",
     )?.data.messages.at(-1);
     lastMessage.content = "modified";
 
@@ -861,7 +861,7 @@ describe("runs", () => {
     expect(modifiedThread.metadata?.modified).toBe(true);
 
     const stateAfterModify = await client.threads.getState<AgentState>(
-      thread.thread_id
+      thread.thread_id,
     );
     expect(stateAfterModify.values.messages.at(-1)?.content).toBe("modified");
     expect(stateAfterModify.next).toEqual(["tool"]);
@@ -874,7 +874,7 @@ describe("runs", () => {
       client.runs.stream(thread.thread_id, assistant.assistant_id, {
         input: null,
         config: globalConfig,
-      })
+      }),
     );
 
     const threadAfterContinue = await client.threads.get(thread.thread_id);
@@ -889,7 +889,7 @@ describe("runs", () => {
 
     // get the history
     const history = await client.threads.getHistory<AgentState>(
-      thread.thread_id
+      thread.thread_id,
     );
     expect(history.length).toBe(6);
     expect(history[0].next.length).toBe(0);
@@ -935,7 +935,7 @@ describe("shared state", () => {
     const res1 = (await client.runs.wait(
       thread.thread_id,
       assistant.assistant_id,
-      { input, config }
+      { input, config },
     )) as Awaited<Record<string, any>>;
     expect(res1.sharedStateValue).toBe(null);
 
@@ -943,7 +943,7 @@ describe("shared state", () => {
     const res2 = (await client.runs.wait(
       thread.thread_id,
       assistant.assistant_id,
-      { input, config }
+      { input, config },
     )) as Awaited<Record<string, any>>;
     expect(res2.sharedStateValue).toBe(config.configurable.user_id);
   });
@@ -961,7 +961,7 @@ describe("shared state", () => {
     const res1 = (await client.runs.wait(
       thread.thread_id,
      assistant.assistant_id,
-      { input, config: config1 }
+      { input, config: config1 },
     )) as Awaited<Record<string, any>>;
 
     // Run with the same thread id but a new config
@@ -969,7 +969,7 @@ describe("shared state", () => {
     const res2 = (await client.runs.wait(
       thread.thread_id,
       assistant.assistant_id,
-      { input, config: config2 }
+      { input, config: config2 },
     )) as Awaited<Record<string, any>>;
 
     expect(res1.sharedStateValue).toBe(config1.configurable.user_id);
@@ -995,12 +995,12 @@ describe("shared state", () => {
     const res1 = (await client.runs.wait(
       thread.thread_id,
       assistant.assistant_id,
-      { input, config }
+      { input, config },
     )) as Awaited<Record<string, any>>;
     expect(res1.sharedStateFromStoreConfig).toBeDefined();
     expect(res1.sharedStateFromStoreConfig.id).toBeDefined();
     expect(res1.sharedStateFromStoreConfig.id).toBe(
-      config.configurable.user_id
+      config.configurable.user_id,
     );
   });
 
@@ -1025,12 +1025,12 @@ describe("shared state", () => {
     const res1 = (await client.runs.wait(
       thread.thread_id,
       assistant.assistant_id,
-      { input, config }
+      { input, config },
     )) as Awaited<Record<string, any>>;
     expect(res1.sharedStateFromStoreConfig).toBeDefined();
     expect(res1.sharedStateFromStoreConfig.id).toBeDefined();
     expect(res1.sharedStateFromStoreConfig.id).toBe(
-      config.configurable.user_id
+      config.configurable.user_id,
     );
 
     // Fetch data from store client
@@ -1122,10 +1122,10 @@ describe("StoreClient", () => {
     expect(searchResAfterPut.items[0].createdAt).toBeDefined();
     expect(searchResAfterPut.items[0].updatedAt).toBeDefined();
     expect(
-      new Date(searchResAfterPut.items[0].createdAt).getTime()
+      new Date(searchResAfterPut.items[0].createdAt).getTime(),
     ).toBeLessThanOrEqual(Date.now());
     expect(
-      new Date(searchResAfterPut.items[0].updatedAt).getTime()
+      new Date(searchResAfterPut.items[0].updatedAt).getTime(),
     ).toBeLessThanOrEqual(Date.now());
 
     const updatedValue = { foo: "baz" };
@@ -1142,7 +1142,7 @@ describe("StoreClient", () => {
     expect(searchResAfterUpdate.items[0].value).toEqual(updatedValue);
 
     expect(
-      new Date(searchResAfterUpdate.items[0].updatedAt).getTime()
+      new Date(searchResAfterUpdate.items[0].updatedAt).getTime(),
     ).toBeGreaterThan(new Date(searchResAfterPut.items[0].updatedAt).getTime());
 
     const listResAfterPut = await client.store.listNamespaces();
@@ -1166,12 +1166,12 @@ describe("subgraphs", () => {
     const assistant = await client.assistants.create({ graphId: "nested" });
 
     expect(
-      Object.keys(await client.assistants.getSubgraphs(assistant.assistant_id))
+      Object.keys(await client.assistants.getSubgraphs(assistant.assistant_id)),
     ).toEqual(["gp_two"]);
 
     const subgraphs = await client.assistants.getSubgraphs(
       assistant.assistant_id,
-      { recurse: true }
+      { recurse: true },
     );
 
     expect(Object.keys(subgraphs)).toEqual(["gp_two", "gp_two|p_two"]);
@@ -1216,7 +1216,7 @@ describe("subgraphs", () => {
          messages: [{ role: "human", content: "SF", id: "initial-message" }],
        },
        interruptBefore: ["tool"],
-      })
+      }),
     );
 
     for (const chunk of chunks) {
@@ -1285,7 +1285,7 @@ describe("subgraphs", () => {
     const stateRecursive = await client.threads.getState(
       thread.thread_id,
       undefined,
-      { subgraphs: true }
+      { subgraphs: true },
     );
 
     expect(stateRecursive.next).toEqual(["weather_graph"]);
@@ -1343,7 +1343,7 @@ describe("subgraphs", () => {
        input: null,
        streamMode: ["values", "updates"],
        streamSubgraphs: true,
-      })
+      }),
     );
 
     expect(chunksSubgraph.filter((i) => i.event === "error")).toEqual([]);
@@ -1352,7 +1352,7 @@ describe("subgraphs", () => {
     type ChunkType = (typeof chunksSubgraph)[number];
     const continueMessages = findLast(
       chunksSubgraph,
-      (i): i is ChunkType & { event: "values" } => i.event === "values"
+      (i): i is ChunkType & { event: "values" } => i.event === "values",
     )?.data.messages;
 
     expect(continueMessages.length).toBe(2);
@@ -1500,7 +1500,7 @@ describe("subgraphs", () => {
 
     // run until the interrupt (same as before)
     let chunks = await gatherIterator(
-      client.runs.stream(thread.thread_id, assistant.assistant_id, { input })
+      client.runs.stream(thread.thread_id, assistant.assistant_id, { input }),
     );
     expect(chunks.filter((i) => i.event === "error")).toEqual([]);
 
@@ -1532,7 +1532,7 @@ describe("subgraphs", () => {
     // get inner state after update
     const innerState = await client.threads.getState<{ city: string }>(
       thread.thread_id,
-      state.tasks[0].checkpoint ?? undefined
+      state.tasks[0].checkpoint ?? undefined,
     );
 
     expect(innerState.values.city).toBe("LA");
@@ -1554,7 +1554,7 @@ describe("subgraphs", () => {
     chunks = await gatherIterator(
       client.runs.stream(thread.thread_id, assistant.assistant_id, {
         input: null,
-      })
+      }),
     );
 
     expect(chunks.filter((i) => i.event === "error")).toEqual([]);
@@ -1639,7 +1639,7 @@ describe("subgraphs", () => {
     const stream = await gatherIterator(
       client.runs.stream(thread.thread_id, assistant.assistant_id, {
         command: { resume: "i want to resume" },
-      })
+      }),
     );
 
     expect(stream.at(-1)?.event).toBe("values");
@@ -1656,7 +1656,7 @@ describe("errors", () => {
       client.runs.stream(thread.thread_id, assistant.assistant_id, {
         input: { messages: [] },
         streamMode: ["debug", "events"],
-      })
+      }),
     );
 
     expect(stream.at(-1)).toMatchObject({
@@ -1675,7 +1675,7 @@ describe("errors", () => {
     const run = await client.runs.create(
       thread.thread_id,
       assistant.assistant_id,
-      { input: { messages: [] } }
+      { input: { messages: [] } },
     );
 
     await client.runs.join(thread.thread_id, run.run_id);
@@ -1690,11 +1690,11 @@ describe("errors", () => {
     const run = await client.runs.create(
       thread.thread_id,
       assistant.assistant_id,
-      { input: { messages: [] } }
+      { input: { messages: [] } },
     );
 
     const stream = await gatherIterator(
-      client.runs.joinStream(thread.thread_id, run.run_id)
+      client.runs.joinStream(thread.thread_id, run.run_id),
     );
 
     expect(stream.at(-1)).toMatchObject({
@@ -1724,7 +1724,7 @@ describe("long running tasks", () => {
        {
          input: { messages: [], delay },
          config: globalConfig,
-        }
+        },
       );
 
       await client.runs.join(thread.thread_id, run.run_id);
@@ -1740,7 +1740,7 @@ describe("long running tasks", () => {
       expect(runResult.values.messages).toMatchObject([
         { content: `finished after ${delay}ms` },
       ]);
-    }
+    },
   );
 });
 
@@ -1765,7 +1765,7 @@ it("unusual newline termination characters", async () => {
   expect(history.length).toBe(1);
   expect(history[0].values.messages.length).toBe(1);
   expect(history[0].values.messages[0].content).toBe(
-    "Page break characters: \n\r\x0b\x0c\x1c\x1d\x1e\x85\u2028\u2029"
+    "Page break characters: \n\r\x0b\x0c\x1c\x1d\x1e\x85\u2028\u2029",
   );
 });
 
@@ -1785,7 +1785,7 @@ describe("command update state", () => {
       client.runs.stream(thread.thread_id, assistant.assistant_id, {
         command: { update: { keyOne: "value3", keyTwo: "value4" } },
         config: globalConfig,
-      })
+      }),
     );
     expect(stream.filter((chunk) => chunk.event === "error")).toEqual([]);
 
@@ -1813,7 +1813,7 @@ describe("command update state", () => {
          ],
        },
        config: globalConfig,
-      })
+      }),
     );
 
     expect(stream.filter((chunk) => chunk.event === "error")).toEqual([]);
@@ -1877,7 +1877,7 @@ describe("command update state", () => {
       client.runs.stream(thread.thread_id, assistant.assistant_id, {
         // TODO: figure out why we cannot go to the interrupt node directly
         command: { goto: "before_interrupt" },
-      })
+      }),
     );
 
     let state = await client.threads.getState(thread.thread_id);
@@ -1893,7 +1893,7 @@ describe("command update state", () => {
       client.runs.stream(thread.thread_id, assistant.assistant_id, {
         command: { resume: "resume" },
         streamMode: ["updates"],
-      })
+      }),
     );
 
     state = await client.threads.getState(thread.thread_id);
@@ -1913,14 +1913,14 @@ it("dynamic graph", async () => {
    client.runs.stream(null, defaultAssistant.assistant_id, {
      input: { messages: ["input"] },
      streamMode: ["updates"],
-    })
+    }),
  );
 
  expect
    .soft(
      updates
        .filter((i) => i.event === "updates")
-        .flatMap((i) => Object.keys(i.data))
+        .flatMap((i) => Object.keys(i.data)),
    )
    .toEqual(expect.arrayContaining(["default"]));
 
@@ -1929,14 +1929,14 @@ it("dynamic graph", async () => {
      input: { messages: ["input"] },
      config: { configurable: { nodeName: "runtime" } },
      streamMode: ["updates"],
-    })
+    }),
  );
 
  expect
    .soft(
      updates
        .filter((i) => i.event === "updates")
-        .flatMap((i) => Object.keys(i.data))
+        .flatMap((i) => Object.keys(i.data)),
    )
    .toEqual(expect.arrayContaining(["runtime"]));
 
@@ -1950,14 +1950,14 @@ it("dynamic graph", async () => {
    client.runs.stream(thread.thread_id, configAssistant.assistant_id, {
      input: { messages: ["input"], configurable: { nodeName: "assistant" } },
      streamMode: ["updates"],
-    })
+    }),
  );
 
  expect
    .soft(
      updates
        .filter((i) => i.event === "updates")
-        .flatMap((i) => Object.keys(i.data))
+        .flatMap((i) => Object.keys(i.data)),
    )
    .toEqual(expect.arrayContaining(["assistant"]));
 
@@ -1977,7 +1977,7 @@ it("dynamic graph", async () => {
 it("generative ui", async () => {
   const ui = await client["~ui"].getComponent("agent", "weather-component");
   expect(ui).toEqual(
-    `<script src="//localhost:9123/ui/agent/entrypoint.js" onload='__LGUI_agent.render("weather-component", "{{shadowRootId}}")'></script>\n<link rel="stylesheet" href="//localhost:9123/ui/agent/entrypoint.css"
+    `<script src="//localhost:9123/ui/agent/entrypoint.js" onload='__LGUI_agent.render("weather-component", "{{shadowRootId}}")'></script>\n<link rel="stylesheet" href="//localhost:9123/ui/agent/entrypoint.css" />`,
   );
 
   const match = /src="(?<src>[^"]+)"/.exec(ui);
@@ -1990,6 +1990,6 @@ it("generative ui", async () => {
   expect(js).contains(`globalThis[Symbol.for("LGUI_REQUIRE")]`);
 
   await expect(() =>
-    client["~ui"].getComponent("non-existent", "none")
+    client["~ui"].getComponent("non-existent", "none"),
   ).rejects.toThrow();
 });