@copilotkit/runtime 0.38.0-mme-alpha.1 → 1.0.0-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +81 -67
- package/CHANGELOG.md +13 -10
- package/__snapshots__/schema/schema.graphql +15 -1
- package/dist/chunk-44O2JGUY.mjs +12 -0
- package/dist/chunk-5U6S5EGT.mjs +24 -0
- package/dist/chunk-5U6S5EGT.mjs.map +1 -0
- package/dist/{chunk-XI3HBDMA.mjs → chunk-7QCEK4DP.mjs} +25 -14
- package/dist/chunk-7QCEK4DP.mjs.map +1 -0
- package/dist/{chunk-DX2KAJBF.mjs → chunk-DL2QQEOR.mjs} +204 -19
- package/dist/chunk-DL2QQEOR.mjs.map +1 -0
- package/dist/{chunk-FRK6BXXV.mjs → chunk-DVDKD6F5.mjs} +3 -3
- package/dist/{chunk-NXFMYCNF.mjs → chunk-F5K2JKGM.mjs} +38 -23
- package/dist/chunk-F5K2JKGM.mjs.map +1 -0
- package/dist/{chunk-OZMCHYYR.mjs → chunk-GEIBJJQ4.mjs} +3 -3
- package/dist/{chunk-4RGXTUS4.mjs → chunk-R25IOQB5.mjs} +17 -4
- package/dist/chunk-R25IOQB5.mjs.map +1 -0
- package/dist/{chunk-6NZ4UMOD.mjs → chunk-RMZWGQ46.mjs} +2 -2
- package/dist/{chunk-BYB2LNMK.mjs → chunk-U3V2BCGI.mjs} +2 -2
- package/dist/chunk-ZTEJCDMJ.mjs +24 -0
- package/dist/chunk-ZTEJCDMJ.mjs.map +1 -0
- package/dist/graphql/types/base/index.mjs +2 -2
- package/dist/graphql/types/converted/index.mjs +3 -3
- package/dist/index.d.ts +8 -5
- package/dist/index.js +297 -53
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +17 -11
- package/dist/index.mjs.map +1 -1
- package/dist/{langchain-adapter-54784d29.d.ts → langchain-adapter-a02d1d38.d.ts} +4 -4
- package/dist/{langserve-63794237.d.ts → langserve-75ebbc38.d.ts} +1 -2
- package/dist/lib/index.d.ts +8 -5
- package/dist/lib/index.js +297 -53
- package/dist/lib/index.js.map +1 -1
- package/dist/lib/index.mjs +17 -11
- package/dist/lib/integrations/index.d.ts +7 -4
- package/dist/lib/integrations/index.js +254 -14
- package/dist/lib/integrations/index.js.map +1 -1
- package/dist/lib/integrations/index.mjs +15 -7
- package/dist/lib/integrations/nest/index.d.ts +14 -0
- package/dist/lib/integrations/nest/index.js +1481 -0
- package/dist/lib/integrations/nest/index.js.map +1 -0
- package/dist/lib/integrations/nest/index.mjs +13 -0
- package/dist/lib/integrations/nest/index.mjs.map +1 -0
- package/dist/lib/integrations/node-express/index.d.ts +14 -0
- package/dist/lib/integrations/node-express/index.js +1481 -0
- package/dist/lib/integrations/node-express/index.js.map +1 -0
- package/dist/lib/integrations/node-express/index.mjs +13 -0
- package/dist/lib/integrations/node-express/index.mjs.map +1 -0
- package/dist/lib/integrations/node-http/index.d.ts +7 -3
- package/dist/lib/integrations/node-http/index.js +214 -14
- package/dist/lib/integrations/node-http/index.js.map +1 -1
- package/dist/lib/integrations/node-http/index.mjs +6 -6
- package/dist/{pages-router-b88114e9.d.ts → pages-router-24897543.d.ts} +1 -1
- package/dist/service-adapters/index.d.ts +2 -2
- package/dist/service-adapters/index.js +28 -17
- package/dist/service-adapters/index.js.map +1 -1
- package/dist/service-adapters/index.mjs +5 -5
- package/dist/{index-1eb2e9b1.d.ts → shared-48c0f7b8.d.ts} +64 -21
- package/dist/utils/index.mjs +2 -2
- package/package.json +4 -4
- package/src/graphql/inputs/cloud-guardrails.input.ts +2 -2
- package/src/graphql/inputs/generate-copilot-response.input.ts +10 -0
- package/src/graphql/resolvers/copilot.resolver.ts +79 -3
- package/src/graphql/types/enums.ts +13 -0
- package/src/lib/copilot-runtime.ts +106 -32
- package/src/lib/integrations/index.ts +2 -0
- package/src/lib/integrations/nest/index.ts +14 -0
- package/src/lib/integrations/nextjs/app-router.ts +10 -0
- package/src/lib/integrations/nextjs/pages-router.ts +10 -0
- package/src/lib/integrations/node-express/index.ts +14 -0
- package/src/lib/integrations/node-http/index.ts +10 -0
- package/src/lib/integrations/shared.ts +15 -2
- package/src/lib/telemetry-client.ts +9 -0
- package/src/service-adapters/experimental/groq/groq-adapter.ts +3 -1
- package/src/service-adapters/experimental/ollama/ollama-adapter.ts +3 -1
- package/src/service-adapters/google/google-genai-adapter.ts +3 -1
- package/src/service-adapters/langchain/langchain-adapter.ts +8 -9
- package/src/service-adapters/openai/openai-adapter.ts +8 -7
- package/src/service-adapters/openai/openai-assistant-adapter.ts +6 -8
- package/src/service-adapters/service-adapter.ts +1 -2
- package/dist/chunk-4RGXTUS4.mjs.map +0 -1
- package/dist/chunk-DX2KAJBF.mjs.map +0 -1
- package/dist/chunk-NXFMYCNF.mjs.map +0 -1
- package/dist/chunk-RHQLCJGG.mjs +0 -7
- package/dist/chunk-XI3HBDMA.mjs.map +0 -1
- /package/dist/{chunk-RHQLCJGG.mjs.map → chunk-44O2JGUY.mjs.map} +0 -0
- /package/dist/{chunk-FRK6BXXV.mjs.map → chunk-DVDKD6F5.mjs.map} +0 -0
- /package/dist/{chunk-OZMCHYYR.mjs.map → chunk-GEIBJJQ4.mjs.map} +0 -0
- /package/dist/{chunk-6NZ4UMOD.mjs.map → chunk-RMZWGQ46.mjs.map} +0 -0
- /package/dist/{chunk-BYB2LNMK.mjs.map → chunk-U3V2BCGI.mjs.map} +0 -0
package/.turbo/turbo-build.log
CHANGED

@@ -1,8 +1,8 @@
 
-> @copilotkit/runtime@0.
+> @copilotkit/runtime@1.0.0-beta.1 build /home/runner/work/CopilotKit/CopilotKit/CopilotKit/packages/runtime
 > tsup --clean
 
-CLI Building entry: src/index.ts, src/lib/index.ts, src/service-adapters/index.ts, src/utils/index.ts, src/lib/cloud/index.ts, src/lib/integrations/index.ts, src/graphql/types/base/index.ts, src/graphql/types/converted/index.ts, src/lib/integrations/node-http/index.ts
+CLI Building entry: src/index.ts, src/lib/index.ts, src/service-adapters/index.ts, src/utils/index.ts, src/lib/cloud/index.ts, src/lib/integrations/index.ts, src/graphql/types/base/index.ts, src/graphql/types/converted/index.ts, src/lib/integrations/nest/index.ts, src/lib/integrations/node-express/index.ts, src/lib/integrations/node-http/index.ts
 CLI Using tsconfig: tsconfig.json
 CLI tsup v6.7.0
 CLI Using tsup config: /home/runner/work/CopilotKit/CopilotKit/CopilotKit/packages/runtime/tsup.config.ts
@@ -10,76 +10,90 @@
 CLI Cleaning output folder
 ESM Build start
 CJS Build start
-[
-[
-[
-[
-[
-[
-[
-[
-[
-[
-[
-[
-[
-[
-[
-[
-[
-[
-[
-ESM dist/lib/integrations/node-http/index.mjs
-ESM dist/index.mjs
-ESM dist/
-ESM dist/
-ESM dist/service-adapters/index.mjs
-ESM dist/
-ESM dist/
-ESM dist/lib/cloud/index.mjs
-ESM dist/
-ESM dist/chunk-
-ESM dist/chunk-
-ESM dist/chunk-
-ESM dist/chunk-
-ESM dist/
-ESM dist/
-ESM dist/chunk-
-ESM dist/
-ESM dist/chunk-
-ESM dist/
-ESM dist/
-ESM dist/
-ESM dist/lib/index.mjs.map
-ESM dist/
-ESM dist/
-ESM dist/
-ESM
-[
-[
-[
-[
-[
-[
-[
-[
-[
-[
-[
+ESM dist/index.mjs 1.43 KB
+ESM dist/lib/index.mjs 1.19 KB
+ESM dist/service-adapters/index.mjs 427.00 B
+ESM dist/utils/index.mjs 315.00 B
+ESM dist/lib/cloud/index.mjs 34.00 B
+ESM dist/lib/integrations/index.mjs 884.00 B
+ESM dist/chunk-F5K2JKGM.mjs 4.67 KB
+ESM dist/chunk-7QCEK4DP.mjs 21.31 KB
+ESM dist/chunk-R25IOQB5.mjs 1.80 KB
+ESM dist/chunk-ZTEJCDMJ.mjs 611.00 B
+ESM dist/chunk-5U6S5EGT.mjs 655.00 B
+ESM dist/chunk-DVDKD6F5.mjs 7.85 KB
+ESM dist/chunk-DL2QQEOR.mjs 48.70 KB
+ESM dist/chunk-U3V2BCGI.mjs 4.91 KB
+ESM dist/chunk-RMZWGQ46.mjs 1.36 KB
+ESM dist/chunk-GEIBJJQ4.mjs 645.00 B
+ESM dist/chunk-44O2JGUY.mjs 418.00 B
+ESM dist/lib/integrations/nest/index.mjs 379.00 B
+ESM dist/lib/integrations/node-express/index.mjs 393.00 B
+ESM dist/lib/integrations/node-http/index.mjs 349.00 B
+ESM dist/graphql/types/base/index.mjs 159.00 B
+ESM dist/index.mjs.map 222.00 B
+ESM dist/lib/index.mjs.map 71.00 B
+ESM dist/service-adapters/index.mjs.map 71.00 B
+ESM dist/utils/index.mjs.map 71.00 B
+ESM dist/lib/integrations/index.mjs.map 71.00 B
+ESM dist/lib/cloud/index.mjs.map 71.00 B
+ESM dist/chunk-F5K2JKGM.mjs.map 13.94 KB
+ESM dist/chunk-7QCEK4DP.mjs.map 45.36 KB
+ESM dist/chunk-R25IOQB5.mjs.map 3.55 KB
+ESM dist/chunk-ZTEJCDMJ.mjs.map 901.00 B
+ESM dist/chunk-5U6S5EGT.mjs.map 931.00 B
+ESM dist/chunk-DVDKD6F5.mjs.map 14.76 KB
+ESM dist/chunk-DL2QQEOR.mjs.map 64.60 KB
+ESM dist/chunk-U3V2BCGI.mjs.map 5.63 KB
+ESM dist/graphql/types/converted/index.mjs 283.00 B
+ESM dist/chunk-RMZWGQ46.mjs.map 645.00 B
+ESM dist/chunk-GEIBJJQ4.mjs.map 1.18 KB
+ESM dist/chunk-44O2JGUY.mjs.map 71.00 B
+ESM dist/lib/integrations/nest/index.mjs.map 71.00 B
+ESM dist/lib/integrations/node-express/index.mjs.map 71.00 B
+ESM dist/lib/integrations/node-http/index.mjs.map 71.00 B
+ESM dist/graphql/types/base/index.mjs.map 71.00 B
+ESM dist/graphql/types/converted/index.mjs.map 71.00 B
+ESM ⚡️ Build success in 220ms
+CJS dist/index.js 96.29 KB
+CJS dist/lib/index.js 95.93 KB
+CJS dist/service-adapters/index.js 32.69 KB
+CJS dist/utils/index.js 6.32 KB
+CJS dist/lib/cloud/index.js 787.00 B
+CJS dist/lib/integrations/index.js 58.28 KB
+CJS dist/graphql/types/base/index.js 2.44 KB
+CJS dist/graphql/types/converted/index.js 3.04 KB
+CJS dist/lib/integrations/nest/index.js 55.65 KB
+CJS dist/lib/integrations/node-express/index.js 55.74 KB
+CJS dist/lib/integrations/node-http/index.js 55.27 KB
+CJS dist/index.js.map 150.25 KB
+CJS dist/lib/index.js.map 150.60 KB
+CJS dist/service-adapters/index.js.map 62.05 KB
+CJS dist/utils/index.js.map 5.63 KB
+CJS dist/lib/cloud/index.js.map 217.00 B
+CJS dist/lib/integrations/index.js.map 73.68 KB
+CJS dist/graphql/types/base/index.js.map 645.00 B
+CJS dist/graphql/types/converted/index.js.map 1.76 KB
+CJS dist/lib/integrations/nest/index.js.map 69.13 KB
+CJS dist/lib/integrations/node-express/index.js.map 69.16 KB
+CJS dist/lib/integrations/node-http/index.js.map 68.31 KB
+CJS ⚡️ Build success in 228ms
 DTS Build start
-DTS ⚡️ Build success in
-DTS dist/index.d.ts 1.
+DTS ⚡️ Build success in 8126ms
+DTS dist/index.d.ts 1.50 KB
+DTS dist/lib/integrations/node-http/index.d.ts 586.00 B
+DTS dist/lib/integrations/node-express/index.d.ts 592.00 B
+DTS dist/lib/integrations/nest/index.d.ts 578.00 B
 DTS dist/lib/cloud/index.d.ts 116.00 B
 DTS dist/graphql/types/base/index.d.ts 92.00 B
-DTS dist/lib/index.d.ts 1.
+DTS dist/lib/index.d.ts 1.32 KB
 DTS dist/service-adapters/index.d.ts 543.00 B
 DTS dist/utils/index.d.ts 172.00 B
-DTS dist/lib/integrations/index.d.ts
-DTS dist/lib/integrations/node-http/index.d.ts 338.00 B
+DTS dist/lib/integrations/index.d.ts 862.00 B
 DTS dist/graphql/types/converted/index.d.ts 153.00 B
-DTS dist/
-DTS dist/langchain-adapter-
-DTS dist/pages-router-
+DTS dist/shared-48c0f7b8.d.ts 7.43 KB
+DTS dist/langchain-adapter-a02d1d38.d.ts 5.74 KB
+DTS dist/pages-router-24897543.d.ts 892.00 B
 DTS dist/failed-response-status-reasons-0ab19e06.d.ts 1.41 KB
-DTS dist/langserve-
+DTS dist/langserve-75ebbc38.d.ts 3.59 KB
 DTS dist/index-aa091e3c.d.ts 1.33 KB
package/CHANGELOG.md
CHANGED

@@ -1,23 +1,26 @@
 # @copilotkit/runtime
 
-## 0.
-
-### Minor Changes
-
-- Fix: Missing server action results
+## 1.0.0-beta.1
 
 ### Patch Changes
 
+- Introduce anonymous telemetry
 - Updated dependencies
-  - @copilotkit/shared@0.
+  - @copilotkit/shared@1.0.0-beta.1
+
+## 1.0.0-beta.0
 
-
+### Major Changes
 
-
+- V1.0 Release Candidate
 
--
+  - A robust new protocol between the frontend and the Copilot Runtime
+  - Support for Copilot Cloud
+  - Generative UI
+  - Support for LangChain universal tool calling
+  - OpenAI assistant API streaming
 
 ### Patch Changes
 
 - Updated dependencies
-  - @copilotkit/shared@0.
+  - @copilotkit/shared@1.0.0-beta.0
package/__snapshots__/schema/schema.graphql
CHANGED

@@ -44,6 +44,15 @@ input CloudInput {
   guardrails: GuardrailsInput
 }
 
+"""The type of Copilot request"""
+enum CopilotRequestType {
+  Chat
+  Suggestion
+  Task
+  TextareaCompletion
+  TextareaPopover
+}
+
 type CopilotResponse {
   messages: [BaseMessageOutput!]!
   runId: String
@@ -82,12 +91,17 @@ input GenerateCopilotResponseInput {
   cloud: CloudInput
   frontend: FrontendInput!
   messages: [MessageInput!]!
+  metadata: GenerateCopilotResponseMetadataInput!
   runId: String
   threadId: String
 }
 
+input GenerateCopilotResponseMetadataInput {
+  requestType: CopilotRequestType
+}
+
 input GuardrailsInput {
-  inputValidationRules: GuardrailsRuleInput
+  inputValidationRules: GuardrailsRuleInput!
 }
 
 input GuardrailsRuleInput {
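For orientation only, the schema additions above can be restated as plain TypeScript types on the client side; the type names simply mirror the GraphQL hunk, and the sample value at the end is a hypothetical sketch, not code from this package:

type CopilotRequestType =
  | "Chat"
  | "Suggestion"
  | "Task"
  | "TextareaCompletion"
  | "TextareaPopover";

interface GenerateCopilotResponseMetadataInput {
  // Optional in the schema (no trailing "!").
  requestType?: CopilotRequestType;
}

// metadata is now a required field of GenerateCopilotResponseInput,
// so every request payload carries something like:
const metadata: GenerateCopilotResponseMetadataInput = { requestType: "Chat" };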
package/dist/chunk-44O2JGUY.mjs
ADDED

@@ -0,0 +1,12 @@
+var __defProp = Object.defineProperty;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
+var __commonJS = (cb, mod) => function __require() {
+  return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
+};
+
+export {
+  __name,
+  __commonJS
+};
+//# sourceMappingURL=chunk-44O2JGUY.mjs.map
package/dist/chunk-5U6S5EGT.mjs
ADDED

@@ -0,0 +1,24 @@
+import {
+  copilotRuntimeNodeHttpEndpoint,
+  telemetry_client_default
+} from "./chunk-DL2QQEOR.mjs";
+import {
+  __name
+} from "./chunk-44O2JGUY.mjs";
+
+// src/lib/integrations/node-express/index.ts
+function copilotRuntimeNodeExpressEndpoint(options) {
+  telemetry_client_default.setGlobalProperties({
+    runtime: {
+      framework: "node-express"
+    }
+  });
+  telemetry_client_default.capture("oss.runtime.instance_created", {});
+  return copilotRuntimeNodeHttpEndpoint(options);
+}
+__name(copilotRuntimeNodeExpressEndpoint, "copilotRuntimeNodeExpressEndpoint");
+
+export {
+  copilotRuntimeNodeExpressEndpoint
+};
+//# sourceMappingURL=chunk-5U6S5EGT.mjs.map
package/dist/chunk-5U6S5EGT.mjs.map
ADDED

@@ -0,0 +1 @@
{"version":3,"sources":["../src/lib/integrations/node-express/index.ts"],"sourcesContent":["import { CreateCopilotRuntimeServerOptions } from \"../shared\";\nimport { copilotRuntimeNodeHttpEndpoint } from \"../node-http\";\nimport telemetry from \"../../telemetry-client\";\n\nexport function copilotRuntimeNodeExpressEndpoint(options: CreateCopilotRuntimeServerOptions) {\n telemetry.setGlobalProperties({\n runtime: {\n framework: \"node-express\",\n },\n });\n\n telemetry.capture(\"oss.runtime.instance_created\", {});\n return copilotRuntimeNodeHttpEndpoint(options);\n}\n"],"mappings":";;;;;;;;;AAIO,SAASA,kCAAkCC,SAA0C;AAC1FC,2BAAUC,oBAAoB;IAC5BC,SAAS;MACPC,WAAW;IACb;EACF,CAAA;AAEAH,2BAAUI,QAAQ,gCAAgC,CAAC,CAAA;AACnD,SAAOC,+BAA+BN,OAAAA;AACxC;AATgBD;","names":["copilotRuntimeNodeExpressEndpoint","options","telemetry","setGlobalProperties","runtime","framework","capture","copilotRuntimeNodeHttpEndpoint"]}
package/dist/{chunk-XI3HBDMA.mjs → chunk-7QCEK4DP.mjs}
CHANGED

@@ -2,15 +2,15 @@ import {
   convertActionInputToLangChainTool,
   convertMessageToLangChainMessage,
   streamLangChainResponse
-} from "./chunk-
+} from "./chunk-DVDKD6F5.mjs";
 import {
   ActionExecutionMessage,
   ResultMessage,
   TextMessage
-} from "./chunk-
+} from "./chunk-GEIBJJQ4.mjs";
 import {
   __name
-} from "./chunk-
+} from "./chunk-44O2JGUY.mjs";
 
 // src/service-adapters/openai/openai-adapter.ts
 import OpenAI from "openai";
@@ -162,6 +162,7 @@ function convertSystemMessageToAssistantAPI(message) {
 __name(convertSystemMessageToAssistantAPI, "convertSystemMessageToAssistantAPI");
 
 // src/service-adapters/openai/openai-adapter.ts
+import { nanoid } from "nanoid";
 var DEFAULT_MODEL = "gpt-4o";
 var OpenAIAdapter = class {
   model = DEFAULT_MODEL;
@@ -175,7 +176,8 @@ var OpenAIAdapter = class {
       this.model = params.model;
     }
   }
-  async process(
+  async process(request) {
+    const { threadId, model = this.model, messages, actions, eventSource } = request;
     const tools = actions.map(convertActionInputToOpenAITool);
     let openaiMessages = messages.map(convertMessageToOpenAIMessage);
     openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);
@@ -222,7 +224,9 @@ var OpenAIAdapter = class {
       }
       eventStream$.complete();
     });
-    return {
+    return {
+      threadId: threadId || nanoid()
+    };
   }
 };
 __name(OpenAIAdapter, "OpenAIAdapter");
@@ -240,8 +244,9 @@ var OpenAIAssistantAdapter = class {
     this.fileSearchEnabled = params.fileSearchEnabled === false || true;
     this.assistantId = params.assistantId;
   }
-  async process(
-
+  async process(request) {
+    const { messages, actions, eventSource, runId } = request;
+    let threadId = request.threadId || (await this.openai.beta.threads.create()).id;
     const lastMessage = messages.at(-1);
     let nextRunId = void 0;
     if (lastMessage instanceof ResultMessage && runId) {
@@ -466,7 +471,7 @@ function tryParseJson(str) {
 __name(tryParseJson, "tryParseJson");
 
 // src/service-adapters/google/google-genai-adapter.ts
-import { nanoid } from "nanoid";
+import { nanoid as nanoid2 } from "nanoid";
 var GoogleGenerativeAIAdapter = class {
   model;
   constructor(options) {
@@ -533,7 +538,7 @@ var GoogleGenerativeAIAdapter = class {
         }
         if (!isTextMessage) {
           isTextMessage = true;
-          eventStream$.sendTextMessageStart(
+          eventStream$.sendTextMessageStart(nanoid2());
         }
         eventStream$.sendTextMessageContent(chunkText);
       }
@@ -543,12 +548,14 @@ var GoogleGenerativeAIAdapter = class {
       let calls = (await result.response).functionCalls();
       if (calls) {
        for (let call of calls) {
-          eventStream$.sendActionExecution(
+          eventStream$.sendActionExecution(nanoid2(), call.name, JSON.stringify(replaceNewlinesInObject(call.args)));
        }
      }
      eventStream$.complete();
    });
-    return {
+    return {
+      threadId: request.threadId || nanoid2()
+    };
   }
 };
 __name(GoogleGenerativeAIAdapter, "GoogleGenerativeAIAdapter");
@@ -571,6 +578,7 @@ function replaceNewlinesInObject(obj) {
 __name(replaceNewlinesInObject, "replaceNewlinesInObject");
 
 // src/service-adapters/langchain/langchain-adapter.ts
+import { nanoid as nanoid3 } from "nanoid";
 var LangChainAdapter = class {
   options;
   /**
@@ -579,7 +587,8 @@ var LangChainAdapter = class {
   constructor(options) {
     this.options = options;
   }
-  async process(
+  async process(request) {
+    const { eventSource, model, actions, messages, threadId, runId } = request;
     const result = await this.options.chainFn({
       messages: messages.map(convertMessageToLangChainMessage),
       tools: actions.map(convertActionInputToLangChainTool),
@@ -593,7 +602,9 @@ var LangChainAdapter = class {
         eventStream$
       });
     });
-    return {
+    return {
+      threadId: threadId || nanoid3()
+    };
   }
 };
 __name(LangChainAdapter, "LangChainAdapter");
@@ -683,4 +694,4 @@ export {
   LangChainAdapter,
   RemoteChain
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-7QCEK4DP.mjs.map
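Common to every adapter in the chunk above is the new service-adapter contract: process() now takes a single request object and resolves to a response carrying a threadId. A minimal custom adapter following the pattern visible in the bundled sources might look like the sketch below; the class itself, the reply text, and the assumption that these interfaces are importable from the package root are illustrative, not part of the package:

import { nanoid } from "nanoid";
import {
  CopilotServiceAdapter,
  CopilotRuntimeChatCompletionRequest,
  CopilotRuntimeChatCompletionResponse,
} from "@copilotkit/runtime"; // assumption: the adapter interfaces are re-exported here

// Illustrative echo adapter using the process(request) shape shared by
// OpenAIAdapter, OpenAIAssistantAdapter, GoogleGenerativeAIAdapter and LangChainAdapter.
export class EchoAdapter implements CopilotServiceAdapter {
  async process(
    request: CopilotRuntimeChatCompletionRequest,
  ): Promise<CopilotRuntimeChatCompletionResponse> {
    const { threadId, messages, eventSource } = request;

    eventSource.stream(async (eventStream$) => {
      // Mirror the event sequence the bundled adapters emit:
      // start -> content -> end -> complete.
      eventStream$.sendTextMessageStart(nanoid());
      eventStream$.sendTextMessageContent(`You sent ${messages.length} message(s).`);
      eventStream$.sendTextMessageEnd();
      eventStream$.complete();
    });

    // Every adapter now returns a threadId, generating one when the client did not supply it.
    return { threadId: threadId || nanoid() };
  }
}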
package/dist/chunk-7QCEK4DP.mjs.map
ADDED

@@ -0,0 +1 @@
{"version":3,"sources":["../src/service-adapters/openai/openai-adapter.ts","../src/service-adapters/openai/utils.ts","../src/service-adapters/openai/openai-assistant-adapter.ts","../src/service-adapters/google/google-genai-adapter.ts","../src/service-adapters/google/utils.ts","../src/service-adapters/langchain/langchain-adapter.ts","../src/service-adapters/langchain/langserve.ts"],"sourcesContent":["/**\n * CopilotRuntime Adapter for OpenAI.\n *\n * <RequestExample>\n * ```jsx CopilotRuntime Example\n * const copilotKit = new CopilotRuntime();\n * return copilotKit.response(req, new OpenAIAdapter());\n * ```\n * </RequestExample>\n *\n * You can easily set the model to use by passing it to the constructor.\n * ```jsx\n * const copilotKit = new CopilotRuntime();\n * return copilotKit.response(\n * req,\n * new OpenAIAdapter({ model: \"gpt-4o\" }),\n * );\n * ```\n *\n * To use your custom OpenAI instance, pass the `openai` property.\n * ```jsx\n * const openai = new OpenAI({\n * organization: \"your-organization-id\",\n * apiKey: \"your-api-key\"\n * });\n *\n * const copilotKit = new CopilotRuntime();\n * return copilotKit.response(\n * req,\n * new OpenAIAdapter({ openai }),\n * );\n * ```\n *\n */\nimport OpenAI from \"openai\";\nimport {\n CopilotServiceAdapter,\n CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n} from \"../service-adapter\";\nimport {\n convertActionInputToOpenAITool,\n convertMessageToOpenAIMessage,\n limitMessagesToTokenCount,\n} from \"./utils\";\nimport { nanoid } from \"nanoid\";\n\nconst DEFAULT_MODEL = \"gpt-4o\";\n\nexport interface OpenAIAdapterParams {\n /**\n * An optional OpenAI instance to use.\n */\n openai?: OpenAI;\n\n /**\n * The model to use.\n */\n model?: string;\n}\n\nexport class OpenAIAdapter implements CopilotServiceAdapter {\n private model: string = DEFAULT_MODEL;\n\n private _openai: OpenAI;\n public get openai(): OpenAI {\n return this._openai;\n }\n\n constructor(params?: OpenAIAdapterParams) {\n this._openai = params?.openai || new OpenAI({});\n if (params?.model) {\n this.model = params.model;\n }\n }\n\n async process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n const { threadId, model = this.model, messages, actions, eventSource } = request;\n const tools = actions.map(convertActionInputToOpenAITool);\n\n let openaiMessages = messages.map(convertMessageToOpenAIMessage);\n openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);\n\n const stream = this.openai.beta.chat.completions.stream({\n model: model,\n stream: true,\n messages: openaiMessages,\n ...(tools.length > 0 && { tools }),\n });\n\n eventSource.stream(async (eventStream$) => {\n let mode: \"function\" | \"message\" | null = null;\n for await (const chunk of stream) {\n const toolCall = chunk.choices[0].delta.tool_calls?.[0];\n const content = chunk.choices[0].delta.content;\n\n // When switching from message to function or vice versa,\n // send the respective end event.\n // If toolCall?.id is defined, it means a new tool call starts.\n if (mode === \"message\" && toolCall?.id) {\n mode = null;\n eventStream$.sendTextMessageEnd();\n } else if (mode === \"function\" && (toolCall === undefined || toolCall?.id)) {\n mode = null;\n eventStream$.sendActionExecutionEnd();\n }\n\n // If we send a new message type, send the appropriate start event.\n if (mode === null) {\n if (toolCall?.id) {\n mode = \"function\";\n eventStream$.sendActionExecutionStart(toolCall!.id, 
toolCall!.function!.name);\n } else if (content) {\n mode = \"message\";\n eventStream$.sendTextMessageStart(chunk.id);\n }\n }\n\n // send the content events\n if (mode === \"message\" && content) {\n eventStream$.sendTextMessageContent(content);\n } else if (mode === \"function\" && toolCall?.function?.arguments) {\n eventStream$.sendActionExecutionArgs(toolCall.function.arguments);\n }\n }\n\n // send the end events\n if (mode === \"message\") {\n eventStream$.sendTextMessageEnd();\n } else if (mode === \"function\") {\n eventStream$.sendActionExecutionEnd();\n }\n\n eventStream$.complete();\n });\n\n return {\n threadId: threadId || nanoid(),\n };\n }\n}\n","import {\n ActionExecutionMessage,\n Message,\n ResultMessage,\n TextMessage,\n} from \"../../graphql/types/converted\";\nimport { Tiktoken, TiktokenModel, encodingForModel } from \"js-tiktoken\";\nimport { ActionInput } from \"../../graphql/inputs/action.input\";\nimport { ChatCompletionMessageParam, ChatCompletionTool } from \"openai/resources\";\n\nexport function limitMessagesToTokenCount(\n messages: any[],\n tools: any[],\n model: string,\n maxTokens?: number,\n): any[] {\n maxTokens ||= maxTokensForOpenAIModel(model);\n\n const result: any[] = [];\n const toolsNumTokens = countToolsTokens(model, tools);\n if (toolsNumTokens > maxTokens) {\n throw new Error(`Too many tokens in function definitions: ${toolsNumTokens} > ${maxTokens}`);\n }\n maxTokens -= toolsNumTokens;\n\n for (const message of messages) {\n if (message.role === \"system\") {\n const numTokens = countMessageTokens(model, message);\n maxTokens -= numTokens;\n\n if (maxTokens < 0) {\n throw new Error(\"Not enough tokens for system message.\");\n }\n }\n }\n\n let cutoff: boolean = false;\n\n const reversedMessages = [...messages].reverse();\n for (const message of reversedMessages) {\n if (message.role === \"system\") {\n result.unshift(message);\n continue;\n } else if (cutoff) {\n continue;\n }\n let numTokens = countMessageTokens(model, message);\n if (maxTokens < numTokens) {\n cutoff = true;\n continue;\n }\n result.unshift(message);\n maxTokens -= numTokens;\n }\n\n return result;\n}\n\nexport function maxTokensForOpenAIModel(model: string): number {\n return maxTokensByModel[model] || DEFAULT_MAX_TOKENS;\n}\n\nconst DEFAULT_MAX_TOKENS = 128000;\n\nconst maxTokensByModel: { [key: string]: number } = {\n // GPT-4\n \"gpt-4o\": 128000,\n \"gpt-4o-2024-05-13\": 128000,\n \"gpt-4-turbo\": 128000,\n \"gpt-4-turbo-2024-04-09\": 128000,\n \"gpt-4-0125-preview\": 128000,\n \"gpt-4-turbo-preview\": 128000,\n \"gpt-4-1106-preview\": 128000,\n \"gpt-4-vision-preview\": 128000,\n \"gpt-4-1106-vision-preview\": 128000,\n \"gpt-4-32k\": 32768,\n \"gpt-4-32k-0613\": 32768,\n \"gpt-4-32k-0314\": 32768,\n \"gpt-4\": 8192,\n \"gpt-4-0613\": 8192,\n \"gpt-4-0314\": 8192,\n\n // GPT-3.5\n \"gpt-3.5-turbo-0125\": 16385,\n \"gpt-3.5-turbo\": 16385,\n \"gpt-3.5-turbo-1106\": 16385,\n \"gpt-3.5-turbo-instruct\": 4096,\n \"gpt-3.5-turbo-16k\": 16385,\n \"gpt-3.5-turbo-0613\": 4096,\n \"gpt-3.5-turbo-16k-0613\": 16385,\n \"gpt-3.5-turbo-0301\": 4097,\n};\n\nfunction countToolsTokens(model: string, tools: any[]): number {\n if (tools.length === 0) {\n return 0;\n }\n const json = JSON.stringify(tools);\n return countTokens(model, json);\n}\n\nfunction countMessageTokens(model: string, message: any): number {\n return countTokens(model, message.content || \"\");\n}\n\nfunction countTokens(model: string, text: string): number {\n let enc: Tiktoken;\n try {\n enc = 
encodingForModel(model as TiktokenModel);\n } catch (e) {\n enc = encodingForModel(\"gpt-4\");\n }\n return enc.encode(text).length;\n}\n\nexport function convertActionInputToOpenAITool(action: ActionInput): ChatCompletionTool {\n return {\n type: \"function\",\n function: {\n name: action.name,\n description: action.description,\n parameters: JSON.parse(action.jsonSchema),\n },\n };\n}\n\nexport function convertMessageToOpenAIMessage(message: Message): ChatCompletionMessageParam {\n if (message instanceof TextMessage) {\n return {\n role: message.role,\n content: message.content,\n };\n } else if (message instanceof ActionExecutionMessage) {\n return {\n role: \"assistant\",\n tool_calls: [\n {\n id: message.id,\n type: \"function\",\n function: {\n name: message.name,\n arguments: JSON.stringify(message.arguments),\n },\n },\n ],\n };\n } else if (message instanceof ResultMessage) {\n return {\n role: \"tool\",\n content: message.result,\n tool_call_id: message.actionExecutionId,\n };\n }\n}\n\nexport function convertSystemMessageToAssistantAPI(message: ChatCompletionMessageParam) {\n return {\n ...message,\n ...(message.role === \"system\" && {\n role: \"assistant\",\n content: \"THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: \" + message.content,\n }),\n };\n}\n","/**\n * CopilotKit Adapter for the OpenAI Assistant API.\n *\n * Use this adapter to get responses from the OpenAI Assistant API.\n *\n * <RequestExample>\n * ```typescript\n * const copilotKit = new CopilotRuntime();\n * return copilotKit.response(\n * req,\n * new OpenAIAssistantAdapter({\n * assistantId: \"your-assistant-id\"\n * })\n * );\n * ```\n * </RequestExample>\n */\nimport OpenAI from \"openai\";\nimport {\n CopilotServiceAdapter,\n CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n} from \"../service-adapter\";\nimport { Message, ResultMessage, TextMessage } from \"../../graphql/types/converted\";\nimport {\n convertActionInputToOpenAITool,\n convertMessageToOpenAIMessage,\n convertSystemMessageToAssistantAPI,\n} from \"./utils\";\nimport { RunSubmitToolOutputsStreamParams } from \"openai/resources/beta/threads/runs/runs\";\nimport { AssistantStream } from \"openai/lib/AssistantStream\";\nimport { RuntimeEventSource } from \"../events\";\nimport { ActionInput } from \"../../graphql/inputs/action.input\";\nimport { AssistantStreamEvent, AssistantTool } from \"openai/resources/beta/assistants\";\n\nexport interface OpenAIAssistantAdapterParams {\n /**\n * The ID of the assistant to use.\n */\n assistantId: string;\n\n /**\n * An instance of `OpenAI` to use for the request. If not provided, a new instance will be created.\n */\n openai?: OpenAI;\n\n /**\n * Whether to enable the code interpreter. Defaults to `true`.\n */\n codeInterpreterEnabled?: boolean;\n\n /**\n * Whether to enable retrieval. 
Defaults to `true`.\n */\n fileSearchEnabled?: boolean;\n}\n\nexport class OpenAIAssistantAdapter implements CopilotServiceAdapter {\n private openai: OpenAI;\n private codeInterpreterEnabled: boolean;\n private assistantId: string;\n private fileSearchEnabled: boolean;\n\n constructor(params: OpenAIAssistantAdapterParams) {\n this.openai = params.openai || new OpenAI({});\n this.codeInterpreterEnabled = params.codeInterpreterEnabled === false || true;\n this.fileSearchEnabled = params.fileSearchEnabled === false || true;\n this.assistantId = params.assistantId;\n }\n\n async process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n const { messages, actions, eventSource, runId } = request;\n // if we don't have a threadId, create a new thread\n let threadId = request.threadId || (await this.openai.beta.threads.create()).id;\n\n const lastMessage = messages.at(-1);\n\n let nextRunId: string | undefined = undefined;\n\n // submit function outputs\n if (lastMessage instanceof ResultMessage && runId) {\n nextRunId = await this.submitToolOutputs(threadId, runId, messages, eventSource);\n }\n // submit user message\n else if (lastMessage instanceof TextMessage) {\n nextRunId = await this.submitUserMessage(threadId, messages, actions, eventSource);\n }\n // unsupported message\n else {\n throw new Error(\"No actionable message found in the messages\");\n }\n\n return {\n threadId,\n runId: nextRunId,\n };\n }\n\n private async submitToolOutputs(\n threadId: string,\n runId: string,\n messages: Message[],\n eventSource: RuntimeEventSource,\n ) {\n let run = await this.openai.beta.threads.runs.retrieve(threadId, runId);\n if (!run.required_action) {\n throw new Error(\"No tool outputs required\");\n }\n\n // get the required tool call ids\n const toolCallsIds = run.required_action.submit_tool_outputs.tool_calls.map(\n (toolCall) => toolCall.id,\n );\n\n // search for these tool calls\n const resultMessages = messages.filter(\n (message) =>\n message instanceof ResultMessage && toolCallsIds.includes(message.actionExecutionId),\n ) as ResultMessage[];\n\n if (toolCallsIds.length != resultMessages.length) {\n throw new Error(\"Number of function results does not match the number of tool calls\");\n }\n\n // submit the tool outputs\n const toolOutputs: RunSubmitToolOutputsStreamParams.ToolOutput[] = resultMessages.map(\n (message) => {\n return {\n tool_call_id: message.actionExecutionId,\n output: message.result,\n };\n },\n );\n\n const stream = this.openai.beta.threads.runs.submitToolOutputsStream(threadId, runId, {\n tool_outputs: toolOutputs,\n });\n\n await this.streamResponse(stream, eventSource);\n return runId;\n }\n\n private async submitUserMessage(\n threadId: string,\n messages: Message[],\n actions: ActionInput[],\n eventSource: RuntimeEventSource,\n ) {\n messages = [...messages];\n\n // get the instruction message\n const instructionsMessage = messages.shift();\n const instructions =\n instructionsMessage instanceof TextMessage ? 
instructionsMessage.content : \"\";\n\n // get the latest user message\n const userMessage = messages\n .map(convertMessageToOpenAIMessage)\n .map(convertSystemMessageToAssistantAPI)\n .at(-1);\n\n if (userMessage.role !== \"user\") {\n throw new Error(\"No user message found\");\n }\n\n // create a new message on the thread\n await this.openai.beta.threads.messages.create(threadId, {\n role: \"user\",\n content: userMessage.content,\n });\n\n const openaiTools = actions.map(convertActionInputToOpenAITool);\n\n const tools = [\n ...openaiTools,\n ...(this.codeInterpreterEnabled ? [{ type: \"code_interpreter\" } as AssistantTool] : []),\n ...(this.fileSearchEnabled ? [{ type: \"file_search\" } as AssistantTool] : []),\n ];\n\n // run the thread\n let stream = this.openai.beta.threads.runs.stream(threadId, {\n assistant_id: this.assistantId,\n instructions,\n tools: tools,\n });\n\n await this.streamResponse(stream, eventSource);\n\n return getRunIdFromStream(stream);\n }\n\n private async streamResponse(stream: AssistantStream, eventSource: RuntimeEventSource) {\n eventSource.stream(async (eventStream$) => {\n let inFunctionCall = false;\n\n for await (const chunk of stream) {\n switch (chunk.event) {\n case \"thread.message.created\":\n if (inFunctionCall) {\n eventStream$.sendActionExecutionEnd();\n }\n eventStream$.sendTextMessageStart(chunk.data.id);\n break;\n case \"thread.message.delta\":\n if (chunk.data.delta.content?.[0].type === \"text\") {\n eventStream$.sendTextMessageContent(chunk.data.delta.content?.[0].text.value);\n }\n break;\n case \"thread.message.completed\":\n eventStream$.sendTextMessageEnd();\n break;\n case \"thread.run.step.delta\":\n let toolCallId: string | undefined;\n let toolCallName: string | undefined;\n let toolCallArgs: string | undefined;\n if (\n chunk.data.delta.step_details.type === \"tool_calls\" &&\n chunk.data.delta.step_details.tool_calls?.[0].type === \"function\"\n ) {\n toolCallId = chunk.data.delta.step_details.tool_calls?.[0].id;\n toolCallName = chunk.data.delta.step_details.tool_calls?.[0].function.name;\n toolCallArgs = chunk.data.delta.step_details.tool_calls?.[0].function.arguments;\n }\n\n if (toolCallName && toolCallId) {\n if (inFunctionCall) {\n eventStream$.sendActionExecutionEnd();\n }\n inFunctionCall = true;\n eventStream$.sendActionExecutionStart(toolCallId, toolCallName);\n } else if (toolCallArgs) {\n eventStream$.sendActionExecutionArgs(toolCallArgs);\n }\n break;\n }\n }\n if (inFunctionCall) {\n eventStream$.sendActionExecutionEnd();\n }\n eventStream$.complete();\n });\n }\n}\n\nfunction getRunIdFromStream(stream: AssistantStream): Promise<string> {\n return new Promise<string>((resolve, reject) => {\n let runIdGetter = (event: AssistantStreamEvent) => {\n if (event.event === \"thread.run.created\") {\n const runId = event.data.id;\n stream.off(\"event\", runIdGetter);\n resolve(runId);\n }\n };\n stream.on(\"event\", runIdGetter);\n });\n}\n","/**\n * CopilotKit Adapter for Google Gemini\n *\n * Use this adapter for a Google Gemini backend.\n *\n * <RequestExample>\n * ```typescript\n * const copilotKit = new CopilotRuntime();\n * return copilotKit.response(\n * req,\n * new GoogleGenerativeAIAdapter()\n * );\n * ```\n * </RequestExample>\n *\n * To set up a different model, pass the model prop:\n *\n * ```typescript\n * const copilotKit = new CopilotRuntime();\n * const genAI = new GoogleGenerativeAI(\n * process.env[\"GOOGLE_API_KEY\"]!\n * );\n * const model = genAI.getGenerativeModel(\n * { model: \"gemini-pro\" }\n * 
);\n * return copilotKit.response(\n * req,\n * new GoogleGenerativeAIAdapter()\n * );\n * ```\n */\nimport { CopilotServiceAdapter } from \"../service-adapter\";\nimport {\n CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n} from \"../service-adapter\";\nimport { GenerativeModel, GoogleGenerativeAI } from \"@google/generative-ai\";\nimport { TextMessage } from \"../../graphql/types/converted\";\nimport { convertMessageToGoogleGenAIMessage, transformActionToGoogleGenAITool } from \"./utils\";\nimport { nanoid } from \"nanoid\";\n\ninterface GoogleGenerativeAIAdapterOptions {\n /**\n * A custom `GenerativeModel` to use for the request.\n */\n model?: GenerativeModel;\n}\n\nexport class GoogleGenerativeAIAdapter implements CopilotServiceAdapter {\n private model: GenerativeModel;\n\n constructor(options?: GoogleGenerativeAIAdapterOptions) {\n if (options?.model) {\n this.model = options.model;\n } else {\n const genAI = new GoogleGenerativeAI(process.env[\"GOOGLE_API_KEY\"]!);\n this.model = genAI.getGenerativeModel({ model: \"gemini-pro\" });\n }\n }\n\n async process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n const { messages, actions, eventSource } = request;\n\n // get the history (everything except the first and last message)\n const history = messages.slice(1, -1).map(convertMessageToGoogleGenAIMessage);\n\n // get the current message (the last message)\n const currentMessage = convertMessageToGoogleGenAIMessage(messages.at(-1));\n if (!currentMessage) {\n throw new Error(\"No current message\");\n }\n\n let systemMessage: string;\n const firstMessage = messages.at(0);\n if (firstMessage instanceof TextMessage && firstMessage.role === \"system\") {\n systemMessage = firstMessage.content.trim();\n } else {\n throw new Error(\"First message is not a system message\");\n }\n\n const tools = actions.map(transformActionToGoogleGenAITool);\n\n const isFirstGenGeminiPro =\n this.model.model === \"gemini-pro\" || this.model.model === \"models/gemini-pro\";\n\n const chat = this.model.startChat({\n history: [\n ...history,\n // gemini-pro does not support system instructions, so we need to add them to the history\n ...(isFirstGenGeminiPro ? [{ role: \"user\", parts: [{ text: systemMessage }] }] : []),\n ],\n // only gemini-1.5-pro-latest and later supports setting system instructions\n ...(isFirstGenGeminiPro\n ? 
{}\n : { systemInstruction: { role: \"user\", parts: [{ text: systemMessage }] } }),\n tools,\n });\n\n const result = await chat.sendMessageStream(currentMessage.parts);\n\n eventSource.stream(async (eventStream$) => {\n let isTextMessage = false;\n for await (const chunk of result.stream) {\n const chunkText = chunk.text();\n if (chunkText === \"\") {\n continue;\n }\n if (!isTextMessage) {\n isTextMessage = true;\n eventStream$.sendTextMessageStart(nanoid());\n }\n eventStream$.sendTextMessageContent(chunkText);\n }\n if (isTextMessage) {\n eventStream$.sendTextMessageEnd();\n }\n\n let calls = (await result.response).functionCalls();\n if (calls) {\n for (let call of calls) {\n eventStream$.sendActionExecution(\n nanoid(),\n call.name,\n JSON.stringify(replaceNewlinesInObject(call.args)),\n );\n }\n }\n eventStream$.complete();\n });\n\n return {\n threadId: request.threadId || nanoid(),\n };\n }\n}\n\nfunction replaceNewlinesInObject(obj: any): any {\n if (typeof obj === \"string\") {\n return obj.replace(/\\\\\\\\n/g, \"\\n\");\n } else if (Array.isArray(obj)) {\n return obj.map(replaceNewlinesInObject);\n } else if (typeof obj === \"object\" && obj !== null) {\n const newObj: any = {};\n for (const key in obj) {\n if (obj.hasOwnProperty(key)) {\n newObj[key] = replaceNewlinesInObject(obj[key]);\n }\n }\n return newObj;\n }\n return obj;\n}\n","import {\n ActionExecutionMessage,\n Message,\n ResultMessage,\n TextMessage,\n} from \"../../graphql/types/converted\";\nimport { Tool } from \"@google/generative-ai\";\nimport { ActionInput } from \"../../graphql/inputs/action.input\";\n\nexport function convertMessageToGoogleGenAIMessage(message: Message) {\n if (message instanceof TextMessage) {\n const role = {\n user: \"user\",\n assistant: \"model\",\n system: \"user\",\n }[message.role];\n\n const text =\n message.role === \"system\"\n ? 
\"THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: \" + message.content\n : message.content;\n\n return {\n role,\n parts: [{ text }],\n };\n } else if (message instanceof ActionExecutionMessage) {\n return {\n role: \"model\",\n parts: [\n {\n functionCall: {\n name: message.name,\n args: message.arguments,\n },\n },\n ],\n };\n } else if (message instanceof ResultMessage) {\n return {\n role: \"function\",\n parts: [\n {\n functionResponse: {\n name: message.actionName,\n response: {\n name: message.actionName,\n content: tryParseJson(message.result),\n },\n },\n },\n ],\n };\n }\n}\n\nexport function transformActionToGoogleGenAITool(action: ActionInput): Tool {\n const name = action.name;\n const description = action.description;\n const parameters = JSON.parse(action.jsonSchema);\n\n const transformProperties = (props: any) => {\n for (const key in props) {\n if (props[key].type) {\n props[key].type = props[key].type.toUpperCase();\n }\n if (props[key].properties) {\n transformProperties(props[key].properties);\n }\n }\n };\n transformProperties(parameters);\n\n return {\n functionDeclarations: [\n {\n name,\n description,\n parameters,\n },\n ],\n };\n}\n\nfunction tryParseJson(str?: string) {\n if (!str) {\n return \"\";\n }\n try {\n return JSON.parse(str);\n } catch (e) {\n return str;\n }\n}\n","/**\n * CopilotKit Adapter for LangChain\n *\n * Use this adapter to use LangChain as a backend.\n *\n * ```typescript\n * return copilotKit.response(\n * req,\n * new LangChainAdapter(async (forwardedProps) => {\n * const model = new ChatOpenAI({ modelName: \"gpt-4o\" });\n * return model.stream(forwardedProps.messages, {\n * tools: forwardedProps.tools,\n * });\n * })\n * );\n * ```\n * The async handler function can return:\n *\n * - a simple `string` response\n * - a LangChain stream `IterableReadableStream`\n * - a LangChain `BaseMessageChunk` object\n * - a LangChain `AIMessage` object\n */\n\nimport { BaseMessage } from \"@langchain/core/messages\";\nimport { CopilotServiceAdapter } from \"../service-adapter\";\nimport {\n CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n} from \"../service-adapter\";\nimport {\n convertActionInputToLangChainTool,\n convertMessageToLangChainMessage,\n streamLangChainResponse,\n} from \"./utils\";\nimport { DynamicStructuredTool } from \"@langchain/core/tools\";\nimport { LangChainReturnType } from \"./types\";\nimport { nanoid } from \"nanoid\";\n\ninterface ChainFnParameters {\n model: string;\n messages: BaseMessage[];\n tools: DynamicStructuredTool[];\n threadId?: string;\n runId?: string;\n}\n\ninterface LangChainAdapterOptions {\n chainFn: (parameters: ChainFnParameters) => Promise<LangChainReturnType>;\n}\n\nexport class LangChainAdapter implements CopilotServiceAdapter {\n /**\n * To use LangChain as a backend, provide a handler function to the adapter with your custom LangChain logic.\n */\n constructor(private options: LangChainAdapterOptions) {}\n\n async process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n const { eventSource, model, actions, messages, threadId, runId } = request;\n const result = await this.options.chainFn({\n messages: messages.map(convertMessageToLangChainMessage),\n tools: actions.map(convertActionInputToLangChainTool),\n model,\n threadId,\n runId,\n });\n\n eventSource.stream(async (eventStream$) => {\n await streamLangChainResponse({\n result,\n eventStream$,\n });\n });\n\n return {\n threadId: threadId || nanoid(),\n };\n 
}\n}\n","import { Parameter, Action } from \"@copilotkit/shared\";\nimport { RemoteRunnable } from \"langchain/runnables/remote\";\n\nexport interface RemoteChainParameters {\n name: string;\n description: string;\n chainUrl: string;\n parameters?: Parameter[];\n parameterType?: \"single\" | \"multi\";\n}\n\nexport class RemoteChain {\n name: string;\n description: string;\n chainUrl: string;\n parameters?: Parameter[];\n parameterType: \"single\" | \"multi\";\n\n constructor(options: RemoteChainParameters) {\n this.name = options.name;\n this.description = options.description;\n this.chainUrl = options.chainUrl;\n this.parameters = options.parameters;\n this.parameterType = options.parameterType || \"multi\";\n }\n\n async toAction(): Promise<Action<any>> {\n if (!this.parameters) {\n await this.inferLangServeParameters();\n }\n\n return {\n name: this.name,\n description: this.description,\n parameters: this.parameters!,\n handler: async (args: any) => {\n const runnable = new RemoteRunnable({ url: this.chainUrl });\n let input: any;\n if (this.parameterType === \"single\") {\n input = args[Object.keys(args)[0]];\n } else {\n input = args;\n }\n return await runnable.invoke(input);\n },\n };\n }\n\n async inferLangServeParameters() {\n const supportedTypes = [\"string\", \"number\", \"boolean\"];\n\n let schemaUrl = this.chainUrl.replace(/\\/+$/, \"\") + \"/input_schema\";\n let schema = await fetch(schemaUrl)\n .then((res) => res.json())\n .catch(() => {\n throw new Error(\"Failed to fetch langserve schema at \" + schemaUrl);\n });\n // for now, don't use json schema, just do a simple conversion\n\n if (supportedTypes.includes(schema.type)) {\n this.parameterType = \"single\";\n this.parameters = [\n {\n name: \"input\",\n type: schema.type,\n description: \"The input to the chain\",\n },\n ];\n } else if (schema.type === \"object\") {\n this.parameterType = \"multi\";\n this.parameters = Object.keys(schema.properties).map((key) => {\n let property = schema.properties[key];\n if (!supportedTypes.includes(property.type)) {\n throw new Error(\"Unsupported schema type\");\n }\n return {\n name: key,\n type: property.type,\n description: property.description || \"\",\n required: schema.required?.includes(key) || false,\n };\n });\n } else {\n throw new Error(\"Unsupported schema type\");\n }\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;AAkCA,OAAOA,YAAY;;;AC5BnB,SAAkCC,wBAAwB;AAInD,SAASC,0BACdC,UACAC,OACAC,OACAC,WAAkB;AAElBA,4BAAcC,wBAAwBF,KAAAA;AAEtC,QAAMG,SAAgB,CAAA;AACtB,QAAMC,iBAAiBC,iBAAiBL,OAAOD,KAAAA;AAC/C,MAAIK,iBAAiBH,WAAW;AAC9B,UAAM,IAAIK,MAAM,4CAA4CF,oBAAoBH,WAAW;EAC7F;AACAA,eAAaG;AAEb,aAAWG,WAAWT,UAAU;AAC9B,QAAIS,QAAQC,SAAS,UAAU;AAC7B,YAAMC,YAAYC,mBAAmBV,OAAOO,OAAAA;AAC5CN,mBAAaQ;AAEb,UAAIR,YAAY,GAAG;AACjB,cAAM,IAAIK,MAAM,uCAAA;MAClB;IACF;EACF;AAEA,MAAIK,SAAkB;AAEtB,QAAMC,mBAAmB;OAAId;IAAUe,QAAO;AAC9C,aAAWN,WAAWK,kBAAkB;AACtC,QAAIL,QAAQC,SAAS,UAAU;AAC7BL,aAAOW,QAAQP,OAAAA;AACf;IACF,WAAWI,QAAQ;AACjB;IACF;AACA,QAAIF,YAAYC,mBAAmBV,OAAOO,OAAAA;AAC1C,QAAIN,YAAYQ,WAAW;AACzBE,eAAS;AACT;IACF;AACAR,WAAOW,QAAQP,OAAAA;AACfN,iBAAaQ;EACf;AAEA,SAAON;AACT;AA9CgBN;AAgDT,SAASK,wBAAwBF,OAAa;AACnD,SAAOe,iBAAiBf,KAAAA,KAAUgB;AACpC;AAFgBd;AAIhB,IAAMc,qBAAqB;AAE3B,IAAMD,mBAA8C;;EAElD,UAAU;EACV,qBAAqB;EACrB,eAAe;EACf,0BAA0B;EAC1B,sBAAsB;EACtB,uBAAuB;EACvB,sBAAsB;EACtB,wBAAwB;EACxB,6BAA6B;EAC7B,aAAa;EACb,kBAAkB;EAClB,kBAAkB;EAClB,SAAS;EACT,cAAc;EACd,cAAc;;EAGd,sBAAsB;EACtB,iBAAiB;EACjB,sBAAsB;EACtB,0BAA0B;EAC1B,qBAAqB;EACrB,sBAAsB;EACtB,0BAA0B;EAC1B,sBAAsB;AACxB;AAEA,SAASV,iBAAiBL,OAAeD,OAAY;AACnD,MAAIA,MAAMkB,WAAW,GAAG;AACtB,WAAO;EACT;AACA,QAAMC,OAAOC,KAAKC,UAAUrB,KAAAA;AAC5B,SAAOsB,YAAYrB,OAAOkB,IAAAA;AAC5B;AANSb;AAQT,SAASK,mBAAmBV,OAAeO,SAAY;AACrD,SAAOc,YAAYrB,OAAOO,QAAQe,WAAW,EAAA;AAC/C;AAFSZ;AAIT,SAASW,YAAYrB,OAAeuB,MAAY;AAC9C,MAAIC;AACJ,MAAI;AACFA,UAAMC,iBAAiBzB,KAAAA;EACzB,SAAS0B,GAAP;AACAF,UAAMC,iBAAiB,OAAA;EACzB;AACA,SAAOD,IAAIG,OAAOJ,IAAAA,EAAMN;AAC1B;AARSI;AAUF,SAASO,+BAA+BC,QAAmB;AAChE,SAAO;IACLC,MAAM;IACNC,UAAU;MACRC,MAAMH,OAAOG;MACbC,aAAaJ,OAAOI;MACpBC,YAAYf,KAAKgB,MAAMN,OAAOO,UAAU;IAC1C;EACF;AACF;AATgBR;AAWT,SAASS,8BAA8B9B,SAAgB;AAC5D,MAAIA,mBAAmB+B,aAAa;AAClC,WAAO;MACL9B,MAAMD,QAAQC;MACdc,SAASf,QAAQe;IACnB;EACF,WAAWf,mBAAmBgC,wBAAwB;AACpD,WAAO;MACL/B,MAAM;MACNgC,YAAY;QACV;UACEC,IAAIlC,QAAQkC;UACZX,MAAM;UACNC,UAAU;YACRC,MAAMzB,QAAQyB;YACdU,WAAWvB,KAAKC,UAAUb,QAAQmC,SAAS;UAC7C;QACF;;IAEJ;EACF,WAAWnC,mBAAmBoC,eAAe;AAC3C,WAAO;MACLnC,MAAM;MACNc,SAASf,QAAQJ;MACjByC,cAAcrC,QAAQsC;IACxB;EACF;AACF;AA3BgBR;AA6BT,SAASS,mCAAmCvC,SAAmC;AACpF,SAAO;IACL,GAAGA;IACH,GAAIA,QAAQC,SAAS,YAAY;MAC/BA,MAAM;MACNc,SAAS,gDAAgDf,QAAQe;IACnE;EACF;AACF;AARgBwB;;;AD9GhB,SAASC,cAAc;AAEvB,IAAMC,gBAAgB;AAcf,IAAMC,gBAAN,MAAMA;EACHC,QAAgBF;EAEhBG;EACR,IAAWC,SAAiB;AAC1B,WAAO,KAAKD;EACd;EAEAE,YAAYC,QAA8B;AACxC,SAAKH,WAAUG,iCAAQF,WAAU,IAAIG,OAAO,CAAC,CAAA;AAC7C,QAAID,iCAAQJ,OAAO;AACjB,WAAKA,QAAQI,OAAOJ;IACtB;EACF;EAEA,MAAMM,QACJC,SAC+C;AAC/C,UAAM,EAAEC,UAAUR,QAAQ,KAAKA,OAAOS,UAAUC,SAASC,YAAW,IAAKJ;AACzE,UAAMK,QAAQF,QAAQG,IAAIC,8BAAAA;AAE1B,QAAIC,iBAAiBN,SAASI,IAAIG,6BAAAA;AAClCD,qBAAiBE,0BAA0BF,gBAAgBH,OAAOZ,KAAAA;AAElE,UAAMkB,SAAS,KAAKhB,OAAOiB,KAAKC,KAAKC,YAAYH,OAAO;MACtDlB;MACAkB,QAAQ;MACRT,UAAUM;MACV,GAAIH,MAAMU,SAAS,KAAK;QAAEV;MAAM;IAClC,CAAA;AAEAD,gBAAYO,OAAO,OAAOK,iBAAAA;AA5F9B;AA6FM,UAAIC,OAAsC;AAC1C,uBAAiBC,SAASP,QAAQ;AAChC,cAAMQ,YAAWD,WAAME,QAAQ,CAAA,EAAGC,MAAMC,eAAvBJ,mBAAoC;AACrD,cAAMK,UAAUL,MAAME,QAAQ,CAAA,EAAGC,MAAME;AAKvC,YAAIN,SAAS,cAAaE,qCAAUK,KAAI;AACtCP,iBAAO;AACPD,uBAAaS,mBAAkB;QACjC,WAAWR,SAAS,eAAeE,aAAaO,WAAaP,qCAAUK,MAAK;AAC1EP,iBAAO;AACPD,uBAAaW,uBAAsB;QACrC;AAGA,YAAIV,SAAS,MAAM;AACjB,cAAIE,qCAAUK,IAAI;AAChBP,mBAAO;AACPD,yBAAaY,yBAAyBT,SAAUK,IAAIL,SAAUU,SAAUC,IAAI;UAC9E,WAAWP,SAAS;AAClBN,mBAAO;AACPD,yBAAae,qBAAqBb,MAAMM,EAAE;UAC5C;QACF;AAGA,YAAIP,SAAS,aAAaM,SAAS;AACjCP,uBAAagB,uBAAuBT,OAAAA;QACtC,WAAWN,SAAS,gBAAcE,0CAAUU,aAAVV,mBAAoBc,YAAW;AAC/DjB,uBAAakB,wBAAwBf,SAASU,SAASI,SAAS;QAClE;MACF;AAGA,UA
AIhB,SAAS,WAAW;AACtBD,qBAAaS,mBAAkB;MACjC,WAAWR,SAAS,YAAY;AAC9BD,qBAAaW,uBAAsB;MACrC;AAEAX,mBAAamB,SAAQ;IACvB,CAAA;AAEA,WAAO;MACLlC,UAAUA,YAAYmC,OAAAA;IACxB;EACF;AACF;AAjFa5C;;;AE5Cb,OAAO6C,aAAY;AAwCZ,IAAMC,yBAAN,MAAMA;EACHC;EACAC;EACAC;EACAC;EAERC,YAAYC,QAAsC;AAChD,SAAKL,SAASK,OAAOL,UAAU,IAAIM,QAAO,CAAC,CAAA;AAC3C,SAAKL,yBAAyBI,OAAOJ,2BAA2B,SAAS;AACzE,SAAKE,oBAAoBE,OAAOF,sBAAsB,SAAS;AAC/D,SAAKD,cAAcG,OAAOH;EAC5B;EAEA,MAAMK,QACJC,SAC+C;AAC/C,UAAM,EAAEC,UAAUC,SAASC,aAAaC,MAAK,IAAKJ;AAElD,QAAIK,WAAWL,QAAQK,aAAa,MAAM,KAAKb,OAAOc,KAAKC,QAAQC,OAAM,GAAIC;AAE7E,UAAMC,cAAcT,SAASU,GAAG,EAAC;AAEjC,QAAIC,YAAgCC;AAGpC,QAAIH,uBAAuBI,iBAAiBV,OAAO;AACjDQ,kBAAY,MAAM,KAAKG,kBAAkBV,UAAUD,OAAOH,UAAUE,WAAAA;IACtE,WAESO,uBAAuBM,aAAa;AAC3CJ,kBAAY,MAAM,KAAKK,kBAAkBZ,UAAUJ,UAAUC,SAASC,WAAAA;IACxE,OAEK;AACH,YAAM,IAAIe,MAAM,6CAAA;IAClB;AAEA,WAAO;MACLb;MACAD,OAAOQ;IACT;EACF;EAEA,MAAcG,kBACZV,UACAD,OACAH,UACAE,aACA;AACA,QAAIgB,MAAM,MAAM,KAAK3B,OAAOc,KAAKC,QAAQa,KAAKC,SAAShB,UAAUD,KAAAA;AACjE,QAAI,CAACe,IAAIG,iBAAiB;AACxB,YAAM,IAAIJ,MAAM,0BAAA;IAClB;AAGA,UAAMK,eAAeJ,IAAIG,gBAAgBE,oBAAoBC,WAAWC,IACtE,CAACC,aAAaA,SAASlB,EAAE;AAI3B,UAAMmB,iBAAiB3B,SAAS4B,OAC9B,CAACC,YACCA,mBAAmBhB,iBAAiBS,aAAaQ,SAASD,QAAQE,iBAAiB,CAAA;AAGvF,QAAIT,aAAaU,UAAUL,eAAeK,QAAQ;AAChD,YAAM,IAAIf,MAAM,oEAAA;IAClB;AAGA,UAAMgB,cAA6DN,eAAeF,IAChF,CAACI,YAAAA;AACC,aAAO;QACLK,cAAcL,QAAQE;QACtBI,QAAQN,QAAQO;MAClB;IACF,CAAA;AAGF,UAAMC,SAAS,KAAK9C,OAAOc,KAAKC,QAAQa,KAAKmB,wBAAwBlC,UAAUD,OAAO;MACpFoC,cAAcN;IAChB,CAAA;AAEA,UAAM,KAAKO,eAAeH,QAAQnC,WAAAA;AAClC,WAAOC;EACT;EAEA,MAAca,kBACZZ,UACAJ,UACAC,SACAC,aACA;AACAF,eAAW;SAAIA;;AAGf,UAAMyC,sBAAsBzC,SAAS0C,MAAK;AAC1C,UAAMC,eACJF,+BAA+B1B,cAAc0B,oBAAoBG,UAAU;AAG7E,UAAMC,cAAc7C,SACjByB,IAAIqB,6BAAAA,EACJrB,IAAIsB,kCAAAA,EACJrC,GAAG,EAAC;AAEP,QAAImC,YAAYG,SAAS,QAAQ;AAC/B,YAAM,IAAI/B,MAAM,uBAAA;IAClB;AAGA,UAAM,KAAK1B,OAAOc,KAAKC,QAAQN,SAASO,OAAOH,UAAU;MACvD4C,MAAM;MACNJ,SAASC,YAAYD;IACvB,CAAA;AAEA,UAAMK,cAAchD,QAAQwB,IAAIyB,8BAAAA;AAEhC,UAAMC,QAAQ;SACTF;SACC,KAAKzD,yBAAyB;QAAC;UAAE4D,MAAM;QAAmB;UAAsB,CAAA;SAChF,KAAK1D,oBAAoB;QAAC;UAAE0D,MAAM;QAAc;UAAsB,CAAA;;AAI5E,QAAIf,SAAS,KAAK9C,OAAOc,KAAKC,QAAQa,KAAKkB,OAAOjC,UAAU;MAC1DiD,cAAc,KAAK5D;MACnBkD;MACAQ;IACF,CAAA;AAEA,UAAM,KAAKX,eAAeH,QAAQnC,WAAAA;AAElC,WAAOoD,mBAAmBjB,MAAAA;EAC5B;EAEA,MAAcG,eAAeH,QAAyBnC,aAAiC;AACrFA,gBAAYmC,OAAO,OAAOkB,iBAAAA;AAlM9B;AAmMM,UAAIC,iBAAiB;AAErB,uBAAiBC,SAASpB,QAAQ;AAChC,gBAAQoB,MAAMC,OAAK;UACjB,KAAK;AACH,gBAAIF,gBAAgB;AAClBD,2BAAaI,uBAAsB;YACrC;AACAJ,yBAAaK,qBAAqBH,MAAMI,KAAKrD,EAAE;AAC/C;UACF,KAAK;AACH,kBAAIiD,WAAMI,KAAKC,MAAMlB,YAAjBa,mBAA2B,GAAGL,UAAS,QAAQ;AACjDG,2BAAaQ,wBAAuBN,WAAMI,KAAKC,MAAMlB,YAAjBa,mBAA2B,GAAGO,KAAKC,KAAAA;YACzE;AACA;UACF,KAAK;AACHV,yBAAaW,mBAAkB;AAC/B;UACF,KAAK;AACH,gBAAIC;AACJ,gBAAIC;AACJ,gBAAIC;AACJ,gBACEZ,MAAMI,KAAKC,MAAMQ,aAAalB,SAAS,kBACvCK,WAAMI,KAAKC,MAAMQ,aAAa9C,eAA9BiC,mBAA2C,GAAGL,UAAS,YACvD;AACAe,4BAAaV,WAAMI,KAAKC,MAAMQ,aAAa9C,eAA9BiC,mBAA2C,GAAGjD;AAC3D4D,8BAAeX,WAAMI,KAAKC,MAAMQ,aAAa9C,eAA9BiC,mBAA2C,GAAGc,SAASC;AACtEH,8BAAeZ,WAAMI,KAAKC,MAAMQ,aAAa9C,eAA9BiC,mBAA2C,GAAGc,SAASE;YACxE;AAEA,gBAAIL,gBAAgBD,YAAY;AAC9B,kBAAIX,gBAAgB;AAClBD,6BAAaI,uBAAsB;cACrC;AACAH,+BAAiB;AACjBD,2BAAamB,yBAAyBP,YAAYC,YAAAA;YACpD,WAAWC,cAAc;AACvBd,2BAAaoB,wBAAwBN,YAAAA;YACvC;AACA;QACJ;MACF;AACA,UAAIb,gBAAgB;AAClBD,qBAAaI,uBAAsB;MACrC;AACAJ,mBAAaqB,SAAQ;IACvB,CAAA;EACF;AACF;AA3LatF;AA6Lb,SAASgE,mBAAmBjB,QAAuB;AACjD,SAAO,IAAIwC,QAAgB,CAACC,SAASC,WAAAA;AACnC,QAAIC,cAAc,wBAACtB,UAAAA;AACjB,UAAIA,MAAMA,UAAU,sBAAsB;AACxC,cAAMvD,QAAQuD,MAAMG,KAAKrD;AACzB6B,eAAO4C,IAAI,SAASD,WAAAA;AACpBF,gBAAQ3E,KAAAA;MACV;IACF,GANkB;AAOlBkC,WA
AO6C,GAAG,SAASF,WAAAA;EACrB,CAAA;AACF;AAXS1B;;;AClNT,SAA0B6B,0BAA0B;;;AC3B7C,SAASC,mCAAmCC,SAAgB;AACjE,MAAIA,mBAAmBC,aAAa;AAClC,UAAMC,OAAO;MACXC,MAAM;MACNC,WAAW;MACXC,QAAQ;IACV,EAAEL,QAAQE,IAAI;AAEd,UAAMI,OACJN,QAAQE,SAAS,WACb,gDAAgDF,QAAQO,UACxDP,QAAQO;AAEd,WAAO;MACLL;MACAM,OAAO;QAAC;UAAEF;QAAK;;IACjB;EACF,WAAWN,mBAAmBS,wBAAwB;AACpD,WAAO;MACLP,MAAM;MACNM,OAAO;QACL;UACEE,cAAc;YACZC,MAAMX,QAAQW;YACdC,MAAMZ,QAAQa;UAChB;QACF;;IAEJ;EACF,WAAWb,mBAAmBc,eAAe;AAC3C,WAAO;MACLZ,MAAM;MACNM,OAAO;QACL;UACEO,kBAAkB;YAChBJ,MAAMX,QAAQgB;YACdC,UAAU;cACRN,MAAMX,QAAQgB;cACdT,SAASW,aAAalB,QAAQmB,MAAM;YACtC;UACF;QACF;;IAEJ;EACF;AACF;AA7CgBpB;AA+CT,SAASqB,iCAAiCC,QAAmB;AAClE,QAAMV,OAAOU,OAAOV;AACpB,QAAMW,cAAcD,OAAOC;AAC3B,QAAMC,aAAaC,KAAKC,MAAMJ,OAAOK,UAAU;AAE/C,QAAMC,sBAAsB,wBAACC,UAAAA;AAC3B,eAAWC,OAAOD,OAAO;AACvB,UAAIA,MAAMC,GAAAA,EAAKC,MAAM;AACnBF,cAAMC,GAAAA,EAAKC,OAAOF,MAAMC,GAAAA,EAAKC,KAAKC,YAAW;MAC/C;AACA,UAAIH,MAAMC,GAAAA,EAAKG,YAAY;AACzBL,4BAAoBC,MAAMC,GAAAA,EAAKG,UAAU;MAC3C;IACF;EACF,GAT4B;AAU5BL,sBAAoBJ,UAAAA;AAEpB,SAAO;IACLU,sBAAsB;MACpB;QACEtB;QACAW;QACAC;MACF;;EAEJ;AACF;AA1BgBH;AA4BhB,SAASF,aAAagB,KAAY;AAChC,MAAI,CAACA,KAAK;AACR,WAAO;EACT;AACA,MAAI;AACF,WAAOV,KAAKC,MAAMS,GAAAA;EACpB,SAASC,GAAP;AACA,WAAOD;EACT;AACF;AATShB;;;AD7CT,SAASkB,UAAAA,eAAc;AAShB,IAAMC,4BAAN,MAAMA;EACHC;EAERC,YAAYC,SAA4C;AACtD,QAAIA,mCAASF,OAAO;AAClB,WAAKA,QAAQE,QAAQF;IACvB,OAAO;AACL,YAAMG,QAAQ,IAAIC,mBAAmBC,QAAQC,IAAI,gBAAA,CAAiB;AAClE,WAAKN,QAAQG,MAAMI,mBAAmB;QAAEP,OAAO;MAAa,CAAA;IAC9D;EACF;EAEA,MAAMK,QACJG,SAC+C;AAC/C,UAAM,EAAEC,UAAUC,SAASC,YAAW,IAAKH;AAG3C,UAAMI,UAAUH,SAASI,MAAM,GAAG,EAAC,EAAGC,IAAIC,kCAAAA;AAG1C,UAAMC,iBAAiBD,mCAAmCN,SAASQ,GAAG,EAAC,CAAA;AACvE,QAAI,CAACD,gBAAgB;AACnB,YAAM,IAAIE,MAAM,oBAAA;IAClB;AAEA,QAAIC;AACJ,UAAMC,eAAeX,SAASQ,GAAG,CAAA;AACjC,QAAIG,wBAAwBC,eAAeD,aAAaE,SAAS,UAAU;AACzEH,sBAAgBC,aAAaG,QAAQC,KAAI;IAC3C,OAAO;AACL,YAAM,IAAIN,MAAM,uCAAA;IAClB;AAEA,UAAMO,QAAQf,QAAQI,IAAIY,gCAAAA;AAE1B,UAAMC,sBACJ,KAAK3B,MAAMA,UAAU,gBAAgB,KAAKA,MAAMA,UAAU;AAE5D,UAAM4B,OAAO,KAAK5B,MAAM6B,UAAU;MAChCjB,SAAS;WACJA;;WAECe,sBAAsB;UAAC;YAAEL,MAAM;YAAQQ,OAAO;cAAC;gBAAEC,MAAMZ;cAAc;;UAAG;YAAK,CAAA;;;MAGnF,GAAIQ,sBACA,CAAC,IACD;QAAEK,mBAAmB;UAAEV,MAAM;UAAQQ,OAAO;YAAC;cAAEC,MAAMZ;YAAc;;QAAG;MAAE;MAC5EM;IACF,CAAA;AAEA,UAAMQ,SAAS,MAAML,KAAKM,kBAAkBlB,eAAec,KAAK;AAEhEnB,gBAAYwB,OAAO,OAAOC,iBAAAA;AACxB,UAAIC,gBAAgB;AACpB,uBAAiBC,SAASL,OAAOE,QAAQ;AACvC,cAAMI,YAAYD,MAAMP,KAAI;AAC5B,YAAIQ,cAAc,IAAI;AACpB;QACF;AACA,YAAI,CAACF,eAAe;AAClBA,0BAAgB;AAChBD,uBAAaI,qBAAqBC,QAAAA,CAAAA;QACpC;AACAL,qBAAaM,uBAAuBH,SAAAA;MACtC;AACA,UAAIF,eAAe;AACjBD,qBAAaO,mBAAkB;MACjC;AAEA,UAAIC,SAAS,MAAMX,OAAOY,UAAUC,cAAa;AACjD,UAAIF,OAAO;AACT,iBAASG,QAAQH,OAAO;AACtBR,uBAAaY,oBACXP,QAAAA,GACAM,KAAKE,MACLC,KAAKC,UAAUC,wBAAwBL,KAAKM,IAAI,CAAA,CAAA;QAEpD;MACF;AACAjB,mBAAakB,SAAQ;IACvB,CAAA;AAEA,WAAO;MACLC,UAAU/C,QAAQ+C,YAAYd,QAAAA;IAChC;EACF;AACF;AAxFa1C;AA0Fb,SAASqD,wBAAwBI,KAAQ;AACvC,MAAI,OAAOA,QAAQ,UAAU;AAC3B,WAAOA,IAAIC,QAAQ,UAAU,IAAA;EAC/B,WAAWC,MAAMC,QAAQH,GAAAA,GAAM;AAC7B,WAAOA,IAAI1C,IAAIsC,uBAAAA;EACjB,WAAW,OAAOI,QAAQ,YAAYA,QAAQ,MAAM;AAClD,UAAMI,SAAc,CAAC;AACrB,eAAWC,OAAOL,KAAK;AACrB,UAAIA,IAAIM,eAAeD,GAAAA,GAAM;AAC3BD,eAAOC,GAAAA,IAAOT,wBAAwBI,IAAIK,GAAAA,CAAI;MAChD;IACF;AACA,WAAOD;EACT;AACA,SAAOJ;AACT;AAfSJ;;;AErGT,SAASW,UAAAA,eAAc;AAchB,IAAMC,mBAAN,MAAMA;;;;;EAIXC,YAAoBC,SAAkC;SAAlCA,UAAAA;EAAmC;EAEvD,MAAMC,QACJC,SAC+C;AAC/C,UAAM,EAAEC,aAAaC,OAAOC,SAASC,UAAUC,UAAUC,MAAK,IAAKN;AACnE,UAAMO,SAAS,MAAM,KAAKT,QAAQU,QAAQ;MACxCJ,UAAUA,SAASK,IAAIC,gCAAAA;MACvBC,OAAOR,QAAQM,IAAIG,iCAAAA;MACnBV;MACAG;MACAC;IACF,CAAA;AAEAL,gBAAYY,OAAO,OAAOC,iBAAAA;AACxB,YAA
MC,wBAAwB;QAC5BR;QACAO;MACF,CAAA;IACF,CAAA;AAEA,WAAO;MACLT,UAAUA,YAAYW,QAAAA;IACxB;EACF;AACF;AA7BapB;;;AClDb,SAASqB,sBAAsB;AAUxB,IAAMC,cAAN,MAAMA;EACXC;EACAC;EACAC;EACAC;EACAC;EAEAC,YAAYC,SAAgC;AAC1C,SAAKN,OAAOM,QAAQN;AACpB,SAAKC,cAAcK,QAAQL;AAC3B,SAAKC,WAAWI,QAAQJ;AACxB,SAAKC,aAAaG,QAAQH;AAC1B,SAAKC,gBAAgBE,QAAQF,iBAAiB;EAChD;EAEA,MAAMG,WAAiC;AACrC,QAAI,CAAC,KAAKJ,YAAY;AACpB,YAAM,KAAKK,yBAAwB;IACrC;AAEA,WAAO;MACLR,MAAM,KAAKA;MACXC,aAAa,KAAKA;MAClBE,YAAY,KAAKA;MACjBM,SAAS,OAAOC,SAAAA;AACd,cAAMC,WAAW,IAAIC,eAAe;UAAEC,KAAK,KAAKX;QAAS,CAAA;AACzD,YAAIY;AACJ,YAAI,KAAKV,kBAAkB,UAAU;AACnCU,kBAAQJ,KAAKK,OAAOC,KAAKN,IAAAA,EAAM,CAAA,CAAE;QACnC,OAAO;AACLI,kBAAQJ;QACV;AACA,eAAO,MAAMC,SAASM,OAAOH,KAAAA;MAC/B;IACF;EACF;EAEA,MAAMN,2BAA2B;AAC/B,UAAMU,iBAAiB;MAAC;MAAU;MAAU;;AAE5C,QAAIC,YAAY,KAAKjB,SAASkB,QAAQ,QAAQ,EAAA,IAAM;AACpD,QAAIC,SAAS,MAAMC,MAAMH,SAAAA,EACtBI,KAAK,CAACC,QAAQA,IAAIC,KAAI,CAAA,EACtBC,MAAM,MAAA;AACL,YAAM,IAAIC,MAAM,yCAAyCR,SAAAA;IAC3D,CAAA;AAGF,QAAID,eAAeU,SAASP,OAAOQ,IAAI,GAAG;AACxC,WAAKzB,gBAAgB;AACrB,WAAKD,aAAa;QAChB;UACEH,MAAM;UACN6B,MAAMR,OAAOQ;UACb5B,aAAa;QACf;;IAEJ,WAAWoB,OAAOQ,SAAS,UAAU;AACnC,WAAKzB,gBAAgB;AACrB,WAAKD,aAAaY,OAAOC,KAAKK,OAAOS,UAAU,EAAEC,IAAI,CAACC,QAAAA;AArE5D;AAsEQ,YAAIC,WAAWZ,OAAOS,WAAWE,GAAAA;AACjC,YAAI,CAACd,eAAeU,SAASK,SAASJ,IAAI,GAAG;AAC3C,gBAAM,IAAIF,MAAM,yBAAA;QAClB;AACA,eAAO;UACL3B,MAAMgC;UACNH,MAAMI,SAASJ;UACf5B,aAAagC,SAAShC,eAAe;UACrCiC,YAAUb,YAAOa,aAAPb,mBAAiBO,SAASI,SAAQ;QAC9C;MACF,CAAA;IACF,OAAO;AACL,YAAM,IAAIL,MAAM,yBAAA;IAClB;EACF;AACF;AA3Ea5B;","names":["OpenAI","encodingForModel","limitMessagesToTokenCount","messages","tools","model","maxTokens","maxTokensForOpenAIModel","result","toolsNumTokens","countToolsTokens","Error","message","role","numTokens","countMessageTokens","cutoff","reversedMessages","reverse","unshift","maxTokensByModel","DEFAULT_MAX_TOKENS","length","json","JSON","stringify","countTokens","content","text","enc","encodingForModel","e","encode","convertActionInputToOpenAITool","action","type","function","name","description","parameters","parse","jsonSchema","convertMessageToOpenAIMessage","TextMessage","ActionExecutionMessage","tool_calls","id","arguments","ResultMessage","tool_call_id","actionExecutionId","convertSystemMessageToAssistantAPI","nanoid","DEFAULT_MODEL","OpenAIAdapter","model","_openai","openai","constructor","params","OpenAI","process","request","threadId","messages","actions","eventSource","tools","map","convertActionInputToOpenAITool","openaiMessages","convertMessageToOpenAIMessage","limitMessagesToTokenCount","stream","beta","chat","completions","length","eventStream$","mode","chunk","toolCall","choices","delta","tool_calls","content","id","sendTextMessageEnd","undefined","sendActionExecutionEnd","sendActionExecutionStart","function","name","sendTextMessageStart","sendTextMessageContent","arguments","sendActionExecutionArgs","complete","nanoid","OpenAI","OpenAIAssistantAdapter","openai","codeInterpreterEnabled","assistantId","fileSearchEnabled","constructor","params","OpenAI","process","request","messages","actions","eventSource","runId","threadId","beta","threads","create","id","lastMessage","at","nextRunId","undefined","ResultMessage","submitToolOutputs","TextMessage","submitUserMessage","Error","run","runs","retrieve","required_action","toolCallsIds","submit_tool_outputs","tool_calls","map","toolCall","resultMessages","filter","message","includes","actionExecutionId","length","toolOutputs","tool_call_id","output","result","stream","submitToolOutputsStream","tool_outputs","streamResponse","instructionsMessage"
,"shift","instructions","content","userMessage","convertMessageToOpenAIMessage","convertSystemMessageToAssistantAPI","role","openaiTools","convertActionInputToOpenAITool","tools","type","assistant_id","getRunIdFromStream","eventStream$","inFunctionCall","chunk","event","sendActionExecutionEnd","sendTextMessageStart","data","delta","sendTextMessageContent","text","value","sendTextMessageEnd","toolCallId","toolCallName","toolCallArgs","step_details","function","name","arguments","sendActionExecutionStart","sendActionExecutionArgs","complete","Promise","resolve","reject","runIdGetter","off","on","GoogleGenerativeAI","convertMessageToGoogleGenAIMessage","message","TextMessage","role","user","assistant","system","text","content","parts","ActionExecutionMessage","functionCall","name","args","arguments","ResultMessage","functionResponse","actionName","response","tryParseJson","result","transformActionToGoogleGenAITool","action","description","parameters","JSON","parse","jsonSchema","transformProperties","props","key","type","toUpperCase","properties","functionDeclarations","str","e","nanoid","GoogleGenerativeAIAdapter","model","constructor","options","genAI","GoogleGenerativeAI","process","env","getGenerativeModel","request","messages","actions","eventSource","history","slice","map","convertMessageToGoogleGenAIMessage","currentMessage","at","Error","systemMessage","firstMessage","TextMessage","role","content","trim","tools","transformActionToGoogleGenAITool","isFirstGenGeminiPro","chat","startChat","parts","text","systemInstruction","result","sendMessageStream","stream","eventStream$","isTextMessage","chunk","chunkText","sendTextMessageStart","nanoid","sendTextMessageContent","sendTextMessageEnd","calls","response","functionCalls","call","sendActionExecution","name","JSON","stringify","replaceNewlinesInObject","args","complete","threadId","obj","replace","Array","isArray","newObj","key","hasOwnProperty","nanoid","LangChainAdapter","constructor","options","process","request","eventSource","model","actions","messages","threadId","runId","result","chainFn","map","convertMessageToLangChainMessage","tools","convertActionInputToLangChainTool","stream","eventStream$","streamLangChainResponse","nanoid","RemoteRunnable","RemoteChain","name","description","chainUrl","parameters","parameterType","constructor","options","toAction","inferLangServeParameters","handler","args","runnable","RemoteRunnable","url","input","Object","keys","invoke","supportedTypes","schemaUrl","replace","schema","fetch","then","res","json","catch","Error","includes","type","properties","map","key","property","required"]}