@copilotkit/runtime 1.50.0-beta.8 → 1.50.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (226)
  1. package/CHANGELOG.md +168 -0
  2. package/dist/chunk-27JKTS6P.mjs +1704 -0
  3. package/dist/chunk-27JKTS6P.mjs.map +1 -0
  4. package/dist/chunk-2GPTVDTO.mjs +25 -0
  5. package/dist/chunk-2GPTVDTO.mjs.map +1 -0
  6. package/dist/chunk-2OZAGFV3.mjs +43 -0
  7. package/dist/chunk-2OZAGFV3.mjs.map +1 -0
  8. package/dist/chunk-3AJVKDZX.mjs +3097 -0
  9. package/dist/chunk-3AJVKDZX.mjs.map +1 -0
  10. package/dist/chunk-45RCC3ZS.mjs +25 -0
  11. package/dist/chunk-45RCC3ZS.mjs.map +1 -0
  12. package/dist/chunk-4EHJ4XFJ.mjs +25 -0
  13. package/dist/chunk-4EHJ4XFJ.mjs.map +1 -0
  14. package/dist/chunk-4IANB4TC.mjs +25 -0
  15. package/dist/chunk-4IANB4TC.mjs.map +1 -0
  16. package/dist/chunk-4KES76K3.mjs +74 -0
  17. package/dist/chunk-4KES76K3.mjs.map +1 -0
  18. package/dist/chunk-4OGE3SLW.mjs +3100 -0
  19. package/dist/chunk-4OGE3SLW.mjs.map +1 -0
  20. package/dist/chunk-54YJBMCP.mjs +3097 -0
  21. package/dist/chunk-54YJBMCP.mjs.map +1 -0
  22. package/dist/chunk-62NE5S6M.mjs +226 -0
  23. package/dist/chunk-62NE5S6M.mjs.map +1 -0
  24. package/dist/chunk-6ER4SZYH.mjs +74 -0
  25. package/dist/chunk-6ER4SZYH.mjs.map +1 -0
  26. package/dist/chunk-6TNSLHVR.mjs +74 -0
  27. package/dist/chunk-6TNSLHVR.mjs.map +1 -0
  28. package/dist/chunk-6XRUR5UK.mjs +1 -0
  29. package/dist/chunk-6XRUR5UK.mjs.map +1 -0
  30. package/dist/chunk-7V4BK7TZ.mjs +25 -0
  31. package/dist/chunk-7V4BK7TZ.mjs.map +1 -0
  32. package/dist/chunk-7YZIEXD2.mjs +74 -0
  33. package/dist/chunk-7YZIEXD2.mjs.map +1 -0
  34. package/dist/chunk-A4XHOAFU.mjs +25 -0
  35. package/dist/chunk-A4XHOAFU.mjs.map +1 -0
  36. package/dist/chunk-A555KEAD.mjs +6020 -0
  37. package/dist/chunk-A555KEAD.mjs.map +1 -0
  38. package/dist/chunk-AF73TFTX.mjs +74 -0
  39. package/dist/chunk-AF73TFTX.mjs.map +1 -0
  40. package/dist/chunk-AMUJQ6IR.mjs +50 -0
  41. package/dist/chunk-AMUJQ6IR.mjs.map +1 -0
  42. package/dist/chunk-AQG2SVCA.mjs +25 -0
  43. package/dist/chunk-AQG2SVCA.mjs.map +1 -0
  44. package/dist/chunk-BJZHMXND.mjs +74 -0
  45. package/dist/chunk-BJZHMXND.mjs.map +1 -0
  46. package/dist/chunk-CB2OJXF6.mjs +25 -0
  47. package/dist/chunk-CB2OJXF6.mjs.map +1 -0
  48. package/dist/chunk-CEOMFPJU.mjs +6020 -0
  49. package/dist/chunk-CEOMFPJU.mjs.map +1 -0
  50. package/dist/chunk-CZVLR7CC.mjs +175 -0
  51. package/dist/chunk-CZVLR7CC.mjs.map +1 -0
  52. package/dist/chunk-DCEEHMLJ.mjs +1127 -0
  53. package/dist/chunk-DCEEHMLJ.mjs.map +1 -0
  54. package/dist/chunk-DE3CLKUG.mjs +25 -0
  55. package/dist/chunk-DE3CLKUG.mjs.map +1 -0
  56. package/dist/chunk-DTPRUTNV.mjs +25 -0
  57. package/dist/chunk-DTPRUTNV.mjs.map +1 -0
  58. package/dist/chunk-ERUOA47O.mjs +626 -0
  59. package/dist/chunk-ERUOA47O.mjs.map +1 -0
  60. package/dist/chunk-ESSRC64W.mjs +74 -0
  61. package/dist/chunk-ESSRC64W.mjs.map +1 -0
  62. package/dist/chunk-FHD4JECV.mjs +33 -0
  63. package/dist/chunk-FHD4JECV.mjs.map +1 -0
  64. package/dist/chunk-GRAN6K6N.mjs +25 -0
  65. package/dist/chunk-GRAN6K6N.mjs.map +1 -0
  66. package/dist/chunk-I27F2UPA.mjs +175 -0
  67. package/dist/chunk-I27F2UPA.mjs.map +1 -0
  68. package/dist/chunk-IAZKTOQW.mjs +25 -0
  69. package/dist/chunk-IAZKTOQW.mjs.map +1 -0
  70. package/dist/chunk-J6XZ5MFB.mjs +25 -0
  71. package/dist/chunk-J6XZ5MFB.mjs.map +1 -0
  72. package/dist/chunk-JJ32MA4C.mjs +73 -0
  73. package/dist/chunk-JJ32MA4C.mjs.map +1 -0
  74. package/dist/chunk-JJY4ZTHQ.mjs +25 -0
  75. package/dist/chunk-JJY4ZTHQ.mjs.map +1 -0
  76. package/dist/chunk-KEYLBFU2.mjs +3117 -0
  77. package/dist/chunk-KEYLBFU2.mjs.map +1 -0
  78. package/dist/chunk-KQ53L4WZ.mjs +3094 -0
  79. package/dist/chunk-KQ53L4WZ.mjs.map +1 -0
  80. package/dist/chunk-KTELVQ67.mjs +3098 -0
  81. package/dist/chunk-KTELVQ67.mjs.map +1 -0
  82. package/dist/chunk-LPEPX6NH.mjs +25 -0
  83. package/dist/chunk-LPEPX6NH.mjs.map +1 -0
  84. package/dist/chunk-MDXE55DK.mjs +3117 -0
  85. package/dist/chunk-MDXE55DK.mjs.map +1 -0
  86. package/dist/chunk-MMFUVOXH.mjs +73 -0
  87. package/dist/chunk-MMFUVOXH.mjs.map +1 -0
  88. package/dist/chunk-N3Y4U66N.mjs +253 -0
  89. package/dist/chunk-N3Y4U66N.mjs.map +1 -0
  90. package/dist/chunk-O7UYB4MH.mjs +25 -0
  91. package/dist/chunk-O7UYB4MH.mjs.map +1 -0
  92. package/dist/chunk-OFNVQHNM.mjs +3089 -0
  93. package/dist/chunk-OFNVQHNM.mjs.map +1 -0
  94. package/dist/chunk-OFSV5GET.mjs +3074 -0
  95. package/dist/chunk-OFSV5GET.mjs.map +1 -0
  96. package/dist/chunk-OMRST67R.mjs +25 -0
  97. package/dist/chunk-OMRST67R.mjs.map +1 -0
  98. package/dist/chunk-OWIGJONH.mjs +275 -0
  99. package/dist/chunk-OWIGJONH.mjs.map +1 -0
  100. package/dist/chunk-PRZHE74A.mjs +25 -0
  101. package/dist/chunk-PRZHE74A.mjs.map +1 -0
  102. package/dist/chunk-PTYRVXXP.mjs +80 -0
  103. package/dist/chunk-PTYRVXXP.mjs.map +1 -0
  104. package/dist/chunk-R22B5CCO.mjs +25 -0
  105. package/dist/chunk-R22B5CCO.mjs.map +1 -0
  106. package/dist/chunk-SHBDMA63.mjs +141 -0
  107. package/dist/chunk-SHBDMA63.mjs.map +1 -0
  108. package/dist/chunk-SPVXBPRA.mjs +74 -0
  109. package/dist/chunk-SPVXBPRA.mjs.map +1 -0
  110. package/dist/chunk-T72G46ME.mjs +25 -0
  111. package/dist/chunk-T72G46ME.mjs.map +1 -0
  112. package/dist/chunk-TGELROPU.mjs +25 -0
  113. package/dist/chunk-TGELROPU.mjs.map +1 -0
  114. package/dist/chunk-UNX4IAAD.mjs +25 -0
  115. package/dist/chunk-UNX4IAAD.mjs.map +1 -0
  116. package/dist/chunk-V4DHVC7M.mjs +3085 -0
  117. package/dist/chunk-V4DHVC7M.mjs.map +1 -0
  118. package/dist/chunk-VVRFOB66.mjs +25 -0
  119. package/dist/chunk-VVRFOB66.mjs.map +1 -0
  120. package/dist/chunk-W6NVBYM6.mjs +80 -0
  121. package/dist/chunk-W6NVBYM6.mjs.map +1 -0
  122. package/dist/chunk-W7MBACGC.mjs +74 -0
  123. package/dist/chunk-W7MBACGC.mjs.map +1 -0
  124. package/dist/chunk-WMD4XZZS.mjs +25 -0
  125. package/dist/chunk-WMD4XZZS.mjs.map +1 -0
  126. package/dist/chunk-WX2ZNCRT.mjs +74 -0
  127. package/dist/chunk-WX2ZNCRT.mjs.map +1 -0
  128. package/dist/chunk-XWBDEXDA.mjs +153 -0
  129. package/dist/chunk-XWBDEXDA.mjs.map +1 -0
  130. package/dist/chunk-Y2Z62E2T.mjs +74 -0
  131. package/dist/chunk-Y2Z62E2T.mjs.map +1 -0
  132. package/dist/chunk-YO4I6RVI.mjs +25 -0
  133. package/dist/chunk-YO4I6RVI.mjs.map +1 -0
  134. package/dist/chunk-Z6Q5IW6I.mjs +3098 -0
  135. package/dist/chunk-Z6Q5IW6I.mjs.map +1 -0
  136. package/dist/chunk-Z726O3G2.mjs +25 -0
  137. package/dist/chunk-Z726O3G2.mjs.map +1 -0
  138. package/dist/chunk-ZE4SMZZR.mjs +3097 -0
  139. package/dist/chunk-ZE4SMZZR.mjs.map +1 -0
  140. package/dist/chunk-ZULZB33C.mjs +73 -0
  141. package/dist/chunk-ZULZB33C.mjs.map +1 -0
  142. package/dist/chunk-ZVRGXMY7.mjs +25 -0
  143. package/dist/chunk-ZVRGXMY7.mjs.map +1 -0
  144. package/dist/chunk-ZZ35WBYQ.mjs +25 -0
  145. package/dist/chunk-ZZ35WBYQ.mjs.map +1 -0
  146. package/dist/graphql/message-conversion/index.d.ts +18 -0
  147. package/dist/graphql/message-conversion/index.js +725 -0
  148. package/dist/graphql/message-conversion/index.js.map +1 -0
  149. package/dist/graphql/message-conversion/index.mjs +245 -0
  150. package/dist/graphql/message-conversion/index.mjs.map +1 -0
  151. package/dist/graphql/types/base/index.d.ts +6 -0
  152. package/dist/graphql/types/base/index.js +63 -0
  153. package/dist/graphql/types/base/index.js.map +1 -0
  154. package/dist/graphql/types/base/index.mjs +8 -0
  155. package/dist/graphql/types/base/index.mjs.map +1 -0
  156. package/dist/graphql/types/converted/index.d.ts +2 -0
  157. package/dist/graphql/types/converted/index.js +200 -0
  158. package/dist/graphql/types/converted/index.js.map +1 -0
  159. package/dist/graphql/types/converted/index.mjs +19 -0
  160. package/dist/graphql/types/converted/index.mjs.map +1 -0
  161. package/dist/groq-adapter-540da9c3.d.ts +331 -0
  162. package/dist/groq-adapter-a6f5e9d2.d.ts +331 -0
  163. package/dist/groq-adapter-c8aec5c5.d.ts +321 -0
  164. package/dist/index-96b330da.d.ts +119 -0
  165. package/dist/index-adbd78f1.d.ts +154 -0
  166. package/dist/index.d.ts +67 -8
  167. package/dist/index.js +118 -28
  168. package/dist/index.js.map +1 -1
  169. package/dist/index.mjs +120 -31
  170. package/dist/index.mjs.map +1 -1
  171. package/dist/langserve-0c6100e3.d.ts +257 -0
  172. package/dist/langserve-978d5790.d.ts +243 -0
  173. package/dist/langserve-9fc76ce5.d.ts +243 -0
  174. package/dist/lib/cloud/index.d.ts +6 -0
  175. package/dist/lib/cloud/index.js +18 -0
  176. package/dist/lib/cloud/index.js.map +1 -0
  177. package/dist/lib/cloud/index.mjs +1 -0
  178. package/dist/lib/cloud/index.mjs.map +1 -0
  179. package/dist/lib/index.d.ts +212 -0
  180. package/dist/lib/index.js +7843 -0
  181. package/dist/lib/index.js.map +1 -0
  182. package/dist/lib/index.mjs +76 -0
  183. package/dist/lib/index.mjs.map +1 -0
  184. package/dist/lib/integrations/index.d.ts +34 -0
  185. package/dist/lib/integrations/index.js +3052 -0
  186. package/dist/lib/integrations/index.js.map +1 -0
  187. package/dist/lib/integrations/index.mjs +37 -0
  188. package/dist/lib/integrations/index.mjs.map +1 -0
  189. package/dist/lib/integrations/nest/index.d.ts +15 -0
  190. package/dist/lib/integrations/nest/index.js +2959 -0
  191. package/dist/lib/integrations/nest/index.js.map +1 -0
  192. package/dist/lib/integrations/nest/index.mjs +14 -0
  193. package/dist/lib/integrations/nest/index.mjs.map +1 -0
  194. package/dist/lib/integrations/node-express/index.d.ts +15 -0
  195. package/dist/lib/integrations/node-express/index.js +2959 -0
  196. package/dist/lib/integrations/node-express/index.js.map +1 -0
  197. package/dist/lib/integrations/node-express/index.mjs +14 -0
  198. package/dist/lib/integrations/node-express/index.mjs.map +1 -0
  199. package/dist/lib/integrations/node-http/index.d.ts +15 -0
  200. package/dist/lib/integrations/node-http/index.js +2945 -0
  201. package/dist/lib/integrations/node-http/index.js.map +1 -0
  202. package/dist/lib/integrations/node-http/index.mjs +13 -0
  203. package/dist/lib/integrations/node-http/index.mjs.map +1 -0
  204. package/dist/service-adapters/index.d.ts +162 -0
  205. package/dist/service-adapters/index.js +1787 -0
  206. package/dist/service-adapters/index.js.map +1 -0
  207. package/dist/service-adapters/index.mjs +34 -0
  208. package/dist/service-adapters/index.mjs.map +1 -0
  209. package/dist/service-adapters/shared/index.d.ts +9 -0
  210. package/dist/service-adapters/shared/index.js +72 -0
  211. package/dist/service-adapters/shared/index.js.map +1 -0
  212. package/dist/service-adapters/shared/index.mjs +8 -0
  213. package/dist/service-adapters/shared/index.mjs.map +1 -0
  214. package/dist/shared-0a7346ce.d.ts +466 -0
  215. package/dist/shared-35c6eb04.d.ts +448 -0
  216. package/dist/shared-9ed1dc31.d.ts +414 -0
  217. package/dist/shared-da5708fe.d.ts +449 -0
  218. package/dist/utils/index.d.ts +65 -0
  219. package/dist/utils/index.js +175 -0
  220. package/dist/utils/index.js.map +1 -0
  221. package/dist/utils/index.mjs +12 -0
  222. package/dist/utils/index.mjs.map +1 -0
  223. package/package.json +14 -20
  224. package/src/lib/index.ts +7 -6
  225. package/src/lib/runtime/copilot-runtime.ts +62 -26
  226. package/src/lib/runtime/telemetry-agent-runner.ts +139 -0
@@ -0,0 +1,1127 @@
1
+ import {
2
+ convertServiceAdapterError
3
+ } from "./chunk-AMUJQ6IR.mjs";
4
+ import {
5
+ __name
6
+ } from "./chunk-FHD4JECV.mjs";
7
+
8
+ // src/service-adapters/openai/openai-adapter.ts
9
+ import OpenAI from "openai";
10
+
11
+ // src/service-adapters/openai/utils.ts
12
+ import { parseJson } from "@copilotkit/shared";
13
+ function limitMessagesToTokenCount(messages, tools, model, maxTokens) {
14
+ maxTokens || (maxTokens = maxTokensForOpenAIModel(model));
15
+ const result = [];
16
+ const toolsNumTokens = countToolsTokens(model, tools);
17
+ if (toolsNumTokens > maxTokens) {
18
+ throw new Error(`Too many tokens in function definitions: ${toolsNumTokens} > ${maxTokens}`);
19
+ }
20
+ maxTokens -= toolsNumTokens;
21
+ for (const message of messages) {
22
+ if ([
23
+ "system",
24
+ "developer"
25
+ ].includes(message.role)) {
26
+ const numTokens = countMessageTokens(model, message);
27
+ maxTokens -= numTokens;
28
+ if (maxTokens < 0) {
29
+ throw new Error("Not enough tokens for system message.");
30
+ }
31
+ }
32
+ }
33
+ let cutoff = false;
34
+ const reversedMessages = [
35
+ ...messages
36
+ ].reverse();
37
+ for (const message of reversedMessages) {
38
+ if ([
39
+ "system",
40
+ "developer"
41
+ ].includes(message.role)) {
42
+ result.unshift(message);
43
+ continue;
44
+ } else if (cutoff) {
45
+ continue;
46
+ }
47
+ let numTokens = countMessageTokens(model, message);
48
+ if (maxTokens < numTokens) {
49
+ cutoff = true;
50
+ continue;
51
+ }
52
+ result.unshift(message);
53
+ maxTokens -= numTokens;
54
+ }
55
+ return result;
56
+ }
57
+ __name(limitMessagesToTokenCount, "limitMessagesToTokenCount");
58
/** Context-window size for `model`, or DEFAULT_MAX_TOKENS for unknown models. */
function maxTokensForOpenAIModel(model) {
  const known = maxTokensByModel[model];
  return known || DEFAULT_MAX_TOKENS;
}
__name(maxTokensForOpenAIModel, "maxTokensForOpenAIModel");
62
// Fallback context window (tokens) for models not listed below.
var DEFAULT_MAX_TOKENS = 128e3;
// Context-window sizes (tokens) keyed by OpenAI model name.
// Used by maxTokensForOpenAIModel() when limiting conversation history.
var maxTokensByModel = {
  // o1
  o1: 2e5,
  "o1-2024-12-17": 2e5,
  "o1-mini": 128e3,
  "o1-mini-2024-09-12": 128e3,
  "o1-preview": 128e3,
  "o1-preview-2024-09-12": 128e3,
  // o3-mini
  "o3-mini": 2e5,
  "o3-mini-2025-01-31": 2e5,
  // GPT-4
  "gpt-4o": 128e3,
  "chatgpt-4o-latest": 128e3,
  "gpt-4o-2024-08-06": 128e3,
  "gpt-4o-2024-05-13": 128e3,
  "gpt-4o-mini": 128e3,
  "gpt-4o-mini-2024-07-18": 128e3,
  "gpt-4-turbo": 128e3,
  "gpt-4-turbo-2024-04-09": 128e3,
  "gpt-4-0125-preview": 128e3,
  "gpt-4-turbo-preview": 128e3,
  "gpt-4-1106-preview": 128e3,
  "gpt-4-vision-preview": 128e3,
  "gpt-4-1106-vision-preview": 128e3,
  "gpt-4-32k": 32768,
  "gpt-4-32k-0613": 32768,
  "gpt-4-32k-0314": 32768,
  "gpt-4": 8192,
  "gpt-4-0613": 8192,
  "gpt-4-0314": 8192,
  // GPT-3.5
  "gpt-3.5-turbo-0125": 16385,
  "gpt-3.5-turbo": 16385,
  "gpt-3.5-turbo-1106": 16385,
  "gpt-3.5-turbo-instruct": 4096,
  "gpt-3.5-turbo-16k": 16385,
  "gpt-3.5-turbo-0613": 4096,
  "gpt-3.5-turbo-16k-0613": 16385,
  "gpt-3.5-turbo-0301": 4097
};
104
/** Approximate token cost of serializing the tool definitions; 0 when none. */
function countToolsTokens(model, tools) {
  if (tools.length === 0) return 0;
  const serialized = JSON.stringify(tools);
  return countTokens(model, serialized);
}
__name(countToolsTokens, "countToolsTokens");
112
/** Approximate token cost of one chat message's text content. */
function countMessageTokens(model, message) {
  const text = message.content || "";
  return countTokens(model, text);
}
__name(countMessageTokens, "countMessageTokens");
116
/** Cheap token estimate: roughly one token per three characters (model-independent). */
function countTokens(model, text) {
  const CHARS_PER_TOKEN = 3;
  return text.length / CHARS_PER_TOKEN;
}
__name(countTokens, "countTokens");
120
/** Map a CopilotKit action input onto the OpenAI function-tool schema. */
function convertActionInputToOpenAITool(action) {
  const { name, description, jsonSchema } = action;
  return {
    type: "function",
    function: {
      name,
      description,
      // Malformed schemas fall back to an empty parameter object.
      parameters: parseJson(jsonSchema, {}),
    },
  };
}
__name(convertActionInputToOpenAITool, "convertActionInputToOpenAITool");
131
/**
 * Convert a CopilotKit message into the OpenAI chat-message shape.
 *
 * Text messages keep their role, except that "system" is demoted to
 * "developer" unless `options.keepSystemRole` is set. Image messages become
 * a user message with a base64 data URL; action executions become assistant
 * tool calls; results become tool messages. Unknown kinds yield `undefined`.
 */
function convertMessageToOpenAIMessage(message, options) {
  const { keepSystemRole } = options || { keepSystemRole: false };

  if (message.isTextMessage()) {
    const demote = message.role === "system" && !keepSystemRole;
    return {
      role: demote ? "developer" : message.role,
      content: message.content,
    };
  }

  if (message.isImageMessage()) {
    const dataUrl = `data:image/${message.format};base64,${message.bytes}`;
    return {
      role: "user",
      content: [{ type: "image_url", image_url: { url: dataUrl } }],
    };
  }

  if (message.isActionExecutionMessage()) {
    return {
      role: "assistant",
      tool_calls: [
        {
          id: message.id,
          type: "function",
          function: {
            name: message.name,
            arguments: JSON.stringify(message.arguments),
          },
        },
      ],
    };
  }

  if (message.isResultMessage()) {
    return {
      role: "tool",
      content: message.result,
      tool_call_id: message.actionExecutionId,
    };
  }
  // Any other message kind falls through and returns undefined.
}
__name(convertMessageToOpenAIMessage, "convertMessageToOpenAIMessage");
179
/**
 * Rewrite a system/developer message as an assistant message, prefixing the
 * content with a marker. Non-system messages are returned as a shallow copy
 * with no changes.
 */
function convertSystemMessageToAssistantAPI(message) {
  const isSystemLike = message.role === "system" || message.role === "developer";
  if (!isSystemLike) {
    return { ...message };
  }
  return {
    ...message,
    role: "assistant",
    content: "THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: " + message.content,
  };
}
__name(convertSystemMessageToAssistantAPI, "convertSystemMessageToAssistantAPI");
192
+
193
+ // src/service-adapters/openai/openai-adapter.ts
194
+ import { randomUUID } from "@copilotkit/shared";
195
// Model used when the caller supplies neither a constructor model nor a request model.
var DEFAULT_MODEL = "gpt-4o";
/**
 * Service adapter that bridges CopilotKit requests to the OpenAI
 * chat-completions streaming API, translating the SSE chunk stream into
 * CopilotKit text-message / action-execution events.
 */
var OpenAIAdapter = class {
  model = DEFAULT_MODEL;
  provider = "openai";
  // When true, `parallel_tool_calls: false` is sent with every request.
  disableParallelToolCalls = false;
  _openai;
  // When false (default), "system" roles are rewritten to "developer" before sending.
  keepSystemRole = false;
  get openai() {
    return this._openai;
  }
  get name() {
    return "OpenAIAdapter";
  }
  /**
   * All params are optional; a bare `new OpenAI({})` client (reading env
   * credentials) is constructed when none is provided.
   */
  constructor(params) {
    this._openai = (params == null ? void 0 : params.openai) || new OpenAI({});
    if (params == null ? void 0 : params.model) {
      this.model = params.model;
    }
    this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
    this.keepSystemRole = (params == null ? void 0 : params.keepSystemRole) ?? false;
  }
  /**
   * Handle one copilot request: sanitize the message history, open an OpenAI
   * completion stream, and re-emit it as CopilotKit events on `eventSource`.
   * Returns `{ threadId }` (generated when the request carried none).
   */
  async process(request) {
    const { threadId: threadIdFromRequest, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
    const tools = actions.map(convertActionInputToOpenAITool);
    const threadId = threadIdFromRequest ?? randomUUID();
    // Collect ids of action-execution messages so orphaned tool results
    // (results whose call is missing) can be dropped below.
    const validToolUseIds = /* @__PURE__ */ new Set();
    for (const message of messages) {
      if (message.isActionExecutionMessage()) {
        validToolUseIds.add(message.id);
      }
    }
    // Keep each tool result only once, and only when its call id was seen.
    const filteredMessages = messages.filter((message) => {
      if (message.isResultMessage()) {
        if (!validToolUseIds.has(message.actionExecutionId)) {
          return false;
        }
        validToolUseIds.delete(message.actionExecutionId);
        return true;
      }
      return true;
    });
    let openaiMessages = filteredMessages.map((m) => convertMessageToOpenAIMessage(m, {
      keepSystemRole: this.keepSystemRole
    }));
    openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);
    // "function" is expanded into OpenAI's structured tool_choice object.
    let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
    if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {
      toolChoice = {
        type: "function",
        function: {
          name: forwardedParameters.toolChoiceFunctionName
        }
      };
    }
    try {
      // NOTE(review): the `&&`-spreads below drop falsy values, so
      // `temperature: 0` and `maxTokens: 0` are silently ignored — confirm
      // whether 0 should be forwardable (`!= null` would preserve it).
      const stream = this.openai.beta.chat.completions.stream({
        model,
        stream: true,
        messages: openaiMessages,
        ...tools.length > 0 && {
          tools
        },
        ...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
          max_completion_tokens: forwardedParameters.maxTokens
        },
        ...(forwardedParameters == null ? void 0 : forwardedParameters.stop) && {
          stop: forwardedParameters.stop
        },
        ...toolChoice && {
          tool_choice: toolChoice
        },
        ...this.disableParallelToolCalls && {
          parallel_tool_calls: false
        },
        ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
          temperature: forwardedParameters.temperature
        }
      });
      eventSource.stream(async (eventStream$) => {
        var _a, _b;
        // Small state machine over the delta stream:
        //   mode === "message"  → inside a streaming text message
        //   mode === "function" → inside a streaming tool call
        //   mode === null       → between segments
        let mode = null;
        let currentMessageId;
        let currentToolCallId;
        try {
          for await (const chunk of stream) {
            if (chunk.choices.length === 0) {
              continue;
            }
            const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
            const content = chunk.choices[0].delta.content;
            // Close the current segment when the chunk starts a different one:
            // a new tool-call id ends a text message; absence of a tool call
            // (or a new id) ends a tool call.
            if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
              mode = null;
              eventStream$.sendTextMessageEnd({
                messageId: currentMessageId
              });
            } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
              mode = null;
              eventStream$.sendActionExecutionEnd({
                actionExecutionId: currentToolCallId
              });
            }
            // Open a new segment if we're between segments.
            if (mode === null) {
              if (toolCall == null ? void 0 : toolCall.id) {
                mode = "function";
                currentToolCallId = toolCall.id;
                eventStream$.sendActionExecutionStart({
                  actionExecutionId: currentToolCallId,
                  parentMessageId: chunk.id,
                  actionName: toolCall.function.name
                });
              } else if (content) {
                mode = "message";
                currentMessageId = chunk.id;
                eventStream$.sendTextMessageStart({
                  messageId: currentMessageId
                });
              }
            }
            // Forward this chunk's payload into the open segment.
            if (mode === "message" && content) {
              eventStream$.sendTextMessageContent({
                messageId: currentMessageId,
                content
              });
            } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
              eventStream$.sendActionExecutionArgs({
                actionExecutionId: currentToolCallId,
                args: toolCall.function.arguments
              });
            }
          }
          // Stream exhausted: close whichever segment is still open.
          if (mode === "message") {
            eventStream$.sendTextMessageEnd({
              messageId: currentMessageId
            });
          } else if (mode === "function") {
            eventStream$.sendActionExecutionEnd({
              actionExecutionId: currentToolCallId
            });
          }
        } catch (error) {
          console.error("[OpenAI] Error during API call:", error);
          throw convertServiceAdapterError(error, "OpenAI");
        }
        eventStream$.complete();
      });
    } catch (error) {
      console.error("[OpenAI] Error during API call:", error);
      throw convertServiceAdapterError(error, "OpenAI");
    }
    return {
      threadId
    };
  }
};
__name(OpenAIAdapter, "OpenAIAdapter");
350
+
351
+ // src/service-adapters/langchain/utils.ts
352
+ import { AIMessage, HumanMessage, SystemMessage, ToolMessage } from "@langchain/core/messages";
353
+ import { DynamicStructuredTool } from "@langchain/core/tools";
354
+ import { randomId, convertJsonSchemaToZodSchema } from "@copilotkit/shared";
355
/**
 * Convert a CopilotKit message into its LangChain equivalent.
 * Text → Human/AI/SystemMessage by role; action execution → AIMessage with
 * tool_calls; result → ToolMessage. Anything else yields `undefined`.
 */
function convertMessageToLangChainMessage(message) {
  if (message.isTextMessage()) {
    switch (message.role) {
      case "user":
        return new HumanMessage(message.content);
      case "assistant":
        return new AIMessage(message.content);
      case "system":
        return new SystemMessage(message.content);
    }
    return;
  }
  if (message.isActionExecutionMessage()) {
    const call = {
      id: message.id,
      args: message.arguments,
      name: message.name,
    };
    return new AIMessage({ content: "", tool_calls: [call] });
  }
  if (message.isResultMessage()) {
    return new ToolMessage({
      content: message.result,
      tool_call_id: message.actionExecutionId,
    });
  }
}
__name(convertMessageToLangChainMessage, "convertMessageToLangChainMessage");
383
/**
 * Wrap a CopilotKit action input as a LangChain DynamicStructuredTool.
 * The tool body is a no-op returning "" — actual execution happens on the
 * CopilotKit side, the tool only exists so the model can call it.
 */
function convertActionInputToLangChainTool(actionInput) {
  const schema = convertJsonSchemaToZodSchema(JSON.parse(actionInput.jsonSchema), true);
  return new DynamicStructuredTool({
    ...actionInput,
    name: actionInput.name,
    description: actionInput.description,
    schema,
    func: async () => "",
  });
}
__name(convertActionInputToLangChainTool, "convertActionInputToLangChainTool");
395
/** Duck-type check via Symbol.toStringTag: is this a LangChain AIMessage? */
function isAIMessage(message) {
  const tag = Object.prototype.toString.call(message);
  return tag === "[object AIMessage]";
}
__name(isAIMessage, "isAIMessage");
399
/** Duck-type check via Symbol.toStringTag: is this a LangChain AIMessageChunk? */
function isAIMessageChunk(message) {
  const tag = Object.prototype.toString.call(message);
  return tag === "[object AIMessageChunk]";
}
__name(isAIMessageChunk, "isAIMessageChunk");
403
/** Duck-type check via Symbol.toStringTag: is this a LangChain BaseMessageChunk? */
function isBaseMessageChunk(message) {
  const tag = Object.prototype.toString.call(message);
  return tag === "[object BaseMessageChunk]";
}
__name(isBaseMessageChunk, "isBaseMessageChunk");
407
/**
 * When a chain handler responds with a message instead of a tool result,
 * emit a placeholder result for the pending action execution (if any) so the
 * protocol's call/result pairing stays balanced.
 */
function maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution) {
  if (!actionExecution) {
    return;
  }
  eventStream$.sendActionExecutionResult({
    actionExecutionId: actionExecution.id,
    actionName: actionExecution.name,
    result: "Sending a message",
  });
}
__name(maybeSendActionExecutionResultIsMessage, "maybeSendActionExecutionResultIsMessage");
417
/**
 * Translate a LangChain chain result into CopilotKit events and complete the
 * event stream. `result` may be: a plain string, an AIMessage, a
 * BaseMessageChunk, a ReadableStream of message chunks, or (when
 * `actionExecution` is set) any JSON-encodable value used as a tool result.
 * Anything else throws.
 */
async function streamLangChainResponse({ result, eventStream$, actionExecution }) {
  var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
  if (typeof result === "string") {
    // NOTE(review): when `actionExecution` is undefined this branch is taken
    // (`!actionExecution`) yet immediately dereferences `actionExecution.id`,
    // which would throw — confirm callers always pass actionExecution for
    // string results, or that the first condition should only be returnDirect.
    if (!actionExecution || (actionExecution == null ? void 0 : actionExecution.returnDirect)) {
      eventStream$.sendActionExecutionResult({
        actionExecutionId: actionExecution.id,
        actionName: actionExecution.name,
        result
      });
      // returnDirect: surface the result to the user as a text message too.
      eventStream$.sendTextMessage(randomId(), result);
    } else {
      eventStream$.sendActionExecutionResult({
        actionExecutionId: actionExecution.id,
        actionName: actionExecution.name,
        result
      });
    }
  } else if (isAIMessage(result)) {
    // Non-streaming AIMessage: emit its text and each tool call in one shot.
    maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
    if (result.content) {
      eventStream$.sendTextMessage(randomId(), result.content);
    }
    for (const toolCall of result.tool_calls) {
      eventStream$.sendActionExecution({
        actionExecutionId: toolCall.id || randomId(),
        actionName: toolCall.name,
        args: JSON.stringify(toolCall.args)
      });
    }
  } else if (isBaseMessageChunk(result)) {
    // Single chunk: same as above but reading from lc_kwargs.
    maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
    if ((_a = result.lc_kwargs) == null ? void 0 : _a.content) {
      eventStream$.sendTextMessage(randomId(), result.content);
    }
    if ((_b = result.lc_kwargs) == null ? void 0 : _b.tool_calls) {
      for (const toolCall of (_c = result.lc_kwargs) == null ? void 0 : _c.tool_calls) {
        eventStream$.sendActionExecution({
          actionExecutionId: toolCall.id || randomId(),
          actionName: toolCall.name,
          args: JSON.stringify(toolCall.args)
        });
      }
    }
  } else if (result && "getReader" in result) {
    // Streaming path: drive a message/function state machine off the reader,
    // mirroring the OpenAI adapter's segmenting logic.
    maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
    let reader = result.getReader();
    let mode = null;
    let currentMessageId;
    // Accumulated identity of the in-flight tool call; `index`/`prevIndex`
    // detect when the model switches to a different parallel tool call.
    const toolCallDetails = {
      name: null,
      id: null,
      index: null,
      prevIndex: null
    };
    while (true) {
      try {
        const { done, value } = await reader.read();
        let toolCallName = void 0;
        let toolCallId = void 0;
        let toolCallArgs = void 0;
        let hasToolCall = false;
        let content = "";
        // Normalize content: multimodal chunks carry an array of parts.
        if (value && value.content) {
          content = Array.isArray(value.content) ? ((_d = value.content[0]) == null ? void 0 : _d.text) ?? "" : value.content;
        }
        if (isAIMessageChunk(value)) {
          let chunk = (_e = value.tool_call_chunks) == null ? void 0 : _e[0];
          toolCallArgs = chunk == null ? void 0 : chunk.args;
          hasToolCall = chunk != void 0;
          if (chunk == null ? void 0 : chunk.name)
            toolCallDetails.name = chunk.name;
          if ((chunk == null ? void 0 : chunk.index) != null) {
            toolCallDetails.index = chunk.index;
            if (toolCallDetails.prevIndex == null)
              toolCallDetails.prevIndex = chunk.index;
          }
          // Suffix the id with the index so parallel calls get distinct ids.
          if (chunk == null ? void 0 : chunk.id)
            toolCallDetails.id = chunk.index != null ? `${chunk.id}-idx-${chunk.index}` : chunk.id;
          toolCallName = toolCallDetails.name;
          toolCallId = toolCallDetails.id;
        } else if (isBaseMessageChunk(value)) {
          let chunk = (_g = (_f = value.additional_kwargs) == null ? void 0 : _f.tool_calls) == null ? void 0 : _g[0];
          toolCallName = (_h = chunk == null ? void 0 : chunk.function) == null ? void 0 : _h.name;
          toolCallId = chunk == null ? void 0 : chunk.id;
          toolCallArgs = (_i = chunk == null ? void 0 : chunk.function) == null ? void 0 : _i.arguments;
          hasToolCall = (chunk == null ? void 0 : chunk.function) != void 0;
        }
        // Close the current segment on a segment switch or end of stream.
        if (mode === "message" && (toolCallId || done)) {
          mode = null;
          eventStream$.sendTextMessageEnd({
            messageId: currentMessageId
          });
        } else if (mode === "function" && (!hasToolCall || done)) {
          mode = null;
          eventStream$.sendActionExecutionEnd({
            actionExecutionId: toolCallId
          });
        }
        if (done) {
          break;
        }
        // Open a new segment when between segments.
        if (mode === null) {
          if (hasToolCall && toolCallId && toolCallName) {
            mode = "function";
            eventStream$.sendActionExecutionStart({
              actionExecutionId: toolCallId,
              actionName: toolCallName,
              parentMessageId: (_j = value.lc_kwargs) == null ? void 0 : _j.id
            });
          } else if (content) {
            mode = "message";
            currentMessageId = ((_k = value.lc_kwargs) == null ? void 0 : _k.id) || randomId();
            eventStream$.sendTextMessageStart({
              messageId: currentMessageId
            });
          }
        }
        // Forward the chunk payload into the open segment.
        if (mode === "message" && content) {
          eventStream$.sendTextMessageContent({
            messageId: currentMessageId,
            content
          });
        } else if (mode === "function" && toolCallArgs) {
          // The model moved on to the next parallel tool call: close the old
          // one and start a new execution before streaming its args.
          if (toolCallDetails.index !== toolCallDetails.prevIndex) {
            eventStream$.sendActionExecutionEnd({
              actionExecutionId: toolCallId
            });
            eventStream$.sendActionExecutionStart({
              actionExecutionId: toolCallId,
              actionName: toolCallName,
              parentMessageId: (_l = value.lc_kwargs) == null ? void 0 : _l.id
            });
            toolCallDetails.prevIndex = toolCallDetails.index;
          }
          eventStream$.sendActionExecutionArgs({
            actionExecutionId: toolCallId,
            args: toolCallArgs
          });
        }
      } catch (error) {
        // Best-effort: a read failure ends the stream instead of crashing it.
        console.error("Error reading from stream", error);
        break;
      }
    }
  } else if (actionExecution) {
    // Arbitrary value used as a tool result — JSON-encode it.
    eventStream$.sendActionExecutionResult({
      actionExecutionId: actionExecution.id,
      actionName: actionExecution.name,
      result: encodeResult(result)
    });
  } else {
    throw new Error("Invalid return type from LangChain function.");
  }
  eventStream$.complete();
}
__name(streamLangChainResponse, "streamLangChainResponse");
573
/** Serialize a tool result: "" for undefined, strings as-is, else JSON. */
function encodeResult(result) {
  if (result === void 0) {
    return "";
  }
  return typeof result === "string" ? result : JSON.stringify(result);
}
__name(encodeResult, "encodeResult");
583
+
584
+ // src/service-adapters/langchain/langchain-adapter.ts
585
+ import { randomUUID as randomUUID2 } from "@copilotkit/shared";
586
+ import { awaitAllCallbacks } from "@langchain/core/callbacks/promises";
587
/**
 * Service adapter backed by a user-supplied LangChain handler. Provide
 * `options.chainFn` with your custom LangChain logic; its result is streamed
 * back to the client via streamLangChainResponse.
 */
var LangChainAdapter = class {
  options;
  get name() {
    return "LangChainAdapter";
  }
  constructor(options) {
    this.options = options;
  }
  /** Run the chain for one request and stream its result as CopilotKit events. */
  async process(request) {
    try {
      const { eventSource, model, actions, messages, runId, threadId: threadIdFromRequest } = request;
      const threadId = threadIdFromRequest ?? randomUUID2();
      const chainInput = {
        messages: messages.map(convertMessageToLangChainMessage),
        tools: actions.map(convertActionInputToLangChainTool),
        model,
        threadId,
        runId,
      };
      const result = await this.options.chainFn(chainInput);
      eventSource.stream(async (eventStream$) => {
        await streamLangChainResponse({ result, eventStream$ });
      });
      return { threadId };
    } finally {
      // Let LangChain flush its callback handlers before returning.
      await awaitAllCallbacks();
    }
  }
};
__name(LangChainAdapter, "LangChainAdapter");
624
+
625
+ // src/service-adapters/google/google-genai-adapter.ts
626
+ import { ChatGoogle } from "@langchain/google-gauth";
627
+ import { AIMessage as AIMessage2 } from "@langchain/core/messages";
628
+ var DEFAULT_MODEL2 = "gemini-1.5-pro";
629
+ var GoogleGenerativeAIAdapter = class extends LangChainAdapter {
630
+ provider = "google";
631
+ model = DEFAULT_MODEL2;
632
+ constructor(options) {
633
+ super({
634
+ chainFn: async ({ messages, tools, threadId }) => {
635
+ const filteredMessages = messages.filter((message) => {
636
+ if (!(message instanceof AIMessage2)) {
637
+ return true;
638
+ }
639
+ return message.content && String(message.content).trim().length > 0 || message.tool_calls && message.tool_calls.length > 0;
640
+ });
641
+ this.model = (options == null ? void 0 : options.model) ?? "gemini-1.5-pro";
642
+ const model = new ChatGoogle({
643
+ apiKey: (options == null ? void 0 : options.apiKey) ?? process.env.GOOGLE_API_KEY,
644
+ modelName: this.model,
645
+ apiVersion: "v1beta"
646
+ }).bindTools(tools);
647
+ return model.stream(filteredMessages, {
648
+ metadata: {
649
+ conversation_id: threadId
650
+ }
651
+ });
652
+ }
653
+ });
654
+ }
655
+ };
656
+ __name(GoogleGenerativeAIAdapter, "GoogleGenerativeAIAdapter");
657
+
658
+ // src/service-adapters/openai/openai-assistant-adapter.ts
659
+ import OpenAI2 from "openai";
660
+ var OpenAIAssistantAdapter = class {
661
+ openai;
662
+ codeInterpreterEnabled;
663
+ assistantId;
664
+ fileSearchEnabled;
665
+ disableParallelToolCalls;
666
+ keepSystemRole = false;
667
+ get name() {
668
+ return "OpenAIAssistantAdapter";
669
+ }
670
+ constructor(params) {
671
+ this.openai = params.openai || new OpenAI2({});
672
+ this.codeInterpreterEnabled = params.codeInterpreterEnabled === false || true;
673
+ this.fileSearchEnabled = params.fileSearchEnabled === false || true;
674
+ this.assistantId = params.assistantId;
675
+ this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
676
+ this.keepSystemRole = (params == null ? void 0 : params.keepSystemRole) ?? false;
677
+ }
678
+ async process(request) {
679
+ var _a, _b;
680
+ const { messages, actions, eventSource, runId, forwardedParameters } = request;
681
+ let threadId = (_b = (_a = request.extensions) == null ? void 0 : _a.openaiAssistantAPI) == null ? void 0 : _b.threadId;
682
+ if (!threadId) {
683
+ threadId = (await this.openai.beta.threads.create()).id;
684
+ }
685
+ const lastMessage = messages.at(-1);
686
+ let nextRunId = void 0;
687
+ if (lastMessage.isResultMessage() && runId) {
688
+ nextRunId = await this.submitToolOutputs(threadId, runId, messages, eventSource);
689
+ } else if (lastMessage.isTextMessage()) {
690
+ nextRunId = await this.submitUserMessage(threadId, messages, actions, eventSource, forwardedParameters);
691
+ } else {
692
+ throw new Error("No actionable message found in the messages");
693
+ }
694
+ return {
695
+ runId: nextRunId,
696
+ threadId,
697
+ extensions: {
698
+ ...request.extensions,
699
+ openaiAssistantAPI: {
700
+ threadId,
701
+ runId: nextRunId
702
+ }
703
+ }
704
+ };
705
+ }
706
+ async submitToolOutputs(threadId, runId, messages, eventSource) {
707
+ let run = await this.openai.beta.threads.runs.retrieve(threadId, runId);
708
+ if (!run.required_action) {
709
+ throw new Error("No tool outputs required");
710
+ }
711
+ const toolCallsIds = run.required_action.submit_tool_outputs.tool_calls.map((toolCall) => toolCall.id);
712
+ const resultMessages = messages.filter((message) => message.isResultMessage() && toolCallsIds.includes(message.actionExecutionId));
713
+ if (toolCallsIds.length != resultMessages.length) {
714
+ throw new Error("Number of function results does not match the number of tool calls");
715
+ }
716
+ const toolOutputs = resultMessages.map((message) => {
717
+ return {
718
+ tool_call_id: message.actionExecutionId,
719
+ output: message.result
720
+ };
721
+ });
722
+ const stream = this.openai.beta.threads.runs.submitToolOutputsStream(threadId, runId, {
723
+ tool_outputs: toolOutputs,
724
+ ...this.disableParallelToolCalls && {
725
+ parallel_tool_calls: false
726
+ }
727
+ });
728
+ await this.streamResponse(stream, eventSource);
729
+ return runId;
730
+ }
731
+ async submitUserMessage(threadId, messages, actions, eventSource, forwardedParameters) {
732
+ messages = [
733
+ ...messages
734
+ ];
735
+ const instructionsMessage = messages.shift();
736
+ const instructions = instructionsMessage.isTextMessage() ? instructionsMessage.content : "";
737
+ const userMessage = messages.map((m) => convertMessageToOpenAIMessage(m, {
738
+ keepSystemRole: this.keepSystemRole
739
+ })).map(convertSystemMessageToAssistantAPI).at(-1);
740
+ if (userMessage.role !== "user") {
741
+ throw new Error("No user message found");
742
+ }
743
+ await this.openai.beta.threads.messages.create(threadId, {
744
+ role: "user",
745
+ content: userMessage.content
746
+ });
747
+ const openaiTools = actions.map(convertActionInputToOpenAITool);
748
+ const tools = [
749
+ ...openaiTools,
750
+ ...this.codeInterpreterEnabled ? [
751
+ {
752
+ type: "code_interpreter"
753
+ }
754
+ ] : [],
755
+ ...this.fileSearchEnabled ? [
756
+ {
757
+ type: "file_search"
758
+ }
759
+ ] : []
760
+ ];
761
+ let stream = this.openai.beta.threads.runs.stream(threadId, {
762
+ assistant_id: this.assistantId,
763
+ instructions,
764
+ tools,
765
+ ...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
766
+ max_completion_tokens: forwardedParameters.maxTokens
767
+ },
768
+ ...this.disableParallelToolCalls && {
769
+ parallel_tool_calls: false
770
+ }
771
+ });
772
+ await this.streamResponse(stream, eventSource);
773
+ return getRunIdFromStream(stream);
774
+ }
775
+ async streamResponse(stream, eventSource) {
776
+ eventSource.stream(async (eventStream$) => {
777
+ var _a, _b, _c, _d, _e, _f;
778
+ let inFunctionCall = false;
779
+ let currentMessageId;
780
+ let currentToolCallId;
781
+ for await (const chunk of stream) {
782
+ switch (chunk.event) {
783
+ case "thread.message.created":
784
+ if (inFunctionCall) {
785
+ eventStream$.sendActionExecutionEnd({
786
+ actionExecutionId: currentToolCallId
787
+ });
788
+ }
789
+ currentMessageId = chunk.data.id;
790
+ eventStream$.sendTextMessageStart({
791
+ messageId: currentMessageId
792
+ });
793
+ break;
794
+ case "thread.message.delta":
795
+ if (((_a = chunk.data.delta.content) == null ? void 0 : _a[0].type) === "text") {
796
+ eventStream$.sendTextMessageContent({
797
+ messageId: currentMessageId,
798
+ content: (_b = chunk.data.delta.content) == null ? void 0 : _b[0].text.value
799
+ });
800
+ }
801
+ break;
802
+ case "thread.message.completed":
803
+ eventStream$.sendTextMessageEnd({
804
+ messageId: currentMessageId
805
+ });
806
+ break;
807
+ case "thread.run.step.delta":
808
+ let toolCallId;
809
+ let toolCallName;
810
+ let toolCallArgs;
811
+ if (chunk.data.delta.step_details.type === "tool_calls" && ((_c = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _c[0].type) === "function") {
812
+ toolCallId = (_d = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _d[0].id;
813
+ toolCallName = (_e = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _e[0].function.name;
814
+ toolCallArgs = (_f = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _f[0].function.arguments;
815
+ }
816
+ if (toolCallName && toolCallId) {
817
+ if (inFunctionCall) {
818
+ eventStream$.sendActionExecutionEnd({
819
+ actionExecutionId: currentToolCallId
820
+ });
821
+ }
822
+ inFunctionCall = true;
823
+ currentToolCallId = toolCallId;
824
+ eventStream$.sendActionExecutionStart({
825
+ actionExecutionId: currentToolCallId,
826
+ parentMessageId: chunk.data.id,
827
+ actionName: toolCallName
828
+ });
829
+ } else if (toolCallArgs) {
830
+ eventStream$.sendActionExecutionArgs({
831
+ actionExecutionId: currentToolCallId,
832
+ args: toolCallArgs
833
+ });
834
+ }
835
+ break;
836
+ }
837
+ }
838
+ if (inFunctionCall) {
839
+ eventStream$.sendActionExecutionEnd({
840
+ actionExecutionId: currentToolCallId
841
+ });
842
+ }
843
+ eventStream$.complete();
844
+ });
845
+ }
846
+ };
847
+ __name(OpenAIAssistantAdapter, "OpenAIAssistantAdapter");
848
+ function getRunIdFromStream(stream) {
849
+ return new Promise((resolve, reject) => {
850
+ let runIdGetter = /* @__PURE__ */ __name((event) => {
851
+ if (event.event === "thread.run.created") {
852
+ const runId = event.data.id;
853
+ stream.off("event", runIdGetter);
854
+ resolve(runId);
855
+ }
856
+ }, "runIdGetter");
857
+ stream.on("event", runIdGetter);
858
+ });
859
+ }
860
+ __name(getRunIdFromStream, "getRunIdFromStream");
861
+
862
+ // src/service-adapters/unify/unify-adapter.ts
863
+ import OpenAI3 from "openai";
864
+ import { randomId as randomId2, randomUUID as randomUUID3 } from "@copilotkit/shared";
865
+ var UnifyAdapter = class {
866
+ apiKey;
867
+ model;
868
+ start;
869
+ provider = "unify";
870
+ get name() {
871
+ return "UnifyAdapter";
872
+ }
873
+ constructor(options) {
874
+ if (options == null ? void 0 : options.apiKey) {
875
+ this.apiKey = options.apiKey;
876
+ } else {
877
+ this.apiKey = "UNIFY_API_KEY";
878
+ }
879
+ this.model = options == null ? void 0 : options.model;
880
+ this.start = true;
881
+ }
882
+ async process(request) {
883
+ const tools = request.actions.map(convertActionInputToOpenAITool);
884
+ const openai = new OpenAI3({
885
+ apiKey: this.apiKey,
886
+ baseURL: "https://api.unify.ai/v0/"
887
+ });
888
+ const forwardedParameters = request.forwardedParameters;
889
+ const messages = request.messages.map((m) => convertMessageToOpenAIMessage(m));
890
+ const stream = await openai.chat.completions.create({
891
+ model: this.model,
892
+ messages,
893
+ stream: true,
894
+ ...tools.length > 0 && {
895
+ tools
896
+ },
897
+ ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
898
+ temperature: forwardedParameters.temperature
899
+ }
900
+ });
901
+ let model = null;
902
+ let currentMessageId;
903
+ let currentToolCallId;
904
+ request.eventSource.stream(async (eventStream$) => {
905
+ var _a, _b;
906
+ let mode = null;
907
+ for await (const chunk of stream) {
908
+ if (this.start) {
909
+ model = chunk.model;
910
+ currentMessageId = randomId2();
911
+ eventStream$.sendTextMessageStart({
912
+ messageId: currentMessageId
913
+ });
914
+ eventStream$.sendTextMessageContent({
915
+ messageId: currentMessageId,
916
+ content: `Model used: ${model}
917
+ `
918
+ });
919
+ eventStream$.sendTextMessageEnd({
920
+ messageId: currentMessageId
921
+ });
922
+ this.start = false;
923
+ }
924
+ const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
925
+ const content = chunk.choices[0].delta.content;
926
+ if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
927
+ mode = null;
928
+ eventStream$.sendTextMessageEnd({
929
+ messageId: currentMessageId
930
+ });
931
+ } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
932
+ mode = null;
933
+ eventStream$.sendActionExecutionEnd({
934
+ actionExecutionId: currentToolCallId
935
+ });
936
+ }
937
+ if (mode === null) {
938
+ if (toolCall == null ? void 0 : toolCall.id) {
939
+ mode = "function";
940
+ currentToolCallId = toolCall.id;
941
+ eventStream$.sendActionExecutionStart({
942
+ actionExecutionId: currentToolCallId,
943
+ actionName: toolCall.function.name
944
+ });
945
+ } else if (content) {
946
+ mode = "message";
947
+ currentMessageId = chunk.id;
948
+ eventStream$.sendTextMessageStart({
949
+ messageId: currentMessageId
950
+ });
951
+ }
952
+ }
953
+ if (mode === "message" && content) {
954
+ eventStream$.sendTextMessageContent({
955
+ messageId: currentMessageId,
956
+ content
957
+ });
958
+ } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
959
+ eventStream$.sendActionExecutionArgs({
960
+ actionExecutionId: currentToolCallId,
961
+ args: toolCall.function.arguments
962
+ });
963
+ }
964
+ }
965
+ if (mode === "message") {
966
+ eventStream$.sendTextMessageEnd({
967
+ messageId: currentMessageId
968
+ });
969
+ } else if (mode === "function") {
970
+ eventStream$.sendActionExecutionEnd({
971
+ actionExecutionId: currentToolCallId
972
+ });
973
+ }
974
+ eventStream$.complete();
975
+ });
976
+ return {
977
+ threadId: request.threadId || randomUUID3()
978
+ };
979
+ }
980
+ };
981
+ __name(UnifyAdapter, "UnifyAdapter");
982
+
983
+ // src/service-adapters/groq/groq-adapter.ts
984
+ import { Groq } from "groq-sdk";
985
+ import { randomUUID as randomUUID4 } from "@copilotkit/shared";
986
+ var DEFAULT_MODEL3 = "llama-3.3-70b-versatile";
987
+ var GroqAdapter = class {
988
+ model = DEFAULT_MODEL3;
989
+ provider = "groq";
990
+ disableParallelToolCalls = false;
991
+ _groq;
992
+ get groq() {
993
+ return this._groq;
994
+ }
995
+ get name() {
996
+ return "GroqAdapter";
997
+ }
998
+ constructor(params) {
999
+ this._groq = (params == null ? void 0 : params.groq) || new Groq({});
1000
+ if (params == null ? void 0 : params.model) {
1001
+ this.model = params.model;
1002
+ }
1003
+ this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
1004
+ }
1005
+ async process(request) {
1006
+ const { threadId, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
1007
+ const tools = actions.map(convertActionInputToOpenAITool);
1008
+ let openaiMessages = messages.map((m) => convertMessageToOpenAIMessage(m, {
1009
+ keepSystemRole: true
1010
+ }));
1011
+ openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);
1012
+ let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
1013
+ if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {
1014
+ toolChoice = {
1015
+ type: "function",
1016
+ function: {
1017
+ name: forwardedParameters.toolChoiceFunctionName
1018
+ }
1019
+ };
1020
+ }
1021
+ let stream;
1022
+ try {
1023
+ stream = await this.groq.chat.completions.create({
1024
+ model,
1025
+ stream: true,
1026
+ messages: openaiMessages,
1027
+ ...tools.length > 0 && {
1028
+ tools
1029
+ },
1030
+ ...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
1031
+ max_tokens: forwardedParameters.maxTokens
1032
+ },
1033
+ ...(forwardedParameters == null ? void 0 : forwardedParameters.stop) && {
1034
+ stop: forwardedParameters.stop
1035
+ },
1036
+ ...toolChoice && {
1037
+ tool_choice: toolChoice
1038
+ },
1039
+ ...this.disableParallelToolCalls && {
1040
+ parallel_tool_calls: false
1041
+ },
1042
+ ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
1043
+ temperature: forwardedParameters.temperature
1044
+ }
1045
+ });
1046
+ } catch (error) {
1047
+ throw convertServiceAdapterError(error, "Groq");
1048
+ }
1049
+ eventSource.stream(async (eventStream$) => {
1050
+ var _a, _b;
1051
+ let mode = null;
1052
+ let currentMessageId;
1053
+ let currentToolCallId;
1054
+ try {
1055
+ for await (const chunk of stream) {
1056
+ const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
1057
+ const content = chunk.choices[0].delta.content;
1058
+ if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
1059
+ mode = null;
1060
+ eventStream$.sendTextMessageEnd({
1061
+ messageId: currentMessageId
1062
+ });
1063
+ } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
1064
+ mode = null;
1065
+ eventStream$.sendActionExecutionEnd({
1066
+ actionExecutionId: currentToolCallId
1067
+ });
1068
+ }
1069
+ if (mode === null) {
1070
+ if (toolCall == null ? void 0 : toolCall.id) {
1071
+ mode = "function";
1072
+ currentToolCallId = toolCall.id;
1073
+ eventStream$.sendActionExecutionStart({
1074
+ actionExecutionId: currentToolCallId,
1075
+ actionName: toolCall.function.name,
1076
+ parentMessageId: chunk.id
1077
+ });
1078
+ } else if (content) {
1079
+ mode = "message";
1080
+ currentMessageId = chunk.id;
1081
+ eventStream$.sendTextMessageStart({
1082
+ messageId: currentMessageId
1083
+ });
1084
+ }
1085
+ }
1086
+ if (mode === "message" && content) {
1087
+ eventStream$.sendTextMessageContent({
1088
+ messageId: currentMessageId,
1089
+ content
1090
+ });
1091
+ } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
1092
+ eventStream$.sendActionExecutionArgs({
1093
+ actionExecutionId: currentToolCallId,
1094
+ args: toolCall.function.arguments
1095
+ });
1096
+ }
1097
+ }
1098
+ if (mode === "message") {
1099
+ eventStream$.sendTextMessageEnd({
1100
+ messageId: currentMessageId
1101
+ });
1102
+ } else if (mode === "function") {
1103
+ eventStream$.sendActionExecutionEnd({
1104
+ actionExecutionId: currentToolCallId
1105
+ });
1106
+ }
1107
+ } catch (error) {
1108
+ throw convertServiceAdapterError(error, "Groq");
1109
+ }
1110
+ eventStream$.complete();
1111
+ });
1112
+ return {
1113
+ threadId: request.threadId || randomUUID4()
1114
+ };
1115
+ }
1116
+ };
1117
+ __name(GroqAdapter, "GroqAdapter");
1118
+
1119
+ export {
1120
+ OpenAIAdapter,
1121
+ LangChainAdapter,
1122
+ GoogleGenerativeAIAdapter,
1123
+ OpenAIAssistantAdapter,
1124
+ UnifyAdapter,
1125
+ GroqAdapter
1126
+ };
1127
+ //# sourceMappingURL=chunk-DCEEHMLJ.mjs.map