@copilotkit/runtime 1.50.0-beta.1 → 1.50.0-beta.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (132)
  1. package/CHANGELOG.md +85 -0
  2. package/dist/chunk-2OZAGFV3.mjs +43 -0
  3. package/dist/chunk-2OZAGFV3.mjs.map +1 -0
  4. package/dist/chunk-62NE5S6M.mjs +226 -0
  5. package/dist/chunk-62NE5S6M.mjs.map +1 -0
  6. package/dist/chunk-6XRUR5UK.mjs +1 -0
  7. package/dist/chunk-6XRUR5UK.mjs.map +1 -0
  8. package/dist/chunk-AMUJQ6IR.mjs +50 -0
  9. package/dist/chunk-AMUJQ6IR.mjs.map +1 -0
  10. package/dist/chunk-BJEYMRDD.mjs +25 -0
  11. package/dist/chunk-BJEYMRDD.mjs.map +1 -0
  12. package/dist/chunk-DZV4ZIAR.mjs +3063 -0
  13. package/dist/chunk-DZV4ZIAR.mjs.map +1 -0
  14. package/dist/chunk-FHD4JECV.mjs +33 -0
  15. package/dist/chunk-FHD4JECV.mjs.map +1 -0
  16. package/dist/chunk-FMU55SEU.mjs +25 -0
  17. package/dist/chunk-FMU55SEU.mjs.map +1 -0
  18. package/dist/chunk-OWIGJONH.mjs +275 -0
  19. package/dist/chunk-OWIGJONH.mjs.map +1 -0
  20. package/dist/chunk-SBCOROE4.mjs +1112 -0
  21. package/dist/chunk-SBCOROE4.mjs.map +1 -0
  22. package/dist/chunk-TTUAEJLD.mjs +617 -0
  23. package/dist/chunk-TTUAEJLD.mjs.map +1 -0
  24. package/dist/chunk-XWBDEXDA.mjs +153 -0
  25. package/dist/chunk-XWBDEXDA.mjs.map +1 -0
  26. package/dist/chunk-Z752VE75.mjs +74 -0
  27. package/dist/chunk-Z752VE75.mjs.map +1 -0
  28. package/dist/graphql/message-conversion/index.d.ts +18 -0
  29. package/dist/graphql/message-conversion/index.js +725 -0
  30. package/dist/graphql/message-conversion/index.js.map +1 -0
  31. package/dist/graphql/message-conversion/index.mjs +245 -0
  32. package/dist/graphql/message-conversion/index.mjs.map +1 -0
  33. package/dist/graphql/types/base/index.d.ts +6 -0
  34. package/dist/graphql/types/base/index.js +63 -0
  35. package/dist/graphql/types/base/index.js.map +1 -0
  36. package/dist/graphql/types/base/index.mjs +8 -0
  37. package/dist/graphql/types/base/index.mjs.map +1 -0
  38. package/dist/graphql/types/converted/index.d.ts +2 -0
  39. package/dist/graphql/types/converted/index.js +294 -0
  40. package/dist/graphql/types/converted/index.js.map +1 -0
  41. package/dist/graphql/types/converted/index.mjs +20 -0
  42. package/dist/graphql/types/converted/index.mjs.map +1 -0
  43. package/dist/groq-adapter-50bc6e4a.d.ts +326 -0
  44. package/dist/index-adbd78f1.d.ts +154 -0
  45. package/dist/index.d.ts +136 -287
  46. package/dist/index.js +414 -293
  47. package/dist/index.js.map +1 -1
  48. package/dist/index.mjs +407 -283
  49. package/dist/index.mjs.map +1 -1
  50. package/dist/langgraph.d.ts +284 -0
  51. package/dist/langgraph.js +211 -0
  52. package/dist/langgraph.js.map +1 -0
  53. package/dist/langgraph.mjs +206 -0
  54. package/dist/langgraph.mjs.map +1 -0
  55. package/dist/langserve-74a52292.d.ts +242 -0
  56. package/dist/lib/cloud/index.d.ts +6 -0
  57. package/dist/lib/cloud/index.js +18 -0
  58. package/dist/lib/cloud/index.js.map +1 -0
  59. package/dist/lib/cloud/index.mjs +1 -0
  60. package/dist/lib/cloud/index.mjs.map +1 -0
  61. package/dist/lib/index.d.ts +266 -0
  62. package/dist/lib/index.js +4944 -0
  63. package/dist/lib/index.js.map +1 -0
  64. package/dist/lib/index.mjs +74 -0
  65. package/dist/lib/index.mjs.map +1 -0
  66. package/dist/lib/integrations/index.d.ts +28 -0
  67. package/dist/lib/integrations/index.js +3024 -0
  68. package/dist/lib/integrations/index.js.map +1 -0
  69. package/dist/lib/integrations/index.mjs +36 -0
  70. package/dist/lib/integrations/index.mjs.map +1 -0
  71. package/dist/lib/integrations/nest/index.d.ts +16 -0
  72. package/dist/lib/integrations/nest/index.js +2937 -0
  73. package/dist/lib/integrations/nest/index.js.map +1 -0
  74. package/dist/lib/integrations/nest/index.mjs +13 -0
  75. package/dist/lib/integrations/nest/index.mjs.map +1 -0
  76. package/dist/lib/integrations/node-express/index.d.ts +16 -0
  77. package/dist/lib/integrations/node-express/index.js +2937 -0
  78. package/dist/lib/integrations/node-express/index.js.map +1 -0
  79. package/dist/lib/integrations/node-express/index.mjs +13 -0
  80. package/dist/lib/integrations/node-express/index.mjs.map +1 -0
  81. package/dist/lib/integrations/node-http/index.d.ts +16 -0
  82. package/dist/lib/integrations/node-http/index.js +2923 -0
  83. package/dist/lib/integrations/node-http/index.js.map +1 -0
  84. package/dist/lib/integrations/node-http/index.mjs +12 -0
  85. package/dist/lib/integrations/node-http/index.mjs.map +1 -0
  86. package/dist/service-adapters/index.d.ts +166 -0
  87. package/dist/service-adapters/index.js +1800 -0
  88. package/dist/service-adapters/index.js.map +1 -0
  89. package/dist/service-adapters/index.mjs +36 -0
  90. package/dist/service-adapters/index.mjs.map +1 -0
  91. package/dist/service-adapters/shared/index.d.ts +9 -0
  92. package/dist/service-adapters/shared/index.js +72 -0
  93. package/dist/service-adapters/shared/index.js.map +1 -0
  94. package/dist/service-adapters/shared/index.mjs +8 -0
  95. package/dist/service-adapters/shared/index.mjs.map +1 -0
  96. package/dist/shared-f6d43ef8.d.ts +446 -0
  97. package/dist/utils/index.d.ts +65 -0
  98. package/dist/utils/index.js +175 -0
  99. package/dist/utils/index.js.map +1 -0
  100. package/dist/utils/index.mjs +12 -0
  101. package/dist/utils/index.mjs.map +1 -0
  102. package/dist/v2/index.d.ts +1 -0
  103. package/dist/v2/index.js +7 -0
  104. package/dist/v2/index.js.map +1 -1
  105. package/dist/v2/index.mjs +1 -0
  106. package/dist/v2/index.mjs.map +1 -1
  107. package/package.json +56 -18
  108. package/src/graphql/message-conversion/agui-to-gql.test.ts +2 -2
  109. package/src/graphql/message-conversion/gql-to-agui.test.ts +30 -28
  110. package/src/graphql/message-conversion/roundtrip-conversion.test.ts +8 -8
  111. package/src/langgraph.ts +1 -0
  112. package/src/lib/index.ts +42 -1
  113. package/src/lib/integrations/nextjs/app-router.ts +3 -1
  114. package/src/lib/integrations/node-http/index.ts +132 -11
  115. package/src/lib/integrations/shared.ts +2 -2
  116. package/src/lib/runtime/agent-integrations/{langgraph.agent.ts → langgraph/agent.ts} +5 -30
  117. package/src/lib/runtime/agent-integrations/langgraph/consts.ts +34 -0
  118. package/src/lib/runtime/agent-integrations/langgraph/index.ts +2 -0
  119. package/src/lib/runtime/copilot-runtime.ts +86 -69
  120. package/src/lib/runtime/telemetry-agent-runner.ts +134 -0
  121. package/src/service-adapters/anthropic/anthropic-adapter.ts +16 -3
  122. package/src/service-adapters/bedrock/bedrock-adapter.ts +4 -1
  123. package/src/service-adapters/experimental/ollama/ollama-adapter.ts +2 -1
  124. package/src/service-adapters/google/google-genai-adapter.ts +9 -4
  125. package/src/service-adapters/groq/groq-adapter.ts +16 -3
  126. package/src/service-adapters/langchain/langchain-adapter.ts +5 -3
  127. package/src/service-adapters/langchain/langserve.ts +2 -1
  128. package/src/service-adapters/openai/openai-adapter.ts +17 -3
  129. package/src/service-adapters/openai/openai-assistant-adapter.ts +26 -11
  130. package/src/service-adapters/unify/unify-adapter.ts +3 -1
  131. package/src/v2/index.ts +1 -0
  132. package/tsup.config.ts +5 -2
@@ -0,0 +1,617 @@
1
+ import {
2
+ LangChainAdapter
3
+ } from "./chunk-SBCOROE4.mjs";
4
+ import {
5
+ convertServiceAdapterError
6
+ } from "./chunk-AMUJQ6IR.mjs";
7
+ import {
8
+ __name
9
+ } from "./chunk-FHD4JECV.mjs";
10
+
11
+ // src/service-adapters/langchain/langserve.ts
12
+ import { RemoteRunnable } from "langchain/runnables/remote";
13
+ var RemoteChain = class {
14
+ name;
15
+ description;
16
+ chainUrl;
17
+ parameters;
18
+ parameterType;
19
+ constructor(options) {
20
+ this.name = options.name;
21
+ this.description = options.description;
22
+ this.chainUrl = options.chainUrl;
23
+ this.parameters = options.parameters;
24
+ this.parameterType = options.parameterType || "multi";
25
+ }
26
+ async toAction() {
27
+ if (!this.parameters) {
28
+ await this.inferLangServeParameters();
29
+ }
30
+ return {
31
+ name: this.name,
32
+ description: this.description,
33
+ parameters: this.parameters,
34
+ handler: async (args) => {
35
+ const runnable = new RemoteRunnable({
36
+ url: this.chainUrl
37
+ });
38
+ let input;
39
+ if (this.parameterType === "single") {
40
+ input = args[Object.keys(args)[0]];
41
+ } else {
42
+ input = args;
43
+ }
44
+ return await runnable.invoke(input);
45
+ }
46
+ };
47
+ }
48
+ async inferLangServeParameters() {
49
+ const supportedTypes = [
50
+ "string",
51
+ "number",
52
+ "boolean"
53
+ ];
54
+ let schemaUrl = this.chainUrl.replace(/\/+$/, "") + "/input_schema";
55
+ let schema = await fetch(schemaUrl).then((res) => res.json()).catch(() => {
56
+ throw new Error("Failed to fetch langserve schema at " + schemaUrl);
57
+ });
58
+ if (supportedTypes.includes(schema.type)) {
59
+ this.parameterType = "single";
60
+ this.parameters = [
61
+ {
62
+ name: "input",
63
+ type: schema.type,
64
+ description: "The input to the chain"
65
+ }
66
+ ];
67
+ } else if (schema.type === "object") {
68
+ this.parameterType = "multi";
69
+ this.parameters = Object.keys(schema.properties).map((key) => {
70
+ var _a;
71
+ let property = schema.properties[key];
72
+ if (!supportedTypes.includes(property.type)) {
73
+ throw new Error("Unsupported schema type");
74
+ }
75
+ return {
76
+ name: key,
77
+ type: property.type,
78
+ description: property.description || "",
79
+ required: ((_a = schema.required) == null ? void 0 : _a.includes(key)) || false
80
+ };
81
+ });
82
+ } else {
83
+ throw new Error("Unsupported schema type");
84
+ }
85
+ }
86
+ };
87
+ __name(RemoteChain, "RemoteChain");
88
+
89
+ // src/service-adapters/anthropic/anthropic-adapter.ts
90
+ import Anthropic from "@anthropic-ai/sdk";
91
+
92
+ // src/service-adapters/anthropic/utils.ts
93
+ function limitMessagesToTokenCount(messages, tools, model, maxTokens) {
94
+ maxTokens || (maxTokens = MAX_TOKENS);
95
+ const result = [];
96
+ const toolsNumTokens = countToolsTokens(model, tools);
97
+ if (toolsNumTokens > maxTokens) {
98
+ throw new Error(`Too many tokens in function definitions: ${toolsNumTokens} > ${maxTokens}`);
99
+ }
100
+ maxTokens -= toolsNumTokens;
101
+ for (const message of messages) {
102
+ if (message.role === "system") {
103
+ const numTokens = countMessageTokens(model, message);
104
+ maxTokens -= numTokens;
105
+ if (maxTokens < 0) {
106
+ throw new Error("Not enough tokens for system message.");
107
+ }
108
+ }
109
+ }
110
+ let cutoff = false;
111
+ const reversedMessages = [
112
+ ...messages
113
+ ].reverse();
114
+ for (const message of reversedMessages) {
115
+ if (message.role === "system") {
116
+ result.unshift(message);
117
+ continue;
118
+ } else if (cutoff) {
119
+ continue;
120
+ }
121
+ let numTokens = countMessageTokens(model, message);
122
+ if (maxTokens < numTokens) {
123
+ cutoff = true;
124
+ continue;
125
+ }
126
+ result.unshift(message);
127
+ maxTokens -= numTokens;
128
+ }
129
+ return result;
130
+ }
131
+ __name(limitMessagesToTokenCount, "limitMessagesToTokenCount");
132
+ var MAX_TOKENS = 128e3;
133
+ function countToolsTokens(model, tools) {
134
+ if (tools.length === 0) {
135
+ return 0;
136
+ }
137
+ const json = JSON.stringify(tools);
138
+ return countTokens(model, json);
139
+ }
140
+ __name(countToolsTokens, "countToolsTokens");
141
+ function countMessageTokens(model, message) {
142
+ return countTokens(model, JSON.stringify(message.content) || "");
143
+ }
144
+ __name(countMessageTokens, "countMessageTokens");
145
+ function countTokens(model, text) {
146
+ return text.length / 3;
147
+ }
148
+ __name(countTokens, "countTokens");
149
+ function convertActionInputToAnthropicTool(action) {
150
+ return {
151
+ name: action.name,
152
+ description: action.description,
153
+ input_schema: JSON.parse(action.jsonSchema)
154
+ };
155
+ }
156
+ __name(convertActionInputToAnthropicTool, "convertActionInputToAnthropicTool");
157
+ function convertMessageToAnthropicMessage(message) {
158
+ if (message.isTextMessage()) {
159
+ if (message.role === "system") {
160
+ return {
161
+ role: "assistant",
162
+ content: [
163
+ {
164
+ type: "text",
165
+ text: "THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: " + message.content
166
+ }
167
+ ]
168
+ };
169
+ } else {
170
+ return {
171
+ role: message.role === "user" ? "user" : "assistant",
172
+ content: [
173
+ {
174
+ type: "text",
175
+ text: message.content
176
+ }
177
+ ]
178
+ };
179
+ }
180
+ } else if (message.isImageMessage()) {
181
+ let mediaType;
182
+ switch (message.format) {
183
+ case "jpeg":
184
+ mediaType = "image/jpeg";
185
+ break;
186
+ case "png":
187
+ mediaType = "image/png";
188
+ break;
189
+ case "webp":
190
+ mediaType = "image/webp";
191
+ break;
192
+ case "gif":
193
+ mediaType = "image/gif";
194
+ break;
195
+ default:
196
+ throw new Error(`Unsupported image format: ${message.format}`);
197
+ }
198
+ return {
199
+ role: "user",
200
+ content: [
201
+ {
202
+ type: "image",
203
+ source: {
204
+ type: "base64",
205
+ media_type: mediaType,
206
+ data: message.bytes
207
+ }
208
+ }
209
+ ]
210
+ };
211
+ } else if (message.isActionExecutionMessage()) {
212
+ return {
213
+ role: "assistant",
214
+ content: [
215
+ {
216
+ id: message.id,
217
+ type: "tool_use",
218
+ input: message.arguments,
219
+ name: message.name
220
+ }
221
+ ]
222
+ };
223
+ } else if (message.isResultMessage()) {
224
+ return {
225
+ role: "user",
226
+ content: [
227
+ {
228
+ type: "tool_result",
229
+ content: message.result || "Action completed successfully",
230
+ tool_use_id: message.actionExecutionId
231
+ }
232
+ ]
233
+ };
234
+ }
235
+ }
236
+ __name(convertMessageToAnthropicMessage, "convertMessageToAnthropicMessage");
237
+
238
+ // src/service-adapters/anthropic/anthropic-adapter.ts
239
+ import { randomId, randomUUID } from "@copilotkit/shared";
240
+ var DEFAULT_MODEL = "claude-3-5-sonnet-latest";
241
+ var AnthropicAdapter = class {
242
+ model = DEFAULT_MODEL;
243
+ provider = "anthropic";
244
+ promptCaching;
245
+ _anthropic;
246
+ get anthropic() {
247
+ return this._anthropic;
248
+ }
249
+ constructor(params) {
250
+ this._anthropic = (params == null ? void 0 : params.anthropic) || new Anthropic({});
251
+ if (params == null ? void 0 : params.model) {
252
+ this.model = params.model;
253
+ }
254
+ this.promptCaching = (params == null ? void 0 : params.promptCaching) || {
255
+ enabled: false
256
+ };
257
+ }
258
+ /**
259
+ * Adds cache control to system prompt
260
+ */
261
+ addSystemPromptCaching(system, debug = false) {
262
+ if (!this.promptCaching.enabled || !system) {
263
+ return system;
264
+ }
265
+ const originalTextLength = system.length;
266
+ if (debug) {
267
+ console.log(`[ANTHROPIC CACHE DEBUG] Added cache control to system prompt (${originalTextLength} chars).`);
268
+ }
269
+ return [
270
+ {
271
+ type: "text",
272
+ text: system,
273
+ cache_control: {
274
+ type: "ephemeral"
275
+ }
276
+ }
277
+ ];
278
+ }
279
+ /**
280
+ * Adds cache control to the final message
281
+ */
282
+ addIncrementalMessageCaching(messages, debug = false) {
283
+ if (!this.promptCaching.enabled || messages.length === 0) {
284
+ return messages;
285
+ }
286
+ const finalMessage = messages[messages.length - 1];
287
+ const messageNumber = messages.length;
288
+ if (Array.isArray(finalMessage.content) && finalMessage.content.length > 0) {
289
+ const finalBlock = finalMessage.content[finalMessage.content.length - 1];
290
+ const updatedMessages = [
291
+ ...messages.slice(0, -1),
292
+ {
293
+ ...finalMessage,
294
+ content: [
295
+ ...finalMessage.content.slice(0, -1),
296
+ {
297
+ ...finalBlock,
298
+ cache_control: {
299
+ type: "ephemeral"
300
+ }
301
+ }
302
+ ]
303
+ }
304
+ ];
305
+ if (debug) {
306
+ console.log(`[ANTHROPIC CACHE DEBUG] Added cache control to final message (message ${messageNumber}).`);
307
+ }
308
+ return updatedMessages;
309
+ }
310
+ return messages;
311
+ }
312
+ shouldGenerateFallbackResponse(messages) {
313
+ var _a, _b, _c;
314
+ if (messages.length === 0)
315
+ return false;
316
+ const lastMessage = messages[messages.length - 1];
317
+ const endsWithToolResult = lastMessage.role === "user" && Array.isArray(lastMessage.content) && lastMessage.content.some((content) => content.type === "tool_result");
318
+ if (messages.length >= 3 && endsWithToolResult) {
319
+ const lastThree = messages.slice(-3);
320
+ const hasRecentToolPattern = ((_a = lastThree[0]) == null ? void 0 : _a.role) === "user" && // Initial user message
321
+ ((_b = lastThree[1]) == null ? void 0 : _b.role) === "assistant" && // Assistant tool use
322
+ Array.isArray(lastThree[1].content) && lastThree[1].content.some((content) => content.type === "tool_use") && ((_c = lastThree[2]) == null ? void 0 : _c.role) === "user" && // Tool result
323
+ Array.isArray(lastThree[2].content) && lastThree[2].content.some((content) => content.type === "tool_result");
324
+ return hasRecentToolPattern;
325
+ }
326
+ return endsWithToolResult;
327
+ }
328
+ async process(request) {
329
+ const { threadId, model = this.model, messages: rawMessages, actions, eventSource, forwardedParameters } = request;
330
+ const tools = actions.map(convertActionInputToAnthropicTool);
331
+ const messages = [
332
+ ...rawMessages
333
+ ];
334
+ const instructionsMessage = messages.shift();
335
+ const instructions = instructionsMessage.isTextMessage() ? instructionsMessage.content : "";
336
+ const validToolUseIds = /* @__PURE__ */ new Set();
337
+ for (const message of messages) {
338
+ if (message.isActionExecutionMessage()) {
339
+ validToolUseIds.add(message.id);
340
+ }
341
+ }
342
+ const processedToolResultIds = /* @__PURE__ */ new Set();
343
+ const anthropicMessages = messages.map((message) => {
344
+ if (message.isResultMessage()) {
345
+ if (!validToolUseIds.has(message.actionExecutionId)) {
346
+ return null;
347
+ }
348
+ if (processedToolResultIds.has(message.actionExecutionId)) {
349
+ return null;
350
+ }
351
+ processedToolResultIds.add(message.actionExecutionId);
352
+ return {
353
+ role: "user",
354
+ content: [
355
+ {
356
+ type: "tool_result",
357
+ content: message.result || "Action completed successfully",
358
+ tool_use_id: message.actionExecutionId
359
+ }
360
+ ]
361
+ };
362
+ }
363
+ return convertMessageToAnthropicMessage(message);
364
+ }).filter(Boolean).filter((msg) => {
365
+ if (msg.role === "assistant" && Array.isArray(msg.content)) {
366
+ const hasEmptyTextOnly = msg.content.length === 1 && msg.content[0].type === "text" && (!msg.content[0].text || msg.content[0].text.trim() === "");
367
+ return !hasEmptyTextOnly;
368
+ }
369
+ return true;
370
+ });
371
+ const limitedMessages = limitMessagesToTokenCount(anthropicMessages, tools, model);
372
+ const cachedSystemPrompt = this.addSystemPromptCaching(instructions, this.promptCaching.debug);
373
+ const cachedMessages = this.addIncrementalMessageCaching(limitedMessages, this.promptCaching.debug);
374
+ let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
375
+ if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {
376
+ toolChoice = {
377
+ type: "tool",
378
+ name: forwardedParameters.toolChoiceFunctionName
379
+ };
380
+ }
381
+ try {
382
+ const createParams = {
383
+ system: cachedSystemPrompt,
384
+ model: this.model,
385
+ messages: cachedMessages,
386
+ max_tokens: (forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) || 1024,
387
+ ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) ? {
388
+ temperature: forwardedParameters.temperature
389
+ } : {},
390
+ ...tools.length > 0 && {
391
+ tools
392
+ },
393
+ ...toolChoice && {
394
+ tool_choice: toolChoice
395
+ },
396
+ stream: true
397
+ };
398
+ const stream = await this.anthropic.messages.create(createParams);
399
+ eventSource.stream(async (eventStream$) => {
400
+ let mode = null;
401
+ let didOutputText = false;
402
+ let currentMessageId = randomId();
403
+ let currentToolCallId = randomId();
404
+ let filterThinkingTextBuffer = new FilterThinkingTextBuffer();
405
+ let hasReceivedContent = false;
406
+ try {
407
+ for await (const chunk of stream) {
408
+ if (chunk.type === "message_start") {
409
+ currentMessageId = chunk.message.id;
410
+ } else if (chunk.type === "content_block_start") {
411
+ hasReceivedContent = true;
412
+ if (chunk.content_block.type === "text") {
413
+ didOutputText = false;
414
+ filterThinkingTextBuffer.reset();
415
+ mode = "message";
416
+ } else if (chunk.content_block.type === "tool_use") {
417
+ currentToolCallId = chunk.content_block.id;
418
+ eventStream$.sendActionExecutionStart({
419
+ actionExecutionId: currentToolCallId,
420
+ actionName: chunk.content_block.name,
421
+ parentMessageId: currentMessageId
422
+ });
423
+ mode = "function";
424
+ }
425
+ } else if (chunk.type === "content_block_delta") {
426
+ if (chunk.delta.type === "text_delta") {
427
+ const text = filterThinkingTextBuffer.onTextChunk(chunk.delta.text);
428
+ if (text.length > 0) {
429
+ if (!didOutputText) {
430
+ eventStream$.sendTextMessageStart({
431
+ messageId: currentMessageId
432
+ });
433
+ didOutputText = true;
434
+ }
435
+ eventStream$.sendTextMessageContent({
436
+ messageId: currentMessageId,
437
+ content: text
438
+ });
439
+ }
440
+ } else if (chunk.delta.type === "input_json_delta") {
441
+ eventStream$.sendActionExecutionArgs({
442
+ actionExecutionId: currentToolCallId,
443
+ args: chunk.delta.partial_json
444
+ });
445
+ }
446
+ } else if (chunk.type === "content_block_stop") {
447
+ if (mode === "message") {
448
+ if (didOutputText) {
449
+ eventStream$.sendTextMessageEnd({
450
+ messageId: currentMessageId
451
+ });
452
+ }
453
+ } else if (mode === "function") {
454
+ eventStream$.sendActionExecutionEnd({
455
+ actionExecutionId: currentToolCallId
456
+ });
457
+ }
458
+ }
459
+ }
460
+ } catch (error) {
461
+ throw convertServiceAdapterError(error, "Anthropic");
462
+ }
463
+ if (!hasReceivedContent && this.shouldGenerateFallbackResponse(cachedMessages)) {
464
+ let fallbackContent = "Task completed successfully.";
465
+ const lastMessage = cachedMessages[cachedMessages.length - 1];
466
+ if ((lastMessage == null ? void 0 : lastMessage.role) === "user" && Array.isArray(lastMessage.content)) {
467
+ const toolResult = lastMessage.content.find((c) => c.type === "tool_result");
468
+ if ((toolResult == null ? void 0 : toolResult.content) && toolResult.content !== "Action completed successfully") {
469
+ fallbackContent = toolResult.content;
470
+ }
471
+ }
472
+ currentMessageId = randomId();
473
+ eventStream$.sendTextMessageStart({
474
+ messageId: currentMessageId
475
+ });
476
+ eventStream$.sendTextMessageContent({
477
+ messageId: currentMessageId,
478
+ content: fallbackContent
479
+ });
480
+ eventStream$.sendTextMessageEnd({
481
+ messageId: currentMessageId
482
+ });
483
+ }
484
+ eventStream$.complete();
485
+ });
486
+ } catch (error) {
487
+ throw convertServiceAdapterError(error, "Anthropic");
488
+ }
489
+ return {
490
+ threadId: threadId || randomUUID()
491
+ };
492
+ }
493
+ };
494
+ __name(AnthropicAdapter, "AnthropicAdapter");
495
+ var THINKING_TAG = "<thinking>";
496
+ var THINKING_TAG_END = "</thinking>";
497
+ var FilterThinkingTextBuffer = /* @__PURE__ */ __name(class FilterThinkingTextBuffer2 {
498
+ buffer;
499
+ didFilterThinkingTag = false;
500
+ constructor() {
501
+ this.buffer = "";
502
+ }
503
+ onTextChunk(text) {
504
+ this.buffer += text;
505
+ if (this.didFilterThinkingTag) {
506
+ return text;
507
+ }
508
+ const potentialTag = this.buffer.slice(0, THINKING_TAG.length);
509
+ if (THINKING_TAG.startsWith(potentialTag)) {
510
+ if (this.buffer.includes(THINKING_TAG_END)) {
511
+ const end = this.buffer.indexOf(THINKING_TAG_END);
512
+ const filteredText = this.buffer.slice(end + THINKING_TAG_END.length);
513
+ this.buffer = filteredText;
514
+ this.didFilterThinkingTag = true;
515
+ return filteredText;
516
+ } else {
517
+ return "";
518
+ }
519
+ }
520
+ return text;
521
+ }
522
+ reset() {
523
+ this.buffer = "";
524
+ this.didFilterThinkingTag = false;
525
+ }
526
+ }, "FilterThinkingTextBuffer");
527
+
528
+ // src/service-adapters/experimental/ollama/ollama-adapter.ts
529
+ import { Ollama } from "@langchain/community/llms/ollama";
530
+ import { randomId as randomId2, randomUUID as randomUUID2 } from "@copilotkit/shared";
531
+ var DEFAULT_MODEL2 = "llama3:latest";
532
+ var ExperimentalOllamaAdapter = class {
533
+ model;
534
+ provider = "ollama";
535
+ constructor(options) {
536
+ if (options == null ? void 0 : options.model) {
537
+ this.model = options.model;
538
+ } else {
539
+ this.model = DEFAULT_MODEL2;
540
+ }
541
+ }
542
+ async process(request) {
543
+ const { messages, actions, eventSource } = request;
544
+ const ollama = new Ollama({
545
+ model: this.model
546
+ });
547
+ const contents = messages.filter((m) => m.isTextMessage()).map((m) => m.content);
548
+ const _stream = await ollama.stream(contents);
549
+ eventSource.stream(async (eventStream$) => {
550
+ const currentMessageId = randomId2();
551
+ eventStream$.sendTextMessageStart({
552
+ messageId: currentMessageId
553
+ });
554
+ for await (const chunkText of _stream) {
555
+ eventStream$.sendTextMessageContent({
556
+ messageId: currentMessageId,
557
+ content: chunkText
558
+ });
559
+ }
560
+ eventStream$.sendTextMessageEnd({
561
+ messageId: currentMessageId
562
+ });
563
+ eventStream$.complete();
564
+ });
565
+ return {
566
+ threadId: request.threadId || randomUUID2()
567
+ };
568
+ }
569
+ };
570
+ __name(ExperimentalOllamaAdapter, "ExperimentalOllamaAdapter");
571
+
572
+ // src/service-adapters/bedrock/bedrock-adapter.ts
573
+ import { ChatBedrockConverse } from "@langchain/aws";
574
+ var DEFAULT_MODEL3 = "amazon.nova-lite-v1:0";
575
+ var BedrockAdapter = class extends LangChainAdapter {
576
+ provider = "bedrock";
577
+ model = DEFAULT_MODEL3;
578
+ constructor(options) {
579
+ super({
580
+ chainFn: async ({ messages, tools, threadId }) => {
581
+ this.model = (options == null ? void 0 : options.model) ?? "amazon.nova-lite-v1:0";
582
+ const model = new ChatBedrockConverse({
583
+ model: this.model,
584
+ region: (options == null ? void 0 : options.region) ?? "us-east-1",
585
+ credentials: (options == null ? void 0 : options.credentials) ? {
586
+ accessKeyId: options.credentials.accessKeyId,
587
+ secretAccessKey: options.credentials.secretAccessKey
588
+ } : void 0
589
+ }).bindTools(tools);
590
+ return model.stream(messages);
591
+ }
592
+ });
593
+ }
594
+ };
595
+ __name(BedrockAdapter, "BedrockAdapter");
596
+
597
+ // src/service-adapters/empty/empty-adapter.ts
598
+ import { randomUUID as randomUUID3 } from "@copilotkit/shared";
599
+ var EmptyAdapter = class {
600
+ async process(request) {
601
+ return {
602
+ threadId: request.threadId || randomUUID3()
603
+ };
604
+ }
605
+ };
606
+ __name(EmptyAdapter, "EmptyAdapter");
607
+ var ExperimentalEmptyAdapter = EmptyAdapter;
608
+
609
+ export {
610
+ RemoteChain,
611
+ AnthropicAdapter,
612
+ ExperimentalOllamaAdapter,
613
+ BedrockAdapter,
614
+ EmptyAdapter,
615
+ ExperimentalEmptyAdapter
616
+ };
617
+ //# sourceMappingURL=chunk-TTUAEJLD.mjs.map