@copilotkit/runtime 1.9.2-next.8 → 1.9.2

This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their respective public registries.
Files changed (86)
  1. package/CHANGELOG.md +177 -0
  2. package/dist/{chunk-4TLMVLU4.mjs → chunk-56ZNYBXV.mjs} +2 -2
  3. package/dist/chunk-AMUJQ6IR.mjs +50 -0
  4. package/dist/chunk-AMUJQ6IR.mjs.map +1 -0
  5. package/dist/{chunk-5SG4WWXH.mjs → chunk-GB4M7WUE.mjs} +2 -2
  6. package/dist/{chunk-JWPSIGSA.mjs → chunk-HJYWUUFY.mjs} +2 -2
  7. package/dist/{chunk-KYCDL2KX.mjs → chunk-M35WOOEP.mjs} +2 -2
  8. package/dist/{chunk-IIXJVVTV.mjs → chunk-QLLV2QVK.mjs} +132 -78
  9. package/dist/chunk-QLLV2QVK.mjs.map +1 -0
  10. package/dist/{chunk-WIXS6EG7.mjs → chunk-TE5QWP4H.mjs} +2401 -2055
  11. package/dist/chunk-TE5QWP4H.mjs.map +1 -0
  12. package/dist/{chunk-5BIEM2UU.mjs → chunk-XWBDEXDA.mjs} +4 -3
  13. package/dist/{chunk-5BIEM2UU.mjs.map → chunk-XWBDEXDA.mjs.map} +1 -1
  14. package/dist/{groq-adapter-25a2bd35.d.ts → groq-adapter-742818f2.d.ts} +5 -1
  15. package/dist/index.d.ts +4 -3
  16. package/dist/index.js +3747 -3303
  17. package/dist/index.js.map +1 -1
  18. package/dist/index.mjs +12 -8
  19. package/dist/index.mjs.map +1 -1
  20. package/dist/{langserve-4a5c9217.d.ts → langserve-3e8d0e06.d.ts} +13 -7
  21. package/dist/lib/index.d.ts +155 -5
  22. package/dist/lib/index.js +2808 -2407
  23. package/dist/lib/index.js.map +1 -1
  24. package/dist/lib/index.mjs +9 -8
  25. package/dist/lib/integrations/index.d.ts +3 -3
  26. package/dist/lib/integrations/index.js +151 -96
  27. package/dist/lib/integrations/index.js.map +1 -1
  28. package/dist/lib/integrations/index.mjs +7 -6
  29. package/dist/lib/integrations/nest/index.d.ts +2 -2
  30. package/dist/lib/integrations/nest/index.js +151 -96
  31. package/dist/lib/integrations/nest/index.js.map +1 -1
  32. package/dist/lib/integrations/nest/index.mjs +5 -4
  33. package/dist/lib/integrations/node-express/index.d.ts +2 -2
  34. package/dist/lib/integrations/node-express/index.js +151 -96
  35. package/dist/lib/integrations/node-express/index.js.map +1 -1
  36. package/dist/lib/integrations/node-express/index.mjs +5 -4
  37. package/dist/lib/integrations/node-http/index.d.ts +2 -2
  38. package/dist/lib/integrations/node-http/index.js +151 -96
  39. package/dist/lib/integrations/node-http/index.js.map +1 -1
  40. package/dist/lib/integrations/node-http/index.mjs +4 -3
  41. package/dist/service-adapters/index.d.ts +6 -4
  42. package/dist/service-adapters/index.js +202 -107
  43. package/dist/service-adapters/index.js.map +1 -1
  44. package/dist/service-adapters/index.mjs +6 -2
  45. package/dist/service-adapters/shared/index.d.ts +9 -0
  46. package/dist/service-adapters/shared/index.js +72 -0
  47. package/dist/service-adapters/shared/index.js.map +1 -0
  48. package/dist/service-adapters/shared/index.mjs +8 -0
  49. package/dist/service-adapters/shared/index.mjs.map +1 -0
  50. package/dist/{shared-941d59dc.d.ts → shared-96b46379.d.ts} +23 -21
  51. package/dist/utils/index.d.ts +17 -1
  52. package/dist/utils/index.js +3 -2
  53. package/dist/utils/index.js.map +1 -1
  54. package/dist/utils/index.mjs +1 -1
  55. package/package.json +11 -11
  56. package/src/agents/langgraph/event-source.ts +36 -38
  57. package/src/agents/langgraph/events.ts +19 -1
  58. package/src/graphql/resolvers/copilot.resolver.ts +85 -42
  59. package/src/lib/error-messages.ts +200 -0
  60. package/src/lib/integrations/shared.ts +43 -0
  61. package/src/lib/runtime/__tests__/{copilot-runtime-trace.test.ts → copilot-runtime-error.test.ts} +27 -27
  62. package/src/lib/runtime/__tests__/mcp-tools-utils.test.ts +464 -0
  63. package/src/lib/runtime/agui-action.ts +9 -3
  64. package/src/lib/runtime/copilot-runtime.ts +156 -160
  65. package/src/lib/runtime/mcp-tools-utils.ts +84 -18
  66. package/src/lib/runtime/remote-action-constructors.ts +28 -3
  67. package/src/lib/runtime/remote-actions.ts +6 -0
  68. package/src/lib/runtime/remote-lg-action.ts +85 -3
  69. package/src/lib/streaming.ts +125 -36
  70. package/src/service-adapters/anthropic/anthropic-adapter.ts +67 -8
  71. package/src/service-adapters/anthropic/utils.ts +3 -8
  72. package/src/service-adapters/events.ts +75 -80
  73. package/src/service-adapters/google/google-genai-adapter.ts +5 -0
  74. package/src/service-adapters/groq/groq-adapter.ts +66 -56
  75. package/src/service-adapters/index.ts +1 -0
  76. package/src/service-adapters/openai/openai-adapter.ts +4 -3
  77. package/src/service-adapters/shared/error-utils.ts +61 -0
  78. package/src/service-adapters/shared/index.ts +1 -0
  79. package/src/utils/failed-response-status-reasons.ts +23 -1
  80. package/tests/service-adapters/anthropic/anthropic-adapter.test.ts +172 -387
  81. package/dist/chunk-IIXJVVTV.mjs.map +0 -1
  82. package/dist/chunk-WIXS6EG7.mjs.map +0 -1
  83. package/dist/{chunk-4TLMVLU4.mjs.map → chunk-56ZNYBXV.mjs.map} +0 -0
  84. package/dist/{chunk-5SG4WWXH.mjs.map → chunk-GB4M7WUE.mjs.map} +0 -0
  85. package/dist/{chunk-JWPSIGSA.mjs.map → chunk-HJYWUUFY.mjs.map} +0 -0
  86. package/dist/{chunk-KYCDL2KX.mjs.map → chunk-M35WOOEP.mjs.map} +0 -0
@@ -1,9 +1,10 @@
  import {
  copilotRuntimeNodeHttpEndpoint
- } from "../../../chunk-WIXS6EG7.mjs";
- import "../../../chunk-IIXJVVTV.mjs";
- import "../../../chunk-5BIEM2UU.mjs";
+ } from "../../../chunk-TE5QWP4H.mjs";
  import "../../../chunk-SHBDMA63.mjs";
+ import "../../../chunk-QLLV2QVK.mjs";
+ import "../../../chunk-XWBDEXDA.mjs";
+ import "../../../chunk-AMUJQ6IR.mjs";
  import "../../../chunk-2OZAGFV3.mjs";
  import "../../../chunk-FHD4JECV.mjs";
  export {
@@ -1,7 +1,8 @@
- import { b as CopilotServiceAdapter, C as CopilotRuntimeChatCompletionRequest, a as CopilotRuntimeChatCompletionResponse } from '../langserve-4a5c9217.js';
- export { c as RemoteChain, R as RemoteChainParameters } from '../langserve-4a5c9217.js';
- import { L as LangChainAdapter } from '../groq-adapter-25a2bd35.js';
- export { G as GoogleGenerativeAIAdapter, f as GroqAdapter, e as GroqAdapterParams, a as OpenAIAdapter, O as OpenAIAdapterParams, c as OpenAIAssistantAdapter, b as OpenAIAssistantAdapterParams, d as UnifyAdapter, U as UnifyAdapterParams } from '../groq-adapter-25a2bd35.js';
+ import { b as CopilotServiceAdapter, C as CopilotRuntimeChatCompletionRequest, a as CopilotRuntimeChatCompletionResponse } from '../langserve-3e8d0e06.js';
+ export { c as RemoteChain, R as RemoteChainParameters } from '../langserve-3e8d0e06.js';
+ export { convertServiceAdapterError } from './shared/index.js';
+ import { L as LangChainAdapter } from '../groq-adapter-742818f2.js';
+ export { G as GoogleGenerativeAIAdapter, f as GroqAdapter, e as GroqAdapterParams, a as OpenAIAdapter, O as OpenAIAdapterParams, c as OpenAIAssistantAdapter, b as OpenAIAssistantAdapterParams, d as UnifyAdapter, U as UnifyAdapterParams } from '../groq-adapter-742818f2.js';
  import Anthropic from '@anthropic-ai/sdk';
  import '../index-d4614f9b.js';
  import '../graphql/types/base/index.js';
@@ -48,6 +49,7 @@ declare class AnthropicAdapter implements CopilotServiceAdapter {
  private _anthropic;
  get anthropic(): Anthropic;
  constructor(params?: AnthropicAdapterParams);
+ private shouldGenerateFallbackResponse;
  process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
  }

@@ -41,7 +41,8 @@ __export(service_adapters_exports, {
  OpenAIAdapter: () => OpenAIAdapter,
  OpenAIAssistantAdapter: () => OpenAIAssistantAdapter,
  RemoteChain: () => RemoteChain,
- UnifyAdapter: () => UnifyAdapter
+ UnifyAdapter: () => UnifyAdapter,
+ convertServiceAdapterError: () => convertServiceAdapterError
  });
  module.exports = __toCommonJS(service_adapters_exports);

@@ -123,11 +124,53 @@ var RemoteChain = class {
  };
  __name(RemoteChain, "RemoteChain");

+ // src/service-adapters/shared/error-utils.ts
+ var import_shared = require("@copilotkit/shared");
+ function convertServiceAdapterError(error, adapterName) {
+ var _a, _b, _c;
+ const errorName = ((_a = error == null ? void 0 : error.constructor) == null ? void 0 : _a.name) || error.name;
+ const errorMessage = (error == null ? void 0 : error.message) || String(error);
+ const statusCode = error.status || error.statusCode || ((_b = error.response) == null ? void 0 : _b.status);
+ const responseData = error.error || ((_c = error.response) == null ? void 0 : _c.data) || error.data;
+ const structuredError = new import_shared.CopilotKitLowLevelError({
+ error: error instanceof Error ? error : new Error(errorMessage),
+ url: `${adapterName} service adapter`,
+ message: `${adapterName} API error: ${errorMessage}`
+ });
+ if (statusCode) {
+ structuredError.statusCode = statusCode;
+ }
+ if (responseData) {
+ structuredError.responseData = responseData;
+ }
+ if (errorName) {
+ structuredError.originalErrorType = errorName;
+ }
+ let newCode;
+ if (statusCode === 401) {
+ newCode = import_shared.CopilotKitErrorCode.AUTHENTICATION_ERROR;
+ } else if (statusCode >= 400 && statusCode < 500) {
+ newCode = import_shared.CopilotKitErrorCode.CONFIGURATION_ERROR;
+ } else if (statusCode >= 500) {
+ newCode = import_shared.CopilotKitErrorCode.NETWORK_ERROR;
+ } else if (statusCode) {
+ newCode = import_shared.CopilotKitErrorCode.CONFIGURATION_ERROR;
+ } else {
+ newCode = import_shared.CopilotKitErrorCode.NETWORK_ERROR;
+ }
+ structuredError.code = newCode;
+ if (structuredError.extensions) {
+ structuredError.extensions.code = newCode;
+ }
+ return structuredError;
+ }
+ __name(convertServiceAdapterError, "convertServiceAdapterError");
+
  // src/service-adapters/openai/openai-adapter.ts
  var import_openai = __toESM(require("openai"));

  // src/service-adapters/openai/utils.ts
- var import_shared = require("@copilotkit/shared");
+ var import_shared2 = require("@copilotkit/shared");
  function limitMessagesToTokenCount(messages, tools, model, maxTokens) {
  maxTokens || (maxTokens = maxTokensForOpenAIModel(model));
  const result = [];
@@ -241,7 +284,7 @@ function convertActionInputToOpenAITool(action) {
  function: {
  name: action.name,
  description: action.description,
- parameters: (0, import_shared.parseJson)(action.jsonSchema, {})
+ parameters: (0, import_shared2.parseJson)(action.jsonSchema, {})
  }
  };
  }
@@ -309,7 +352,7 @@ function convertSystemMessageToAssistantAPI(message) {
  __name(convertSystemMessageToAssistantAPI, "convertSystemMessageToAssistantAPI");

  // src/service-adapters/openai/openai-adapter.ts
- var import_shared2 = require("@copilotkit/shared");
+ var import_shared3 = require("@copilotkit/shared");
  var DEFAULT_MODEL = "gpt-4o";
  var OpenAIAdapter = class {
  model = DEFAULT_MODEL;
@@ -330,7 +373,7 @@ var OpenAIAdapter = class {
  async process(request) {
  const { threadId: threadIdFromRequest, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
  const tools = actions.map(convertActionInputToOpenAITool);
- const threadId = threadIdFromRequest ?? (0, import_shared2.randomUUID)();
+ const threadId = threadIdFromRequest ?? (0, import_shared3.randomUUID)();
  const validToolUseIds = /* @__PURE__ */ new Set();
  for (const message of messages) {
  if (message.isActionExecutionMessage()) {
@@ -446,14 +489,14 @@ var OpenAIAdapter = class {
  });
  }
  } catch (error) {
- console.error("[OpenAI] Error processing stream:", error);
- throw error;
+ console.error("[OpenAI] Error during API call:", error);
+ throw convertServiceAdapterError(error, "OpenAI");
  }
  eventStream$.complete();
  });
  } catch (error) {
  console.error("[OpenAI] Error during API call:", error);
- throw error;
+ throw convertServiceAdapterError(error, "OpenAI");
  }
  return {
  threadId
@@ -465,7 +508,7 @@ __name(OpenAIAdapter, "OpenAIAdapter");
  // src/service-adapters/langchain/utils.ts
  var import_messages = require("@langchain/core/messages");
  var import_tools = require("@langchain/core/tools");
- var import_shared3 = require("@copilotkit/shared");
+ var import_shared5 = require("@copilotkit/shared");
  function convertMessageToLangChainMessage(message) {
  if (message.isTextMessage()) {
  if (message.role == "user") {
@@ -498,7 +541,7 @@ function convertActionInputToLangChainTool(actionInput) {
  return new import_tools.DynamicStructuredTool({
  name: actionInput.name,
  description: actionInput.description,
- schema: (0, import_shared3.convertJsonSchemaToZodSchema)(JSON.parse(actionInput.jsonSchema), true),
+ schema: (0, import_shared5.convertJsonSchemaToZodSchema)(JSON.parse(actionInput.jsonSchema), true),
  func: async () => {
  return "";
  }
@@ -531,7 +574,7 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
  var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
  if (typeof result === "string") {
  if (!actionExecution) {
- eventStream$.sendTextMessage((0, import_shared3.randomId)(), result);
+ eventStream$.sendTextMessage((0, import_shared5.randomId)(), result);
  } else {
  eventStream$.sendActionExecutionResult({
  actionExecutionId: actionExecution.id,
@@ -542,11 +585,11 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
  } else if (isAIMessage(result)) {
  maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
  if (result.content) {
- eventStream$.sendTextMessage((0, import_shared3.randomId)(), result.content);
+ eventStream$.sendTextMessage((0, import_shared5.randomId)(), result.content);
  }
  for (const toolCall of result.tool_calls) {
  eventStream$.sendActionExecution({
- actionExecutionId: toolCall.id || (0, import_shared3.randomId)(),
+ actionExecutionId: toolCall.id || (0, import_shared5.randomId)(),
  actionName: toolCall.name,
  args: JSON.stringify(toolCall.args)
  });
@@ -554,12 +597,12 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
  } else if (isBaseMessageChunk(result)) {
  maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
  if ((_a = result.lc_kwargs) == null ? void 0 : _a.content) {
- eventStream$.sendTextMessage((0, import_shared3.randomId)(), result.content);
+ eventStream$.sendTextMessage((0, import_shared5.randomId)(), result.content);
  }
  if ((_b = result.lc_kwargs) == null ? void 0 : _b.tool_calls) {
  for (const toolCall of (_c = result.lc_kwargs) == null ? void 0 : _c.tool_calls) {
  eventStream$.sendActionExecution({
- actionExecutionId: toolCall.id || (0, import_shared3.randomId)(),
+ actionExecutionId: toolCall.id || (0, import_shared5.randomId)(),
  actionName: toolCall.name,
  args: JSON.stringify(toolCall.args)
  });
@@ -633,7 +676,7 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
  });
  } else if (content) {
  mode = "message";
- currentMessageId = ((_k = value.lc_kwargs) == null ? void 0 : _k.id) || (0, import_shared3.randomId)();
+ currentMessageId = ((_k = value.lc_kwargs) == null ? void 0 : _k.id) || (0, import_shared5.randomId)();
  eventStream$.sendTextMessageStart({
  messageId: currentMessageId
  });
@@ -690,7 +733,7 @@ function encodeResult(result) {
  __name(encodeResult, "encodeResult");

  // src/service-adapters/langchain/langchain-adapter.ts
- var import_shared4 = require("@copilotkit/shared");
+ var import_shared6 = require("@copilotkit/shared");
  var import_promises = require("@langchain/core/callbacks/promises");
  var LangChainAdapter = class {
  options;
@@ -703,7 +746,7 @@ var LangChainAdapter = class {
  async process(request) {
  try {
  const { eventSource, model, actions, messages, runId, threadId: threadIdFromRequest } = request;
- const threadId = threadIdFromRequest ?? (0, import_shared4.randomUUID)();
+ const threadId = threadIdFromRequest ?? (0, import_shared6.randomUUID)();
  const result = await this.options.chainFn({
  messages: messages.map(convertMessageToLangChainMessage),
  tools: actions.map(convertActionInputToLangChainTool),
@@ -741,6 +784,7 @@ var GoogleGenerativeAIAdapter = class extends LangChainAdapter {
  return message.content && String(message.content).trim().length > 0 || message.tool_calls && message.tool_calls.length > 0;
  });
  const model = new import_google_gauth.ChatGoogle({
+ apiKey: (options == null ? void 0 : options.apiKey) ?? process.env.GOOGLE_API_KEY,
  modelName: (options == null ? void 0 : options.model) ?? "gemini-1.5-pro",
  apiVersion: "v1beta"
  }).bindTools(tools);
@@ -958,7 +1002,7 @@ __name(getRunIdFromStream, "getRunIdFromStream");

  // src/service-adapters/unify/unify-adapter.ts
  var import_openai3 = __toESM(require("openai"));
- var import_shared5 = require("@copilotkit/shared");
+ var import_shared7 = require("@copilotkit/shared");
  var UnifyAdapter = class {
  apiKey;
  model;
@@ -1000,7 +1044,7 @@ var UnifyAdapter = class {
  for await (const chunk of stream) {
  if (this.start) {
  model = chunk.model;
- currentMessageId = (0, import_shared5.randomId)();
+ currentMessageId = (0, import_shared7.randomId)();
  eventStream$.sendTextMessageStart({
  messageId: currentMessageId
  });
@@ -1067,7 +1111,7 @@ var UnifyAdapter = class {
  eventStream$.complete();
  });
  return {
- threadId: request.threadId || (0, import_shared5.randomUUID)()
+ threadId: request.threadId || (0, import_shared7.randomUUID)()
  };
  }
  };
@@ -1075,7 +1119,7 @@ __name(UnifyAdapter, "UnifyAdapter");

  // src/service-adapters/groq/groq-adapter.ts
  var import_groq_sdk = require("groq-sdk");
- var import_shared6 = require("@copilotkit/shared");
+ var import_shared8 = require("@copilotkit/shared");
  var DEFAULT_MODEL2 = "llama-3.3-70b-versatile";
  var GroqAdapter = class {
  model = DEFAULT_MODEL2;
@@ -1107,90 +1151,99 @@ var GroqAdapter = class {
  }
  };
  }
- const stream = await this.groq.chat.completions.create({
- model,
- stream: true,
- messages: openaiMessages,
- ...tools.length > 0 && {
- tools
- },
- ...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
- max_tokens: forwardedParameters.maxTokens
- },
- ...(forwardedParameters == null ? void 0 : forwardedParameters.stop) && {
- stop: forwardedParameters.stop
- },
- ...toolChoice && {
- tool_choice: toolChoice
- },
- ...this.disableParallelToolCalls && {
- parallel_tool_calls: false
- },
- ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
- temperature: forwardedParameters.temperature
- }
- });
+ let stream;
+ try {
+ stream = await this.groq.chat.completions.create({
+ model,
+ stream: true,
+ messages: openaiMessages,
+ ...tools.length > 0 && {
+ tools
+ },
+ ...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
+ max_tokens: forwardedParameters.maxTokens
+ },
+ ...(forwardedParameters == null ? void 0 : forwardedParameters.stop) && {
+ stop: forwardedParameters.stop
+ },
+ ...toolChoice && {
+ tool_choice: toolChoice
+ },
+ ...this.disableParallelToolCalls && {
+ parallel_tool_calls: false
+ },
+ ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
+ temperature: forwardedParameters.temperature
+ }
+ });
+ } catch (error) {
+ throw convertServiceAdapterError(error, "Groq");
+ }
  eventSource.stream(async (eventStream$) => {
  var _a, _b;
  let mode = null;
  let currentMessageId;
  let currentToolCallId;
- for await (const chunk of stream) {
- const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
- const content = chunk.choices[0].delta.content;
- if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
- mode = null;
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
- } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
- mode = null;
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: currentToolCallId
- });
- }
- if (mode === null) {
- if (toolCall == null ? void 0 : toolCall.id) {
- mode = "function";
- currentToolCallId = toolCall.id;
- eventStream$.sendActionExecutionStart({
- actionExecutionId: currentToolCallId,
- actionName: toolCall.function.name,
- parentMessageId: chunk.id
- });
- } else if (content) {
- mode = "message";
- currentMessageId = chunk.id;
- eventStream$.sendTextMessageStart({
+ try {
+ for await (const chunk of stream) {
+ const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
+ const content = chunk.choices[0].delta.content;
+ if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
+ mode = null;
+ eventStream$.sendTextMessageEnd({
  messageId: currentMessageId
  });
+ } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
+ mode = null;
+ eventStream$.sendActionExecutionEnd({
+ actionExecutionId: currentToolCallId
+ });
+ }
+ if (mode === null) {
+ if (toolCall == null ? void 0 : toolCall.id) {
+ mode = "function";
+ currentToolCallId = toolCall.id;
+ eventStream$.sendActionExecutionStart({
+ actionExecutionId: currentToolCallId,
+ actionName: toolCall.function.name,
+ parentMessageId: chunk.id
+ });
+ } else if (content) {
+ mode = "message";
+ currentMessageId = chunk.id;
+ eventStream$.sendTextMessageStart({
+ messageId: currentMessageId
+ });
+ }
+ }
+ if (mode === "message" && content) {
+ eventStream$.sendTextMessageContent({
+ messageId: currentMessageId,
+ content
+ });
+ } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
+ eventStream$.sendActionExecutionArgs({
+ actionExecutionId: currentToolCallId,
+ args: toolCall.function.arguments
+ });
  }
  }
- if (mode === "message" && content) {
- eventStream$.sendTextMessageContent({
- messageId: currentMessageId,
- content
+ if (mode === "message") {
+ eventStream$.sendTextMessageEnd({
+ messageId: currentMessageId
  });
- } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
- eventStream$.sendActionExecutionArgs({
- actionExecutionId: currentToolCallId,
- args: toolCall.function.arguments
+ } else if (mode === "function") {
+ eventStream$.sendActionExecutionEnd({
+ actionExecutionId: currentToolCallId
  });
  }
- }
- if (mode === "message") {
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
- } else if (mode === "function") {
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: currentToolCallId
- });
+ } catch (error) {
+ throw convertServiceAdapterError(error, "Groq");
  }
  eventStream$.complete();
  });
  return {
- threadId: request.threadId || (0, import_shared6.randomUUID)()
+ threadId: request.threadId || (0, import_shared8.randomUUID)()
  };
  }
  };
@@ -1336,7 +1389,7 @@ function convertMessageToAnthropicMessage(message) {
  content: [
  {
  type: "tool_result",
- content: message.result,
+ content: message.result || "Action completed successfully",
  tool_use_id: message.actionExecutionId
  }
  ]
@@ -1346,7 +1399,7 @@ function convertMessageToAnthropicMessage(message) {
  __name(convertMessageToAnthropicMessage, "convertMessageToAnthropicMessage");

  // src/service-adapters/anthropic/anthropic-adapter.ts
- var import_shared7 = require("@copilotkit/shared");
+ var import_shared10 = require("@copilotkit/shared");
  var DEFAULT_MODEL3 = "claude-3-5-sonnet-latest";
  var AnthropicAdapter = class {
  model = DEFAULT_MODEL3;
@@ -1360,6 +1413,22 @@ var AnthropicAdapter = class {
  this.model = params.model;
  }
  }
+ shouldGenerateFallbackResponse(messages) {
+ var _a, _b, _c;
+ if (messages.length === 0)
+ return false;
+ const lastMessage = messages[messages.length - 1];
+ const endsWithToolResult = lastMessage.role === "user" && Array.isArray(lastMessage.content) && lastMessage.content.some((content) => content.type === "tool_result");
+ if (messages.length >= 3 && endsWithToolResult) {
+ const lastThree = messages.slice(-3);
+ const hasRecentToolPattern = ((_a = lastThree[0]) == null ? void 0 : _a.role) === "user" && // Initial user message
+ ((_b = lastThree[1]) == null ? void 0 : _b.role) === "assistant" && // Assistant tool use
+ Array.isArray(lastThree[1].content) && lastThree[1].content.some((content) => content.type === "tool_use") && ((_c = lastThree[2]) == null ? void 0 : _c.role) === "user" && // Tool result
+ Array.isArray(lastThree[2].content) && lastThree[2].content.some((content) => content.type === "tool_result");
+ return hasRecentToolPattern;
+ }
+ return endsWithToolResult;
+ }
  async process(request) {
  const { threadId, model = this.model, messages: rawMessages, actions, eventSource, forwardedParameters } = request;
  const tools = actions.map(convertActionInputToAnthropicTool);
@@ -1374,18 +1443,22 @@ var AnthropicAdapter = class {
  validToolUseIds.add(message.id);
  }
  }
+ const processedToolResultIds = /* @__PURE__ */ new Set();
  const anthropicMessages = messages.map((message) => {
  if (message.isResultMessage()) {
  if (!validToolUseIds.has(message.actionExecutionId)) {
  return null;
  }
- validToolUseIds.delete(message.actionExecutionId);
+ if (processedToolResultIds.has(message.actionExecutionId)) {
+ return null;
+ }
+ processedToolResultIds.add(message.actionExecutionId);
  return {
  role: "user",
  content: [
  {
  type: "tool_result",
- content: message.result,
+ content: message.result || "Action completed successfully",
  tool_use_id: message.actionExecutionId
  }
  ]
@@ -1428,14 +1501,16 @@ var AnthropicAdapter = class {
  eventSource.stream(async (eventStream$) => {
  let mode = null;
  let didOutputText = false;
- let currentMessageId = (0, import_shared7.randomId)();
- let currentToolCallId = (0, import_shared7.randomId)();
+ let currentMessageId = (0, import_shared10.randomId)();
+ let currentToolCallId = (0, import_shared10.randomId)();
  let filterThinkingTextBuffer = new FilterThinkingTextBuffer();
+ let hasReceivedContent = false;
  try {
  for await (const chunk of stream) {
  if (chunk.type === "message_start") {
  currentMessageId = chunk.message.id;
  } else if (chunk.type === "content_block_start") {
+ hasReceivedContent = true;
  if (chunk.content_block.type === "text") {
  didOutputText = false;
  filterThinkingTextBuffer.reset();
@@ -1485,17 +1560,36 @@ var AnthropicAdapter = class {
  }
  }
  } catch (error) {
- console.error("[Anthropic] Error processing stream:", error);
- throw error;
+ throw convertServiceAdapterError(error, "Anthropic");
+ }
+ if (!hasReceivedContent && this.shouldGenerateFallbackResponse(limitedMessages)) {
+ let fallbackContent = "Task completed successfully.";
+ const lastMessage = limitedMessages[limitedMessages.length - 1];
+ if ((lastMessage == null ? void 0 : lastMessage.role) === "user" && Array.isArray(lastMessage.content)) {
+ const toolResult = lastMessage.content.find((c) => c.type === "tool_result");
+ if ((toolResult == null ? void 0 : toolResult.content) && toolResult.content !== "Action completed successfully") {
+ fallbackContent = toolResult.content;
+ }
+ }
+ currentMessageId = (0, import_shared10.randomId)();
+ eventStream$.sendTextMessageStart({
+ messageId: currentMessageId
+ });
+ eventStream$.sendTextMessageContent({
+ messageId: currentMessageId,
+ content: fallbackContent
+ });
+ eventStream$.sendTextMessageEnd({
+ messageId: currentMessageId
+ });
  }
  eventStream$.complete();
  });
  } catch (error) {
- console.error("[Anthropic] Error during API call:", error);
- throw error;
+ throw convertServiceAdapterError(error, "Anthropic");
  }
  return {
- threadId: threadId || (0, import_shared7.randomUUID)()
+ threadId: threadId || (0, import_shared10.randomUUID)()
  };
  }
  };
@@ -1535,7 +1629,7 @@ var FilterThinkingTextBuffer = /* @__PURE__ */ __name(class FilterThinkingTextBu

  // src/service-adapters/experimental/ollama/ollama-adapter.ts
  var import_ollama = require("@langchain/community/llms/ollama");
- var import_shared8 = require("@copilotkit/shared");
+ var import_shared12 = require("@copilotkit/shared");
  var DEFAULT_MODEL4 = "llama3:latest";
  var ExperimentalOllamaAdapter = class {
  model;
@@ -1554,7 +1648,7 @@ var ExperimentalOllamaAdapter = class {
  const contents = messages.filter((m) => m.isTextMessage()).map((m) => m.content);
  const _stream = await ollama.stream(contents);
  eventSource.stream(async (eventStream$) => {
- const currentMessageId = (0, import_shared8.randomId)();
+ const currentMessageId = (0, import_shared12.randomId)();
  eventStream$.sendTextMessageStart({
  messageId: currentMessageId
  });
@@ -1570,7 +1664,7 @@ var ExperimentalOllamaAdapter = class {
  eventStream$.complete();
  });
  return {
- threadId: request.threadId || (0, import_shared8.randomUUID)()
+ threadId: request.threadId || (0, import_shared12.randomUUID)()
  };
  }
  };
@@ -1598,11 +1692,11 @@ var BedrockAdapter = class extends LangChainAdapter {
  __name(BedrockAdapter, "BedrockAdapter");

  // src/service-adapters/empty/empty-adapter.ts
- var import_shared9 = require("@copilotkit/shared");
+ var import_shared13 = require("@copilotkit/shared");
  var EmptyAdapter = class {
  async process(request) {
  return {
- threadId: request.threadId || (0, import_shared9.randomUUID)()
+ threadId: request.threadId || (0, import_shared13.randomUUID)()
  };
  }
  };
@@ -1621,6 +1715,7 @@ var ExperimentalEmptyAdapter = EmptyAdapter;
  OpenAIAdapter,
  OpenAIAssistantAdapter,
  RemoteChain,
- UnifyAdapter
+ UnifyAdapter,
+ convertServiceAdapterError
  });
  //# sourceMappingURL=index.js.map
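
A minimal sketch of how a custom service adapter might reuse the new convertServiceAdapterError helper added in this release. It assumes the helper and the adapter types are re-exported from @copilotkit/runtime, as the dist/service-adapters/index.d.ts changes above suggest, and callProvider / MyProviderAdapter are hypothetical stand-ins for whatever SDK the adapter wraps.

import {
  convertServiceAdapterError,
  type CopilotServiceAdapter,
  type CopilotRuntimeChatCompletionRequest,
  type CopilotRuntimeChatCompletionResponse,
} from "@copilotkit/runtime";
import { randomUUID } from "@copilotkit/shared";

// Hypothetical stand-in for the provider SDK call this adapter wraps.
async function callProvider(messages: unknown[]): Promise<void> {
  void messages;
}

class MyProviderAdapter implements CopilotServiceAdapter {
  async process(
    request: CopilotRuntimeChatCompletionRequest,
  ): Promise<CopilotRuntimeChatCompletionResponse> {
    try {
      await callProvider(request.messages);
    } catch (error) {
      // Same pattern the built-in OpenAI, Groq, and Anthropic adapters use above:
      // wrap the raw SDK error in a CopilotKitLowLevelError whose code is derived
      // from the HTTP status (401 -> AUTHENTICATION_ERROR, other 4xx ->
      // CONFIGURATION_ERROR, 5xx or no status -> NETWORK_ERROR).
      throw convertServiceAdapterError(error, "MyProvider");
    }
    return { threadId: request.threadId ?? randomUUID() };
  }
}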