@copilotkit/runtime 1.9.2-next.10 → 1.9.2-next.2

This diff shows the contents of publicly available package versions that have been released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (84)
  1. package/CHANGELOG.md +0 -59
  2. package/dist/{chunk-XWBDEXDA.mjs → chunk-5BIEM2UU.mjs} +3 -4
  3. package/dist/{chunk-XWBDEXDA.mjs.map → chunk-5BIEM2UU.mjs.map} +1 -1
  4. package/dist/{chunk-TOBFVWZU.mjs → chunk-6TQCQ3WJ.mjs} +2 -2
  5. package/dist/{chunk-VBXBFZEL.mjs → chunk-CD2SZGIZ.mjs} +2 -2
  6. package/dist/{chunk-6RUTA76W.mjs → chunk-D3SPXEVJ.mjs} +2 -2
  7. package/dist/{chunk-PMIAGZGS.mjs → chunk-DOWRU5U6.mjs} +2414 -2928
  8. package/dist/chunk-DOWRU5U6.mjs.map +1 -0
  9. package/dist/{chunk-GS7DO47Q.mjs → chunk-IIXJVVTV.mjs} +78 -155
  10. package/dist/chunk-IIXJVVTV.mjs.map +1 -0
  11. package/dist/{chunk-5OK4GLKL.mjs → chunk-ODF35LFG.mjs} +2 -19
  12. package/dist/chunk-ODF35LFG.mjs.map +1 -0
  13. package/dist/{groq-adapter-172a2ca4.d.ts → groq-adapter-25a2bd35.d.ts} +1 -1
  14. package/dist/index.d.ts +3 -4
  15. package/dist/index.js +2598 -3250
  16. package/dist/index.js.map +1 -1
  17. package/dist/index.mjs +8 -12
  18. package/dist/index.mjs.map +1 -1
  19. package/dist/lib/index.d.ts +4 -5
  20. package/dist/lib/index.js +2730 -3339
  21. package/dist/lib/index.js.map +1 -1
  22. package/dist/lib/index.mjs +8 -9
  23. package/dist/lib/integrations/index.d.ts +3 -3
  24. package/dist/lib/integrations/index.js +96 -160
  25. package/dist/lib/integrations/index.js.map +1 -1
  26. package/dist/lib/integrations/index.mjs +6 -7
  27. package/dist/lib/integrations/nest/index.d.ts +2 -2
  28. package/dist/lib/integrations/nest/index.js +96 -160
  29. package/dist/lib/integrations/nest/index.js.map +1 -1
  30. package/dist/lib/integrations/nest/index.mjs +4 -5
  31. package/dist/lib/integrations/node-express/index.d.ts +2 -2
  32. package/dist/lib/integrations/node-express/index.js +96 -160
  33. package/dist/lib/integrations/node-express/index.js.map +1 -1
  34. package/dist/lib/integrations/node-express/index.mjs +4 -5
  35. package/dist/lib/integrations/node-http/index.d.ts +2 -2
  36. package/dist/lib/integrations/node-http/index.js +96 -160
  37. package/dist/lib/integrations/node-http/index.js.map +1 -1
  38. package/dist/lib/integrations/node-http/index.mjs +3 -4
  39. package/dist/service-adapters/index.d.ts +4 -6
  40. package/dist/service-adapters/index.js +107 -225
  41. package/dist/service-adapters/index.js.map +1 -1
  42. package/dist/service-adapters/index.mjs +2 -6
  43. package/dist/{shared-bd953ebf.d.ts → shared-e272b15a.d.ts} +5 -45
  44. package/dist/utils/index.d.ts +1 -17
  45. package/dist/utils/index.js +2 -3
  46. package/dist/utils/index.js.map +1 -1
  47. package/dist/utils/index.mjs +1 -1
  48. package/package.json +2 -2
  49. package/src/agents/langgraph/event-source.ts +38 -36
  50. package/src/agents/langgraph/events.ts +1 -19
  51. package/src/graphql/resolvers/copilot.resolver.ts +45 -108
  52. package/src/graphql/resolvers/state.resolver.ts +3 -3
  53. package/src/lib/integrations/shared.ts +0 -43
  54. package/src/lib/runtime/copilot-runtime.ts +83 -412
  55. package/src/lib/runtime/langgraph/langgraph-agent.ts +0 -12
  56. package/src/lib/runtime/remote-action-constructors.ts +3 -28
  57. package/src/lib/runtime/remote-lg-action.ts +40 -130
  58. package/src/lib/streaming.ts +36 -125
  59. package/src/service-adapters/anthropic/anthropic-adapter.ts +8 -67
  60. package/src/service-adapters/anthropic/utils.ts +8 -3
  61. package/src/service-adapters/events.ts +81 -37
  62. package/src/service-adapters/groq/groq-adapter.ts +56 -66
  63. package/src/service-adapters/index.ts +0 -1
  64. package/src/service-adapters/openai/openai-adapter.ts +3 -18
  65. package/src/utils/failed-response-status-reasons.ts +1 -23
  66. package/tests/service-adapters/anthropic/anthropic-adapter.test.ts +387 -172
  67. package/dist/chunk-5OK4GLKL.mjs.map +0 -1
  68. package/dist/chunk-AMUJQ6IR.mjs +0 -50
  69. package/dist/chunk-AMUJQ6IR.mjs.map +0 -1
  70. package/dist/chunk-GS7DO47Q.mjs.map +0 -1
  71. package/dist/chunk-PMIAGZGS.mjs.map +0 -1
  72. package/dist/service-adapters/shared/index.d.ts +0 -9
  73. package/dist/service-adapters/shared/index.js +0 -72
  74. package/dist/service-adapters/shared/index.js.map +0 -1
  75. package/dist/service-adapters/shared/index.mjs +0 -8
  76. package/dist/service-adapters/shared/index.mjs.map +0 -1
  77. package/src/lib/error-messages.ts +0 -200
  78. package/src/lib/runtime/__tests__/copilot-runtime-trace.test.ts +0 -169
  79. package/src/service-adapters/shared/error-utils.ts +0 -61
  80. package/src/service-adapters/shared/index.ts +0 -1
  81. package/dist/{chunk-TOBFVWZU.mjs.map → chunk-6TQCQ3WJ.mjs.map} +0 -0
  82. package/dist/{chunk-VBXBFZEL.mjs.map → chunk-CD2SZGIZ.mjs.map} +0 -0
  83. package/dist/{chunk-6RUTA76W.mjs.map → chunk-D3SPXEVJ.mjs.map} +0 -0
  84. package/dist/{langserve-fc5cac89.d.ts → langserve-4a5c9217.d.ts} +7 -7
@@ -1,10 +1,9 @@
  import {
  copilotRuntimeNodeHttpEndpoint
- } from "../../../chunk-PMIAGZGS.mjs";
+ } from "../../../chunk-DOWRU5U6.mjs";
+ import "../../../chunk-IIXJVVTV.mjs";
+ import "../../../chunk-5BIEM2UU.mjs";
  import "../../../chunk-SHBDMA63.mjs";
- import "../../../chunk-GS7DO47Q.mjs";
- import "../../../chunk-XWBDEXDA.mjs";
- import "../../../chunk-AMUJQ6IR.mjs";
  import "../../../chunk-2OZAGFV3.mjs";
  import "../../../chunk-FHD4JECV.mjs";
  export {
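Note: copilotRuntimeNodeHttpEndpoint, re-exported above from the renamed chunks, is the package's plain Node HTTP entry point. For orientation, a minimal wiring sketch follows; the option names (endpoint, runtime, serviceAdapter) and the server setup are assumed from CopilotKit's public usage pattern, not taken from this diff.

// Hedged sketch: serving the Copilot runtime over a plain Node HTTP server.
// Option names are assumed from the public CopilotKit API, not from this diff.
import { createServer } from "node:http";
import {
  CopilotRuntime,
  OpenAIAdapter,
  copilotRuntimeNodeHttpEndpoint,
} from "@copilotkit/runtime";

const runtime = new CopilotRuntime();
const serviceAdapter = new OpenAIAdapter(); // any CopilotServiceAdapter exported by this package

// The returned handler is a standard (req, res) request listener.
const handler = copilotRuntimeNodeHttpEndpoint({
  endpoint: "/copilotkit",
  runtime,
  serviceAdapter,
});

createServer((req, res) => handler(req, res)).listen(4000);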
@@ -1,8 +1,7 @@
- import { b as CopilotServiceAdapter, C as CopilotRuntimeChatCompletionRequest, a as CopilotRuntimeChatCompletionResponse } from '../langserve-fc5cac89.js';
- export { c as RemoteChain, R as RemoteChainParameters } from '../langserve-fc5cac89.js';
- export { convertServiceAdapterError } from './shared/index.js';
- import { L as LangChainAdapter } from '../groq-adapter-172a2ca4.js';
- export { G as GoogleGenerativeAIAdapter, f as GroqAdapter, e as GroqAdapterParams, a as OpenAIAdapter, O as OpenAIAdapterParams, c as OpenAIAssistantAdapter, b as OpenAIAssistantAdapterParams, d as UnifyAdapter, U as UnifyAdapterParams } from '../groq-adapter-172a2ca4.js';
+ import { b as CopilotServiceAdapter, C as CopilotRuntimeChatCompletionRequest, a as CopilotRuntimeChatCompletionResponse } from '../langserve-4a5c9217.js';
+ export { c as RemoteChain, R as RemoteChainParameters } from '../langserve-4a5c9217.js';
+ import { L as LangChainAdapter } from '../groq-adapter-25a2bd35.js';
+ export { G as GoogleGenerativeAIAdapter, f as GroqAdapter, e as GroqAdapterParams, a as OpenAIAdapter, O as OpenAIAdapterParams, c as OpenAIAssistantAdapter, b as OpenAIAssistantAdapterParams, d as UnifyAdapter, U as UnifyAdapterParams } from '../groq-adapter-25a2bd35.js';
  import Anthropic from '@anthropic-ai/sdk';
  import '../index-d4614f9b.js';
  import '../graphql/types/base/index.js';
@@ -49,7 +48,6 @@ declare class AnthropicAdapter implements CopilotServiceAdapter {
  private _anthropic;
  get anthropic(): Anthropic;
  constructor(params?: AnthropicAdapterParams);
- private shouldGenerateFallbackResponse;
  process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
  }

@@ -41,8 +41,7 @@ __export(service_adapters_exports, {
  OpenAIAdapter: () => OpenAIAdapter,
  OpenAIAssistantAdapter: () => OpenAIAssistantAdapter,
  RemoteChain: () => RemoteChain,
- UnifyAdapter: () => UnifyAdapter,
- convertServiceAdapterError: () => convertServiceAdapterError
+ UnifyAdapter: () => UnifyAdapter
  });
  module.exports = __toCommonJS(service_adapters_exports);

@@ -124,53 +123,11 @@ var RemoteChain = class {
  };
  __name(RemoteChain, "RemoteChain");

- // src/service-adapters/shared/error-utils.ts
- var import_shared = require("@copilotkit/shared");
- function convertServiceAdapterError(error, adapterName) {
- var _a, _b, _c;
- const errorName = ((_a = error == null ? void 0 : error.constructor) == null ? void 0 : _a.name) || error.name;
- const errorMessage = (error == null ? void 0 : error.message) || String(error);
- const statusCode = error.status || error.statusCode || ((_b = error.response) == null ? void 0 : _b.status);
- const responseData = error.error || ((_c = error.response) == null ? void 0 : _c.data) || error.data;
- const structuredError = new import_shared.CopilotKitLowLevelError({
- error: error instanceof Error ? error : new Error(errorMessage),
- url: `${adapterName} service adapter`,
- message: `${adapterName} API error: ${errorMessage}`
- });
- if (statusCode) {
- structuredError.statusCode = statusCode;
- }
- if (responseData) {
- structuredError.responseData = responseData;
- }
- if (errorName) {
- structuredError.originalErrorType = errorName;
- }
- let newCode;
- if (statusCode === 401) {
- newCode = import_shared.CopilotKitErrorCode.AUTHENTICATION_ERROR;
- } else if (statusCode >= 400 && statusCode < 500) {
- newCode = import_shared.CopilotKitErrorCode.CONFIGURATION_ERROR;
- } else if (statusCode >= 500) {
- newCode = import_shared.CopilotKitErrorCode.NETWORK_ERROR;
- } else if (statusCode) {
- newCode = import_shared.CopilotKitErrorCode.CONFIGURATION_ERROR;
- } else {
- newCode = import_shared.CopilotKitErrorCode.NETWORK_ERROR;
- }
- structuredError.code = newCode;
- if (structuredError.extensions) {
- structuredError.extensions.code = newCode;
- }
- return structuredError;
- }
- __name(convertServiceAdapterError, "convertServiceAdapterError");
-
  // src/service-adapters/openai/openai-adapter.ts
  var import_openai = __toESM(require("openai"));

  // src/service-adapters/openai/utils.ts
- var import_shared2 = require("@copilotkit/shared");
+ var import_shared = require("@copilotkit/shared");
  function limitMessagesToTokenCount(messages, tools, model, maxTokens) {
  maxTokens || (maxTokens = maxTokensForOpenAIModel(model));
  const result = [];
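Note: the compiled block removed above is the deleted shared error helper (src/service-adapters/shared/error-utils.ts, item 79 in the file list). As a reading aid, here is a condensed, typed reconstruction inferred only from the compiled output; the exact @copilotkit/shared type signatures are assumptions, so treat this as a sketch rather than the original source. Adapters called it from their catch blocks as throw convertServiceAdapterError(error, "OpenAI"), which is the pattern the later hunks replace with a plain re-throw.

// Hedged reconstruction of the removed convertServiceAdapterError helper,
// inferred from the compiled output above. The @copilotkit/shared types are
// assumed; identifiers mirror the removed code.
import { CopilotKitLowLevelError, CopilotKitErrorCode } from "@copilotkit/shared";

export function convertServiceAdapterError(error: any, adapterName: string): CopilotKitLowLevelError {
  const errorMessage = error?.message || String(error);
  const statusCode = error?.status || error?.statusCode || error?.response?.status;

  const structuredError: any = new CopilotKitLowLevelError({
    error: error instanceof Error ? error : new Error(errorMessage),
    url: `${adapterName} service adapter`,
    message: `${adapterName} API error: ${errorMessage}`,
  });

  // Status-code mapping used by the removed helper:
  // 401 -> AUTHENTICATION_ERROR, any other status below 500 -> CONFIGURATION_ERROR,
  // 5xx or no status at all -> NETWORK_ERROR.
  structuredError.code =
    statusCode === 401
      ? CopilotKitErrorCode.AUTHENTICATION_ERROR
      : statusCode && statusCode < 500
        ? CopilotKitErrorCode.CONFIGURATION_ERROR
        : CopilotKitErrorCode.NETWORK_ERROR;

  return structuredError;
}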
@@ -284,7 +241,7 @@ function convertActionInputToOpenAITool(action) {
  function: {
  name: action.name,
  description: action.description,
- parameters: (0, import_shared2.parseJson)(action.jsonSchema, {})
+ parameters: (0, import_shared.parseJson)(action.jsonSchema, {})
  }
  };
  }
@@ -352,7 +309,7 @@ function convertSystemMessageToAssistantAPI(message) {
  __name(convertSystemMessageToAssistantAPI, "convertSystemMessageToAssistantAPI");

  // src/service-adapters/openai/openai-adapter.ts
- var import_shared3 = require("@copilotkit/shared");
+ var import_shared2 = require("@copilotkit/shared");
  var DEFAULT_MODEL = "gpt-4o";
  var OpenAIAdapter = class {
  model = DEFAULT_MODEL;
@@ -373,8 +330,7 @@ var OpenAIAdapter = class {
  async process(request) {
  const { threadId: threadIdFromRequest, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
  const tools = actions.map(convertActionInputToOpenAITool);
- const threadId = threadIdFromRequest ?? (0, import_shared3.randomUUID)();
- console.log("messages", messages);
+ const threadId = threadIdFromRequest ?? (0, import_shared2.randomUUID)();
  const validToolUseIds = /* @__PURE__ */ new Set();
  for (const message of messages) {
  if (message.isActionExecutionMessage()) {
@@ -404,29 +360,6 @@ var OpenAIAdapter = class {
  }
  };
  }
- console.log("INPUT", {
- model,
- stream: true,
- messages: openaiMessages,
- ...tools.length > 0 && {
- tools
- },
- ...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
- max_tokens: forwardedParameters.maxTokens
- },
- ...(forwardedParameters == null ? void 0 : forwardedParameters.stop) && {
- stop: forwardedParameters.stop
- },
- ...toolChoice && {
- tool_choice: toolChoice
- },
- ...this.disableParallelToolCalls && {
- parallel_tool_calls: false
- },
- ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
- temperature: forwardedParameters.temperature
- }
- });
  try {
  const stream = this.openai.beta.chat.completions.stream({
  model,
@@ -513,14 +446,14 @@ var OpenAIAdapter = class {
  });
  }
  } catch (error) {
- console.error("[OpenAI] Error during API call:", error);
- throw convertServiceAdapterError(error, "OpenAI");
+ console.error("[OpenAI] Error processing stream:", error);
+ throw error;
  }
  eventStream$.complete();
  });
  } catch (error) {
  console.error("[OpenAI] Error during API call:", error);
- throw convertServiceAdapterError(error, "OpenAI");
+ throw error;
  }
  return {
  threadId
@@ -532,7 +465,7 @@ __name(OpenAIAdapter, "OpenAIAdapter");
  // src/service-adapters/langchain/utils.ts
  var import_messages = require("@langchain/core/messages");
  var import_tools = require("@langchain/core/tools");
- var import_shared5 = require("@copilotkit/shared");
+ var import_shared3 = require("@copilotkit/shared");
  function convertMessageToLangChainMessage(message) {
  if (message.isTextMessage()) {
  if (message.role == "user") {
@@ -565,7 +498,7 @@ function convertActionInputToLangChainTool(actionInput) {
  return new import_tools.DynamicStructuredTool({
  name: actionInput.name,
  description: actionInput.description,
- schema: (0, import_shared5.convertJsonSchemaToZodSchema)(JSON.parse(actionInput.jsonSchema), true),
+ schema: (0, import_shared3.convertJsonSchemaToZodSchema)(JSON.parse(actionInput.jsonSchema), true),
  func: async () => {
  return "";
  }
@@ -598,7 +531,7 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
  var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
  if (typeof result === "string") {
  if (!actionExecution) {
- eventStream$.sendTextMessage((0, import_shared5.randomId)(), result);
+ eventStream$.sendTextMessage((0, import_shared3.randomId)(), result);
  } else {
  eventStream$.sendActionExecutionResult({
  actionExecutionId: actionExecution.id,
@@ -609,11 +542,11 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
  } else if (isAIMessage(result)) {
  maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
  if (result.content) {
- eventStream$.sendTextMessage((0, import_shared5.randomId)(), result.content);
+ eventStream$.sendTextMessage((0, import_shared3.randomId)(), result.content);
  }
  for (const toolCall of result.tool_calls) {
  eventStream$.sendActionExecution({
- actionExecutionId: toolCall.id || (0, import_shared5.randomId)(),
+ actionExecutionId: toolCall.id || (0, import_shared3.randomId)(),
  actionName: toolCall.name,
  args: JSON.stringify(toolCall.args)
  });
@@ -621,12 +554,12 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
  } else if (isBaseMessageChunk(result)) {
  maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
  if ((_a = result.lc_kwargs) == null ? void 0 : _a.content) {
- eventStream$.sendTextMessage((0, import_shared5.randomId)(), result.content);
+ eventStream$.sendTextMessage((0, import_shared3.randomId)(), result.content);
  }
  if ((_b = result.lc_kwargs) == null ? void 0 : _b.tool_calls) {
  for (const toolCall of (_c = result.lc_kwargs) == null ? void 0 : _c.tool_calls) {
  eventStream$.sendActionExecution({
- actionExecutionId: toolCall.id || (0, import_shared5.randomId)(),
+ actionExecutionId: toolCall.id || (0, import_shared3.randomId)(),
  actionName: toolCall.name,
  args: JSON.stringify(toolCall.args)
  });
@@ -700,7 +633,7 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
  });
  } else if (content) {
  mode = "message";
- currentMessageId = ((_k = value.lc_kwargs) == null ? void 0 : _k.id) || (0, import_shared5.randomId)();
+ currentMessageId = ((_k = value.lc_kwargs) == null ? void 0 : _k.id) || (0, import_shared3.randomId)();
  eventStream$.sendTextMessageStart({
  messageId: currentMessageId
  });
@@ -757,7 +690,7 @@ function encodeResult(result) {
  __name(encodeResult, "encodeResult");

  // src/service-adapters/langchain/langchain-adapter.ts
- var import_shared6 = require("@copilotkit/shared");
+ var import_shared4 = require("@copilotkit/shared");
  var import_promises = require("@langchain/core/callbacks/promises");
  var LangChainAdapter = class {
  options;
@@ -770,7 +703,7 @@ var LangChainAdapter = class {
  async process(request) {
  try {
  const { eventSource, model, actions, messages, runId, threadId: threadIdFromRequest } = request;
- const threadId = threadIdFromRequest ?? (0, import_shared6.randomUUID)();
+ const threadId = threadIdFromRequest ?? (0, import_shared4.randomUUID)();
  const result = await this.options.chainFn({
  messages: messages.map(convertMessageToLangChainMessage),
  tools: actions.map(convertActionInputToLangChainTool),
@@ -1025,7 +958,7 @@ __name(getRunIdFromStream, "getRunIdFromStream");

  // src/service-adapters/unify/unify-adapter.ts
  var import_openai3 = __toESM(require("openai"));
- var import_shared7 = require("@copilotkit/shared");
+ var import_shared5 = require("@copilotkit/shared");
  var UnifyAdapter = class {
  apiKey;
  model;
@@ -1067,7 +1000,7 @@ var UnifyAdapter = class {
  for await (const chunk of stream) {
  if (this.start) {
  model = chunk.model;
- currentMessageId = (0, import_shared7.randomId)();
+ currentMessageId = (0, import_shared5.randomId)();
  eventStream$.sendTextMessageStart({
  messageId: currentMessageId
  });
@@ -1134,7 +1067,7 @@ var UnifyAdapter = class {
  eventStream$.complete();
  });
  return {
- threadId: request.threadId || (0, import_shared7.randomUUID)()
+ threadId: request.threadId || (0, import_shared5.randomUUID)()
  };
  }
  };
@@ -1142,7 +1075,7 @@ __name(UnifyAdapter, "UnifyAdapter");

  // src/service-adapters/groq/groq-adapter.ts
  var import_groq_sdk = require("groq-sdk");
- var import_shared8 = require("@copilotkit/shared");
+ var import_shared6 = require("@copilotkit/shared");
  var DEFAULT_MODEL2 = "llama-3.3-70b-versatile";
  var GroqAdapter = class {
  model = DEFAULT_MODEL2;
@@ -1174,99 +1107,90 @@ var GroqAdapter = class {
  }
  };
  }
- let stream;
- try {
- stream = await this.groq.chat.completions.create({
- model,
- stream: true,
- messages: openaiMessages,
- ...tools.length > 0 && {
- tools
- },
- ...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
- max_tokens: forwardedParameters.maxTokens
- },
- ...(forwardedParameters == null ? void 0 : forwardedParameters.stop) && {
- stop: forwardedParameters.stop
- },
- ...toolChoice && {
- tool_choice: toolChoice
- },
- ...this.disableParallelToolCalls && {
- parallel_tool_calls: false
- },
- ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
- temperature: forwardedParameters.temperature
- }
- });
- } catch (error) {
- throw convertServiceAdapterError(error, "Groq");
- }
+ const stream = await this.groq.chat.completions.create({
+ model,
+ stream: true,
+ messages: openaiMessages,
+ ...tools.length > 0 && {
+ tools
+ },
+ ...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
+ max_tokens: forwardedParameters.maxTokens
+ },
+ ...(forwardedParameters == null ? void 0 : forwardedParameters.stop) && {
+ stop: forwardedParameters.stop
+ },
+ ...toolChoice && {
+ tool_choice: toolChoice
+ },
+ ...this.disableParallelToolCalls && {
+ parallel_tool_calls: false
+ },
+ ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
+ temperature: forwardedParameters.temperature
+ }
+ });
  eventSource.stream(async (eventStream$) => {
  var _a, _b;
  let mode = null;
  let currentMessageId;
  let currentToolCallId;
- try {
- for await (const chunk of stream) {
- const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
- const content = chunk.choices[0].delta.content;
- if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
- mode = null;
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
- } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
- mode = null;
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: currentToolCallId
- });
- }
- if (mode === null) {
- if (toolCall == null ? void 0 : toolCall.id) {
- mode = "function";
- currentToolCallId = toolCall.id;
- eventStream$.sendActionExecutionStart({
- actionExecutionId: currentToolCallId,
- actionName: toolCall.function.name,
- parentMessageId: chunk.id
- });
- } else if (content) {
- mode = "message";
- currentMessageId = chunk.id;
- eventStream$.sendTextMessageStart({
- messageId: currentMessageId
- });
- }
- }
- if (mode === "message" && content) {
- eventStream$.sendTextMessageContent({
- messageId: currentMessageId,
- content
- });
- } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
- eventStream$.sendActionExecutionArgs({
- actionExecutionId: currentToolCallId,
- args: toolCall.function.arguments
- });
- }
- }
- if (mode === "message") {
+ for await (const chunk of stream) {
+ const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
+ const content = chunk.choices[0].delta.content;
+ if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
+ mode = null;
  eventStream$.sendTextMessageEnd({
  messageId: currentMessageId
  });
- } else if (mode === "function") {
+ } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
+ mode = null;
  eventStream$.sendActionExecutionEnd({
  actionExecutionId: currentToolCallId
  });
  }
- } catch (error) {
- throw convertServiceAdapterError(error, "Groq");
+ if (mode === null) {
+ if (toolCall == null ? void 0 : toolCall.id) {
+ mode = "function";
+ currentToolCallId = toolCall.id;
+ eventStream$.sendActionExecutionStart({
+ actionExecutionId: currentToolCallId,
+ actionName: toolCall.function.name,
+ parentMessageId: chunk.id
+ });
+ } else if (content) {
+ mode = "message";
+ currentMessageId = chunk.id;
+ eventStream$.sendTextMessageStart({
+ messageId: currentMessageId
+ });
+ }
+ }
+ if (mode === "message" && content) {
+ eventStream$.sendTextMessageContent({
+ messageId: currentMessageId,
+ content
+ });
+ } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
+ eventStream$.sendActionExecutionArgs({
+ actionExecutionId: currentToolCallId,
+ args: toolCall.function.arguments
+ });
+ }
+ }
+ if (mode === "message") {
+ eventStream$.sendTextMessageEnd({
+ messageId: currentMessageId
+ });
+ } else if (mode === "function") {
+ eventStream$.sendActionExecutionEnd({
+ actionExecutionId: currentToolCallId
+ });
  }
  eventStream$.complete();
  });
  return {
- threadId: request.threadId || (0, import_shared8.randomUUID)()
+ threadId: request.threadId || (0, import_shared6.randomUUID)()
  };
  }
  };
@@ -1412,7 +1336,7 @@ function convertMessageToAnthropicMessage(message) {
  content: [
  {
  type: "tool_result",
- content: message.result || "Action completed successfully",
+ content: message.result,
  tool_use_id: message.actionExecutionId
  }
  ]
@@ -1422,7 +1346,7 @@
  __name(convertMessageToAnthropicMessage, "convertMessageToAnthropicMessage");

  // src/service-adapters/anthropic/anthropic-adapter.ts
- var import_shared10 = require("@copilotkit/shared");
+ var import_shared7 = require("@copilotkit/shared");
  var DEFAULT_MODEL3 = "claude-3-5-sonnet-latest";
  var AnthropicAdapter = class {
  model = DEFAULT_MODEL3;
@@ -1436,22 +1360,6 @@ var AnthropicAdapter = class {
  this.model = params.model;
  }
  }
- shouldGenerateFallbackResponse(messages) {
- var _a, _b, _c;
- if (messages.length === 0)
- return false;
- const lastMessage = messages[messages.length - 1];
- const endsWithToolResult = lastMessage.role === "user" && Array.isArray(lastMessage.content) && lastMessage.content.some((content) => content.type === "tool_result");
- if (messages.length >= 3 && endsWithToolResult) {
- const lastThree = messages.slice(-3);
- const hasRecentToolPattern = ((_a = lastThree[0]) == null ? void 0 : _a.role) === "user" && // Initial user message
- ((_b = lastThree[1]) == null ? void 0 : _b.role) === "assistant" && // Assistant tool use
- Array.isArray(lastThree[1].content) && lastThree[1].content.some((content) => content.type === "tool_use") && ((_c = lastThree[2]) == null ? void 0 : _c.role) === "user" && // Tool result
- Array.isArray(lastThree[2].content) && lastThree[2].content.some((content) => content.type === "tool_result");
- return hasRecentToolPattern;
- }
- return endsWithToolResult;
- }
  async process(request) {
  const { threadId, model = this.model, messages: rawMessages, actions, eventSource, forwardedParameters } = request;
  const tools = actions.map(convertActionInputToAnthropicTool);
@@ -1466,22 +1374,18 @@ var AnthropicAdapter = class {
  validToolUseIds.add(message.id);
  }
  }
- const processedToolResultIds = /* @__PURE__ */ new Set();
  const anthropicMessages = messages.map((message) => {
  if (message.isResultMessage()) {
  if (!validToolUseIds.has(message.actionExecutionId)) {
  return null;
  }
- if (processedToolResultIds.has(message.actionExecutionId)) {
- return null;
- }
- processedToolResultIds.add(message.actionExecutionId);
+ validToolUseIds.delete(message.actionExecutionId);
  return {
  role: "user",
  content: [
  {
  type: "tool_result",
- content: message.result || "Action completed successfully",
+ content: message.result,
  tool_use_id: message.actionExecutionId
  }
  ]
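Note: the validToolUseIds bookkeeping above exists because the Anthropic Messages API requires every tool_result block to reference the id of a tool_use block from a preceding assistant turn, so orphaned or duplicate results have to be dropped before the request is sent. A shape sketch of a valid pairing follows; field names follow the Anthropic API, while the tool name, id, and values are illustrative and not taken from this diff.

// Illustrative message shapes only; tool name, id, and values are made up.
const anthropicMessages = [
  { role: "user", content: "What's the weather in Berlin?" },
  {
    role: "assistant",
    content: [
      { type: "tool_use", id: "toolu_abc123", name: "getWeather", input: { city: "Berlin" } },
    ],
  },
  {
    role: "user",
    content: [
      // tool_use_id must match the id of the tool_use block above.
      { type: "tool_result", tool_use_id: "toolu_abc123", content: "18°C and sunny" },
    ],
  },
];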
@@ -1524,16 +1428,14 @@ var AnthropicAdapter = class {
  eventSource.stream(async (eventStream$) => {
  let mode = null;
  let didOutputText = false;
- let currentMessageId = (0, import_shared10.randomId)();
- let currentToolCallId = (0, import_shared10.randomId)();
+ let currentMessageId = (0, import_shared7.randomId)();
+ let currentToolCallId = (0, import_shared7.randomId)();
  let filterThinkingTextBuffer = new FilterThinkingTextBuffer();
- let hasReceivedContent = false;
  try {
  for await (const chunk of stream) {
  if (chunk.type === "message_start") {
  currentMessageId = chunk.message.id;
  } else if (chunk.type === "content_block_start") {
- hasReceivedContent = true;
  if (chunk.content_block.type === "text") {
  didOutputText = false;
  filterThinkingTextBuffer.reset();
@@ -1583,36 +1485,17 @@ var AnthropicAdapter = class {
  }
  }
  } catch (error) {
- throw convertServiceAdapterError(error, "Anthropic");
- }
- if (!hasReceivedContent && this.shouldGenerateFallbackResponse(limitedMessages)) {
- let fallbackContent = "Task completed successfully.";
- const lastMessage = limitedMessages[limitedMessages.length - 1];
- if ((lastMessage == null ? void 0 : lastMessage.role) === "user" && Array.isArray(lastMessage.content)) {
- const toolResult = lastMessage.content.find((c) => c.type === "tool_result");
- if ((toolResult == null ? void 0 : toolResult.content) && toolResult.content !== "Action completed successfully") {
- fallbackContent = toolResult.content;
- }
- }
- currentMessageId = (0, import_shared10.randomId)();
- eventStream$.sendTextMessageStart({
- messageId: currentMessageId
- });
- eventStream$.sendTextMessageContent({
- messageId: currentMessageId,
- content: fallbackContent
- });
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
+ console.error("[Anthropic] Error processing stream:", error);
+ throw error;
  }
  eventStream$.complete();
  });
  } catch (error) {
- throw convertServiceAdapterError(error, "Anthropic");
+ console.error("[Anthropic] Error during API call:", error);
+ throw error;
  }
  return {
- threadId: threadId || (0, import_shared10.randomUUID)()
+ threadId: threadId || (0, import_shared7.randomUUID)()
  };
  }
  };
@@ -1652,7 +1535,7 @@ var FilterThinkingTextBuffer = /* @__PURE__ */ __name(class FilterThinkingTextBu

  // src/service-adapters/experimental/ollama/ollama-adapter.ts
  var import_ollama = require("@langchain/community/llms/ollama");
- var import_shared12 = require("@copilotkit/shared");
+ var import_shared8 = require("@copilotkit/shared");
  var DEFAULT_MODEL4 = "llama3:latest";
  var ExperimentalOllamaAdapter = class {
  model;
@@ -1671,7 +1554,7 @@ var ExperimentalOllamaAdapter = class {
  const contents = messages.filter((m) => m.isTextMessage()).map((m) => m.content);
  const _stream = await ollama.stream(contents);
  eventSource.stream(async (eventStream$) => {
- const currentMessageId = (0, import_shared12.randomId)();
+ const currentMessageId = (0, import_shared8.randomId)();
  eventStream$.sendTextMessageStart({
  messageId: currentMessageId
  });
@@ -1687,7 +1570,7 @@ var ExperimentalOllamaAdapter = class {
  eventStream$.complete();
  });
  return {
- threadId: request.threadId || (0, import_shared12.randomUUID)()
+ threadId: request.threadId || (0, import_shared8.randomUUID)()
  };
  }
  };
@@ -1715,11 +1598,11 @@ var BedrockAdapter = class extends LangChainAdapter {
  __name(BedrockAdapter, "BedrockAdapter");

  // src/service-adapters/empty/empty-adapter.ts
- var import_shared13 = require("@copilotkit/shared");
+ var import_shared9 = require("@copilotkit/shared");
  var EmptyAdapter = class {
  async process(request) {
  return {
- threadId: request.threadId || (0, import_shared13.randomUUID)()
+ threadId: request.threadId || (0, import_shared9.randomUUID)()
  };
  }
  };
@@ -1738,7 +1621,6 @@ var ExperimentalEmptyAdapter = EmptyAdapter;
  OpenAIAdapter,
  OpenAIAssistantAdapter,
  RemoteChain,
- UnifyAdapter,
- convertServiceAdapterError
+ UnifyAdapter
  });
  //# sourceMappingURL=index.js.map