@copilotkit/runtime 1.8.12-next.2 → 1.8.12-next.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/CHANGELOG.md +13 -0
  2. package/dist/{chunk-OZLQ2A5E.mjs → chunk-FA3E4I4W.mjs} +4 -3
  3. package/dist/chunk-FA3E4I4W.mjs.map +1 -0
  4. package/dist/{chunk-FDGTTGQU.mjs → chunk-KGZF7KSR.mjs} +2 -2
  5. package/dist/{chunk-VQSVMSXZ.mjs → chunk-MG576PIZ.mjs} +2 -2
  6. package/dist/{chunk-Y4H3U52G.mjs → chunk-MVKCCH5U.mjs} +216 -173
  7. package/dist/chunk-MVKCCH5U.mjs.map +1 -0
  8. package/dist/{chunk-V6IQU4D2.mjs → chunk-S5U6J5X2.mjs} +2 -2
  9. package/dist/index.js +217 -173
  10. package/dist/index.js.map +1 -1
  11. package/dist/index.mjs +5 -5
  12. package/dist/lib/index.js +109 -82
  13. package/dist/lib/index.js.map +1 -1
  14. package/dist/lib/index.mjs +5 -5
  15. package/dist/lib/integrations/index.js +2 -1
  16. package/dist/lib/integrations/index.js.map +1 -1
  17. package/dist/lib/integrations/index.mjs +5 -5
  18. package/dist/lib/integrations/nest/index.js +2 -1
  19. package/dist/lib/integrations/nest/index.js.map +1 -1
  20. package/dist/lib/integrations/nest/index.mjs +3 -3
  21. package/dist/lib/integrations/node-express/index.js +2 -1
  22. package/dist/lib/integrations/node-express/index.js.map +1 -1
  23. package/dist/lib/integrations/node-express/index.mjs +3 -3
  24. package/dist/lib/integrations/node-http/index.js +2 -1
  25. package/dist/lib/integrations/node-http/index.js.map +1 -1
  26. package/dist/lib/integrations/node-http/index.mjs +2 -2
  27. package/dist/service-adapters/index.js +215 -172
  28. package/dist/service-adapters/index.js.map +1 -1
  29. package/dist/service-adapters/index.mjs +1 -1
  30. package/jest.config.js +8 -3
  31. package/package.json +3 -2
  32. package/src/service-adapters/anthropic/anthropic-adapter.ts +124 -66
  33. package/src/service-adapters/anthropic/utils.ts +0 -19
  34. package/src/service-adapters/openai/openai-adapter.ts +107 -69
  35. package/tests/global.d.ts +13 -0
  36. package/tests/service-adapters/anthropic/allowlist-approach.test.ts +226 -0
  37. package/tests/service-adapters/anthropic/anthropic-adapter.test.ts +604 -0
  38. package/tests/service-adapters/openai/allowlist-approach.test.ts +238 -0
  39. package/tests/service-adapters/openai/openai-adapter.test.ts +301 -0
  40. package/tests/setup.jest.ts +21 -0
  41. package/tests/tsconfig.json +10 -0
  42. package/tsconfig.json +1 -1
  43. package/dist/chunk-OZLQ2A5E.mjs.map +0 -1
  44. package/dist/chunk-Y4H3U52G.mjs.map +0 -1
  45. package/dist/{chunk-FDGTTGQU.mjs.map → chunk-KGZF7KSR.mjs.map} +0 -0
  46. package/dist/{chunk-VQSVMSXZ.mjs.map → chunk-MG576PIZ.mjs.map} +0 -0
  47. package/dist/{chunk-V6IQU4D2.mjs.map → chunk-S5U6J5X2.mjs.map} +0 -0
package/dist/index.js CHANGED
@@ -44,7 +44,7 @@ var require_package = __commonJS({
44
44
  publishConfig: {
45
45
  access: "public"
46
46
  },
47
- version: "1.8.12-next.2",
47
+ version: "1.8.12-next.4",
48
48
  sideEffects: false,
49
49
  main: "./dist/index.js",
50
50
  module: "./dist/index.mjs",
@@ -64,6 +64,7 @@ var require_package = __commonJS({
64
64
  "unlink:global": "pnpm unlink --global"
65
65
  },
66
66
  devDependencies: {
67
+ "@jest/globals": "^29.7.0",
67
68
  "@swc/core": "1.5.28",
68
69
  "@types/express": "^4.17.21",
69
70
  "@types/jest": "^29.5.12",
@@ -374,7 +375,23 @@ var OpenAIAdapter = class {
374
375
  const { threadId: threadIdFromRequest, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
375
376
  const tools = actions.map(convertActionInputToOpenAITool);
376
377
  const threadId = threadIdFromRequest ?? (0, import_shared2.randomUUID)();
377
- let openaiMessages = messages.map((m) => convertMessageToOpenAIMessage(m, {
378
+ const validToolUseIds = /* @__PURE__ */ new Set();
379
+ for (const message of messages) {
380
+ if (message.isActionExecutionMessage()) {
381
+ validToolUseIds.add(message.id);
382
+ }
383
+ }
384
+ const filteredMessages = messages.filter((message) => {
385
+ if (message.isResultMessage()) {
386
+ if (!validToolUseIds.has(message.actionExecutionId)) {
387
+ return false;
388
+ }
389
+ validToolUseIds.delete(message.actionExecutionId);
390
+ return true;
391
+ }
392
+ return true;
393
+ });
394
+ let openaiMessages = filteredMessages.map((m) => convertMessageToOpenAIMessage(m, {
378
395
  keepSystemRole: this.keepSystemRole
379
396
  }));
380
397
  openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);
@@ -387,91 +404,101 @@ var OpenAIAdapter = class {
387
404
  }
388
405
  };
389
406
  }
390
- const stream = this.openai.beta.chat.completions.stream({
391
- model,
392
- stream: true,
393
- messages: openaiMessages,
394
- ...tools.length > 0 && {
395
- tools
396
- },
397
- ...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
398
- max_tokens: forwardedParameters.maxTokens
399
- },
400
- ...(forwardedParameters == null ? void 0 : forwardedParameters.stop) && {
401
- stop: forwardedParameters.stop
402
- },
403
- ...toolChoice && {
404
- tool_choice: toolChoice
405
- },
406
- ...this.disableParallelToolCalls && {
407
- parallel_tool_calls: false
408
- },
409
- ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
410
- temperature: forwardedParameters.temperature
411
- }
412
- });
413
- eventSource.stream(async (eventStream$) => {
414
- var _a, _b;
415
- let mode = null;
416
- let currentMessageId;
417
- let currentToolCallId;
418
- for await (const chunk of stream) {
419
- if (chunk.choices.length === 0) {
420
- continue;
421
- }
422
- const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
423
- const content = chunk.choices[0].delta.content;
424
- if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
425
- mode = null;
426
- eventStream$.sendTextMessageEnd({
427
- messageId: currentMessageId
428
- });
429
- } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
430
- mode = null;
431
- eventStream$.sendActionExecutionEnd({
432
- actionExecutionId: currentToolCallId
433
- });
407
+ try {
408
+ const stream = this.openai.beta.chat.completions.stream({
409
+ model,
410
+ stream: true,
411
+ messages: openaiMessages,
412
+ ...tools.length > 0 && {
413
+ tools
414
+ },
415
+ ...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
416
+ max_tokens: forwardedParameters.maxTokens
417
+ },
418
+ ...(forwardedParameters == null ? void 0 : forwardedParameters.stop) && {
419
+ stop: forwardedParameters.stop
420
+ },
421
+ ...toolChoice && {
422
+ tool_choice: toolChoice
423
+ },
424
+ ...this.disableParallelToolCalls && {
425
+ parallel_tool_calls: false
426
+ },
427
+ ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
428
+ temperature: forwardedParameters.temperature
434
429
  }
435
- if (mode === null) {
436
- if (toolCall == null ? void 0 : toolCall.id) {
437
- mode = "function";
438
- currentToolCallId = toolCall.id;
439
- eventStream$.sendActionExecutionStart({
440
- actionExecutionId: currentToolCallId,
441
- parentMessageId: chunk.id,
442
- actionName: toolCall.function.name
443
- });
444
- } else if (content) {
445
- mode = "message";
446
- currentMessageId = chunk.id;
447
- eventStream$.sendTextMessageStart({
430
+ });
431
+ eventSource.stream(async (eventStream$) => {
432
+ var _a, _b;
433
+ let mode = null;
434
+ let currentMessageId;
435
+ let currentToolCallId;
436
+ try {
437
+ for await (const chunk of stream) {
438
+ if (chunk.choices.length === 0) {
439
+ continue;
440
+ }
441
+ const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
442
+ const content = chunk.choices[0].delta.content;
443
+ if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
444
+ mode = null;
445
+ eventStream$.sendTextMessageEnd({
446
+ messageId: currentMessageId
447
+ });
448
+ } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
449
+ mode = null;
450
+ eventStream$.sendActionExecutionEnd({
451
+ actionExecutionId: currentToolCallId
452
+ });
453
+ }
454
+ if (mode === null) {
455
+ if (toolCall == null ? void 0 : toolCall.id) {
456
+ mode = "function";
457
+ currentToolCallId = toolCall.id;
458
+ eventStream$.sendActionExecutionStart({
459
+ actionExecutionId: currentToolCallId,
460
+ parentMessageId: chunk.id,
461
+ actionName: toolCall.function.name
462
+ });
463
+ } else if (content) {
464
+ mode = "message";
465
+ currentMessageId = chunk.id;
466
+ eventStream$.sendTextMessageStart({
467
+ messageId: currentMessageId
468
+ });
469
+ }
470
+ }
471
+ if (mode === "message" && content) {
472
+ eventStream$.sendTextMessageContent({
473
+ messageId: currentMessageId,
474
+ content
475
+ });
476
+ } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
477
+ eventStream$.sendActionExecutionArgs({
478
+ actionExecutionId: currentToolCallId,
479
+ args: toolCall.function.arguments
480
+ });
481
+ }
482
+ }
483
+ if (mode === "message") {
484
+ eventStream$.sendTextMessageEnd({
448
485
  messageId: currentMessageId
449
486
  });
487
+ } else if (mode === "function") {
488
+ eventStream$.sendActionExecutionEnd({
489
+ actionExecutionId: currentToolCallId
490
+ });
450
491
  }
492
+ } catch (error) {
493
+ console.error("[OpenAI] Error processing stream:", error);
494
+ throw error;
451
495
  }
452
- if (mode === "message" && content) {
453
- eventStream$.sendTextMessageContent({
454
- messageId: currentMessageId,
455
- content
456
- });
457
- } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
458
- eventStream$.sendActionExecutionArgs({
459
- actionExecutionId: currentToolCallId,
460
- args: toolCall.function.arguments
461
- });
462
- }
463
- }
464
- if (mode === "message") {
465
- eventStream$.sendTextMessageEnd({
466
- messageId: currentMessageId
467
- });
468
- } else if (mode === "function") {
469
- eventStream$.sendActionExecutionEnd({
470
- actionExecutionId: currentToolCallId
471
- });
472
- }
473
- eventStream$.complete();
474
- });
496
+ eventStream$.complete();
497
+ });
498
+ } catch (error) {
499
+ console.error("[OpenAI] Error during API call:", error);
500
+ throw error;
501
+ }
475
502
  return {
476
503
  threadId
477
504
  };
@@ -4361,23 +4388,6 @@ function convertMessageToAnthropicMessage(message) {
4361
4388
  }
4362
4389
  }
4363
4390
  __name(convertMessageToAnthropicMessage, "convertMessageToAnthropicMessage");
4364
- function groupAnthropicMessagesByRole(messageParams) {
4365
- return messageParams.reduce((acc, message) => {
4366
- const lastGroup = acc[acc.length - 1];
4367
- if (lastGroup && lastGroup.role === message.role) {
4368
- lastGroup.content = lastGroup.content.concat(message.content);
4369
- } else {
4370
- acc.push({
4371
- role: message.role,
4372
- content: [
4373
- ...message.content
4374
- ]
4375
- });
4376
- }
4377
- return acc;
4378
- }, []);
4379
- }
4380
- __name(groupAnthropicMessagesByRole, "groupAnthropicMessagesByRole");
4381
4391
 
4382
4392
  // src/service-adapters/anthropic/anthropic-adapter.ts
4383
4393
  var import_shared16 = require("@copilotkit/shared");
@@ -4402,9 +4412,32 @@ var AnthropicAdapter = class {
4402
4412
  ];
4403
4413
  const instructionsMessage = messages.shift();
4404
4414
  const instructions = instructionsMessage.isTextMessage() ? instructionsMessage.content : "";
4405
- let anthropicMessages = messages.map(convertMessageToAnthropicMessage);
4406
- anthropicMessages = limitMessagesToTokenCount2(anthropicMessages, tools, model);
4407
- anthropicMessages = groupAnthropicMessagesByRole(anthropicMessages);
4415
+ const validToolUseIds = /* @__PURE__ */ new Set();
4416
+ for (const message of messages) {
4417
+ if (message.isActionExecutionMessage()) {
4418
+ validToolUseIds.add(message.id);
4419
+ }
4420
+ }
4421
+ const anthropicMessages = messages.map((message) => {
4422
+ if (message.isResultMessage()) {
4423
+ if (!validToolUseIds.has(message.actionExecutionId)) {
4424
+ return null;
4425
+ }
4426
+ validToolUseIds.delete(message.actionExecutionId);
4427
+ return {
4428
+ role: "user",
4429
+ content: [
4430
+ {
4431
+ type: "tool_result",
4432
+ content: message.result,
4433
+ tool_use_id: message.actionExecutionId
4434
+ }
4435
+ ]
4436
+ };
4437
+ }
4438
+ return convertMessageToAnthropicMessage(message);
4439
+ }).filter(Boolean);
4440
+ const limitedMessages = limitMessagesToTokenCount2(anthropicMessages, tools, model);
4408
4441
  let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
4409
4442
  if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {
4410
4443
  toolChoice = {
@@ -4412,82 +4445,93 @@ var AnthropicAdapter = class {
4412
4445
  name: forwardedParameters.toolChoiceFunctionName
4413
4446
  };
4414
4447
  }
4415
- const stream = this.anthropic.messages.create({
4416
- system: instructions,
4417
- model: this.model,
4418
- messages: anthropicMessages,
4419
- max_tokens: (forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) || 1024,
4420
- ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) ? {
4421
- temperature: forwardedParameters.temperature
4422
- } : {},
4423
- ...tools.length > 0 && {
4424
- tools
4425
- },
4426
- ...toolChoice && {
4427
- tool_choice: toolChoice
4428
- },
4429
- stream: true
4430
- });
4431
- eventSource.stream(async (eventStream$) => {
4432
- let mode = null;
4433
- let didOutputText = false;
4434
- let currentMessageId = (0, import_shared16.randomId)();
4435
- let currentToolCallId = (0, import_shared16.randomId)();
4436
- let filterThinkingTextBuffer = new FilterThinkingTextBuffer();
4437
- for await (const chunk of await stream) {
4438
- if (chunk.type === "message_start") {
4439
- currentMessageId = chunk.message.id;
4440
- } else if (chunk.type === "content_block_start") {
4441
- if (chunk.content_block.type === "text") {
4442
- didOutputText = false;
4443
- filterThinkingTextBuffer.reset();
4444
- mode = "message";
4445
- } else if (chunk.content_block.type === "tool_use") {
4446
- currentToolCallId = chunk.content_block.id;
4447
- eventStream$.sendActionExecutionStart({
4448
- actionExecutionId: currentToolCallId,
4449
- actionName: chunk.content_block.name,
4450
- parentMessageId: currentMessageId
4451
- });
4452
- mode = "function";
4453
- }
4454
- } else if (chunk.type === "content_block_delta") {
4455
- if (chunk.delta.type === "text_delta") {
4456
- const text = filterThinkingTextBuffer.onTextChunk(chunk.delta.text);
4457
- if (text.length > 0) {
4458
- if (!didOutputText) {
4459
- eventStream$.sendTextMessageStart({
4460
- messageId: currentMessageId
4448
+ try {
4449
+ const createParams = {
4450
+ system: instructions,
4451
+ model: this.model,
4452
+ messages: limitedMessages,
4453
+ max_tokens: (forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) || 1024,
4454
+ ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) ? {
4455
+ temperature: forwardedParameters.temperature
4456
+ } : {},
4457
+ ...tools.length > 0 && {
4458
+ tools
4459
+ },
4460
+ ...toolChoice && {
4461
+ tool_choice: toolChoice
4462
+ },
4463
+ stream: true
4464
+ };
4465
+ const stream = await this.anthropic.messages.create(createParams);
4466
+ eventSource.stream(async (eventStream$) => {
4467
+ let mode = null;
4468
+ let didOutputText = false;
4469
+ let currentMessageId = (0, import_shared16.randomId)();
4470
+ let currentToolCallId = (0, import_shared16.randomId)();
4471
+ let filterThinkingTextBuffer = new FilterThinkingTextBuffer();
4472
+ try {
4473
+ for await (const chunk of stream) {
4474
+ if (chunk.type === "message_start") {
4475
+ currentMessageId = chunk.message.id;
4476
+ } else if (chunk.type === "content_block_start") {
4477
+ if (chunk.content_block.type === "text") {
4478
+ didOutputText = false;
4479
+ filterThinkingTextBuffer.reset();
4480
+ mode = "message";
4481
+ } else if (chunk.content_block.type === "tool_use") {
4482
+ currentToolCallId = chunk.content_block.id;
4483
+ eventStream$.sendActionExecutionStart({
4484
+ actionExecutionId: currentToolCallId,
4485
+ actionName: chunk.content_block.name,
4486
+ parentMessageId: currentMessageId
4487
+ });
4488
+ mode = "function";
4489
+ }
4490
+ } else if (chunk.type === "content_block_delta") {
4491
+ if (chunk.delta.type === "text_delta") {
4492
+ const text = filterThinkingTextBuffer.onTextChunk(chunk.delta.text);
4493
+ if (text.length > 0) {
4494
+ if (!didOutputText) {
4495
+ eventStream$.sendTextMessageStart({
4496
+ messageId: currentMessageId
4497
+ });
4498
+ didOutputText = true;
4499
+ }
4500
+ eventStream$.sendTextMessageContent({
4501
+ messageId: currentMessageId,
4502
+ content: text
4503
+ });
4504
+ }
4505
+ } else if (chunk.delta.type === "input_json_delta") {
4506
+ eventStream$.sendActionExecutionArgs({
4507
+ actionExecutionId: currentToolCallId,
4508
+ args: chunk.delta.partial_json
4509
+ });
4510
+ }
4511
+ } else if (chunk.type === "content_block_stop") {
4512
+ if (mode === "message") {
4513
+ if (didOutputText) {
4514
+ eventStream$.sendTextMessageEnd({
4515
+ messageId: currentMessageId
4516
+ });
4517
+ }
4518
+ } else if (mode === "function") {
4519
+ eventStream$.sendActionExecutionEnd({
4520
+ actionExecutionId: currentToolCallId
4461
4521
  });
4462
- didOutputText = true;
4463
4522
  }
4464
- eventStream$.sendTextMessageContent({
4465
- messageId: currentMessageId,
4466
- content: text
4467
- });
4468
- }
4469
- } else if (chunk.delta.type === "input_json_delta") {
4470
- eventStream$.sendActionExecutionArgs({
4471
- actionExecutionId: currentToolCallId,
4472
- args: chunk.delta.partial_json
4473
- });
4474
- }
4475
- } else if (chunk.type === "content_block_stop") {
4476
- if (mode === "message") {
4477
- if (didOutputText) {
4478
- eventStream$.sendTextMessageEnd({
4479
- messageId: currentMessageId
4480
- });
4481
4523
  }
4482
- } else if (mode === "function") {
4483
- eventStream$.sendActionExecutionEnd({
4484
- actionExecutionId: currentToolCallId
4485
- });
4486
4524
  }
4525
+ } catch (error) {
4526
+ console.error("[Anthropic] Error processing stream:", error);
4527
+ throw error;
4487
4528
  }
4488
- }
4489
- eventStream$.complete();
4490
- });
4529
+ eventStream$.complete();
4530
+ });
4531
+ } catch (error) {
4532
+ console.error("[Anthropic] Error during API call:", error);
4533
+ throw error;
4534
+ }
4491
4535
  return {
4492
4536
  threadId: threadId || (0, import_shared16.randomUUID)()
4493
4537
  };