@copilotkit/runtime 1.8.12-next.2 → 1.8.12-next.4

This diff shows the contents of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (47)
  1. package/CHANGELOG.md +13 -0
  2. package/dist/{chunk-OZLQ2A5E.mjs → chunk-FA3E4I4W.mjs} +4 -3
  3. package/dist/chunk-FA3E4I4W.mjs.map +1 -0
  4. package/dist/{chunk-FDGTTGQU.mjs → chunk-KGZF7KSR.mjs} +2 -2
  5. package/dist/{chunk-VQSVMSXZ.mjs → chunk-MG576PIZ.mjs} +2 -2
  6. package/dist/{chunk-Y4H3U52G.mjs → chunk-MVKCCH5U.mjs} +216 -173
  7. package/dist/chunk-MVKCCH5U.mjs.map +1 -0
  8. package/dist/{chunk-V6IQU4D2.mjs → chunk-S5U6J5X2.mjs} +2 -2
  9. package/dist/index.js +217 -173
  10. package/dist/index.js.map +1 -1
  11. package/dist/index.mjs +5 -5
  12. package/dist/lib/index.js +109 -82
  13. package/dist/lib/index.js.map +1 -1
  14. package/dist/lib/index.mjs +5 -5
  15. package/dist/lib/integrations/index.js +2 -1
  16. package/dist/lib/integrations/index.js.map +1 -1
  17. package/dist/lib/integrations/index.mjs +5 -5
  18. package/dist/lib/integrations/nest/index.js +2 -1
  19. package/dist/lib/integrations/nest/index.js.map +1 -1
  20. package/dist/lib/integrations/nest/index.mjs +3 -3
  21. package/dist/lib/integrations/node-express/index.js +2 -1
  22. package/dist/lib/integrations/node-express/index.js.map +1 -1
  23. package/dist/lib/integrations/node-express/index.mjs +3 -3
  24. package/dist/lib/integrations/node-http/index.js +2 -1
  25. package/dist/lib/integrations/node-http/index.js.map +1 -1
  26. package/dist/lib/integrations/node-http/index.mjs +2 -2
  27. package/dist/service-adapters/index.js +215 -172
  28. package/dist/service-adapters/index.js.map +1 -1
  29. package/dist/service-adapters/index.mjs +1 -1
  30. package/jest.config.js +8 -3
  31. package/package.json +3 -2
  32. package/src/service-adapters/anthropic/anthropic-adapter.ts +124 -66
  33. package/src/service-adapters/anthropic/utils.ts +0 -19
  34. package/src/service-adapters/openai/openai-adapter.ts +107 -69
  35. package/tests/global.d.ts +13 -0
  36. package/tests/service-adapters/anthropic/allowlist-approach.test.ts +226 -0
  37. package/tests/service-adapters/anthropic/anthropic-adapter.test.ts +604 -0
  38. package/tests/service-adapters/openai/allowlist-approach.test.ts +238 -0
  39. package/tests/service-adapters/openai/openai-adapter.test.ts +301 -0
  40. package/tests/setup.jest.ts +21 -0
  41. package/tests/tsconfig.json +10 -0
  42. package/tsconfig.json +1 -1
  43. package/dist/chunk-OZLQ2A5E.mjs.map +0 -1
  44. package/dist/chunk-Y4H3U52G.mjs.map +0 -1
  45. package/dist/{chunk-FDGTTGQU.mjs.map → chunk-KGZF7KSR.mjs.map} +0 -0
  46. package/dist/{chunk-VQSVMSXZ.mjs.map → chunk-MG576PIZ.mjs.map} +0 -0
  47. package/dist/{chunk-V6IQU4D2.mjs.map → chunk-S5U6J5X2.mjs.map} +0 -0
@@ -1,7 +1,7 @@
  import {
    copilotRuntimeNodeHttpEndpoint
- } from "../../../chunk-OZLQ2A5E.mjs";
- import "../../../chunk-Y4H3U52G.mjs";
+ } from "../../../chunk-FA3E4I4W.mjs";
+ import "../../../chunk-MVKCCH5U.mjs";
  import "../../../chunk-5BIEM2UU.mjs";
  import "../../../chunk-SHBDMA63.mjs";
  import "../../../chunk-2OZAGFV3.mjs";
@@ -330,7 +330,23 @@ var OpenAIAdapter = class {
      const { threadId: threadIdFromRequest, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
      const tools = actions.map(convertActionInputToOpenAITool);
      const threadId = threadIdFromRequest ?? (0, import_shared2.randomUUID)();
-     let openaiMessages = messages.map((m) => convertMessageToOpenAIMessage(m, {
+     const validToolUseIds = /* @__PURE__ */ new Set();
+     for (const message of messages) {
+       if (message.isActionExecutionMessage()) {
+         validToolUseIds.add(message.id);
+       }
+     }
+     const filteredMessages = messages.filter((message) => {
+       if (message.isResultMessage()) {
+         if (!validToolUseIds.has(message.actionExecutionId)) {
+           return false;
+         }
+         validToolUseIds.delete(message.actionExecutionId);
+         return true;
+       }
+       return true;
+     });
+     let openaiMessages = filteredMessages.map((m) => convertMessageToOpenAIMessage(m, {
        keepSystemRole: this.keepSystemRole
      }));
      openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);
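
Note: the block above is the substantive change in this release. Before converting messages for OpenAI, the adapter now builds an allowlist of action-execution message ids and drops any tool-result message that either answers an unknown id or repeats an id that was already answered. A minimal standalone sketch of that logic, with a hypothetical Msg type standing in for the runtime's message classes (which expose isActionExecutionMessage()/isResultMessage() instead):

// Hypothetical message shape, for illustration only.
type Msg =
  | { kind: "action"; id: string }
  | { kind: "result"; actionExecutionId: string }
  | { kind: "text"; content: string };

function dropOrphanedResults(messages: Msg[]): Msg[] {
  // Collect every action-execution id seen in the conversation.
  const validToolUseIds = new Set<string>();
  for (const m of messages) {
    if (m.kind === "action") validToolUseIds.add(m.id);
  }
  return messages.filter((m) => {
    if (m.kind !== "result") return true;
    // Drop results that answer an id no action ever produced ...
    if (!validToolUseIds.has(m.actionExecutionId)) return false;
    // ... and allow at most one result per id.
    validToolUseIds.delete(m.actionExecutionId);
    return true;
  });
}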
@@ -343,91 +359,101 @@ var OpenAIAdapter = class {
        }
      };
    }
-     const stream = this.openai.beta.chat.completions.stream({
-       model,
-       stream: true,
-       messages: openaiMessages,
-       ...tools.length > 0 && {
-         tools
-       },
-       ...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
-         max_tokens: forwardedParameters.maxTokens
-       },
-       ...(forwardedParameters == null ? void 0 : forwardedParameters.stop) && {
-         stop: forwardedParameters.stop
-       },
-       ...toolChoice && {
-         tool_choice: toolChoice
-       },
-       ...this.disableParallelToolCalls && {
-         parallel_tool_calls: false
-       },
-       ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
-         temperature: forwardedParameters.temperature
-       }
-     });
-     eventSource.stream(async (eventStream$) => {
-       var _a, _b;
-       let mode = null;
-       let currentMessageId;
-       let currentToolCallId;
-       for await (const chunk of stream) {
-         if (chunk.choices.length === 0) {
-           continue;
-         }
-         const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
-         const content = chunk.choices[0].delta.content;
-         if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
-           mode = null;
-           eventStream$.sendTextMessageEnd({
-             messageId: currentMessageId
-           });
-         } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
-           mode = null;
-           eventStream$.sendActionExecutionEnd({
-             actionExecutionId: currentToolCallId
-           });
+     try {
+       const stream = this.openai.beta.chat.completions.stream({
+         model,
+         stream: true,
+         messages: openaiMessages,
+         ...tools.length > 0 && {
+           tools
+         },
+         ...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
+           max_tokens: forwardedParameters.maxTokens
+         },
+         ...(forwardedParameters == null ? void 0 : forwardedParameters.stop) && {
+           stop: forwardedParameters.stop
+         },
+         ...toolChoice && {
+           tool_choice: toolChoice
+         },
+         ...this.disableParallelToolCalls && {
+           parallel_tool_calls: false
+         },
+         ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
+           temperature: forwardedParameters.temperature
        }
-       if (mode === null) {
-         if (toolCall == null ? void 0 : toolCall.id) {
-           mode = "function";
-           currentToolCallId = toolCall.id;
-           eventStream$.sendActionExecutionStart({
-             actionExecutionId: currentToolCallId,
-             parentMessageId: chunk.id,
-             actionName: toolCall.function.name
-           });
-         } else if (content) {
-           mode = "message";
-           currentMessageId = chunk.id;
-           eventStream$.sendTextMessageStart({
+       });
+       eventSource.stream(async (eventStream$) => {
+         var _a, _b;
+         let mode = null;
+         let currentMessageId;
+         let currentToolCallId;
+         try {
+           for await (const chunk of stream) {
+             if (chunk.choices.length === 0) {
+               continue;
+             }
+             const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
+             const content = chunk.choices[0].delta.content;
+             if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
+               mode = null;
+               eventStream$.sendTextMessageEnd({
+                 messageId: currentMessageId
+               });
+             } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
+               mode = null;
+               eventStream$.sendActionExecutionEnd({
+                 actionExecutionId: currentToolCallId
+               });
+             }
+             if (mode === null) {
+               if (toolCall == null ? void 0 : toolCall.id) {
+                 mode = "function";
+                 currentToolCallId = toolCall.id;
+                 eventStream$.sendActionExecutionStart({
+                   actionExecutionId: currentToolCallId,
+                   parentMessageId: chunk.id,
+                   actionName: toolCall.function.name
+                 });
+               } else if (content) {
+                 mode = "message";
+                 currentMessageId = chunk.id;
+                 eventStream$.sendTextMessageStart({
+                   messageId: currentMessageId
+                 });
+               }
+             }
+             if (mode === "message" && content) {
+               eventStream$.sendTextMessageContent({
+                 messageId: currentMessageId,
+                 content
+               });
+             } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
+               eventStream$.sendActionExecutionArgs({
+                 actionExecutionId: currentToolCallId,
+                 args: toolCall.function.arguments
+               });
+             }
+           }
+           if (mode === "message") {
+             eventStream$.sendTextMessageEnd({
              messageId: currentMessageId
            });
+           } else if (mode === "function") {
+             eventStream$.sendActionExecutionEnd({
+               actionExecutionId: currentToolCallId
+             });
          }
+         } catch (error) {
+           console.error("[OpenAI] Error processing stream:", error);
+           throw error;
        }
-       if (mode === "message" && content) {
-         eventStream$.sendTextMessageContent({
-           messageId: currentMessageId,
-           content
-         });
-       } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
-         eventStream$.sendActionExecutionArgs({
-           actionExecutionId: currentToolCallId,
-           args: toolCall.function.arguments
-         });
-       }
-     }
-     if (mode === "message") {
-       eventStream$.sendTextMessageEnd({
-         messageId: currentMessageId
-       });
-     } else if (mode === "function") {
-       eventStream$.sendActionExecutionEnd({
-         actionExecutionId: currentToolCallId
-       });
-     }
-     eventStream$.complete();
-   });
+         eventStream$.complete();
+       });
+     } catch (error) {
+       console.error("[OpenAI] Error during API call:", error);
+       throw error;
+     }
      return {
        threadId
      };
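
Note: most of the churn in the hunk above is an indentation shift from two new try/catch layers: an outer one around building the request and opening the completion stream, and an inner one around chunk processing inside the eventSource.stream callback. Both log with an adapter-specific prefix and rethrow, so failures are surfaced instead of silently ending the stream. The shape, reduced to a sketch (start/onChunk/complete are placeholders for this illustration, not runtime APIs):

function streamWithSurfacedErrors(
  start: () => AsyncIterable<unknown>,  // e.g. opening the completions stream
  onChunk: (chunk: unknown) => void,    // per-chunk event emission
  complete: () => void,                 // e.g. eventStream$.complete()
): void {
  try {
    const stream = start(); // request/parameter errors surface here
    void (async () => {
      try {
        for await (const chunk of stream) onChunk(chunk); // mid-stream errors here
      } catch (error) {
        console.error("[Adapter] Error processing stream:", error);
        throw error; // rethrow rather than swallow
      }
      complete(); // only reached on a clean stream
    })();
  } catch (error) {
    console.error("[Adapter] Error during API call:", error);
    throw error;
  }
}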
@@ -1310,23 +1336,6 @@ function convertMessageToAnthropicMessage(message) {
      }
    }
  __name(convertMessageToAnthropicMessage, "convertMessageToAnthropicMessage");
- function groupAnthropicMessagesByRole(messageParams) {
-   return messageParams.reduce((acc, message) => {
-     const lastGroup = acc[acc.length - 1];
-     if (lastGroup && lastGroup.role === message.role) {
-       lastGroup.content = lastGroup.content.concat(message.content);
-     } else {
-       acc.push({
-         role: message.role,
-         content: [
-           ...message.content
-         ]
-       });
-     }
-     return acc;
-   }, []);
- }
- __name(groupAnthropicMessagesByRole, "groupAnthropicMessagesByRole");

  // src/service-adapters/anthropic/anthropic-adapter.ts
  var import_shared7 = require("@copilotkit/shared");
@@ -1351,9 +1360,32 @@ var AnthropicAdapter = class {
      ];
      const instructionsMessage = messages.shift();
      const instructions = instructionsMessage.isTextMessage() ? instructionsMessage.content : "";
-     let anthropicMessages = messages.map(convertMessageToAnthropicMessage);
-     anthropicMessages = limitMessagesToTokenCount2(anthropicMessages, tools, model);
-     anthropicMessages = groupAnthropicMessagesByRole(anthropicMessages);
+     const validToolUseIds = /* @__PURE__ */ new Set();
+     for (const message of messages) {
+       if (message.isActionExecutionMessage()) {
+         validToolUseIds.add(message.id);
+       }
+     }
+     const anthropicMessages = messages.map((message) => {
+       if (message.isResultMessage()) {
+         if (!validToolUseIds.has(message.actionExecutionId)) {
+           return null;
+         }
+         validToolUseIds.delete(message.actionExecutionId);
+         return {
+           role: "user",
+           content: [
+             {
+               type: "tool_result",
+               content: message.result,
+               tool_use_id: message.actionExecutionId
+             }
+           ]
+         };
+       }
+       return convertMessageToAnthropicMessage(message);
+     }).filter(Boolean);
+     const limitedMessages = limitMessagesToTokenCount2(anthropicMessages, tools, model);
      let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
      if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {
        toolChoice = {
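
Note: the Anthropic adapter applies the same allowlist during mapping rather than as a separate filter pass: orphaned results map to null and are removed by .filter(Boolean), while each surviving result is emitted directly as a user-role tool_result block. Token limiting now runs on the already-mapped list (limitedMessages), and the groupAnthropicMessagesByRole pass is gone, as the removal hunk above shows. The emitted shape, as a typed sketch (local stand-in type for illustration; the field names follow Anthropic's Messages API):

// Stand-in for Anthropic's MessageParam, limited to the fields used in the diff.
type ToolResultMessage = {
  role: "user";
  content: Array<{
    type: "tool_result";
    tool_use_id: string;
    content: string;
  }>;
};

function toToolResultMessage(result: string, toolUseId: string): ToolResultMessage {
  return {
    role: "user",
    content: [
      { type: "tool_result", tool_use_id: toolUseId, content: result },
    ],
  };
}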
@@ -1361,82 +1393,93 @@ var AnthropicAdapter = class {
        name: forwardedParameters.toolChoiceFunctionName
      };
    }
-     const stream = this.anthropic.messages.create({
-       system: instructions,
-       model: this.model,
-       messages: anthropicMessages,
-       max_tokens: (forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) || 1024,
-       ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) ? {
-         temperature: forwardedParameters.temperature
-       } : {},
-       ...tools.length > 0 && {
-         tools
-       },
-       ...toolChoice && {
-         tool_choice: toolChoice
-       },
-       stream: true
-     });
-     eventSource.stream(async (eventStream$) => {
-       let mode = null;
-       let didOutputText = false;
-       let currentMessageId = (0, import_shared7.randomId)();
-       let currentToolCallId = (0, import_shared7.randomId)();
-       let filterThinkingTextBuffer = new FilterThinkingTextBuffer();
-       for await (const chunk of await stream) {
-         if (chunk.type === "message_start") {
-           currentMessageId = chunk.message.id;
-         } else if (chunk.type === "content_block_start") {
-           if (chunk.content_block.type === "text") {
-             didOutputText = false;
-             filterThinkingTextBuffer.reset();
-             mode = "message";
-           } else if (chunk.content_block.type === "tool_use") {
-             currentToolCallId = chunk.content_block.id;
-             eventStream$.sendActionExecutionStart({
-               actionExecutionId: currentToolCallId,
-               actionName: chunk.content_block.name,
-               parentMessageId: currentMessageId
-             });
-             mode = "function";
-           }
-         } else if (chunk.type === "content_block_delta") {
-           if (chunk.delta.type === "text_delta") {
-             const text = filterThinkingTextBuffer.onTextChunk(chunk.delta.text);
-             if (text.length > 0) {
-               if (!didOutputText) {
-                 eventStream$.sendTextMessageStart({
-                   messageId: currentMessageId
+     try {
+       const createParams = {
+         system: instructions,
+         model: this.model,
+         messages: limitedMessages,
+         max_tokens: (forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) || 1024,
+         ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) ? {
+           temperature: forwardedParameters.temperature
+         } : {},
+         ...tools.length > 0 && {
+           tools
+         },
+         ...toolChoice && {
+           tool_choice: toolChoice
+         },
+         stream: true
+       };
+       const stream = await this.anthropic.messages.create(createParams);
+       eventSource.stream(async (eventStream$) => {
+         let mode = null;
+         let didOutputText = false;
+         let currentMessageId = (0, import_shared7.randomId)();
+         let currentToolCallId = (0, import_shared7.randomId)();
+         let filterThinkingTextBuffer = new FilterThinkingTextBuffer();
+         try {
+           for await (const chunk of stream) {
+             if (chunk.type === "message_start") {
+               currentMessageId = chunk.message.id;
+             } else if (chunk.type === "content_block_start") {
+               if (chunk.content_block.type === "text") {
+                 didOutputText = false;
+                 filterThinkingTextBuffer.reset();
+                 mode = "message";
+               } else if (chunk.content_block.type === "tool_use") {
+                 currentToolCallId = chunk.content_block.id;
+                 eventStream$.sendActionExecutionStart({
+                   actionExecutionId: currentToolCallId,
+                   actionName: chunk.content_block.name,
+                   parentMessageId: currentMessageId
+                 });
+                 mode = "function";
+               }
+             } else if (chunk.type === "content_block_delta") {
+               if (chunk.delta.type === "text_delta") {
+                 const text = filterThinkingTextBuffer.onTextChunk(chunk.delta.text);
+                 if (text.length > 0) {
+                   if (!didOutputText) {
+                     eventStream$.sendTextMessageStart({
+                       messageId: currentMessageId
+                     });
+                     didOutputText = true;
+                   }
+                   eventStream$.sendTextMessageContent({
+                     messageId: currentMessageId,
+                     content: text
+                   });
+                 }
+               } else if (chunk.delta.type === "input_json_delta") {
+                 eventStream$.sendActionExecutionArgs({
+                   actionExecutionId: currentToolCallId,
+                   args: chunk.delta.partial_json
+                 });
+               }
+             } else if (chunk.type === "content_block_stop") {
+               if (mode === "message") {
+                 if (didOutputText) {
+                   eventStream$.sendTextMessageEnd({
+                     messageId: currentMessageId
+                   });
+                 }
+               } else if (mode === "function") {
+                 eventStream$.sendActionExecutionEnd({
+                   actionExecutionId: currentToolCallId
                  });
-                 didOutputText = true;
                }
-               eventStream$.sendTextMessageContent({
-                 messageId: currentMessageId,
-                 content: text
-               });
-             }
-           } else if (chunk.delta.type === "input_json_delta") {
-             eventStream$.sendActionExecutionArgs({
-               actionExecutionId: currentToolCallId,
-               args: chunk.delta.partial_json
-             });
-           }
-         } else if (chunk.type === "content_block_stop") {
-           if (mode === "message") {
-             if (didOutputText) {
-               eventStream$.sendTextMessageEnd({
-                 messageId: currentMessageId
-               });
              }
-           } else if (mode === "function") {
-             eventStream$.sendActionExecutionEnd({
-               actionExecutionId: currentToolCallId
-             });
            }
+         } catch (error) {
+           console.error("[Anthropic] Error processing stream:", error);
+           throw error;
          }
-       }
-       eventStream$.complete();
-     });
+         eventStream$.complete();
+       });
+     } catch (error) {
+       console.error("[Anthropic] Error during API call:", error);
+       throw error;
+     }
      return {
        threadId: threadId || (0, import_shared7.randomUUID)()
      };
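
Note: the new Jest suites in the file list (allowlist-approach.test.ts and the adapter tests for both providers, items 36-40) back this allowlist behavior; their contents are not shown in this diff. A hypothetical case in the same spirit, written against the Msg type and dropOrphanedResults sketch from earlier:

import { describe, expect, it } from "@jest/globals";

describe("orphaned tool-result filtering (illustrative only)", () => {
  it("keeps results that answer a known action id and drops the rest", () => {
    const messages: Msg[] = [
      { kind: "action", id: "call_1" },
      { kind: "result", actionExecutionId: "call_1" }, // kept: matches call_1
      { kind: "result", actionExecutionId: "call_2" }, // dropped: no matching action
      { kind: "result", actionExecutionId: "call_1" }, // dropped: call_1 already answered
    ];
    expect(dropOrphanedResults(messages)).toHaveLength(2);
  });
});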