@copilotkit/runtime 1.6.0-next.5 → 1.6.0-next.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. package/CHANGELOG.md +15 -0
  2. package/__snapshots__/schema/schema.graphql +1 -0
  3. package/dist/{chunk-3C73PW47.mjs → chunk-DV4VS5ER.mjs} +2 -2
  4. package/dist/{chunk-7EXH7PVD.mjs → chunk-FZJAYGIR.mjs} +43 -11
  5. package/dist/chunk-FZJAYGIR.mjs.map +1 -0
  6. package/dist/{chunk-RG3UJM7Q.mjs → chunk-LTUE24TH.mjs} +2 -2
  7. package/dist/{chunk-I6W6FUR5.mjs → chunk-LVJSZOFH.mjs} +2 -2
  8. package/dist/{chunk-F6WKKTYT.mjs → chunk-SAHKR6PK.mjs} +5 -4
  9. package/dist/chunk-SAHKR6PK.mjs.map +1 -0
  10. package/dist/{copilot-runtime-67033bfa.d.ts → copilot-runtime-15bfc4f4.d.ts} +2 -2
  11. package/dist/graphql/types/converted/index.d.ts +1 -1
  12. package/dist/{groq-adapter-9d15c927.d.ts → groq-adapter-fb9aa3ab.d.ts} +1 -1
  13. package/dist/{index-f6d1f30b.d.ts → index-5bec5424.d.ts} +2 -1
  14. package/dist/index.d.ts +4 -4
  15. package/dist/index.js +45 -12
  16. package/dist/index.js.map +1 -1
  17. package/dist/index.mjs +5 -5
  18. package/dist/{langserve-7cc5be48.d.ts → langserve-6f7af8d3.d.ts} +1 -1
  19. package/dist/lib/index.d.ts +4 -4
  20. package/dist/lib/index.js +45 -12
  21. package/dist/lib/index.js.map +1 -1
  22. package/dist/lib/index.mjs +5 -5
  23. package/dist/lib/integrations/index.d.ts +4 -4
  24. package/dist/lib/integrations/index.js +3 -2
  25. package/dist/lib/integrations/index.js.map +1 -1
  26. package/dist/lib/integrations/index.mjs +5 -5
  27. package/dist/lib/integrations/nest/index.d.ts +3 -3
  28. package/dist/lib/integrations/nest/index.js +3 -2
  29. package/dist/lib/integrations/nest/index.js.map +1 -1
  30. package/dist/lib/integrations/nest/index.mjs +3 -3
  31. package/dist/lib/integrations/node-express/index.d.ts +3 -3
  32. package/dist/lib/integrations/node-express/index.js +3 -2
  33. package/dist/lib/integrations/node-express/index.js.map +1 -1
  34. package/dist/lib/integrations/node-express/index.mjs +3 -3
  35. package/dist/lib/integrations/node-http/index.d.ts +3 -3
  36. package/dist/lib/integrations/node-http/index.js +3 -2
  37. package/dist/lib/integrations/node-http/index.js.map +1 -1
  38. package/dist/lib/integrations/node-http/index.mjs +2 -2
  39. package/dist/service-adapters/index.d.ts +4 -4
  40. package/dist/service-adapters/index.js +42 -10
  41. package/dist/service-adapters/index.js.map +1 -1
  42. package/dist/service-adapters/index.mjs +1 -1
  43. package/package.json +3 -3
  44. package/src/graphql/types/enums.ts +1 -0
  45. package/src/service-adapters/groq/groq-adapter.ts +6 -3
  46. package/src/service-adapters/openai/openai-adapter.ts +1 -1
  47. package/src/service-adapters/openai/openai-assistant-adapter.ts +1 -1
  48. package/src/service-adapters/openai/utils.ts +37 -12
  49. package/src/service-adapters/unify/unify-adapter.ts +1 -1
  50. package/dist/chunk-7EXH7PVD.mjs.map +0 -1
  51. package/dist/chunk-F6WKKTYT.mjs.map +0 -1
  52. /package/dist/{chunk-3C73PW47.mjs.map → chunk-DV4VS5ER.mjs.map} +0 -0
  53. /package/dist/{chunk-RG3UJM7Q.mjs.map → chunk-LTUE24TH.mjs.map} +0 -0
  54. /package/dist/{chunk-I6W6FUR5.mjs.map → chunk-LVJSZOFH.mjs.map} +0 -0
@@ -1,7 +1,7 @@
1
1
  import {
2
2
  copilotRuntimeNodeHttpEndpoint
3
- } from "../../../chunk-F6WKKTYT.mjs";
4
- import "../../../chunk-7EXH7PVD.mjs";
3
+ } from "../../../chunk-SAHKR6PK.mjs";
4
+ import "../../../chunk-FZJAYGIR.mjs";
5
5
  import "../../../chunk-5BIEM2UU.mjs";
6
6
  import "../../../chunk-RTFJTJMA.mjs";
7
7
  import "../../../chunk-2OZAGFV3.mjs";
@@ -1,8 +1,8 @@
1
- import { b as CopilotServiceAdapter, C as CopilotRuntimeChatCompletionRequest, a as CopilotRuntimeChatCompletionResponse } from '../langserve-7cc5be48.js';
2
- export { c as RemoteChain, R as RemoteChainParameters } from '../langserve-7cc5be48.js';
3
- export { G as GoogleGenerativeAIAdapter, f as GroqAdapter, e as GroqAdapterParams, L as LangChainAdapter, a as OpenAIAdapter, O as OpenAIAdapterParams, c as OpenAIAssistantAdapter, b as OpenAIAssistantAdapterParams, d as UnifyAdapter, U as UnifyAdapterParams } from '../groq-adapter-9d15c927.js';
1
+ import { b as CopilotServiceAdapter, C as CopilotRuntimeChatCompletionRequest, a as CopilotRuntimeChatCompletionResponse } from '../langserve-6f7af8d3.js';
2
+ export { c as RemoteChain, R as RemoteChainParameters } from '../langserve-6f7af8d3.js';
3
+ export { G as GoogleGenerativeAIAdapter, f as GroqAdapter, e as GroqAdapterParams, L as LangChainAdapter, a as OpenAIAdapter, O as OpenAIAdapterParams, c as OpenAIAssistantAdapter, b as OpenAIAssistantAdapterParams, d as UnifyAdapter, U as UnifyAdapterParams } from '../groq-adapter-fb9aa3ab.js';
4
4
  import Anthropic from '@anthropic-ai/sdk';
5
- import '../index-f6d1f30b.js';
5
+ import '../index-5bec5424.js';
6
6
  import '../graphql/types/base/index.js';
7
7
  import 'rxjs';
8
8
  import '@copilotkit/shared';
@@ -136,7 +136,10 @@ function limitMessagesToTokenCount(messages, tools, model, maxTokens) {
136
136
  }
137
137
  maxTokens -= toolsNumTokens;
138
138
  for (const message of messages) {
139
- if (message.role === "system") {
139
+ if ([
140
+ "system",
141
+ "developer"
142
+ ].includes(message.role)) {
140
143
  const numTokens = countMessageTokens(model, message);
141
144
  maxTokens -= numTokens;
142
145
  if (maxTokens < 0) {
@@ -149,7 +152,10 @@ function limitMessagesToTokenCount(messages, tools, model, maxTokens) {
149
152
  ...messages
150
153
  ].reverse();
151
154
  for (const message of reversedMessages) {
152
- if (message.role === "system") {
155
+ if ([
156
+ "system",
157
+ "developer"
158
+ ].includes(message.role)) {
153
159
  result.unshift(message);
154
160
  continue;
155
161
  } else if (cutoff) {
@@ -172,9 +178,23 @@ function maxTokensForOpenAIModel(model) {
172
178
  __name(maxTokensForOpenAIModel, "maxTokensForOpenAIModel");
173
179
  var DEFAULT_MAX_TOKENS = 128e3;
174
180
  var maxTokensByModel = {
181
+ // o1
182
+ o1: 2e5,
183
+ "o1-2024-12-17": 2e5,
184
+ "o1-mini": 128e3,
185
+ "o1-mini-2024-09-12": 128e3,
186
+ "o1-preview": 128e3,
187
+ "o1-preview-2024-09-12": 128e3,
188
+ // o3-mini
189
+ "o3-mini": 2e5,
190
+ "o3-mini-2025-01-31": 2e5,
175
191
  // GPT-4
176
192
  "gpt-4o": 128e3,
193
+ "chatgpt-4o-latest": 128e3,
194
+ "gpt-4o-2024-08-06": 128e3,
177
195
  "gpt-4o-2024-05-13": 128e3,
196
+ "gpt-4o-mini": 128e3,
197
+ "gpt-4o-mini-2024-07-18": 128e3,
178
198
  "gpt-4-turbo": 128e3,
179
199
  "gpt-4-turbo-2024-04-09": 128e3,
180
200
  "gpt-4-0125-preview": 128e3,
@@ -225,10 +245,17 @@ function convertActionInputToOpenAITool(action) {
225
245
  };
226
246
  }
227
247
  __name(convertActionInputToOpenAITool, "convertActionInputToOpenAITool");
228
- function convertMessageToOpenAIMessage(message) {
248
+ function convertMessageToOpenAIMessage(message, options) {
249
+ const { keepSystemRole } = options || {
250
+ keepSystemRole: false
251
+ };
229
252
  if (message.isTextMessage()) {
253
+ let role = message.role;
254
+ if (message.role === "system" && !keepSystemRole) {
255
+ role = "developer";
256
+ }
230
257
  return {
231
- role: message.role,
258
+ role,
232
259
  content: message.content
233
260
  };
234
261
  } else if (message.isActionExecutionMessage()) {
@@ -257,7 +284,10 @@ __name(convertMessageToOpenAIMessage, "convertMessageToOpenAIMessage");
257
284
  function convertSystemMessageToAssistantAPI(message) {
258
285
  return {
259
286
  ...message,
260
- ...message.role === "system" && {
287
+ ...[
288
+ "system",
289
+ "developer"
290
+ ].includes(message.role) && {
261
291
  role: "assistant",
262
292
  content: "THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: " + message.content
263
293
  }
@@ -286,7 +316,7 @@ var OpenAIAdapter = class {
286
316
  const { threadId: threadIdFromRequest, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
287
317
  const tools = actions.map(convertActionInputToOpenAITool);
288
318
  const threadId = threadIdFromRequest ?? (0, import_shared2.randomUUID)();
289
- let openaiMessages = messages.map(convertMessageToOpenAIMessage);
319
+ let openaiMessages = messages.map((m) => convertMessageToOpenAIMessage(m));
290
320
  openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);
291
321
  let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
292
322
  if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {
@@ -749,7 +779,7 @@ var OpenAIAssistantAdapter = class {
749
779
  ];
750
780
  const instructionsMessage = messages.shift();
751
781
  const instructions = instructionsMessage.isTextMessage() ? instructionsMessage.content : "";
752
- const userMessage = messages.map(convertMessageToOpenAIMessage).map(convertSystemMessageToAssistantAPI).at(-1);
782
+ const userMessage = messages.map((m) => convertMessageToOpenAIMessage(m)).map(convertSystemMessageToAssistantAPI).at(-1);
753
783
  if (userMessage.role !== "user") {
754
784
  throw new Error("No user message found");
755
785
  }
@@ -895,7 +925,7 @@ var UnifyAdapter = class {
895
925
  baseURL: "https://api.unify.ai/v0/"
896
926
  });
897
927
  const forwardedParameters = request.forwardedParameters;
898
- const messages = request.messages.map(convertMessageToOpenAIMessage);
928
+ const messages = request.messages.map((m) => convertMessageToOpenAIMessage(m));
899
929
  const stream = await openai.chat.completions.create({
900
930
  model: this.model,
901
931
  messages,
@@ -992,7 +1022,7 @@ __name(UnifyAdapter, "UnifyAdapter");
992
1022
  // src/service-adapters/groq/groq-adapter.ts
993
1023
  var import_groq_sdk = require("groq-sdk");
994
1024
  var import_shared6 = require("@copilotkit/shared");
995
- var DEFAULT_MODEL2 = "llama3-groq-70b-8192-tool-use-preview";
1025
+ var DEFAULT_MODEL2 = "llama-3.3-70b-versatile";
996
1026
  var GroqAdapter = class {
997
1027
  model = DEFAULT_MODEL2;
998
1028
  disableParallelToolCalls = false;
@@ -1010,7 +1040,9 @@ var GroqAdapter = class {
1010
1040
  async process(request) {
1011
1041
  const { threadId, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
1012
1042
  const tools = actions.map(convertActionInputToOpenAITool);
1013
- let openaiMessages = messages.map(convertMessageToOpenAIMessage);
1043
+ let openaiMessages = messages.map((m) => convertMessageToOpenAIMessage(m, {
1044
+ keepSystemRole: true
1045
+ }));
1014
1046
  openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);
1015
1047
  let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
1016
1048
  if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {