@copilotkit/runtime 1.6.0-next.4 → 1.6.0-next.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +17 -0
- package/__snapshots__/schema/schema.graphql +1 -0
- package/dist/{chunk-YUCVJM6E.mjs → chunk-5HTQ3NJN.mjs} +30 -28
- package/dist/chunk-5HTQ3NJN.mjs.map +1 -0
- package/dist/{chunk-7EXH7PVD.mjs → chunk-FZJAYGIR.mjs} +43 -11
- package/dist/chunk-FZJAYGIR.mjs.map +1 -0
- package/dist/{chunk-ROFUPT7E.mjs → chunk-KOZEUHQH.mjs} +2 -2
- package/dist/{chunk-Z5VUD7NL.mjs → chunk-MADVAANL.mjs} +2 -2
- package/dist/{chunk-4FCUC27H.mjs → chunk-USK52PF7.mjs} +2 -2
- package/dist/{copilot-runtime-67033bfa.d.ts → copilot-runtime-15bfc4f4.d.ts} +2 -2
- package/dist/graphql/types/converted/index.d.ts +1 -1
- package/dist/{groq-adapter-9d15c927.d.ts → groq-adapter-fb9aa3ab.d.ts} +1 -1
- package/dist/{index-f6d1f30b.d.ts → index-5bec5424.d.ts} +2 -1
- package/dist/index.d.ts +4 -4
- package/dist/index.js +64 -30
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +10 -10
- package/dist/{langserve-7cc5be48.d.ts → langserve-6f7af8d3.d.ts} +1 -1
- package/dist/lib/index.d.ts +4 -4
- package/dist/lib/index.js +64 -30
- package/dist/lib/index.js.map +1 -1
- package/dist/lib/index.mjs +6 -6
- package/dist/lib/integrations/index.d.ts +4 -4
- package/dist/lib/integrations/index.js +3 -2
- package/dist/lib/integrations/index.js.map +1 -1
- package/dist/lib/integrations/index.mjs +5 -5
- package/dist/lib/integrations/nest/index.d.ts +3 -3
- package/dist/lib/integrations/nest/index.js +3 -2
- package/dist/lib/integrations/nest/index.js.map +1 -1
- package/dist/lib/integrations/nest/index.mjs +3 -3
- package/dist/lib/integrations/node-express/index.d.ts +3 -3
- package/dist/lib/integrations/node-express/index.js +3 -2
- package/dist/lib/integrations/node-express/index.js.map +1 -1
- package/dist/lib/integrations/node-express/index.mjs +3 -3
- package/dist/lib/integrations/node-http/index.d.ts +3 -3
- package/dist/lib/integrations/node-http/index.js +3 -2
- package/dist/lib/integrations/node-http/index.js.map +1 -1
- package/dist/lib/integrations/node-http/index.mjs +2 -2
- package/dist/service-adapters/index.d.ts +4 -4
- package/dist/service-adapters/index.js +42 -10
- package/dist/service-adapters/index.js.map +1 -1
- package/dist/service-adapters/index.mjs +1 -1
- package/package.json +3 -3
- package/src/graphql/types/enums.ts +1 -0
- package/src/lib/runtime/remote-action-constructors.ts +6 -5
- package/src/service-adapters/conversion.ts +39 -47
- package/src/service-adapters/groq/groq-adapter.ts +6 -3
- package/src/service-adapters/openai/openai-adapter.ts +1 -1
- package/src/service-adapters/openai/openai-assistant-adapter.ts +1 -1
- package/src/service-adapters/openai/utils.ts +37 -12
- package/src/service-adapters/unify/unify-adapter.ts +1 -1
- package/dist/chunk-7EXH7PVD.mjs.map +0 -1
- package/dist/chunk-YUCVJM6E.mjs.map +0 -1
- /package/dist/{chunk-ROFUPT7E.mjs.map → chunk-KOZEUHQH.mjs.map} +0 -0
- /package/dist/{chunk-Z5VUD7NL.mjs.map → chunk-MADVAANL.mjs.map} +0 -0
- /package/dist/{chunk-4FCUC27H.mjs.map → chunk-USK52PF7.mjs.map} +0 -0
|
@@ -1,8 +1,8 @@
|
|
|
1
1
|
import {
|
|
2
2
|
copilotRuntimeNodeHttpEndpoint
|
|
3
|
-
} from "../../../chunk-YUCVJM6E.mjs";
|
|
3
|
+
} from "../../../chunk-5HTQ3NJN.mjs";
|
|
4
|
+
import "../../../chunk-FZJAYGIR.mjs";
|
|
4
5
|
import "../../../chunk-5BIEM2UU.mjs";
|
|
5
|
-
import "../../../chunk-7EXH7PVD.mjs";
|
|
6
6
|
import "../../../chunk-RTFJTJMA.mjs";
|
|
7
7
|
import "../../../chunk-2OZAGFV3.mjs";
|
|
8
8
|
import "../../../chunk-FHD4JECV.mjs";
|
|
@@ -1,8 +1,8 @@
|
|
|
1
|
-
import { b as CopilotServiceAdapter, C as CopilotRuntimeChatCompletionRequest, a as CopilotRuntimeChatCompletionResponse } from '../langserve-7cc5be48.js';
|
|
2
|
-
export { c as RemoteChain, R as RemoteChainParameters } from '../langserve-7cc5be48.js';
|
|
3
|
-
export { G as GoogleGenerativeAIAdapter, f as GroqAdapter, e as GroqAdapterParams, L as LangChainAdapter, a as OpenAIAdapter, O as OpenAIAdapterParams, c as OpenAIAssistantAdapter, b as OpenAIAssistantAdapterParams, d as UnifyAdapter, U as UnifyAdapterParams } from '../groq-adapter-9d15c927.js';
|
|
1
|
+
import { b as CopilotServiceAdapter, C as CopilotRuntimeChatCompletionRequest, a as CopilotRuntimeChatCompletionResponse } from '../langserve-6f7af8d3.js';
|
|
2
|
+
export { c as RemoteChain, R as RemoteChainParameters } from '../langserve-6f7af8d3.js';
|
|
3
|
+
export { G as GoogleGenerativeAIAdapter, f as GroqAdapter, e as GroqAdapterParams, L as LangChainAdapter, a as OpenAIAdapter, O as OpenAIAdapterParams, c as OpenAIAssistantAdapter, b as OpenAIAssistantAdapterParams, d as UnifyAdapter, U as UnifyAdapterParams } from '../groq-adapter-fb9aa3ab.js';
|
|
4
4
|
import Anthropic from '@anthropic-ai/sdk';
|
|
5
|
-
import '../index-f6d1f30b.js';
|
|
5
|
+
import '../index-5bec5424.js';
|
|
6
6
|
import '../graphql/types/base/index.js';
|
|
7
7
|
import 'rxjs';
|
|
8
8
|
import '@copilotkit/shared';
|
|
@@ -136,7 +136,10 @@ function limitMessagesToTokenCount(messages, tools, model, maxTokens) {
|
|
|
136
136
|
}
|
|
137
137
|
maxTokens -= toolsNumTokens;
|
|
138
138
|
for (const message of messages) {
|
|
139
|
-
if (
|
|
139
|
+
if ([
|
|
140
|
+
"system",
|
|
141
|
+
"developer"
|
|
142
|
+
].includes(message.role)) {
|
|
140
143
|
const numTokens = countMessageTokens(model, message);
|
|
141
144
|
maxTokens -= numTokens;
|
|
142
145
|
if (maxTokens < 0) {
|
|
@@ -149,7 +152,10 @@ function limitMessagesToTokenCount(messages, tools, model, maxTokens) {
|
|
|
149
152
|
...messages
|
|
150
153
|
].reverse();
|
|
151
154
|
for (const message of reversedMessages) {
|
|
152
|
-
if (
|
|
155
|
+
if ([
|
|
156
|
+
"system",
|
|
157
|
+
"developer"
|
|
158
|
+
].includes(message.role)) {
|
|
153
159
|
result.unshift(message);
|
|
154
160
|
continue;
|
|
155
161
|
} else if (cutoff) {
|
|
@@ -172,9 +178,23 @@ function maxTokensForOpenAIModel(model) {
|
|
|
172
178
|
__name(maxTokensForOpenAIModel, "maxTokensForOpenAIModel");
|
|
173
179
|
var DEFAULT_MAX_TOKENS = 128e3;
|
|
174
180
|
var maxTokensByModel = {
|
|
181
|
+
// o1
|
|
182
|
+
o1: 2e5,
|
|
183
|
+
"o1-2024-12-17": 2e5,
|
|
184
|
+
"o1-mini": 128e3,
|
|
185
|
+
"o1-mini-2024-09-12": 128e3,
|
|
186
|
+
"o1-preview": 128e3,
|
|
187
|
+
"o1-preview-2024-09-12": 128e3,
|
|
188
|
+
// o3-mini
|
|
189
|
+
"o3-mini": 2e5,
|
|
190
|
+
"o3-mini-2025-01-31": 2e5,
|
|
175
191
|
// GPT-4
|
|
176
192
|
"gpt-4o": 128e3,
|
|
193
|
+
"chatgpt-4o-latest": 128e3,
|
|
194
|
+
"gpt-4o-2024-08-06": 128e3,
|
|
177
195
|
"gpt-4o-2024-05-13": 128e3,
|
|
196
|
+
"gpt-4o-mini": 128e3,
|
|
197
|
+
"gpt-4o-mini-2024-07-18": 128e3,
|
|
178
198
|
"gpt-4-turbo": 128e3,
|
|
179
199
|
"gpt-4-turbo-2024-04-09": 128e3,
|
|
180
200
|
"gpt-4-0125-preview": 128e3,
|
|
@@ -225,10 +245,17 @@ function convertActionInputToOpenAITool(action) {
|
|
|
225
245
|
};
|
|
226
246
|
}
|
|
227
247
|
__name(convertActionInputToOpenAITool, "convertActionInputToOpenAITool");
|
|
228
|
-
function convertMessageToOpenAIMessage(message) {
|
|
248
|
+
function convertMessageToOpenAIMessage(message, options) {
|
|
249
|
+
const { keepSystemRole } = options || {
|
|
250
|
+
keepSystemRole: false
|
|
251
|
+
};
|
|
229
252
|
if (message.isTextMessage()) {
|
|
253
|
+
let role = message.role;
|
|
254
|
+
if (message.role === "system" && !keepSystemRole) {
|
|
255
|
+
role = "developer";
|
|
256
|
+
}
|
|
230
257
|
return {
|
|
231
|
-
role
|
|
258
|
+
role,
|
|
232
259
|
content: message.content
|
|
233
260
|
};
|
|
234
261
|
} else if (message.isActionExecutionMessage()) {
|
|
@@ -257,7 +284,10 @@ __name(convertMessageToOpenAIMessage, "convertMessageToOpenAIMessage");
|
|
|
257
284
|
function convertSystemMessageToAssistantAPI(message) {
|
|
258
285
|
return {
|
|
259
286
|
...message,
|
|
260
|
-
...
|
|
287
|
+
...[
|
|
288
|
+
"system",
|
|
289
|
+
"developer"
|
|
290
|
+
].includes(message.role) && {
|
|
261
291
|
role: "assistant",
|
|
262
292
|
content: "THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: " + message.content
|
|
263
293
|
}
|
|
@@ -286,7 +316,7 @@ var OpenAIAdapter = class {
|
|
|
286
316
|
const { threadId: threadIdFromRequest, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
|
|
287
317
|
const tools = actions.map(convertActionInputToOpenAITool);
|
|
288
318
|
const threadId = threadIdFromRequest ?? (0, import_shared2.randomUUID)();
|
|
289
|
-
let openaiMessages = messages.map(convertMessageToOpenAIMessage);
|
|
319
|
+
let openaiMessages = messages.map((m) => convertMessageToOpenAIMessage(m));
|
|
290
320
|
openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);
|
|
291
321
|
let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
|
|
292
322
|
if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {
|
|
@@ -749,7 +779,7 @@ var OpenAIAssistantAdapter = class {
|
|
|
749
779
|
];
|
|
750
780
|
const instructionsMessage = messages.shift();
|
|
751
781
|
const instructions = instructionsMessage.isTextMessage() ? instructionsMessage.content : "";
|
|
752
|
-
const userMessage = messages.map(convertMessageToOpenAIMessage).map(convertSystemMessageToAssistantAPI).at(-1);
|
|
782
|
+
const userMessage = messages.map((m) => convertMessageToOpenAIMessage(m)).map(convertSystemMessageToAssistantAPI).at(-1);
|
|
753
783
|
if (userMessage.role !== "user") {
|
|
754
784
|
throw new Error("No user message found");
|
|
755
785
|
}
|
|
@@ -895,7 +925,7 @@ var UnifyAdapter = class {
|
|
|
895
925
|
baseURL: "https://api.unify.ai/v0/"
|
|
896
926
|
});
|
|
897
927
|
const forwardedParameters = request.forwardedParameters;
|
|
898
|
-
const messages = request.messages.map(convertMessageToOpenAIMessage);
|
|
928
|
+
const messages = request.messages.map((m) => convertMessageToOpenAIMessage(m));
|
|
899
929
|
const stream = await openai.chat.completions.create({
|
|
900
930
|
model: this.model,
|
|
901
931
|
messages,
|
|
@@ -992,7 +1022,7 @@ __name(UnifyAdapter, "UnifyAdapter");
|
|
|
992
1022
|
// src/service-adapters/groq/groq-adapter.ts
|
|
993
1023
|
var import_groq_sdk = require("groq-sdk");
|
|
994
1024
|
var import_shared6 = require("@copilotkit/shared");
|
|
995
|
-
var DEFAULT_MODEL2 = "
|
|
1025
|
+
var DEFAULT_MODEL2 = "llama-3.3-70b-versatile";
|
|
996
1026
|
var GroqAdapter = class {
|
|
997
1027
|
model = DEFAULT_MODEL2;
|
|
998
1028
|
disableParallelToolCalls = false;
|
|
@@ -1010,7 +1040,9 @@ var GroqAdapter = class {
|
|
|
1010
1040
|
async process(request) {
|
|
1011
1041
|
const { threadId, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
|
|
1012
1042
|
const tools = actions.map(convertActionInputToOpenAITool);
|
|
1013
|
-
let openaiMessages = messages.map(convertMessageToOpenAIMessage);
|
|
1043
|
+
let openaiMessages = messages.map((m) => convertMessageToOpenAIMessage(m, {
|
|
1044
|
+
keepSystemRole: true
|
|
1045
|
+
}));
|
|
1014
1046
|
openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);
|
|
1015
1047
|
let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
|
|
1016
1048
|
if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {
|