@copilotkit/runtime 1.5.9 → 1.5.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +22 -0
- package/__snapshots__/schema/schema.graphql +273 -0
- package/dist/chunk-44O2JGUY.mjs +12 -0
- package/dist/chunk-44O2JGUY.mjs.map +1 -0
- package/dist/chunk-CLGKEUOA.mjs +1408 -0
- package/dist/chunk-CLGKEUOA.mjs.map +1 -0
- package/dist/chunk-D2WLFQS6.mjs +43 -0
- package/dist/chunk-D2WLFQS6.mjs.map +1 -0
- package/dist/chunk-DFOKBSIS.mjs +1 -0
- package/dist/chunk-DFOKBSIS.mjs.map +1 -0
- package/dist/chunk-EH6BECEX.mjs +25 -0
- package/dist/chunk-EH6BECEX.mjs.map +1 -0
- package/dist/chunk-HKF5IS6J.mjs +3395 -0
- package/dist/chunk-HKF5IS6J.mjs.map +1 -0
- package/dist/chunk-O3VDLEP4.mjs +80 -0
- package/dist/chunk-O3VDLEP4.mjs.map +1 -0
- package/dist/chunk-RFF5IIZJ.mjs +66 -0
- package/dist/chunk-RFF5IIZJ.mjs.map +1 -0
- package/dist/chunk-U3V2BCGI.mjs +152 -0
- package/dist/chunk-U3V2BCGI.mjs.map +1 -0
- package/dist/chunk-XFOOQKZE.mjs +25 -0
- package/dist/chunk-XFOOQKZE.mjs.map +1 -0
- package/dist/copilot-runtime-36700e00.d.ts +196 -0
- package/dist/graphql/types/base/index.d.ts +6 -0
- package/dist/graphql/types/base/index.js +63 -0
- package/dist/graphql/types/base/index.js.map +1 -0
- package/dist/graphql/types/base/index.mjs +8 -0
- package/dist/graphql/types/base/index.mjs.map +1 -0
- package/dist/graphql/types/converted/index.d.ts +2 -0
- package/dist/graphql/types/converted/index.js +124 -0
- package/dist/graphql/types/converted/index.js.map +1 -0
- package/dist/graphql/types/converted/index.mjs +17 -0
- package/dist/graphql/types/converted/index.mjs.map +1 -0
- package/dist/groq-adapter-696b5d29.d.ts +281 -0
- package/dist/index-cc2b17be.d.ts +87 -0
- package/dist/index.d.ts +23 -0
- package/dist/index.js +5153 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +76 -0
- package/dist/index.mjs.map +1 -0
- package/dist/langserve-9125a12e.d.ts +176 -0
- package/dist/lib/cloud/index.d.ts +6 -0
- package/dist/lib/cloud/index.js +18 -0
- package/dist/lib/cloud/index.js.map +1 -0
- package/dist/lib/cloud/index.mjs +1 -0
- package/dist/lib/cloud/index.mjs.map +1 -0
- package/dist/lib/index.d.ts +20 -0
- package/dist/lib/index.js +4801 -0
- package/dist/lib/index.js.map +1 -0
- package/dist/lib/index.mjs +58 -0
- package/dist/lib/index.mjs.map +1 -0
- package/dist/lib/integrations/index.d.ts +33 -0
- package/dist/lib/integrations/index.js +2166 -0
- package/dist/lib/integrations/index.js.map +1 -0
- package/dist/lib/integrations/index.mjs +34 -0
- package/dist/lib/integrations/index.mjs.map +1 -0
- package/dist/lib/integrations/nest/index.d.ts +14 -0
- package/dist/lib/integrations/nest/index.js +2075 -0
- package/dist/lib/integrations/nest/index.js.map +1 -0
- package/dist/lib/integrations/nest/index.mjs +13 -0
- package/dist/lib/integrations/nest/index.mjs.map +1 -0
- package/dist/lib/integrations/node-express/index.d.ts +14 -0
- package/dist/lib/integrations/node-express/index.js +2075 -0
- package/dist/lib/integrations/node-express/index.js.map +1 -0
- package/dist/lib/integrations/node-express/index.mjs +13 -0
- package/dist/lib/integrations/node-express/index.mjs.map +1 -0
- package/dist/lib/integrations/node-http/index.d.ts +14 -0
- package/dist/lib/integrations/node-http/index.js +2061 -0
- package/dist/lib/integrations/node-http/index.js.map +1 -0
- package/dist/lib/integrations/node-http/index.mjs +12 -0
- package/dist/lib/integrations/node-http/index.mjs.map +1 -0
- package/dist/service-adapters/index.d.ts +84 -0
- package/dist/service-adapters/index.js +1448 -0
- package/dist/service-adapters/index.js.map +1 -0
- package/dist/service-adapters/index.mjs +26 -0
- package/dist/service-adapters/index.mjs.map +1 -0
- package/dist/utils/index.d.ts +49 -0
- package/dist/utils/index.js +174 -0
- package/dist/utils/index.js.map +1 -0
- package/dist/utils/index.mjs +12 -0
- package/dist/utils/index.mjs.map +1 -0
- package/package.json +2 -2
|
@@ -0,0 +1,1408 @@
|
|
|
1
|
+
import {
|
|
2
|
+
__name
|
|
3
|
+
} from "./chunk-44O2JGUY.mjs";
|
|
4
|
+
|
|
5
|
+
// src/service-adapters/langchain/langserve.ts
|
|
6
|
+
import { RemoteRunnable } from "langchain/runnables/remote";
|
|
7
|
+
// Wraps a remote LangServe chain endpoint as a CopilotKit action.
var RemoteChain = class {
  name;
  description;
  chainUrl;
  parameters;
  parameterType;
  constructor(options) {
    const { name, description, chainUrl, parameters, parameterType } = options;
    this.name = name;
    this.description = description;
    this.chainUrl = chainUrl;
    this.parameters = parameters;
    // Default to "multi": the handler forwards the whole argument map.
    this.parameterType = parameterType || "multi";
  }
  /** Builds the action descriptor, inferring parameters from the chain's published schema when none were supplied. */
  async toAction() {
    if (!this.parameters) {
      await this.inferLangServeParameters();
    }
    const handler = async (args) => {
      const runnable = new RemoteRunnable({ url: this.chainUrl });
      // "single" chains receive the first argument's value directly; "multi" chains receive the whole map.
      const input = this.parameterType === "single" ? args[Object.keys(args)[0]] : args;
      return await runnable.invoke(input);
    };
    return {
      name: this.name,
      description: this.description,
      parameters: this.parameters,
      handler
    };
  }
  /** Fetches `<chainUrl>/input_schema` and derives parameter metadata from it. */
  async inferLangServeParameters() {
    const supportedTypes = ["string", "number", "boolean"];
    const schemaUrl = this.chainUrl.replace(/\/+$/, "") + "/input_schema";
    const schema = await fetch(schemaUrl).then((res) => res.json()).catch(() => {
      throw new Error("Failed to fetch langserve schema at " + schemaUrl);
    });
    if (supportedTypes.includes(schema.type)) {
      // Primitive top-level schema: expose a single unnamed input.
      this.parameterType = "single";
      this.parameters = [
        {
          name: "input",
          type: schema.type,
          description: "The input to the chain"
        }
      ];
      return;
    }
    if (schema.type !== "object") {
      throw new Error("Unsupported schema type");
    }
    this.parameterType = "multi";
    this.parameters = Object.keys(schema.properties).map((key) => {
      const property = schema.properties[key];
      if (!supportedTypes.includes(property.type)) {
        throw new Error("Unsupported schema type");
      }
      return {
        name: key,
        type: property.type,
        description: property.description || "",
        required: schema.required?.includes(key) || false
      };
    });
  }
};
|
|
81
|
+
__name(RemoteChain, "RemoteChain");
|
|
82
|
+
|
|
83
|
+
// src/service-adapters/openai/openai-adapter.ts
|
|
84
|
+
import OpenAI from "openai";
|
|
85
|
+
|
|
86
|
+
// src/service-adapters/openai/utils.ts
|
|
87
|
+
/**
 * Trims `messages` so that, together with the serialized `tools`, they fit in
 * the model's token budget. System messages are always kept (their cost is
 * reserved up front); other messages are kept newest-first until the budget
 * runs out. Throws when the tools or the system messages alone overflow.
 */
function limitMessagesToTokenCount(messages, tools, model, maxTokens) {
  let budget = maxTokens || maxTokensForOpenAIModel(model);
  const toolTokens = countToolsTokens(model, tools);
  if (toolTokens > budget) {
    throw new Error(`Too many tokens in function definitions: ${toolTokens} > ${budget}`);
  }
  budget -= toolTokens;
  // Reserve room for every system message first; they are never dropped.
  for (const message of messages) {
    if (message.role === "system") {
      budget -= countMessageTokens(model, message);
      if (budget < 0) {
        throw new Error("Not enough tokens for system message.");
      }
    }
  }
  const kept = [];
  let truncated = false;
  // Walk from newest to oldest, prepending whatever still fits.
  for (let i = messages.length - 1; i >= 0; i--) {
    const message = messages[i];
    if (message.role === "system") {
      kept.unshift(message);
      continue;
    }
    if (truncated) {
      continue;
    }
    const cost = countMessageTokens(model, message);
    if (budget < cost) {
      truncated = true;
      continue;
    }
    kept.unshift(message);
    budget -= cost;
  }
  return kept;
}
|
|
125
|
+
__name(limitMessagesToTokenCount, "limitMessagesToTokenCount");
|
|
126
|
+
/** Context-window size for `model`, falling back to DEFAULT_MAX_TOKENS for unknown models. */
function maxTokensForOpenAIModel(model) {
  const limit = maxTokensByModel[model];
  return limit === undefined ? DEFAULT_MAX_TOKENS : limit;
}
|
|
129
|
+
__name(maxTokensForOpenAIModel, "maxTokensForOpenAIModel");
|
|
130
|
+
// Fallback context-window size (in tokens) for models not listed in the table below.
var DEFAULT_MAX_TOKENS = 128e3;
// Context-window sizes (max total tokens) keyed by OpenAI model name.
// Used by maxTokensForOpenAIModel / limitMessagesToTokenCount to budget prompts.
var maxTokensByModel = {
  // GPT-4
  "gpt-4o": 128e3,
  "gpt-4o-2024-05-13": 128e3,
  "gpt-4-turbo": 128e3,
  "gpt-4-turbo-2024-04-09": 128e3,
  "gpt-4-0125-preview": 128e3,
  "gpt-4-turbo-preview": 128e3,
  "gpt-4-1106-preview": 128e3,
  "gpt-4-vision-preview": 128e3,
  "gpt-4-1106-vision-preview": 128e3,
  "gpt-4-32k": 32768,
  "gpt-4-32k-0613": 32768,
  "gpt-4-32k-0314": 32768,
  "gpt-4": 8192,
  "gpt-4-0613": 8192,
  "gpt-4-0314": 8192,
  // GPT-3.5
  "gpt-3.5-turbo-0125": 16385,
  "gpt-3.5-turbo": 16385,
  "gpt-3.5-turbo-1106": 16385,
  "gpt-3.5-turbo-instruct": 4096,
  "gpt-3.5-turbo-16k": 16385,
  "gpt-3.5-turbo-0613": 4096,
  "gpt-3.5-turbo-16k-0613": 16385,
  "gpt-3.5-turbo-0301": 4097
};
|
|
158
|
+
/** Approximate token cost of the serialized tool definitions; 0 when there are none. */
function countToolsTokens(model, tools) {
  if (!tools.length) {
    return 0;
  }
  const serialized = JSON.stringify(tools);
  return countTokens(model, serialized);
}
|
|
165
|
+
__name(countToolsTokens, "countToolsTokens");
|
|
166
|
+
/** Approximate token cost of a single message's text content (empty content counts as ""). */
function countMessageTokens(model, message) {
  const text = message.content || "";
  return countTokens(model, text);
}
|
|
169
|
+
__name(countMessageTokens, "countMessageTokens");
|
|
170
|
+
/**
 * Crude token estimate: roughly 3 characters per token. The `model` argument
 * is accepted for interface symmetry but does not affect the estimate.
 */
function countTokens(model, text) {
  const charCount = text.length;
  return charCount / 3;
}
|
|
173
|
+
__name(countTokens, "countTokens");
|
|
174
|
+
/** Maps a CopilotKit action input onto the OpenAI "function" tool shape, parsing its JSON schema. */
function convertActionInputToOpenAITool(action) {
  const { name, description, jsonSchema } = action;
  return {
    type: "function",
    function: {
      name,
      description,
      parameters: JSON.parse(jsonSchema)
    }
  };
}
|
|
184
|
+
__name(convertActionInputToOpenAITool, "convertActionInputToOpenAITool");
|
|
185
|
+
/**
 * Converts a CopilotKit message into the OpenAI chat-completion message shape.
 * Text messages keep their role; action executions become assistant tool
 * calls; results become "tool" messages. Other kinds yield undefined.
 */
function convertMessageToOpenAIMessage(message) {
  if (message.isTextMessage()) {
    return {
      role: message.role,
      content: message.content
    };
  }
  if (message.isActionExecutionMessage()) {
    const call = {
      id: message.id,
      type: "function",
      function: {
        name: message.name,
        arguments: JSON.stringify(message.arguments)
      }
    };
    return { role: "assistant", tool_calls: [call] };
  }
  if (message.isResultMessage()) {
    return {
      role: "tool",
      content: message.result,
      tool_call_id: message.actionExecutionId
    };
  }
}
|
|
213
|
+
__name(convertMessageToOpenAIMessage, "convertMessageToOpenAIMessage");
|
|
214
|
+
/**
 * The Assistants API does not accept "system" roles in thread messages, so
 * system messages are downgraded to assistant messages with a marker prefix.
 * Non-system messages are returned as shallow copies, unchanged.
 */
function convertSystemMessageToAssistantAPI(message) {
  if (message.role !== "system") {
    return { ...message };
  }
  return {
    ...message,
    role: "assistant",
    content: "THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: " + message.content
  };
}
|
|
223
|
+
__name(convertSystemMessageToAssistantAPI, "convertSystemMessageToAssistantAPI");
|
|
224
|
+
|
|
225
|
+
// src/service-adapters/openai/openai-adapter.ts
|
|
226
|
+
import { randomId } from "@copilotkit/shared";
|
|
227
|
+
// Model used when neither the adapter nor the request specifies one.
var DEFAULT_MODEL = "gpt-4o";
/**
 * Service adapter for the OpenAI chat-completions API. Converts the request
 * into an OpenAI streaming completion and relays text / tool-call deltas onto
 * the event stream.
 */
var OpenAIAdapter = class {
  model = DEFAULT_MODEL;
  // When true, requests `parallel_tool_calls: false` so the model emits at most one tool call at a time.
  disableParallelToolCalls = false;
  _openai;
  get openai() {
    return this._openai;
  }
  /**
   * @param params.openai - preconfigured OpenAI client (a default client is created otherwise)
   * @param params.model - model name override (defaults to DEFAULT_MODEL)
   * @param params.disableParallelToolCalls - disable parallel tool calling
   */
  constructor(params) {
    this._openai = (params == null ? void 0 : params.openai) || new OpenAI({});
    if (params == null ? void 0 : params.model) {
      this.model = params.model;
    }
    this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
  }
  /** Streams one completion for `request`; resolves with the (possibly newly minted) threadId. */
  async process(request) {
    const { threadId, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
    const tools = actions.map(convertActionInputToOpenAITool);
    let openaiMessages = messages.map(convertMessageToOpenAIMessage);
    // Drop oldest non-system messages that would overflow the model's context window.
    openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);
    let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
    if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {
      // "function" is shorthand for forcing a specific named function.
      toolChoice = {
        type: "function",
        function: {
          name: forwardedParameters.toolChoiceFunctionName
        }
      };
    }
    const stream = this.openai.beta.chat.completions.stream({
      model,
      stream: true,
      messages: openaiMessages,
      ...tools.length > 0 && {
        tools
      },
      ...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
        max_tokens: forwardedParameters.maxTokens
      },
      ...(forwardedParameters == null ? void 0 : forwardedParameters.stop) && {
        stop: forwardedParameters.stop
      },
      ...toolChoice && {
        tool_choice: toolChoice
      },
      ...this.disableParallelToolCalls && {
        parallel_tool_calls: false
      },
      // BUGFIX: use a null-check instead of truthiness so an explicit
      // temperature of 0 (a valid, deterministic setting) is forwarded
      // instead of being silently dropped.
      ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) != null && {
        temperature: forwardedParameters.temperature
      }
    });
    eventSource.stream(async (eventStream$) => {
      var _a, _b;
      // mode tracks whether we are mid-text-message or mid-tool-call.
      let mode = null;
      let currentMessageId;
      let currentToolCallId;
      for await (const chunk of stream) {
        if (chunk.choices.length === 0) {
          continue;
        }
        const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
        const content = chunk.choices[0].delta.content;
        // Close the open message/tool-call when the stream switches kinds.
        if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
          mode = null;
          eventStream$.sendTextMessageEnd({
            messageId: currentMessageId
          });
        } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
          mode = null;
          eventStream$.sendActionExecutionEnd({
            actionExecutionId: currentToolCallId
          });
        }
        // Open a new message/tool-call when idle.
        if (mode === null) {
          if (toolCall == null ? void 0 : toolCall.id) {
            mode = "function";
            currentToolCallId = toolCall.id;
            eventStream$.sendActionExecutionStart({
              actionExecutionId: currentToolCallId,
              parentMessageId: chunk.id,
              actionName: toolCall.function.name
            });
          } else if (content) {
            mode = "message";
            currentMessageId = chunk.id;
            eventStream$.sendTextMessageStart({
              messageId: currentMessageId
            });
          }
        }
        // Forward the actual delta payload.
        if (mode === "message" && content) {
          eventStream$.sendTextMessageContent({
            messageId: currentMessageId,
            content
          });
        } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
          eventStream$.sendActionExecutionArgs({
            actionExecutionId: currentToolCallId,
            args: toolCall.function.arguments
          });
        }
      }
      // Flush whatever was still open when the stream ended.
      if (mode === "message") {
        eventStream$.sendTextMessageEnd({
          messageId: currentMessageId
        });
      } else if (mode === "function") {
        eventStream$.sendActionExecutionEnd({
          actionExecutionId: currentToolCallId
        });
      }
      eventStream$.complete();
    });
    return {
      threadId: threadId || randomId()
    };
  }
};
|
|
346
|
+
__name(OpenAIAdapter, "OpenAIAdapter");
|
|
347
|
+
|
|
348
|
+
// src/service-adapters/langchain/utils.ts
|
|
349
|
+
import { AIMessage, HumanMessage, SystemMessage, ToolMessage } from "@langchain/core/messages";
|
|
350
|
+
import { DynamicStructuredTool } from "@langchain/core/tools";
|
|
351
|
+
import { randomId as randomId2, convertJsonSchemaToZodSchema } from "@copilotkit/shared";
|
|
352
|
+
/**
 * Converts a CopilotKit message into the matching LangChain message class.
 * Returns undefined for message kinds (or text roles) with no equivalent.
 */
function convertMessageToLangChainMessage(message) {
  if (message.isTextMessage()) {
    switch (message.role) {
      case "user":
        return new HumanMessage(message.content);
      case "assistant":
        return new AIMessage(message.content);
      case "system":
        return new SystemMessage(message.content);
    }
    return;
  }
  if (message.isActionExecutionMessage()) {
    // Tool requests are modeled as an AIMessage carrying tool_calls only.
    return new AIMessage({
      content: "",
      tool_calls: [
        {
          id: message.id,
          args: message.arguments,
          name: message.name
        }
      ]
    });
  }
  if (message.isResultMessage()) {
    return new ToolMessage({
      content: message.result,
      tool_call_id: message.actionExecutionId
    });
  }
}
|
|
379
|
+
__name(convertMessageToLangChainMessage, "convertMessageToLangChainMessage");
|
|
380
|
+
/** Wraps a CopilotKit action input as a LangChain structured tool (schema only). */
function convertActionInputToLangChainTool(actionInput) {
  const schema = convertJsonSchemaToZodSchema(JSON.parse(actionInput.jsonSchema), true);
  // The func body is a stub: actual execution happens on the client side.
  return new DynamicStructuredTool({
    name: actionInput.name,
    description: actionInput.description,
    schema,
    func: async () => ""
  });
}
|
|
390
|
+
__name(convertActionInputToLangChainTool, "convertActionInputToLangChainTool");
|
|
391
|
+
/** True when `message` reports the AIMessage string tag (brand check, no instanceof). */
function isAIMessage(message) {
  const tag = Object.prototype.toString.call(message);
  return tag === "[object AIMessage]";
}
|
|
394
|
+
__name(isAIMessage, "isAIMessage");
|
|
395
|
+
/** True when `message` reports the AIMessageChunk string tag (brand check, no instanceof). */
function isAIMessageChunk(message) {
  const tag = Object.prototype.toString.call(message);
  return tag === "[object AIMessageChunk]";
}
|
|
398
|
+
__name(isAIMessageChunk, "isAIMessageChunk");
|
|
399
|
+
/** True when `message` reports the BaseMessageChunk string tag (brand check, no instanceof). */
function isBaseMessageChunk(message) {
  const tag = Object.prototype.toString.call(message);
  return tag === "[object BaseMessageChunk]";
}
|
|
402
|
+
__name(isBaseMessageChunk, "isBaseMessageChunk");
|
|
403
|
+
/**
 * When a pending action execution exists, resolve it with a placeholder
 * result so the protocol state stays consistent before a plain message is
 * streamed. No-op when there is no pending execution.
 */
function maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution) {
  if (!actionExecution) {
    return;
  }
  eventStream$.sendActionExecutionResult({
    actionExecutionId: actionExecution.id,
    actionName: actionExecution.name,
    result: "Sending a message"
  });
}
|
|
412
|
+
__name(maybeSendActionExecutionResultIsMessage, "maybeSendActionExecutionResultIsMessage");
|
|
413
|
+
// Relays a LangChain handler's return value onto the event stream.
// Supported result shapes: plain string, AIMessage, BaseMessageChunk, or a
// ReadableStream of chunks; anything else is JSON-encoded as an action result
// when `actionExecution` is set, and rejected otherwise.
async function streamLangChainResponse({ result, eventStream$, actionExecution }) {
  var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
  if (typeof result === "string") {
    // A bare string is either a text message or the pending action's result.
    if (!actionExecution) {
      eventStream$.sendTextMessage(randomId2(), result);
    } else {
      eventStream$.sendActionExecutionResult({
        actionExecutionId: actionExecution.id,
        actionName: actionExecution.name,
        result
      });
    }
  } else if (isAIMessage(result)) {
    // Complete (non-streamed) AIMessage: emit its text, then each tool call.
    maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
    if (result.content) {
      eventStream$.sendTextMessage(randomId2(), result.content);
    }
    for (const toolCall of result.tool_calls) {
      eventStream$.sendActionExecution({
        actionExecutionId: toolCall.id || randomId2(),
        actionName: toolCall.name,
        args: JSON.stringify(toolCall.args)
      });
    }
  } else if (isBaseMessageChunk(result)) {
    // Single chunk: content / tool_calls live on lc_kwargs.
    maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
    if ((_a = result.lc_kwargs) == null ? void 0 : _a.content) {
      eventStream$.sendTextMessage(randomId2(), result.content);
    }
    if ((_b = result.lc_kwargs) == null ? void 0 : _b.tool_calls) {
      for (const toolCall of (_c = result.lc_kwargs) == null ? void 0 : _c.tool_calls) {
        eventStream$.sendActionExecution({
          actionExecutionId: toolCall.id || randomId2(),
          actionName: toolCall.name,
          args: JSON.stringify(toolCall.args)
        });
      }
    }
  } else if (result && "getReader" in result) {
    // Streaming result: drive a message/function state machine over the chunks.
    maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
    let reader = result.getReader();
    // mode is "message" while streaming text, "function" while streaming a tool call.
    let mode = null;
    let currentMessageId;
    // Accumulated identity of the tool call across chunks (ids/names may only
    // appear on the first chunk; index changes signal a new parallel call).
    const toolCallDetails = {
      name: null,
      id: null,
      index: null,
      prevIndex: null
    };
    while (true) {
      try {
        const { done, value } = await reader.read();
        let toolCallName = void 0;
        let toolCallId = void 0;
        let toolCallArgs = void 0;
        let hasToolCall = false;
        let content = "";
        if (value && value.content) {
          // Content may be a string or a list of content parts; use the first part's text.
          content = Array.isArray(value.content) ? ((_d = value.content[0]) == null ? void 0 : _d.text) ?? "" : value.content;
        }
        if (isAIMessageChunk(value)) {
          let chunk = (_e = value.tool_call_chunks) == null ? void 0 : _e[0];
          toolCallArgs = chunk == null ? void 0 : chunk.args;
          hasToolCall = chunk != void 0;
          if (chunk == null ? void 0 : chunk.name)
            toolCallDetails.name = chunk.name;
          if ((chunk == null ? void 0 : chunk.index) != null) {
            toolCallDetails.index = chunk.index;
            if (toolCallDetails.prevIndex == null)
              toolCallDetails.prevIndex = chunk.index;
          }
          if (chunk == null ? void 0 : chunk.id)
            // Suffix the id with the call index so parallel calls stay distinct.
            toolCallDetails.id = chunk.index != null ? `${chunk.id}-idx-${chunk.index}` : chunk.id;
          toolCallName = toolCallDetails.name;
          toolCallId = toolCallDetails.id;
        } else if (isBaseMessageChunk(value)) {
          let chunk = (_g = (_f = value.additional_kwargs) == null ? void 0 : _f.tool_calls) == null ? void 0 : _g[0];
          toolCallName = (_h = chunk == null ? void 0 : chunk.function) == null ? void 0 : _h.name;
          toolCallId = chunk == null ? void 0 : chunk.id;
          toolCallArgs = (_i = chunk == null ? void 0 : chunk.function) == null ? void 0 : _i.arguments;
          hasToolCall = (chunk == null ? void 0 : chunk.function) != void 0;
        }
        // Close the open message/tool-call when the chunk kind changes or the stream ends.
        if (mode === "message" && (toolCallId || done)) {
          mode = null;
          eventStream$.sendTextMessageEnd({
            messageId: currentMessageId
          });
        } else if (mode === "function" && (!hasToolCall || done)) {
          mode = null;
          eventStream$.sendActionExecutionEnd({
            actionExecutionId: toolCallId
          });
        }
        if (done) {
          break;
        }
        // Open a new message/tool-call when idle.
        if (mode === null) {
          if (hasToolCall && toolCallId && toolCallName) {
            mode = "function";
            eventStream$.sendActionExecutionStart({
              actionExecutionId: toolCallId,
              actionName: toolCallName,
              parentMessageId: (_j = value.lc_kwargs) == null ? void 0 : _j.id
            });
          } else if (content) {
            mode = "message";
            currentMessageId = ((_k = value.lc_kwargs) == null ? void 0 : _k.id) || randomId2();
            eventStream$.sendTextMessageStart({
              messageId: currentMessageId
            });
          }
        }
        // Forward the chunk's payload.
        if (mode === "message" && content) {
          eventStream$.sendTextMessageContent({
            messageId: currentMessageId,
            content
          });
        } else if (mode === "function" && toolCallArgs) {
          // An index change mid-stream means a new parallel tool call started:
          // end the previous one and start the next before sending args.
          if (toolCallDetails.index !== toolCallDetails.prevIndex) {
            eventStream$.sendActionExecutionEnd({
              actionExecutionId: toolCallId
            });
            eventStream$.sendActionExecutionStart({
              actionExecutionId: toolCallId,
              actionName: toolCallName,
              parentMessageId: (_l = value.lc_kwargs) == null ? void 0 : _l.id
            });
            toolCallDetails.prevIndex = toolCallDetails.index;
          }
          eventStream$.sendActionExecutionArgs({
            actionExecutionId: toolCallId,
            args: toolCallArgs
          });
        }
      } catch (error) {
        // Best-effort: a read failure terminates the stream rather than the request.
        console.error("Error reading from stream", error);
        break;
      }
    }
  } else if (actionExecution) {
    // Arbitrary object result for a pending action: JSON-encode it.
    eventStream$.sendActionExecutionResult({
      actionExecutionId: actionExecution.id,
      actionName: actionExecution.name,
      result: encodeResult(result)
    });
  } else {
    throw new Error("Invalid return type from LangChain function.");
  }
  eventStream$.complete();
}
|
|
563
|
+
__name(streamLangChainResponse, "streamLangChainResponse");
|
|
564
|
+
/** Serializes a handler result: undefined -> "", strings pass through, everything else -> JSON. */
function encodeResult(result) {
  if (result === void 0) {
    return "";
  }
  return typeof result === "string" ? result : JSON.stringify(result);
}
|
|
573
|
+
__name(encodeResult, "encodeResult");
|
|
574
|
+
|
|
575
|
+
// src/service-adapters/langchain/langchain-adapter.ts
|
|
576
|
+
import { randomId as randomId3 } from "@copilotkit/shared";
|
|
577
|
+
import { awaitAllCallbacks } from "@langchain/core/callbacks/promises";
|
|
578
|
+
/** Adapter that delegates request handling to a user-supplied LangChain chain function. */
var LangChainAdapter = class {
  options;
  /**
   * To use LangChain as a backend, provide a handler function to the adapter with your custom LangChain logic.
   */
  constructor(options) {
    this.options = options;
  }
  /** Invokes the configured chainFn and streams its result; always flushes LangChain callbacks. */
  async process(request) {
    try {
      const { eventSource, model, actions, messages, runId } = request;
      const threadId = request.threadId ?? randomId3();
      const langChainMessages = messages.map(convertMessageToLangChainMessage);
      const langChainTools = actions.map(convertActionInputToLangChainTool);
      const result = await this.options.chainFn({
        messages: langChainMessages,
        tools: langChainTools,
        model,
        threadId,
        runId
      });
      eventSource.stream(async (eventStream$) => {
        await streamLangChainResponse({ result, eventStream$ });
      });
      return { threadId };
    } finally {
      // Make sure all LangChain callback handlers settle before returning.
      await awaitAllCallbacks();
    }
  }
};
|
|
611
|
+
__name(LangChainAdapter, "LangChainAdapter");
|
|
612
|
+
|
|
613
|
+
// src/service-adapters/google/google-genai-adapter.ts
|
|
614
|
+
import { ChatGoogle } from "@langchain/google-gauth";
|
|
615
|
+
/** LangChainAdapter preconfigured to stream completions from Google's Gemini models. */
var GoogleGenerativeAIAdapter = class extends LangChainAdapter {
  constructor(options) {
    super({
      chainFn: async ({ messages, tools, threadId }) => {
        const modelName = (options == null ? void 0 : options.model) ?? "gemini-1.5-pro";
        const model = new ChatGoogle({
          modelName,
          apiVersion: "v1beta"
        }).bindTools(tools);
        // Tag the stream with the thread id so runs can be correlated.
        return model.stream(messages, {
          metadata: { conversation_id: threadId }
        });
      }
    });
  }
};
|
|
632
|
+
__name(GoogleGenerativeAIAdapter, "GoogleGenerativeAIAdapter");
|
|
633
|
+
|
|
634
|
+
// src/service-adapters/openai/openai-assistant-adapter.ts
|
|
635
|
+
import OpenAI2 from "openai";
|
|
636
|
+
var OpenAIAssistantAdapter = class {
|
|
637
|
+
openai;
|
|
638
|
+
codeInterpreterEnabled;
|
|
639
|
+
assistantId;
|
|
640
|
+
fileSearchEnabled;
|
|
641
|
+
disableParallelToolCalls;
|
|
642
|
+
constructor(params) {
|
|
643
|
+
this.openai = params.openai || new OpenAI2({});
|
|
644
|
+
this.codeInterpreterEnabled = params.codeInterpreterEnabled === false || true;
|
|
645
|
+
this.fileSearchEnabled = params.fileSearchEnabled === false || true;
|
|
646
|
+
this.assistantId = params.assistantId;
|
|
647
|
+
this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
|
|
648
|
+
}
|
|
649
|
+
async process(request) {
|
|
650
|
+
const { messages, actions, eventSource, runId, forwardedParameters } = request;
|
|
651
|
+
let threadId = request.threadId || (await this.openai.beta.threads.create()).id;
|
|
652
|
+
const lastMessage = messages.at(-1);
|
|
653
|
+
let nextRunId = void 0;
|
|
654
|
+
if (lastMessage.isResultMessage() && runId) {
|
|
655
|
+
nextRunId = await this.submitToolOutputs(threadId, runId, messages, eventSource);
|
|
656
|
+
} else if (lastMessage.isTextMessage()) {
|
|
657
|
+
nextRunId = await this.submitUserMessage(threadId, messages, actions, eventSource, forwardedParameters);
|
|
658
|
+
} else {
|
|
659
|
+
throw new Error("No actionable message found in the messages");
|
|
660
|
+
}
|
|
661
|
+
return {
|
|
662
|
+
threadId,
|
|
663
|
+
runId: nextRunId
|
|
664
|
+
};
|
|
665
|
+
}
|
|
666
|
+
/**
 * Forwards locally executed action results back to an Assistant run that is
 * blocked on `submit_tool_outputs`, then streams the run's continuation.
 *
 * @param threadId - OpenAI thread the run belongs to.
 * @param runId - The run currently waiting for tool outputs.
 * @param messages - Full history; only result messages whose
 *   `actionExecutionId` matches a pending tool-call id are forwarded.
 * @param eventSource - Sink the streamed response events are written to.
 * @returns The same `runId`, so follow-up requests can reference the run.
 * @throws If the run is not waiting for tool outputs, or the number of
 *   matching results differs from the number of pending tool calls.
 */
async submitToolOutputs(threadId, runId, messages, eventSource) {
  let run = await this.openai.beta.threads.runs.retrieve(threadId, runId);
  if (!run.required_action) {
    throw new Error("No tool outputs required");
  }
  // Ids of the tool calls the run is blocked on.
  const toolCallsIds = run.required_action.submit_tool_outputs.tool_calls.map((toolCall) => toolCall.id);
  const resultMessages = messages.filter((message) => message.isResultMessage() && toolCallsIds.includes(message.actionExecutionId));
  // Every pending tool call must have exactly one matching result.
  if (toolCallsIds.length != resultMessages.length) {
    throw new Error("Number of function results does not match the number of tool calls");
  }
  const toolOutputs = resultMessages.map((message) => {
    return {
      tool_call_id: message.actionExecutionId,
      output: message.result
    };
  });
  const stream = this.openai.beta.threads.runs.submitToolOutputsStream(threadId, runId, {
    tool_outputs: toolOutputs,
    // Only sent at all when parallel tool calls are disabled on the adapter.
    ...this.disableParallelToolCalls && {
      parallel_tool_calls: false
    }
  });
  await this.streamResponse(stream, eventSource);
  return runId;
}
|
|
691
|
+
/**
 * Appends the latest user message to the thread and starts a new streamed
 * Assistant run with the configured tools.
 *
 * The first message in the list is consumed as the run's instructions
 * (system prompt); the last converted message must have role "user".
 *
 * @param threadId - OpenAI thread to post to.
 * @param messages - Full history; copied so the caller's array is not mutated.
 * @param actions - CopilotKit actions, converted to OpenAI function tools.
 * @param eventSource - Sink the streamed response events are written to.
 * @param forwardedParameters - Optional per-request overrides (maxTokens).
 * @returns Promise of the new run's id (resolved from the stream).
 * @throws If the last converted message is not a user message.
 */
async submitUserMessage(threadId, messages, actions, eventSource, forwardedParameters) {
  // Shallow copy so shift() below does not mutate the caller's array.
  messages = [
    ...messages
  ];
  // First message doubles as the run instructions when it is a text message.
  const instructionsMessage = messages.shift();
  const instructions = instructionsMessage.isTextMessage() ? instructionsMessage.content : "";
  // convertSystemMessageToAssistantAPI is defined elsewhere in this bundle;
  // presumably it re-labels system messages for the Assistants API — the last
  // converted message is the one posted to the thread.
  const userMessage = messages.map(convertMessageToOpenAIMessage).map(convertSystemMessageToAssistantAPI).at(-1);
  if (userMessage.role !== "user") {
    throw new Error("No user message found");
  }
  await this.openai.beta.threads.messages.create(threadId, {
    role: "user",
    content: userMessage.content
  });
  const openaiTools = actions.map(convertActionInputToOpenAITool);
  // Action tools plus the built-in tools the adapter was configured with.
  const tools = [
    ...openaiTools,
    ...this.codeInterpreterEnabled ? [
      {
        type: "code_interpreter"
      }
    ] : [],
    ...this.fileSearchEnabled ? [
      {
        type: "file_search"
      }
    ] : []
  ];
  let stream = this.openai.beta.threads.runs.stream(threadId, {
    assistant_id: this.assistantId,
    instructions,
    tools,
    // Spread only when a maxTokens override was forwarded.
    ...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
      max_completion_tokens: forwardedParameters.maxTokens
    },
    ...this.disableParallelToolCalls && {
      parallel_tool_calls: false
    }
  });
  await this.streamResponse(stream, eventSource);
  return getRunIdFromStream(stream);
}
|
|
733
|
+
/**
 * Translates an Assistants API event stream into CopilotKit runtime events.
 *
 * Text messages map to sendTextMessageStart/Content/End; function tool calls
 * map to sendActionExecutionStart/Args/End. A tool call has no explicit end
 * event in the source stream, so it is closed when the next message or tool
 * call begins, or when the stream finishes.
 *
 * @param stream - Async-iterable of Assistants API events.
 * @param eventSource - Runtime event source whose stream callback receives
 *   the translated events. Note: the callback is not awaited here.
 */
async streamResponse(stream, eventSource) {
  eventSource.stream(async (eventStream$) => {
    var _a, _b, _c, _d, _e, _f;
    // True while a tool call is open and awaiting its implicit end.
    let inFunctionCall = false;
    let currentMessageId;
    let currentToolCallId;
    for await (const chunk of stream) {
      switch (chunk.event) {
        case "thread.message.created":
          // A new message implicitly terminates any open tool call.
          if (inFunctionCall) {
            eventStream$.sendActionExecutionEnd({
              actionExecutionId: currentToolCallId
            });
          }
          currentMessageId = chunk.data.id;
          eventStream$.sendTextMessageStart({
            messageId: currentMessageId
          });
          break;
        case "thread.message.delta":
          // Only the first content block of the delta is inspected/forwarded.
          if (((_a = chunk.data.delta.content) == null ? void 0 : _a[0].type) === "text") {
            eventStream$.sendTextMessageContent({
              messageId: currentMessageId,
              content: (_b = chunk.data.delta.content) == null ? void 0 : _b[0].text.value
            });
          }
          break;
        case "thread.message.completed":
          eventStream$.sendTextMessageEnd({
            messageId: currentMessageId
          });
          break;
        case "thread.run.step.delta":
          let toolCallId;
          let toolCallName;
          let toolCallArgs;
          // Only function-type tool calls are handled (first call in the delta).
          if (chunk.data.delta.step_details.type === "tool_calls" && ((_c = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _c[0].type) === "function") {
            toolCallId = (_d = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _d[0].id;
            toolCallName = (_e = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _e[0].function.name;
            toolCallArgs = (_f = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _f[0].function.arguments;
          }
          // Name + id together signal the start of a new tool call; argument
          // deltas arrive without them and are appended to the current call.
          if (toolCallName && toolCallId) {
            if (inFunctionCall) {
              eventStream$.sendActionExecutionEnd({
                actionExecutionId: currentToolCallId
              });
            }
            inFunctionCall = true;
            currentToolCallId = toolCallId;
            eventStream$.sendActionExecutionStart({
              actionExecutionId: currentToolCallId,
              parentMessageId: chunk.data.id,
              actionName: toolCallName
            });
          } else if (toolCallArgs) {
            eventStream$.sendActionExecutionArgs({
              actionExecutionId: currentToolCallId,
              args: toolCallArgs
            });
          }
          break;
      }
    }
    // Close any tool call still open when the stream ends.
    if (inFunctionCall) {
      eventStream$.sendActionExecutionEnd({
        actionExecutionId: currentToolCallId
      });
    }
    eventStream$.complete();
  });
}
|
|
804
|
+
};
|
|
805
|
+
// Bundler keep-names helper (defined earlier in this bundle, not visible here).
__name(OpenAIAssistantAdapter, "OpenAIAssistantAdapter");
|
|
806
|
+
/**
 * Resolves with the id announced by a "thread.run.created" event on an
 * Assistant event stream.
 *
 * BUGFIX: previously `reject` was declared but never called, so the promise
 * could never settle when the stream errored or finished without a run —
 * leaving callers of submitUserMessage hanging forever. Now the promise
 * rejects on stream error and on end-without-run.
 *
 * @param stream - Event-emitting Assistant stream (on/off for "event").
 * @returns Promise resolving to the run id.
 */
function getRunIdFromStream(stream) {
  return new Promise((resolve, reject) => {
    let runIdGetter = /* @__PURE__ */ __name((event) => {
      if (event.event === "thread.run.created") {
        const runId = event.data.id;
        stream.off("event", runIdGetter);
        resolve(runId);
      }
    }, "runIdGetter");
    stream.on("event", runIdGetter);
    // NOTE(review): "error"/"end" event names follow the openai-node streaming
    // helpers (AssistantStream) — confirm against the SDK version in use.
    stream.on("error", (err) => {
      stream.off("event", runIdGetter);
      reject(err);
    });
    stream.on("end", () => {
      stream.off("event", runIdGetter);
      // No-op when the run id already resolved above (promise is settled).
      reject(new Error("Stream ended before a run was created"));
    });
  });
}
__name(getRunIdFromStream, "getRunIdFromStream");
|
|
819
|
+
|
|
820
|
+
// src/service-adapters/unify/unify-adapter.ts
|
|
821
|
+
import OpenAI3 from "openai";
|
|
822
|
+
import { randomId as randomId4 } from "@copilotkit/shared";
|
|
823
|
+
/**
 * Service adapter for Unify's OpenAI-compatible router (api.unify.ai).
 * Streams chat completions and translates them into CopilotKit runtime
 * events. On the first chunk this adapter instance ever sees, it emits an
 * extra text message announcing which concrete model Unify routed to.
 */
var UnifyAdapter = class {
  // API key sent to the Unify endpoint.
  apiKey;
  // Unify model/route string; passed through to the completion call.
  model;
  // True until the first streamed chunk is seen. NOTE(review): this is
  // instance-level state, so the "Model used" banner is emitted only once per
  // adapter instance, not once per request — confirm this is intended.
  start;
  constructor(options) {
    if (options == null ? void 0 : options.apiKey) {
      this.apiKey = options.apiKey;
    } else {
      // NOTE(review): literal placeholder string, not an env-var lookup —
      // presumably a sentinel for "no key configured"; verify intended.
      this.apiKey = "UNIFY_API_KEY";
    }
    this.model = options == null ? void 0 : options.model;
    this.start = true;
  }
  /**
   * Runs one chat completion against Unify and streams the reply.
   * @param request - { threadId?, messages, actions, eventSource, forwardedParameters? }
   * @returns { threadId } — the incoming thread id, or a fresh random one.
   */
  async process(request) {
    const tools = request.actions.map(convertActionInputToOpenAITool);
    // Unify speaks the OpenAI wire protocol, so the OpenAI client is reused
    // with a different base URL.
    const openai = new OpenAI3({
      apiKey: this.apiKey,
      baseURL: "https://api.unify.ai/v0/"
    });
    const forwardedParameters = request.forwardedParameters;
    const messages = request.messages.map(convertMessageToOpenAIMessage);
    const stream = await openai.chat.completions.create({
      model: this.model,
      messages,
      stream: true,
      ...tools.length > 0 && {
        tools
      },
      ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
        temperature: forwardedParameters.temperature
      }
    });
    let model = null;
    let currentMessageId;
    let currentToolCallId;
    request.eventSource.stream(async (eventStream$) => {
      var _a, _b;
      // "message" while emitting text, "function" while emitting a tool call.
      let mode = null;
      for await (const chunk of stream) {
        if (this.start) {
          // One-time banner reporting the concrete model Unify selected.
          model = chunk.model;
          currentMessageId = randomId4();
          eventStream$.sendTextMessageStart({
            messageId: currentMessageId
          });
          eventStream$.sendTextMessageContent({
            messageId: currentMessageId,
            content: `Model used: ${model}
`
          });
          eventStream$.sendTextMessageEnd({
            messageId: currentMessageId
          });
          this.start = false;
        }
        const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
        const content = chunk.choices[0].delta.content;
        // Close the previous message/tool-call when the stream switches kind.
        if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
          mode = null;
          eventStream$.sendTextMessageEnd({
            messageId: currentMessageId
          });
        } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
          mode = null;
          eventStream$.sendActionExecutionEnd({
            actionExecutionId: currentToolCallId
          });
        }
        // Open a new message or tool call when idle.
        if (mode === null) {
          if (toolCall == null ? void 0 : toolCall.id) {
            mode = "function";
            currentToolCallId = toolCall.id;
            eventStream$.sendActionExecutionStart({
              actionExecutionId: currentToolCallId,
              actionName: toolCall.function.name
            });
          } else if (content) {
            mode = "message";
            currentMessageId = chunk.id;
            eventStream$.sendTextMessageStart({
              messageId: currentMessageId
            });
          }
        }
        // Forward the chunk's payload to whichever entity is open.
        if (mode === "message" && content) {
          eventStream$.sendTextMessageContent({
            messageId: currentMessageId,
            content
          });
        } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
          eventStream$.sendActionExecutionArgs({
            actionExecutionId: currentToolCallId,
            args: toolCall.function.arguments
          });
        }
      }
      // Close whatever is still open when the stream ends.
      if (mode === "message") {
        eventStream$.sendTextMessageEnd({
          messageId: currentMessageId
        });
      } else if (mode === "function") {
        eventStream$.sendActionExecutionEnd({
          actionExecutionId: currentToolCallId
        });
      }
      eventStream$.complete();
    });
    return {
      threadId: request.threadId || randomId4()
    };
  }
};
|
|
935
|
+
// Bundler keep-names helper (defined earlier in this bundle, not visible here).
__name(UnifyAdapter, "UnifyAdapter");
|
|
936
|
+
|
|
937
|
+
// src/service-adapters/groq/groq-adapter.ts
|
|
938
|
+
import { Groq } from "groq-sdk";
|
|
939
|
+
import { randomId as randomId5 } from "@copilotkit/shared";
|
|
940
|
+
// Default Groq model used when neither the adapter nor the request names one.
var DEFAULT_MODEL2 = "llama3-groq-70b-8192-tool-use-preview";
/**
 * Service adapter for Groq's OpenAI-compatible chat completions API.
 * Streams completions and translates them into CopilotKit runtime events.
 */
var GroqAdapter = class {
  // Model used when the request does not carry its own `model`.
  model = DEFAULT_MODEL2;
  // When true, parallel_tool_calls: false is sent with the request.
  disableParallelToolCalls = false;
  _groq;
  get groq() {
    return this._groq;
  }
  constructor(params) {
    // Use the injected client when provided; otherwise construct a default one.
    this._groq = (params == null ? void 0 : params.groq) || new Groq({});
    if (params == null ? void 0 : params.model) {
      this.model = params.model;
    }
    this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
  }
  /**
   * Runs one chat completion against Groq and streams the reply.
   * @param request - { threadId?, model?, messages, actions, eventSource, forwardedParameters? }
   * @returns { threadId } — the incoming thread id, or a fresh random one.
   */
  async process(request) {
    const { threadId, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
    const tools = actions.map(convertActionInputToOpenAITool);
    let openaiMessages = messages.map(convertMessageToOpenAIMessage);
    // Drop oldest non-system messages until tools + messages fit the budget.
    openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);
    // Translate the generic "function" tool choice into OpenAI's shape.
    let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
    if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {
      toolChoice = {
        type: "function",
        function: {
          name: forwardedParameters.toolChoiceFunctionName
        }
      };
    }
    const stream = await this.groq.chat.completions.create({
      model,
      stream: true,
      messages: openaiMessages,
      // Each optional parameter is spread in only when actually set.
      ...tools.length > 0 && {
        tools
      },
      ...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
        max_tokens: forwardedParameters.maxTokens
      },
      ...(forwardedParameters == null ? void 0 : forwardedParameters.stop) && {
        stop: forwardedParameters.stop
      },
      ...toolChoice && {
        tool_choice: toolChoice
      },
      ...this.disableParallelToolCalls && {
        parallel_tool_calls: false
      },
      ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
        temperature: forwardedParameters.temperature
      }
    });
    eventSource.stream(async (eventStream$) => {
      var _a, _b;
      // "message" while emitting text, "function" while emitting a tool call.
      let mode = null;
      let currentMessageId;
      let currentToolCallId;
      for await (const chunk of stream) {
        const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
        const content = chunk.choices[0].delta.content;
        // Close the previous message/tool-call when the stream switches kind.
        if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
          mode = null;
          eventStream$.sendTextMessageEnd({
            messageId: currentMessageId
          });
        } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
          mode = null;
          eventStream$.sendActionExecutionEnd({
            actionExecutionId: currentToolCallId
          });
        }
        // Open a new message or tool call when idle.
        if (mode === null) {
          if (toolCall == null ? void 0 : toolCall.id) {
            mode = "function";
            currentToolCallId = toolCall.id;
            eventStream$.sendActionExecutionStart({
              actionExecutionId: currentToolCallId,
              actionName: toolCall.function.name,
              parentMessageId: chunk.id
            });
          } else if (content) {
            mode = "message";
            currentMessageId = chunk.id;
            eventStream$.sendTextMessageStart({
              messageId: currentMessageId
            });
          }
        }
        // Forward the chunk's payload to whichever entity is open.
        if (mode === "message" && content) {
          eventStream$.sendTextMessageContent({
            messageId: currentMessageId,
            content
          });
        } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
          eventStream$.sendActionExecutionArgs({
            actionExecutionId: currentToolCallId,
            args: toolCall.function.arguments
          });
        }
      }
      // Close whatever is still open when the stream ends.
      if (mode === "message") {
        eventStream$.sendTextMessageEnd({
          messageId: currentMessageId
        });
      } else if (mode === "function") {
        eventStream$.sendActionExecutionEnd({
          actionExecutionId: currentToolCallId
        });
      }
      eventStream$.complete();
    });
    return {
      threadId: threadId || randomId5()
    };
  }
};
|
|
1056
|
+
// Bundler keep-names helper (defined earlier in this bundle, not visible here).
__name(GroqAdapter, "GroqAdapter");
|
|
1057
|
+
|
|
1058
|
+
// src/service-adapters/anthropic/anthropic-adapter.ts
|
|
1059
|
+
import Anthropic from "@anthropic-ai/sdk";
|
|
1060
|
+
|
|
1061
|
+
// src/service-adapters/anthropic/utils.ts
|
|
1062
|
+
// Trims a conversation so that tool definitions plus messages fit within the
// model's token budget. System messages are always kept (their cost is
// reserved up front); the remaining messages are kept newest-first until the
// budget runs out, after which all older non-system messages are dropped.
// Throws when the tools alone, or the system messages, exceed the budget.
function limitMessagesToTokenCount2(messages, tools, model, maxTokens) {
  // A falsy budget (undefined or 0) falls back to the default ceiling.
  let budget = maxTokens || MAX_TOKENS;
  const toolsNumTokens = countToolsTokens2(model, tools);
  if (toolsNumTokens > budget) {
    throw new Error(`Too many tokens in function definitions: ${toolsNumTokens} > ${budget}`);
  }
  budget -= toolsNumTokens;
  // Reserve room for every system message before considering the rest.
  for (const message of messages) {
    if (message.role !== "system") {
      continue;
    }
    budget -= countMessageTokens2(model, message);
    if (budget < 0) {
      throw new Error("Not enough tokens for system message.");
    }
  }
  const result = [];
  let cutoff = false;
  // Walk newest-to-oldest; unshift restores the original ordering.
  for (const message of [...messages].reverse()) {
    if (message.role === "system") {
      result.unshift(message);
      continue;
    }
    if (cutoff) {
      continue;
    }
    const numTokens = countMessageTokens2(model, message);
    if (budget < numTokens) {
      cutoff = true;
      continue;
    }
    result.unshift(message);
    budget -= numTokens;
  }
  return result;
}
__name(limitMessagesToTokenCount2, "limitMessagesToTokenCount");
|
|
1101
|
+
// Default token budget (128k) used by limitMessagesToTokenCount2 when the
// caller passes no explicit maxTokens.
var MAX_TOKENS = 128e3;
|
|
1102
|
+
// Approximate token cost of the serialized tool definitions; zero when there
// are no tools to send.
function countToolsTokens2(model, tools) {
  if (tools.length === 0) {
    return 0;
  }
  return countTokens2(model, JSON.stringify(tools));
}
__name(countToolsTokens2, "countToolsTokens");
|
|
1110
|
+
// Approximate token cost of a single message, based on its serialized
// content (empty string when the content serializes to undefined).
function countMessageTokens2(model, message) {
  const serialized = JSON.stringify(message.content) || "";
  return countTokens2(model, serialized);
}
__name(countMessageTokens2, "countMessageTokens");
|
|
1114
|
+
// Crude token estimate: roughly one token per three characters. The model
// argument is accepted for interface symmetry but is not consulted.
function countTokens2(model, text) {
  const APPROX_CHARS_PER_TOKEN = 3;
  return text.length / APPROX_CHARS_PER_TOKEN;
}
|
|
1117
|
+
// Bundler keep-names helper (defined earlier in this bundle, not visible here).
__name(countTokens2, "countTokens");
|
|
1118
|
+
// Maps a CopilotKit action definition onto Anthropic's tool schema: the
// JSON-encoded parameter schema is parsed into the tool's input_schema.
// Throws (via JSON.parse) when action.jsonSchema is not valid JSON.
function convertActionInputToAnthropicTool(action) {
  const { name, description } = action;
  return {
    name,
    description,
    input_schema: JSON.parse(action.jsonSchema)
  };
}
|
|
1125
|
+
// Bundler keep-names helper (defined earlier in this bundle, not visible here).
__name(convertActionInputToAnthropicTool, "convertActionInputToAnthropicTool");
|
|
1126
|
+
// Translates a CopilotKit message into Anthropic's message-param shape.
// Text messages become text blocks (system text is re-labelled as an
// assistant block with an explicit prefix, since this path has no separate
// system slot), action executions become tool_use blocks, and action results
// become tool_result blocks. Any other message kind yields undefined.
function convertMessageToAnthropicMessage(message) {
  if (message.isTextMessage()) {
    if (message.role === "system") {
      return {
        role: "assistant",
        content: [
          {
            type: "text",
            text: "THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: " + message.content
          }
        ]
      };
    }
    const role = message.role === "user" ? "user" : "assistant";
    return {
      role,
      content: [
        {
          type: "text",
          text: message.content
        }
      ]
    };
  }
  if (message.isActionExecutionMessage()) {
    return {
      role: "assistant",
      content: [
        {
          id: message.id,
          type: "tool_use",
          input: message.arguments,
          name: message.name
        }
      ]
    };
  }
  if (message.isResultMessage()) {
    return {
      role: "user",
      content: [
        {
          type: "tool_result",
          content: message.result,
          tool_use_id: message.actionExecutionId
        }
      ]
    };
  }
}
|
|
1174
|
+
// Bundler keep-names helper (defined earlier in this bundle, not visible here).
__name(convertMessageToAnthropicMessage, "convertMessageToAnthropicMessage");
|
|
1175
|
+
// Merges consecutive messages that share a role into a single message whose
// content blocks are concatenated, producing alternating-role output.
function groupAnthropicMessagesByRole(messageParams) {
  const grouped = [];
  for (const message of messageParams) {
    const previous = grouped[grouped.length - 1];
    if (previous && previous.role === message.role) {
      // Same role as the last group: append this message's content blocks.
      previous.content = previous.content.concat(message.content);
    } else {
      // Role changed (or first message): start a new group with copied content.
      grouped.push({
        role: message.role,
        content: [
          ...message.content
        ]
      });
    }
  }
  return grouped;
}
|
|
1191
|
+
// Bundler keep-names helper (defined earlier in this bundle, not visible here).
__name(groupAnthropicMessagesByRole, "groupAnthropicMessagesByRole");
|
|
1192
|
+
|
|
1193
|
+
// src/service-adapters/anthropic/anthropic-adapter.ts
|
|
1194
|
+
import { randomId as randomId6 } from "@copilotkit/shared";
|
|
1195
|
+
// Default Claude model used when neither the adapter nor the request names one.
var DEFAULT_MODEL3 = "claude-3-sonnet-20240229";
/**
 * Service adapter for Anthropic's Messages API (Claude).
 *
 * The first message of the request is consumed as the system prompt; the
 * remainder is converted to Anthropic message params, trimmed to the token
 * budget, and merged into alternating user/assistant turns. The streamed
 * reply is translated into CopilotKit runtime events, with leading
 * `<thinking>` blocks filtered out of the visible text.
 */
var AnthropicAdapter = class {
  // Model used when the request does not carry its own `model`.
  model = DEFAULT_MODEL3;
  _anthropic;
  get anthropic() {
    return this._anthropic;
  }
  constructor(params) {
    // Use the injected client when provided; otherwise construct a default one.
    this._anthropic = (params == null ? void 0 : params.anthropic) || new Anthropic({});
    if (params == null ? void 0 : params.model) {
      this.model = params.model;
    }
  }
  /**
   * Runs one request against Anthropic and streams the reply.
   * @param request - { threadId?, model?, messages, actions, eventSource, forwardedParameters? }
   * @returns { threadId } — the incoming thread id, or a fresh random one.
   */
  async process(request) {
    const { threadId, model = this.model, messages: rawMessages, actions, eventSource, forwardedParameters } = request;
    const tools = actions.map(convertActionInputToAnthropicTool);
    // Copy so shift() below does not mutate the caller's array.
    const messages = [
      ...rawMessages
    ];
    // First message doubles as the system prompt when it is a text message.
    const instructionsMessage = messages.shift();
    const instructions = instructionsMessage.isTextMessage() ? instructionsMessage.content : "";
    let anthropicMessages = messages.map(convertMessageToAnthropicMessage);
    anthropicMessages = limitMessagesToTokenCount2(anthropicMessages, tools, model);
    anthropicMessages = groupAnthropicMessagesByRole(anthropicMessages);
    // Translate the generic "function" tool choice into Anthropic's shape.
    let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
    if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {
      toolChoice = {
        type: "tool",
        name: forwardedParameters.toolChoiceFunctionName
      };
    }
    const stream = this.anthropic.messages.create({
      system: instructions,
      // BUGFIX: honor the per-request model override. This previously sent
      // `this.model` unconditionally, silently ignoring `request.model` even
      // though it was destructured above (GroqAdapter honors it; now consistent).
      model,
      messages: anthropicMessages,
      max_tokens: (forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) || 1024,
      ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) ? {
        temperature: forwardedParameters.temperature
      } : {},
      ...tools.length > 0 && {
        tools
      },
      ...toolChoice && {
        tool_choice: toolChoice
      },
      stream: true
    });
    eventSource.stream(async (eventStream$) => {
      // "message" while emitting text, "function" while emitting a tool call.
      let mode = null;
      let didOutputText = false;
      let currentMessageId = randomId6();
      let currentToolCallId = randomId6();
      let filterThinkingTextBuffer = new FilterThinkingTextBuffer();
      for await (const chunk of await stream) {
        if (chunk.type === "message_start") {
          currentMessageId = chunk.message.id;
        } else if (chunk.type === "content_block_start") {
          if (chunk.content_block.type === "text") {
            // Delay sendTextMessageStart until filtered text actually appears.
            didOutputText = false;
            filterThinkingTextBuffer.reset();
            mode = "message";
          } else if (chunk.content_block.type === "tool_use") {
            currentToolCallId = chunk.content_block.id;
            eventStream$.sendActionExecutionStart({
              actionExecutionId: currentToolCallId,
              actionName: chunk.content_block.name,
              parentMessageId: currentMessageId
            });
            mode = "function";
          }
        } else if (chunk.type === "content_block_delta") {
          if (chunk.delta.type === "text_delta") {
            const text = filterThinkingTextBuffer.onTextChunk(chunk.delta.text);
            if (text.length > 0) {
              if (!didOutputText) {
                eventStream$.sendTextMessageStart({
                  messageId: currentMessageId
                });
                didOutputText = true;
              }
              eventStream$.sendTextMessageContent({
                messageId: currentMessageId,
                content: text
              });
            }
          } else if (chunk.delta.type === "input_json_delta") {
            eventStream$.sendActionExecutionArgs({
              actionExecutionId: currentToolCallId,
              args: chunk.delta.partial_json
            });
          }
        } else if (chunk.type === "content_block_stop") {
          if (mode === "message") {
            // Only close the text message if we ever opened one.
            if (didOutputText) {
              eventStream$.sendTextMessageEnd({
                messageId: currentMessageId
              });
            }
          } else if (mode === "function") {
            eventStream$.sendActionExecutionEnd({
              actionExecutionId: currentToolCallId
            });
          }
        }
      }
      eventStream$.complete();
    });
    return {
      threadId: threadId || randomId6()
    };
  }
};
|
|
1307
|
+
// Bundler keep-names helper (defined earlier in this bundle, not visible here).
__name(AnthropicAdapter, "AnthropicAdapter");
|
|
1308
|
+
// Delimiters around chain-of-thought text in streamed Claude output; the
// FilterThinkingTextBuffer below strips everything between them.
var THINKING_TAG = "<thinking>";
var THINKING_TAG_END = "</thinking>";
|
|
1310
|
+
/**
 * Stateful filter that strips a leading `<thinking>...</thinking>` block
 * from a stream of text chunks.
 *
 * While the accumulated text is still a plausible prefix of the tag, chunks
 * are withheld (empty string returned). Once the closing tag is seen, the
 * text after it is released and all later chunks pass straight through.
 *
 * BUGFIX: previously, if the withheld text turned out NOT to start with the
 * tag (e.g. chunks "<thi" then "s is fine"), only the current chunk was
 * returned and the earlier withheld characters were silently dropped. On the
 * first prefix mismatch we now flush the entire buffer and stop filtering.
 */
var FilterThinkingTextBuffer = /* @__PURE__ */ __name(class FilterThinkingTextBuffer2 {
  // Accumulated text, used only until the thinking-tag decision is made.
  buffer;
  // Once true, the decision has been made and chunks pass straight through.
  didFilterThinkingTag = false;
  constructor() {
    this.buffer = "";
  }
  // Feed one streamed chunk; returns the text that should be surfaced now.
  onTextChunk(text) {
    this.buffer += text;
    if (this.didFilterThinkingTag) {
      return text;
    }
    const potentialTag = this.buffer.slice(0, THINKING_TAG.length);
    if (THINKING_TAG.startsWith(potentialTag)) {
      // The buffer is (so far) a prefix of "<thinking>", or starts with it.
      if (this.buffer.includes(THINKING_TAG_END)) {
        const end = this.buffer.indexOf(THINKING_TAG_END);
        const filteredText = this.buffer.slice(end + THINKING_TAG_END.length);
        this.buffer = filteredText;
        this.didFilterThinkingTag = true;
        return filteredText;
      } else {
        // Still inside (or possibly entering) the thinking block: withhold.
        return "";
      }
    }
    // The stream does not open with <thinking> after all: flush everything
    // withheld so far and pass subsequent chunks through unmodified.
    this.didFilterThinkingTag = true;
    return this.buffer;
  }
  reset() {
    this.buffer = "";
    this.didFilterThinkingTag = false;
  }
}, "FilterThinkingTextBuffer");
|
|
1340
|
+
|
|
1341
|
+
// src/service-adapters/experimental/ollama/ollama-adapter.ts
|
|
1342
|
+
import { Ollama } from "@langchain/community/llms/ollama";
|
|
1343
|
+
import { randomId as randomId7 } from "@copilotkit/shared";
|
|
1344
|
+
// Model tag used when the adapter is constructed without an explicit model.
var DEFAULT_MODEL4 = "llama3:latest";
/**
 * Experimental adapter that drives a locally hosted Ollama model through
 * LangChain's community bindings. Only text messages are forwarded; actions
 * are accepted in the request but not used by this adapter.
 */
var ExperimentalOllamaAdapter = class {
  // Name of the local Ollama model to run.
  model;
  constructor(options) {
    this.model = (options == null ? void 0 : options.model) || DEFAULT_MODEL4;
  }
  /**
   * Streams a completion for the conversation's text messages.
   * @returns { threadId } — the incoming thread id, or a fresh random one.
   */
  async process(request) {
    const { messages, actions, eventSource } = request;
    const ollama = new Ollama({
      model: this.model
    });
    // Ollama's LLM interface takes plain strings, so flatten the text messages.
    const contents = messages.filter((m) => m.isTextMessage()).map((m) => m.content);
    const _stream = await ollama.stream(contents);
    eventSource.stream(async (eventStream$) => {
      const messageId = randomId7();
      eventStream$.sendTextMessageStart({
        messageId
      });
      for await (const chunkText of _stream) {
        eventStream$.sendTextMessageContent({
          messageId,
          content: chunkText
        });
      }
      eventStream$.sendTextMessageEnd({
        messageId
      });
      eventStream$.complete();
    });
    return {
      threadId: request.threadId || randomId7()
    };
  }
};
|
|
1382
|
+
// Bundler keep-names helper (defined earlier in this bundle, not visible here).
__name(ExperimentalOllamaAdapter, "ExperimentalOllamaAdapter");
|
|
1383
|
+
|
|
1384
|
+
// src/service-adapters/experimental/empty/empty-adapter.ts
|
|
1385
|
+
import { randomId as randomId8 } from "@copilotkit/shared";
|
|
1386
|
+
// No-op adapter: performs no model call and simply echoes back a thread id,
// generating a fresh random one when the request carries none.
var ExperimentalEmptyAdapter = class {
  async process(request) {
    const threadId = request.threadId || randomId8();
    return {
      threadId
    };
  }
};
|
|
1393
|
+
// Bundler keep-names helper (defined earlier in this bundle, not visible here).
__name(ExperimentalEmptyAdapter, "ExperimentalEmptyAdapter");
|
|
1394
|
+
|
|
1395
|
+
export {
|
|
1396
|
+
RemoteChain,
|
|
1397
|
+
OpenAIAdapter,
|
|
1398
|
+
streamLangChainResponse,
|
|
1399
|
+
LangChainAdapter,
|
|
1400
|
+
GoogleGenerativeAIAdapter,
|
|
1401
|
+
OpenAIAssistantAdapter,
|
|
1402
|
+
UnifyAdapter,
|
|
1403
|
+
GroqAdapter,
|
|
1404
|
+
AnthropicAdapter,
|
|
1405
|
+
ExperimentalOllamaAdapter,
|
|
1406
|
+
ExperimentalEmptyAdapter
|
|
1407
|
+
};
|
|
1408
|
+
//# sourceMappingURL=chunk-CLGKEUOA.mjs.map
|