@copilotkit/runtime 1.2.2-feat-runtime-remote-actions.1 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -13
- package/dist/{chunk-BPEPG56J.mjs → chunk-47TPNJX7.mjs} +2 -2
- package/dist/{chunk-Y5TWOZFD.mjs → chunk-4BZ6WXBB.mjs} +3 -3
- package/dist/{chunk-3SKYFYY2.mjs → chunk-IRRAL44O.mjs} +4 -5
- package/dist/chunk-IRRAL44O.mjs.map +1 -0
- package/dist/{chunk-BJ2LVHWA.mjs → chunk-OF6AN6HF.mjs} +521 -256
- package/dist/chunk-OF6AN6HF.mjs.map +1 -0
- package/dist/{chunk-UL2OKN2O.mjs → chunk-VWS65V7Y.mjs} +2 -2
- package/dist/{chunk-U2EKJP47.mjs → chunk-XCGRXAJU.mjs} +2 -2
- package/dist/{copilot-runtime-d427e991.d.ts → copilot-runtime-a1b5f1ce.d.ts} +1 -1
- package/dist/{index-079752b9.d.ts → groq-adapter-069ac812.d.ts} +82 -82
- package/dist/index.d.ts +7 -5
- package/dist/index.js +721 -454
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +10 -6
- package/dist/index.mjs.map +1 -1
- package/dist/{langserve-d6073a3b.d.ts → langserve-15a1286b.d.ts} +1 -1
- package/dist/lib/index.d.ts +4 -4
- package/dist/lib/index.js +445 -446
- package/dist/lib/index.js.map +1 -1
- package/dist/lib/index.mjs +6 -6
- package/dist/lib/integrations/index.d.ts +3 -3
- package/dist/lib/integrations/index.js +3 -4
- package/dist/lib/integrations/index.js.map +1 -1
- package/dist/lib/integrations/index.mjs +4 -4
- package/dist/lib/integrations/nest/index.d.ts +2 -2
- package/dist/lib/integrations/nest/index.js +3 -4
- package/dist/lib/integrations/nest/index.js.map +1 -1
- package/dist/lib/integrations/nest/index.mjs +2 -2
- package/dist/lib/integrations/node-express/index.d.ts +2 -2
- package/dist/lib/integrations/node-express/index.js +3 -4
- package/dist/lib/integrations/node-express/index.js.map +1 -1
- package/dist/lib/integrations/node-express/index.mjs +2 -2
- package/dist/lib/integrations/node-http/index.d.ts +2 -2
- package/dist/lib/integrations/node-http/index.js +3 -4
- package/dist/lib/integrations/node-http/index.js.map +1 -1
- package/dist/lib/integrations/node-http/index.mjs +1 -1
- package/dist/service-adapters/index.d.ts +47 -3
- package/dist/service-adapters/index.js +708 -442
- package/dist/service-adapters/index.js.map +1 -1
- package/dist/service-adapters/index.mjs +3 -1
- package/package.json +6 -7
- package/src/index.ts +1 -0
- package/src/service-adapters/anthropic/anthropic-adapter.ts +197 -0
- package/src/service-adapters/anthropic/utils.ts +144 -0
- package/src/service-adapters/index.ts +9 -7
- package/dist/chunk-3SKYFYY2.mjs.map +0 -1
- package/dist/chunk-BJ2LVHWA.mjs.map +0 -1
- /package/dist/{chunk-BPEPG56J.mjs.map → chunk-47TPNJX7.mjs.map} +0 -0
- /package/dist/{chunk-Y5TWOZFD.mjs.map → chunk-4BZ6WXBB.mjs.map} +0 -0
- /package/dist/{chunk-UL2OKN2O.mjs.map → chunk-VWS65V7Y.mjs.map} +0 -0
- /package/dist/{chunk-U2EKJP47.mjs.map → chunk-XCGRXAJU.mjs.map} +0 -0
|
@@ -12,6 +12,84 @@ import {
|
|
|
12
12
|
__name
|
|
13
13
|
} from "./chunk-44O2JGUY.mjs";
|
|
14
14
|
|
|
15
|
+
// src/service-adapters/langchain/langserve.ts
|
|
16
|
+
import { RemoteRunnable } from "langchain/runnables/remote";
|
|
17
|
+
var RemoteChain = class {
|
|
18
|
+
name;
|
|
19
|
+
description;
|
|
20
|
+
chainUrl;
|
|
21
|
+
parameters;
|
|
22
|
+
parameterType;
|
|
23
|
+
constructor(options) {
|
|
24
|
+
this.name = options.name;
|
|
25
|
+
this.description = options.description;
|
|
26
|
+
this.chainUrl = options.chainUrl;
|
|
27
|
+
this.parameters = options.parameters;
|
|
28
|
+
this.parameterType = options.parameterType || "multi";
|
|
29
|
+
}
|
|
30
|
+
async toAction() {
|
|
31
|
+
if (!this.parameters) {
|
|
32
|
+
await this.inferLangServeParameters();
|
|
33
|
+
}
|
|
34
|
+
return {
|
|
35
|
+
name: this.name,
|
|
36
|
+
description: this.description,
|
|
37
|
+
parameters: this.parameters,
|
|
38
|
+
handler: async (args) => {
|
|
39
|
+
const runnable = new RemoteRunnable({
|
|
40
|
+
url: this.chainUrl
|
|
41
|
+
});
|
|
42
|
+
let input;
|
|
43
|
+
if (this.parameterType === "single") {
|
|
44
|
+
input = args[Object.keys(args)[0]];
|
|
45
|
+
} else {
|
|
46
|
+
input = args;
|
|
47
|
+
}
|
|
48
|
+
return await runnable.invoke(input);
|
|
49
|
+
}
|
|
50
|
+
};
|
|
51
|
+
}
|
|
52
|
+
async inferLangServeParameters() {
|
|
53
|
+
const supportedTypes = [
|
|
54
|
+
"string",
|
|
55
|
+
"number",
|
|
56
|
+
"boolean"
|
|
57
|
+
];
|
|
58
|
+
let schemaUrl = this.chainUrl.replace(/\/+$/, "") + "/input_schema";
|
|
59
|
+
let schema = await fetch(schemaUrl).then((res) => res.json()).catch(() => {
|
|
60
|
+
throw new Error("Failed to fetch langserve schema at " + schemaUrl);
|
|
61
|
+
});
|
|
62
|
+
if (supportedTypes.includes(schema.type)) {
|
|
63
|
+
this.parameterType = "single";
|
|
64
|
+
this.parameters = [
|
|
65
|
+
{
|
|
66
|
+
name: "input",
|
|
67
|
+
type: schema.type,
|
|
68
|
+
description: "The input to the chain"
|
|
69
|
+
}
|
|
70
|
+
];
|
|
71
|
+
} else if (schema.type === "object") {
|
|
72
|
+
this.parameterType = "multi";
|
|
73
|
+
this.parameters = Object.keys(schema.properties).map((key) => {
|
|
74
|
+
var _a;
|
|
75
|
+
let property = schema.properties[key];
|
|
76
|
+
if (!supportedTypes.includes(property.type)) {
|
|
77
|
+
throw new Error("Unsupported schema type");
|
|
78
|
+
}
|
|
79
|
+
return {
|
|
80
|
+
name: key,
|
|
81
|
+
type: property.type,
|
|
82
|
+
description: property.description || "",
|
|
83
|
+
required: ((_a = schema.required) == null ? void 0 : _a.includes(key)) || false
|
|
84
|
+
};
|
|
85
|
+
});
|
|
86
|
+
} else {
|
|
87
|
+
throw new Error("Unsupported schema type");
|
|
88
|
+
}
|
|
89
|
+
}
|
|
90
|
+
};
|
|
91
|
+
__name(RemoteChain, "RemoteChain");
|
|
92
|
+
|
|
15
93
|
// src/service-adapters/openai/openai-adapter.ts
|
|
16
94
|
import OpenAI from "openai";
|
|
17
95
|
|
|
@@ -247,167 +325,37 @@ var OpenAIAdapter = class {
|
|
|
247
325
|
};
|
|
248
326
|
__name(OpenAIAdapter, "OpenAIAdapter");
|
|
249
327
|
|
|
250
|
-
// src/service-adapters/
|
|
251
|
-
import
|
|
252
|
-
var
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
this.openai = params.openai || new OpenAI2({});
|
|
260
|
-
this.codeInterpreterEnabled = params.codeInterpreterEnabled === false || true;
|
|
261
|
-
this.fileSearchEnabled = params.fileSearchEnabled === false || true;
|
|
262
|
-
this.assistantId = params.assistantId;
|
|
263
|
-
this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
|
|
328
|
+
// src/service-adapters/langchain/langchain-adapter.ts
|
|
329
|
+
import { randomId as randomId2 } from "@copilotkit/shared";
|
|
330
|
+
var LangChainAdapter = class {
|
|
331
|
+
options;
|
|
332
|
+
/**
|
|
333
|
+
* To use LangChain as a backend, provide a handler function to the adapter with your custom LangChain logic.
|
|
334
|
+
*/
|
|
335
|
+
constructor(options) {
|
|
336
|
+
this.options = options;
|
|
264
337
|
}
|
|
265
338
|
async process(request) {
|
|
266
|
-
const {
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
nextRunId = await this.submitToolOutputs(threadId, runId, messages, eventSource);
|
|
272
|
-
} else if (lastMessage instanceof TextMessage) {
|
|
273
|
-
nextRunId = await this.submitUserMessage(threadId, messages, actions, eventSource, forwardedParameters);
|
|
274
|
-
} else {
|
|
275
|
-
throw new Error("No actionable message found in the messages");
|
|
276
|
-
}
|
|
277
|
-
return {
|
|
339
|
+
const { eventSource, model, actions, messages, threadId, runId } = request;
|
|
340
|
+
const result = await this.options.chainFn({
|
|
341
|
+
messages: messages.map(convertMessageToLangChainMessage),
|
|
342
|
+
tools: actions.map(convertActionInputToLangChainTool),
|
|
343
|
+
model,
|
|
278
344
|
threadId,
|
|
279
|
-
runId
|
|
280
|
-
};
|
|
281
|
-
}
|
|
282
|
-
async submitToolOutputs(threadId, runId, messages, eventSource) {
|
|
283
|
-
let run = await this.openai.beta.threads.runs.retrieve(threadId, runId);
|
|
284
|
-
if (!run.required_action) {
|
|
285
|
-
throw new Error("No tool outputs required");
|
|
286
|
-
}
|
|
287
|
-
const toolCallsIds = run.required_action.submit_tool_outputs.tool_calls.map((toolCall) => toolCall.id);
|
|
288
|
-
const resultMessages = messages.filter((message) => message instanceof ResultMessage && toolCallsIds.includes(message.actionExecutionId));
|
|
289
|
-
if (toolCallsIds.length != resultMessages.length) {
|
|
290
|
-
throw new Error("Number of function results does not match the number of tool calls");
|
|
291
|
-
}
|
|
292
|
-
const toolOutputs = resultMessages.map((message) => {
|
|
293
|
-
return {
|
|
294
|
-
tool_call_id: message.actionExecutionId,
|
|
295
|
-
output: message.result
|
|
296
|
-
};
|
|
297
|
-
});
|
|
298
|
-
const stream = this.openai.beta.threads.runs.submitToolOutputsStream(threadId, runId, {
|
|
299
|
-
tool_outputs: toolOutputs,
|
|
300
|
-
...this.disableParallelToolCalls && {
|
|
301
|
-
parallel_tool_calls: false
|
|
302
|
-
}
|
|
303
|
-
});
|
|
304
|
-
await this.streamResponse(stream, eventSource);
|
|
305
|
-
return runId;
|
|
306
|
-
}
|
|
307
|
-
async submitUserMessage(threadId, messages, actions, eventSource, forwardedParameters) {
|
|
308
|
-
messages = [
|
|
309
|
-
...messages
|
|
310
|
-
];
|
|
311
|
-
const instructionsMessage = messages.shift();
|
|
312
|
-
const instructions = instructionsMessage instanceof TextMessage ? instructionsMessage.content : "";
|
|
313
|
-
const userMessage = messages.map(convertMessageToOpenAIMessage).map(convertSystemMessageToAssistantAPI).at(-1);
|
|
314
|
-
if (userMessage.role !== "user") {
|
|
315
|
-
throw new Error("No user message found");
|
|
316
|
-
}
|
|
317
|
-
await this.openai.beta.threads.messages.create(threadId, {
|
|
318
|
-
role: "user",
|
|
319
|
-
content: userMessage.content
|
|
320
|
-
});
|
|
321
|
-
const openaiTools = actions.map(convertActionInputToOpenAITool);
|
|
322
|
-
const tools = [
|
|
323
|
-
...openaiTools,
|
|
324
|
-
...this.codeInterpreterEnabled ? [
|
|
325
|
-
{
|
|
326
|
-
type: "code_interpreter"
|
|
327
|
-
}
|
|
328
|
-
] : [],
|
|
329
|
-
...this.fileSearchEnabled ? [
|
|
330
|
-
{
|
|
331
|
-
type: "file_search"
|
|
332
|
-
}
|
|
333
|
-
] : []
|
|
334
|
-
];
|
|
335
|
-
let stream = this.openai.beta.threads.runs.stream(threadId, {
|
|
336
|
-
assistant_id: this.assistantId,
|
|
337
|
-
instructions,
|
|
338
|
-
tools,
|
|
339
|
-
...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
|
|
340
|
-
max_completion_tokens: forwardedParameters.maxTokens
|
|
341
|
-
},
|
|
342
|
-
...this.disableParallelToolCalls && {
|
|
343
|
-
parallel_tool_calls: false
|
|
344
|
-
}
|
|
345
|
+
runId
|
|
345
346
|
});
|
|
346
|
-
await this.streamResponse(stream, eventSource);
|
|
347
|
-
return getRunIdFromStream(stream);
|
|
348
|
-
}
|
|
349
|
-
async streamResponse(stream, eventSource) {
|
|
350
347
|
eventSource.stream(async (eventStream$) => {
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
case "thread.message.created":
|
|
356
|
-
if (inFunctionCall) {
|
|
357
|
-
eventStream$.sendActionExecutionEnd();
|
|
358
|
-
}
|
|
359
|
-
eventStream$.sendTextMessageStart(chunk.data.id);
|
|
360
|
-
break;
|
|
361
|
-
case "thread.message.delta":
|
|
362
|
-
if (((_a = chunk.data.delta.content) == null ? void 0 : _a[0].type) === "text") {
|
|
363
|
-
eventStream$.sendTextMessageContent((_b = chunk.data.delta.content) == null ? void 0 : _b[0].text.value);
|
|
364
|
-
}
|
|
365
|
-
break;
|
|
366
|
-
case "thread.message.completed":
|
|
367
|
-
eventStream$.sendTextMessageEnd();
|
|
368
|
-
break;
|
|
369
|
-
case "thread.run.step.delta":
|
|
370
|
-
let toolCallId;
|
|
371
|
-
let toolCallName;
|
|
372
|
-
let toolCallArgs;
|
|
373
|
-
if (chunk.data.delta.step_details.type === "tool_calls" && ((_c = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _c[0].type) === "function") {
|
|
374
|
-
toolCallId = (_d = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _d[0].id;
|
|
375
|
-
toolCallName = (_e = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _e[0].function.name;
|
|
376
|
-
toolCallArgs = (_f = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _f[0].function.arguments;
|
|
377
|
-
}
|
|
378
|
-
if (toolCallName && toolCallId) {
|
|
379
|
-
if (inFunctionCall) {
|
|
380
|
-
eventStream$.sendActionExecutionEnd();
|
|
381
|
-
}
|
|
382
|
-
inFunctionCall = true;
|
|
383
|
-
eventStream$.sendActionExecutionStart(toolCallId, toolCallName);
|
|
384
|
-
} else if (toolCallArgs) {
|
|
385
|
-
eventStream$.sendActionExecutionArgs(toolCallArgs);
|
|
386
|
-
}
|
|
387
|
-
break;
|
|
388
|
-
}
|
|
389
|
-
}
|
|
390
|
-
if (inFunctionCall) {
|
|
391
|
-
eventStream$.sendActionExecutionEnd();
|
|
392
|
-
}
|
|
393
|
-
eventStream$.complete();
|
|
348
|
+
await streamLangChainResponse({
|
|
349
|
+
result,
|
|
350
|
+
eventStream$
|
|
351
|
+
});
|
|
394
352
|
});
|
|
353
|
+
return {
|
|
354
|
+
threadId: threadId || randomId2()
|
|
355
|
+
};
|
|
395
356
|
}
|
|
396
357
|
};
|
|
397
|
-
__name(
|
|
398
|
-
function getRunIdFromStream(stream) {
|
|
399
|
-
return new Promise((resolve, reject) => {
|
|
400
|
-
let runIdGetter = /* @__PURE__ */ __name((event) => {
|
|
401
|
-
if (event.event === "thread.run.created") {
|
|
402
|
-
const runId = event.data.id;
|
|
403
|
-
stream.off("event", runIdGetter);
|
|
404
|
-
resolve(runId);
|
|
405
|
-
}
|
|
406
|
-
}, "runIdGetter");
|
|
407
|
-
stream.on("event", runIdGetter);
|
|
408
|
-
});
|
|
409
|
-
}
|
|
410
|
-
__name(getRunIdFromStream, "getRunIdFromStream");
|
|
358
|
+
__name(LangChainAdapter, "LangChainAdapter");
|
|
411
359
|
|
|
412
360
|
// src/service-adapters/google/google-genai-adapter.ts
|
|
413
361
|
import { GoogleGenerativeAI } from "@google/generative-ai";
|
|
@@ -498,7 +446,7 @@ function tryParseJson(str) {
|
|
|
498
446
|
__name(tryParseJson, "tryParseJson");
|
|
499
447
|
|
|
500
448
|
// src/service-adapters/google/google-genai-adapter.ts
|
|
501
|
-
import { randomId as
|
|
449
|
+
import { randomId as randomId3 } from "@copilotkit/shared";
|
|
502
450
|
var GoogleGenerativeAIAdapter = class {
|
|
503
451
|
model;
|
|
504
452
|
constructor(options) {
|
|
@@ -565,7 +513,7 @@ var GoogleGenerativeAIAdapter = class {
|
|
|
565
513
|
}
|
|
566
514
|
if (!isTextMessage) {
|
|
567
515
|
isTextMessage = true;
|
|
568
|
-
eventStream$.sendTextMessageStart(
|
|
516
|
+
eventStream$.sendTextMessageStart(randomId3());
|
|
569
517
|
}
|
|
570
518
|
eventStream$.sendTextMessageContent(chunkText);
|
|
571
519
|
}
|
|
@@ -575,13 +523,13 @@ var GoogleGenerativeAIAdapter = class {
|
|
|
575
523
|
let calls = (await result.response).functionCalls();
|
|
576
524
|
if (calls) {
|
|
577
525
|
for (let call of calls) {
|
|
578
|
-
eventStream$.sendActionExecution(
|
|
526
|
+
eventStream$.sendActionExecution(randomId3(), call.name, JSON.stringify(replaceNewlinesInObject(call.args)));
|
|
579
527
|
}
|
|
580
528
|
}
|
|
581
529
|
eventStream$.complete();
|
|
582
530
|
});
|
|
583
531
|
return {
|
|
584
|
-
threadId: request.threadId ||
|
|
532
|
+
threadId: request.threadId || randomId3()
|
|
585
533
|
};
|
|
586
534
|
}
|
|
587
535
|
};
|
|
@@ -604,115 +552,167 @@ function replaceNewlinesInObject(obj) {
|
|
|
604
552
|
}
|
|
605
553
|
__name(replaceNewlinesInObject, "replaceNewlinesInObject");
|
|
606
554
|
|
|
607
|
-
// src/service-adapters/
|
|
608
|
-
import
|
|
609
|
-
var
|
|
610
|
-
|
|
611
|
-
|
|
612
|
-
|
|
613
|
-
|
|
614
|
-
|
|
615
|
-
|
|
555
|
+
// src/service-adapters/openai/openai-assistant-adapter.ts
|
|
556
|
+
import OpenAI2 from "openai";
|
|
557
|
+
var OpenAIAssistantAdapter = class {
|
|
558
|
+
openai;
|
|
559
|
+
codeInterpreterEnabled;
|
|
560
|
+
assistantId;
|
|
561
|
+
fileSearchEnabled;
|
|
562
|
+
disableParallelToolCalls;
|
|
563
|
+
constructor(params) {
|
|
564
|
+
this.openai = params.openai || new OpenAI2({});
|
|
565
|
+
this.codeInterpreterEnabled = params.codeInterpreterEnabled === false || true;
|
|
566
|
+
this.fileSearchEnabled = params.fileSearchEnabled === false || true;
|
|
567
|
+
this.assistantId = params.assistantId;
|
|
568
|
+
this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
|
|
616
569
|
}
|
|
617
570
|
async process(request) {
|
|
618
|
-
const {
|
|
619
|
-
|
|
620
|
-
|
|
621
|
-
|
|
622
|
-
|
|
623
|
-
threadId,
|
|
624
|
-
|
|
625
|
-
|
|
626
|
-
|
|
627
|
-
|
|
628
|
-
|
|
629
|
-
eventStream$
|
|
630
|
-
});
|
|
631
|
-
});
|
|
571
|
+
const { messages, actions, eventSource, runId, forwardedParameters } = request;
|
|
572
|
+
let threadId = request.threadId || (await this.openai.beta.threads.create()).id;
|
|
573
|
+
const lastMessage = messages.at(-1);
|
|
574
|
+
let nextRunId = void 0;
|
|
575
|
+
if (lastMessage instanceof ResultMessage && runId) {
|
|
576
|
+
nextRunId = await this.submitToolOutputs(threadId, runId, messages, eventSource);
|
|
577
|
+
} else if (lastMessage instanceof TextMessage) {
|
|
578
|
+
nextRunId = await this.submitUserMessage(threadId, messages, actions, eventSource, forwardedParameters);
|
|
579
|
+
} else {
|
|
580
|
+
throw new Error("No actionable message found in the messages");
|
|
581
|
+
}
|
|
632
582
|
return {
|
|
633
|
-
threadId
|
|
583
|
+
threadId,
|
|
584
|
+
runId: nextRunId
|
|
634
585
|
};
|
|
635
586
|
}
|
|
636
|
-
|
|
637
|
-
|
|
638
|
-
|
|
639
|
-
|
|
640
|
-
import { RemoteRunnable } from "langchain/runnables/remote";
|
|
641
|
-
var RemoteChain = class {
|
|
642
|
-
name;
|
|
643
|
-
description;
|
|
644
|
-
chainUrl;
|
|
645
|
-
parameters;
|
|
646
|
-
parameterType;
|
|
647
|
-
constructor(options) {
|
|
648
|
-
this.name = options.name;
|
|
649
|
-
this.description = options.description;
|
|
650
|
-
this.chainUrl = options.chainUrl;
|
|
651
|
-
this.parameters = options.parameters;
|
|
652
|
-
this.parameterType = options.parameterType || "multi";
|
|
653
|
-
}
|
|
654
|
-
async toAction() {
|
|
655
|
-
if (!this.parameters) {
|
|
656
|
-
await this.inferLangServeParameters();
|
|
587
|
+
async submitToolOutputs(threadId, runId, messages, eventSource) {
|
|
588
|
+
let run = await this.openai.beta.threads.runs.retrieve(threadId, runId);
|
|
589
|
+
if (!run.required_action) {
|
|
590
|
+
throw new Error("No tool outputs required");
|
|
657
591
|
}
|
|
658
|
-
|
|
659
|
-
|
|
660
|
-
|
|
661
|
-
|
|
662
|
-
|
|
663
|
-
|
|
664
|
-
|
|
665
|
-
|
|
666
|
-
|
|
667
|
-
|
|
668
|
-
|
|
669
|
-
|
|
670
|
-
|
|
671
|
-
|
|
672
|
-
|
|
592
|
+
const toolCallsIds = run.required_action.submit_tool_outputs.tool_calls.map((toolCall) => toolCall.id);
|
|
593
|
+
const resultMessages = messages.filter((message) => message instanceof ResultMessage && toolCallsIds.includes(message.actionExecutionId));
|
|
594
|
+
if (toolCallsIds.length != resultMessages.length) {
|
|
595
|
+
throw new Error("Number of function results does not match the number of tool calls");
|
|
596
|
+
}
|
|
597
|
+
const toolOutputs = resultMessages.map((message) => {
|
|
598
|
+
return {
|
|
599
|
+
tool_call_id: message.actionExecutionId,
|
|
600
|
+
output: message.result
|
|
601
|
+
};
|
|
602
|
+
});
|
|
603
|
+
const stream = this.openai.beta.threads.runs.submitToolOutputsStream(threadId, runId, {
|
|
604
|
+
tool_outputs: toolOutputs,
|
|
605
|
+
...this.disableParallelToolCalls && {
|
|
606
|
+
parallel_tool_calls: false
|
|
673
607
|
}
|
|
674
|
-
};
|
|
608
|
+
});
|
|
609
|
+
await this.streamResponse(stream, eventSource);
|
|
610
|
+
return runId;
|
|
675
611
|
}
|
|
676
|
-
async
|
|
677
|
-
|
|
678
|
-
|
|
679
|
-
"number",
|
|
680
|
-
"boolean"
|
|
612
|
+
async submitUserMessage(threadId, messages, actions, eventSource, forwardedParameters) {
|
|
613
|
+
messages = [
|
|
614
|
+
...messages
|
|
681
615
|
];
|
|
682
|
-
|
|
683
|
-
|
|
684
|
-
|
|
616
|
+
const instructionsMessage = messages.shift();
|
|
617
|
+
const instructions = instructionsMessage instanceof TextMessage ? instructionsMessage.content : "";
|
|
618
|
+
const userMessage = messages.map(convertMessageToOpenAIMessage).map(convertSystemMessageToAssistantAPI).at(-1);
|
|
619
|
+
if (userMessage.role !== "user") {
|
|
620
|
+
throw new Error("No user message found");
|
|
621
|
+
}
|
|
622
|
+
await this.openai.beta.threads.messages.create(threadId, {
|
|
623
|
+
role: "user",
|
|
624
|
+
content: userMessage.content
|
|
685
625
|
});
|
|
686
|
-
|
|
687
|
-
|
|
688
|
-
|
|
626
|
+
const openaiTools = actions.map(convertActionInputToOpenAITool);
|
|
627
|
+
const tools = [
|
|
628
|
+
...openaiTools,
|
|
629
|
+
...this.codeInterpreterEnabled ? [
|
|
689
630
|
{
|
|
690
|
-
|
|
691
|
-
type: schema.type,
|
|
692
|
-
description: "The input to the chain"
|
|
631
|
+
type: "code_interpreter"
|
|
693
632
|
}
|
|
694
|
-
]
|
|
695
|
-
|
|
696
|
-
|
|
697
|
-
|
|
698
|
-
var _a;
|
|
699
|
-
let property = schema.properties[key];
|
|
700
|
-
if (!supportedTypes.includes(property.type)) {
|
|
701
|
-
throw new Error("Unsupported schema type");
|
|
633
|
+
] : [],
|
|
634
|
+
...this.fileSearchEnabled ? [
|
|
635
|
+
{
|
|
636
|
+
type: "file_search"
|
|
702
637
|
}
|
|
703
|
-
|
|
704
|
-
|
|
705
|
-
|
|
706
|
-
|
|
707
|
-
|
|
708
|
-
|
|
709
|
-
|
|
710
|
-
|
|
711
|
-
|
|
712
|
-
|
|
638
|
+
] : []
|
|
639
|
+
];
|
|
640
|
+
let stream = this.openai.beta.threads.runs.stream(threadId, {
|
|
641
|
+
assistant_id: this.assistantId,
|
|
642
|
+
instructions,
|
|
643
|
+
tools,
|
|
644
|
+
...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
|
|
645
|
+
max_completion_tokens: forwardedParameters.maxTokens
|
|
646
|
+
},
|
|
647
|
+
...this.disableParallelToolCalls && {
|
|
648
|
+
parallel_tool_calls: false
|
|
649
|
+
}
|
|
650
|
+
});
|
|
651
|
+
await this.streamResponse(stream, eventSource);
|
|
652
|
+
return getRunIdFromStream(stream);
|
|
653
|
+
}
|
|
654
|
+
async streamResponse(stream, eventSource) {
|
|
655
|
+
eventSource.stream(async (eventStream$) => {
|
|
656
|
+
var _a, _b, _c, _d, _e, _f;
|
|
657
|
+
let inFunctionCall = false;
|
|
658
|
+
for await (const chunk of stream) {
|
|
659
|
+
switch (chunk.event) {
|
|
660
|
+
case "thread.message.created":
|
|
661
|
+
if (inFunctionCall) {
|
|
662
|
+
eventStream$.sendActionExecutionEnd();
|
|
663
|
+
}
|
|
664
|
+
eventStream$.sendTextMessageStart(chunk.data.id);
|
|
665
|
+
break;
|
|
666
|
+
case "thread.message.delta":
|
|
667
|
+
if (((_a = chunk.data.delta.content) == null ? void 0 : _a[0].type) === "text") {
|
|
668
|
+
eventStream$.sendTextMessageContent((_b = chunk.data.delta.content) == null ? void 0 : _b[0].text.value);
|
|
669
|
+
}
|
|
670
|
+
break;
|
|
671
|
+
case "thread.message.completed":
|
|
672
|
+
eventStream$.sendTextMessageEnd();
|
|
673
|
+
break;
|
|
674
|
+
case "thread.run.step.delta":
|
|
675
|
+
let toolCallId;
|
|
676
|
+
let toolCallName;
|
|
677
|
+
let toolCallArgs;
|
|
678
|
+
if (chunk.data.delta.step_details.type === "tool_calls" && ((_c = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _c[0].type) === "function") {
|
|
679
|
+
toolCallId = (_d = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _d[0].id;
|
|
680
|
+
toolCallName = (_e = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _e[0].function.name;
|
|
681
|
+
toolCallArgs = (_f = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _f[0].function.arguments;
|
|
682
|
+
}
|
|
683
|
+
if (toolCallName && toolCallId) {
|
|
684
|
+
if (inFunctionCall) {
|
|
685
|
+
eventStream$.sendActionExecutionEnd();
|
|
686
|
+
}
|
|
687
|
+
inFunctionCall = true;
|
|
688
|
+
eventStream$.sendActionExecutionStart(toolCallId, toolCallName);
|
|
689
|
+
} else if (toolCallArgs) {
|
|
690
|
+
eventStream$.sendActionExecutionArgs(toolCallArgs);
|
|
691
|
+
}
|
|
692
|
+
break;
|
|
693
|
+
}
|
|
694
|
+
}
|
|
695
|
+
if (inFunctionCall) {
|
|
696
|
+
eventStream$.sendActionExecutionEnd();
|
|
697
|
+
}
|
|
698
|
+
eventStream$.complete();
|
|
699
|
+
});
|
|
713
700
|
}
|
|
714
701
|
};
|
|
715
|
-
__name(
|
|
702
|
+
__name(OpenAIAssistantAdapter, "OpenAIAssistantAdapter");
|
|
703
|
+
function getRunIdFromStream(stream) {
|
|
704
|
+
return new Promise((resolve, reject) => {
|
|
705
|
+
let runIdGetter = /* @__PURE__ */ __name((event) => {
|
|
706
|
+
if (event.event === "thread.run.created") {
|
|
707
|
+
const runId = event.data.id;
|
|
708
|
+
stream.off("event", runIdGetter);
|
|
709
|
+
resolve(runId);
|
|
710
|
+
}
|
|
711
|
+
}, "runIdGetter");
|
|
712
|
+
stream.on("event", runIdGetter);
|
|
713
|
+
});
|
|
714
|
+
}
|
|
715
|
+
__name(getRunIdFromStream, "getRunIdFromStream");
|
|
716
716
|
|
|
717
717
|
// src/service-adapters/unify/unify-adapter.ts
|
|
718
718
|
import OpenAI3 from "openai";
|
|
@@ -890,13 +890,278 @@ var GroqAdapter = class {
|
|
|
890
890
|
};
|
|
891
891
|
__name(GroqAdapter, "GroqAdapter");
|
|
892
892
|
|
|
893
|
+
// src/service-adapters/anthropic/anthropic-adapter.ts
|
|
894
|
+
import Anthropic from "@anthropic-ai/sdk";
|
|
895
|
+
|
|
896
|
+
// src/service-adapters/anthropic/utils.ts
|
|
897
|
+
function limitMessagesToTokenCount2(messages, tools, model, maxTokens) {
|
|
898
|
+
maxTokens || (maxTokens = MAX_TOKENS);
|
|
899
|
+
const result = [];
|
|
900
|
+
const toolsNumTokens = countToolsTokens2(model, tools);
|
|
901
|
+
if (toolsNumTokens > maxTokens) {
|
|
902
|
+
throw new Error(`Too many tokens in function definitions: ${toolsNumTokens} > ${maxTokens}`);
|
|
903
|
+
}
|
|
904
|
+
maxTokens -= toolsNumTokens;
|
|
905
|
+
for (const message of messages) {
|
|
906
|
+
if (message.role === "system") {
|
|
907
|
+
const numTokens = countMessageTokens2(model, message);
|
|
908
|
+
maxTokens -= numTokens;
|
|
909
|
+
if (maxTokens < 0) {
|
|
910
|
+
throw new Error("Not enough tokens for system message.");
|
|
911
|
+
}
|
|
912
|
+
}
|
|
913
|
+
}
|
|
914
|
+
let cutoff = false;
|
|
915
|
+
const reversedMessages = [
|
|
916
|
+
...messages
|
|
917
|
+
].reverse();
|
|
918
|
+
for (const message of reversedMessages) {
|
|
919
|
+
if (message.role === "system") {
|
|
920
|
+
result.unshift(message);
|
|
921
|
+
continue;
|
|
922
|
+
} else if (cutoff) {
|
|
923
|
+
continue;
|
|
924
|
+
}
|
|
925
|
+
let numTokens = countMessageTokens2(model, message);
|
|
926
|
+
if (maxTokens < numTokens) {
|
|
927
|
+
cutoff = true;
|
|
928
|
+
continue;
|
|
929
|
+
}
|
|
930
|
+
result.unshift(message);
|
|
931
|
+
maxTokens -= numTokens;
|
|
932
|
+
}
|
|
933
|
+
return result;
|
|
934
|
+
}
|
|
935
|
+
__name(limitMessagesToTokenCount2, "limitMessagesToTokenCount");
|
|
936
|
+
var MAX_TOKENS = 128e3;
|
|
937
|
+
function countToolsTokens2(model, tools) {
|
|
938
|
+
if (tools.length === 0) {
|
|
939
|
+
return 0;
|
|
940
|
+
}
|
|
941
|
+
const json = JSON.stringify(tools);
|
|
942
|
+
return countTokens2(model, json);
|
|
943
|
+
}
|
|
944
|
+
__name(countToolsTokens2, "countToolsTokens");
|
|
945
|
+
function countMessageTokens2(model, message) {
|
|
946
|
+
return countTokens2(model, JSON.stringify(message.content) || "");
|
|
947
|
+
}
|
|
948
|
+
__name(countMessageTokens2, "countMessageTokens");
|
|
949
|
+
function countTokens2(model, text) {
|
|
950
|
+
return text.length / 3;
|
|
951
|
+
}
|
|
952
|
+
__name(countTokens2, "countTokens");
|
|
953
|
+
function convertActionInputToAnthropicTool(action) {
|
|
954
|
+
return {
|
|
955
|
+
name: action.name,
|
|
956
|
+
description: action.description,
|
|
957
|
+
input_schema: JSON.parse(action.jsonSchema)
|
|
958
|
+
};
|
|
959
|
+
}
|
|
960
|
+
__name(convertActionInputToAnthropicTool, "convertActionInputToAnthropicTool");
|
|
961
|
+
function convertMessageToAnthropicMessage(message) {
|
|
962
|
+
if (message instanceof TextMessage) {
|
|
963
|
+
if (message.role === "system") {
|
|
964
|
+
return {
|
|
965
|
+
role: "assistant",
|
|
966
|
+
content: [
|
|
967
|
+
{
|
|
968
|
+
type: "text",
|
|
969
|
+
text: "THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: " + message.content
|
|
970
|
+
}
|
|
971
|
+
]
|
|
972
|
+
};
|
|
973
|
+
} else {
|
|
974
|
+
return {
|
|
975
|
+
role: message.role === "user" ? "user" : "assistant",
|
|
976
|
+
content: [
|
|
977
|
+
{
|
|
978
|
+
type: "text",
|
|
979
|
+
text: message.content
|
|
980
|
+
}
|
|
981
|
+
]
|
|
982
|
+
};
|
|
983
|
+
}
|
|
984
|
+
} else if (message instanceof ActionExecutionMessage) {
|
|
985
|
+
return {
|
|
986
|
+
role: "assistant",
|
|
987
|
+
content: [
|
|
988
|
+
{
|
|
989
|
+
id: message.id,
|
|
990
|
+
type: "tool_use",
|
|
991
|
+
input: message.arguments,
|
|
992
|
+
name: message.name
|
|
993
|
+
}
|
|
994
|
+
]
|
|
995
|
+
};
|
|
996
|
+
} else if (message instanceof ResultMessage) {
|
|
997
|
+
return {
|
|
998
|
+
role: "user",
|
|
999
|
+
content: [
|
|
1000
|
+
{
|
|
1001
|
+
type: "tool_result",
|
|
1002
|
+
content: message.result,
|
|
1003
|
+
tool_use_id: message.actionExecutionId
|
|
1004
|
+
}
|
|
1005
|
+
]
|
|
1006
|
+
};
|
|
1007
|
+
}
|
|
1008
|
+
}
|
|
1009
|
+
__name(convertMessageToAnthropicMessage, "convertMessageToAnthropicMessage");
|
|
1010
|
+
function groupAnthropicMessagesByRole(messageParams) {
|
|
1011
|
+
return messageParams.reduce((acc, message) => {
|
|
1012
|
+
const lastGroup = acc[acc.length - 1];
|
|
1013
|
+
if (lastGroup && lastGroup.role === message.role) {
|
|
1014
|
+
lastGroup.content = lastGroup.content.concat(message.content);
|
|
1015
|
+
} else {
|
|
1016
|
+
acc.push({
|
|
1017
|
+
role: message.role,
|
|
1018
|
+
content: [
|
|
1019
|
+
...message.content
|
|
1020
|
+
]
|
|
1021
|
+
});
|
|
1022
|
+
}
|
|
1023
|
+
return acc;
|
|
1024
|
+
}, []);
|
|
1025
|
+
}
|
|
1026
|
+
__name(groupAnthropicMessagesByRole, "groupAnthropicMessagesByRole");
|
|
1027
|
+
|
|
1028
|
+
// src/service-adapters/anthropic/anthropic-adapter.ts
|
|
1029
|
+
import { randomId as randomId6 } from "@copilotkit/shared";
|
|
1030
|
+
var DEFAULT_MODEL3 = "claude-3-opus-20240229";

/**
 * Service adapter that streams chat completions from the Anthropic Messages
 * API and translates the streamed events into CopilotKit runtime events
 * (text message start/content/end, action execution start/args/end).
 */
var AnthropicAdapter = class {
  // Fallback model used when the incoming request does not forward one.
  model = DEFAULT_MODEL3;
  _anthropic;
  get anthropic() {
    return this._anthropic;
  }
  /**
   * @param params - optional configuration:
   *   - anthropic: a preconfigured Anthropic client instance
   *   - model: model id overriding DEFAULT_MODEL3
   */
  constructor(params) {
    this._anthropic = (params == null ? void 0 : params.anthropic) || new Anthropic({});
    if (params == null ? void 0 : params.model) {
      this.model = params.model;
    }
  }
  /**
   * Handles one chat request: converts CopilotKit messages/actions to
   * Anthropic format, streams the completion, and forwards events through
   * `eventSource`. Resolves to `{ threadId }`.
   */
  async process(request) {
    const { threadId, model = this.model, messages: rawMessages, actions, eventSource, forwardedParameters } = request;
    const tools = actions.map(convertActionInputToAnthropicTool);
    const messages = [...rawMessages];
    // Anthropic takes the system prompt separately from the message list,
    // so peel off the leading instructions message.
    const instructionsMessage = messages.shift();
    const instructions = instructionsMessage instanceof TextMessage ? instructionsMessage.content : "";
    let anthropicMessages = messages.map(convertMessageToAnthropicMessage);
    anthropicMessages = limitMessagesToTokenCount2(anthropicMessages, tools, model);
    // Anthropic requires strictly alternating roles; merge consecutive
    // same-role messages into one.
    anthropicMessages = groupAnthropicMessagesByRole(anthropicMessages);
    let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
    if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {
      // Map the generic "function" choice onto Anthropic's named-tool form.
      toolChoice = {
        type: "tool",
        name: forwardedParameters.toolChoiceFunctionName
      };
    }
    const stream = this.anthropic.messages.create({
      system: instructions,
      // BUGFIX: use the (possibly request-forwarded) `model` — previously this
      // passed `this.model`, silently ignoring a per-request model override
      // even though `model` was already used for token limiting above.
      model,
      messages: anthropicMessages,
      max_tokens: (forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) || 1024,
      ...tools.length > 0 && {
        tools
      },
      ...toolChoice && {
        tool_choice: toolChoice
      },
      stream: true
    });
    eventSource.stream(async (eventStream$) => {
      let mode = null;
      let didOutputText = false;
      let currentMessageId = randomId6();
      let currentToolCallId = randomId6();
      const filterThinkingTextBuffer = new FilterThinkingTextBuffer();
      for await (const chunk of await stream) {
        if (chunk.type === "message_start") {
          currentMessageId = chunk.message.id;
        } else if (chunk.type === "content_block_start") {
          if (chunk.content_block.type === "text") {
            didOutputText = false;
            filterThinkingTextBuffer.reset();
            mode = "message";
          } else if (chunk.content_block.type === "tool_use") {
            currentToolCallId = chunk.content_block.id;
            eventStream$.sendActionExecutionStart(currentToolCallId, chunk.content_block.name);
            mode = "function";
          }
        } else if (chunk.type === "content_block_delta") {
          if (chunk.delta.type === "text_delta") {
            // Strip any leading <thinking>…</thinking> preamble before
            // forwarding visible text.
            const text = filterThinkingTextBuffer.onTextChunk(chunk.delta.text);
            if (text.length > 0) {
              if (!didOutputText) {
                // Defer the "start" event until there is actual visible text.
                eventStream$.sendTextMessageStart(currentMessageId);
                didOutputText = true;
              }
              eventStream$.sendTextMessageContent(text);
            }
          } else if (chunk.delta.type === "input_json_delta") {
            eventStream$.sendActionExecutionArgs(chunk.delta.partial_json);
          }
        } else if (chunk.type === "content_block_stop") {
          if (mode === "message") {
            // Only close a text message that was actually opened.
            if (didOutputText) {
              eventStream$.sendTextMessageEnd();
            }
          } else if (mode === "function") {
            eventStream$.sendActionExecutionEnd();
          }
        }
      }
      eventStream$.complete();
    });
    return {
      threadId: threadId || randomId6()
    };
  }
};
__name(AnthropicAdapter, "AnthropicAdapter");
|
|
1124
|
+
var THINKING_TAG = "<thinking>";
var THINKING_TAG_END = "</thinking>";

/**
 * Buffers a streamed text message and suppresses a leading
 * <thinking>…</thinking> block. Text is withheld (empty string returned)
 * while the accumulated buffer could still turn out to be a thinking tag;
 * once the tag is confirmed and closed, only the text after </thinking>
 * is emitted.
 */
var FilterThinkingTextBuffer = /* @__PURE__ */ __name(class FilterThinkingTextBuffer2 {
  // Accumulated raw text for the current content block.
  buffer;
  // Once true, filtering is settled and chunks pass through unchanged.
  didFilterThinkingTag = false;
  constructor() {
    this.buffer = "";
  }
  /**
   * Feed one streamed chunk; returns the text that is safe to emit now
   * ("" while a potential thinking block is still being buffered).
   */
  onTextChunk(text) {
    this.buffer += text;
    if (this.didFilterThinkingTag) {
      return text;
    }
    const potentialTag = this.buffer.slice(0, THINKING_TAG.length);
    if (THINKING_TAG.startsWith(potentialTag)) {
      if (this.buffer.includes(THINKING_TAG_END)) {
        // Complete thinking block observed: drop it and emit the remainder.
        const end = this.buffer.indexOf(THINKING_TAG_END);
        const filteredText = this.buffer.slice(end + THINKING_TAG_END.length);
        this.buffer = filteredText;
        this.didFilterThinkingTag = true;
        return filteredText;
      }
      // Still inside a (potential) thinking block: withhold output.
      return "";
    }
    // BUGFIX: the stream only *looked* like a thinking tag at first (e.g.
    // "<thi" then "x rest"). Flush the entire buffer — the original returned
    // only the current chunk, permanently dropping earlier withheld chunks —
    // and latch the flag so later chunks pass straight through.
    this.didFilterThinkingTag = true;
    return this.buffer;
  }
  /** Clear all state for the next text content block. */
  reset() {
    this.buffer = "";
    this.didFilterThinkingTag = false;
  }
}, "FilterThinkingTextBuffer");
|
|
1156
|
+
|
|
893
1157
|
export {
|
|
1158
|
+
RemoteChain,
|
|
894
1159
|
OpenAIAdapter,
|
|
895
|
-
OpenAIAssistantAdapter,
|
|
896
|
-
GoogleGenerativeAIAdapter,
|
|
897
1160
|
LangChainAdapter,
|
|
898
|
-
|
|
1161
|
+
GoogleGenerativeAIAdapter,
|
|
1162
|
+
OpenAIAssistantAdapter,
|
|
899
1163
|
UnifyAdapter,
|
|
900
|
-
GroqAdapter
|
|
1164
|
+
GroqAdapter,
|
|
1165
|
+
AnthropicAdapter
|
|
901
1166
|
};
|
|
902
|
-
//# sourceMappingURL=chunk-
|
|
1167
|
+
//# sourceMappingURL=chunk-OF6AN6HF.mjs.map
|