@copilotkit/runtime 1.50.0-beta.8 → 1.50.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +168 -0
- package/dist/chunk-27JKTS6P.mjs +1704 -0
- package/dist/chunk-27JKTS6P.mjs.map +1 -0
- package/dist/chunk-2GPTVDTO.mjs +25 -0
- package/dist/chunk-2GPTVDTO.mjs.map +1 -0
- package/dist/chunk-2OZAGFV3.mjs +43 -0
- package/dist/chunk-2OZAGFV3.mjs.map +1 -0
- package/dist/chunk-3AJVKDZX.mjs +3097 -0
- package/dist/chunk-3AJVKDZX.mjs.map +1 -0
- package/dist/chunk-45RCC3ZS.mjs +25 -0
- package/dist/chunk-45RCC3ZS.mjs.map +1 -0
- package/dist/chunk-4EHJ4XFJ.mjs +25 -0
- package/dist/chunk-4EHJ4XFJ.mjs.map +1 -0
- package/dist/chunk-4IANB4TC.mjs +25 -0
- package/dist/chunk-4IANB4TC.mjs.map +1 -0
- package/dist/chunk-4KES76K3.mjs +74 -0
- package/dist/chunk-4KES76K3.mjs.map +1 -0
- package/dist/chunk-4OGE3SLW.mjs +3100 -0
- package/dist/chunk-4OGE3SLW.mjs.map +1 -0
- package/dist/chunk-54YJBMCP.mjs +3097 -0
- package/dist/chunk-54YJBMCP.mjs.map +1 -0
- package/dist/chunk-62NE5S6M.mjs +226 -0
- package/dist/chunk-62NE5S6M.mjs.map +1 -0
- package/dist/chunk-6ER4SZYH.mjs +74 -0
- package/dist/chunk-6ER4SZYH.mjs.map +1 -0
- package/dist/chunk-6TNSLHVR.mjs +74 -0
- package/dist/chunk-6TNSLHVR.mjs.map +1 -0
- package/dist/chunk-6XRUR5UK.mjs +1 -0
- package/dist/chunk-6XRUR5UK.mjs.map +1 -0
- package/dist/chunk-7V4BK7TZ.mjs +25 -0
- package/dist/chunk-7V4BK7TZ.mjs.map +1 -0
- package/dist/chunk-7YZIEXD2.mjs +74 -0
- package/dist/chunk-7YZIEXD2.mjs.map +1 -0
- package/dist/chunk-A4XHOAFU.mjs +25 -0
- package/dist/chunk-A4XHOAFU.mjs.map +1 -0
- package/dist/chunk-A555KEAD.mjs +6020 -0
- package/dist/chunk-A555KEAD.mjs.map +1 -0
- package/dist/chunk-AF73TFTX.mjs +74 -0
- package/dist/chunk-AF73TFTX.mjs.map +1 -0
- package/dist/chunk-AMUJQ6IR.mjs +50 -0
- package/dist/chunk-AMUJQ6IR.mjs.map +1 -0
- package/dist/chunk-AQG2SVCA.mjs +25 -0
- package/dist/chunk-AQG2SVCA.mjs.map +1 -0
- package/dist/chunk-BJZHMXND.mjs +74 -0
- package/dist/chunk-BJZHMXND.mjs.map +1 -0
- package/dist/chunk-CB2OJXF6.mjs +25 -0
- package/dist/chunk-CB2OJXF6.mjs.map +1 -0
- package/dist/chunk-CEOMFPJU.mjs +6020 -0
- package/dist/chunk-CEOMFPJU.mjs.map +1 -0
- package/dist/chunk-CZVLR7CC.mjs +175 -0
- package/dist/chunk-CZVLR7CC.mjs.map +1 -0
- package/dist/chunk-DCEEHMLJ.mjs +1127 -0
- package/dist/chunk-DCEEHMLJ.mjs.map +1 -0
- package/dist/chunk-DE3CLKUG.mjs +25 -0
- package/dist/chunk-DE3CLKUG.mjs.map +1 -0
- package/dist/chunk-DTPRUTNV.mjs +25 -0
- package/dist/chunk-DTPRUTNV.mjs.map +1 -0
- package/dist/chunk-ERUOA47O.mjs +626 -0
- package/dist/chunk-ERUOA47O.mjs.map +1 -0
- package/dist/chunk-ESSRC64W.mjs +74 -0
- package/dist/chunk-ESSRC64W.mjs.map +1 -0
- package/dist/chunk-FHD4JECV.mjs +33 -0
- package/dist/chunk-FHD4JECV.mjs.map +1 -0
- package/dist/chunk-GRAN6K6N.mjs +25 -0
- package/dist/chunk-GRAN6K6N.mjs.map +1 -0
- package/dist/chunk-I27F2UPA.mjs +175 -0
- package/dist/chunk-I27F2UPA.mjs.map +1 -0
- package/dist/chunk-IAZKTOQW.mjs +25 -0
- package/dist/chunk-IAZKTOQW.mjs.map +1 -0
- package/dist/chunk-J6XZ5MFB.mjs +25 -0
- package/dist/chunk-J6XZ5MFB.mjs.map +1 -0
- package/dist/chunk-JJ32MA4C.mjs +73 -0
- package/dist/chunk-JJ32MA4C.mjs.map +1 -0
- package/dist/chunk-JJY4ZTHQ.mjs +25 -0
- package/dist/chunk-JJY4ZTHQ.mjs.map +1 -0
- package/dist/chunk-KEYLBFU2.mjs +3117 -0
- package/dist/chunk-KEYLBFU2.mjs.map +1 -0
- package/dist/chunk-KQ53L4WZ.mjs +3094 -0
- package/dist/chunk-KQ53L4WZ.mjs.map +1 -0
- package/dist/chunk-KTELVQ67.mjs +3098 -0
- package/dist/chunk-KTELVQ67.mjs.map +1 -0
- package/dist/chunk-LPEPX6NH.mjs +25 -0
- package/dist/chunk-LPEPX6NH.mjs.map +1 -0
- package/dist/chunk-MDXE55DK.mjs +3117 -0
- package/dist/chunk-MDXE55DK.mjs.map +1 -0
- package/dist/chunk-MMFUVOXH.mjs +73 -0
- package/dist/chunk-MMFUVOXH.mjs.map +1 -0
- package/dist/chunk-N3Y4U66N.mjs +253 -0
- package/dist/chunk-N3Y4U66N.mjs.map +1 -0
- package/dist/chunk-O7UYB4MH.mjs +25 -0
- package/dist/chunk-O7UYB4MH.mjs.map +1 -0
- package/dist/chunk-OFNVQHNM.mjs +3089 -0
- package/dist/chunk-OFNVQHNM.mjs.map +1 -0
- package/dist/chunk-OFSV5GET.mjs +3074 -0
- package/dist/chunk-OFSV5GET.mjs.map +1 -0
- package/dist/chunk-OMRST67R.mjs +25 -0
- package/dist/chunk-OMRST67R.mjs.map +1 -0
- package/dist/chunk-OWIGJONH.mjs +275 -0
- package/dist/chunk-OWIGJONH.mjs.map +1 -0
- package/dist/chunk-PRZHE74A.mjs +25 -0
- package/dist/chunk-PRZHE74A.mjs.map +1 -0
- package/dist/chunk-PTYRVXXP.mjs +80 -0
- package/dist/chunk-PTYRVXXP.mjs.map +1 -0
- package/dist/chunk-R22B5CCO.mjs +25 -0
- package/dist/chunk-R22B5CCO.mjs.map +1 -0
- package/dist/chunk-SHBDMA63.mjs +141 -0
- package/dist/chunk-SHBDMA63.mjs.map +1 -0
- package/dist/chunk-SPVXBPRA.mjs +74 -0
- package/dist/chunk-SPVXBPRA.mjs.map +1 -0
- package/dist/chunk-T72G46ME.mjs +25 -0
- package/dist/chunk-T72G46ME.mjs.map +1 -0
- package/dist/chunk-TGELROPU.mjs +25 -0
- package/dist/chunk-TGELROPU.mjs.map +1 -0
- package/dist/chunk-UNX4IAAD.mjs +25 -0
- package/dist/chunk-UNX4IAAD.mjs.map +1 -0
- package/dist/chunk-V4DHVC7M.mjs +3085 -0
- package/dist/chunk-V4DHVC7M.mjs.map +1 -0
- package/dist/chunk-VVRFOB66.mjs +25 -0
- package/dist/chunk-VVRFOB66.mjs.map +1 -0
- package/dist/chunk-W6NVBYM6.mjs +80 -0
- package/dist/chunk-W6NVBYM6.mjs.map +1 -0
- package/dist/chunk-W7MBACGC.mjs +74 -0
- package/dist/chunk-W7MBACGC.mjs.map +1 -0
- package/dist/chunk-WMD4XZZS.mjs +25 -0
- package/dist/chunk-WMD4XZZS.mjs.map +1 -0
- package/dist/chunk-WX2ZNCRT.mjs +74 -0
- package/dist/chunk-WX2ZNCRT.mjs.map +1 -0
- package/dist/chunk-XWBDEXDA.mjs +153 -0
- package/dist/chunk-XWBDEXDA.mjs.map +1 -0
- package/dist/chunk-Y2Z62E2T.mjs +74 -0
- package/dist/chunk-Y2Z62E2T.mjs.map +1 -0
- package/dist/chunk-YO4I6RVI.mjs +25 -0
- package/dist/chunk-YO4I6RVI.mjs.map +1 -0
- package/dist/chunk-Z6Q5IW6I.mjs +3098 -0
- package/dist/chunk-Z6Q5IW6I.mjs.map +1 -0
- package/dist/chunk-Z726O3G2.mjs +25 -0
- package/dist/chunk-Z726O3G2.mjs.map +1 -0
- package/dist/chunk-ZE4SMZZR.mjs +3097 -0
- package/dist/chunk-ZE4SMZZR.mjs.map +1 -0
- package/dist/chunk-ZULZB33C.mjs +73 -0
- package/dist/chunk-ZULZB33C.mjs.map +1 -0
- package/dist/chunk-ZVRGXMY7.mjs +25 -0
- package/dist/chunk-ZVRGXMY7.mjs.map +1 -0
- package/dist/chunk-ZZ35WBYQ.mjs +25 -0
- package/dist/chunk-ZZ35WBYQ.mjs.map +1 -0
- package/dist/graphql/message-conversion/index.d.ts +18 -0
- package/dist/graphql/message-conversion/index.js +725 -0
- package/dist/graphql/message-conversion/index.js.map +1 -0
- package/dist/graphql/message-conversion/index.mjs +245 -0
- package/dist/graphql/message-conversion/index.mjs.map +1 -0
- package/dist/graphql/types/base/index.d.ts +6 -0
- package/dist/graphql/types/base/index.js +63 -0
- package/dist/graphql/types/base/index.js.map +1 -0
- package/dist/graphql/types/base/index.mjs +8 -0
- package/dist/graphql/types/base/index.mjs.map +1 -0
- package/dist/graphql/types/converted/index.d.ts +2 -0
- package/dist/graphql/types/converted/index.js +200 -0
- package/dist/graphql/types/converted/index.js.map +1 -0
- package/dist/graphql/types/converted/index.mjs +19 -0
- package/dist/graphql/types/converted/index.mjs.map +1 -0
- package/dist/groq-adapter-540da9c3.d.ts +331 -0
- package/dist/groq-adapter-a6f5e9d2.d.ts +331 -0
- package/dist/groq-adapter-c8aec5c5.d.ts +321 -0
- package/dist/index-96b330da.d.ts +119 -0
- package/dist/index-adbd78f1.d.ts +154 -0
- package/dist/index.d.ts +67 -8
- package/dist/index.js +118 -28
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +120 -31
- package/dist/index.mjs.map +1 -1
- package/dist/langserve-0c6100e3.d.ts +257 -0
- package/dist/langserve-978d5790.d.ts +243 -0
- package/dist/langserve-9fc76ce5.d.ts +243 -0
- package/dist/lib/cloud/index.d.ts +6 -0
- package/dist/lib/cloud/index.js +18 -0
- package/dist/lib/cloud/index.js.map +1 -0
- package/dist/lib/cloud/index.mjs +1 -0
- package/dist/lib/cloud/index.mjs.map +1 -0
- package/dist/lib/index.d.ts +212 -0
- package/dist/lib/index.js +7843 -0
- package/dist/lib/index.js.map +1 -0
- package/dist/lib/index.mjs +76 -0
- package/dist/lib/index.mjs.map +1 -0
- package/dist/lib/integrations/index.d.ts +34 -0
- package/dist/lib/integrations/index.js +3052 -0
- package/dist/lib/integrations/index.js.map +1 -0
- package/dist/lib/integrations/index.mjs +37 -0
- package/dist/lib/integrations/index.mjs.map +1 -0
- package/dist/lib/integrations/nest/index.d.ts +15 -0
- package/dist/lib/integrations/nest/index.js +2959 -0
- package/dist/lib/integrations/nest/index.js.map +1 -0
- package/dist/lib/integrations/nest/index.mjs +14 -0
- package/dist/lib/integrations/nest/index.mjs.map +1 -0
- package/dist/lib/integrations/node-express/index.d.ts +15 -0
- package/dist/lib/integrations/node-express/index.js +2959 -0
- package/dist/lib/integrations/node-express/index.js.map +1 -0
- package/dist/lib/integrations/node-express/index.mjs +14 -0
- package/dist/lib/integrations/node-express/index.mjs.map +1 -0
- package/dist/lib/integrations/node-http/index.d.ts +15 -0
- package/dist/lib/integrations/node-http/index.js +2945 -0
- package/dist/lib/integrations/node-http/index.js.map +1 -0
- package/dist/lib/integrations/node-http/index.mjs +13 -0
- package/dist/lib/integrations/node-http/index.mjs.map +1 -0
- package/dist/service-adapters/index.d.ts +162 -0
- package/dist/service-adapters/index.js +1787 -0
- package/dist/service-adapters/index.js.map +1 -0
- package/dist/service-adapters/index.mjs +34 -0
- package/dist/service-adapters/index.mjs.map +1 -0
- package/dist/service-adapters/shared/index.d.ts +9 -0
- package/dist/service-adapters/shared/index.js +72 -0
- package/dist/service-adapters/shared/index.js.map +1 -0
- package/dist/service-adapters/shared/index.mjs +8 -0
- package/dist/service-adapters/shared/index.mjs.map +1 -0
- package/dist/shared-0a7346ce.d.ts +466 -0
- package/dist/shared-35c6eb04.d.ts +448 -0
- package/dist/shared-9ed1dc31.d.ts +414 -0
- package/dist/shared-da5708fe.d.ts +449 -0
- package/dist/utils/index.d.ts +65 -0
- package/dist/utils/index.js +175 -0
- package/dist/utils/index.js.map +1 -0
- package/dist/utils/index.mjs +12 -0
- package/dist/utils/index.mjs.map +1 -0
- package/package.json +14 -20
- package/src/lib/index.ts +7 -6
- package/src/lib/runtime/copilot-runtime.ts +62 -26
- package/src/lib/runtime/telemetry-agent-runner.ts +139 -0
|
@@ -0,0 +1,1704 @@
|
|
|
1
|
+
import {
|
|
2
|
+
convertServiceAdapterError
|
|
3
|
+
} from "./chunk-AMUJQ6IR.mjs";
|
|
4
|
+
import {
|
|
5
|
+
__name
|
|
6
|
+
} from "./chunk-FHD4JECV.mjs";
|
|
7
|
+
|
|
8
|
+
// src/service-adapters/langchain/langserve.ts
|
|
9
|
+
import { RemoteRunnable } from "langchain/runnables/remote";
|
|
10
|
+
var RemoteChain = class {
|
|
11
|
+
name;
|
|
12
|
+
description;
|
|
13
|
+
chainUrl;
|
|
14
|
+
parameters;
|
|
15
|
+
parameterType;
|
|
16
|
+
constructor(options) {
|
|
17
|
+
this.name = options.name;
|
|
18
|
+
this.description = options.description;
|
|
19
|
+
this.chainUrl = options.chainUrl;
|
|
20
|
+
this.parameters = options.parameters;
|
|
21
|
+
this.parameterType = options.parameterType || "multi";
|
|
22
|
+
}
|
|
23
|
+
async toAction() {
|
|
24
|
+
if (!this.parameters) {
|
|
25
|
+
await this.inferLangServeParameters();
|
|
26
|
+
}
|
|
27
|
+
return {
|
|
28
|
+
name: this.name,
|
|
29
|
+
description: this.description,
|
|
30
|
+
parameters: this.parameters,
|
|
31
|
+
handler: async (args) => {
|
|
32
|
+
const runnable = new RemoteRunnable({
|
|
33
|
+
url: this.chainUrl
|
|
34
|
+
});
|
|
35
|
+
let input;
|
|
36
|
+
if (this.parameterType === "single") {
|
|
37
|
+
input = args[Object.keys(args)[0]];
|
|
38
|
+
} else {
|
|
39
|
+
input = args;
|
|
40
|
+
}
|
|
41
|
+
return await runnable.invoke(input);
|
|
42
|
+
}
|
|
43
|
+
};
|
|
44
|
+
}
|
|
45
|
+
async inferLangServeParameters() {
|
|
46
|
+
const supportedTypes = [
|
|
47
|
+
"string",
|
|
48
|
+
"number",
|
|
49
|
+
"boolean"
|
|
50
|
+
];
|
|
51
|
+
let schemaUrl = this.chainUrl.replace(/\/+$/, "") + "/input_schema";
|
|
52
|
+
let schema = await fetch(schemaUrl).then((res) => res.json()).catch(() => {
|
|
53
|
+
throw new Error("Failed to fetch langserve schema at " + schemaUrl);
|
|
54
|
+
});
|
|
55
|
+
if (supportedTypes.includes(schema.type)) {
|
|
56
|
+
this.parameterType = "single";
|
|
57
|
+
this.parameters = [
|
|
58
|
+
{
|
|
59
|
+
name: "input",
|
|
60
|
+
type: schema.type,
|
|
61
|
+
description: "The input to the chain"
|
|
62
|
+
}
|
|
63
|
+
];
|
|
64
|
+
} else if (schema.type === "object") {
|
|
65
|
+
this.parameterType = "multi";
|
|
66
|
+
this.parameters = Object.keys(schema.properties).map((key) => {
|
|
67
|
+
var _a;
|
|
68
|
+
let property = schema.properties[key];
|
|
69
|
+
if (!supportedTypes.includes(property.type)) {
|
|
70
|
+
throw new Error("Unsupported schema type");
|
|
71
|
+
}
|
|
72
|
+
return {
|
|
73
|
+
name: key,
|
|
74
|
+
type: property.type,
|
|
75
|
+
description: property.description || "",
|
|
76
|
+
required: ((_a = schema.required) == null ? void 0 : _a.includes(key)) || false
|
|
77
|
+
};
|
|
78
|
+
});
|
|
79
|
+
} else {
|
|
80
|
+
throw new Error("Unsupported schema type");
|
|
81
|
+
}
|
|
82
|
+
}
|
|
83
|
+
};
|
|
84
|
+
__name(RemoteChain, "RemoteChain");
|
|
85
|
+
|
|
86
|
+
// src/service-adapters/openai/openai-adapter.ts
|
|
87
|
+
import OpenAI from "openai";
|
|
88
|
+
|
|
89
|
+
// src/service-adapters/openai/utils.ts
|
|
90
|
+
import { parseJson } from "@copilotkit/shared";
|
|
91
|
+
function limitMessagesToTokenCount(messages, tools, model, maxTokens) {
|
|
92
|
+
maxTokens || (maxTokens = maxTokensForOpenAIModel(model));
|
|
93
|
+
const result = [];
|
|
94
|
+
const toolsNumTokens = countToolsTokens(model, tools);
|
|
95
|
+
if (toolsNumTokens > maxTokens) {
|
|
96
|
+
throw new Error(`Too many tokens in function definitions: ${toolsNumTokens} > ${maxTokens}`);
|
|
97
|
+
}
|
|
98
|
+
maxTokens -= toolsNumTokens;
|
|
99
|
+
for (const message of messages) {
|
|
100
|
+
if ([
|
|
101
|
+
"system",
|
|
102
|
+
"developer"
|
|
103
|
+
].includes(message.role)) {
|
|
104
|
+
const numTokens = countMessageTokens(model, message);
|
|
105
|
+
maxTokens -= numTokens;
|
|
106
|
+
if (maxTokens < 0) {
|
|
107
|
+
throw new Error("Not enough tokens for system message.");
|
|
108
|
+
}
|
|
109
|
+
}
|
|
110
|
+
}
|
|
111
|
+
let cutoff = false;
|
|
112
|
+
const reversedMessages = [
|
|
113
|
+
...messages
|
|
114
|
+
].reverse();
|
|
115
|
+
for (const message of reversedMessages) {
|
|
116
|
+
if ([
|
|
117
|
+
"system",
|
|
118
|
+
"developer"
|
|
119
|
+
].includes(message.role)) {
|
|
120
|
+
result.unshift(message);
|
|
121
|
+
continue;
|
|
122
|
+
} else if (cutoff) {
|
|
123
|
+
continue;
|
|
124
|
+
}
|
|
125
|
+
let numTokens = countMessageTokens(model, message);
|
|
126
|
+
if (maxTokens < numTokens) {
|
|
127
|
+
cutoff = true;
|
|
128
|
+
continue;
|
|
129
|
+
}
|
|
130
|
+
result.unshift(message);
|
|
131
|
+
maxTokens -= numTokens;
|
|
132
|
+
}
|
|
133
|
+
return result;
|
|
134
|
+
}
|
|
135
|
+
__name(limitMessagesToTokenCount, "limitMessagesToTokenCount");
|
|
136
|
+
function maxTokensForOpenAIModel(model) {
|
|
137
|
+
return maxTokensByModel[model] || DEFAULT_MAX_TOKENS;
|
|
138
|
+
}
|
|
139
|
+
__name(maxTokensForOpenAIModel, "maxTokensForOpenAIModel");
|
|
140
|
+
var DEFAULT_MAX_TOKENS = 128e3;
|
|
141
|
+
var maxTokensByModel = {
|
|
142
|
+
// o1
|
|
143
|
+
o1: 2e5,
|
|
144
|
+
"o1-2024-12-17": 2e5,
|
|
145
|
+
"o1-mini": 128e3,
|
|
146
|
+
"o1-mini-2024-09-12": 128e3,
|
|
147
|
+
"o1-preview": 128e3,
|
|
148
|
+
"o1-preview-2024-09-12": 128e3,
|
|
149
|
+
// o3-mini
|
|
150
|
+
"o3-mini": 2e5,
|
|
151
|
+
"o3-mini-2025-01-31": 2e5,
|
|
152
|
+
// GPT-4
|
|
153
|
+
"gpt-4o": 128e3,
|
|
154
|
+
"chatgpt-4o-latest": 128e3,
|
|
155
|
+
"gpt-4o-2024-08-06": 128e3,
|
|
156
|
+
"gpt-4o-2024-05-13": 128e3,
|
|
157
|
+
"gpt-4o-mini": 128e3,
|
|
158
|
+
"gpt-4o-mini-2024-07-18": 128e3,
|
|
159
|
+
"gpt-4-turbo": 128e3,
|
|
160
|
+
"gpt-4-turbo-2024-04-09": 128e3,
|
|
161
|
+
"gpt-4-0125-preview": 128e3,
|
|
162
|
+
"gpt-4-turbo-preview": 128e3,
|
|
163
|
+
"gpt-4-1106-preview": 128e3,
|
|
164
|
+
"gpt-4-vision-preview": 128e3,
|
|
165
|
+
"gpt-4-1106-vision-preview": 128e3,
|
|
166
|
+
"gpt-4-32k": 32768,
|
|
167
|
+
"gpt-4-32k-0613": 32768,
|
|
168
|
+
"gpt-4-32k-0314": 32768,
|
|
169
|
+
"gpt-4": 8192,
|
|
170
|
+
"gpt-4-0613": 8192,
|
|
171
|
+
"gpt-4-0314": 8192,
|
|
172
|
+
// GPT-3.5
|
|
173
|
+
"gpt-3.5-turbo-0125": 16385,
|
|
174
|
+
"gpt-3.5-turbo": 16385,
|
|
175
|
+
"gpt-3.5-turbo-1106": 16385,
|
|
176
|
+
"gpt-3.5-turbo-instruct": 4096,
|
|
177
|
+
"gpt-3.5-turbo-16k": 16385,
|
|
178
|
+
"gpt-3.5-turbo-0613": 4096,
|
|
179
|
+
"gpt-3.5-turbo-16k-0613": 16385,
|
|
180
|
+
"gpt-3.5-turbo-0301": 4097
|
|
181
|
+
};
|
|
182
|
+
function countToolsTokens(model, tools) {
|
|
183
|
+
if (tools.length === 0) {
|
|
184
|
+
return 0;
|
|
185
|
+
}
|
|
186
|
+
const json = JSON.stringify(tools);
|
|
187
|
+
return countTokens(model, json);
|
|
188
|
+
}
|
|
189
|
+
__name(countToolsTokens, "countToolsTokens");
|
|
190
|
+
function countMessageTokens(model, message) {
|
|
191
|
+
return countTokens(model, message.content || "");
|
|
192
|
+
}
|
|
193
|
+
__name(countMessageTokens, "countMessageTokens");
|
|
194
|
+
function countTokens(model, text) {
|
|
195
|
+
return text.length / 3;
|
|
196
|
+
}
|
|
197
|
+
__name(countTokens, "countTokens");
|
|
198
|
+
function convertActionInputToOpenAITool(action) {
|
|
199
|
+
return {
|
|
200
|
+
type: "function",
|
|
201
|
+
function: {
|
|
202
|
+
name: action.name,
|
|
203
|
+
description: action.description,
|
|
204
|
+
parameters: parseJson(action.jsonSchema, {})
|
|
205
|
+
}
|
|
206
|
+
};
|
|
207
|
+
}
|
|
208
|
+
__name(convertActionInputToOpenAITool, "convertActionInputToOpenAITool");
|
|
209
|
+
function convertMessageToOpenAIMessage(message, options) {
|
|
210
|
+
const { keepSystemRole } = options || {
|
|
211
|
+
keepSystemRole: false
|
|
212
|
+
};
|
|
213
|
+
if (message.isTextMessage()) {
|
|
214
|
+
let role = message.role;
|
|
215
|
+
if (message.role === "system" && !keepSystemRole) {
|
|
216
|
+
role = "developer";
|
|
217
|
+
}
|
|
218
|
+
return {
|
|
219
|
+
role,
|
|
220
|
+
content: message.content
|
|
221
|
+
};
|
|
222
|
+
} else if (message.isImageMessage()) {
|
|
223
|
+
return {
|
|
224
|
+
role: "user",
|
|
225
|
+
content: [
|
|
226
|
+
{
|
|
227
|
+
type: "image_url",
|
|
228
|
+
image_url: {
|
|
229
|
+
url: `data:image/${message.format};base64,${message.bytes}`
|
|
230
|
+
}
|
|
231
|
+
}
|
|
232
|
+
]
|
|
233
|
+
};
|
|
234
|
+
} else if (message.isActionExecutionMessage()) {
|
|
235
|
+
return {
|
|
236
|
+
role: "assistant",
|
|
237
|
+
tool_calls: [
|
|
238
|
+
{
|
|
239
|
+
id: message.id,
|
|
240
|
+
type: "function",
|
|
241
|
+
function: {
|
|
242
|
+
name: message.name,
|
|
243
|
+
arguments: JSON.stringify(message.arguments)
|
|
244
|
+
}
|
|
245
|
+
}
|
|
246
|
+
]
|
|
247
|
+
};
|
|
248
|
+
} else if (message.isResultMessage()) {
|
|
249
|
+
return {
|
|
250
|
+
role: "tool",
|
|
251
|
+
content: message.result,
|
|
252
|
+
tool_call_id: message.actionExecutionId
|
|
253
|
+
};
|
|
254
|
+
}
|
|
255
|
+
}
|
|
256
|
+
__name(convertMessageToOpenAIMessage, "convertMessageToOpenAIMessage");
|
|
257
|
+
function convertSystemMessageToAssistantAPI(message) {
|
|
258
|
+
return {
|
|
259
|
+
...message,
|
|
260
|
+
...[
|
|
261
|
+
"system",
|
|
262
|
+
"developer"
|
|
263
|
+
].includes(message.role) && {
|
|
264
|
+
role: "assistant",
|
|
265
|
+
content: "THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: " + message.content
|
|
266
|
+
}
|
|
267
|
+
};
|
|
268
|
+
}
|
|
269
|
+
__name(convertSystemMessageToAssistantAPI, "convertSystemMessageToAssistantAPI");
|
|
270
|
+
|
|
271
|
+
// src/service-adapters/openai/openai-adapter.ts
|
|
272
|
+
import { randomUUID } from "@copilotkit/shared";
|
|
273
|
+
var DEFAULT_MODEL = "gpt-4o";
|
|
274
|
+
var OpenAIAdapter = class {
|
|
275
|
+
model = DEFAULT_MODEL;
|
|
276
|
+
disableParallelToolCalls = false;
|
|
277
|
+
_openai;
|
|
278
|
+
keepSystemRole = false;
|
|
279
|
+
get openai() {
|
|
280
|
+
return this._openai;
|
|
281
|
+
}
|
|
282
|
+
constructor(params) {
|
|
283
|
+
this._openai = (params == null ? void 0 : params.openai) || new OpenAI({});
|
|
284
|
+
if (params == null ? void 0 : params.model) {
|
|
285
|
+
this.model = params.model;
|
|
286
|
+
}
|
|
287
|
+
this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
|
|
288
|
+
this.keepSystemRole = (params == null ? void 0 : params.keepSystemRole) ?? false;
|
|
289
|
+
}
|
|
290
|
+
async process(request) {
|
|
291
|
+
const { threadId: threadIdFromRequest, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
|
|
292
|
+
const tools = actions.map(convertActionInputToOpenAITool);
|
|
293
|
+
const threadId = threadIdFromRequest ?? randomUUID();
|
|
294
|
+
const validToolUseIds = /* @__PURE__ */ new Set();
|
|
295
|
+
for (const message of messages) {
|
|
296
|
+
if (message.isActionExecutionMessage()) {
|
|
297
|
+
validToolUseIds.add(message.id);
|
|
298
|
+
}
|
|
299
|
+
}
|
|
300
|
+
const filteredMessages = messages.filter((message) => {
|
|
301
|
+
if (message.isResultMessage()) {
|
|
302
|
+
if (!validToolUseIds.has(message.actionExecutionId)) {
|
|
303
|
+
return false;
|
|
304
|
+
}
|
|
305
|
+
validToolUseIds.delete(message.actionExecutionId);
|
|
306
|
+
return true;
|
|
307
|
+
}
|
|
308
|
+
return true;
|
|
309
|
+
});
|
|
310
|
+
let openaiMessages = filteredMessages.map((m) => convertMessageToOpenAIMessage(m, {
|
|
311
|
+
keepSystemRole: this.keepSystemRole
|
|
312
|
+
}));
|
|
313
|
+
openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);
|
|
314
|
+
let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
|
|
315
|
+
if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {
|
|
316
|
+
toolChoice = {
|
|
317
|
+
type: "function",
|
|
318
|
+
function: {
|
|
319
|
+
name: forwardedParameters.toolChoiceFunctionName
|
|
320
|
+
}
|
|
321
|
+
};
|
|
322
|
+
}
|
|
323
|
+
try {
|
|
324
|
+
const stream = this.openai.beta.chat.completions.stream({
|
|
325
|
+
model,
|
|
326
|
+
stream: true,
|
|
327
|
+
messages: openaiMessages,
|
|
328
|
+
...tools.length > 0 && {
|
|
329
|
+
tools
|
|
330
|
+
},
|
|
331
|
+
...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
|
|
332
|
+
max_completion_tokens: forwardedParameters.maxTokens
|
|
333
|
+
},
|
|
334
|
+
...(forwardedParameters == null ? void 0 : forwardedParameters.stop) && {
|
|
335
|
+
stop: forwardedParameters.stop
|
|
336
|
+
},
|
|
337
|
+
...toolChoice && {
|
|
338
|
+
tool_choice: toolChoice
|
|
339
|
+
},
|
|
340
|
+
...this.disableParallelToolCalls && {
|
|
341
|
+
parallel_tool_calls: false
|
|
342
|
+
},
|
|
343
|
+
...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
|
|
344
|
+
temperature: forwardedParameters.temperature
|
|
345
|
+
}
|
|
346
|
+
});
|
|
347
|
+
eventSource.stream(async (eventStream$) => {
|
|
348
|
+
var _a, _b;
|
|
349
|
+
let mode = null;
|
|
350
|
+
let currentMessageId;
|
|
351
|
+
let currentToolCallId;
|
|
352
|
+
try {
|
|
353
|
+
for await (const chunk of stream) {
|
|
354
|
+
if (chunk.choices.length === 0) {
|
|
355
|
+
continue;
|
|
356
|
+
}
|
|
357
|
+
const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
|
|
358
|
+
const content = chunk.choices[0].delta.content;
|
|
359
|
+
if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
|
|
360
|
+
mode = null;
|
|
361
|
+
eventStream$.sendTextMessageEnd({
|
|
362
|
+
messageId: currentMessageId
|
|
363
|
+
});
|
|
364
|
+
} else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
|
|
365
|
+
mode = null;
|
|
366
|
+
eventStream$.sendActionExecutionEnd({
|
|
367
|
+
actionExecutionId: currentToolCallId
|
|
368
|
+
});
|
|
369
|
+
}
|
|
370
|
+
if (mode === null) {
|
|
371
|
+
if (toolCall == null ? void 0 : toolCall.id) {
|
|
372
|
+
mode = "function";
|
|
373
|
+
currentToolCallId = toolCall.id;
|
|
374
|
+
eventStream$.sendActionExecutionStart({
|
|
375
|
+
actionExecutionId: currentToolCallId,
|
|
376
|
+
parentMessageId: chunk.id,
|
|
377
|
+
actionName: toolCall.function.name
|
|
378
|
+
});
|
|
379
|
+
} else if (content) {
|
|
380
|
+
mode = "message";
|
|
381
|
+
currentMessageId = chunk.id;
|
|
382
|
+
eventStream$.sendTextMessageStart({
|
|
383
|
+
messageId: currentMessageId
|
|
384
|
+
});
|
|
385
|
+
}
|
|
386
|
+
}
|
|
387
|
+
if (mode === "message" && content) {
|
|
388
|
+
eventStream$.sendTextMessageContent({
|
|
389
|
+
messageId: currentMessageId,
|
|
390
|
+
content
|
|
391
|
+
});
|
|
392
|
+
} else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
|
|
393
|
+
eventStream$.sendActionExecutionArgs({
|
|
394
|
+
actionExecutionId: currentToolCallId,
|
|
395
|
+
args: toolCall.function.arguments
|
|
396
|
+
});
|
|
397
|
+
}
|
|
398
|
+
}
|
|
399
|
+
if (mode === "message") {
|
|
400
|
+
eventStream$.sendTextMessageEnd({
|
|
401
|
+
messageId: currentMessageId
|
|
402
|
+
});
|
|
403
|
+
} else if (mode === "function") {
|
|
404
|
+
eventStream$.sendActionExecutionEnd({
|
|
405
|
+
actionExecutionId: currentToolCallId
|
|
406
|
+
});
|
|
407
|
+
}
|
|
408
|
+
} catch (error) {
|
|
409
|
+
console.error("[OpenAI] Error during API call:", error);
|
|
410
|
+
throw convertServiceAdapterError(error, "OpenAI");
|
|
411
|
+
}
|
|
412
|
+
eventStream$.complete();
|
|
413
|
+
});
|
|
414
|
+
} catch (error) {
|
|
415
|
+
console.error("[OpenAI] Error during API call:", error);
|
|
416
|
+
throw convertServiceAdapterError(error, "OpenAI");
|
|
417
|
+
}
|
|
418
|
+
return {
|
|
419
|
+
threadId
|
|
420
|
+
};
|
|
421
|
+
}
|
|
422
|
+
};
|
|
423
|
+
__name(OpenAIAdapter, "OpenAIAdapter");
|
|
424
|
+
|
|
425
|
+
// src/service-adapters/langchain/utils.ts
|
|
426
|
+
import { AIMessage, HumanMessage, SystemMessage, ToolMessage } from "@langchain/core/messages";
|
|
427
|
+
import { DynamicStructuredTool } from "@langchain/core/tools";
|
|
428
|
+
import { randomId, convertJsonSchemaToZodSchema } from "@copilotkit/shared";
|
|
429
|
+
function convertMessageToLangChainMessage(message) {
|
|
430
|
+
if (message.isTextMessage()) {
|
|
431
|
+
if (message.role == "user") {
|
|
432
|
+
return new HumanMessage(message.content);
|
|
433
|
+
} else if (message.role == "assistant") {
|
|
434
|
+
return new AIMessage(message.content);
|
|
435
|
+
} else if (message.role === "system") {
|
|
436
|
+
return new SystemMessage(message.content);
|
|
437
|
+
}
|
|
438
|
+
} else if (message.isActionExecutionMessage()) {
|
|
439
|
+
return new AIMessage({
|
|
440
|
+
content: "",
|
|
441
|
+
tool_calls: [
|
|
442
|
+
{
|
|
443
|
+
id: message.id,
|
|
444
|
+
args: message.arguments,
|
|
445
|
+
name: message.name
|
|
446
|
+
}
|
|
447
|
+
]
|
|
448
|
+
});
|
|
449
|
+
} else if (message.isResultMessage()) {
|
|
450
|
+
return new ToolMessage({
|
|
451
|
+
content: message.result,
|
|
452
|
+
tool_call_id: message.actionExecutionId
|
|
453
|
+
});
|
|
454
|
+
}
|
|
455
|
+
}
|
|
456
|
+
__name(convertMessageToLangChainMessage, "convertMessageToLangChainMessage");
|
|
457
|
+
function convertActionInputToLangChainTool(actionInput) {
|
|
458
|
+
return new DynamicStructuredTool({
|
|
459
|
+
...actionInput,
|
|
460
|
+
name: actionInput.name,
|
|
461
|
+
description: actionInput.description,
|
|
462
|
+
schema: convertJsonSchemaToZodSchema(JSON.parse(actionInput.jsonSchema), true),
|
|
463
|
+
func: async () => {
|
|
464
|
+
return "";
|
|
465
|
+
}
|
|
466
|
+
});
|
|
467
|
+
}
|
|
468
|
+
__name(convertActionInputToLangChainTool, "convertActionInputToLangChainTool");
|
|
469
|
+
function isAIMessage(message) {
|
|
470
|
+
return Object.prototype.toString.call(message) === "[object AIMessage]";
|
|
471
|
+
}
|
|
472
|
+
__name(isAIMessage, "isAIMessage");
|
|
473
|
+
function isAIMessageChunk(message) {
|
|
474
|
+
return Object.prototype.toString.call(message) === "[object AIMessageChunk]";
|
|
475
|
+
}
|
|
476
|
+
__name(isAIMessageChunk, "isAIMessageChunk");
|
|
477
|
+
function isBaseMessageChunk(message) {
|
|
478
|
+
return Object.prototype.toString.call(message) === "[object BaseMessageChunk]";
|
|
479
|
+
}
|
|
480
|
+
__name(isBaseMessageChunk, "isBaseMessageChunk");
|
|
481
|
+
function maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution) {
|
|
482
|
+
if (actionExecution) {
|
|
483
|
+
eventStream$.sendActionExecutionResult({
|
|
484
|
+
actionExecutionId: actionExecution.id,
|
|
485
|
+
actionName: actionExecution.name,
|
|
486
|
+
result: "Sending a message"
|
|
487
|
+
});
|
|
488
|
+
}
|
|
489
|
+
}
|
|
490
|
+
__name(maybeSendActionExecutionResultIsMessage, "maybeSendActionExecutionResultIsMessage");
|
|
491
|
+
async function streamLangChainResponse({ result, eventStream$, actionExecution }) {
|
|
492
|
+
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
|
|
493
|
+
if (typeof result === "string") {
|
|
494
|
+
if (!actionExecution || (actionExecution == null ? void 0 : actionExecution.returnDirect)) {
|
|
495
|
+
eventStream$.sendActionExecutionResult({
|
|
496
|
+
actionExecutionId: actionExecution.id,
|
|
497
|
+
actionName: actionExecution.name,
|
|
498
|
+
result
|
|
499
|
+
});
|
|
500
|
+
eventStream$.sendTextMessage(randomId(), result);
|
|
501
|
+
} else {
|
|
502
|
+
eventStream$.sendActionExecutionResult({
|
|
503
|
+
actionExecutionId: actionExecution.id,
|
|
504
|
+
actionName: actionExecution.name,
|
|
505
|
+
result
|
|
506
|
+
});
|
|
507
|
+
}
|
|
508
|
+
} else if (isAIMessage(result)) {
|
|
509
|
+
maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
|
|
510
|
+
if (result.content) {
|
|
511
|
+
eventStream$.sendTextMessage(randomId(), result.content);
|
|
512
|
+
}
|
|
513
|
+
for (const toolCall of result.tool_calls) {
|
|
514
|
+
eventStream$.sendActionExecution({
|
|
515
|
+
actionExecutionId: toolCall.id || randomId(),
|
|
516
|
+
actionName: toolCall.name,
|
|
517
|
+
args: JSON.stringify(toolCall.args)
|
|
518
|
+
});
|
|
519
|
+
}
|
|
520
|
+
} else if (isBaseMessageChunk(result)) {
|
|
521
|
+
maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
|
|
522
|
+
if ((_a = result.lc_kwargs) == null ? void 0 : _a.content) {
|
|
523
|
+
eventStream$.sendTextMessage(randomId(), result.content);
|
|
524
|
+
}
|
|
525
|
+
if ((_b = result.lc_kwargs) == null ? void 0 : _b.tool_calls) {
|
|
526
|
+
for (const toolCall of (_c = result.lc_kwargs) == null ? void 0 : _c.tool_calls) {
|
|
527
|
+
eventStream$.sendActionExecution({
|
|
528
|
+
actionExecutionId: toolCall.id || randomId(),
|
|
529
|
+
actionName: toolCall.name,
|
|
530
|
+
args: JSON.stringify(toolCall.args)
|
|
531
|
+
});
|
|
532
|
+
}
|
|
533
|
+
}
|
|
534
|
+
} else if (result && "getReader" in result) {
|
|
535
|
+
maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
|
|
536
|
+
let reader = result.getReader();
|
|
537
|
+
let mode = null;
|
|
538
|
+
let currentMessageId;
|
|
539
|
+
const toolCallDetails = {
|
|
540
|
+
name: null,
|
|
541
|
+
id: null,
|
|
542
|
+
index: null,
|
|
543
|
+
prevIndex: null
|
|
544
|
+
};
|
|
545
|
+
while (true) {
|
|
546
|
+
try {
|
|
547
|
+
const { done, value } = await reader.read();
|
|
548
|
+
let toolCallName = void 0;
|
|
549
|
+
let toolCallId = void 0;
|
|
550
|
+
let toolCallArgs = void 0;
|
|
551
|
+
let hasToolCall = false;
|
|
552
|
+
let content = "";
|
|
553
|
+
if (value && value.content) {
|
|
554
|
+
content = Array.isArray(value.content) ? ((_d = value.content[0]) == null ? void 0 : _d.text) ?? "" : value.content;
|
|
555
|
+
}
|
|
556
|
+
if (isAIMessageChunk(value)) {
|
|
557
|
+
let chunk = (_e = value.tool_call_chunks) == null ? void 0 : _e[0];
|
|
558
|
+
toolCallArgs = chunk == null ? void 0 : chunk.args;
|
|
559
|
+
hasToolCall = chunk != void 0;
|
|
560
|
+
if (chunk == null ? void 0 : chunk.name)
|
|
561
|
+
toolCallDetails.name = chunk.name;
|
|
562
|
+
if ((chunk == null ? void 0 : chunk.index) != null) {
|
|
563
|
+
toolCallDetails.index = chunk.index;
|
|
564
|
+
if (toolCallDetails.prevIndex == null)
|
|
565
|
+
toolCallDetails.prevIndex = chunk.index;
|
|
566
|
+
}
|
|
567
|
+
if (chunk == null ? void 0 : chunk.id)
|
|
568
|
+
toolCallDetails.id = chunk.index != null ? `${chunk.id}-idx-${chunk.index}` : chunk.id;
|
|
569
|
+
toolCallName = toolCallDetails.name;
|
|
570
|
+
toolCallId = toolCallDetails.id;
|
|
571
|
+
} else if (isBaseMessageChunk(value)) {
|
|
572
|
+
let chunk = (_g = (_f = value.additional_kwargs) == null ? void 0 : _f.tool_calls) == null ? void 0 : _g[0];
|
|
573
|
+
toolCallName = (_h = chunk == null ? void 0 : chunk.function) == null ? void 0 : _h.name;
|
|
574
|
+
toolCallId = chunk == null ? void 0 : chunk.id;
|
|
575
|
+
toolCallArgs = (_i = chunk == null ? void 0 : chunk.function) == null ? void 0 : _i.arguments;
|
|
576
|
+
hasToolCall = (chunk == null ? void 0 : chunk.function) != void 0;
|
|
577
|
+
}
|
|
578
|
+
if (mode === "message" && (toolCallId || done)) {
|
|
579
|
+
mode = null;
|
|
580
|
+
eventStream$.sendTextMessageEnd({
|
|
581
|
+
messageId: currentMessageId
|
|
582
|
+
});
|
|
583
|
+
} else if (mode === "function" && (!hasToolCall || done)) {
|
|
584
|
+
mode = null;
|
|
585
|
+
eventStream$.sendActionExecutionEnd({
|
|
586
|
+
actionExecutionId: toolCallId
|
|
587
|
+
});
|
|
588
|
+
}
|
|
589
|
+
if (done) {
|
|
590
|
+
break;
|
|
591
|
+
}
|
|
592
|
+
if (mode === null) {
|
|
593
|
+
if (hasToolCall && toolCallId && toolCallName) {
|
|
594
|
+
mode = "function";
|
|
595
|
+
eventStream$.sendActionExecutionStart({
|
|
596
|
+
actionExecutionId: toolCallId,
|
|
597
|
+
actionName: toolCallName,
|
|
598
|
+
parentMessageId: (_j = value.lc_kwargs) == null ? void 0 : _j.id
|
|
599
|
+
});
|
|
600
|
+
} else if (content) {
|
|
601
|
+
mode = "message";
|
|
602
|
+
currentMessageId = ((_k = value.lc_kwargs) == null ? void 0 : _k.id) || randomId();
|
|
603
|
+
eventStream$.sendTextMessageStart({
|
|
604
|
+
messageId: currentMessageId
|
|
605
|
+
});
|
|
606
|
+
}
|
|
607
|
+
}
|
|
608
|
+
if (mode === "message" && content) {
|
|
609
|
+
eventStream$.sendTextMessageContent({
|
|
610
|
+
messageId: currentMessageId,
|
|
611
|
+
content
|
|
612
|
+
});
|
|
613
|
+
} else if (mode === "function" && toolCallArgs) {
|
|
614
|
+
if (toolCallDetails.index !== toolCallDetails.prevIndex) {
|
|
615
|
+
eventStream$.sendActionExecutionEnd({
|
|
616
|
+
actionExecutionId: toolCallId
|
|
617
|
+
});
|
|
618
|
+
eventStream$.sendActionExecutionStart({
|
|
619
|
+
actionExecutionId: toolCallId,
|
|
620
|
+
actionName: toolCallName,
|
|
621
|
+
parentMessageId: (_l = value.lc_kwargs) == null ? void 0 : _l.id
|
|
622
|
+
});
|
|
623
|
+
toolCallDetails.prevIndex = toolCallDetails.index;
|
|
624
|
+
}
|
|
625
|
+
eventStream$.sendActionExecutionArgs({
|
|
626
|
+
actionExecutionId: toolCallId,
|
|
627
|
+
args: toolCallArgs
|
|
628
|
+
});
|
|
629
|
+
}
|
|
630
|
+
} catch (error) {
|
|
631
|
+
console.error("Error reading from stream", error);
|
|
632
|
+
break;
|
|
633
|
+
}
|
|
634
|
+
}
|
|
635
|
+
} else if (actionExecution) {
|
|
636
|
+
eventStream$.sendActionExecutionResult({
|
|
637
|
+
actionExecutionId: actionExecution.id,
|
|
638
|
+
actionName: actionExecution.name,
|
|
639
|
+
result: encodeResult(result)
|
|
640
|
+
});
|
|
641
|
+
} else {
|
|
642
|
+
throw new Error("Invalid return type from LangChain function.");
|
|
643
|
+
}
|
|
644
|
+
eventStream$.complete();
|
|
645
|
+
}
|
|
646
|
+
__name(streamLangChainResponse, "streamLangChainResponse");
|
|
647
|
+
function encodeResult(result) {
  // Serialize an action result for transport over the event stream:
  // `undefined` becomes an empty string, strings pass through untouched,
  // and everything else is JSON-encoded.
  if (result === void 0) return "";
  return typeof result === "string" ? result : JSON.stringify(result);
}
|
|
656
|
+
__name(encodeResult, "encodeResult");
|
|
657
|
+
|
|
658
|
+
// src/service-adapters/langchain/langchain-adapter.ts
|
|
659
|
+
import { randomUUID as randomUUID2 } from "@copilotkit/shared";
|
|
660
|
+
import { awaitAllCallbacks } from "@langchain/core/callbacks/promises";
|
|
661
|
+
var LangChainAdapter = class {
  options;
  /**
   * Service adapter that delegates to a user-supplied LangChain chain.
   * To use LangChain as a backend, provide a handler function (`chainFn`)
   * with your custom LangChain logic.
   */
  constructor(options) {
    this.options = options;
  }
  /**
   * Runs the configured chain against the incoming request and streams the
   * chain's result to the event source. Pending LangChain callbacks are
   * always flushed before returning, even if the chain throws.
   */
  async process(request) {
    try {
      const { eventSource, model, actions, messages, runId } = request;
      // Reuse the caller's thread id when present, otherwise mint a new one.
      const threadId = request.threadId ?? randomUUID2();
      const chainInput = {
        messages: messages.map(convertMessageToLangChainMessage),
        tools: actions.map(convertActionInputToLangChainTool),
        model,
        threadId,
        runId
      };
      const result = await this.options.chainFn(chainInput);
      eventSource.stream((eventStream$) => streamLangChainResponse({ result, eventStream$ }));
      return { threadId };
    } finally {
      // Ensure LangChain's deferred callback handlers have completed.
      await awaitAllCallbacks();
    }
  }
};
|
|
694
|
+
__name(LangChainAdapter, "LangChainAdapter");
|
|
695
|
+
|
|
696
|
+
// src/service-adapters/google/google-genai-adapter.ts
|
|
697
|
+
import { ChatGoogle } from "@langchain/google-gauth";
|
|
698
|
+
import { AIMessage as AIMessage2 } from "@langchain/core/messages";
|
|
699
|
+
var GoogleGenerativeAIAdapter = class extends LangChainAdapter {
  /**
   * LangChain-backed adapter for Google's generative models.
   * Builds a `ChatGoogle` client per request and streams its output.
   */
  constructor(options) {
    // Keep any non-AI message; drop AI messages that carry neither
    // trimmed text content nor tool calls (empty assistant turns).
    const hasUsableContent = (message) => {
      if (!(message instanceof AIMessage2)) {
        return true;
      }
      return message.content && String(message.content).trim().length > 0 || message.tool_calls && message.tool_calls.length > 0;
    };
    super({
      chainFn: async ({ messages, tools, threadId }) => {
        const chat = new ChatGoogle({
          apiKey: options?.apiKey ?? process.env.GOOGLE_API_KEY,
          modelName: options?.model ?? "gemini-1.5-pro",
          apiVersion: "v1beta"
        }).bindTools(tools);
        // Tag the stream with the conversation id for tracing.
        return chat.stream(messages.filter(hasUsableContent), {
          metadata: {
            conversation_id: threadId
          }
        });
      }
    });
  }
};
|
|
723
|
+
__name(GoogleGenerativeAIAdapter, "GoogleGenerativeAIAdapter");
|
|
724
|
+
|
|
725
|
+
// src/service-adapters/openai/openai-assistant-adapter.ts
|
|
726
|
+
import OpenAI2 from "openai";
|
|
727
|
+
var OpenAIAssistantAdapter = class {
  openai;
  // Whether the assistant may use OpenAI's built-in code interpreter tool.
  codeInterpreterEnabled;
  assistantId;
  // Whether the assistant may use OpenAI's built-in file-search tool.
  fileSearchEnabled;
  disableParallelToolCalls;
  keepSystemRole = false;
  /**
   * Service adapter backed by the OpenAI Assistants API.
   *
   * @param params.openai optional preconfigured OpenAI client; a default one is created otherwise
   * @param params.assistantId id of the assistant to run
   * @param params.codeInterpreterEnabled defaults to true; pass `false` to disable
   * @param params.fileSearchEnabled defaults to true; pass `false` to disable
   * @param params.disableParallelToolCalls defaults to false
   * @param params.keepSystemRole defaults to false
   */
  constructor(params) {
    this.openai = params.openai || new OpenAI2({});
    // FIX: previously `params.codeInterpreterEnabled === false || true`, which
    // always evaluates to true and made it impossible to disable these tools.
    // Default remains `true` when the flag is omitted.
    this.codeInterpreterEnabled = params.codeInterpreterEnabled !== false;
    this.fileSearchEnabled = params.fileSearchEnabled !== false;
    this.assistantId = params.assistantId;
    this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
    this.keepSystemRole = (params == null ? void 0 : params.keepSystemRole) ?? false;
  }
  /**
   * Routes the request either to tool-output submission (when the last message
   * is an action result and a run is in flight) or to a fresh user message.
   * Returns the run/thread ids, echoed into `extensions.openaiAssistantAPI`.
   * @throws Error when the last message is neither a result nor a text message
   */
  async process(request) {
    var _a, _b;
    const { messages, actions, eventSource, runId, forwardedParameters } = request;
    // Prefer the thread id carried in extensions; otherwise create a new thread.
    let threadId = (_b = (_a = request.extensions) == null ? void 0 : _a.openaiAssistantAPI) == null ? void 0 : _b.threadId;
    if (!threadId) {
      threadId = (await this.openai.beta.threads.create()).id;
    }
    const lastMessage = messages.at(-1);
    let nextRunId = void 0;
    if (lastMessage.isResultMessage() && runId) {
      nextRunId = await this.submitToolOutputs(threadId, runId, messages, eventSource);
    } else if (lastMessage.isTextMessage()) {
      nextRunId = await this.submitUserMessage(threadId, messages, actions, eventSource, forwardedParameters);
    } else {
      throw new Error("No actionable message found in the messages");
    }
    return {
      runId: nextRunId,
      threadId,
      extensions: {
        ...request.extensions,
        openaiAssistantAPI: {
          threadId,
          runId: nextRunId
        }
      }
    };
  }
  /**
   * Submits tool (action) results for a run that is waiting on them and
   * streams the continuation of the run.
   * @throws Error when the run requires no tool outputs, or when the number of
   *         result messages does not match the pending tool calls
   */
  async submitToolOutputs(threadId, runId, messages, eventSource) {
    let run = await this.openai.beta.threads.runs.retrieve(threadId, runId);
    if (!run.required_action) {
      throw new Error("No tool outputs required");
    }
    const toolCallsIds = run.required_action.submit_tool_outputs.tool_calls.map((toolCall) => toolCall.id);
    // Only forward results that answer one of the pending tool calls.
    const resultMessages = messages.filter((message) => message.isResultMessage() && toolCallsIds.includes(message.actionExecutionId));
    if (toolCallsIds.length != resultMessages.length) {
      throw new Error("Number of function results does not match the number of tool calls");
    }
    const toolOutputs = resultMessages.map((message) => {
      return {
        tool_call_id: message.actionExecutionId,
        output: message.result
      };
    });
    const stream = this.openai.beta.threads.runs.submitToolOutputsStream(threadId, runId, {
      tool_outputs: toolOutputs,
      ...this.disableParallelToolCalls && {
        parallel_tool_calls: false
      }
    });
    await this.streamResponse(stream, eventSource);
    return runId;
  }
  /**
   * Appends the latest user message to the thread and starts a new streamed
   * run. The first message is treated as the instructions (system) message.
   * @returns a promise resolving to the new run's id (from the stream's
   *          "thread.run.created" event)
   * @throws Error when the trailing message is not a user message
   */
  async submitUserMessage(threadId, messages, actions, eventSource, forwardedParameters) {
    // Copy before mutating: shift() below must not affect the caller's array.
    messages = [
      ...messages
    ];
    const instructionsMessage = messages.shift();
    const instructions = instructionsMessage.isTextMessage() ? instructionsMessage.content : "";
    const userMessage = messages.map((m) => convertMessageToOpenAIMessage(m, {
      keepSystemRole: this.keepSystemRole
    })).map(convertSystemMessageToAssistantAPI).at(-1);
    if (userMessage.role !== "user") {
      throw new Error("No user message found");
    }
    await this.openai.beta.threads.messages.create(threadId, {
      role: "user",
      content: userMessage.content
    });
    const openaiTools = actions.map(convertActionInputToOpenAITool);
    // Merge the app-declared actions with OpenAI's built-in tools, as enabled.
    const tools = [
      ...openaiTools,
      ...this.codeInterpreterEnabled ? [
        {
          type: "code_interpreter"
        }
      ] : [],
      ...this.fileSearchEnabled ? [
        {
          type: "file_search"
        }
      ] : []
    ];
    let stream = this.openai.beta.threads.runs.stream(threadId, {
      assistant_id: this.assistantId,
      instructions,
      tools,
      ...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
        max_completion_tokens: forwardedParameters.maxTokens
      },
      ...this.disableParallelToolCalls && {
        parallel_tool_calls: false
      }
    });
    await this.streamResponse(stream, eventSource);
    return getRunIdFromStream(stream);
  }
  /**
   * Bridges an Assistants API event stream onto the runtime event stream:
   * message events become text-message events, tool-call step deltas become
   * action-execution events. Any open action execution is closed both when a
   * new message/tool call starts and when the stream ends.
   */
  async streamResponse(stream, eventSource) {
    eventSource.stream(async (eventStream$) => {
      var _a, _b, _c, _d, _e, _f;
      let inFunctionCall = false;
      let currentMessageId;
      let currentToolCallId;
      for await (const chunk of stream) {
        switch (chunk.event) {
          case "thread.message.created":
            // A new assistant message implicitly ends any open tool call.
            if (inFunctionCall) {
              eventStream$.sendActionExecutionEnd({
                actionExecutionId: currentToolCallId
              });
            }
            currentMessageId = chunk.data.id;
            eventStream$.sendTextMessageStart({
              messageId: currentMessageId
            });
            break;
          case "thread.message.delta":
            if (((_a = chunk.data.delta.content) == null ? void 0 : _a[0].type) === "text") {
              eventStream$.sendTextMessageContent({
                messageId: currentMessageId,
                content: (_b = chunk.data.delta.content) == null ? void 0 : _b[0].text.value
              });
            }
            break;
          case "thread.message.completed":
            eventStream$.sendTextMessageEnd({
              messageId: currentMessageId
            });
            break;
          case "thread.run.step.delta":
            let toolCallId;
            let toolCallName;
            let toolCallArgs;
            if (chunk.data.delta.step_details.type === "tool_calls" && ((_c = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _c[0].type) === "function") {
              toolCallId = (_d = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _d[0].id;
              toolCallName = (_e = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _e[0].function.name;
              toolCallArgs = (_f = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _f[0].function.arguments;
            }
            // Name + id together mark the start of a (new) tool call; bare
            // arguments are streamed into the currently open one.
            if (toolCallName && toolCallId) {
              if (inFunctionCall) {
                eventStream$.sendActionExecutionEnd({
                  actionExecutionId: currentToolCallId
                });
              }
              inFunctionCall = true;
              currentToolCallId = toolCallId;
              eventStream$.sendActionExecutionStart({
                actionExecutionId: currentToolCallId,
                parentMessageId: chunk.data.id,
                actionName: toolCallName
              });
            } else if (toolCallArgs) {
              eventStream$.sendActionExecutionArgs({
                actionExecutionId: currentToolCallId,
                args: toolCallArgs
              });
            }
            break;
        }
      }
      if (inFunctionCall) {
        eventStream$.sendActionExecutionEnd({
          actionExecutionId: currentToolCallId
        });
      }
      eventStream$.complete();
    });
  }
};
|
|
911
|
+
__name(OpenAIAssistantAdapter, "OpenAIAssistantAdapter");
|
|
912
|
+
function getRunIdFromStream(stream) {
  // Resolves with the run id announced by the first "thread.run.created"
  // event, detaching the listener once it fires.
  // NOTE(review): the promise never settles if that event is never emitted.
  return new Promise((resolve) => {
    function runIdGetter(event) {
      if (event.event !== "thread.run.created") {
        return;
      }
      stream.off("event", runIdGetter);
      resolve(event.data.id);
    }
    stream.on("event", runIdGetter);
  });
}
|
|
924
|
+
__name(getRunIdFromStream, "getRunIdFromStream");
|
|
925
|
+
|
|
926
|
+
// src/service-adapters/unify/unify-adapter.ts
|
|
927
|
+
import OpenAI3 from "openai";
|
|
928
|
+
import { randomId as randomId2, randomUUID as randomUUID3 } from "@copilotkit/shared";
|
|
929
|
+
// Service adapter for Unify's OpenAI-compatible chat-completions endpoint.
// Streams text and tool-call deltas onto the runtime event stream using a
// small "message"/"function" mode state machine.
var UnifyAdapter = class {
  // API key sent to Unify.
  apiKey;
  // Model/router string forwarded to Unify (may be undefined).
  model;
  // When true, the next processed chunk emits a one-off "Model used" banner.
  // NOTE(review): this is instance state, so the banner is emitted only once
  // per adapter instance, not once per request — confirm intended.
  start;
  constructor(options) {
    if (options == null ? void 0 : options.apiKey) {
      this.apiKey = options.apiKey;
    } else {
      // NOTE(review): falls back to the literal string "UNIFY_API_KEY",
      // not to an environment variable — presumably a placeholder; verify.
      this.apiKey = "UNIFY_API_KEY";
    }
    this.model = options == null ? void 0 : options.model;
    this.start = true;
  }
  // Runs one chat-completions request and streams the response.
  // Returns the thread id (existing or freshly generated).
  async process(request) {
    const tools = request.actions.map(convertActionInputToOpenAITool);
    // Unify speaks the OpenAI wire protocol; only the base URL differs.
    const openai = new OpenAI3({
      apiKey: this.apiKey,
      baseURL: "https://api.unify.ai/v0/"
    });
    const forwardedParameters = request.forwardedParameters;
    const messages = request.messages.map((m) => convertMessageToOpenAIMessage(m));
    const stream = await openai.chat.completions.create({
      model: this.model,
      messages,
      stream: true,
      // Only attach optional fields when they carry a usable value.
      ...tools.length > 0 && {
        tools
      },
      // NOTE(review): a temperature of 0 is falsy and therefore dropped here.
      ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
        temperature: forwardedParameters.temperature
      }
    });
    let model = null;
    let currentMessageId;
    let currentToolCallId;
    request.eventSource.stream(async (eventStream$) => {
      var _a, _b;
      // mode: null (idle) | "message" (text streaming) | "function" (tool call).
      let mode = null;
      for await (const chunk of stream) {
        // One-time banner announcing which concrete model Unify routed to.
        if (this.start) {
          model = chunk.model;
          currentMessageId = randomId2();
          eventStream$.sendTextMessageStart({
            messageId: currentMessageId
          });
          eventStream$.sendTextMessageContent({
            messageId: currentMessageId,
            content: `Model used: ${model}
`
          });
          eventStream$.sendTextMessageEnd({
            messageId: currentMessageId
          });
          this.start = false;
        }
        const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
        const content = chunk.choices[0].delta.content;
        // Close the current message/tool call when the stream switches kind.
        if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
          mode = null;
          eventStream$.sendTextMessageEnd({
            messageId: currentMessageId
          });
        } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
          mode = null;
          eventStream$.sendActionExecutionEnd({
            actionExecutionId: currentToolCallId
          });
        }
        // Open a new message or tool call when idle.
        if (mode === null) {
          if (toolCall == null ? void 0 : toolCall.id) {
            mode = "function";
            currentToolCallId = toolCall.id;
            eventStream$.sendActionExecutionStart({
              actionExecutionId: currentToolCallId,
              actionName: toolCall.function.name
            });
          } else if (content) {
            mode = "message";
            currentMessageId = chunk.id;
            eventStream$.sendTextMessageStart({
              messageId: currentMessageId
            });
          }
        }
        // Forward this chunk's payload into whatever is currently open.
        if (mode === "message" && content) {
          eventStream$.sendTextMessageContent({
            messageId: currentMessageId,
            content
          });
        } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
          eventStream$.sendActionExecutionArgs({
            actionExecutionId: currentToolCallId,
            args: toolCall.function.arguments
          });
        }
      }
      // Flush whichever state is still open when the stream ends.
      if (mode === "message") {
        eventStream$.sendTextMessageEnd({
          messageId: currentMessageId
        });
      } else if (mode === "function") {
        eventStream$.sendActionExecutionEnd({
          actionExecutionId: currentToolCallId
        });
      }
      eventStream$.complete();
    });
    return {
      threadId: request.threadId || randomUUID3()
    };
  }
};
|
|
1041
|
+
__name(UnifyAdapter, "UnifyAdapter");
|
|
1042
|
+
|
|
1043
|
+
// src/service-adapters/groq/groq-adapter.ts
|
|
1044
|
+
import { Groq } from "groq-sdk";
|
|
1045
|
+
import { randomUUID as randomUUID4 } from "@copilotkit/shared";
|
|
1046
|
+
var DEFAULT_MODEL2 = "llama-3.3-70b-versatile";
|
|
1047
|
+
// Service adapter for Groq's OpenAI-compatible chat-completions API.
// Streams text and tool-call deltas with the same "message"/"function"
// mode state machine used by the other completion adapters.
var GroqAdapter = class {
  // Model name; defaults to DEFAULT_MODEL2.
  model = DEFAULT_MODEL2;
  disableParallelToolCalls = false;
  _groq;
  // Underlying Groq SDK client (injected or default-constructed).
  get groq() {
    return this._groq;
  }
  constructor(params) {
    this._groq = (params == null ? void 0 : params.groq) || new Groq({});
    if (params == null ? void 0 : params.model) {
      this.model = params.model;
    }
    this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
  }
  // Runs one streamed completion; errors from the Groq SDK are normalized
  // via convertServiceAdapterError both at request time and mid-stream.
  async process(request) {
    const { threadId, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
    const tools = actions.map(convertActionInputToOpenAITool);
    let openaiMessages = messages.map((m) => convertMessageToOpenAIMessage(m, {
      keepSystemRole: true
    }));
    // Trim history so messages + tool definitions fit the model's context.
    openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);
    let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
    // "function" tool choice needs to be expanded into the object form.
    if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {
      toolChoice = {
        type: "function",
        function: {
          name: forwardedParameters.toolChoiceFunctionName
        }
      };
    }
    let stream;
    try {
      stream = await this.groq.chat.completions.create({
        model,
        stream: true,
        messages: openaiMessages,
        // Optional fields are only attached when they carry a usable value.
        ...tools.length > 0 && {
          tools
        },
        ...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
          max_tokens: forwardedParameters.maxTokens
        },
        ...(forwardedParameters == null ? void 0 : forwardedParameters.stop) && {
          stop: forwardedParameters.stop
        },
        ...toolChoice && {
          tool_choice: toolChoice
        },
        ...this.disableParallelToolCalls && {
          parallel_tool_calls: false
        },
        // NOTE(review): a temperature of 0 is falsy and therefore dropped.
        ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
          temperature: forwardedParameters.temperature
        }
      });
    } catch (error) {
      throw convertServiceAdapterError(error, "Groq");
    }
    eventSource.stream(async (eventStream$) => {
      var _a, _b;
      // mode: null (idle) | "message" (text streaming) | "function" (tool call).
      let mode = null;
      let currentMessageId;
      let currentToolCallId;
      try {
        for await (const chunk of stream) {
          const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
          const content = chunk.choices[0].delta.content;
          // Close the current message/tool call when the stream switches kind.
          if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
            mode = null;
            eventStream$.sendTextMessageEnd({
              messageId: currentMessageId
            });
          } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
            mode = null;
            eventStream$.sendActionExecutionEnd({
              actionExecutionId: currentToolCallId
            });
          }
          // Open a new message or tool call when idle.
          if (mode === null) {
            if (toolCall == null ? void 0 : toolCall.id) {
              mode = "function";
              currentToolCallId = toolCall.id;
              eventStream$.sendActionExecutionStart({
                actionExecutionId: currentToolCallId,
                actionName: toolCall.function.name,
                parentMessageId: chunk.id
              });
            } else if (content) {
              mode = "message";
              currentMessageId = chunk.id;
              eventStream$.sendTextMessageStart({
                messageId: currentMessageId
              });
            }
          }
          // Forward this chunk's payload into whatever is currently open.
          if (mode === "message" && content) {
            eventStream$.sendTextMessageContent({
              messageId: currentMessageId,
              content
            });
          } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
            eventStream$.sendActionExecutionArgs({
              actionExecutionId: currentToolCallId,
              args: toolCall.function.arguments
            });
          }
        }
        // Flush whichever state is still open when the stream ends.
        if (mode === "message") {
          eventStream$.sendTextMessageEnd({
            messageId: currentMessageId
          });
        } else if (mode === "function") {
          eventStream$.sendActionExecutionEnd({
            actionExecutionId: currentToolCallId
          });
        }
      } catch (error) {
        throw convertServiceAdapterError(error, "Groq");
      }
      eventStream$.complete();
    });
    return {
      threadId: request.threadId || randomUUID4()
    };
  }
};
|
|
1173
|
+
__name(GroqAdapter, "GroqAdapter");
|
|
1174
|
+
|
|
1175
|
+
// src/service-adapters/anthropic/anthropic-adapter.ts
|
|
1176
|
+
import Anthropic from "@anthropic-ai/sdk";
|
|
1177
|
+
|
|
1178
|
+
// src/service-adapters/anthropic/utils.ts
|
|
1179
|
+
function limitMessagesToTokenCount2(messages, tools, model, maxTokens) {
  // Trim conversation history to fit the model's context budget. System
  // messages are always kept (and pre-reserved); other messages are retained
  // newest-first until the budget runs out, then older ones are dropped.
  let budget = maxTokens || MAX_TOKENS;
  const toolsNumTokens = countToolsTokens2(model, tools);
  if (toolsNumTokens > budget) {
    throw new Error(`Too many tokens in function definitions: ${toolsNumTokens} > ${budget}`);
  }
  budget -= toolsNumTokens;
  // Reserve room for every system message up front.
  for (const message of messages) {
    if (message.role !== "system") {
      continue;
    }
    budget -= countMessageTokens2(model, message);
    if (budget < 0) {
      throw new Error("Not enough tokens for system message.");
    }
  }
  const kept = [];
  let cutoff = false;
  // Walk newest-to-oldest; once one message doesn't fit, skip all older
  // non-system messages too (so the kept history stays contiguous).
  for (const message of [...messages].reverse()) {
    if (message.role === "system") {
      kept.unshift(message);
      continue;
    }
    if (cutoff) {
      continue;
    }
    const numTokens = countMessageTokens2(model, message);
    if (budget < numTokens) {
      cutoff = true;
      continue;
    }
    kept.unshift(message);
    budget -= numTokens;
  }
  return kept;
}
|
|
1217
|
+
__name(limitMessagesToTokenCount2, "limitMessagesToTokenCount");
|
|
1218
|
+
var MAX_TOKENS = 128e3;
|
|
1219
|
+
function countToolsTokens2(model, tools) {
  // No tool definitions means no token cost at all.
  if (tools.length === 0) {
    return 0;
  }
  // Approximate the cost by tokenizing the serialized definitions.
  return countTokens2(model, JSON.stringify(tools));
}
|
|
1226
|
+
__name(countToolsTokens2, "countToolsTokens");
|
|
1227
|
+
function countMessageTokens2(model, message) {
  // Serialize the content; JSON.stringify(undefined) yields undefined,
  // hence the empty-string fallback.
  const serialized = JSON.stringify(message.content) || "";
  return countTokens2(model, serialized);
}
|
|
1230
|
+
__name(countMessageTokens2, "countMessageTokens");
|
|
1231
|
+
function countTokens2(model, text) {
  // Cheap heuristic: roughly one token per three characters.
  // The `model` argument is accepted for interface symmetry but unused.
  return text.length / 3;
}
|
|
1234
|
+
__name(countTokens2, "countTokens");
|
|
1235
|
+
function convertActionInputToAnthropicTool(action) {
  // Anthropic's tool format wants the JSON schema as a parsed object
  // under `input_schema` rather than a string.
  const { name, description, jsonSchema } = action;
  return {
    name,
    description,
    input_schema: JSON.parse(jsonSchema)
  };
}
|
|
1242
|
+
__name(convertActionInputToAnthropicTool, "convertActionInputToAnthropicTool");
|
|
1243
|
+
function convertMessageToAnthropicMessage(message) {
  // Translate a runtime message into Anthropic's Messages API shape.
  // Returns undefined for message kinds that match none of the branches.
  if (message.isTextMessage()) {
    if (message.role === "system") {
      // Anthropic has no in-band system role; smuggle it in as a tagged
      // assistant turn instead.
      return {
        role: "assistant",
        content: [
          {
            type: "text",
            text: "THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: " + message.content
          }
        ]
      };
    }
    return {
      role: message.role === "user" ? "user" : "assistant",
      content: [
        {
          type: "text",
          text: message.content
        }
      ]
    };
  }
  if (message.isImageMessage()) {
    // Supported image formats and their MIME types.
    const mediaTypeByFormat = new Map([
      ["jpeg", "image/jpeg"],
      ["png", "image/png"],
      ["webp", "image/webp"],
      ["gif", "image/gif"]
    ]);
    const mediaType = mediaTypeByFormat.get(message.format);
    if (!mediaType) {
      throw new Error(`Unsupported image format: ${message.format}`);
    }
    return {
      role: "user",
      content: [
        {
          type: "image",
          source: {
            type: "base64",
            media_type: mediaType,
            data: message.bytes
          }
        }
      ]
    };
  }
  if (message.isActionExecutionMessage()) {
    return {
      role: "assistant",
      content: [
        {
          id: message.id,
          type: "tool_use",
          input: message.arguments,
          name: message.name
        }
      ]
    };
  }
  if (message.isResultMessage()) {
    return {
      role: "user",
      content: [
        {
          type: "tool_result",
          // Anthropic rejects empty tool results; substitute a stock note.
          content: message.result || "Action completed successfully",
          tool_use_id: message.actionExecutionId
        }
      ]
    };
  }
}
|
|
1322
|
+
__name(convertMessageToAnthropicMessage, "convertMessageToAnthropicMessage");
|
|
1323
|
+
|
|
1324
|
+
// src/service-adapters/anthropic/anthropic-adapter.ts
|
|
1325
|
+
import { randomId as randomId3, randomUUID as randomUUID5 } from "@copilotkit/shared";
|
|
1326
|
+
var DEFAULT_MODEL3 = "claude-3-5-sonnet-latest";
|
|
1327
|
+
var AnthropicAdapter = class {
|
|
1328
|
+
model = DEFAULT_MODEL3;
|
|
1329
|
+
promptCaching;
|
|
1330
|
+
_anthropic;
|
|
1331
|
+
get anthropic() {
|
|
1332
|
+
return this._anthropic;
|
|
1333
|
+
}
|
|
1334
|
+
constructor(params) {
|
|
1335
|
+
this._anthropic = (params == null ? void 0 : params.anthropic) || new Anthropic({});
|
|
1336
|
+
if (params == null ? void 0 : params.model) {
|
|
1337
|
+
this.model = params.model;
|
|
1338
|
+
}
|
|
1339
|
+
this.promptCaching = (params == null ? void 0 : params.promptCaching) || {
|
|
1340
|
+
enabled: false
|
|
1341
|
+
};
|
|
1342
|
+
}
|
|
1343
|
+
/**
|
|
1344
|
+
* Adds cache control to system prompt
|
|
1345
|
+
*/
|
|
1346
|
+
addSystemPromptCaching(system, debug = false) {
|
|
1347
|
+
if (!this.promptCaching.enabled || !system) {
|
|
1348
|
+
return system;
|
|
1349
|
+
}
|
|
1350
|
+
const originalTextLength = system.length;
|
|
1351
|
+
if (debug) {
|
|
1352
|
+
console.log(`[ANTHROPIC CACHE DEBUG] Added cache control to system prompt (${originalTextLength} chars).`);
|
|
1353
|
+
}
|
|
1354
|
+
return [
|
|
1355
|
+
{
|
|
1356
|
+
type: "text",
|
|
1357
|
+
text: system,
|
|
1358
|
+
cache_control: {
|
|
1359
|
+
type: "ephemeral"
|
|
1360
|
+
}
|
|
1361
|
+
}
|
|
1362
|
+
];
|
|
1363
|
+
}
|
|
1364
|
+
/**
|
|
1365
|
+
* Adds cache control to the final message
|
|
1366
|
+
*/
|
|
1367
|
+
addIncrementalMessageCaching(messages, debug = false) {
|
|
1368
|
+
if (!this.promptCaching.enabled || messages.length === 0) {
|
|
1369
|
+
return messages;
|
|
1370
|
+
}
|
|
1371
|
+
const finalMessage = messages[messages.length - 1];
|
|
1372
|
+
const messageNumber = messages.length;
|
|
1373
|
+
if (Array.isArray(finalMessage.content) && finalMessage.content.length > 0) {
|
|
1374
|
+
const finalBlock = finalMessage.content[finalMessage.content.length - 1];
|
|
1375
|
+
const updatedMessages = [
|
|
1376
|
+
...messages.slice(0, -1),
|
|
1377
|
+
{
|
|
1378
|
+
...finalMessage,
|
|
1379
|
+
content: [
|
|
1380
|
+
...finalMessage.content.slice(0, -1),
|
|
1381
|
+
{
|
|
1382
|
+
...finalBlock,
|
|
1383
|
+
cache_control: {
|
|
1384
|
+
type: "ephemeral"
|
|
1385
|
+
}
|
|
1386
|
+
}
|
|
1387
|
+
]
|
|
1388
|
+
}
|
|
1389
|
+
];
|
|
1390
|
+
if (debug) {
|
|
1391
|
+
console.log(`[ANTHROPIC CACHE DEBUG] Added cache control to final message (message ${messageNumber}).`);
|
|
1392
|
+
}
|
|
1393
|
+
return updatedMessages;
|
|
1394
|
+
}
|
|
1395
|
+
return messages;
|
|
1396
|
+
}
|
|
1397
|
+
shouldGenerateFallbackResponse(messages) {
|
|
1398
|
+
var _a, _b, _c;
|
|
1399
|
+
if (messages.length === 0)
|
|
1400
|
+
return false;
|
|
1401
|
+
const lastMessage = messages[messages.length - 1];
|
|
1402
|
+
const endsWithToolResult = lastMessage.role === "user" && Array.isArray(lastMessage.content) && lastMessage.content.some((content) => content.type === "tool_result");
|
|
1403
|
+
if (messages.length >= 3 && endsWithToolResult) {
|
|
1404
|
+
const lastThree = messages.slice(-3);
|
|
1405
|
+
const hasRecentToolPattern = ((_a = lastThree[0]) == null ? void 0 : _a.role) === "user" && // Initial user message
|
|
1406
|
+
((_b = lastThree[1]) == null ? void 0 : _b.role) === "assistant" && // Assistant tool use
|
|
1407
|
+
Array.isArray(lastThree[1].content) && lastThree[1].content.some((content) => content.type === "tool_use") && ((_c = lastThree[2]) == null ? void 0 : _c.role) === "user" && // Tool result
|
|
1408
|
+
Array.isArray(lastThree[2].content) && lastThree[2].content.some((content) => content.type === "tool_result");
|
|
1409
|
+
return hasRecentToolPattern;
|
|
1410
|
+
}
|
|
1411
|
+
return endsWithToolResult;
|
|
1412
|
+
}
|
|
1413
|
+
async process(request) {
|
|
1414
|
+
const { threadId, model = this.model, messages: rawMessages, actions, eventSource, forwardedParameters } = request;
|
|
1415
|
+
const tools = actions.map(convertActionInputToAnthropicTool);
|
|
1416
|
+
const messages = [
|
|
1417
|
+
...rawMessages
|
|
1418
|
+
];
|
|
1419
|
+
const instructionsMessage = messages.shift();
|
|
1420
|
+
const instructions = instructionsMessage.isTextMessage() ? instructionsMessage.content : "";
|
|
1421
|
+
const validToolUseIds = /* @__PURE__ */ new Set();
|
|
1422
|
+
for (const message of messages) {
|
|
1423
|
+
if (message.isActionExecutionMessage()) {
|
|
1424
|
+
validToolUseIds.add(message.id);
|
|
1425
|
+
}
|
|
1426
|
+
}
|
|
1427
|
+
const processedToolResultIds = /* @__PURE__ */ new Set();
|
|
1428
|
+
const anthropicMessages = messages.map((message) => {
|
|
1429
|
+
if (message.isResultMessage()) {
|
|
1430
|
+
if (!validToolUseIds.has(message.actionExecutionId)) {
|
|
1431
|
+
return null;
|
|
1432
|
+
}
|
|
1433
|
+
if (processedToolResultIds.has(message.actionExecutionId)) {
|
|
1434
|
+
return null;
|
|
1435
|
+
}
|
|
1436
|
+
processedToolResultIds.add(message.actionExecutionId);
|
|
1437
|
+
return {
|
|
1438
|
+
role: "user",
|
|
1439
|
+
content: [
|
|
1440
|
+
{
|
|
1441
|
+
type: "tool_result",
|
|
1442
|
+
content: message.result || "Action completed successfully",
|
|
1443
|
+
tool_use_id: message.actionExecutionId
|
|
1444
|
+
}
|
|
1445
|
+
]
|
|
1446
|
+
};
|
|
1447
|
+
}
|
|
1448
|
+
return convertMessageToAnthropicMessage(message);
|
|
1449
|
+
}).filter(Boolean).filter((msg) => {
|
|
1450
|
+
if (msg.role === "assistant" && Array.isArray(msg.content)) {
|
|
1451
|
+
const hasEmptyTextOnly = msg.content.length === 1 && msg.content[0].type === "text" && (!msg.content[0].text || msg.content[0].text.trim() === "");
|
|
1452
|
+
return !hasEmptyTextOnly;
|
|
1453
|
+
}
|
|
1454
|
+
return true;
|
|
1455
|
+
});
|
|
1456
|
+
const limitedMessages = limitMessagesToTokenCount2(anthropicMessages, tools, model);
|
|
1457
|
+
const cachedSystemPrompt = this.addSystemPromptCaching(instructions, this.promptCaching.debug);
|
|
1458
|
+
const cachedMessages = this.addIncrementalMessageCaching(limitedMessages, this.promptCaching.debug);
|
|
1459
|
+
let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
|
|
1460
|
+
if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {
|
|
1461
|
+
toolChoice = {
|
|
1462
|
+
type: "tool",
|
|
1463
|
+
name: forwardedParameters.toolChoiceFunctionName
|
|
1464
|
+
};
|
|
1465
|
+
}
|
|
1466
|
+
try {
|
|
1467
|
+
const createParams = {
|
|
1468
|
+
system: cachedSystemPrompt,
|
|
1469
|
+
model: this.model,
|
|
1470
|
+
messages: cachedMessages,
|
|
1471
|
+
max_tokens: (forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) || 1024,
|
|
1472
|
+
...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) ? {
|
|
1473
|
+
temperature: forwardedParameters.temperature
|
|
1474
|
+
} : {},
|
|
1475
|
+
...tools.length > 0 && {
|
|
1476
|
+
tools
|
|
1477
|
+
},
|
|
1478
|
+
...toolChoice && {
|
|
1479
|
+
tool_choice: toolChoice
|
|
1480
|
+
},
|
|
1481
|
+
stream: true
|
|
1482
|
+
};
|
|
1483
|
+
const stream = await this.anthropic.messages.create(createParams);
|
|
1484
|
+
eventSource.stream(async (eventStream$) => {
|
|
1485
|
+
let mode = null;
|
|
1486
|
+
let didOutputText = false;
|
|
1487
|
+
let currentMessageId = randomId3();
|
|
1488
|
+
let currentToolCallId = randomId3();
|
|
1489
|
+
let filterThinkingTextBuffer = new FilterThinkingTextBuffer();
|
|
1490
|
+
let hasReceivedContent = false;
|
|
1491
|
+
try {
|
|
1492
|
+
for await (const chunk of stream) {
|
|
1493
|
+
if (chunk.type === "message_start") {
|
|
1494
|
+
currentMessageId = chunk.message.id;
|
|
1495
|
+
} else if (chunk.type === "content_block_start") {
|
|
1496
|
+
hasReceivedContent = true;
|
|
1497
|
+
if (chunk.content_block.type === "text") {
|
|
1498
|
+
didOutputText = false;
|
|
1499
|
+
filterThinkingTextBuffer.reset();
|
|
1500
|
+
mode = "message";
|
|
1501
|
+
} else if (chunk.content_block.type === "tool_use") {
|
|
1502
|
+
currentToolCallId = chunk.content_block.id;
|
|
1503
|
+
eventStream$.sendActionExecutionStart({
|
|
1504
|
+
actionExecutionId: currentToolCallId,
|
|
1505
|
+
actionName: chunk.content_block.name,
|
|
1506
|
+
parentMessageId: currentMessageId
|
|
1507
|
+
});
|
|
1508
|
+
mode = "function";
|
|
1509
|
+
}
|
|
1510
|
+
} else if (chunk.type === "content_block_delta") {
|
|
1511
|
+
if (chunk.delta.type === "text_delta") {
|
|
1512
|
+
const text = filterThinkingTextBuffer.onTextChunk(chunk.delta.text);
|
|
1513
|
+
if (text.length > 0) {
|
|
1514
|
+
if (!didOutputText) {
|
|
1515
|
+
eventStream$.sendTextMessageStart({
|
|
1516
|
+
messageId: currentMessageId
|
|
1517
|
+
});
|
|
1518
|
+
didOutputText = true;
|
|
1519
|
+
}
|
|
1520
|
+
eventStream$.sendTextMessageContent({
|
|
1521
|
+
messageId: currentMessageId,
|
|
1522
|
+
content: text
|
|
1523
|
+
});
|
|
1524
|
+
}
|
|
1525
|
+
} else if (chunk.delta.type === "input_json_delta") {
|
|
1526
|
+
eventStream$.sendActionExecutionArgs({
|
|
1527
|
+
actionExecutionId: currentToolCallId,
|
|
1528
|
+
args: chunk.delta.partial_json
|
|
1529
|
+
});
|
|
1530
|
+
}
|
|
1531
|
+
} else if (chunk.type === "content_block_stop") {
|
|
1532
|
+
if (mode === "message") {
|
|
1533
|
+
if (didOutputText) {
|
|
1534
|
+
eventStream$.sendTextMessageEnd({
|
|
1535
|
+
messageId: currentMessageId
|
|
1536
|
+
});
|
|
1537
|
+
}
|
|
1538
|
+
} else if (mode === "function") {
|
|
1539
|
+
eventStream$.sendActionExecutionEnd({
|
|
1540
|
+
actionExecutionId: currentToolCallId
|
|
1541
|
+
});
|
|
1542
|
+
}
|
|
1543
|
+
}
|
|
1544
|
+
}
|
|
1545
|
+
} catch (error) {
|
|
1546
|
+
throw convertServiceAdapterError(error, "Anthropic");
|
|
1547
|
+
}
|
|
1548
|
+
if (!hasReceivedContent && this.shouldGenerateFallbackResponse(cachedMessages)) {
|
|
1549
|
+
let fallbackContent = "Task completed successfully.";
|
|
1550
|
+
const lastMessage = cachedMessages[cachedMessages.length - 1];
|
|
1551
|
+
if ((lastMessage == null ? void 0 : lastMessage.role) === "user" && Array.isArray(lastMessage.content)) {
|
|
1552
|
+
const toolResult = lastMessage.content.find((c) => c.type === "tool_result");
|
|
1553
|
+
if ((toolResult == null ? void 0 : toolResult.content) && toolResult.content !== "Action completed successfully") {
|
|
1554
|
+
fallbackContent = toolResult.content;
|
|
1555
|
+
}
|
|
1556
|
+
}
|
|
1557
|
+
currentMessageId = randomId3();
|
|
1558
|
+
eventStream$.sendTextMessageStart({
|
|
1559
|
+
messageId: currentMessageId
|
|
1560
|
+
});
|
|
1561
|
+
eventStream$.sendTextMessageContent({
|
|
1562
|
+
messageId: currentMessageId,
|
|
1563
|
+
content: fallbackContent
|
|
1564
|
+
});
|
|
1565
|
+
eventStream$.sendTextMessageEnd({
|
|
1566
|
+
messageId: currentMessageId
|
|
1567
|
+
});
|
|
1568
|
+
}
|
|
1569
|
+
eventStream$.complete();
|
|
1570
|
+
});
|
|
1571
|
+
} catch (error) {
|
|
1572
|
+
throw convertServiceAdapterError(error, "Anthropic");
|
|
1573
|
+
}
|
|
1574
|
+
return {
|
|
1575
|
+
threadId: threadId || randomUUID5()
|
|
1576
|
+
};
|
|
1577
|
+
}
|
|
1578
|
+
};
|
|
1579
|
+
__name(AnthropicAdapter, "AnthropicAdapter");
|
|
1580
|
+
var THINKING_TAG = "<thinking>";
|
|
1581
|
+
var THINKING_TAG_END = "</thinking>";
|
|
1582
|
+
var FilterThinkingTextBuffer = /* @__PURE__ */ __name(class FilterThinkingTextBuffer2 {
|
|
1583
|
+
buffer;
|
|
1584
|
+
didFilterThinkingTag = false;
|
|
1585
|
+
constructor() {
|
|
1586
|
+
this.buffer = "";
|
|
1587
|
+
}
|
|
1588
|
+
onTextChunk(text) {
|
|
1589
|
+
this.buffer += text;
|
|
1590
|
+
if (this.didFilterThinkingTag) {
|
|
1591
|
+
return text;
|
|
1592
|
+
}
|
|
1593
|
+
const potentialTag = this.buffer.slice(0, THINKING_TAG.length);
|
|
1594
|
+
if (THINKING_TAG.startsWith(potentialTag)) {
|
|
1595
|
+
if (this.buffer.includes(THINKING_TAG_END)) {
|
|
1596
|
+
const end = this.buffer.indexOf(THINKING_TAG_END);
|
|
1597
|
+
const filteredText = this.buffer.slice(end + THINKING_TAG_END.length);
|
|
1598
|
+
this.buffer = filteredText;
|
|
1599
|
+
this.didFilterThinkingTag = true;
|
|
1600
|
+
return filteredText;
|
|
1601
|
+
} else {
|
|
1602
|
+
return "";
|
|
1603
|
+
}
|
|
1604
|
+
}
|
|
1605
|
+
return text;
|
|
1606
|
+
}
|
|
1607
|
+
reset() {
|
|
1608
|
+
this.buffer = "";
|
|
1609
|
+
this.didFilterThinkingTag = false;
|
|
1610
|
+
}
|
|
1611
|
+
}, "FilterThinkingTextBuffer");
|
|
1612
|
+
|
|
1613
|
+
// src/service-adapters/experimental/ollama/ollama-adapter.ts
|
|
1614
|
+
import { Ollama } from "@langchain/community/llms/ollama";
|
|
1615
|
+
import { randomId as randomId4, randomUUID as randomUUID6 } from "@copilotkit/shared";
|
|
1616
|
+
var DEFAULT_MODEL4 = "llama3:latest";
|
|
1617
|
+
var ExperimentalOllamaAdapter = class {
|
|
1618
|
+
model;
|
|
1619
|
+
constructor(options) {
|
|
1620
|
+
if (options == null ? void 0 : options.model) {
|
|
1621
|
+
this.model = options.model;
|
|
1622
|
+
} else {
|
|
1623
|
+
this.model = DEFAULT_MODEL4;
|
|
1624
|
+
}
|
|
1625
|
+
}
|
|
1626
|
+
async process(request) {
|
|
1627
|
+
const { messages, actions, eventSource } = request;
|
|
1628
|
+
const ollama = new Ollama({
|
|
1629
|
+
model: this.model
|
|
1630
|
+
});
|
|
1631
|
+
const contents = messages.filter((m) => m.isTextMessage()).map((m) => m.content);
|
|
1632
|
+
const _stream = await ollama.stream(contents);
|
|
1633
|
+
eventSource.stream(async (eventStream$) => {
|
|
1634
|
+
const currentMessageId = randomId4();
|
|
1635
|
+
eventStream$.sendTextMessageStart({
|
|
1636
|
+
messageId: currentMessageId
|
|
1637
|
+
});
|
|
1638
|
+
for await (const chunkText of _stream) {
|
|
1639
|
+
eventStream$.sendTextMessageContent({
|
|
1640
|
+
messageId: currentMessageId,
|
|
1641
|
+
content: chunkText
|
|
1642
|
+
});
|
|
1643
|
+
}
|
|
1644
|
+
eventStream$.sendTextMessageEnd({
|
|
1645
|
+
messageId: currentMessageId
|
|
1646
|
+
});
|
|
1647
|
+
eventStream$.complete();
|
|
1648
|
+
});
|
|
1649
|
+
return {
|
|
1650
|
+
threadId: request.threadId || randomUUID6()
|
|
1651
|
+
};
|
|
1652
|
+
}
|
|
1653
|
+
};
|
|
1654
|
+
__name(ExperimentalOllamaAdapter, "ExperimentalOllamaAdapter");
|
|
1655
|
+
|
|
1656
|
+
// src/service-adapters/bedrock/bedrock-adapter.ts
|
|
1657
|
+
import { ChatBedrockConverse } from "@langchain/aws";
|
|
1658
|
+
var BedrockAdapter = class extends LangChainAdapter {
|
|
1659
|
+
constructor(options) {
|
|
1660
|
+
super({
|
|
1661
|
+
chainFn: async ({ messages, tools, threadId }) => {
|
|
1662
|
+
const model = new ChatBedrockConverse({
|
|
1663
|
+
model: (options == null ? void 0 : options.model) ?? "amazon.nova-lite-v1:0",
|
|
1664
|
+
region: (options == null ? void 0 : options.region) ?? "us-east-1",
|
|
1665
|
+
credentials: (options == null ? void 0 : options.credentials) ? {
|
|
1666
|
+
accessKeyId: options.credentials.accessKeyId,
|
|
1667
|
+
secretAccessKey: options.credentials.secretAccessKey
|
|
1668
|
+
} : void 0
|
|
1669
|
+
}).bindTools(tools);
|
|
1670
|
+
return model.stream(messages);
|
|
1671
|
+
}
|
|
1672
|
+
});
|
|
1673
|
+
}
|
|
1674
|
+
};
|
|
1675
|
+
__name(BedrockAdapter, "BedrockAdapter");
|
|
1676
|
+
|
|
1677
|
+
// src/service-adapters/empty/empty-adapter.ts
|
|
1678
|
+
import { randomUUID as randomUUID7 } from "@copilotkit/shared";
|
|
1679
|
+
var EmptyAdapter = class {
|
|
1680
|
+
async process(request) {
|
|
1681
|
+
return {
|
|
1682
|
+
threadId: request.threadId || randomUUID7()
|
|
1683
|
+
};
|
|
1684
|
+
}
|
|
1685
|
+
};
|
|
1686
|
+
__name(EmptyAdapter, "EmptyAdapter");
|
|
1687
|
+
var ExperimentalEmptyAdapter = EmptyAdapter;
|
|
1688
|
+
|
|
1689
|
+
export {
|
|
1690
|
+
OpenAIAdapter,
|
|
1691
|
+
streamLangChainResponse,
|
|
1692
|
+
LangChainAdapter,
|
|
1693
|
+
GoogleGenerativeAIAdapter,
|
|
1694
|
+
OpenAIAssistantAdapter,
|
|
1695
|
+
UnifyAdapter,
|
|
1696
|
+
GroqAdapter,
|
|
1697
|
+
RemoteChain,
|
|
1698
|
+
AnthropicAdapter,
|
|
1699
|
+
ExperimentalOllamaAdapter,
|
|
1700
|
+
BedrockAdapter,
|
|
1701
|
+
EmptyAdapter,
|
|
1702
|
+
ExperimentalEmptyAdapter
|
|
1703
|
+
};
|
|
1704
|
+
//# sourceMappingURL=chunk-27JKTS6P.mjs.map
|