@copilotkit/runtime 1.50.0-beta.8 → 1.50.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +168 -0
- package/dist/chunk-27JKTS6P.mjs +1704 -0
- package/dist/chunk-27JKTS6P.mjs.map +1 -0
- package/dist/chunk-2GPTVDTO.mjs +25 -0
- package/dist/chunk-2GPTVDTO.mjs.map +1 -0
- package/dist/chunk-2OZAGFV3.mjs +43 -0
- package/dist/chunk-2OZAGFV3.mjs.map +1 -0
- package/dist/chunk-3AJVKDZX.mjs +3097 -0
- package/dist/chunk-3AJVKDZX.mjs.map +1 -0
- package/dist/chunk-45RCC3ZS.mjs +25 -0
- package/dist/chunk-45RCC3ZS.mjs.map +1 -0
- package/dist/chunk-4EHJ4XFJ.mjs +25 -0
- package/dist/chunk-4EHJ4XFJ.mjs.map +1 -0
- package/dist/chunk-4IANB4TC.mjs +25 -0
- package/dist/chunk-4IANB4TC.mjs.map +1 -0
- package/dist/chunk-4KES76K3.mjs +74 -0
- package/dist/chunk-4KES76K3.mjs.map +1 -0
- package/dist/chunk-4OGE3SLW.mjs +3100 -0
- package/dist/chunk-4OGE3SLW.mjs.map +1 -0
- package/dist/chunk-54YJBMCP.mjs +3097 -0
- package/dist/chunk-54YJBMCP.mjs.map +1 -0
- package/dist/chunk-62NE5S6M.mjs +226 -0
- package/dist/chunk-62NE5S6M.mjs.map +1 -0
- package/dist/chunk-6ER4SZYH.mjs +74 -0
- package/dist/chunk-6ER4SZYH.mjs.map +1 -0
- package/dist/chunk-6TNSLHVR.mjs +74 -0
- package/dist/chunk-6TNSLHVR.mjs.map +1 -0
- package/dist/chunk-6XRUR5UK.mjs +1 -0
- package/dist/chunk-6XRUR5UK.mjs.map +1 -0
- package/dist/chunk-7V4BK7TZ.mjs +25 -0
- package/dist/chunk-7V4BK7TZ.mjs.map +1 -0
- package/dist/chunk-7YZIEXD2.mjs +74 -0
- package/dist/chunk-7YZIEXD2.mjs.map +1 -0
- package/dist/chunk-A4XHOAFU.mjs +25 -0
- package/dist/chunk-A4XHOAFU.mjs.map +1 -0
- package/dist/chunk-A555KEAD.mjs +6020 -0
- package/dist/chunk-A555KEAD.mjs.map +1 -0
- package/dist/chunk-AF73TFTX.mjs +74 -0
- package/dist/chunk-AF73TFTX.mjs.map +1 -0
- package/dist/chunk-AMUJQ6IR.mjs +50 -0
- package/dist/chunk-AMUJQ6IR.mjs.map +1 -0
- package/dist/chunk-AQG2SVCA.mjs +25 -0
- package/dist/chunk-AQG2SVCA.mjs.map +1 -0
- package/dist/chunk-BJZHMXND.mjs +74 -0
- package/dist/chunk-BJZHMXND.mjs.map +1 -0
- package/dist/chunk-CB2OJXF6.mjs +25 -0
- package/dist/chunk-CB2OJXF6.mjs.map +1 -0
- package/dist/chunk-CEOMFPJU.mjs +6020 -0
- package/dist/chunk-CEOMFPJU.mjs.map +1 -0
- package/dist/chunk-CZVLR7CC.mjs +175 -0
- package/dist/chunk-CZVLR7CC.mjs.map +1 -0
- package/dist/chunk-DCEEHMLJ.mjs +1127 -0
- package/dist/chunk-DCEEHMLJ.mjs.map +1 -0
- package/dist/chunk-DE3CLKUG.mjs +25 -0
- package/dist/chunk-DE3CLKUG.mjs.map +1 -0
- package/dist/chunk-DTPRUTNV.mjs +25 -0
- package/dist/chunk-DTPRUTNV.mjs.map +1 -0
- package/dist/chunk-ERUOA47O.mjs +626 -0
- package/dist/chunk-ERUOA47O.mjs.map +1 -0
- package/dist/chunk-ESSRC64W.mjs +74 -0
- package/dist/chunk-ESSRC64W.mjs.map +1 -0
- package/dist/chunk-FHD4JECV.mjs +33 -0
- package/dist/chunk-FHD4JECV.mjs.map +1 -0
- package/dist/chunk-GRAN6K6N.mjs +25 -0
- package/dist/chunk-GRAN6K6N.mjs.map +1 -0
- package/dist/chunk-I27F2UPA.mjs +175 -0
- package/dist/chunk-I27F2UPA.mjs.map +1 -0
- package/dist/chunk-IAZKTOQW.mjs +25 -0
- package/dist/chunk-IAZKTOQW.mjs.map +1 -0
- package/dist/chunk-J6XZ5MFB.mjs +25 -0
- package/dist/chunk-J6XZ5MFB.mjs.map +1 -0
- package/dist/chunk-JJ32MA4C.mjs +73 -0
- package/dist/chunk-JJ32MA4C.mjs.map +1 -0
- package/dist/chunk-JJY4ZTHQ.mjs +25 -0
- package/dist/chunk-JJY4ZTHQ.mjs.map +1 -0
- package/dist/chunk-KEYLBFU2.mjs +3117 -0
- package/dist/chunk-KEYLBFU2.mjs.map +1 -0
- package/dist/chunk-KQ53L4WZ.mjs +3094 -0
- package/dist/chunk-KQ53L4WZ.mjs.map +1 -0
- package/dist/chunk-KTELVQ67.mjs +3098 -0
- package/dist/chunk-KTELVQ67.mjs.map +1 -0
- package/dist/chunk-LPEPX6NH.mjs +25 -0
- package/dist/chunk-LPEPX6NH.mjs.map +1 -0
- package/dist/chunk-MDXE55DK.mjs +3117 -0
- package/dist/chunk-MDXE55DK.mjs.map +1 -0
- package/dist/chunk-MMFUVOXH.mjs +73 -0
- package/dist/chunk-MMFUVOXH.mjs.map +1 -0
- package/dist/chunk-N3Y4U66N.mjs +253 -0
- package/dist/chunk-N3Y4U66N.mjs.map +1 -0
- package/dist/chunk-O7UYB4MH.mjs +25 -0
- package/dist/chunk-O7UYB4MH.mjs.map +1 -0
- package/dist/chunk-OFNVQHNM.mjs +3089 -0
- package/dist/chunk-OFNVQHNM.mjs.map +1 -0
- package/dist/chunk-OFSV5GET.mjs +3074 -0
- package/dist/chunk-OFSV5GET.mjs.map +1 -0
- package/dist/chunk-OMRST67R.mjs +25 -0
- package/dist/chunk-OMRST67R.mjs.map +1 -0
- package/dist/chunk-OWIGJONH.mjs +275 -0
- package/dist/chunk-OWIGJONH.mjs.map +1 -0
- package/dist/chunk-PRZHE74A.mjs +25 -0
- package/dist/chunk-PRZHE74A.mjs.map +1 -0
- package/dist/chunk-PTYRVXXP.mjs +80 -0
- package/dist/chunk-PTYRVXXP.mjs.map +1 -0
- package/dist/chunk-R22B5CCO.mjs +25 -0
- package/dist/chunk-R22B5CCO.mjs.map +1 -0
- package/dist/chunk-SHBDMA63.mjs +141 -0
- package/dist/chunk-SHBDMA63.mjs.map +1 -0
- package/dist/chunk-SPVXBPRA.mjs +74 -0
- package/dist/chunk-SPVXBPRA.mjs.map +1 -0
- package/dist/chunk-T72G46ME.mjs +25 -0
- package/dist/chunk-T72G46ME.mjs.map +1 -0
- package/dist/chunk-TGELROPU.mjs +25 -0
- package/dist/chunk-TGELROPU.mjs.map +1 -0
- package/dist/chunk-UNX4IAAD.mjs +25 -0
- package/dist/chunk-UNX4IAAD.mjs.map +1 -0
- package/dist/chunk-V4DHVC7M.mjs +3085 -0
- package/dist/chunk-V4DHVC7M.mjs.map +1 -0
- package/dist/chunk-VVRFOB66.mjs +25 -0
- package/dist/chunk-VVRFOB66.mjs.map +1 -0
- package/dist/chunk-W6NVBYM6.mjs +80 -0
- package/dist/chunk-W6NVBYM6.mjs.map +1 -0
- package/dist/chunk-W7MBACGC.mjs +74 -0
- package/dist/chunk-W7MBACGC.mjs.map +1 -0
- package/dist/chunk-WMD4XZZS.mjs +25 -0
- package/dist/chunk-WMD4XZZS.mjs.map +1 -0
- package/dist/chunk-WX2ZNCRT.mjs +74 -0
- package/dist/chunk-WX2ZNCRT.mjs.map +1 -0
- package/dist/chunk-XWBDEXDA.mjs +153 -0
- package/dist/chunk-XWBDEXDA.mjs.map +1 -0
- package/dist/chunk-Y2Z62E2T.mjs +74 -0
- package/dist/chunk-Y2Z62E2T.mjs.map +1 -0
- package/dist/chunk-YO4I6RVI.mjs +25 -0
- package/dist/chunk-YO4I6RVI.mjs.map +1 -0
- package/dist/chunk-Z6Q5IW6I.mjs +3098 -0
- package/dist/chunk-Z6Q5IW6I.mjs.map +1 -0
- package/dist/chunk-Z726O3G2.mjs +25 -0
- package/dist/chunk-Z726O3G2.mjs.map +1 -0
- package/dist/chunk-ZE4SMZZR.mjs +3097 -0
- package/dist/chunk-ZE4SMZZR.mjs.map +1 -0
- package/dist/chunk-ZULZB33C.mjs +73 -0
- package/dist/chunk-ZULZB33C.mjs.map +1 -0
- package/dist/chunk-ZVRGXMY7.mjs +25 -0
- package/dist/chunk-ZVRGXMY7.mjs.map +1 -0
- package/dist/chunk-ZZ35WBYQ.mjs +25 -0
- package/dist/chunk-ZZ35WBYQ.mjs.map +1 -0
- package/dist/graphql/message-conversion/index.d.ts +18 -0
- package/dist/graphql/message-conversion/index.js +725 -0
- package/dist/graphql/message-conversion/index.js.map +1 -0
- package/dist/graphql/message-conversion/index.mjs +245 -0
- package/dist/graphql/message-conversion/index.mjs.map +1 -0
- package/dist/graphql/types/base/index.d.ts +6 -0
- package/dist/graphql/types/base/index.js +63 -0
- package/dist/graphql/types/base/index.js.map +1 -0
- package/dist/graphql/types/base/index.mjs +8 -0
- package/dist/graphql/types/base/index.mjs.map +1 -0
- package/dist/graphql/types/converted/index.d.ts +2 -0
- package/dist/graphql/types/converted/index.js +200 -0
- package/dist/graphql/types/converted/index.js.map +1 -0
- package/dist/graphql/types/converted/index.mjs +19 -0
- package/dist/graphql/types/converted/index.mjs.map +1 -0
- package/dist/groq-adapter-540da9c3.d.ts +331 -0
- package/dist/groq-adapter-a6f5e9d2.d.ts +331 -0
- package/dist/groq-adapter-c8aec5c5.d.ts +321 -0
- package/dist/index-96b330da.d.ts +119 -0
- package/dist/index-adbd78f1.d.ts +154 -0
- package/dist/index.d.ts +67 -8
- package/dist/index.js +118 -28
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +120 -31
- package/dist/index.mjs.map +1 -1
- package/dist/langserve-0c6100e3.d.ts +257 -0
- package/dist/langserve-978d5790.d.ts +243 -0
- package/dist/langserve-9fc76ce5.d.ts +243 -0
- package/dist/lib/cloud/index.d.ts +6 -0
- package/dist/lib/cloud/index.js +18 -0
- package/dist/lib/cloud/index.js.map +1 -0
- package/dist/lib/cloud/index.mjs +1 -0
- package/dist/lib/cloud/index.mjs.map +1 -0
- package/dist/lib/index.d.ts +212 -0
- package/dist/lib/index.js +7843 -0
- package/dist/lib/index.js.map +1 -0
- package/dist/lib/index.mjs +76 -0
- package/dist/lib/index.mjs.map +1 -0
- package/dist/lib/integrations/index.d.ts +34 -0
- package/dist/lib/integrations/index.js +3052 -0
- package/dist/lib/integrations/index.js.map +1 -0
- package/dist/lib/integrations/index.mjs +37 -0
- package/dist/lib/integrations/index.mjs.map +1 -0
- package/dist/lib/integrations/nest/index.d.ts +15 -0
- package/dist/lib/integrations/nest/index.js +2959 -0
- package/dist/lib/integrations/nest/index.js.map +1 -0
- package/dist/lib/integrations/nest/index.mjs +14 -0
- package/dist/lib/integrations/nest/index.mjs.map +1 -0
- package/dist/lib/integrations/node-express/index.d.ts +15 -0
- package/dist/lib/integrations/node-express/index.js +2959 -0
- package/dist/lib/integrations/node-express/index.js.map +1 -0
- package/dist/lib/integrations/node-express/index.mjs +14 -0
- package/dist/lib/integrations/node-express/index.mjs.map +1 -0
- package/dist/lib/integrations/node-http/index.d.ts +15 -0
- package/dist/lib/integrations/node-http/index.js +2945 -0
- package/dist/lib/integrations/node-http/index.js.map +1 -0
- package/dist/lib/integrations/node-http/index.mjs +13 -0
- package/dist/lib/integrations/node-http/index.mjs.map +1 -0
- package/dist/service-adapters/index.d.ts +162 -0
- package/dist/service-adapters/index.js +1787 -0
- package/dist/service-adapters/index.js.map +1 -0
- package/dist/service-adapters/index.mjs +34 -0
- package/dist/service-adapters/index.mjs.map +1 -0
- package/dist/service-adapters/shared/index.d.ts +9 -0
- package/dist/service-adapters/shared/index.js +72 -0
- package/dist/service-adapters/shared/index.js.map +1 -0
- package/dist/service-adapters/shared/index.mjs +8 -0
- package/dist/service-adapters/shared/index.mjs.map +1 -0
- package/dist/shared-0a7346ce.d.ts +466 -0
- package/dist/shared-35c6eb04.d.ts +448 -0
- package/dist/shared-9ed1dc31.d.ts +414 -0
- package/dist/shared-da5708fe.d.ts +449 -0
- package/dist/utils/index.d.ts +65 -0
- package/dist/utils/index.js +175 -0
- package/dist/utils/index.js.map +1 -0
- package/dist/utils/index.mjs +12 -0
- package/dist/utils/index.mjs.map +1 -0
- package/package.json +14 -20
- package/src/lib/index.ts +7 -6
- package/src/lib/runtime/copilot-runtime.ts +62 -26
- package/src/lib/runtime/telemetry-agent-runner.ts +139 -0
|
@@ -0,0 +1,1787 @@
|
|
|
1
|
+
// esbuild-generated CommonJS interop helpers: cached intrinsics plus the
// shims used throughout this bundle to emulate ES-module semantics.
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Re-attaches a readable `name` to functions/classes after bundling.
var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
// Installs an enumerable lazy getter on `target` for each entry of `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as live getters, skipping `except`
// and keys already present; preserves each property's enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wraps a CommonJS module so it can be consumed like an ES module import.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
// Marks an exports object as an ES module, then copies the module's bindings onto it.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
29
|
+
|
|
30
|
+
// src/service-adapters/index.ts
// CommonJS entry point for the service-adapters subpath: re-exports every
// built-in LLM service adapter plus the shared error-conversion helper.
// `__export` installs lazy getters, so each binding is resolved on first access.
var service_adapters_exports = {};
__export(service_adapters_exports, {
  AnthropicAdapter: () => AnthropicAdapter,
  BedrockAdapter: () => BedrockAdapter,
  EmptyAdapter: () => EmptyAdapter,
  ExperimentalEmptyAdapter: () => ExperimentalEmptyAdapter,
  ExperimentalOllamaAdapter: () => ExperimentalOllamaAdapter,
  GoogleGenerativeAIAdapter: () => GoogleGenerativeAIAdapter,
  GroqAdapter: () => GroqAdapter,
  LangChainAdapter: () => LangChainAdapter,
  OpenAIAdapter: () => OpenAIAdapter,
  OpenAIAssistantAdapter: () => OpenAIAssistantAdapter,
  RemoteChain: () => RemoteChain,
  UnifyAdapter: () => UnifyAdapter,
  convertServiceAdapterError: () => convertServiceAdapterError
});
module.exports = __toCommonJS(service_adapters_exports);
|
|
48
|
+
|
|
49
|
+
// src/service-adapters/langchain/langserve.ts
|
|
50
|
+
var import_remote = require("langchain/runnables/remote");
|
|
51
|
+
// Wraps a remote LangServe chain as a CopilotKit action. The chain's input
// schema is either supplied up front via `options.parameters` or inferred
// lazily from the chain's `/input_schema` endpoint.
var RemoteChain = class {
  name;
  description;
  chainUrl;
  parameters;
  // "single": the chain takes one bare value; "multi": a keyed object of args.
  parameterType;
  constructor(options) {
    this.name = options.name;
    this.description = options.description;
    this.chainUrl = options.chainUrl;
    this.parameters = options.parameters;
    this.parameterType = options.parameterType || "multi";
  }
  // Builds the action descriptor. Infers parameters from the remote schema
  // when none were provided at construction time.
  async toAction() {
    if (!this.parameters) {
      await this.inferLangServeParameters();
    }
    return {
      name: this.name,
      description: this.description,
      parameters: this.parameters,
      handler: async (args) => {
        const runnable = new import_remote.RemoteRunnable({
          url: this.chainUrl
        });
        let input;
        if (this.parameterType === "single") {
          // Single-input chains receive the first argument's bare value.
          input = args[Object.keys(args)[0]];
        } else {
          input = args;
        }
        return await runnable.invoke(input);
      }
    };
  }
  // Fetches `<chainUrl>/input_schema` and derives the parameter list.
  // Throws when the schema is neither a supported primitive nor a flat
  // object whose properties are all supported primitives.
  async inferLangServeParameters() {
    const supportedTypes = [
      "string",
      "number",
      "boolean"
    ];
    // Strip trailing slashes before appending the endpoint path.
    let schemaUrl = this.chainUrl.replace(/\/+$/, "") + "/input_schema";
    let schema = await fetch(schemaUrl).then((res) => res.json()).catch(() => {
      throw new Error("Failed to fetch langserve schema at " + schemaUrl);
    });
    if (supportedTypes.includes(schema.type)) {
      // Primitive root schema → one synthetic "input" parameter.
      this.parameterType = "single";
      this.parameters = [
        {
          name: "input",
          type: schema.type,
          description: "The input to the chain"
        }
      ];
    } else if (schema.type === "object") {
      // Object schema → one parameter per property.
      this.parameterType = "multi";
      this.parameters = Object.keys(schema.properties).map((key) => {
        var _a;
        let property = schema.properties[key];
        if (!supportedTypes.includes(property.type)) {
          throw new Error("Unsupported schema type");
        }
        return {
          name: key,
          type: property.type,
          description: property.description || "",
          required: ((_a = schema.required) == null ? void 0 : _a.includes(key)) || false
        };
      });
    } else {
      throw new Error("Unsupported schema type");
    }
  }
};
__name(RemoteChain, "RemoteChain");
|
|
126
|
+
|
|
127
|
+
// src/service-adapters/shared/error-utils.ts
|
|
128
|
+
var import_shared = require("@copilotkit/shared");
|
|
129
|
+
/**
 * Normalizes an arbitrary error thrown by a provider SDK into a
 * CopilotKitLowLevelError with a CopilotKit error code derived from the
 * HTTP status (401 → authentication, other 4xx → configuration,
 * 5xx or no status → network).
 *
 * @param error arbitrary thrown value from a service adapter call
 * @param adapterName human-readable adapter name used in the message/url
 * @returns a CopilotKitLowLevelError enriched with statusCode/responseData/originalErrorType
 */
function convertServiceAdapterError(error, adapterName) {
  // Different SDKs expose diagnostics in different places — probe them all.
  const errorName = error?.constructor?.name || error.name;
  const errorMessage = error?.message || String(error);
  const statusCode = error.status || error.statusCode || error.response?.status;
  const responseData = error.error || error.response?.data || error.data;

  const structuredError = new import_shared.CopilotKitLowLevelError({
    error: error instanceof Error ? error : new Error(errorMessage),
    url: `${adapterName} service adapter`,
    message: `${adapterName} API error: ${errorMessage}`
  });
  if (statusCode) {
    structuredError.statusCode = statusCode;
  }
  if (responseData) {
    structuredError.responseData = responseData;
  }
  if (errorName) {
    structuredError.originalErrorType = errorName;
  }

  // Map the HTTP status class onto a CopilotKit error code.
  const { CopilotKitErrorCode } = import_shared;
  let newCode;
  if (statusCode === 401) {
    newCode = CopilotKitErrorCode.AUTHENTICATION_ERROR;
  } else if (statusCode >= 400 && statusCode < 500) {
    newCode = CopilotKitErrorCode.CONFIGURATION_ERROR;
  } else if (statusCode >= 500) {
    newCode = CopilotKitErrorCode.NETWORK_ERROR;
  } else if (statusCode) {
    // Any other truthy (non-HTTP-class) status is treated as configuration.
    newCode = CopilotKitErrorCode.CONFIGURATION_ERROR;
  } else {
    // No status at all: assume a transport-level failure.
    newCode = CopilotKitErrorCode.NETWORK_ERROR;
  }
  structuredError.code = newCode;
  if (structuredError.extensions) {
    structuredError.extensions.code = newCode;
  }
  return structuredError;
}
|
|
167
|
+
__name(convertServiceAdapterError, "convertServiceAdapterError");
|
|
168
|
+
|
|
169
|
+
// src/service-adapters/openai/openai-adapter.ts
|
|
170
|
+
var import_openai = __toESM(require("openai"));
|
|
171
|
+
|
|
172
|
+
// src/service-adapters/openai/utils.ts
|
|
173
|
+
var import_shared2 = require("@copilotkit/shared");
|
|
174
|
+
/**
 * Trims a conversation so its estimated token count fits the model's budget.
 * System/developer messages are always kept (and charged up front); the rest
 * of the history is retained newest-first until the budget is exhausted.
 *
 * @param messages OpenAI-format chat messages, oldest first
 * @param tools tool definitions sent alongside the messages
 * @param model model name, used to look up the default budget
 * @param maxTokens optional explicit budget; falls back to the model default
 * @returns the kept messages in their original order
 * @throws when tools alone, or the system messages, exceed the budget
 */
function limitMessagesToTokenCount(messages, tools, model, maxTokens) {
  if (!maxTokens) {
    maxTokens = maxTokensForOpenAIModel(model);
  }
  const isSystemLike = (message) => ["system", "developer"].includes(message.role);

  // Reserve budget for the tool/function definitions first.
  const toolsNumTokens = countToolsTokens(model, tools);
  if (toolsNumTokens > maxTokens) {
    throw new Error(`Too many tokens in function definitions: ${toolsNumTokens} > ${maxTokens}`);
  }
  maxTokens -= toolsNumTokens;

  // System-like messages are unconditionally kept, so charge them up front.
  for (const message of messages) {
    if (isSystemLike(message)) {
      maxTokens -= countMessageTokens(model, message);
      if (maxTokens < 0) {
        throw new Error("Not enough tokens for system message.");
      }
    }
  }

  // Walk from newest to oldest; once one message doesn't fit, all older
  // non-system messages are dropped too (cutoff), preserving contiguity.
  const result = [];
  let cutoff = false;
  for (let i = messages.length - 1; i >= 0; i--) {
    const message = messages[i];
    if (isSystemLike(message)) {
      result.unshift(message);
      continue;
    }
    if (cutoff) {
      continue;
    }
    const numTokens = countMessageTokens(model, message);
    if (maxTokens < numTokens) {
      cutoff = true;
      continue;
    }
    result.unshift(message);
    maxTokens -= numTokens;
  }
  return result;
}
|
|
218
|
+
__name(limitMessagesToTokenCount, "limitMessagesToTokenCount");
|
|
219
|
+
/**
 * Looks up the token budget for an OpenAI model name.
 *
 * @param model model identifier, e.g. "gpt-4o"
 * @returns the model's entry in maxTokensByModel, or DEFAULT_MAX_TOKENS when unknown
 */
function maxTokensForOpenAIModel(model) {
  const known = maxTokensByModel[model];
  return known || DEFAULT_MAX_TOKENS;
}
|
|
222
|
+
__name(maxTokensForOpenAIModel, "maxTokensForOpenAIModel");
|
|
223
|
+
// Fallback token budget for models not listed in the table below.
var DEFAULT_MAX_TOKENS = 128e3;
// Per-model token budgets used by maxTokensForOpenAIModel /
// limitMessagesToTokenCount. Values appear to mirror each model's context
// window size — confirm against OpenAI's model documentation when updating.
var maxTokensByModel = {
  // o1
  o1: 2e5,
  "o1-2024-12-17": 2e5,
  "o1-mini": 128e3,
  "o1-mini-2024-09-12": 128e3,
  "o1-preview": 128e3,
  "o1-preview-2024-09-12": 128e3,
  // o3-mini
  "o3-mini": 2e5,
  "o3-mini-2025-01-31": 2e5,
  // GPT-4
  "gpt-4o": 128e3,
  "chatgpt-4o-latest": 128e3,
  "gpt-4o-2024-08-06": 128e3,
  "gpt-4o-2024-05-13": 128e3,
  "gpt-4o-mini": 128e3,
  "gpt-4o-mini-2024-07-18": 128e3,
  "gpt-4-turbo": 128e3,
  "gpt-4-turbo-2024-04-09": 128e3,
  "gpt-4-0125-preview": 128e3,
  "gpt-4-turbo-preview": 128e3,
  "gpt-4-1106-preview": 128e3,
  "gpt-4-vision-preview": 128e3,
  "gpt-4-1106-vision-preview": 128e3,
  "gpt-4-32k": 32768,
  "gpt-4-32k-0613": 32768,
  "gpt-4-32k-0314": 32768,
  "gpt-4": 8192,
  "gpt-4-0613": 8192,
  "gpt-4-0314": 8192,
  // GPT-3.5
  "gpt-3.5-turbo-0125": 16385,
  "gpt-3.5-turbo": 16385,
  "gpt-3.5-turbo-1106": 16385,
  "gpt-3.5-turbo-instruct": 4096,
  "gpt-3.5-turbo-16k": 16385,
  "gpt-3.5-turbo-0613": 4096,
  "gpt-3.5-turbo-16k-0613": 16385,
  "gpt-3.5-turbo-0301": 4097
};
|
|
265
|
+
/**
 * Estimates the token cost of a set of tool definitions by serializing them
 * to JSON and running the generic character-based estimator.
 *
 * @param tools array of tool definition objects
 * @returns 0 for an empty array, otherwise the estimated token count
 */
function countToolsTokens(model, tools) {
  // No tools → no token cost.
  if (!tools.length) {
    return 0;
  }
  return countTokens(model, JSON.stringify(tools));
}
|
|
272
|
+
__name(countToolsTokens, "countToolsTokens");
|
|
273
|
+
/**
 * Estimates the token cost of a single chat message's text content.
 * Messages without string content (e.g. pure tool calls) count as empty.
 */
function countMessageTokens(model, message) {
  const text = message.content || "";
  return countTokens(model, text);
}
|
|
276
|
+
__name(countMessageTokens, "countMessageTokens");
|
|
277
|
+
/**
 * Crude token estimate: roughly 3 characters per token, regardless of model.
 * `model` is accepted only for signature symmetry with the other counters.
 */
function countTokens(model, text) {
  const CHARS_PER_TOKEN = 3;
  return text.length / CHARS_PER_TOKEN;
}
|
|
280
|
+
__name(countTokens, "countTokens");
|
|
281
|
+
/**
 * Converts a CopilotKit action definition into OpenAI's tool (function)
 * format. The action's serialized JSON schema becomes the tool's parameters;
 * an unparsable schema falls back to an empty object via parseJson.
 */
function convertActionInputToOpenAITool(action) {
  const parameters = (0, import_shared2.parseJson)(action.jsonSchema, {});
  const fn = {
    name: action.name,
    description: action.description,
    parameters
  };
  return { type: "function", function: fn };
}
|
|
291
|
+
__name(convertActionInputToOpenAITool, "convertActionInputToOpenAITool");
|
|
292
|
+
/**
 * Converts a CopilotKit runtime message into OpenAI chat-completions format.
 * Text, image, action-execution, and result messages map to text, image_url,
 * assistant tool_calls, and tool messages respectively; any other kind yields
 * undefined (matching the original implicit fall-through).
 *
 * @param options.keepSystemRole when false (default), "system" roles are
 *   rewritten to "developer".
 */
function convertMessageToOpenAIMessage(message, options) {
  const keepSystemRole = (options || { keepSystemRole: false }).keepSystemRole;
  if (message.isTextMessage()) {
    const role = message.role === "system" && !keepSystemRole ? "developer" : message.role;
    return { role, content: message.content };
  }
  if (message.isImageMessage()) {
    const dataUrl = `data:image/${message.format};base64,${message.bytes}`;
    return {
      role: "user",
      content: [{ type: "image_url", image_url: { url: dataUrl } }]
    };
  }
  if (message.isActionExecutionMessage()) {
    const call = {
      id: message.id,
      type: "function",
      function: {
        name: message.name,
        arguments: JSON.stringify(message.arguments)
      }
    };
    return { role: "assistant", tool_calls: [call] };
  }
  if (message.isResultMessage()) {
    return {
      role: "tool",
      content: message.result,
      tool_call_id: message.actionExecutionId
    };
  }
  // Unknown message kinds fall through and return undefined.
}
|
|
339
|
+
__name(convertMessageToOpenAIMessage, "convertMessageToOpenAIMessage");
|
|
340
|
+
/**
 * Rewrites system/developer-role messages as assistant messages carrying a
 * marker prefix (for APIs that don't accept system roles, per the function
 * name — Assistants API). Other roles pass through as a shallow copy.
 */
function convertSystemMessageToAssistantAPI(message) {
  const copy = { ...message };
  if (["system", "developer"].includes(message.role)) {
    copy.role = "assistant";
    copy.content = "THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: " + message.content;
  }
  return copy;
}
|
|
352
|
+
__name(convertSystemMessageToAssistantAPI, "convertSystemMessageToAssistantAPI");
|
|
353
|
+
|
|
354
|
+
// src/service-adapters/openai/openai-adapter.ts
|
|
355
|
+
var import_shared3 = require("@copilotkit/shared");
var DEFAULT_MODEL = "gpt-4o";
// Service adapter that drives OpenAI's streaming Chat Completions API and
// republishes the delta stream as CopilotKit runtime events.
var OpenAIAdapter = class {
  // Model used when the incoming request does not specify one.
  model = DEFAULT_MODEL;
  // When true, `parallel_tool_calls: false` is sent with each request.
  disableParallelToolCalls = false;
  // Underlying OpenAI SDK client (caller-supplied or constructed with defaults).
  _openai;
  // When false (default), "system" roles are rewritten to "developer" during
  // message conversion (see convertMessageToOpenAIMessage).
  keepSystemRole = false;
  get openai() {
    return this._openai;
  }
  constructor(params) {
    this._openai = (params == null ? void 0 : params.openai) || new import_openai.default({});
    if (params == null ? void 0 : params.model) {
      this.model = params.model;
    }
    this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
    this.keepSystemRole = (params == null ? void 0 : params.keepSystemRole) ?? false;
  }
  // Runs one chat round-trip: converts runtime messages/actions to OpenAI's
  // wire format, streams the completion, and forwards every delta through
  // `eventSource`. Resolves with the (possibly newly generated) thread id.
  async process(request) {
    const { threadId: threadIdFromRequest, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
    const tools = actions.map(convertActionInputToOpenAITool);
    const threadId = threadIdFromRequest ?? (0, import_shared3.randomUUID)();
    // Collect the ids of tool calls present in the history so that orphaned
    // tool results (with no matching call) can be dropped below.
    const validToolUseIds = /* @__PURE__ */ new Set();
    for (const message of messages) {
      if (message.isActionExecutionMessage()) {
        validToolUseIds.add(message.id);
      }
    }
    const filteredMessages = messages.filter((message) => {
      if (message.isResultMessage()) {
        if (!validToolUseIds.has(message.actionExecutionId)) {
          return false;
        }
        // Deleting the id keeps only the first result per tool call.
        validToolUseIds.delete(message.actionExecutionId);
        return true;
      }
      return true;
    });
    let openaiMessages = filteredMessages.map((m) => convertMessageToOpenAIMessage(m, {
      keepSystemRole: this.keepSystemRole
    }));
    openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);
    let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
    // The shorthand "function" is expanded into OpenAI's object form that
    // names the function the model is forced to call.
    if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {
      toolChoice = {
        type: "function",
        function: {
          name: forwardedParameters.toolChoiceFunctionName
        }
      };
    }
    try {
      const stream = this.openai.beta.chat.completions.stream({
        model,
        stream: true,
        messages: openaiMessages,
        // Optional request fields are only included when actually set.
        ...tools.length > 0 && {
          tools
        },
        ...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
          max_completion_tokens: forwardedParameters.maxTokens
        },
        ...(forwardedParameters == null ? void 0 : forwardedParameters.stop) && {
          stop: forwardedParameters.stop
        },
        ...toolChoice && {
          tool_choice: toolChoice
        },
        ...this.disableParallelToolCalls && {
          parallel_tool_calls: false
        },
        ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
          temperature: forwardedParameters.temperature
        }
      });
      // Replay the delta stream as runtime events. `mode` tracks whether a
      // text message or a tool call is currently open so start/end events
      // stay paired.
      eventSource.stream(async (eventStream$) => {
        var _a, _b;
        let mode = null;
        let currentMessageId;
        let currentToolCallId;
        try {
          for await (const chunk of stream) {
            if (chunk.choices.length === 0) {
              continue;
            }
            const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
            const content = chunk.choices[0].delta.content;
            // A new tool call id (or the end of tool-call deltas) closes
            // whatever message/call is currently open.
            if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
              mode = null;
              eventStream$.sendTextMessageEnd({
                messageId: currentMessageId
              });
            } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
              mode = null;
              eventStream$.sendActionExecutionEnd({
                actionExecutionId: currentToolCallId
              });
            }
            // Open a new message or tool call if nothing is in progress.
            if (mode === null) {
              if (toolCall == null ? void 0 : toolCall.id) {
                mode = "function";
                currentToolCallId = toolCall.id;
                eventStream$.sendActionExecutionStart({
                  actionExecutionId: currentToolCallId,
                  parentMessageId: chunk.id,
                  actionName: toolCall.function.name
                });
              } else if (content) {
                mode = "message";
                currentMessageId = chunk.id;
                eventStream$.sendTextMessageStart({
                  messageId: currentMessageId
                });
              }
            }
            // Forward this chunk's payload into the open message/call.
            if (mode === "message" && content) {
              eventStream$.sendTextMessageContent({
                messageId: currentMessageId,
                content
              });
            } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
              eventStream$.sendActionExecutionArgs({
                actionExecutionId: currentToolCallId,
                args: toolCall.function.arguments
              });
            }
          }
          // Close anything still open when the stream ends.
          if (mode === "message") {
            eventStream$.sendTextMessageEnd({
              messageId: currentMessageId
            });
          } else if (mode === "function") {
            eventStream$.sendActionExecutionEnd({
              actionExecutionId: currentToolCallId
            });
          }
        } catch (error) {
          console.error("[OpenAI] Error during API call:", error);
          throw convertServiceAdapterError(error, "OpenAI");
        }
        eventStream$.complete();
      });
    } catch (error) {
      console.error("[OpenAI] Error during API call:", error);
      throw convertServiceAdapterError(error, "OpenAI");
    }
    return {
      threadId
    };
  }
};
__name(OpenAIAdapter, "OpenAIAdapter");
|
|
507
|
+
|
|
508
|
+
// src/service-adapters/langchain/utils.ts
|
|
509
|
+
var import_messages = require("@langchain/core/messages");
|
|
510
|
+
var import_tools = require("@langchain/core/tools");
|
|
511
|
+
var import_shared5 = require("@copilotkit/shared");
|
|
512
|
+
function convertMessageToLangChainMessage(message) {
|
|
513
|
+
if (message.isTextMessage()) {
|
|
514
|
+
if (message.role == "user") {
|
|
515
|
+
return new import_messages.HumanMessage(message.content);
|
|
516
|
+
} else if (message.role == "assistant") {
|
|
517
|
+
return new import_messages.AIMessage(message.content);
|
|
518
|
+
} else if (message.role === "system") {
|
|
519
|
+
return new import_messages.SystemMessage(message.content);
|
|
520
|
+
}
|
|
521
|
+
} else if (message.isActionExecutionMessage()) {
|
|
522
|
+
return new import_messages.AIMessage({
|
|
523
|
+
content: "",
|
|
524
|
+
tool_calls: [
|
|
525
|
+
{
|
|
526
|
+
id: message.id,
|
|
527
|
+
args: message.arguments,
|
|
528
|
+
name: message.name
|
|
529
|
+
}
|
|
530
|
+
]
|
|
531
|
+
});
|
|
532
|
+
} else if (message.isResultMessage()) {
|
|
533
|
+
return new import_messages.ToolMessage({
|
|
534
|
+
content: message.result,
|
|
535
|
+
tool_call_id: message.actionExecutionId
|
|
536
|
+
});
|
|
537
|
+
}
|
|
538
|
+
}
|
|
539
|
+
__name(convertMessageToLangChainMessage, "convertMessageToLangChainMessage");
|
|
540
|
+
function convertActionInputToLangChainTool(actionInput) {
|
|
541
|
+
return new import_tools.DynamicStructuredTool({
|
|
542
|
+
...actionInput,
|
|
543
|
+
name: actionInput.name,
|
|
544
|
+
description: actionInput.description,
|
|
545
|
+
schema: (0, import_shared5.convertJsonSchemaToZodSchema)(JSON.parse(actionInput.jsonSchema), true),
|
|
546
|
+
func: async () => {
|
|
547
|
+
return "";
|
|
548
|
+
}
|
|
549
|
+
});
|
|
550
|
+
}
|
|
551
|
+
__name(convertActionInputToLangChainTool, "convertActionInputToLangChainTool");
|
|
552
|
+
function isAIMessage(message) {
|
|
553
|
+
return Object.prototype.toString.call(message) === "[object AIMessage]";
|
|
554
|
+
}
|
|
555
|
+
__name(isAIMessage, "isAIMessage");
|
|
556
|
+
function isAIMessageChunk(message) {
|
|
557
|
+
return Object.prototype.toString.call(message) === "[object AIMessageChunk]";
|
|
558
|
+
}
|
|
559
|
+
__name(isAIMessageChunk, "isAIMessageChunk");
|
|
560
|
+
function isBaseMessageChunk(message) {
|
|
561
|
+
return Object.prototype.toString.call(message) === "[object BaseMessageChunk]";
|
|
562
|
+
}
|
|
563
|
+
__name(isBaseMessageChunk, "isBaseMessageChunk");
|
|
564
|
+
function maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution) {
|
|
565
|
+
if (actionExecution) {
|
|
566
|
+
eventStream$.sendActionExecutionResult({
|
|
567
|
+
actionExecutionId: actionExecution.id,
|
|
568
|
+
actionName: actionExecution.name,
|
|
569
|
+
result: "Sending a message"
|
|
570
|
+
});
|
|
571
|
+
}
|
|
572
|
+
}
|
|
573
|
+
__name(maybeSendActionExecutionResultIsMessage, "maybeSendActionExecutionResultIsMessage");
|
|
574
|
+
async function streamLangChainResponse({ result, eventStream$, actionExecution }) {
|
|
575
|
+
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
|
|
576
|
+
if (typeof result === "string") {
|
|
577
|
+
if (!actionExecution || (actionExecution == null ? void 0 : actionExecution.returnDirect)) {
|
|
578
|
+
eventStream$.sendActionExecutionResult({
|
|
579
|
+
actionExecutionId: actionExecution.id,
|
|
580
|
+
actionName: actionExecution.name,
|
|
581
|
+
result
|
|
582
|
+
});
|
|
583
|
+
eventStream$.sendTextMessage((0, import_shared5.randomId)(), result);
|
|
584
|
+
} else {
|
|
585
|
+
eventStream$.sendActionExecutionResult({
|
|
586
|
+
actionExecutionId: actionExecution.id,
|
|
587
|
+
actionName: actionExecution.name,
|
|
588
|
+
result
|
|
589
|
+
});
|
|
590
|
+
}
|
|
591
|
+
} else if (isAIMessage(result)) {
|
|
592
|
+
maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
|
|
593
|
+
if (result.content) {
|
|
594
|
+
eventStream$.sendTextMessage((0, import_shared5.randomId)(), result.content);
|
|
595
|
+
}
|
|
596
|
+
for (const toolCall of result.tool_calls) {
|
|
597
|
+
eventStream$.sendActionExecution({
|
|
598
|
+
actionExecutionId: toolCall.id || (0, import_shared5.randomId)(),
|
|
599
|
+
actionName: toolCall.name,
|
|
600
|
+
args: JSON.stringify(toolCall.args)
|
|
601
|
+
});
|
|
602
|
+
}
|
|
603
|
+
} else if (isBaseMessageChunk(result)) {
|
|
604
|
+
maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
|
|
605
|
+
if ((_a = result.lc_kwargs) == null ? void 0 : _a.content) {
|
|
606
|
+
eventStream$.sendTextMessage((0, import_shared5.randomId)(), result.content);
|
|
607
|
+
}
|
|
608
|
+
if ((_b = result.lc_kwargs) == null ? void 0 : _b.tool_calls) {
|
|
609
|
+
for (const toolCall of (_c = result.lc_kwargs) == null ? void 0 : _c.tool_calls) {
|
|
610
|
+
eventStream$.sendActionExecution({
|
|
611
|
+
actionExecutionId: toolCall.id || (0, import_shared5.randomId)(),
|
|
612
|
+
actionName: toolCall.name,
|
|
613
|
+
args: JSON.stringify(toolCall.args)
|
|
614
|
+
});
|
|
615
|
+
}
|
|
616
|
+
}
|
|
617
|
+
} else if (result && "getReader" in result) {
|
|
618
|
+
maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
|
|
619
|
+
let reader = result.getReader();
|
|
620
|
+
let mode = null;
|
|
621
|
+
let currentMessageId;
|
|
622
|
+
const toolCallDetails = {
|
|
623
|
+
name: null,
|
|
624
|
+
id: null,
|
|
625
|
+
index: null,
|
|
626
|
+
prevIndex: null
|
|
627
|
+
};
|
|
628
|
+
while (true) {
|
|
629
|
+
try {
|
|
630
|
+
const { done, value } = await reader.read();
|
|
631
|
+
let toolCallName = void 0;
|
|
632
|
+
let toolCallId = void 0;
|
|
633
|
+
let toolCallArgs = void 0;
|
|
634
|
+
let hasToolCall = false;
|
|
635
|
+
let content = "";
|
|
636
|
+
if (value && value.content) {
|
|
637
|
+
content = Array.isArray(value.content) ? ((_d = value.content[0]) == null ? void 0 : _d.text) ?? "" : value.content;
|
|
638
|
+
}
|
|
639
|
+
if (isAIMessageChunk(value)) {
|
|
640
|
+
let chunk = (_e = value.tool_call_chunks) == null ? void 0 : _e[0];
|
|
641
|
+
toolCallArgs = chunk == null ? void 0 : chunk.args;
|
|
642
|
+
hasToolCall = chunk != void 0;
|
|
643
|
+
if (chunk == null ? void 0 : chunk.name)
|
|
644
|
+
toolCallDetails.name = chunk.name;
|
|
645
|
+
if ((chunk == null ? void 0 : chunk.index) != null) {
|
|
646
|
+
toolCallDetails.index = chunk.index;
|
|
647
|
+
if (toolCallDetails.prevIndex == null)
|
|
648
|
+
toolCallDetails.prevIndex = chunk.index;
|
|
649
|
+
}
|
|
650
|
+
if (chunk == null ? void 0 : chunk.id)
|
|
651
|
+
toolCallDetails.id = chunk.index != null ? `${chunk.id}-idx-${chunk.index}` : chunk.id;
|
|
652
|
+
toolCallName = toolCallDetails.name;
|
|
653
|
+
toolCallId = toolCallDetails.id;
|
|
654
|
+
} else if (isBaseMessageChunk(value)) {
|
|
655
|
+
let chunk = (_g = (_f = value.additional_kwargs) == null ? void 0 : _f.tool_calls) == null ? void 0 : _g[0];
|
|
656
|
+
toolCallName = (_h = chunk == null ? void 0 : chunk.function) == null ? void 0 : _h.name;
|
|
657
|
+
toolCallId = chunk == null ? void 0 : chunk.id;
|
|
658
|
+
toolCallArgs = (_i = chunk == null ? void 0 : chunk.function) == null ? void 0 : _i.arguments;
|
|
659
|
+
hasToolCall = (chunk == null ? void 0 : chunk.function) != void 0;
|
|
660
|
+
}
|
|
661
|
+
if (mode === "message" && (toolCallId || done)) {
|
|
662
|
+
mode = null;
|
|
663
|
+
eventStream$.sendTextMessageEnd({
|
|
664
|
+
messageId: currentMessageId
|
|
665
|
+
});
|
|
666
|
+
} else if (mode === "function" && (!hasToolCall || done)) {
|
|
667
|
+
mode = null;
|
|
668
|
+
eventStream$.sendActionExecutionEnd({
|
|
669
|
+
actionExecutionId: toolCallId
|
|
670
|
+
});
|
|
671
|
+
}
|
|
672
|
+
if (done) {
|
|
673
|
+
break;
|
|
674
|
+
}
|
|
675
|
+
if (mode === null) {
|
|
676
|
+
if (hasToolCall && toolCallId && toolCallName) {
|
|
677
|
+
mode = "function";
|
|
678
|
+
eventStream$.sendActionExecutionStart({
|
|
679
|
+
actionExecutionId: toolCallId,
|
|
680
|
+
actionName: toolCallName,
|
|
681
|
+
parentMessageId: (_j = value.lc_kwargs) == null ? void 0 : _j.id
|
|
682
|
+
});
|
|
683
|
+
} else if (content) {
|
|
684
|
+
mode = "message";
|
|
685
|
+
currentMessageId = ((_k = value.lc_kwargs) == null ? void 0 : _k.id) || (0, import_shared5.randomId)();
|
|
686
|
+
eventStream$.sendTextMessageStart({
|
|
687
|
+
messageId: currentMessageId
|
|
688
|
+
});
|
|
689
|
+
}
|
|
690
|
+
}
|
|
691
|
+
if (mode === "message" && content) {
|
|
692
|
+
eventStream$.sendTextMessageContent({
|
|
693
|
+
messageId: currentMessageId,
|
|
694
|
+
content
|
|
695
|
+
});
|
|
696
|
+
} else if (mode === "function" && toolCallArgs) {
|
|
697
|
+
if (toolCallDetails.index !== toolCallDetails.prevIndex) {
|
|
698
|
+
eventStream$.sendActionExecutionEnd({
|
|
699
|
+
actionExecutionId: toolCallId
|
|
700
|
+
});
|
|
701
|
+
eventStream$.sendActionExecutionStart({
|
|
702
|
+
actionExecutionId: toolCallId,
|
|
703
|
+
actionName: toolCallName,
|
|
704
|
+
parentMessageId: (_l = value.lc_kwargs) == null ? void 0 : _l.id
|
|
705
|
+
});
|
|
706
|
+
toolCallDetails.prevIndex = toolCallDetails.index;
|
|
707
|
+
}
|
|
708
|
+
eventStream$.sendActionExecutionArgs({
|
|
709
|
+
actionExecutionId: toolCallId,
|
|
710
|
+
args: toolCallArgs
|
|
711
|
+
});
|
|
712
|
+
}
|
|
713
|
+
} catch (error) {
|
|
714
|
+
console.error("Error reading from stream", error);
|
|
715
|
+
break;
|
|
716
|
+
}
|
|
717
|
+
}
|
|
718
|
+
} else if (actionExecution) {
|
|
719
|
+
eventStream$.sendActionExecutionResult({
|
|
720
|
+
actionExecutionId: actionExecution.id,
|
|
721
|
+
actionName: actionExecution.name,
|
|
722
|
+
result: encodeResult(result)
|
|
723
|
+
});
|
|
724
|
+
} else {
|
|
725
|
+
throw new Error("Invalid return type from LangChain function.");
|
|
726
|
+
}
|
|
727
|
+
eventStream$.complete();
|
|
728
|
+
}
|
|
729
|
+
__name(streamLangChainResponse, "streamLangChainResponse");
|
|
730
|
+
function encodeResult(result) {
|
|
731
|
+
if (result === void 0) {
|
|
732
|
+
return "";
|
|
733
|
+
} else if (typeof result === "string") {
|
|
734
|
+
return result;
|
|
735
|
+
} else {
|
|
736
|
+
return JSON.stringify(result);
|
|
737
|
+
}
|
|
738
|
+
}
|
|
739
|
+
__name(encodeResult, "encodeResult");
|
|
740
|
+
|
|
741
|
+
// src/service-adapters/langchain/langchain-adapter.ts
|
|
742
|
+
var import_shared6 = require("@copilotkit/shared");
|
|
743
|
+
var import_promises = require("@langchain/core/callbacks/promises");
|
|
744
|
+
var LangChainAdapter = class {
|
|
745
|
+
options;
|
|
746
|
+
/**
|
|
747
|
+
* To use LangChain as a backend, provide a handler function to the adapter with your custom LangChain logic.
|
|
748
|
+
*/
|
|
749
|
+
constructor(options) {
|
|
750
|
+
this.options = options;
|
|
751
|
+
}
|
|
752
|
+
async process(request) {
|
|
753
|
+
try {
|
|
754
|
+
const { eventSource, model, actions, messages, runId, threadId: threadIdFromRequest } = request;
|
|
755
|
+
const threadId = threadIdFromRequest ?? (0, import_shared6.randomUUID)();
|
|
756
|
+
const result = await this.options.chainFn({
|
|
757
|
+
messages: messages.map(convertMessageToLangChainMessage),
|
|
758
|
+
tools: actions.map(convertActionInputToLangChainTool),
|
|
759
|
+
model,
|
|
760
|
+
threadId,
|
|
761
|
+
runId
|
|
762
|
+
});
|
|
763
|
+
eventSource.stream(async (eventStream$) => {
|
|
764
|
+
await streamLangChainResponse({
|
|
765
|
+
result,
|
|
766
|
+
eventStream$
|
|
767
|
+
});
|
|
768
|
+
});
|
|
769
|
+
return {
|
|
770
|
+
threadId
|
|
771
|
+
};
|
|
772
|
+
} finally {
|
|
773
|
+
await (0, import_promises.awaitAllCallbacks)();
|
|
774
|
+
}
|
|
775
|
+
}
|
|
776
|
+
};
|
|
777
|
+
__name(LangChainAdapter, "LangChainAdapter");
|
|
778
|
+
|
|
779
|
+
// src/service-adapters/google/google-genai-adapter.ts
|
|
780
|
+
var import_google_gauth = require("@langchain/google-gauth");
|
|
781
|
+
var import_messages2 = require("@langchain/core/messages");
|
|
782
|
+
var GoogleGenerativeAIAdapter = class extends LangChainAdapter {
|
|
783
|
+
constructor(options) {
|
|
784
|
+
super({
|
|
785
|
+
chainFn: async ({ messages, tools, threadId }) => {
|
|
786
|
+
const filteredMessages = messages.filter((message) => {
|
|
787
|
+
if (!(message instanceof import_messages2.AIMessage)) {
|
|
788
|
+
return true;
|
|
789
|
+
}
|
|
790
|
+
return message.content && String(message.content).trim().length > 0 || message.tool_calls && message.tool_calls.length > 0;
|
|
791
|
+
});
|
|
792
|
+
const model = new import_google_gauth.ChatGoogle({
|
|
793
|
+
apiKey: (options == null ? void 0 : options.apiKey) ?? process.env.GOOGLE_API_KEY,
|
|
794
|
+
modelName: (options == null ? void 0 : options.model) ?? "gemini-1.5-pro",
|
|
795
|
+
apiVersion: "v1beta"
|
|
796
|
+
}).bindTools(tools);
|
|
797
|
+
return model.stream(filteredMessages, {
|
|
798
|
+
metadata: {
|
|
799
|
+
conversation_id: threadId
|
|
800
|
+
}
|
|
801
|
+
});
|
|
802
|
+
}
|
|
803
|
+
});
|
|
804
|
+
}
|
|
805
|
+
};
|
|
806
|
+
__name(GoogleGenerativeAIAdapter, "GoogleGenerativeAIAdapter");
|
|
807
|
+
|
|
808
|
+
// src/service-adapters/openai/openai-assistant-adapter.ts
|
|
809
|
+
var import_openai2 = __toESM(require("openai"));
|
|
810
|
+
var OpenAIAssistantAdapter = class {
|
|
811
|
+
openai;
|
|
812
|
+
codeInterpreterEnabled;
|
|
813
|
+
assistantId;
|
|
814
|
+
fileSearchEnabled;
|
|
815
|
+
disableParallelToolCalls;
|
|
816
|
+
keepSystemRole = false;
|
|
817
|
+
constructor(params) {
|
|
818
|
+
this.openai = params.openai || new import_openai2.default({});
|
|
819
|
+
this.codeInterpreterEnabled = params.codeInterpreterEnabled === false || true;
|
|
820
|
+
this.fileSearchEnabled = params.fileSearchEnabled === false || true;
|
|
821
|
+
this.assistantId = params.assistantId;
|
|
822
|
+
this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
|
|
823
|
+
this.keepSystemRole = (params == null ? void 0 : params.keepSystemRole) ?? false;
|
|
824
|
+
}
|
|
825
|
+
async process(request) {
|
|
826
|
+
var _a, _b;
|
|
827
|
+
const { messages, actions, eventSource, runId, forwardedParameters } = request;
|
|
828
|
+
let threadId = (_b = (_a = request.extensions) == null ? void 0 : _a.openaiAssistantAPI) == null ? void 0 : _b.threadId;
|
|
829
|
+
if (!threadId) {
|
|
830
|
+
threadId = (await this.openai.beta.threads.create()).id;
|
|
831
|
+
}
|
|
832
|
+
const lastMessage = messages.at(-1);
|
|
833
|
+
let nextRunId = void 0;
|
|
834
|
+
if (lastMessage.isResultMessage() && runId) {
|
|
835
|
+
nextRunId = await this.submitToolOutputs(threadId, runId, messages, eventSource);
|
|
836
|
+
} else if (lastMessage.isTextMessage()) {
|
|
837
|
+
nextRunId = await this.submitUserMessage(threadId, messages, actions, eventSource, forwardedParameters);
|
|
838
|
+
} else {
|
|
839
|
+
throw new Error("No actionable message found in the messages");
|
|
840
|
+
}
|
|
841
|
+
return {
|
|
842
|
+
runId: nextRunId,
|
|
843
|
+
threadId,
|
|
844
|
+
extensions: {
|
|
845
|
+
...request.extensions,
|
|
846
|
+
openaiAssistantAPI: {
|
|
847
|
+
threadId,
|
|
848
|
+
runId: nextRunId
|
|
849
|
+
}
|
|
850
|
+
}
|
|
851
|
+
};
|
|
852
|
+
}
|
|
853
|
+
async submitToolOutputs(threadId, runId, messages, eventSource) {
|
|
854
|
+
let run = await this.openai.beta.threads.runs.retrieve(threadId, runId);
|
|
855
|
+
if (!run.required_action) {
|
|
856
|
+
throw new Error("No tool outputs required");
|
|
857
|
+
}
|
|
858
|
+
const toolCallsIds = run.required_action.submit_tool_outputs.tool_calls.map((toolCall) => toolCall.id);
|
|
859
|
+
const resultMessages = messages.filter((message) => message.isResultMessage() && toolCallsIds.includes(message.actionExecutionId));
|
|
860
|
+
if (toolCallsIds.length != resultMessages.length) {
|
|
861
|
+
throw new Error("Number of function results does not match the number of tool calls");
|
|
862
|
+
}
|
|
863
|
+
const toolOutputs = resultMessages.map((message) => {
|
|
864
|
+
return {
|
|
865
|
+
tool_call_id: message.actionExecutionId,
|
|
866
|
+
output: message.result
|
|
867
|
+
};
|
|
868
|
+
});
|
|
869
|
+
const stream = this.openai.beta.threads.runs.submitToolOutputsStream(threadId, runId, {
|
|
870
|
+
tool_outputs: toolOutputs,
|
|
871
|
+
...this.disableParallelToolCalls && {
|
|
872
|
+
parallel_tool_calls: false
|
|
873
|
+
}
|
|
874
|
+
});
|
|
875
|
+
await this.streamResponse(stream, eventSource);
|
|
876
|
+
return runId;
|
|
877
|
+
}
|
|
878
|
+
async submitUserMessage(threadId, messages, actions, eventSource, forwardedParameters) {
|
|
879
|
+
messages = [
|
|
880
|
+
...messages
|
|
881
|
+
];
|
|
882
|
+
const instructionsMessage = messages.shift();
|
|
883
|
+
const instructions = instructionsMessage.isTextMessage() ? instructionsMessage.content : "";
|
|
884
|
+
const userMessage = messages.map((m) => convertMessageToOpenAIMessage(m, {
|
|
885
|
+
keepSystemRole: this.keepSystemRole
|
|
886
|
+
})).map(convertSystemMessageToAssistantAPI).at(-1);
|
|
887
|
+
if (userMessage.role !== "user") {
|
|
888
|
+
throw new Error("No user message found");
|
|
889
|
+
}
|
|
890
|
+
await this.openai.beta.threads.messages.create(threadId, {
|
|
891
|
+
role: "user",
|
|
892
|
+
content: userMessage.content
|
|
893
|
+
});
|
|
894
|
+
const openaiTools = actions.map(convertActionInputToOpenAITool);
|
|
895
|
+
const tools = [
|
|
896
|
+
...openaiTools,
|
|
897
|
+
...this.codeInterpreterEnabled ? [
|
|
898
|
+
{
|
|
899
|
+
type: "code_interpreter"
|
|
900
|
+
}
|
|
901
|
+
] : [],
|
|
902
|
+
...this.fileSearchEnabled ? [
|
|
903
|
+
{
|
|
904
|
+
type: "file_search"
|
|
905
|
+
}
|
|
906
|
+
] : []
|
|
907
|
+
];
|
|
908
|
+
let stream = this.openai.beta.threads.runs.stream(threadId, {
|
|
909
|
+
assistant_id: this.assistantId,
|
|
910
|
+
instructions,
|
|
911
|
+
tools,
|
|
912
|
+
...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
|
|
913
|
+
max_completion_tokens: forwardedParameters.maxTokens
|
|
914
|
+
},
|
|
915
|
+
...this.disableParallelToolCalls && {
|
|
916
|
+
parallel_tool_calls: false
|
|
917
|
+
}
|
|
918
|
+
});
|
|
919
|
+
await this.streamResponse(stream, eventSource);
|
|
920
|
+
return getRunIdFromStream(stream);
|
|
921
|
+
}
|
|
922
|
+
async streamResponse(stream, eventSource) {
|
|
923
|
+
eventSource.stream(async (eventStream$) => {
|
|
924
|
+
var _a, _b, _c, _d, _e, _f;
|
|
925
|
+
let inFunctionCall = false;
|
|
926
|
+
let currentMessageId;
|
|
927
|
+
let currentToolCallId;
|
|
928
|
+
for await (const chunk of stream) {
|
|
929
|
+
switch (chunk.event) {
|
|
930
|
+
case "thread.message.created":
|
|
931
|
+
if (inFunctionCall) {
|
|
932
|
+
eventStream$.sendActionExecutionEnd({
|
|
933
|
+
actionExecutionId: currentToolCallId
|
|
934
|
+
});
|
|
935
|
+
}
|
|
936
|
+
currentMessageId = chunk.data.id;
|
|
937
|
+
eventStream$.sendTextMessageStart({
|
|
938
|
+
messageId: currentMessageId
|
|
939
|
+
});
|
|
940
|
+
break;
|
|
941
|
+
case "thread.message.delta":
|
|
942
|
+
if (((_a = chunk.data.delta.content) == null ? void 0 : _a[0].type) === "text") {
|
|
943
|
+
eventStream$.sendTextMessageContent({
|
|
944
|
+
messageId: currentMessageId,
|
|
945
|
+
content: (_b = chunk.data.delta.content) == null ? void 0 : _b[0].text.value
|
|
946
|
+
});
|
|
947
|
+
}
|
|
948
|
+
break;
|
|
949
|
+
case "thread.message.completed":
|
|
950
|
+
eventStream$.sendTextMessageEnd({
|
|
951
|
+
messageId: currentMessageId
|
|
952
|
+
});
|
|
953
|
+
break;
|
|
954
|
+
case "thread.run.step.delta":
|
|
955
|
+
let toolCallId;
|
|
956
|
+
let toolCallName;
|
|
957
|
+
let toolCallArgs;
|
|
958
|
+
if (chunk.data.delta.step_details.type === "tool_calls" && ((_c = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _c[0].type) === "function") {
|
|
959
|
+
toolCallId = (_d = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _d[0].id;
|
|
960
|
+
toolCallName = (_e = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _e[0].function.name;
|
|
961
|
+
toolCallArgs = (_f = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _f[0].function.arguments;
|
|
962
|
+
}
|
|
963
|
+
if (toolCallName && toolCallId) {
|
|
964
|
+
if (inFunctionCall) {
|
|
965
|
+
eventStream$.sendActionExecutionEnd({
|
|
966
|
+
actionExecutionId: currentToolCallId
|
|
967
|
+
});
|
|
968
|
+
}
|
|
969
|
+
inFunctionCall = true;
|
|
970
|
+
currentToolCallId = toolCallId;
|
|
971
|
+
eventStream$.sendActionExecutionStart({
|
|
972
|
+
actionExecutionId: currentToolCallId,
|
|
973
|
+
parentMessageId: chunk.data.id,
|
|
974
|
+
actionName: toolCallName
|
|
975
|
+
});
|
|
976
|
+
} else if (toolCallArgs) {
|
|
977
|
+
eventStream$.sendActionExecutionArgs({
|
|
978
|
+
actionExecutionId: currentToolCallId,
|
|
979
|
+
args: toolCallArgs
|
|
980
|
+
});
|
|
981
|
+
}
|
|
982
|
+
break;
|
|
983
|
+
}
|
|
984
|
+
}
|
|
985
|
+
if (inFunctionCall) {
|
|
986
|
+
eventStream$.sendActionExecutionEnd({
|
|
987
|
+
actionExecutionId: currentToolCallId
|
|
988
|
+
});
|
|
989
|
+
}
|
|
990
|
+
eventStream$.complete();
|
|
991
|
+
});
|
|
992
|
+
}
|
|
993
|
+
};
|
|
994
|
+
__name(OpenAIAssistantAdapter, "OpenAIAssistantAdapter");
|
|
995
|
+
function getRunIdFromStream(stream) {
|
|
996
|
+
return new Promise((resolve, reject) => {
|
|
997
|
+
let runIdGetter = /* @__PURE__ */ __name((event) => {
|
|
998
|
+
if (event.event === "thread.run.created") {
|
|
999
|
+
const runId = event.data.id;
|
|
1000
|
+
stream.off("event", runIdGetter);
|
|
1001
|
+
resolve(runId);
|
|
1002
|
+
}
|
|
1003
|
+
}, "runIdGetter");
|
|
1004
|
+
stream.on("event", runIdGetter);
|
|
1005
|
+
});
|
|
1006
|
+
}
|
|
1007
|
+
__name(getRunIdFromStream, "getRunIdFromStream");
|
|
1008
|
+
|
|
1009
|
+
// src/service-adapters/unify/unify-adapter.ts
|
|
1010
|
+
var import_openai3 = __toESM(require("openai"));
|
|
1011
|
+
var import_shared7 = require("@copilotkit/shared");
|
|
1012
|
+
var UnifyAdapter = class {
|
|
1013
|
+
apiKey;
|
|
1014
|
+
model;
|
|
1015
|
+
start;
|
|
1016
|
+
constructor(options) {
|
|
1017
|
+
if (options == null ? void 0 : options.apiKey) {
|
|
1018
|
+
this.apiKey = options.apiKey;
|
|
1019
|
+
} else {
|
|
1020
|
+
this.apiKey = "UNIFY_API_KEY";
|
|
1021
|
+
}
|
|
1022
|
+
this.model = options == null ? void 0 : options.model;
|
|
1023
|
+
this.start = true;
|
|
1024
|
+
}
|
|
1025
|
+
async process(request) {
|
|
1026
|
+
const tools = request.actions.map(convertActionInputToOpenAITool);
|
|
1027
|
+
const openai = new import_openai3.default({
|
|
1028
|
+
apiKey: this.apiKey,
|
|
1029
|
+
baseURL: "https://api.unify.ai/v0/"
|
|
1030
|
+
});
|
|
1031
|
+
const forwardedParameters = request.forwardedParameters;
|
|
1032
|
+
const messages = request.messages.map((m) => convertMessageToOpenAIMessage(m));
|
|
1033
|
+
const stream = await openai.chat.completions.create({
|
|
1034
|
+
model: this.model,
|
|
1035
|
+
messages,
|
|
1036
|
+
stream: true,
|
|
1037
|
+
...tools.length > 0 && {
|
|
1038
|
+
tools
|
|
1039
|
+
},
|
|
1040
|
+
...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
|
|
1041
|
+
temperature: forwardedParameters.temperature
|
|
1042
|
+
}
|
|
1043
|
+
});
|
|
1044
|
+
let model = null;
|
|
1045
|
+
let currentMessageId;
|
|
1046
|
+
let currentToolCallId;
|
|
1047
|
+
request.eventSource.stream(async (eventStream$) => {
|
|
1048
|
+
var _a, _b;
|
|
1049
|
+
let mode = null;
|
|
1050
|
+
for await (const chunk of stream) {
|
|
1051
|
+
if (this.start) {
|
|
1052
|
+
model = chunk.model;
|
|
1053
|
+
currentMessageId = (0, import_shared7.randomId)();
|
|
1054
|
+
eventStream$.sendTextMessageStart({
|
|
1055
|
+
messageId: currentMessageId
|
|
1056
|
+
});
|
|
1057
|
+
eventStream$.sendTextMessageContent({
|
|
1058
|
+
messageId: currentMessageId,
|
|
1059
|
+
content: `Model used: ${model}
|
|
1060
|
+
`
|
|
1061
|
+
});
|
|
1062
|
+
eventStream$.sendTextMessageEnd({
|
|
1063
|
+
messageId: currentMessageId
|
|
1064
|
+
});
|
|
1065
|
+
this.start = false;
|
|
1066
|
+
}
|
|
1067
|
+
const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
|
|
1068
|
+
const content = chunk.choices[0].delta.content;
|
|
1069
|
+
if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
|
|
1070
|
+
mode = null;
|
|
1071
|
+
eventStream$.sendTextMessageEnd({
|
|
1072
|
+
messageId: currentMessageId
|
|
1073
|
+
});
|
|
1074
|
+
} else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
|
|
1075
|
+
mode = null;
|
|
1076
|
+
eventStream$.sendActionExecutionEnd({
|
|
1077
|
+
actionExecutionId: currentToolCallId
|
|
1078
|
+
});
|
|
1079
|
+
}
|
|
1080
|
+
if (mode === null) {
|
|
1081
|
+
if (toolCall == null ? void 0 : toolCall.id) {
|
|
1082
|
+
mode = "function";
|
|
1083
|
+
currentToolCallId = toolCall.id;
|
|
1084
|
+
eventStream$.sendActionExecutionStart({
|
|
1085
|
+
actionExecutionId: currentToolCallId,
|
|
1086
|
+
actionName: toolCall.function.name
|
|
1087
|
+
});
|
|
1088
|
+
} else if (content) {
|
|
1089
|
+
mode = "message";
|
|
1090
|
+
currentMessageId = chunk.id;
|
|
1091
|
+
eventStream$.sendTextMessageStart({
|
|
1092
|
+
messageId: currentMessageId
|
|
1093
|
+
});
|
|
1094
|
+
}
|
|
1095
|
+
}
|
|
1096
|
+
if (mode === "message" && content) {
|
|
1097
|
+
eventStream$.sendTextMessageContent({
|
|
1098
|
+
messageId: currentMessageId,
|
|
1099
|
+
content
|
|
1100
|
+
});
|
|
1101
|
+
} else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
|
|
1102
|
+
eventStream$.sendActionExecutionArgs({
|
|
1103
|
+
actionExecutionId: currentToolCallId,
|
|
1104
|
+
args: toolCall.function.arguments
|
|
1105
|
+
});
|
|
1106
|
+
}
|
|
1107
|
+
}
|
|
1108
|
+
if (mode === "message") {
|
|
1109
|
+
eventStream$.sendTextMessageEnd({
|
|
1110
|
+
messageId: currentMessageId
|
|
1111
|
+
});
|
|
1112
|
+
} else if (mode === "function") {
|
|
1113
|
+
eventStream$.sendActionExecutionEnd({
|
|
1114
|
+
actionExecutionId: currentToolCallId
|
|
1115
|
+
});
|
|
1116
|
+
}
|
|
1117
|
+
eventStream$.complete();
|
|
1118
|
+
});
|
|
1119
|
+
return {
|
|
1120
|
+
threadId: request.threadId || (0, import_shared7.randomUUID)()
|
|
1121
|
+
};
|
|
1122
|
+
}
|
|
1123
|
+
};
|
|
1124
|
+
__name(UnifyAdapter, "UnifyAdapter");
|
|
1125
|
+
|
|
1126
|
+
// src/service-adapters/groq/groq-adapter.ts
|
|
1127
|
+
var import_groq_sdk = require("groq-sdk");
|
|
1128
|
+
var import_shared8 = require("@copilotkit/shared");
|
|
1129
|
+
var DEFAULT_MODEL2 = "llama-3.3-70b-versatile";
|
|
1130
|
+
var GroqAdapter = class {
|
|
1131
|
+
model = DEFAULT_MODEL2;
|
|
1132
|
+
disableParallelToolCalls = false;
|
|
1133
|
+
_groq;
|
|
1134
|
+
get groq() {
|
|
1135
|
+
return this._groq;
|
|
1136
|
+
}
|
|
1137
|
+
constructor(params) {
|
|
1138
|
+
this._groq = (params == null ? void 0 : params.groq) || new import_groq_sdk.Groq({});
|
|
1139
|
+
if (params == null ? void 0 : params.model) {
|
|
1140
|
+
this.model = params.model;
|
|
1141
|
+
}
|
|
1142
|
+
this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
|
|
1143
|
+
}
|
|
1144
|
+
async process(request) {
|
|
1145
|
+
const { threadId, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
|
|
1146
|
+
const tools = actions.map(convertActionInputToOpenAITool);
|
|
1147
|
+
let openaiMessages = messages.map((m) => convertMessageToOpenAIMessage(m, {
|
|
1148
|
+
keepSystemRole: true
|
|
1149
|
+
}));
|
|
1150
|
+
openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);
|
|
1151
|
+
let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
|
|
1152
|
+
if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {
|
|
1153
|
+
toolChoice = {
|
|
1154
|
+
type: "function",
|
|
1155
|
+
function: {
|
|
1156
|
+
name: forwardedParameters.toolChoiceFunctionName
|
|
1157
|
+
}
|
|
1158
|
+
};
|
|
1159
|
+
}
|
|
1160
|
+
let stream;
|
|
1161
|
+
try {
|
|
1162
|
+
stream = await this.groq.chat.completions.create({
|
|
1163
|
+
model,
|
|
1164
|
+
stream: true,
|
|
1165
|
+
messages: openaiMessages,
|
|
1166
|
+
...tools.length > 0 && {
|
|
1167
|
+
tools
|
|
1168
|
+
},
|
|
1169
|
+
...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
|
|
1170
|
+
max_tokens: forwardedParameters.maxTokens
|
|
1171
|
+
},
|
|
1172
|
+
...(forwardedParameters == null ? void 0 : forwardedParameters.stop) && {
|
|
1173
|
+
stop: forwardedParameters.stop
|
|
1174
|
+
},
|
|
1175
|
+
...toolChoice && {
|
|
1176
|
+
tool_choice: toolChoice
|
|
1177
|
+
},
|
|
1178
|
+
...this.disableParallelToolCalls && {
|
|
1179
|
+
parallel_tool_calls: false
|
|
1180
|
+
},
|
|
1181
|
+
...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
|
|
1182
|
+
temperature: forwardedParameters.temperature
|
|
1183
|
+
}
|
|
1184
|
+
});
|
|
1185
|
+
} catch (error) {
|
|
1186
|
+
throw convertServiceAdapterError(error, "Groq");
|
|
1187
|
+
}
|
|
1188
|
+
eventSource.stream(async (eventStream$) => {
|
|
1189
|
+
var _a, _b;
|
|
1190
|
+
let mode = null;
|
|
1191
|
+
let currentMessageId;
|
|
1192
|
+
let currentToolCallId;
|
|
1193
|
+
try {
|
|
1194
|
+
for await (const chunk of stream) {
|
|
1195
|
+
const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
|
|
1196
|
+
const content = chunk.choices[0].delta.content;
|
|
1197
|
+
if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
|
|
1198
|
+
mode = null;
|
|
1199
|
+
eventStream$.sendTextMessageEnd({
|
|
1200
|
+
messageId: currentMessageId
|
|
1201
|
+
});
|
|
1202
|
+
} else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
|
|
1203
|
+
mode = null;
|
|
1204
|
+
eventStream$.sendActionExecutionEnd({
|
|
1205
|
+
actionExecutionId: currentToolCallId
|
|
1206
|
+
});
|
|
1207
|
+
}
|
|
1208
|
+
if (mode === null) {
|
|
1209
|
+
if (toolCall == null ? void 0 : toolCall.id) {
|
|
1210
|
+
mode = "function";
|
|
1211
|
+
currentToolCallId = toolCall.id;
|
|
1212
|
+
eventStream$.sendActionExecutionStart({
|
|
1213
|
+
actionExecutionId: currentToolCallId,
|
|
1214
|
+
actionName: toolCall.function.name,
|
|
1215
|
+
parentMessageId: chunk.id
|
|
1216
|
+
});
|
|
1217
|
+
} else if (content) {
|
|
1218
|
+
mode = "message";
|
|
1219
|
+
currentMessageId = chunk.id;
|
|
1220
|
+
eventStream$.sendTextMessageStart({
|
|
1221
|
+
messageId: currentMessageId
|
|
1222
|
+
});
|
|
1223
|
+
}
|
|
1224
|
+
}
|
|
1225
|
+
if (mode === "message" && content) {
|
|
1226
|
+
eventStream$.sendTextMessageContent({
|
|
1227
|
+
messageId: currentMessageId,
|
|
1228
|
+
content
|
|
1229
|
+
});
|
|
1230
|
+
} else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
|
|
1231
|
+
eventStream$.sendActionExecutionArgs({
|
|
1232
|
+
actionExecutionId: currentToolCallId,
|
|
1233
|
+
args: toolCall.function.arguments
|
|
1234
|
+
});
|
|
1235
|
+
}
|
|
1236
|
+
}
|
|
1237
|
+
if (mode === "message") {
|
|
1238
|
+
eventStream$.sendTextMessageEnd({
|
|
1239
|
+
messageId: currentMessageId
|
|
1240
|
+
});
|
|
1241
|
+
} else if (mode === "function") {
|
|
1242
|
+
eventStream$.sendActionExecutionEnd({
|
|
1243
|
+
actionExecutionId: currentToolCallId
|
|
1244
|
+
});
|
|
1245
|
+
}
|
|
1246
|
+
} catch (error) {
|
|
1247
|
+
throw convertServiceAdapterError(error, "Groq");
|
|
1248
|
+
}
|
|
1249
|
+
eventStream$.complete();
|
|
1250
|
+
});
|
|
1251
|
+
return {
|
|
1252
|
+
threadId: request.threadId || (0, import_shared8.randomUUID)()
|
|
1253
|
+
};
|
|
1254
|
+
}
|
|
1255
|
+
};
|
|
1256
|
+
__name(GroqAdapter, "GroqAdapter");
|
|
1257
|
+
|
|
1258
|
+
// src/service-adapters/anthropic/anthropic-adapter.ts
|
|
1259
|
+
var import_sdk = __toESM(require("@anthropic-ai/sdk"));
|
|
1260
|
+
|
|
1261
|
+
// src/service-adapters/anthropic/utils.ts
|
|
1262
|
+
function limitMessagesToTokenCount2(messages, tools, model, maxTokens) {
|
|
1263
|
+
maxTokens || (maxTokens = MAX_TOKENS);
|
|
1264
|
+
const result = [];
|
|
1265
|
+
const toolsNumTokens = countToolsTokens2(model, tools);
|
|
1266
|
+
if (toolsNumTokens > maxTokens) {
|
|
1267
|
+
throw new Error(`Too many tokens in function definitions: ${toolsNumTokens} > ${maxTokens}`);
|
|
1268
|
+
}
|
|
1269
|
+
maxTokens -= toolsNumTokens;
|
|
1270
|
+
for (const message of messages) {
|
|
1271
|
+
if (message.role === "system") {
|
|
1272
|
+
const numTokens = countMessageTokens2(model, message);
|
|
1273
|
+
maxTokens -= numTokens;
|
|
1274
|
+
if (maxTokens < 0) {
|
|
1275
|
+
throw new Error("Not enough tokens for system message.");
|
|
1276
|
+
}
|
|
1277
|
+
}
|
|
1278
|
+
}
|
|
1279
|
+
let cutoff = false;
|
|
1280
|
+
const reversedMessages = [
|
|
1281
|
+
...messages
|
|
1282
|
+
].reverse();
|
|
1283
|
+
for (const message of reversedMessages) {
|
|
1284
|
+
if (message.role === "system") {
|
|
1285
|
+
result.unshift(message);
|
|
1286
|
+
continue;
|
|
1287
|
+
} else if (cutoff) {
|
|
1288
|
+
continue;
|
|
1289
|
+
}
|
|
1290
|
+
let numTokens = countMessageTokens2(model, message);
|
|
1291
|
+
if (maxTokens < numTokens) {
|
|
1292
|
+
cutoff = true;
|
|
1293
|
+
continue;
|
|
1294
|
+
}
|
|
1295
|
+
result.unshift(message);
|
|
1296
|
+
maxTokens -= numTokens;
|
|
1297
|
+
}
|
|
1298
|
+
return result;
|
|
1299
|
+
}
|
|
1300
|
+
__name(limitMessagesToTokenCount2, "limitMessagesToTokenCount");
|
|
1301
|
+
var MAX_TOKENS = 128e3;
|
|
1302
|
+
function countToolsTokens2(model, tools) {
|
|
1303
|
+
if (tools.length === 0) {
|
|
1304
|
+
return 0;
|
|
1305
|
+
}
|
|
1306
|
+
const json = JSON.stringify(tools);
|
|
1307
|
+
return countTokens2(model, json);
|
|
1308
|
+
}
|
|
1309
|
+
__name(countToolsTokens2, "countToolsTokens");
|
|
1310
|
+
function countMessageTokens2(model, message) {
|
|
1311
|
+
return countTokens2(model, JSON.stringify(message.content) || "");
|
|
1312
|
+
}
|
|
1313
|
+
__name(countMessageTokens2, "countMessageTokens");
|
|
1314
|
+
function countTokens2(model, text) {
|
|
1315
|
+
return text.length / 3;
|
|
1316
|
+
}
|
|
1317
|
+
__name(countTokens2, "countTokens");
|
|
1318
|
+
function convertActionInputToAnthropicTool(action) {
|
|
1319
|
+
return {
|
|
1320
|
+
name: action.name,
|
|
1321
|
+
description: action.description,
|
|
1322
|
+
input_schema: JSON.parse(action.jsonSchema)
|
|
1323
|
+
};
|
|
1324
|
+
}
|
|
1325
|
+
__name(convertActionInputToAnthropicTool, "convertActionInputToAnthropicTool");
|
|
1326
|
+
function convertMessageToAnthropicMessage(message) {
|
|
1327
|
+
if (message.isTextMessage()) {
|
|
1328
|
+
if (message.role === "system") {
|
|
1329
|
+
return {
|
|
1330
|
+
role: "assistant",
|
|
1331
|
+
content: [
|
|
1332
|
+
{
|
|
1333
|
+
type: "text",
|
|
1334
|
+
text: "THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: " + message.content
|
|
1335
|
+
}
|
|
1336
|
+
]
|
|
1337
|
+
};
|
|
1338
|
+
} else {
|
|
1339
|
+
return {
|
|
1340
|
+
role: message.role === "user" ? "user" : "assistant",
|
|
1341
|
+
content: [
|
|
1342
|
+
{
|
|
1343
|
+
type: "text",
|
|
1344
|
+
text: message.content
|
|
1345
|
+
}
|
|
1346
|
+
]
|
|
1347
|
+
};
|
|
1348
|
+
}
|
|
1349
|
+
} else if (message.isImageMessage()) {
|
|
1350
|
+
let mediaType;
|
|
1351
|
+
switch (message.format) {
|
|
1352
|
+
case "jpeg":
|
|
1353
|
+
mediaType = "image/jpeg";
|
|
1354
|
+
break;
|
|
1355
|
+
case "png":
|
|
1356
|
+
mediaType = "image/png";
|
|
1357
|
+
break;
|
|
1358
|
+
case "webp":
|
|
1359
|
+
mediaType = "image/webp";
|
|
1360
|
+
break;
|
|
1361
|
+
case "gif":
|
|
1362
|
+
mediaType = "image/gif";
|
|
1363
|
+
break;
|
|
1364
|
+
default:
|
|
1365
|
+
throw new Error(`Unsupported image format: ${message.format}`);
|
|
1366
|
+
}
|
|
1367
|
+
return {
|
|
1368
|
+
role: "user",
|
|
1369
|
+
content: [
|
|
1370
|
+
{
|
|
1371
|
+
type: "image",
|
|
1372
|
+
source: {
|
|
1373
|
+
type: "base64",
|
|
1374
|
+
media_type: mediaType,
|
|
1375
|
+
data: message.bytes
|
|
1376
|
+
}
|
|
1377
|
+
}
|
|
1378
|
+
]
|
|
1379
|
+
};
|
|
1380
|
+
} else if (message.isActionExecutionMessage()) {
|
|
1381
|
+
return {
|
|
1382
|
+
role: "assistant",
|
|
1383
|
+
content: [
|
|
1384
|
+
{
|
|
1385
|
+
id: message.id,
|
|
1386
|
+
type: "tool_use",
|
|
1387
|
+
input: message.arguments,
|
|
1388
|
+
name: message.name
|
|
1389
|
+
}
|
|
1390
|
+
]
|
|
1391
|
+
};
|
|
1392
|
+
} else if (message.isResultMessage()) {
|
|
1393
|
+
return {
|
|
1394
|
+
role: "user",
|
|
1395
|
+
content: [
|
|
1396
|
+
{
|
|
1397
|
+
type: "tool_result",
|
|
1398
|
+
content: message.result || "Action completed successfully",
|
|
1399
|
+
tool_use_id: message.actionExecutionId
|
|
1400
|
+
}
|
|
1401
|
+
]
|
|
1402
|
+
};
|
|
1403
|
+
}
|
|
1404
|
+
}
|
|
1405
|
+
__name(convertMessageToAnthropicMessage, "convertMessageToAnthropicMessage");
|
|
1406
|
+
|
|
1407
|
+
// src/service-adapters/anthropic/anthropic-adapter.ts
|
|
1408
|
+
var import_shared10 = require("@copilotkit/shared");
|
|
1409
|
+
var DEFAULT_MODEL3 = "claude-3-5-sonnet-latest";
|
|
1410
|
+
var AnthropicAdapter = class {
|
|
1411
|
+
model = DEFAULT_MODEL3;
|
|
1412
|
+
promptCaching;
|
|
1413
|
+
_anthropic;
|
|
1414
|
+
get anthropic() {
|
|
1415
|
+
return this._anthropic;
|
|
1416
|
+
}
|
|
1417
|
+
constructor(params) {
|
|
1418
|
+
this._anthropic = (params == null ? void 0 : params.anthropic) || new import_sdk.default({});
|
|
1419
|
+
if (params == null ? void 0 : params.model) {
|
|
1420
|
+
this.model = params.model;
|
|
1421
|
+
}
|
|
1422
|
+
this.promptCaching = (params == null ? void 0 : params.promptCaching) || {
|
|
1423
|
+
enabled: false
|
|
1424
|
+
};
|
|
1425
|
+
}
|
|
1426
|
+
/**
|
|
1427
|
+
* Adds cache control to system prompt
|
|
1428
|
+
*/
|
|
1429
|
+
addSystemPromptCaching(system, debug = false) {
|
|
1430
|
+
if (!this.promptCaching.enabled || !system) {
|
|
1431
|
+
return system;
|
|
1432
|
+
}
|
|
1433
|
+
const originalTextLength = system.length;
|
|
1434
|
+
if (debug) {
|
|
1435
|
+
console.log(`[ANTHROPIC CACHE DEBUG] Added cache control to system prompt (${originalTextLength} chars).`);
|
|
1436
|
+
}
|
|
1437
|
+
return [
|
|
1438
|
+
{
|
|
1439
|
+
type: "text",
|
|
1440
|
+
text: system,
|
|
1441
|
+
cache_control: {
|
|
1442
|
+
type: "ephemeral"
|
|
1443
|
+
}
|
|
1444
|
+
}
|
|
1445
|
+
];
|
|
1446
|
+
}
|
|
1447
|
+
/**
|
|
1448
|
+
* Adds cache control to the final message
|
|
1449
|
+
*/
|
|
1450
|
+
addIncrementalMessageCaching(messages, debug = false) {
|
|
1451
|
+
if (!this.promptCaching.enabled || messages.length === 0) {
|
|
1452
|
+
return messages;
|
|
1453
|
+
}
|
|
1454
|
+
const finalMessage = messages[messages.length - 1];
|
|
1455
|
+
const messageNumber = messages.length;
|
|
1456
|
+
if (Array.isArray(finalMessage.content) && finalMessage.content.length > 0) {
|
|
1457
|
+
const finalBlock = finalMessage.content[finalMessage.content.length - 1];
|
|
1458
|
+
const updatedMessages = [
|
|
1459
|
+
...messages.slice(0, -1),
|
|
1460
|
+
{
|
|
1461
|
+
...finalMessage,
|
|
1462
|
+
content: [
|
|
1463
|
+
...finalMessage.content.slice(0, -1),
|
|
1464
|
+
{
|
|
1465
|
+
...finalBlock,
|
|
1466
|
+
cache_control: {
|
|
1467
|
+
type: "ephemeral"
|
|
1468
|
+
}
|
|
1469
|
+
}
|
|
1470
|
+
]
|
|
1471
|
+
}
|
|
1472
|
+
];
|
|
1473
|
+
if (debug) {
|
|
1474
|
+
console.log(`[ANTHROPIC CACHE DEBUG] Added cache control to final message (message ${messageNumber}).`);
|
|
1475
|
+
}
|
|
1476
|
+
return updatedMessages;
|
|
1477
|
+
}
|
|
1478
|
+
return messages;
|
|
1479
|
+
}
|
|
1480
|
+
shouldGenerateFallbackResponse(messages) {
|
|
1481
|
+
var _a, _b, _c;
|
|
1482
|
+
if (messages.length === 0)
|
|
1483
|
+
return false;
|
|
1484
|
+
const lastMessage = messages[messages.length - 1];
|
|
1485
|
+
const endsWithToolResult = lastMessage.role === "user" && Array.isArray(lastMessage.content) && lastMessage.content.some((content) => content.type === "tool_result");
|
|
1486
|
+
if (messages.length >= 3 && endsWithToolResult) {
|
|
1487
|
+
const lastThree = messages.slice(-3);
|
|
1488
|
+
const hasRecentToolPattern = ((_a = lastThree[0]) == null ? void 0 : _a.role) === "user" && // Initial user message
|
|
1489
|
+
((_b = lastThree[1]) == null ? void 0 : _b.role) === "assistant" && // Assistant tool use
|
|
1490
|
+
Array.isArray(lastThree[1].content) && lastThree[1].content.some((content) => content.type === "tool_use") && ((_c = lastThree[2]) == null ? void 0 : _c.role) === "user" && // Tool result
|
|
1491
|
+
Array.isArray(lastThree[2].content) && lastThree[2].content.some((content) => content.type === "tool_result");
|
|
1492
|
+
return hasRecentToolPattern;
|
|
1493
|
+
}
|
|
1494
|
+
return endsWithToolResult;
|
|
1495
|
+
}
|
|
1496
|
+
async process(request) {
|
|
1497
|
+
const { threadId, model = this.model, messages: rawMessages, actions, eventSource, forwardedParameters } = request;
|
|
1498
|
+
const tools = actions.map(convertActionInputToAnthropicTool);
|
|
1499
|
+
const messages = [
|
|
1500
|
+
...rawMessages
|
|
1501
|
+
];
|
|
1502
|
+
const instructionsMessage = messages.shift();
|
|
1503
|
+
const instructions = instructionsMessage.isTextMessage() ? instructionsMessage.content : "";
|
|
1504
|
+
const validToolUseIds = /* @__PURE__ */ new Set();
|
|
1505
|
+
for (const message of messages) {
|
|
1506
|
+
if (message.isActionExecutionMessage()) {
|
|
1507
|
+
validToolUseIds.add(message.id);
|
|
1508
|
+
}
|
|
1509
|
+
}
|
|
1510
|
+
const processedToolResultIds = /* @__PURE__ */ new Set();
|
|
1511
|
+
const anthropicMessages = messages.map((message) => {
|
|
1512
|
+
if (message.isResultMessage()) {
|
|
1513
|
+
if (!validToolUseIds.has(message.actionExecutionId)) {
|
|
1514
|
+
return null;
|
|
1515
|
+
}
|
|
1516
|
+
if (processedToolResultIds.has(message.actionExecutionId)) {
|
|
1517
|
+
return null;
|
|
1518
|
+
}
|
|
1519
|
+
processedToolResultIds.add(message.actionExecutionId);
|
|
1520
|
+
return {
|
|
1521
|
+
role: "user",
|
|
1522
|
+
content: [
|
|
1523
|
+
{
|
|
1524
|
+
type: "tool_result",
|
|
1525
|
+
content: message.result || "Action completed successfully",
|
|
1526
|
+
tool_use_id: message.actionExecutionId
|
|
1527
|
+
}
|
|
1528
|
+
]
|
|
1529
|
+
};
|
|
1530
|
+
}
|
|
1531
|
+
return convertMessageToAnthropicMessage(message);
|
|
1532
|
+
}).filter(Boolean).filter((msg) => {
|
|
1533
|
+
if (msg.role === "assistant" && Array.isArray(msg.content)) {
|
|
1534
|
+
const hasEmptyTextOnly = msg.content.length === 1 && msg.content[0].type === "text" && (!msg.content[0].text || msg.content[0].text.trim() === "");
|
|
1535
|
+
return !hasEmptyTextOnly;
|
|
1536
|
+
}
|
|
1537
|
+
return true;
|
|
1538
|
+
});
|
|
1539
|
+
const limitedMessages = limitMessagesToTokenCount2(anthropicMessages, tools, model);
|
|
1540
|
+
const cachedSystemPrompt = this.addSystemPromptCaching(instructions, this.promptCaching.debug);
|
|
1541
|
+
const cachedMessages = this.addIncrementalMessageCaching(limitedMessages, this.promptCaching.debug);
|
|
1542
|
+
let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
|
|
1543
|
+
if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {
|
|
1544
|
+
toolChoice = {
|
|
1545
|
+
type: "tool",
|
|
1546
|
+
name: forwardedParameters.toolChoiceFunctionName
|
|
1547
|
+
};
|
|
1548
|
+
}
|
|
1549
|
+
try {
|
|
1550
|
+
const createParams = {
|
|
1551
|
+
system: cachedSystemPrompt,
|
|
1552
|
+
model: this.model,
|
|
1553
|
+
messages: cachedMessages,
|
|
1554
|
+
max_tokens: (forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) || 1024,
|
|
1555
|
+
...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) ? {
|
|
1556
|
+
temperature: forwardedParameters.temperature
|
|
1557
|
+
} : {},
|
|
1558
|
+
...tools.length > 0 && {
|
|
1559
|
+
tools
|
|
1560
|
+
},
|
|
1561
|
+
...toolChoice && {
|
|
1562
|
+
tool_choice: toolChoice
|
|
1563
|
+
},
|
|
1564
|
+
stream: true
|
|
1565
|
+
};
|
|
1566
|
+
const stream = await this.anthropic.messages.create(createParams);
|
|
1567
|
+
eventSource.stream(async (eventStream$) => {
|
|
1568
|
+
let mode = null;
|
|
1569
|
+
let didOutputText = false;
|
|
1570
|
+
let currentMessageId = (0, import_shared10.randomId)();
|
|
1571
|
+
let currentToolCallId = (0, import_shared10.randomId)();
|
|
1572
|
+
let filterThinkingTextBuffer = new FilterThinkingTextBuffer();
|
|
1573
|
+
let hasReceivedContent = false;
|
|
1574
|
+
try {
|
|
1575
|
+
for await (const chunk of stream) {
|
|
1576
|
+
if (chunk.type === "message_start") {
|
|
1577
|
+
currentMessageId = chunk.message.id;
|
|
1578
|
+
} else if (chunk.type === "content_block_start") {
|
|
1579
|
+
hasReceivedContent = true;
|
|
1580
|
+
if (chunk.content_block.type === "text") {
|
|
1581
|
+
didOutputText = false;
|
|
1582
|
+
filterThinkingTextBuffer.reset();
|
|
1583
|
+
mode = "message";
|
|
1584
|
+
} else if (chunk.content_block.type === "tool_use") {
|
|
1585
|
+
currentToolCallId = chunk.content_block.id;
|
|
1586
|
+
eventStream$.sendActionExecutionStart({
|
|
1587
|
+
actionExecutionId: currentToolCallId,
|
|
1588
|
+
actionName: chunk.content_block.name,
|
|
1589
|
+
parentMessageId: currentMessageId
|
|
1590
|
+
});
|
|
1591
|
+
mode = "function";
|
|
1592
|
+
}
|
|
1593
|
+
} else if (chunk.type === "content_block_delta") {
|
|
1594
|
+
if (chunk.delta.type === "text_delta") {
|
|
1595
|
+
const text = filterThinkingTextBuffer.onTextChunk(chunk.delta.text);
|
|
1596
|
+
if (text.length > 0) {
|
|
1597
|
+
if (!didOutputText) {
|
|
1598
|
+
eventStream$.sendTextMessageStart({
|
|
1599
|
+
messageId: currentMessageId
|
|
1600
|
+
});
|
|
1601
|
+
didOutputText = true;
|
|
1602
|
+
}
|
|
1603
|
+
eventStream$.sendTextMessageContent({
|
|
1604
|
+
messageId: currentMessageId,
|
|
1605
|
+
content: text
|
|
1606
|
+
});
|
|
1607
|
+
}
|
|
1608
|
+
} else if (chunk.delta.type === "input_json_delta") {
|
|
1609
|
+
eventStream$.sendActionExecutionArgs({
|
|
1610
|
+
actionExecutionId: currentToolCallId,
|
|
1611
|
+
args: chunk.delta.partial_json
|
|
1612
|
+
});
|
|
1613
|
+
}
|
|
1614
|
+
} else if (chunk.type === "content_block_stop") {
|
|
1615
|
+
if (mode === "message") {
|
|
1616
|
+
if (didOutputText) {
|
|
1617
|
+
eventStream$.sendTextMessageEnd({
|
|
1618
|
+
messageId: currentMessageId
|
|
1619
|
+
});
|
|
1620
|
+
}
|
|
1621
|
+
} else if (mode === "function") {
|
|
1622
|
+
eventStream$.sendActionExecutionEnd({
|
|
1623
|
+
actionExecutionId: currentToolCallId
|
|
1624
|
+
});
|
|
1625
|
+
}
|
|
1626
|
+
}
|
|
1627
|
+
}
|
|
1628
|
+
} catch (error) {
|
|
1629
|
+
throw convertServiceAdapterError(error, "Anthropic");
|
|
1630
|
+
}
|
|
1631
|
+
if (!hasReceivedContent && this.shouldGenerateFallbackResponse(cachedMessages)) {
|
|
1632
|
+
let fallbackContent = "Task completed successfully.";
|
|
1633
|
+
const lastMessage = cachedMessages[cachedMessages.length - 1];
|
|
1634
|
+
if ((lastMessage == null ? void 0 : lastMessage.role) === "user" && Array.isArray(lastMessage.content)) {
|
|
1635
|
+
const toolResult = lastMessage.content.find((c) => c.type === "tool_result");
|
|
1636
|
+
if ((toolResult == null ? void 0 : toolResult.content) && toolResult.content !== "Action completed successfully") {
|
|
1637
|
+
fallbackContent = toolResult.content;
|
|
1638
|
+
}
|
|
1639
|
+
}
|
|
1640
|
+
currentMessageId = (0, import_shared10.randomId)();
|
|
1641
|
+
eventStream$.sendTextMessageStart({
|
|
1642
|
+
messageId: currentMessageId
|
|
1643
|
+
});
|
|
1644
|
+
eventStream$.sendTextMessageContent({
|
|
1645
|
+
messageId: currentMessageId,
|
|
1646
|
+
content: fallbackContent
|
|
1647
|
+
});
|
|
1648
|
+
eventStream$.sendTextMessageEnd({
|
|
1649
|
+
messageId: currentMessageId
|
|
1650
|
+
});
|
|
1651
|
+
}
|
|
1652
|
+
eventStream$.complete();
|
|
1653
|
+
});
|
|
1654
|
+
} catch (error) {
|
|
1655
|
+
throw convertServiceAdapterError(error, "Anthropic");
|
|
1656
|
+
}
|
|
1657
|
+
return {
|
|
1658
|
+
threadId: threadId || (0, import_shared10.randomUUID)()
|
|
1659
|
+
};
|
|
1660
|
+
}
|
|
1661
|
+
};
|
|
1662
|
+
__name(AnthropicAdapter, "AnthropicAdapter");
|
|
1663
|
+
var THINKING_TAG = "<thinking>";
|
|
1664
|
+
var THINKING_TAG_END = "</thinking>";
|
|
1665
|
+
var FilterThinkingTextBuffer = /* @__PURE__ */ __name(class FilterThinkingTextBuffer2 {
|
|
1666
|
+
buffer;
|
|
1667
|
+
didFilterThinkingTag = false;
|
|
1668
|
+
constructor() {
|
|
1669
|
+
this.buffer = "";
|
|
1670
|
+
}
|
|
1671
|
+
onTextChunk(text) {
|
|
1672
|
+
this.buffer += text;
|
|
1673
|
+
if (this.didFilterThinkingTag) {
|
|
1674
|
+
return text;
|
|
1675
|
+
}
|
|
1676
|
+
const potentialTag = this.buffer.slice(0, THINKING_TAG.length);
|
|
1677
|
+
if (THINKING_TAG.startsWith(potentialTag)) {
|
|
1678
|
+
if (this.buffer.includes(THINKING_TAG_END)) {
|
|
1679
|
+
const end = this.buffer.indexOf(THINKING_TAG_END);
|
|
1680
|
+
const filteredText = this.buffer.slice(end + THINKING_TAG_END.length);
|
|
1681
|
+
this.buffer = filteredText;
|
|
1682
|
+
this.didFilterThinkingTag = true;
|
|
1683
|
+
return filteredText;
|
|
1684
|
+
} else {
|
|
1685
|
+
return "";
|
|
1686
|
+
}
|
|
1687
|
+
}
|
|
1688
|
+
return text;
|
|
1689
|
+
}
|
|
1690
|
+
reset() {
|
|
1691
|
+
this.buffer = "";
|
|
1692
|
+
this.didFilterThinkingTag = false;
|
|
1693
|
+
}
|
|
1694
|
+
}, "FilterThinkingTextBuffer");
|
|
1695
|
+
|
|
1696
|
+
// src/service-adapters/experimental/ollama/ollama-adapter.ts
|
|
1697
|
+
var import_ollama = require("@langchain/community/llms/ollama");
|
|
1698
|
+
var import_shared12 = require("@copilotkit/shared");
|
|
1699
|
+
var DEFAULT_MODEL4 = "llama3:latest";
|
|
1700
|
+
var ExperimentalOllamaAdapter = class {
|
|
1701
|
+
model;
|
|
1702
|
+
constructor(options) {
|
|
1703
|
+
if (options == null ? void 0 : options.model) {
|
|
1704
|
+
this.model = options.model;
|
|
1705
|
+
} else {
|
|
1706
|
+
this.model = DEFAULT_MODEL4;
|
|
1707
|
+
}
|
|
1708
|
+
}
|
|
1709
|
+
async process(request) {
|
|
1710
|
+
const { messages, actions, eventSource } = request;
|
|
1711
|
+
const ollama = new import_ollama.Ollama({
|
|
1712
|
+
model: this.model
|
|
1713
|
+
});
|
|
1714
|
+
const contents = messages.filter((m) => m.isTextMessage()).map((m) => m.content);
|
|
1715
|
+
const _stream = await ollama.stream(contents);
|
|
1716
|
+
eventSource.stream(async (eventStream$) => {
|
|
1717
|
+
const currentMessageId = (0, import_shared12.randomId)();
|
|
1718
|
+
eventStream$.sendTextMessageStart({
|
|
1719
|
+
messageId: currentMessageId
|
|
1720
|
+
});
|
|
1721
|
+
for await (const chunkText of _stream) {
|
|
1722
|
+
eventStream$.sendTextMessageContent({
|
|
1723
|
+
messageId: currentMessageId,
|
|
1724
|
+
content: chunkText
|
|
1725
|
+
});
|
|
1726
|
+
}
|
|
1727
|
+
eventStream$.sendTextMessageEnd({
|
|
1728
|
+
messageId: currentMessageId
|
|
1729
|
+
});
|
|
1730
|
+
eventStream$.complete();
|
|
1731
|
+
});
|
|
1732
|
+
return {
|
|
1733
|
+
threadId: request.threadId || (0, import_shared12.randomUUID)()
|
|
1734
|
+
};
|
|
1735
|
+
}
|
|
1736
|
+
};
|
|
1737
|
+
__name(ExperimentalOllamaAdapter, "ExperimentalOllamaAdapter");
|
|
1738
|
+
|
|
1739
|
+
// src/service-adapters/bedrock/bedrock-adapter.ts
|
|
1740
|
+
var import_aws = require("@langchain/aws");
|
|
1741
|
+
var BedrockAdapter = class extends LangChainAdapter {
|
|
1742
|
+
constructor(options) {
|
|
1743
|
+
super({
|
|
1744
|
+
chainFn: async ({ messages, tools, threadId }) => {
|
|
1745
|
+
const model = new import_aws.ChatBedrockConverse({
|
|
1746
|
+
model: (options == null ? void 0 : options.model) ?? "amazon.nova-lite-v1:0",
|
|
1747
|
+
region: (options == null ? void 0 : options.region) ?? "us-east-1",
|
|
1748
|
+
credentials: (options == null ? void 0 : options.credentials) ? {
|
|
1749
|
+
accessKeyId: options.credentials.accessKeyId,
|
|
1750
|
+
secretAccessKey: options.credentials.secretAccessKey
|
|
1751
|
+
} : void 0
|
|
1752
|
+
}).bindTools(tools);
|
|
1753
|
+
return model.stream(messages);
|
|
1754
|
+
}
|
|
1755
|
+
});
|
|
1756
|
+
}
|
|
1757
|
+
};
|
|
1758
|
+
__name(BedrockAdapter, "BedrockAdapter");
|
|
1759
|
+
|
|
1760
|
+
// src/service-adapters/empty/empty-adapter.ts
|
|
1761
|
+
var import_shared13 = require("@copilotkit/shared");
|
|
1762
|
+
var EmptyAdapter = class {
|
|
1763
|
+
async process(request) {
|
|
1764
|
+
return {
|
|
1765
|
+
threadId: request.threadId || (0, import_shared13.randomUUID)()
|
|
1766
|
+
};
|
|
1767
|
+
}
|
|
1768
|
+
};
|
|
1769
|
+
__name(EmptyAdapter, "EmptyAdapter");
|
|
1770
|
+
var ExperimentalEmptyAdapter = EmptyAdapter;
|
|
1771
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
1772
|
+
0 && (module.exports = {
|
|
1773
|
+
AnthropicAdapter,
|
|
1774
|
+
BedrockAdapter,
|
|
1775
|
+
EmptyAdapter,
|
|
1776
|
+
ExperimentalEmptyAdapter,
|
|
1777
|
+
ExperimentalOllamaAdapter,
|
|
1778
|
+
GoogleGenerativeAIAdapter,
|
|
1779
|
+
GroqAdapter,
|
|
1780
|
+
LangChainAdapter,
|
|
1781
|
+
OpenAIAdapter,
|
|
1782
|
+
OpenAIAssistantAdapter,
|
|
1783
|
+
RemoteChain,
|
|
1784
|
+
UnifyAdapter,
|
|
1785
|
+
convertServiceAdapterError
|
|
1786
|
+
});
|
|
1787
|
+
//# sourceMappingURL=index.js.map
|