@copilotkit/runtime 0.37.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc.js +7 -0
- package/.turbo/turbo-build.log +70 -0
- package/CHANGELOG.md +1 -0
- package/__snapshots__/schema/schema.graphql +178 -0
- package/dist/chunk-2CCVVJDU.mjs +56 -0
- package/dist/chunk-2CCVVJDU.mjs.map +1 -0
- package/dist/chunk-4UA4RB4C.mjs +185 -0
- package/dist/chunk-4UA4RB4C.mjs.map +1 -0
- package/dist/chunk-5HGYI6EG.mjs +678 -0
- package/dist/chunk-5HGYI6EG.mjs.map +1 -0
- package/dist/chunk-7IFP53C6.mjs +169 -0
- package/dist/chunk-7IFP53C6.mjs.map +1 -0
- package/dist/chunk-BLTAUVRP.mjs +30 -0
- package/dist/chunk-BLTAUVRP.mjs.map +1 -0
- package/dist/chunk-NFCPM5AM.mjs +43 -0
- package/dist/chunk-NFCPM5AM.mjs.map +1 -0
- package/dist/chunk-XPAUPJMW.mjs +1051 -0
- package/dist/chunk-XPAUPJMW.mjs.map +1 -0
- package/dist/graphql/types/base/index.d.ts +6 -0
- package/dist/graphql/types/base/index.js +63 -0
- package/dist/graphql/types/base/index.js.map +1 -0
- package/dist/graphql/types/base/index.mjs +7 -0
- package/dist/graphql/types/base/index.mjs.map +1 -0
- package/dist/graphql/types/converted/index.d.ts +2 -0
- package/dist/graphql/types/converted/index.js +88 -0
- package/dist/graphql/types/converted/index.js.map +1 -0
- package/dist/graphql/types/converted/index.mjs +12 -0
- package/dist/graphql/types/converted/index.mjs.map +1 -0
- package/dist/index-aa091e3c.d.ts +49 -0
- package/dist/index-f0875df3.d.ts +197 -0
- package/dist/index.d.ts +15 -0
- package/dist/index.js +2171 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +49 -0
- package/dist/index.mjs.map +1 -0
- package/dist/langchain-adapter-9ce103f3.d.ts +200 -0
- package/dist/langserve-fd5066ee.d.ts +96 -0
- package/dist/lib/index.d.ts +15 -0
- package/dist/lib/index.js +2170 -0
- package/dist/lib/index.js.map +1 -0
- package/dist/lib/index.mjs +46 -0
- package/dist/lib/index.mjs.map +1 -0
- package/dist/lib/integrations/index.d.ts +9 -0
- package/dist/lib/integrations/index.js +1024 -0
- package/dist/lib/integrations/index.js.map +1 -0
- package/dist/lib/integrations/index.mjs +24 -0
- package/dist/lib/integrations/index.mjs.map +1 -0
- package/dist/lib/integrations/node-http/index.d.ts +8 -0
- package/dist/lib/integrations/node-http/index.js +969 -0
- package/dist/lib/integrations/node-http/index.js.map +1 -0
- package/dist/lib/integrations/node-http/index.mjs +10 -0
- package/dist/lib/integrations/node-http/index.mjs.map +1 -0
- package/dist/pages-router-b6bc6c60.d.ts +30 -0
- package/dist/service-adapters/index.d.ts +11 -0
- package/dist/service-adapters/index.js +912 -0
- package/dist/service-adapters/index.js.map +1 -0
- package/dist/service-adapters/index.mjs +18 -0
- package/dist/service-adapters/index.mjs.map +1 -0
- package/jest.config.js +5 -0
- package/package.json +63 -0
- package/scripts/generate-gql-schema.ts +13 -0
- package/src/graphql/inputs/action.input.ts +13 -0
- package/src/graphql/inputs/cloud-guardrails.input.ts +19 -0
- package/src/graphql/inputs/cloud.input.ts +8 -0
- package/src/graphql/inputs/context-property.input.ts +10 -0
- package/src/graphql/inputs/custom-property.input.ts +15 -0
- package/src/graphql/inputs/frontend.input.ts +11 -0
- package/src/graphql/inputs/generate-copilot-response.input.ts +22 -0
- package/src/graphql/inputs/message.input.ts +50 -0
- package/src/graphql/resolvers/copilot.resolver.ts +147 -0
- package/src/graphql/types/base/index.ts +10 -0
- package/src/graphql/types/converted/index.ts +29 -0
- package/src/graphql/types/copilot-response.type.ts +75 -0
- package/src/graphql/types/enums.ts +22 -0
- package/src/graphql/types/guardrails-result.type.ts +20 -0
- package/src/graphql/types/message-status.type.ts +40 -0
- package/src/graphql/types/response-status.type.ts +52 -0
- package/src/index.ts +2 -0
- package/src/lib/copilot-cloud.ts +63 -0
- package/src/lib/copilot-runtime.ts +261 -0
- package/src/lib/guardrails.ts +3 -0
- package/src/lib/index.ts +7 -0
- package/src/lib/integrations/index.ts +4 -0
- package/src/lib/integrations/nextjs/app-router.ts +29 -0
- package/src/lib/integrations/nextjs/pages-router.ts +36 -0
- package/src/lib/integrations/node-http/index.ts +23 -0
- package/src/lib/integrations/shared.ts +68 -0
- package/src/service-adapters/conversion.ts +47 -0
- package/src/service-adapters/events.ts +197 -0
- package/src/service-adapters/experimental/groq/groq-adapter.ts +124 -0
- package/src/service-adapters/experimental/ollama/ollama-adapter.ts +75 -0
- package/src/service-adapters/google/google-genai-adapter.ts +149 -0
- package/src/service-adapters/google/utils.ts +94 -0
- package/src/service-adapters/index.ts +6 -0
- package/src/service-adapters/langchain/langchain-adapter.ts +82 -0
- package/src/service-adapters/langchain/langserve.ts +81 -0
- package/src/service-adapters/langchain/types.ts +14 -0
- package/src/service-adapters/langchain/utils.ts +235 -0
- package/src/service-adapters/openai/openai-adapter.ts +142 -0
- package/src/service-adapters/openai/openai-assistant-adapter.ts +260 -0
- package/src/service-adapters/openai/utils.ts +164 -0
- package/src/service-adapters/service-adapter.ts +29 -0
- package/tsconfig.json +11 -0
- package/tsup.config.ts +17 -0
- package/typedoc.json +4 -0
|
@@ -0,0 +1,912 @@
|
|
|
1
|
+
// esbuild-generated CommonJS/ESM interop helpers (bundler boilerplate).
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Assigns a readable `name` to a function/class; kept configurable so it can be redefined.
var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
// Defines a lazy, enumerable getter on `target` for every key in `all`
// (used below to build the module's export object).
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as getters, skipping `except`
// and keys `to` already owns; enumerability is taken from the source descriptor.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wraps a require()'d CommonJS module so it can be consumed like an ES module.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
// Marks the export object as an ES module and copies its properties for CommonJS consumers.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
+
|
|
30
|
+
// src/service-adapters/index.ts
// Public surface of the service-adapters entry point; every export is wired
// up as a lazy getter via __export, then published through module.exports.
var service_adapters_exports = {};
__export(service_adapters_exports, {
  GoogleGenerativeAIAdapter: () => GoogleGenerativeAIAdapter,
  LangChainAdapter: () => LangChainAdapter,
  OpenAIAdapter: () => OpenAIAdapter,
  OpenAIAssistantAdapter: () => OpenAIAssistantAdapter,
  RemoteChain: () => RemoteChain
});
module.exports = __toCommonJS(service_adapters_exports);
|
+
|
|
41
|
+
// src/service-adapters/openai/openai-adapter.ts
|
|
42
|
+
var import_openai = __toESM(require("openai"));
|
|
43
|
+
|
|
44
|
+
// src/graphql/types/base/index.ts
|
|
45
|
+
var import_type_graphql = require("type-graphql");
|
|
46
|
+
// TypeScript-compiler decorator helper: applies `decorators` either to a
// class (fewer than 3 args) or to a member/descriptor (3-4 args). Uses
// Reflect.decorate when a polyfill provides it; otherwise applies the
// decorators manually in reverse (right-to-left) order, as the spec requires.
function _ts_decorate(decorators, target, key, desc) {
  var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
  if (typeof Reflect === "object" && typeof Reflect.decorate === "function")
    r = Reflect.decorate(decorators, target, key, desc);
  else
    for (var i = decorators.length - 1; i >= 0; i--)
      if (d = decorators[i])
        r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
  // For member decorations (c > 3), redefine the (possibly replaced) descriptor.
  return c > 3 && r && Object.defineProperty(target, key, r), r;
}
__name(_ts_decorate, "_ts_decorate");
|
57
|
+
/**
 * TypeScript-compiler metadata helper: records design-time type metadata
 * through Reflect.metadata when the reflect-metadata polyfill is loaded.
 * Without the polyfill it returns undefined, so no decorator is applied.
 */
function _ts_metadata(k, v) {
  const reflectMetadataAvailable = typeof Reflect === "object" && typeof Reflect.metadata === "function";
  if (!reflectMetadataAvailable) {
    return;
  }
  return Reflect.metadata(k, v);
}
__name(_ts_metadata, "_ts_metadata");
|
62
|
+
// GraphQL @InputType base class shared by all message types: every message
// carries an `id` and a `createdAt` timestamp.
var BaseMessage = class {
  id;
  createdAt;
};
__name(BaseMessage, "BaseMessage");
// Decorators are applied manually because the bundler downlevels them.
_ts_decorate([
  (0, import_type_graphql.Field)(() => String),
  _ts_metadata("design:type", String)
], BaseMessage.prototype, "id", void 0);
_ts_decorate([
  (0, import_type_graphql.Field)(() => Date),
  _ts_metadata("design:type", typeof Date === "undefined" ? Object : Date)
], BaseMessage.prototype, "createdAt", void 0);
// Class-level @InputType() decoration must run last, after the field decorations.
BaseMessage = _ts_decorate([
  (0, import_type_graphql.InputType)()
], BaseMessage);
|
+
|
|
79
|
+
// src/graphql/types/converted/index.ts
// Concrete message shapes used by the service adapters. The adapters below
// dispatch on `instanceof` checks against these classes, so their identities
// (not just their shapes) matter.
// A plain chat message with `content` text and a chat `role`.
var TextMessage = class extends BaseMessage {
  content;
  role;
};
__name(TextMessage, "TextMessage");
// A request from the model to execute the action `name` with `arguments`.
var ActionExecutionMessage = class extends BaseMessage {
  name;
  arguments;
  scope;
};
__name(ActionExecutionMessage, "ActionExecutionMessage");
// The result of a previously requested action execution, correlated to the
// originating request by `actionExecutionId`.
var ResultMessage = class extends BaseMessage {
  actionExecutionId;
  actionName;
  result;
};
__name(ResultMessage, "ResultMessage");
|
+
|
|
98
|
+
// src/service-adapters/openai/utils.ts
|
|
99
|
+
var import_js_tiktoken = require("js-tiktoken");
|
|
100
|
+
/**
 * Selects as many of `messages` as fit within the model's token budget,
 * after reserving room for the serialized `tools` definitions.
 *
 * System messages are always kept (their token cost is deducted up front);
 * the remaining messages are admitted newest-first until the budget runs
 * out, while the result preserves the original chronological order.
 *
 * Throws when the tool definitions alone exceed the budget, or when the
 * system messages alone do.
 */
function limitMessagesToTokenCount(messages, tools, model, maxTokens) {
  // `||` means an explicit maxTokens of 0 also falls back to the model
  // default — NOTE(review): confirm 0 is never a meaningful budget here.
  maxTokens || (maxTokens = maxTokensForOpenAIModel(model));
  const result = [];
  const toolsNumTokens = countToolsTokens(model, tools);
  if (toolsNumTokens > maxTokens) {
    throw new Error(`Too many tokens in function definitions: ${toolsNumTokens} > ${maxTokens}`);
  }
  maxTokens -= toolsNumTokens;
  // Pre-pay the budget for every system message before admitting others.
  for (const message of messages) {
    if (message.role === "system") {
      const numTokens = countMessageTokens(model, message);
      maxTokens -= numTokens;
      if (maxTokens < 0) {
        throw new Error("Not enough tokens for system message.");
      }
    }
  }
  // Walk newest-to-oldest; once one non-system message doesn't fit, all
  // older non-system messages are dropped too (`cutoff`).
  let cutoff = false;
  const reversedMessages = [
    ...messages
  ].reverse();
  for (const message of reversedMessages) {
    if (message.role === "system") {
      // Already accounted for above — always keep.
      result.unshift(message);
      continue;
    } else if (cutoff) {
      continue;
    }
    let numTokens = countMessageTokens(model, message);
    if (maxTokens < numTokens) {
      cutoff = true;
      continue;
    }
    result.unshift(message);
    maxTokens -= numTokens;
  }
  return result;
}
__name(limitMessagesToTokenCount, "limitMessagesToTokenCount");
|
139
|
+
/**
 * Looks up the context-window size (in tokens) for an OpenAI model name,
 * falling back to DEFAULT_MAX_TOKENS for models not in the table.
 */
function maxTokensForOpenAIModel(model) {
  const knownLimit = maxTokensByModel[model];
  return knownLimit || DEFAULT_MAX_TOKENS;
}
__name(maxTokensForOpenAIModel, "maxTokensForOpenAIModel");
|
143
|
+
// Fallback context window used when a model name is not in the table below.
var DEFAULT_MAX_TOKENS = 128e3;
// Context-window sizes (in tokens) per OpenAI model name.
var maxTokensByModel = {
  // GPT-4
  "gpt-4o": 128e3,
  "gpt-4o-2024-05-13": 128e3,
  "gpt-4-turbo": 128e3,
  "gpt-4-turbo-2024-04-09": 128e3,
  "gpt-4-0125-preview": 128e3,
  "gpt-4-turbo-preview": 128e3,
  "gpt-4-1106-preview": 128e3,
  "gpt-4-vision-preview": 128e3,
  "gpt-4-1106-vision-preview": 128e3,
  "gpt-4-32k": 32768,
  "gpt-4-32k-0613": 32768,
  "gpt-4-32k-0314": 32768,
  "gpt-4": 8192,
  "gpt-4-0613": 8192,
  "gpt-4-0314": 8192,
  // GPT-3.5
  "gpt-3.5-turbo-0125": 16385,
  "gpt-3.5-turbo": 16385,
  "gpt-3.5-turbo-1106": 16385,
  "gpt-3.5-turbo-instruct": 4096,
  "gpt-3.5-turbo-16k": 16385,
  "gpt-3.5-turbo-0613": 4096,
  "gpt-3.5-turbo-16k-0613": 16385,
  "gpt-3.5-turbo-0301": 4097
};
|
171
|
+
/**
 * Approximates the token cost of the tool definitions by tokenizing their
 * JSON serialization; returns 0 when no tools are supplied.
 */
function countToolsTokens(model, tools) {
  if (!tools.length) {
    return 0;
  }
  return countTokens(model, JSON.stringify(tools));
}
__name(countToolsTokens, "countToolsTokens");
|
179
|
+
/** Token count of a single chat message's text content (missing content counts as 0). */
function countMessageTokens(model, message) {
  const text = message.content || "";
  return countTokens(model, text);
}
__name(countMessageTokens, "countMessageTokens");
|
183
|
+
/**
 * Counts the tokens in `text` using the js-tiktoken encoding for `model`.
 * Unknown model names fall back to the "gpt-4" encoding.
 */
function countTokens(model, text) {
  let encoding;
  try {
    encoding = (0, import_js_tiktoken.encodingForModel)(model);
  } catch (e) {
    // encodingForModel throws for model names tiktoken doesn't know.
    encoding = (0, import_js_tiktoken.encodingForModel)("gpt-4");
  }
  return encoding.encode(text).length;
}
__name(countTokens, "countTokens");
|
193
|
+
/**
 * Maps a CopilotKit action input to the OpenAI tool ("function") format,
 * parsing the action's JSON-schema string into a parameters object.
 */
function convertActionInputToOpenAITool(action) {
  const { name, description, jsonSchema } = action;
  return {
    type: "function",
    function: {
      name,
      description,
      parameters: JSON.parse(jsonSchema)
    }
  };
}
__name(convertActionInputToOpenAITool, "convertActionInputToOpenAITool");
|
204
|
+
/**
 * Converts an internal message into the OpenAI chat-completion format:
 * text messages map by role, action executions become assistant tool_calls,
 * results become "tool" messages. Unrecognized types yield undefined.
 */
function convertMessageToOpenAIMessage(message) {
  if (message instanceof TextMessage) {
    const { role, content } = message;
    return { role, content };
  }
  if (message instanceof ActionExecutionMessage) {
    const toolCall = {
      id: message.id,
      type: "function",
      function: {
        name: message.name,
        arguments: JSON.stringify(message.arguments)
      }
    };
    return {
      role: "assistant",
      tool_calls: [toolCall]
    };
  }
  if (message instanceof ResultMessage) {
    return {
      role: "tool",
      content: message.result,
      tool_call_id: message.actionExecutionId
    };
  }
}
__name(convertMessageToOpenAIMessage, "convertMessageToOpenAIMessage");
|
233
|
+
/**
 * The Assistants API does not accept "system" role messages, so system
 * messages are rewritten as assistant messages with an explanatory prefix.
 * Other messages are returned as shallow copies, unchanged.
 */
function convertSystemMessageToAssistantAPI(message) {
  const copy = { ...message };
  if (copy.role === "system") {
    copy.role = "assistant";
    copy.content = "THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: " + message.content;
  }
  return copy;
}
__name(convertSystemMessageToAssistantAPI, "convertSystemMessageToAssistantAPI");
|
243
|
+
|
|
244
|
+
// src/service-adapters/openai/openai-adapter.ts
var DEFAULT_MODEL = "gpt-4o";
// Service adapter that streams chat completions from the OpenAI API and
// relays them to the runtime's event stream as text-message and
// action-execution events.
var OpenAIAdapter = class {
  model = DEFAULT_MODEL;
  _openai;
  get openai() {
    return this._openai;
  }
  // params (optional): { openai?, model? } — an existing OpenAI client
  // (otherwise one is constructed with default config, presumably picking
  // up credentials from the environment — confirm) and a model override.
  constructor(params) {
    this._openai = (params == null ? void 0 : params.openai) || new import_openai.default({});
    if (params == null ? void 0 : params.model) {
      this.model = params.model;
    }
  }
  // Converts actions/messages to OpenAI format, trims to the token budget,
  // starts a streaming completion, and forwards the stream to eventSource.
  async process({ model = this.model, messages, actions, eventSource }) {
    const tools = actions.map(convertActionInputToOpenAITool);
    let openaiMessages = messages.map(convertMessageToOpenAIMessage);
    openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);
    const stream = this.openai.beta.chat.completions.stream({
      model,
      stream: true,
      messages: openaiMessages,
      ...tools.length > 0 && {
        tools
      }
    });
    eventSource.stream(async (eventStream$) => {
      var _a, _b;
      // `mode` tracks whether we are inside a streamed text message
      // ("message"), a streamed tool call ("function"), or neither (null).
      let mode = null;
      for await (const chunk of stream) {
        const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
        const content = chunk.choices[0].delta.content;
        // Close the current event when this chunk starts something new:
        // a tool call with an id ends an open text message; a new tool
        // call (or the absence of one) ends an open tool call.
        if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
          mode = null;
          eventStream$.sendTextMessageEnd();
        } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
          mode = null;
          eventStream$.sendActionExecutionEnd();
        }
        // Open a new event if none is active.
        if (mode === null) {
          if (toolCall == null ? void 0 : toolCall.id) {
            mode = "function";
            eventStream$.sendActionExecutionStart(toolCall.id, toolCall.function.name);
          } else if (content) {
            mode = "message";
            eventStream$.sendTextMessageStart(chunk.id);
          }
        }
        // Forward this chunk's payload to the active event.
        if (mode === "message" && content) {
          eventStream$.sendTextMessageContent(content);
        } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
          eventStream$.sendActionExecutionArgs(toolCall.function.arguments);
        }
      }
      // Close whatever event was still open when the stream ended.
      if (mode === "message") {
        eventStream$.sendTextMessageEnd();
      } else if (mode === "function") {
        eventStream$.sendActionExecutionEnd();
      }
      eventStream$.complete();
    });
    return {};
  }
};
__name(OpenAIAdapter, "OpenAIAdapter");
|
309
|
+
|
|
310
|
+
// src/service-adapters/openai/openai-assistant-adapter.ts
|
|
311
|
+
var import_openai2 = __toESM(require("openai"));
|
|
312
|
+
// Service adapter backed by the OpenAI Assistants API. Maintains a thread,
// submits user messages or tool outputs, and relays the streamed run events
// to the runtime's event stream.
var OpenAIAssistantAdapter = class {
  openai;
  codeInterpreterEnabled;
  assistantId;
  fileSearchEnabled;
  // params: { openai?, assistantId, codeInterpreterEnabled?, fileSearchEnabled? }.
  // Both feature flags default to true and are disabled only by an explicit false.
  constructor(params) {
    this.openai = params.openai || new import_openai2.default({});
    // BUGFIX: was `params.codeInterpreterEnabled === false || true` (and the
    // fileSearch twin), which always evaluates to true — the flags could
    // never be disabled. `!== false` keeps the default-true behavior while
    // honoring an explicit false.
    this.codeInterpreterEnabled = params.codeInterpreterEnabled !== false;
    this.fileSearchEnabled = params.fileSearchEnabled !== false;
    this.assistantId = params.assistantId;
  }
  // Routes the request: a trailing ResultMessage (with a runId) submits tool
  // outputs to the existing run; a trailing TextMessage starts a new run.
  // Creates a thread lazily when none is supplied.
  async process({ messages, actions, eventSource, threadId, runId }) {
    threadId || (threadId = (await this.openai.beta.threads.create()).id);
    const lastMessage = messages.at(-1);
    let nextRunId = void 0;
    if (lastMessage instanceof ResultMessage && runId) {
      nextRunId = await this.submitToolOutputs(threadId, runId, messages, eventSource);
    } else if (lastMessage instanceof TextMessage) {
      nextRunId = await this.submitUserMessage(threadId, messages, actions, eventSource);
    } else {
      throw new Error("No actionable message found in the messages");
    }
    return {
      threadId,
      runId: nextRunId
    };
  }
  // Matches ResultMessages to the run's pending tool calls and streams the
  // continuation. Throws if the run has no pending action or the counts
  // don't line up.
  async submitToolOutputs(threadId, runId, messages, eventSource) {
    let run = await this.openai.beta.threads.runs.retrieve(threadId, runId);
    if (!run.required_action) {
      throw new Error("No tool outputs required");
    }
    const toolCallsIds = run.required_action.submit_tool_outputs.tool_calls.map((toolCall) => toolCall.id);
    const resultMessages = messages.filter((message) => message instanceof ResultMessage && toolCallsIds.includes(message.actionExecutionId));
    if (toolCallsIds.length != resultMessages.length) {
      throw new Error("Number of function results does not match the number of tool calls");
    }
    const toolOutputs = resultMessages.map((message) => {
      return {
        tool_call_id: message.actionExecutionId,
        output: message.result
      };
    });
    const stream = this.openai.beta.threads.runs.submitToolOutputsStream(threadId, runId, {
      tool_outputs: toolOutputs
    });
    await this.streamResponse(stream, eventSource);
    return runId;
  }
  // Posts the latest user message to the thread and starts a streamed run.
  // The first message is treated as the run instructions (system message).
  async submitUserMessage(threadId, messages, actions, eventSource) {
    messages = [
      ...messages
    ];
    const instructionsMessage = messages.shift();
    const instructions = instructionsMessage instanceof TextMessage ? instructionsMessage.content : "";
    const userMessage = messages.map(convertMessageToOpenAIMessage).map(convertSystemMessageToAssistantAPI).at(-1);
    // BUGFIX: guard against an empty message list — previously
    // `userMessage.role` threw a TypeError instead of the intended error.
    if ((userMessage == null ? void 0 : userMessage.role) !== "user") {
      throw new Error("No user message found");
    }
    await this.openai.beta.threads.messages.create(threadId, {
      role: "user",
      content: userMessage.content
    });
    const openaiTools = actions.map(convertActionInputToOpenAITool);
    // Built-in tools are appended only when their (now fixable) flags allow.
    const tools = [
      ...openaiTools,
      ...this.codeInterpreterEnabled ? [
        {
          type: "code_interpreter"
        }
      ] : [],
      ...this.fileSearchEnabled ? [
        {
          type: "file_search"
        }
      ] : []
    ];
    let stream = this.openai.beta.threads.runs.stream(threadId, {
      assistant_id: this.assistantId,
      instructions,
      tools
    });
    await this.streamResponse(stream, eventSource);
    return getRunIdFromStream(stream);
  }
  // Translates Assistants streaming events into runtime event-stream calls,
  // closing any open action execution before starting a new message/call
  // and at the end of the stream.
  async streamResponse(stream, eventSource) {
    eventSource.stream(async (eventStream$) => {
      var _a, _b, _c, _d, _e, _f;
      let inFunctionCall = false;
      for await (const chunk of stream) {
        switch (chunk.event) {
          case "thread.message.created":
            if (inFunctionCall) {
              eventStream$.sendActionExecutionEnd();
            }
            eventStream$.sendTextMessageStart(chunk.data.id);
            break;
          case "thread.message.delta":
            if (((_a = chunk.data.delta.content) == null ? void 0 : _a[0].type) === "text") {
              eventStream$.sendTextMessageContent((_b = chunk.data.delta.content) == null ? void 0 : _b[0].text.value);
            }
            break;
          case "thread.message.completed":
            eventStream$.sendTextMessageEnd();
            break;
          case "thread.run.step.delta":
            let toolCallId;
            let toolCallName;
            let toolCallArgs;
            if (chunk.data.delta.step_details.type === "tool_calls" && ((_c = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _c[0].type) === "function") {
              toolCallId = (_d = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _d[0].id;
              toolCallName = (_e = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _e[0].function.name;
              toolCallArgs = (_f = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _f[0].function.arguments;
            }
            // A chunk with id+name starts a new call; args-only chunks
            // continue the current one.
            if (toolCallName && toolCallId) {
              if (inFunctionCall) {
                eventStream$.sendActionExecutionEnd();
              }
              inFunctionCall = true;
              eventStream$.sendActionExecutionStart(toolCallId, toolCallName);
            } else if (toolCallArgs) {
              eventStream$.sendActionExecutionArgs(toolCallArgs);
            }
            break;
        }
      }
      if (inFunctionCall) {
        eventStream$.sendActionExecutionEnd();
      }
      eventStream$.complete();
    });
  }
};
__name(OpenAIAssistantAdapter, "OpenAIAssistantAdapter");
|
446
|
+
/**
 * Resolves with the run id as soon as the assistant stream emits its
 * "thread.run.created" event, detaching the listener afterwards.
 * The promise never settles if that event is never emitted.
 */
function getRunIdFromStream(stream) {
  return new Promise((resolve, reject) => {
    const runIdGetter = /* @__PURE__ */ __name((event) => {
      if (event.event !== "thread.run.created") {
        return;
      }
      stream.off("event", runIdGetter);
      resolve(event.data.id);
    }, "runIdGetter");
    stream.on("event", runIdGetter);
  });
}
__name(getRunIdFromStream, "getRunIdFromStream");
|
459
|
+
|
|
460
|
+
// src/service-adapters/google/google-genai-adapter.ts
|
|
461
|
+
var import_generative_ai = require("@google/generative-ai");
|
|
462
|
+
|
|
463
|
+
// src/service-adapters/google/utils.ts
/**
 * Converts an internal message into the Google Generative AI content format.
 * Gemini has no "system" role, so system text is sent as a prefixed "user"
 * turn; action executions become functionCall parts and results become
 * functionResponse parts. Unrecognized message types yield undefined.
 */
function convertMessageToGoogleGenAIMessage(message) {
  if (message instanceof TextMessage) {
    const roleMap = {
      user: "user",
      assistant: "model",
      system: "user"
    };
    const text = message.role === "system" ? "THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: " + message.content : message.content;
    return {
      role: roleMap[message.role],
      parts: [{ text }]
    };
  }
  if (message instanceof ActionExecutionMessage) {
    const functionCall = {
      name: message.name,
      args: message.arguments
    };
    return {
      role: "model",
      parts: [{ functionCall }]
    };
  }
  if (message instanceof ResultMessage) {
    const functionResponse = {
      name: message.actionName,
      response: {
        name: message.actionName,
        content: tryParseJson(message.result)
      }
    };
    return {
      role: "model",
      parts: [{ functionResponse }]
    };
  }
}
__name(convertMessageToGoogleGenAIMessage, "convertMessageToGoogleGenAIMessage");
|
510
|
+
/**
 * Converts a CopilotKit action into a Google GenAI tool declaration.
 * JSON-schema `type` fields are uppercased in place (SchemaType enum style),
 * recursing through nested object properties.
 */
function transformActionToGoogleGenAITool(action) {
  const parameters = JSON.parse(action.jsonSchema);
  const transformProperties = /* @__PURE__ */ __name((props) => {
    for (const key in props) {
      const prop = props[key];
      if (prop.type) {
        prop.type = prop.type.toUpperCase();
      }
      if (prop.properties) {
        transformProperties(prop.properties);
      }
    }
  }, "transformProperties");
  transformProperties(parameters);
  return {
    functionDeclarations: [
      {
        name: action.name,
        description: action.description,
        parameters
      }
    ]
  };
}
__name(transformActionToGoogleGenAITool, "transformActionToGoogleGenAITool");
|
536
|
+
/**
 * Best-effort JSON parse: returns the parsed value, the original string when
 * it is not valid JSON, or "" for empty/missing input.
 */
function tryParseJson(str) {
  if (!str) {
    return "";
  }
  let parsed;
  try {
    parsed = JSON.parse(str);
  } catch (e) {
    parsed = str;
  }
  return parsed;
}
__name(tryParseJson, "tryParseJson");
|
547
|
+
|
|
548
|
+
// src/service-adapters/google/google-genai-adapter.ts
|
|
549
|
+
var import_nanoid = require("nanoid");
|
|
550
|
+
// Service adapter for Google Generative AI (Gemini). Builds a chat session
// from the message history and streams the response to the runtime's
// event stream.
var GoogleGenerativeAIAdapter = class {
  model;
  // options (optional): { model? } — an existing GenerativeModel instance.
  // Without one, a "gemini-pro" model is created using GOOGLE_API_KEY from
  // the environment.
  constructor(options) {
    if (options == null ? void 0 : options.model) {
      this.model = options.model;
    } else {
      const genAI = new import_generative_ai.GoogleGenerativeAI(process.env["GOOGLE_API_KEY"]);
      this.model = genAI.getGenerativeModel({
        model: "gemini-pro"
      });
    }
  }
  // Requires messages[0] to be a system TextMessage (throws otherwise);
  // messages[1..-2] become chat history and the last message is sent as the
  // current turn.
  async process(request) {
    const { messages, actions, eventSource } = request;
    const history = messages.slice(1, -1).map(convertMessageToGoogleGenAIMessage);
    const currentMessage = convertMessageToGoogleGenAIMessage(messages.at(-1));
    if (!currentMessage) {
      throw new Error("No current message");
    }
    let systemMessage;
    const firstMessage = messages.at(0);
    if (firstMessage instanceof TextMessage && firstMessage.role === "system") {
      systemMessage = firstMessage.content.trim();
    } else {
      throw new Error("First message is not a system message");
    }
    const tools = actions.map(transformActionToGoogleGenAITool);
    const isFirstGenGeminiPro = this.model.model === "gemini-pro" || this.model.model === "models/gemini-pro";
    const chat = this.model.startChat({
      history: [
        ...history,
        // gemini-pro does not support system instructions, so we need to add them to the history
        ...isFirstGenGeminiPro ? [
          {
            role: "user",
            parts: [
              {
                text: systemMessage
              }
            ]
          }
        ] : []
      ],
      // only gemini-1.5-pro-latest and later supports setting system instructions
      ...isFirstGenGeminiPro ? {} : {
        systemInstruction: {
          role: "user",
          parts: [
            {
              text: systemMessage
            }
          ]
        }
      },
      tools
    });
    const result = await chat.sendMessageStream(currentMessage.parts);
    eventSource.stream(async (eventStream$) => {
      // Text chunks are wrapped in a single text-message event; the start
      // event is emitted lazily on the first chunk.
      let isTextMessage = false;
      for await (const chunk of result.stream) {
        const chunkText = chunk.text();
        if (!isTextMessage) {
          isTextMessage = true;
          eventStream$.sendTextMessageStart((0, import_nanoid.nanoid)());
        }
        eventStream$.sendTextMessageContent(chunkText);
      }
      if (isTextMessage) {
        eventStream$.sendTextMessageEnd();
      }
      // Function calls are only available on the final aggregated response.
      let calls = (await result.response).functionCalls();
      if (calls) {
        for (let call of calls) {
          eventStream$.sendActionExecution((0, import_nanoid.nanoid)(), call.name, JSON.stringify(replaceNewlinesInObject(call.args)));
        }
      }
      eventStream$.complete();
    });
    return {};
  }
};
__name(GoogleGenerativeAIAdapter, "GoogleGenerativeAIAdapter");
|
632
|
+
/**
 * Recursively walks a value, replacing escaped newline sequences in every
 * string with real newlines. Arrays and plain objects are rebuilt (the input
 * is not mutated); all other values pass through untouched.
 */
function replaceNewlinesInObject(obj) {
  if (typeof obj === "string") {
    return obj.replace(/\\\\n/g, "\n");
  }
  if (Array.isArray(obj)) {
    return obj.map(replaceNewlinesInObject);
  }
  if (obj !== null && typeof obj === "object") {
    const rebuilt = {};
    for (const key of Object.keys(obj)) {
      rebuilt[key] = replaceNewlinesInObject(obj[key]);
    }
    return rebuilt;
  }
  return obj;
}
__name(replaceNewlinesInObject, "replaceNewlinesInObject");
|
649
|
+
|
|
650
|
+
// src/service-adapters/langchain/utils.ts
|
|
651
|
+
var import_messages = require("@langchain/core/messages");
|
|
652
|
+
var import_tools = require("@langchain/core/tools");
|
|
653
|
+
var import_zod = require("zod");
|
|
654
|
+
var import_nanoid2 = require("nanoid");
|
|
655
|
+
/**
 * Maps a CopilotKit runtime message onto the corresponding LangChain message
 * class. Text messages become Human/AI/SystemMessage by role; action
 * executions become an AIMessage carrying a single tool call; result
 * messages become a ToolMessage tied back to the originating tool call.
 * Unknown message types or roles yield `undefined`.
 *
 * @param {*} message - a TextMessage, ActionExecutionMessage, or ResultMessage.
 * @returns {*} the LangChain message, or undefined if not convertible.
 */
function convertMessageToLangChainMessage(message) {
  if (message instanceof TextMessage) {
    const { role, content } = message;
    if (role == "user") return new import_messages.HumanMessage(content);
    if (role == "assistant") return new import_messages.AIMessage(content);
    if (role === "system") return new import_messages.SystemMessage(content);
    // Unknown role: nothing to convert.
    return void 0;
  }
  if (message instanceof ActionExecutionMessage) {
    // Represent the action as an assistant turn with one tool call.
    const toolCall = {
      id: message.id,
      args: message.arguments,
      name: message.name
    };
    return new import_messages.AIMessage({ content: "", tool_calls: [toolCall] });
  }
  if (message instanceof ResultMessage) {
    // Feed the action's result back as the matching tool-call response.
    return new import_messages.ToolMessage({
      content: message.result,
      tool_call_id: message.actionExecutionId
    });
  }
  return void 0;
}
__name(convertMessageToLangChainMessage, "convertMessageToLangChainMessage");
|
|
683
|
+
/**
 * Converts a (subset of a) JSON Schema node into a zod schema. Supported
 * node types: "object" (recursing into properties, honoring the node's
 * `required` list), "string", "number", "boolean", and "array". Any other
 * type yields `undefined`.
 *
 * @param {*} jsonSchema - the JSON-Schema-shaped node to convert.
 * @param {boolean} required - whether this node is required by its parent;
 *   when false the resulting zod schema is marked `.optional()`.
 * @returns {*} the zod schema, or undefined for unsupported node types.
 */
function convertJsonSchemaToZodSchema(jsonSchema, required) {
  // Apply the optionality flag uniformly to whatever schema we build.
  const maybeOptional = (schema) => required ? schema : schema.optional();
  switch (jsonSchema.type) {
    case "object": {
      const shape = {};
      for (const [key, value] of Object.entries(jsonSchema.properties)) {
        const childRequired = jsonSchema.required ? jsonSchema.required.includes(key) : false;
        shape[key] = convertJsonSchemaToZodSchema(value, childRequired);
      }
      return maybeOptional(import_zod.z.object(shape));
    }
    case "string":
      return maybeOptional(import_zod.z.string().describe(jsonSchema.description));
    case "number":
      return maybeOptional(import_zod.z.number().describe(jsonSchema.description));
    case "boolean":
      return maybeOptional(import_zod.z.boolean().describe(jsonSchema.description));
    case "array": {
      // Array items are converted as non-required, matching the original.
      const itemSchema = convertJsonSchemaToZodSchema(jsonSchema.items, false);
      return maybeOptional(import_zod.z.array(itemSchema));
    }
  }
  return void 0;
}
__name(convertJsonSchemaToZodSchema, "convertJsonSchemaToZodSchema");
|
|
707
|
+
/**
 * Wraps a CopilotKit action description as a LangChain DynamicStructuredTool.
 * The action's parameter schema arrives as a JSON Schema string and is
 * converted to the zod schema the tool expects.
 *
 * @param {*} actionInput - action with `name`, `description`, and a
 *   `jsonSchema` string.
 * @returns {*} a DynamicStructuredTool describing (but not executing) the action.
 */
function convertActionInputToLangChainTool(actionInput) {
  const parsedSchema = JSON.parse(actionInput.jsonSchema);
  return new import_tools.DynamicStructuredTool({
    name: actionInput.name,
    description: actionInput.description,
    schema: convertJsonSchemaToZodSchema(parsedSchema, true),
    // The tool body is a no-op stub returning "" — presumably the action is
    // actually executed elsewhere; confirm against the consuming adapter.
    func: async () => ""
  });
}
__name(convertActionInputToLangChainTool, "convertActionInputToLangChainTool");
|
|
718
|
+
/**
 * Bridges a value returned from a LangChain chain onto the CopilotKit event
 * stream. Accepted result shapes: a plain string, an AIMessage, a
 * BaseMessageChunk, a ReadableStream of message chunks, or (when an action
 * execution is pending) any JSON-serializable value. `eventStream$.complete()`
 * is always called at the end of the non-throwing paths.
 *
 * @param {Object} param0
 * @param {*} param0.result - the chain's return value (see shapes above).
 * @param {*} param0.eventStream$ - event stream to emit message/action events on.
 * @param {*} [param0.actionExecution] - optional `{ id, name }` of a pending
 *   action; string/object results are then reported as that action's result.
 * @throws {Error} when `result` has an unsupported type and no action is pending.
 */
async function streamLangChainResponse({ result, eventStream$, actionExecution }) {
  var _a, _b, _c, _d, _e, _f, _g, _h;
  if (typeof result === "string") {
    if (!actionExecution) {
      // Bare string with no pending action: emit as one complete text message.
      eventStream$.sendTextMessage((0, import_nanoid2.nanoid)(), result);
    } else {
      // A string produced while an action is pending is that action's result.
      eventStream$.sendActionExecutionResult(actionExecution.id, actionExecution.name, result);
    }
  } else if (result instanceof import_messages.AIMessage) {
    // Fully materialized AIMessage: forward its content and each tool call.
    if (result.content) {
      eventStream$.sendTextMessage((0, import_nanoid2.nanoid)(), result.content);
    }
    for (const toolCall of result.tool_calls) {
      eventStream$.sendActionExecution(toolCall.id || (0, import_nanoid2.nanoid)(), toolCall.name, JSON.stringify(toolCall.args));
    }
  } else if (result instanceof import_messages.BaseMessageChunk) {
    // Single message chunk: content / tool_calls are read off lc_kwargs.
    if ((_a = result.lc_kwargs) == null ? void 0 : _a.content) {
      eventStream$.sendTextMessage((0, import_nanoid2.nanoid)(), result.content);
    }
    if ((_b = result.lc_kwargs) == null ? void 0 : _b.tool_calls) {
      for (const toolCall of (_c = result.lc_kwargs) == null ? void 0 : _c.tool_calls) {
        eventStream$.sendActionExecution(toolCall.id || (0, import_nanoid2.nanoid)(), toolCall.name, JSON.stringify(toolCall.args));
      }
    }
  } else if ("getReader" in result) {
    // ReadableStream of chunks: drive a small state machine. `mode` tracks
    // whether we are inside a text message ("message"), inside a tool call
    // ("function"), or neither (null), so start/end events pair up correctly.
    let reader = result.getReader();
    let mode = null;
    while (true) {
      try {
        const { done, value } = await reader.read();
        let toolCallName = void 0;
        let toolCallId = void 0;
        let toolCallArgs = void 0;
        let hasToolCall = false;
        // BUGFIX: when the stream is exhausted, reader.read() resolves with
        // { done: true, value: undefined } (ReadableStream reader contract),
        // so the previously unguarded `value.content` threw a TypeError that
        // the catch below swallowed — the closing sendTextMessageEnd /
        // sendActionExecutionEnd events were never emitted. Guard the access
        // so the final end events fire before the loop exits.
        let content = value == null ? void 0 : value.content;
        if (value instanceof import_messages.AIMessageChunk) {
          let chunk = (_d = value.tool_call_chunks) == null ? void 0 : _d[0];
          toolCallName = chunk == null ? void 0 : chunk.name;
          toolCallId = chunk == null ? void 0 : chunk.id;
          toolCallArgs = chunk == null ? void 0 : chunk.args;
          hasToolCall = chunk != void 0;
        } else if (value instanceof import_messages.BaseMessageChunk) {
          // These chunks carry tool calls in additional_kwargs with a
          // { function: { name, arguments } } shape.
          let chunk = (_f = (_e = value.additional_kwargs) == null ? void 0 : _e.tool_calls) == null ? void 0 : _f[0];
          toolCallName = (_g = chunk == null ? void 0 : chunk.function) == null ? void 0 : _g.name;
          toolCallId = chunk == null ? void 0 : chunk.id;
          toolCallArgs = (_h = chunk == null ? void 0 : chunk.function) == null ? void 0 : _h.arguments;
          hasToolCall = (chunk == null ? void 0 : chunk.function) != void 0;
        }
        // Close the in-progress message/tool-call when the stream switches
        // kind or finishes.
        if (mode === "message" && (toolCallId || done)) {
          mode = null;
          eventStream$.sendTextMessageEnd();
        } else if (mode === "function" && (!hasToolCall || done)) {
          mode = null;
          eventStream$.sendActionExecutionEnd();
        }
        if (done) {
          break;
        }
        // Open a new message/tool-call if none is in progress.
        if (mode === null) {
          if (hasToolCall) {
            mode = "function";
            eventStream$.sendActionExecutionStart(toolCallId, toolCallName);
          } else if (content) {
            mode = "message";
            eventStream$.sendTextMessageStart((0, import_nanoid2.nanoid)());
          }
        }
        // Forward this chunk's payload to whichever event is open.
        if (mode === "message" && content) {
          eventStream$.sendTextMessageContent(content);
        } else if (mode === "function" && toolCallArgs) {
          eventStream$.sendActionExecutionArgs(toolCallArgs);
        }
      } catch (error) {
        // Best-effort: log and stop consuming the stream on a read failure.
        console.error("Error reading from stream", error);
        break;
      }
    }
  } else if (actionExecution) {
    // Any other value while an action is pending: serialize it as the result.
    eventStream$.sendActionExecutionResult(actionExecution.id, actionExecution.name, JSON.stringify(result));
  } else {
    throw new Error("Invalid return type from LangChain function.");
  }
  eventStream$.complete();
}
__name(streamLangChainResponse, "streamLangChainResponse");
|
|
803
|
+
|
|
804
|
+
// src/service-adapters/langchain/langchain-adapter.ts
|
|
805
|
+
var LangChainAdapter = class {
  options;
  /**
   * Builds an adapter that delegates each request to the user-supplied
   * `chainFn`, which contains the custom LangChain logic.
   */
  constructor(options) {
    this.options = options;
  }
  /**
   * Converts incoming messages and actions to their LangChain shapes,
   * invokes the chain, and streams whatever it returns over the event source.
   */
  async process({ eventSource, model, actions, messages, threadId, runId }) {
    const chainInput = {
      messages: messages.map(convertMessageToLangChainMessage),
      tools: actions.map(convertActionInputToLangChainTool),
      model,
      threadId,
      runId
    };
    const chainResult = await this.options.chainFn(chainInput);
    eventSource.stream(async (eventStream$) => {
      await streamLangChainResponse({ result: chainResult, eventStream$ });
    });
    return {};
  }
};
__name(LangChainAdapter, "LangChainAdapter");
|
|
831
|
+
|
|
832
|
+
// src/service-adapters/langchain/langserve.ts
|
|
833
|
+
var import_remote = require("langchain/runnables/remote");
|
|
834
|
+
var RemoteChain = class {
  /**
   * Wraps a remote LangServe chain endpoint so it can be exposed as an action.
   * `parameterType` defaults to "multi" (args object passed through as-is);
   * "single" unwraps the first argument value as the chain input.
   */
  constructor(options) {
    const { name, description, chainUrl, parameters, parameterType } = options;
    this.name = name;
    this.description = description;
    this.chainUrl = chainUrl;
    this.parameters = parameters;
    this.parameterType = parameterType || "multi";
  }
  /**
   * Produces the action descriptor for this chain. If no parameters were
   * supplied, they are inferred from the chain's published input schema first.
   */
  async toAction() {
    if (!this.parameters) {
      await this.inferLangServeParameters();
    }
    return {
      name: this.name,
      description: this.description,
      parameters: this.parameters,
      handler: async (args) => {
        const runnable = new import_remote.RemoteRunnable({ url: this.chainUrl });
        // "single" chains take one bare value; "multi" chains take the args object.
        const input = this.parameterType === "single" ? args[Object.keys(args)[0]] : args;
        return await runnable.invoke(input);
      }
    };
  }
  /**
   * Fetches the chain's /input_schema and derives `parameters` and
   * `parameterType` from it. Only string/number/boolean leaves are supported.
   * @throws {Error} when the schema cannot be fetched or uses other types.
   */
  async inferLangServeParameters() {
    const supportedTypes = ["string", "number", "boolean"];
    const schemaUrl = this.chainUrl.replace(/\/+$/, "") + "/input_schema";
    let schema;
    try {
      const response = await fetch(schemaUrl);
      schema = await response.json();
    } catch {
      throw new Error("Failed to fetch langserve schema at " + schemaUrl);
    }
    if (supportedTypes.includes(schema.type)) {
      // A scalar schema means the chain takes a single bare input value.
      this.parameterType = "single";
      this.parameters = [
        {
          name: "input",
          type: schema.type,
          description: "The input to the chain"
        }
      ];
      return;
    }
    if (schema.type !== "object") {
      throw new Error("Unsupported schema type");
    }
    this.parameterType = "multi";
    this.parameters = Object.keys(schema.properties).map((key) => {
      const property = schema.properties[key];
      if (!supportedTypes.includes(property.type)) {
        throw new Error("Unsupported schema type");
      }
      return {
        name: key,
        type: property.type,
        description: property.description || "",
        required: (schema.required == null ? void 0 : schema.required.includes(key)) || false
      };
    });
  }
};
|
|
903
|
+
// Bundler keep-names helper call (presumably esbuild's `__name`): preserves
// the class's original `.name` after minification — confirm upstream.
__name(RemoteChain, "RemoteChain");
|
|
904
|
+
// Annotate the CommonJS export names for ESM import in node:
// NOTE(review): the `0 &&` makes this assignment dead code at runtime; the
// literal exists only so static analyzers (presumably Node's cjs-module-lexer)
// can detect the named exports for `import { X } from ...` — do not remove.
0 && (module.exports = {
  GoogleGenerativeAIAdapter,
  LangChainAdapter,
  OpenAIAdapter,
  OpenAIAssistantAdapter,
  RemoteChain
});
//# sourceMappingURL=index.js.map
|