@chatluna/v1-shared-adapter 1.0.25 → 1.0.27
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/index.cjs +33 -3
- package/lib/index.mjs +33 -3
- package/lib/utils.d.ts +2 -2
- package/package.json +2 -2
package/lib/index.cjs
CHANGED
|
@@ -197,11 +197,11 @@ async function langchainMessageToOpenAIMessage(messages, plugin, model, supportI
|
|
|
197
197
|
for (const rawMessage of messages) {
|
|
198
198
|
const role = messageTypeToOpenAIRole(rawMessage.getType());
|
|
199
199
|
const msg = {
|
|
200
|
-
content: rawMessage.content,
|
|
200
|
+
content: rawMessage.content === "" ? null : rawMessage.content,
|
|
201
201
|
name: role === "assistant" || role === "tool" ? rawMessage.name : void 0,
|
|
202
202
|
role,
|
|
203
203
|
// function_call: rawMessage.additional_kwargs.function_call,
|
|
204
|
-
tool_call_id: rawMessage.tool_call_id
|
|
204
|
+
tool_call_id: rawMessage.tool_call_id || void 0
|
|
205
205
|
};
|
|
206
206
|
if (msg.tool_calls == null) {
|
|
207
207
|
delete msg.tool_calls;
|
|
@@ -224,7 +224,7 @@ async function langchainMessageToOpenAIMessage(messages, plugin, model, supportI
|
|
|
224
224
|
}
|
|
225
225
|
const images = rawMessage.additional_kwargs.images;
|
|
226
226
|
const lowerModel = normalizedModel?.toLowerCase() ?? "";
|
|
227
|
-
if ((lowerModel?.includes("vision") || lowerModel?.includes("gpt-4o") || lowerModel?.includes("claude") || lowerModel?.includes("gemini") || lowerModel?.includes("qwen-vl") || lowerModel?.includes("omni") || lowerModel?.includes("qwen2.5-vl") || lowerModel?.includes("qwen2.5-omni") || lowerModel?.includes("qwen-omni") || lowerModel?.includes("qwen2-vl") || lowerModel?.includes("qvq") || normalizedModel?.includes("o1") || normalizedModel?.includes("o4") || normalizedModel?.includes("o3") || normalizedModel?.includes("gpt-4.1") || normalizedModel?.includes("gpt-5") || supportImageInput2) && images != null) {
|
|
227
|
+
if ((lowerModel?.includes("vision") || lowerModel?.includes("gpt-4o") || lowerModel?.includes("claude") || lowerModel?.includes("gemini") || lowerModel?.includes("qwen-vl") || lowerModel?.includes("omni") || lowerModel?.includes("qwen2.5-vl") || lowerModel?.includes("qwen2.5-omni") || lowerModel?.includes("qwen-omni") || lowerModel?.includes("qwen2-vl") || lowerModel?.includes("qwen3.5") || lowerModel?.includes("qvq") || normalizedModel?.includes("o1") || normalizedModel?.includes("o4") || normalizedModel?.includes("o3") || normalizedModel?.includes("gpt-4.1") || normalizedModel?.includes("gpt-5") || supportImageInput2) && images != null) {
|
|
228
228
|
msg.content = [
|
|
229
229
|
{
|
|
230
230
|
type: "text",
|
|
@@ -275,6 +275,36 @@ async function langchainMessageToOpenAIMessage(messages, plugin, model, supportI
|
|
|
275
275
|
}
|
|
276
276
|
result.push(msg);
|
|
277
277
|
}
|
|
278
|
+
for (let i = 0; i < result.length; i++) {
|
|
279
|
+
if (result[i].role !== "assistant") continue;
|
|
280
|
+
const assistantMsg = result[i];
|
|
281
|
+
const toolMessages = [];
|
|
282
|
+
for (let j = i + 1; j < result.length && result[j].role === "tool"; j++) {
|
|
283
|
+
toolMessages.push(result[j]);
|
|
284
|
+
}
|
|
285
|
+
if (toolMessages.length === 0) continue;
|
|
286
|
+
if (!assistantMsg.tool_calls) {
|
|
287
|
+
assistantMsg.tool_calls = [];
|
|
288
|
+
}
|
|
289
|
+
for (let k = 0; k < toolMessages.length; k++) {
|
|
290
|
+
if (!assistantMsg.tool_calls[k]) {
|
|
291
|
+
assistantMsg.tool_calls[k] = {
|
|
292
|
+
id: `call_${k}`,
|
|
293
|
+
type: "function",
|
|
294
|
+
function: {
|
|
295
|
+
name: toolMessages[k].name || "unknown",
|
|
296
|
+
arguments: "{}"
|
|
297
|
+
}
|
|
298
|
+
};
|
|
299
|
+
}
|
|
300
|
+
if (!assistantMsg.tool_calls[k].id) {
|
|
301
|
+
assistantMsg.tool_calls[k].id = `call_${k}`;
|
|
302
|
+
}
|
|
303
|
+
if (!toolMessages[k].tool_call_id) {
|
|
304
|
+
toolMessages[k].tool_call_id = assistantMsg.tool_calls[k].id;
|
|
305
|
+
}
|
|
306
|
+
}
|
|
307
|
+
}
|
|
278
308
|
if (removeSystemMessage) {
|
|
279
309
|
return transformSystemMessages(result);
|
|
280
310
|
}
|
package/lib/index.mjs
CHANGED
|
@@ -161,11 +161,11 @@ async function langchainMessageToOpenAIMessage(messages, plugin, model, supportI
|
|
|
161
161
|
for (const rawMessage of messages) {
|
|
162
162
|
const role = messageTypeToOpenAIRole(rawMessage.getType());
|
|
163
163
|
const msg = {
|
|
164
|
-
content: rawMessage.content,
|
|
164
|
+
content: rawMessage.content === "" ? null : rawMessage.content,
|
|
165
165
|
name: role === "assistant" || role === "tool" ? rawMessage.name : void 0,
|
|
166
166
|
role,
|
|
167
167
|
// function_call: rawMessage.additional_kwargs.function_call,
|
|
168
|
-
tool_call_id: rawMessage.tool_call_id
|
|
168
|
+
tool_call_id: rawMessage.tool_call_id || void 0
|
|
169
169
|
};
|
|
170
170
|
if (msg.tool_calls == null) {
|
|
171
171
|
delete msg.tool_calls;
|
|
@@ -188,7 +188,7 @@ async function langchainMessageToOpenAIMessage(messages, plugin, model, supportI
|
|
|
188
188
|
}
|
|
189
189
|
const images = rawMessage.additional_kwargs.images;
|
|
190
190
|
const lowerModel = normalizedModel?.toLowerCase() ?? "";
|
|
191
|
-
if ((lowerModel?.includes("vision") || lowerModel?.includes("gpt-4o") || lowerModel?.includes("claude") || lowerModel?.includes("gemini") || lowerModel?.includes("qwen-vl") || lowerModel?.includes("omni") || lowerModel?.includes("qwen2.5-vl") || lowerModel?.includes("qwen2.5-omni") || lowerModel?.includes("qwen-omni") || lowerModel?.includes("qwen2-vl") || lowerModel?.includes("qvq") || normalizedModel?.includes("o1") || normalizedModel?.includes("o4") || normalizedModel?.includes("o3") || normalizedModel?.includes("gpt-4.1") || normalizedModel?.includes("gpt-5") || supportImageInput2) && images != null) {
|
|
191
|
+
if ((lowerModel?.includes("vision") || lowerModel?.includes("gpt-4o") || lowerModel?.includes("claude") || lowerModel?.includes("gemini") || lowerModel?.includes("qwen-vl") || lowerModel?.includes("omni") || lowerModel?.includes("qwen2.5-vl") || lowerModel?.includes("qwen2.5-omni") || lowerModel?.includes("qwen-omni") || lowerModel?.includes("qwen2-vl") || lowerModel?.includes("qwen3.5") || lowerModel?.includes("qvq") || normalizedModel?.includes("o1") || normalizedModel?.includes("o4") || normalizedModel?.includes("o3") || normalizedModel?.includes("gpt-4.1") || normalizedModel?.includes("gpt-5") || supportImageInput2) && images != null) {
|
|
192
192
|
msg.content = [
|
|
193
193
|
{
|
|
194
194
|
type: "text",
|
|
@@ -239,6 +239,36 @@ async function langchainMessageToOpenAIMessage(messages, plugin, model, supportI
|
|
|
239
239
|
}
|
|
240
240
|
result.push(msg);
|
|
241
241
|
}
|
|
242
|
+
for (let i = 0; i < result.length; i++) {
|
|
243
|
+
if (result[i].role !== "assistant") continue;
|
|
244
|
+
const assistantMsg = result[i];
|
|
245
|
+
const toolMessages = [];
|
|
246
|
+
for (let j = i + 1; j < result.length && result[j].role === "tool"; j++) {
|
|
247
|
+
toolMessages.push(result[j]);
|
|
248
|
+
}
|
|
249
|
+
if (toolMessages.length === 0) continue;
|
|
250
|
+
if (!assistantMsg.tool_calls) {
|
|
251
|
+
assistantMsg.tool_calls = [];
|
|
252
|
+
}
|
|
253
|
+
for (let k = 0; k < toolMessages.length; k++) {
|
|
254
|
+
if (!assistantMsg.tool_calls[k]) {
|
|
255
|
+
assistantMsg.tool_calls[k] = {
|
|
256
|
+
id: `call_${k}`,
|
|
257
|
+
type: "function",
|
|
258
|
+
function: {
|
|
259
|
+
name: toolMessages[k].name || "unknown",
|
|
260
|
+
arguments: "{}"
|
|
261
|
+
}
|
|
262
|
+
};
|
|
263
|
+
}
|
|
264
|
+
if (!assistantMsg.tool_calls[k].id) {
|
|
265
|
+
assistantMsg.tool_calls[k].id = `call_${k}`;
|
|
266
|
+
}
|
|
267
|
+
if (!toolMessages[k].tool_call_id) {
|
|
268
|
+
toolMessages[k].tool_call_id = assistantMsg.tool_calls[k].id;
|
|
269
|
+
}
|
|
270
|
+
}
|
|
271
|
+
}
|
|
242
272
|
if (removeSystemMessage) {
|
|
243
273
|
return transformSystemMessages(result);
|
|
244
274
|
}
|
package/lib/utils.d.ts
CHANGED
|
@@ -11,5 +11,5 @@ export declare function messageTypeToOpenAIRole(type: MessageType): ChatCompleti
|
|
|
11
11
|
export declare function formatToolsToOpenAITools(tools: StructuredTool[], includeGoogleSearch: boolean): ChatCompletionTool[];
|
|
12
12
|
export declare function formatToolToOpenAITool(tool: StructuredTool): ChatCompletionTool;
|
|
13
13
|
export declare function removeAdditionalProperties(schema: JsonSchema7Type): JsonSchema7Type;
|
|
14
|
-
export declare function convertMessageToMessageChunk(message: ChatCompletionResponseMessage):
|
|
15
|
-
export declare function convertDeltaToMessageChunk(delta: Record<string, any>, defaultRole?: ChatCompletionResponseMessageRoleEnum):
|
|
14
|
+
export declare function convertMessageToMessageChunk(message: ChatCompletionResponseMessage): HumanMessageChunk | AIMessageChunk | SystemMessageChunk | FunctionMessageChunk | ToolMessageChunk | ChatMessageChunk;
|
|
15
|
+
export declare function convertDeltaToMessageChunk(delta: Record<string, any>, defaultRole?: ChatCompletionResponseMessageRoleEnum): HumanMessageChunk | AIMessageChunk | SystemMessageChunk | FunctionMessageChunk | ToolMessageChunk | ChatMessageChunk;
|
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@chatluna/v1-shared-adapter",
|
|
3
3
|
"description": "chatluna shared adapter",
|
|
4
|
-
"version": "1.0.25",
|
|
4
|
+
"version": "1.0.27",
|
|
5
5
|
"main": "lib/index.cjs",
|
|
6
6
|
"module": "lib/index.mjs",
|
|
7
7
|
"typings": "lib/index.d.ts",
|
|
@@ -70,6 +70,6 @@
|
|
|
70
70
|
},
|
|
71
71
|
"peerDependencies": {
|
|
72
72
|
"koishi": "^4.18.9",
|
|
73
|
-
"koishi-plugin-chatluna": "^1.3.
|
|
73
|
+
"koishi-plugin-chatluna": "^1.3.22"
|
|
74
74
|
}
|
|
75
75
|
}
|