@chatluna/v1-shared-adapter 1.0.25 → 1.0.26

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/index.cjs CHANGED
@@ -194,14 +194,15 @@ async function langchainMessageToOpenAIMessage(messages, plugin, model, supportI
   const result = [];
   const normalizedModel = model ? normalizeOpenAIModelName(model) : model;
   const isDeepseekThinkModel = normalizedModel?.includes("deepseek-reasoner");
+  console.log(messages);
   for (const rawMessage of messages) {
     const role = messageTypeToOpenAIRole(rawMessage.getType());
     const msg = {
-      content: rawMessage.content,
+      content: rawMessage.content === "" ? null : rawMessage.content,
       name: role === "assistant" || role === "tool" ? rawMessage.name : void 0,
       role,
       // function_call: rawMessage.additional_kwargs.function_call,
-      tool_call_id: rawMessage.tool_call_id
+      tool_call_id: rawMessage.tool_call_id || void 0
     };
     if (msg.tool_calls == null) {
       delete msg.tool_calls;
@@ -224,7 +225,7 @@ async function langchainMessageToOpenAIMessage(messages, plugin, model, supportI
     }
     const images = rawMessage.additional_kwargs.images;
     const lowerModel = normalizedModel?.toLowerCase() ?? "";
-    if ((lowerModel?.includes("vision") || lowerModel?.includes("gpt-4o") || lowerModel?.includes("claude") || lowerModel?.includes("gemini") || lowerModel?.includes("qwen-vl") || lowerModel?.includes("omni") || lowerModel?.includes("qwen2.5-vl") || lowerModel?.includes("qwen2.5-omni") || lowerModel?.includes("qwen-omni") || lowerModel?.includes("qwen2-vl") || lowerModel?.includes("qvq") || normalizedModel?.includes("o1") || normalizedModel?.includes("o4") || normalizedModel?.includes("o3") || normalizedModel?.includes("gpt-4.1") || normalizedModel?.includes("gpt-5") || supportImageInput2) && images != null) {
+    if ((lowerModel?.includes("vision") || lowerModel?.includes("gpt-4o") || lowerModel?.includes("claude") || lowerModel?.includes("gemini") || lowerModel?.includes("qwen-vl") || lowerModel?.includes("omni") || lowerModel?.includes("qwen2.5-vl") || lowerModel?.includes("qwen2.5-omni") || lowerModel?.includes("qwen-omni") || lowerModel?.includes("qwen2-vl") || lowerModel?.includes("qwen3.5") || lowerModel?.includes("qvq") || normalizedModel?.includes("o1") || normalizedModel?.includes("o4") || normalizedModel?.includes("o3") || normalizedModel?.includes("gpt-4.1") || normalizedModel?.includes("gpt-5") || supportImageInput2) && images != null) {
       msg.content = [
         {
           type: "text",
@@ -275,6 +276,36 @@ async function langchainMessageToOpenAIMessage(messages, plugin, model, supportI
     }
     result.push(msg);
   }
+  for (let i = 0; i < result.length; i++) {
+    if (result[i].role !== "assistant") continue;
+    const assistantMsg = result[i];
+    const toolMessages = [];
+    for (let j = i + 1; j < result.length && result[j].role === "tool"; j++) {
+      toolMessages.push(result[j]);
+    }
+    if (toolMessages.length === 0) continue;
+    if (!assistantMsg.tool_calls) {
+      assistantMsg.tool_calls = [];
+    }
+    for (let k = 0; k < toolMessages.length; k++) {
+      if (!assistantMsg.tool_calls[k]) {
+        assistantMsg.tool_calls[k] = {
+          id: `call_${k}`,
+          type: "function",
+          function: {
+            name: toolMessages[k].name || "unknown",
+            arguments: "{}"
+          }
+        };
+      }
+      if (!assistantMsg.tool_calls[k].id) {
+        assistantMsg.tool_calls[k].id = `call_${k}`;
+      }
+      if (!toolMessages[k].tool_call_id) {
+        toolMessages[k].tool_call_id = assistantMsg.tool_calls[k].id;
+      }
+    }
+  }
   if (removeSystemMessage) {
     return transformSystemMessages(result);
   }
package/lib/index.mjs CHANGED
@@ -158,14 +158,15 @@ async function langchainMessageToOpenAIMessage(messages, plugin, model, supportI
   const result = [];
   const normalizedModel = model ? normalizeOpenAIModelName(model) : model;
   const isDeepseekThinkModel = normalizedModel?.includes("deepseek-reasoner");
+  console.log(messages);
   for (const rawMessage of messages) {
     const role = messageTypeToOpenAIRole(rawMessage.getType());
     const msg = {
-      content: rawMessage.content,
+      content: rawMessage.content === "" ? null : rawMessage.content,
       name: role === "assistant" || role === "tool" ? rawMessage.name : void 0,
       role,
       // function_call: rawMessage.additional_kwargs.function_call,
-      tool_call_id: rawMessage.tool_call_id
+      tool_call_id: rawMessage.tool_call_id || void 0
     };
     if (msg.tool_calls == null) {
       delete msg.tool_calls;
@@ -188,7 +189,7 @@ async function langchainMessageToOpenAIMessage(messages, plugin, model, supportI
     }
     const images = rawMessage.additional_kwargs.images;
     const lowerModel = normalizedModel?.toLowerCase() ?? "";
-    if ((lowerModel?.includes("vision") || lowerModel?.includes("gpt-4o") || lowerModel?.includes("claude") || lowerModel?.includes("gemini") || lowerModel?.includes("qwen-vl") || lowerModel?.includes("omni") || lowerModel?.includes("qwen2.5-vl") || lowerModel?.includes("qwen2.5-omni") || lowerModel?.includes("qwen-omni") || lowerModel?.includes("qwen2-vl") || lowerModel?.includes("qvq") || normalizedModel?.includes("o1") || normalizedModel?.includes("o4") || normalizedModel?.includes("o3") || normalizedModel?.includes("gpt-4.1") || normalizedModel?.includes("gpt-5") || supportImageInput2) && images != null) {
+    if ((lowerModel?.includes("vision") || lowerModel?.includes("gpt-4o") || lowerModel?.includes("claude") || lowerModel?.includes("gemini") || lowerModel?.includes("qwen-vl") || lowerModel?.includes("omni") || lowerModel?.includes("qwen2.5-vl") || lowerModel?.includes("qwen2.5-omni") || lowerModel?.includes("qwen-omni") || lowerModel?.includes("qwen2-vl") || lowerModel?.includes("qwen3.5") || lowerModel?.includes("qvq") || normalizedModel?.includes("o1") || normalizedModel?.includes("o4") || normalizedModel?.includes("o3") || normalizedModel?.includes("gpt-4.1") || normalizedModel?.includes("gpt-5") || supportImageInput2) && images != null) {
       msg.content = [
         {
           type: "text",
@@ -239,6 +240,36 @@ async function langchainMessageToOpenAIMessage(messages, plugin, model, supportI
     }
     result.push(msg);
   }
+  for (let i = 0; i < result.length; i++) {
+    if (result[i].role !== "assistant") continue;
+    const assistantMsg = result[i];
+    const toolMessages = [];
+    for (let j = i + 1; j < result.length && result[j].role === "tool"; j++) {
+      toolMessages.push(result[j]);
+    }
+    if (toolMessages.length === 0) continue;
+    if (!assistantMsg.tool_calls) {
+      assistantMsg.tool_calls = [];
+    }
+    for (let k = 0; k < toolMessages.length; k++) {
+      if (!assistantMsg.tool_calls[k]) {
+        assistantMsg.tool_calls[k] = {
+          id: `call_${k}`,
+          type: "function",
+          function: {
+            name: toolMessages[k].name || "unknown",
+            arguments: "{}"
+          }
+        };
+      }
+      if (!assistantMsg.tool_calls[k].id) {
+        assistantMsg.tool_calls[k].id = `call_${k}`;
+      }
+      if (!toolMessages[k].tool_call_id) {
+        toolMessages[k].tool_call_id = assistantMsg.tool_calls[k].id;
+      }
+    }
+  }
   if (removeSystemMessage) {
     return transformSystemMessages(result);
   }
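
The largest change in both index builds is a post-processing pass that pairs each assistant message with the run of tool messages immediately following it, then backfills any missing tool_calls entries and tool_call_id links before the payload is returned. The following standalone sketch restates that pass under illustrative names; Msg and backfillToolCalls are not package exports, and the published code runs this logic inline inside langchainMessageToOpenAIMessage:

// Sketch only: `Msg` and `backfillToolCalls` are illustrative, not part of the package API.
interface Msg {
    role: "system" | "user" | "assistant" | "tool";
    content: string | null;
    name?: string;
    tool_call_id?: string;
    tool_calls?: {
        id: string;
        type: "function";
        function: { name: string; arguments: string };
    }[];
}

function backfillToolCalls(result: Msg[]): Msg[] {
    for (let i = 0; i < result.length; i++) {
        if (result[i].role !== "assistant") continue;
        const assistantMsg = result[i];
        // Collect the tool replies that directly follow this assistant turn.
        const toolMessages: Msg[] = [];
        for (let j = i + 1; j < result.length && result[j].role === "tool"; j++) {
            toolMessages.push(result[j]);
        }
        if (toolMessages.length === 0) continue;
        if (!assistantMsg.tool_calls) assistantMsg.tool_calls = [];
        const calls = assistantMsg.tool_calls;
        // Synthesize a tool_call per reply when absent, then mirror its id onto the reply.
        for (let k = 0; k < toolMessages.length; k++) {
            if (!calls[k]) {
                calls[k] = {
                    id: `call_${k}`,
                    type: "function",
                    function: { name: toolMessages[k].name || "unknown", arguments: "{}" }
                };
            }
            if (!calls[k].id) calls[k].id = `call_${k}`;
            if (!toolMessages[k].tool_call_id) toolMessages[k].tool_call_id = calls[k].id;
        }
    }
    return result;
}

// Example: a dangling tool reply gains a synthetic pairing with the preceding assistant turn.
console.log(backfillToolCalls([
    { role: "assistant", content: null },
    { role: "tool", content: "42", name: "calculator" }
]));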
package/lib/utils.d.ts CHANGED
@@ -11,5 +11,5 @@ export declare function messageTypeToOpenAIRole(type: MessageType): ChatCompleti
 export declare function formatToolsToOpenAITools(tools: StructuredTool[], includeGoogleSearch: boolean): ChatCompletionTool[];
 export declare function formatToolToOpenAITool(tool: StructuredTool): ChatCompletionTool;
 export declare function removeAdditionalProperties(schema: JsonSchema7Type): JsonSchema7Type;
-export declare function convertMessageToMessageChunk(message: ChatCompletionResponseMessage): AIMessageChunk | HumanMessageChunk | SystemMessageChunk | FunctionMessageChunk | ToolMessageChunk | ChatMessageChunk;
-export declare function convertDeltaToMessageChunk(delta: Record<string, any>, defaultRole?: ChatCompletionResponseMessageRoleEnum): AIMessageChunk | HumanMessageChunk | SystemMessageChunk | FunctionMessageChunk | ToolMessageChunk | ChatMessageChunk;
+export declare function convertMessageToMessageChunk(message: ChatCompletionResponseMessage): HumanMessageChunk | AIMessageChunk | SystemMessageChunk | FunctionMessageChunk | ToolMessageChunk | ChatMessageChunk;
+export declare function convertDeltaToMessageChunk(delta: Record<string, any>, defaultRole?: ChatCompletionResponseMessageRoleEnum): HumanMessageChunk | AIMessageChunk | SystemMessageChunk | FunctionMessageChunk | ToolMessageChunk | ChatMessageChunk;
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@chatluna/v1-shared-adapter",
   "description": "chatluna shared adapter",
-  "version": "1.0.25",
+  "version": "1.0.26",
   "main": "lib/index.cjs",
   "module": "lib/index.mjs",
   "typings": "lib/index.d.ts",