koishi-plugin-chatluna-google-gemini-adapter 1.0.0-beta.3 → 1.0.0-beta.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/client.js +2 -1
- package/lib/index.d.ts +1 -1
- package/lib/index.js +1 -1
- package/lib/requester.d.ts +1 -1
- package/lib/requester.js +69 -20
- package/lib/types.d.ts +26 -2
- package/lib/utils.d.ts +8 -4
- package/lib/utils.js +149 -12
- package/package.json +6 -4
package/lib/client.js
CHANGED
@@ -30,10 +30,11 @@ class GeminiClient extends client_1.PlatformModelAndEmbeddingsClient {
             .map((model) => {
             return {
                 name: model,
+                maxTokens: model.includes('vision') ? 12288 : 30720,
                 type: model.includes('embedding')
                     ? types_1.ModelType.embeddings
                     : types_1.ModelType.llm,
-                functionCall:
+                functionCall: !model.includes('vision'),
                 supportMode: ['all']
             };
         });
package/lib/index.d.ts
CHANGED
@@ -9,4 +9,4 @@ export interface Config extends ChatLunaPlugin.Config {
 }
 export declare const Config: Schema<Config>;
 export declare const inject: string[];
-export declare const name = "chatluna-gemini-adapter";
+export declare const name = "chatluna-google-gemini-adapter";
package/lib/index.js
CHANGED
package/lib/requester.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import { EmbeddingsRequester, EmbeddingsRequestParams, ModelRequester, ModelRequestParams } from 'koishi-plugin-chatluna/lib/llm-core/platform/api';
 import { ClientConfig } from 'koishi-plugin-chatluna/lib/llm-core/platform/config';
-import { ChatGenerationChunk } from 'langchain/
+import { ChatGenerationChunk } from '@langchain/core/outputs';
 export declare class GeminiRequester extends ModelRequester implements EmbeddingsRequester {
     private _config;
     constructor(_config: ClientConfig);
package/lib/requester.js
CHANGED
@@ -2,7 +2,8 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.GeminiRequester = void 0;
 const api_1 = require("koishi-plugin-chatluna/lib/llm-core/platform/api");
-const
+const messages_1 = require("@langchain/core/messages");
+const outputs_1 = require("@langchain/core/outputs");
 const error_1 = require("koishi-plugin-chatluna/lib/utils/error");
 const sse_1 = require("koishi-plugin-chatluna/lib/utils/sse");
 const utils_1 = require("./utils");
@@ -19,23 +20,23 @@ class GeminiRequester extends api_1.ModelRequester {
     async *completionStream(params) {
         try {
             const response = await this._post(`models/${params.model}:streamGenerateContent`, {
-                contents: (0, utils_1.langchainMessageToGeminiMessage)(params.input, params.model),
+                contents: await (0, utils_1.langchainMessageToGeminiMessage)(params.input, params.model),
                 safetySettings: [
                     {
                         category: 'HARM_CATEGORY_HARASSMENT',
-                        threshold: '
+                        threshold: 'BLOCK_NONE'
                     },
                     {
                         category: 'HARM_CATEGORY_HATE_SPEECH',
-                        threshold: '
+                        threshold: 'BLOCK_NONE'
                     },
                     {
                         category: 'HARM_CATEGORY_SEXUALLY_EXPLICIT',
-                        threshold: '
+                        threshold: 'BLOCK_NONE'
                     },
                     {
                         category: 'HARM_CATEGORY_DANGEROUS_CONTENT',
-                        threshold: '
+                        threshold: 'BLOCK_NONE'
                     }
                 ],
                 generationConfig: {
@@ -45,7 +46,12 @@ class GeminiRequester extends api_1.ModelRequester {
                         ? undefined
                         : params.maxTokens,
                     topP: params.topP
-                }
+                },
+                tools: !params.model.includes('vision') && params.tools != null
+                    ? {
+                        functionDeclarations: (0, utils_1.formatToolsToGeminiAITools)(params.tools)
+                    }
+                    : undefined
             }, {
                 signal: params.signal
             });
@@ -55,35 +61,79 @@ class GeminiRequester extends api_1.ModelRequester {
             const jsonParser = new json_1.JSONParser();
             const writable = stream.writable.getWriter();
             jsonParser.onEnd = async () => {
-                await writable.
+                await writable.close();
             };
             jsonParser.onValue = async ({ value }) => {
                 const transformValue = value;
                 if (transformValue.candidates && transformValue.candidates[0]) {
-                    const parts = transformValue.candidates[0].content
-                        .parts;
+                    const parts = transformValue.candidates[0].content.parts;
                     if (parts.length < 1) {
                         throw new Error(JSON.stringify(value));
                     }
-                    const
-
-                    await writable.write(text);
+                    for (const part of parts) {
+                        await writable.write(part);
                     }
                 }
             };
             await (0, sse_1.sse)(response, async (rawData) => {
                 jsonParser.write(rawData);
                 return true;
-            });
+            }, 10);
             let content = '';
+            let isVisionModel = params.model.includes('vision');
+            const functionCall = {
+                name: '',
+                args: '',
+                arguments: ''
+            };
             for await (const chunk of iterable) {
-
-
+                const messagePart = (0, utils_1.partAsType)(chunk);
+                const chatFunctionCallingPart = (0, utils_1.partAsType)(chunk);
+                if (messagePart.text) {
+                    content += messagePart.text;
+                    // match /w*model:
+                    if (isVisionModel && /\s*model:\s*/.test(content)) {
+                        isVisionModel = false;
+                        content = content.replace(/\s*model:\s*/, '');
+                    }
+                }
+                if (chatFunctionCallingPart.functionCall) {
+                    const deltaFunctionCall = chatFunctionCallingPart.functionCall;
+                    if (deltaFunctionCall) {
+                        // eslint-disable-next-line @typescript-eslint/no-explicit-any
+                        let args = deltaFunctionCall.args?.input ??
+                            deltaFunctionCall.args;
+                        try {
+                            let parsedArgs = JSON.parse(args);
+                            if (typeof parsedArgs !== 'string') {
+                                args = parsedArgs;
+                            }
+                            parsedArgs = JSON.parse(args);
+                            if (typeof parsedArgs !== 'string') {
+                                args = parsedArgs;
+                            }
+                        }
+                        catch (e) { }
+                        functionCall.args = JSON.stringify(args);
+                        functionCall.name =
+                            functionCall.name + (deltaFunctionCall.name ?? '');
+                        functionCall.arguments = deltaFunctionCall.args;
+                    }
+                }
             }
             try {
-                const messageChunk = new
-                messageChunk.
-
+                const messageChunk = new messages_1.AIMessageChunk(content);
+                messageChunk.additional_kwargs = {
+                    function_call: functionCall.name.length > 0
+                        ? {
+                            name: functionCall.name,
+                            arguments: functionCall.args,
+                            args: functionCall.arguments
+                        }
+                        : undefined
+                    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+                };
+                messageChunk.content = content;
+                const generationChunk = new outputs_1.ChatGenerationChunk({
                     message: messageChunk,
                     text: messageChunk.content
                 });
@@ -175,7 +225,6 @@ class GeminiRequester extends api_1.ModelRequester {
             }
         }
         const body = JSON.stringify(data);
-        // console.log('POST', requestUrl, body)
         return (0, request_1.chatLunaFetch)(requestUrl, {
             body,
             headers: this._buildHeaders(),
package/lib/types.d.ts
CHANGED
@@ -1,7 +1,8 @@
 export interface ChatCompletionResponseMessage {
     role: string;
-    parts?:
+    parts?: ChatPart[];
 }
+export type ChatPart = ChatMessagePart | ChatUploadDataPart | ChatFunctionCallingPart | ChatFunctionResponsePart;
 export type ChatMessagePart = {
     text: string;
 };
@@ -11,6 +12,18 @@ export type ChatUploadDataPart = {
         data?: string;
     };
 };
+export type ChatFunctionCallingPart = {
+    functionCall: {
+        name: string;
+        args?: any;
+    };
+};
+export type ChatFunctionResponsePart = {
+    functionResponse: {
+        name: string;
+        response: any;
+    };
+};
 export interface ChatResponse {
     candidates: {
         content: ChatCompletionResponseMessage;
@@ -28,9 +41,20 @@ export interface ChatResponse {
     }[];
     };
 }
+export interface ChatCompletionFunction {
+    name: string;
+    description?: string;
+    parameters?: {
+        [key: string]: any;
+    };
+}
+export interface ChatCompletionMessageFunctionCall {
+    name: string;
+    args?: any;
+}
 export interface CreateEmbeddingResponse {
     embedding: {
         values: number[];
     };
 }
-export type ChatCompletionResponseMessageRoleEnum = 'system' | 'model' | 'user';
+export type ChatCompletionResponseMessageRoleEnum = 'system' | 'model' | 'user' | 'function';
package/lib/utils.d.ts
CHANGED
@@ -1,5 +1,9 @@
-import { AIMessageChunk, BaseMessage, ChatMessageChunk, HumanMessageChunk, MessageType, SystemMessageChunk } from 'langchain/
-import { ChatCompletionResponseMessage, ChatCompletionResponseMessageRoleEnum } from './types';
-
+import { AIMessageChunk, BaseMessage, ChatMessageChunk, HumanMessageChunk, MessageType, SystemMessageChunk } from '@langchain/core/messages';
+import { ChatCompletionFunction, ChatCompletionResponseMessage, ChatCompletionResponseMessageRoleEnum, ChatPart } from './types';
+import { StructuredTool } from '@langchain/core/tools';
+export declare function langchainMessageToGeminiMessage(messages: BaseMessage[], model?: string): Promise<ChatCompletionResponseMessage[]>;
+export declare function partAsType<T extends ChatPart>(part: ChatPart): T;
+export declare function formatToolsToGeminiAITools(tools: StructuredTool[]): ChatCompletionFunction[];
+export declare function formatToolToGeminiAITool(tool: StructuredTool): ChatCompletionFunction;
 export declare function messageTypeToGeminiRole(type: MessageType): ChatCompletionResponseMessageRoleEnum;
-export declare function convertDeltaToMessageChunk(delta: Record<string, any>, defaultRole?: ChatCompletionResponseMessageRoleEnum):
+export declare function convertDeltaToMessageChunk(delta: Record<string, any>, defaultRole?: ChatCompletionResponseMessageRoleEnum): AIMessageChunk | HumanMessageChunk | SystemMessageChunk | ChatMessageChunk;
package/lib/utils.js
CHANGED
@@ -1,11 +1,81 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.convertDeltaToMessageChunk = exports.messageTypeToGeminiRole = exports.langchainMessageToGeminiMessage = void 0;
-
-
-
-
+exports.convertDeltaToMessageChunk = exports.messageTypeToGeminiRole = exports.formatToolToGeminiAITool = exports.formatToolsToGeminiAITools = exports.partAsType = exports.langchainMessageToGeminiMessage = void 0;
+/* eslint-disable @typescript-eslint/no-explicit-any */
+const messages_1 = require("@langchain/core/messages");
+const zod_to_json_schema_1 = require("zod-to-json-schema");
+async function langchainMessageToGeminiMessage(messages, model) {
+    const mappedMessage = await Promise.all(messages.map(async (rawMessage) => {
         const role = messageTypeToGeminiRole(rawMessage._getType());
+        if (role === 'function' ||
+            rawMessage.additional_kwargs?.function_call != null) {
+            return {
+                role: 'function',
+                parts: [
+                    {
+                        functionResponse: rawMessage.additional_kwargs?.function_call !=
+                            null
+                            ? undefined
+                            : {
+                                name: rawMessage.name,
+                                response: {
+                                    name: rawMessage.name,
+                                    content: (() => {
+                                        try {
+                                            const result = JSON.parse(rawMessage.content);
+                                            if (typeof result ===
+                                                'string') {
+                                                return {
+                                                    response: result
+                                                };
+                                            }
+                                            else {
+                                                return result;
+                                            }
+                                        }
+                                        catch (e) {
+                                            return {
+                                                response: rawMessage.content
+                                            };
+                                        }
+                                    })()
+                                }
+                            },
+                        functionCall: rawMessage.additional_kwargs?.function_call !=
+                            null
+                            ? {
+                                name: rawMessage.additional_kwargs
+                                    .function_call.name,
+                                args: (() => {
+                                    try {
+                                        const result = JSON.parse(rawMessage
+                                            .additional_kwargs
+                                            .function_call
+                                            .arguments);
+                                        if (typeof result === 'string') {
+                                            return {
+                                                input: result
+                                            };
+                                        }
+                                        else {
+                                            return result;
+                                        }
+                                    }
+                                    catch (e) {
+                                        return {
+                                            input: rawMessage
+                                                .additional_kwargs
+                                                .function_call
+                                                .arguments
+                                        };
+                                    }
+                                })()
+                            }
+                            : undefined
+                    }
+                ]
+            };
+        }
         const images = rawMessage.additional_kwargs.images;
         const result = {
             role,
@@ -20,14 +90,14 @@ function langchainMessageToGeminiMessage(messages, model) {
             result.parts.push({
                 inline_data: {
                     // base64 image match type
-                    data: image,
+                    data: image.replace(/^data:image\/\w+;base64,/, ''),
                     mime_type: 'image/jpeg'
                 }
             });
         }
     }
     return result;
-    });
+    }));
     const result = [];
     for (let i = 0; i < mappedMessage.length; i++) {
         const message = mappedMessage[i];
@@ -50,7 +120,7 @@ function langchainMessageToGeminiMessage(messages, model) {
             parts: [{ text: 'Okay, what do I need to do?' }]
         });
     }
-    if (result[result.length - 1].role === '
+    if (result[result.length - 1].role === 'model') {
         result.push({
             role: 'user',
             parts: [
@@ -60,9 +130,74 @@ function langchainMessageToGeminiMessage(messages, model) {
             ]
         });
     }
+    if (model.includes('vision')) {
+        // format prompts
+        const textBuffer = [];
+        const last = result.pop();
+        for (let i = 0; i < result.length; i++) {
+            const message = result[i];
+            const text = message.parts[0].text;
+            textBuffer.push(`${message.role}: ${text}`);
+        }
+        const lastParts = last.parts;
+        let lastImagesParts = lastParts.filter((part) => part.inline_data?.mime_type ===
+            'image/jpeg');
+        if (lastImagesParts.length < 1) {
+            for (let i = result.length - 1; i >= 0; i--) {
+                const message = result[i];
+                const images = message.parts.filter((part) => part.inline_data?.mime_type ===
+                    'image/jpeg');
+                if (images.length > 0) {
+                    lastImagesParts = images;
+                    break;
+                }
+            }
+        }
+        ;
+        lastParts.filter((part) => part.text !== undefined &&
+            part.text !== null).forEach((part) => {
+            textBuffer.push(`${last.role}: ${part.text}`);
+        });
+        return [
+            {
+                role: 'user',
+                parts: [
+                    {
+                        text: textBuffer.join('\n')
+                    },
+                    ...lastImagesParts
+                ]
+            }
+        ];
+    }
     return result;
 }
 exports.langchainMessageToGeminiMessage = langchainMessageToGeminiMessage;
+function partAsType(part) {
+    return part;
+}
+exports.partAsType = partAsType;
+function formatToolsToGeminiAITools(tools) {
+    if (tools.length < 1) {
+        return undefined;
+    }
+    return tools.map(formatToolToGeminiAITool);
+}
+exports.formatToolsToGeminiAITools = formatToolsToGeminiAITools;
+function formatToolToGeminiAITool(tool) {
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    const parameters = (0, zod_to_json_schema_1.zodToJsonSchema)(tool.schema);
+    // remove unsupported properties
+    delete parameters['$schema'];
+    delete parameters['additionalProperties'];
+    return {
+        name: tool.name,
+        description: tool.description,
+        // any?
+        parameters
+    };
+}
+exports.formatToolToGeminiAITool = formatToolToGeminiAITool;
 function messageTypeToGeminiRole(type) {
     switch (type) {
         case 'system':
@@ -71,6 +206,8 @@ function messageTypeToGeminiRole(type) {
             return 'model';
         case 'human':
            return 'user';
+        case 'function':
+            return 'function';
         default:
             throw new Error(`Unknown message type: ${type}`);
     }
@@ -97,16 +234,16 @@ delta, defaultRole) {
         additional_kwargs = {};
     }
     if (role === 'user') {
-        return new
+        return new messages_1.HumanMessageChunk({ content });
     }
     else if (role === 'assistant') {
-        return new
+        return new messages_1.AIMessageChunk({ content, additional_kwargs });
     }
     else if (role === 'system') {
-        return new
+        return new messages_1.SystemMessageChunk({ content });
     }
     else {
-        return new
+        return new messages_1.ChatMessageChunk({ content, role });
     }
 }
 exports.convertDeltaToMessageChunk = convertDeltaToMessageChunk;
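
For reference, a minimal sketch (not part of the package) of the conversion the new `formatToolToGeminiAITool` helper performs, using the `zod` and `zod-to-json-schema` dependencies added in this release; the weather schema and tool name are hypothetical:

```ts
import { z } from 'zod'
import { zodToJsonSchema } from 'zod-to-json-schema'

// Hypothetical tool schema, for illustration only.
const schema = z.object({
    city: z.string().describe('City to look up')
})

// Convert the zod schema to JSON Schema, then strip the keys that
// Gemini function declarations do not accept, as the helper above does.
const parameters = zodToJsonSchema(schema) as Record<string, unknown>
delete parameters['$schema']
delete parameters['additionalProperties']

const declaration = {
    name: 'get_weather',
    description: 'Hypothetical example tool',
    parameters
}
```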
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "koishi-plugin-chatluna-google-gemini-adapter",
   "description": "google-gemini adapter for chatluna",
-  "version": "1.0.0-beta.
+  "version": "1.0.0-beta.6",
   "main": "lib/index.js",
   "typings": "lib/index.d.ts",
   "files": [
@@ -38,16 +38,18 @@
     "adapter"
   ],
   "dependencies": {
+    "@langchain/core": "^0.1.10",
     "@streamparser/json": "^0.0.19",
-    "
+    "zod": "^3.22.4",
+    "zod-to-json-schema": "^3.22.3"
   },
   "devDependencies": {
     "atsc": "^1.2.2",
-    "koishi": "^4.16.
+    "koishi": "^4.16.3"
   },
   "peerDependencies": {
     "koishi": "^4.16.0",
-    "koishi-plugin-chatluna": "^1.0.0-beta.
+    "koishi-plugin-chatluna": "^1.0.0-beta.32"
   },
   "koishi": {
     "description": {
|