doomiaichat 6.0.1 → 6.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/azureai.js +39 -9
- package/dist/declare.d.ts +1 -0
- package/dist/openai.js +6 -3
- package/package.json +3 -3
- package/src/azureai.ts +33 -5
- package/src/declare.ts +2 -1
- package/src/openai.ts +8 -4
package/dist/azureai.js
CHANGED
@@ -119,7 +119,7 @@ class AzureAI extends openaibase_1.default {
      */
     chatRequestInStream(chatText, callChatOption, attach, axiosOption) {
         var _a, e_1, _b, _c;
-        var _d;
+        var _d, _e, _f;
         return __awaiter(this, void 0, void 0, function* () {
             if (!chatText)
                 this.emit('chaterror', { successed: false, error: 'no text in chat' });
@@ -129,6 +129,7 @@ class AzureAI extends openaibase_1.default {
             let message = typeof (chatText) == 'string' ? [{ role: 'user', content: chatText }] : chatText;
             axiosOption = Object.assign({}, axiosOption || { timeout: 60000 });
             let requestid = Math.ceil(Math.random() * (new Date().getTime() * Math.random()) / 1000);
+            let has_tool_calls = 0, currentIndex, previous_index = -1, tool_calls = []; // use an array to store the tool calls
             try {
                 const response = yield this.aiApi.streamChatCompletions((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.model) || this.chatModel, message, {
                     temperature: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.temperature) || this.temperature),
@@ -136,34 +137,63 @@ class AzureAI extends openaibase_1.default {
                     topP: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.top_p) || this.top_p),
                     presencePenalty: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.presence_penalty) || this.presence_penalty),
                     frequencyPenalty: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.frequency_penalty) || this.frequency_penalty),
-                    n: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.replyCounts) || 1) || 1
+                    n: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.replyCounts) || 1) || 1,
+                    tools: ((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.enableToolCall) === 1 && (callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.tools)) ? callChatOption.tools : undefined,
+                    toolChoice: (callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.enableToolCall) === 1 ? 'auto' : undefined
                 });
+                //console.log('tools', callChatOption.enableToolCall, callChatOption.tools)
                 let replytext = [];
                 try {
-                    for (var
+                    for (var _g = true, response_1 = __asyncValues(response), response_1_1; response_1_1 = yield response_1.next(), _a = response_1_1.done, !_a;) {
                         _c = response_1_1.value;
-
+                        _g = false;
                         try {
                             const event = _c;
                             for (const choice of event.choices) {
                                 const { finishReason: finishreason, index } = choice;
-                                const
-
-
+                                const toolCalls = (_d = choice.delta) === null || _d === void 0 ? void 0 : _d.toolCalls;
+                                console.log('toolCalls', toolCalls);
+                                /// toolCalls are present
+                                if (toolCalls && toolCalls.length) {
+                                    currentIndex = toolCalls[0].index;
+                                    has_tool_calls = 1;
+                                    // check whether the index has changed
+                                    console.log('currentIndex,previous_index', currentIndex, previous_index);
+                                    if (currentIndex !== previous_index) {
+                                        tool_calls.push({
+                                            id: toolCalls[0].id,
+                                            type: 'function',
+                                            function: {
+                                                name: toolCalls[0].function.name,
+                                                arguments: toolCalls[0].function.arguments
+                                            }
+                                        });
+                                        // update previousIndex for the next comparison
+                                        previous_index = currentIndex;
+                                    }
+                                    else {
+                                        tool_calls[previous_index].function.arguments += toolCalls[0].function.arguments;
+                                    }
+                                }
+                                else {
+                                    const content = (_e = choice.delta) === null || _e === void 0 ? void 0 : _e.content;
+                                    replytext.push(content);
+                                }
+                                let output = { successed: true, requestid, segment: (_f = choice.delta) === null || _f === void 0 ? void 0 : _f.content, text: replytext.join(''), finish_reason: finishreason, index, has_tool_calls: has_tool_calls, tool_calls: tool_calls };
                                 if (attach)
                                     output = Object.assign({}, output, attach);
                                 this.emit(finishreason ? 'chatdone' : 'chattext', output);
                             }
                         }
                         finally {
-
+                            _g = true;
                         }
                     }
                 }
                 catch (e_1_1) { e_1 = { error: e_1_1 }; }
                 finally {
                     try {
-                        if (!
+                        if (!_g && !_a && (_b = response_1.return)) yield _b.call(response_1);
                     }
                     finally { if (e_1) throw e_1.error; }
                 }
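Most of the added lines deal with tool calls that arrive in fragments over the stream: the first delta for a given tool-call index carries the call id and function name, while later deltas only extend the arguments string, so the handler groups fragments by index before emitting them. Below is a minimal standalone sketch of that accumulation pattern; the delta shape mirrors what the diff reads from choice.delta?.toolCalls, but the interfaces and function name are illustrative, not part of the package.

    // Sketch: merge streamed tool-call deltas (one array per chunk) into complete calls, keyed by index.
    interface ToolCallDelta {
        index: number;
        id?: string;
        function?: { name?: string; arguments?: string };
    }

    interface AssembledToolCall {
        id?: string;
        type: 'function';
        function: { name?: string; arguments: string };
    }

    function accumulateToolCalls(chunks: ToolCallDelta[][]): AssembledToolCall[] {
        const calls: AssembledToolCall[] = [];
        for (const deltas of chunks) {
            for (const delta of deltas) {
                if (calls[delta.index] === undefined) {
                    // The first fragment for an index carries the call id and function name.
                    calls[delta.index] = {
                        id: delta.id,
                        type: 'function',
                        function: { name: delta.function?.name, arguments: delta.function?.arguments ?? '' }
                    };
                } else {
                    // Later fragments only append to the JSON argument string.
                    calls[delta.index].function.arguments += delta.function?.arguments ?? '';
                }
            }
        }
        return calls;
    }

Keying directly on the delta's own index sidesteps the previous_index bookkeeping the diff uses, but the end result is the same: one assembled call per index, with its arguments concatenated in arrival order.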
package/dist/declare.d.ts
CHANGED
package/dist/openai.js
CHANGED
@@ -25,6 +25,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
  */
 const openaibase_1 = __importDefault(require("./openaibase"));
 const openai_1 = __importDefault(require("openai"));
+// import { ChatCompletionToolChoiceOption } from "openai/resources";
 class OpenAIGpt extends openaibase_1.default {
     /**
      * Initialize the OpenAI chat API object
@@ -80,8 +81,8 @@ class OpenAIGpt extends openaibase_1.default {
                 presence_penalty: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.presence_penalty) || this.presence_penalty),
                 frequency_penalty: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.frequency_penalty) || this.frequency_penalty),
                 n: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.replyCounts) || 1) || 1,
-                tools: callChatOption.tools,
-                tool_choice: (callChatOption
+                tools: ((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.enableToolCall) === 1 && (callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.tools)) ? callChatOption.tools : undefined,
+                tool_choice: (callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.enableToolCall) === 1 ? 'auto' : undefined,
             }, axiosOption);
             return { successed: true, message: response.data.choices, usage: response.data.usage };
         }
@@ -125,6 +126,8 @@ class OpenAIGpt extends openaibase_1.default {
                 presence_penalty: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.presence_penalty) || this.presence_penalty),
                 frequency_penalty: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.frequency_penalty) || this.frequency_penalty),
                 n: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.replyCounts) || 1) || 1,
+                tools: ((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.enableToolCall) === 1 && (callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.tools)) ? callChatOption.tools : undefined,
+                tool_choice: (callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.enableToolCall) === 1 ? 'auto' : undefined,
                 stream: true
             }, axiosOption);
             let replytext = [];
@@ -134,7 +137,7 @@ class OpenAIGpt extends openaibase_1.default {
                 _d = false;
                 try {
                     const chunk = _c;
-                    const [choice] = chunk.choices, {
+                    const [choice] = chunk.choices, { finish_reason: finishreason, index, usage } = choice, { content } = choice.delta;
                     replytext.push(content);
                     let output = { successed: true, requestid, segment: content, text: replytext.join(''), finish_reason: finishreason, index, usage };
                     if (attach)
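Both the plain and the streaming OpenAI request paths now gate the tool fields on enableToolCall === 1; any other value leaves tools and tool_choice undefined, so the request body matches what 6.0.1 sent. Here is a hedged sketch of that gating written as a small helper; the helper and its option type are illustrative and not exported by the package.

    // Sketch: derive the tool-related request fields the same way the patched code does.
    type ToolCallOptions = { enableToolCall?: number; tools?: Array<any> };

    function toolRequestFields(callChatOption?: ToolCallOptions) {
        const enabled = callChatOption?.enableToolCall === 1;
        return {
            // Forward tools only when the caller opted in and actually supplied some.
            tools: enabled && callChatOption?.tools ? callChatOption.tools : undefined,
            // 'auto' lets the model decide whether to call a tool; undefined omits the field entirely.
            tool_choice: enabled ? ('auto' as const) : undefined,
        };
    }

    // Usage sketch: spread into the request body, e.g.
    // { model, messages, n: 1, ...toolRequestFields(callChatOption), stream: true }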
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "doomiaichat",
-  "version": "6.0.1",
+  "version": "6.0.3",
   "description": "Doomisoft OpenAI",
   "main": "dist/index.js",
   "scripts": {
@@ -16,8 +16,8 @@
     "typescript": "^4.9.5"
   },
   "dependencies": {
-    "@azure/openai": "^1.0.0-beta.
+    "@azure/openai": "^1.0.0-beta.11",
     "axios": "^1.3.4",
-    "openai": "^4.
+    "openai": "^4.29.0"
   }
 }
package/src/azureai.ts
CHANGED
@@ -113,6 +113,7 @@ export default class AzureAI extends OpenAIBase<OpenAIClient> {
         let message: Array<any> = typeof (chatText) == 'string' ?[{ role: 'user', content: chatText }] : chatText;
         axiosOption = Object.assign({}, axiosOption || { timeout: 60000 })
         let requestid = Math.ceil(Math.random() * (new Date().getTime() * Math.random()) / 1000);
+        let has_tool_calls=0,currentIndex, previous_index=-1, tool_calls:any[] = [];// use an array to store the tool calls
         try {
             const response: any = await this.aiApi.streamChatCompletions(
                 callChatOption?.model || this.chatModel,
@@ -123,22 +124,49 @@ export default class AzureAI extends OpenAIBase<OpenAIClient> {
                     topP: Number(callChatOption?.top_p || this.top_p),
                     presencePenalty: Number(callChatOption?.presence_penalty || this.presence_penalty),
                     frequencyPenalty: Number(callChatOption?.frequency_penalty || this.frequency_penalty),
-                    n: Number(callChatOption?.replyCounts || 1) || 1
+                    n: Number(callChatOption?.replyCounts || 1) || 1,
+                    tools: (callChatOption?.enableToolCall===1 && callChatOption?.tools) ? callChatOption.tools : undefined,
+                    toolChoice: callChatOption?.enableToolCall === 1 ?'auto':undefined
                 });
+            //console.log('tools', callChatOption.enableToolCall, callChatOption.tools)
             let replytext: string[] = [];
             for await (const event of response) {
                 for (const choice of event.choices) {
                     const { finishReason: finishreason, index } = choice;
-                    const
-
-
+                    const toolCalls = choice.delta?.toolCalls;
+                    console.log('toolCalls', toolCalls);
+                    /// toolCalls are present
+                    if (toolCalls && toolCalls.length){
+                        currentIndex = toolCalls[0].index;
+                        has_tool_calls = 1;
+                        // check whether the index has changed
+                        console.log('currentIndex,previous_index', currentIndex, previous_index)
+                        if (currentIndex !== previous_index) {
+                            tool_calls.push({
+                                id: toolCalls[0].id,
+                                type: 'function',
+                                function: {
+                                    name: toolCalls[0].function.name,
+                                    arguments: toolCalls[0].function.arguments
+                                }
+                            });
+                            // update previousIndex for the next comparison
+                            previous_index = currentIndex;
+                        } else {
+                            tool_calls[previous_index].function.arguments += toolCalls[0].function.arguments
+                        }
+                    }else{
+                        const content = choice.delta?.content;
+                        replytext.push(content);
+                    }
+                    let output = { successed: true, requestid, segment: choice.delta?.content, text: replytext.join(''), finish_reason: finishreason, index, has_tool_calls: has_tool_calls, tool_calls: tool_calls };
                     if (attach) output = Object.assign({}, output, attach);
                     this.emit(finishreason ? 'chatdone' : 'chattext', output)
                 }
             }
             return { successed: true, requestid }
         } catch (error) {
-            this.emit('requesterror', { successed: false, requestid, error: 'call axios faied ' +
+            this.emit('requesterror', { successed: false, requestid, error: 'call axios faied ' +error });
             return { successed: false, requestid }
         }
     }
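Taken together with the src/declare.ts change below, callers opt in by passing enableToolCall: 1 plus a tools array, and then read has_tool_calls / tool_calls from the emitted payloads ('chatdone' fires once a finish reason arrives). A hedged usage sketch, assuming ai is an already-constructed AzureAI instance, that the base class is a Node EventEmitter (as the emit calls suggest), and that the get_weather tool is invented purely for illustration:

    // Sketch: stream a chat request with tool calling enabled and inspect the result.
    ai.on('chattext', (chunk: any) => {
        // Intermediate chunks: chunk.segment is the latest text delta, chunk.text the text so far.
        process.stdout.write(chunk.segment || '');
    });
    ai.on('chatdone', (done: any) => {
        if (done.has_tool_calls) {
            // Entries look like { id, type: 'function', function: { name, arguments } },
            // with arguments reassembled from the streamed fragments.
            console.log('tool calls requested:', done.tool_calls);
        } else {
            console.log('answer:', done.text);
        }
    });

    ai.chatRequestInStream('What is the weather in Paris?', {
        enableToolCall: 1,              // only the value 1 switches the tools / toolChoice pass-through on
        tools: [{
            type: 'function',
            function: {
                name: 'get_weather',    // hypothetical tool, for illustration only
                description: 'Look up the current weather for a city',
                parameters: {
                    type: 'object',
                    properties: { city: { type: 'string' } },
                    required: ['city'],
                },
            },
        }],
    }).then((r: any) => {
        if (!r.successed) console.error('request failed', r.requestid);
    });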
package/src/declare.ts
CHANGED
@@ -39,7 +39,8 @@ export interface OpenAIApiParameters {
     'frequency_penalty'?: number;
     'replyCounts'?: number; /// how many answers to return
     'tools'?:Array<any>,
-    'tool_choice'?: string
+    'tool_choice'?: string,
+    'enableToolCall'?: number /// whether calling tool functions is allowed
 }
 /**
  * Connection parameters for OpenAI on Azure
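As a small type-level illustration, a call-option literal that uses the new field (a sketch; the type annotation is left as a comment because the import path into the published package isn't shown in this diff, and only fields visible here are listed):

    // Sketch: a literal matching the OpenAIApiParameters interface declared above.
    const callChatOption /* : OpenAIApiParameters */ = {
        replyCounts: 1,        // how many answers to return
        enableToolCall: 1,     // numeric flag: only the value 1 turns the tools pass-through on
        tools: [],             // OpenAI-style tool definitions go here (typed Array<any>)
        // tool_choice is still declared, but when enableToolCall is 1 the library now sends 'auto' itself
    };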
package/src/openai.ts
CHANGED
@@ -5,7 +5,7 @@
 import OpenAIBase from "./openaibase"
 import { OpenAIApiParameters, ChatReponse, EmbeddingResult } from './declare'
 import OpenAI from "openai";
-import { ChatCompletionToolChoiceOption } from "openai/resources";
+// import { ChatCompletionToolChoiceOption } from "openai/resources";
 export default class OpenAIGpt extends OpenAIBase<OpenAI> {
     /**
      * Initialize the OpenAI chat API object
@@ -57,8 +57,8 @@ export default class OpenAIGpt extends OpenAIBase<OpenAI> {
             presence_penalty: Number(callChatOption?.presence_penalty || this.presence_penalty),
             frequency_penalty: Number(callChatOption?.frequency_penalty || this.frequency_penalty),
             n: Number(callChatOption?.replyCounts || 1) || 1,
-            tools: callChatOption.tools,
-            tool_choice:
+            tools: (callChatOption?.enableToolCall === 1 && callChatOption?.tools) ? callChatOption.tools : undefined,
+            tool_choice: callChatOption?.enableToolCall === 1 ? 'auto' : undefined,
         }, axiosOption);
         return { successed: true, message: response.data.choices, usage: response.data.usage };
     } catch (error) {
@@ -99,11 +99,15 @@ export default class OpenAIGpt extends OpenAIBase<OpenAI> {
             presence_penalty: Number(callChatOption?.presence_penalty || this.presence_penalty),
             frequency_penalty: Number(callChatOption?.frequency_penalty || this.frequency_penalty),
             n: Number(callChatOption?.replyCounts || 1) || 1,
+            tools: (callChatOption?.enableToolCall === 1 && callChatOption?.tools) ? callChatOption.tools : undefined,
+            tool_choice: callChatOption?.enableToolCall === 1 ? 'auto' : undefined,
             stream:true
         }, axiosOption);
         let replytext: string[] = [];
         for await (const chunk of response) {
-            const [choice] = chunk.choices,
+            const [choice] = chunk.choices,
+                { finish_reason:finishreason, index, usage } = choice,
+                { content} = choice.delta;
             replytext.push(content);
             let output = { successed: true, requestid, segment: content, text: replytext.join(''), finish_reason: finishreason, index, usage };
             if (attach) output = Object.assign({}, output, attach);