doomiaichat 6.1.0 → 7.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/declare.d.ts +6 -0
- package/dist/declare.js +17 -1
- package/dist/doubaoai.d.ts +34 -0
- package/dist/doubaoai.js +151 -0
- package/dist/gptprovider.d.ts +1 -0
- package/dist/gptprovider.js +4 -0
- package/package.json +1 -1
- package/src/declare.ts +22 -7
- package/src/doubaoai.ts +133 -0
- package/src/gptprovider.ts +4 -0
package/dist/declare.d.ts
CHANGED
@@ -73,6 +73,12 @@ export interface RpcResult extends ApiResult {
  * @returns
  */
 export declare function request(opts?: any): Promise<RpcResult>;
+/**
+ *
+ * @param opts
+ * @returns
+ */
+export declare function requestStream(opts: any, processChunkData: Function): void;
 /**
  * Data cache provider interface
  */
package/dist/declare.js
CHANGED
@@ -12,7 +12,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.request = void 0;
+exports.requestStream = exports.request = void 0;
 const axios_1 = __importDefault(require("axios"));
 /**
  * Axios remote request wrapper
@@ -33,3 +33,19 @@ function request(opts = {}) {
     });
 }
 exports.request = request;
+/**
+ *
+ * @param opts
+ * @returns
+ */
+function requestStream(opts = {}, processChunkData) {
+    if (!opts.data)
+        opts.data = opts.body;
+    (0, axios_1.default)(opts)
+        .then((res) => {
+        res.data.on('data', (chunk) => {
+            processChunkData(chunk);
+        });
+    });
+}
+exports.requestStream = requestStream;
package/dist/doubaoai.d.ts
ADDED
@@ -0,0 +1,34 @@
+import GptBase from "./gptbase";
+export default class DouBaoAI extends GptBase {
+    protected apiKey: string;
+    protected apiOption: any;
+    /**
+     * Constructor
+     */
+    constructor(apiKey: string, apiOption?: any);
+    /**
+     * Call the chat API
+     */
+    chatRequest(chatText: string | Array<any>, callChatOption: any, axiosOption?: any): Promise<any>;
+    /**
+     * Assemble the final call parameters
+     * @param callChatOption
+     * @returns
+     */
+    private assembleApiParams;
+    /**
+     * Streaming chat mode
+     * @param chatText
+     * @param _paramOption
+     * @param axiosOption
+     */
+    chatRequestInStream(chatText: string | Array<any>, callChatOption: any, attach?: any, axiosOption?: any): Promise<any>;
+    /**
+     * Process each streamed reply chunk
+     * @param chunks
+     * @param requestid
+     * @param replytext
+     * @param attach
+     */
+    processChunkData(chunks: string[], requestid: Number, replytext: string[], attach: any): void;
+}
package/dist/doubaoai.js
ADDED
@@ -0,0 +1,151 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+/**
+ * Volcengine Ark - Doubao LLM engine
+ */
+const declare_1 = require("./declare");
+const gptbase_1 = __importDefault(require("./gptbase"));
+class DouBaoAI extends gptbase_1.default {
+    /**
+     * Constructor
+     */
+    constructor(apiKey, apiOption = {}) {
+        super();
+        this.apiOption = {};
+        this.apiKey = apiKey;
+        this.apiOption = apiOption;
+    }
+    /**
+     * Call the chat API
+     */
+    chatRequest(chatText, callChatOption, axiosOption = {}) {
+        return __awaiter(this, void 0, void 0, function* () {
+            if (!chatText)
+                return { successed: false, error: { errcode: 2, errmsg: '缺失聊天的内容' } };
+            const callParams = this.assembleApiParams(chatText, false, callChatOption, axiosOption);
+            try {
+                const response = yield (0, declare_1.request)(callParams);
+                if (response.successed && !response.data.code) {
+                    return { successed: true, message: response.data.choices, usage: response.data.usage };
+                    // return { successed: true, ...response.data };
+                }
+                return Object.assign({ successed: false }, response.data);
+            }
+            catch (error) {
+                console.log('result is error ', error);
+                return { successed: false, error };
+            }
+        });
+    }
+    /**
+     * Assemble the final call parameters
+     * @param callChatOption
+     * @returns
+     */
+    assembleApiParams(chatText, streamCall = false, callChatOption, axiosOption = {}) {
+        let messages = typeof (chatText) == 'string' ? [{ role: 'user', content: chatText }] : chatText;
+        let params = {};
+        if ((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.temperature) || this.apiOption.temperature)
+            params.temperature = Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.temperature) || this.apiOption.temperature);
+        params.max_tokens = Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.maxtoken) || this.apiOption.maxtoken);
+        if ((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.top_p) || this.apiOption.top_p)
+            params.top_p = Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.top_p) || this.apiOption.top_p);
+        if ((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.presence_penalty) || this.apiOption.presence_penalty)
+            params.presence_penalty = Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.presence_penalty) || this.apiOption.presence_penalty);
+        if ((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.frequency_penalty) || this.apiOption.frequency_penalty)
+            params.frequency_penalty = Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.frequency_penalty) || this.apiOption.frequency_penalty);
+        if ((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.top_logprobs) || this.apiOption.top_logprobs) {
+            params.logprobs = true;
+            params.top_logprobs = Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.top_logprobs) || this.apiOption.top_logprobs);
+        }
+        params.tools = ((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.enableToolCall) === 1 && (callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.tools)) ? callChatOption.tools : undefined;
+        params.tool_choice = (callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.enableToolCall) === 1 ? 'auto' : undefined;
+        const axiosParams = Object.assign(Object.assign({}, axiosOption), { method: "post", headers: {
+                'Content-Type': 'application/json',
+                'authorization': `Bearer ${this.apiKey}`
+            }, data: Object.assign(Object.assign({ model: (callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.model) || this.apiOption.model }, params), { messages, stream: streamCall }), url: 'https://ark.cn-beijing.volces.com/api/v3/chat/completions' });
+        if (streamCall)
+            axiosParams.responseType = 'stream';
+        return axiosParams;
+    }
+    /**
+     * Streaming chat mode
+     * @param chatText
+     * @param _paramOption
+     * @param axiosOption
+     */
+    chatRequestInStream(chatText, callChatOption, attach, axiosOption) {
+        return __awaiter(this, void 0, void 0, function* () {
+            if (!chatText)
+                this.emit('chaterror', { successed: false, error: 'no text in chat' });
+            axiosOption = Object.assign({}, axiosOption || { timeout: 10000 });
+            const callParams = this.assembleApiParams(chatText, true, callChatOption, axiosOption);
+            let requestid = Math.ceil(Math.random() * (new Date().getTime() * Math.random()) / 1000), replytext = [];
+            try {
+                (0, declare_1.requestStream)(callParams, (chunk) => {
+                    let streamText = chunk.toString().replace('[DONE]', '').replace(/[\r\n]+/gm, '');
+                    this.processChunkData(streamText.split(/data: /), requestid, replytext, attach);
+                });
+                return { successed: true, requestid };
+            }
+            catch (error) {
+                this.emit('requesterror', { successed: false, requestid, error: 'call axios faied ' + error });
+                return { successed: false, requestid };
+            }
+        });
+    }
+    /**
+     * Process each streamed reply chunk
+     * @param chunks
+     * @param requestid
+     * @param replytext
+     * @param attach
+     */
+    processChunkData(chunks, requestid, replytext, attach) {
+        let has_tool_calls = 0, currentIndex, previous_index = -1, tool_calls = []; // use an array to accumulate tool calls
+        for (const splitString of chunks) {
+            if (!splitString)
+                continue;
+            const chunk = JSON.parse(splitString);
+            const [choice] = chunk.choices, { finish_reason: finishreason, index, usage } = choice, { content, tool_calls: toolCalls } = choice.delta;
+            if (toolCalls && toolCalls.length) {
+                currentIndex = toolCalls[0].index;
+                has_tool_calls = 1;
+                if (currentIndex !== previous_index) {
+                    tool_calls.push({
+                        id: toolCalls[0].id,
+                        type: 'function',
+                        function: {
+                            name: toolCalls[0].function.name,
+                            arguments: toolCalls[0].function.arguments
+                        }
+                    });
+                    // update previous_index for the next comparison
+                    previous_index = currentIndex;
+                }
+                else {
+                    tool_calls[previous_index].function.arguments += toolCalls[0].function.arguments;
+                }
+            }
+            else
+                replytext.push(content);
+            let output = { successed: true, requestid, segment: content, text: replytext.join(''), finish_reason: finishreason, index, usage, has_tool_calls: has_tool_calls, tool_calls: tool_calls };
+            if (attach)
+                output = Object.assign({}, output, attach);
+            this.emit(finishreason ? 'chatdone' : 'chattext', output);
+        }
+    }
+}
+exports.default = DouBaoAI;
package/dist/gptprovider.d.ts
CHANGED
@@ -8,6 +8,7 @@ export declare const GptProviderEnum: {
     readonly OPENAIPROXY: "openaiproxy";
     readonly MICROSOFT: "microsoft";
     readonly BAIDU: "baidu";
+    readonly DOUBAO: "doubao";
     readonly GOOGLE: "google";
     readonly STABILITY: "stability";
     readonly STABILITY2: "stability2";
package/dist/gptprovider.js
CHANGED
@@ -14,6 +14,7 @@ const azureai_1 = __importDefault(require("./azureai"));
 const stabilityai_1 = __importDefault(require("./stabilityai"));
 const stabilityplusai_1 = __importDefault(require("./stabilityplusai"));
 const baiduai_1 = __importDefault(require("./baiduai"));
+const doubaoai_1 = __importDefault(require("./doubaoai"));
 /**
  * OpenAI/NLP service providers: OpenAI, Microsoft, Baidu Wenxin (pending integration), Google (pending integration)
  */
@@ -22,6 +23,7 @@ exports.GptProviderEnum = {
     OPENAIPROXY: 'openaiproxy',
     MICROSOFT: 'microsoft',
     BAIDU: 'baidu',
+    DOUBAO: 'doubao',
     GOOGLE: 'google',
     STABILITY: 'stability',
     STABILITY2: 'stability2',
@@ -45,6 +47,8 @@ function createGpt(provider, apikey, setting) {
         case exports.GptProviderEnum.BAIDU:
             let cred = typeof (apikey) === 'string' ? { apikey, securitykey: apikey } : apikey;
             return new baiduai_1.default(cred);
+        case exports.GptProviderEnum.DOUBAO:
+            return new doubaoai_1.default(apikey + '', { model, maxtoken, temperature, top_p, presence_penalty, frequency_penalty });
         case exports.GptProviderEnum.STABILITY:
             return new stabilityai_1.default(apikey + '', { endpoint, engine }, setting);
         case exports.GptProviderEnum.STABILITY2:
package/package.json
CHANGED
package/src/declare.ts
CHANGED
@@ -1,6 +1,7 @@
 
 import { EmbeddingItem } from '@azure/openai';
 import axios from 'axios';
+
 export interface ApiResult {
     /**
      * return the result of api called
@@ -25,29 +26,29 @@ export interface ChatReponse extends ApiResult {
      * @memberof ChatReponse
      */
     'message'?: Array<any>;
-    'usage'?:any;
+    'usage'?: any;
 }
 /**
  * Parameter contract for calling the OpenAI API
  */
 export interface OpenAIApiParameters {
-    'embedding'?:string, ///model engine, Azure-compatible
+    'embedding'?: string, ///model engine, Azure-compatible
     'model'?: string, ///model name
     'maxtoken'?: number; ///maximum tokens to return
     'temperature'?: number;
-    'top_p'?:number;
+    'top_p'?: number;
     'presence_penalty'?: number;
     'frequency_penalty'?: number;
     'replyCounts'?: number; ///how many answers to return
-    'tools'?:Array<any>,
+    'tools'?: Array<any>,
     'tool_choice'?: string,
     'enableToolCall'?: number ///whether tool function calls are allowed
 }
 /**
  * Connection parameters for OpenAI on Azure
  */
-export interface ProxyPatameters{
-    'serviceurl':string; ///endpoint
+export interface ProxyPatameters {
+    'serviceurl': string; ///endpoint
 }
 
 /**
@@ -84,9 +85,23 @@ export async function request(opts: any = {}): Promise<RpcResult> {
         let result = await axios(opts);
         return { successed: true, data: result.data }
     } catch (err) {
-        return { successed: false, error: err,data:err }
+        return { successed: false, error: err, data: err }
     }
 }
+/**
+ *
+ * @param opts
+ * @returns
+ */
+export function requestStream(opts: any = {},processChunkData:Function) {
+    if (!opts.data) opts.data = opts.body;
+    axios(opts)
+        .then((res: any) => {
+            res.data.on('data', (chunk: any) => {
+                processChunkData(chunk);
+            });
+        })
+}
 
 
 /**
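
The new `requestStream` helper (added in both dist/declare.js and src/declare.ts above) simply forwards every raw chunk of an axios stream response to a callback; it never resolves or rejects a promise itself. A minimal consumption sketch, assuming it is called from inside the package source; the endpoint and payload below are placeholders, not part of the diff:

```ts
import { requestStream } from './declare';

// Hypothetical SSE-style endpoint; responseType 'stream' is what makes
// res.data a readable stream that requestStream can subscribe to.
requestStream(
    {
        method: 'post',
        url: 'https://example.com/v1/chat/completions',   // placeholder URL
        responseType: 'stream',
        data: { model: 'some-model', stream: true, messages: [{ role: 'user', content: 'hi' }] }
    },
    (chunk: Buffer) => {
        // Each chunk carries one or more raw "data: {...}" lines.
        console.log(chunk.toString());
    }
);
```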
package/src/doubaoai.ts
ADDED
@@ -0,0 +1,133 @@
+/**
+ * Volcengine Ark - Doubao LLM engine
+ */
+import { request, requestStream } from "./declare";
+import GptBase from "./gptbase"
+export default class DouBaoAI extends GptBase {
+    protected apiKey: string;
+    protected apiOption: any = {}
+    /**
+     * Constructor
+     */
+    constructor(apiKey: string, apiOption: any = {}) {
+        super();
+        this.apiKey = apiKey;
+        this.apiOption = apiOption;
+    }
+    /**
+     * Call the chat API
+     */
+    public async chatRequest(chatText: string | Array<any>, callChatOption: any, axiosOption: any = {}): Promise<any> {
+        if (!chatText) return { successed: false, error: { errcode: 2, errmsg: '缺失聊天的内容' } };
+        const callParams = this.assembleApiParams(chatText, false, callChatOption, axiosOption);
+        try {
+            const response = await request(callParams)
+            if (response.successed && !response.data.code) {
+                return { successed: true, message: response.data.choices, usage: response.data.usage }
+                // return { successed: true, ...response.data };
+            }
+            return { successed: false, ...response.data };
+        } catch (error) {
+            console.log('result is error ', error)
+            return { successed: false, error };
+        }
+
+    }
+    /**
+     * Assemble the final call parameters
+     * @param callChatOption
+     * @returns
+     */
+    private assembleApiParams(chatText: string | Array<any>, streamCall: boolean = false, callChatOption: any, axiosOption: any = {}): any {
+        let messages: Array<any> = typeof (chatText) == 'string' ? [{ role: 'user', content: chatText }] : chatText;
+        let params: any = {};
+        if (callChatOption?.temperature || this.apiOption.temperature) params.temperature = Number(callChatOption?.temperature || this.apiOption.temperature);
+        params.max_tokens = Number(callChatOption?.maxtoken || this.apiOption.maxtoken);
+        if (callChatOption?.top_p || this.apiOption.top_p) params.top_p = Number(callChatOption?.top_p || this.apiOption.top_p);
+        if (callChatOption?.presence_penalty || this.apiOption.presence_penalty) params.presence_penalty = Number(callChatOption?.presence_penalty || this.apiOption.presence_penalty);
+        if (callChatOption?.frequency_penalty || this.apiOption.frequency_penalty) params.frequency_penalty = Number(callChatOption?.frequency_penalty || this.apiOption.frequency_penalty);
+        if (callChatOption?.top_logprobs || this.apiOption.top_logprobs) {
+            params.logprobs = true;
+            params.top_logprobs = Number(callChatOption?.top_logprobs || this.apiOption.top_logprobs);
+        }
+        params.tools = (callChatOption?.enableToolCall === 1 && callChatOption?.tools) ? callChatOption.tools : undefined;
+        params.tool_choice = callChatOption?.enableToolCall === 1 ? 'auto' : undefined;
+        const axiosParams = {
+            ...axiosOption,
+            method: "post",
+            headers: {
+                'Content-Type': 'application/json',
+                'authorization': `Bearer ${this.apiKey}`
+            },
+            data: {
+                model: callChatOption?.model || this.apiOption.model,
+                ...params,
+                messages,
+                stream: streamCall
+            },
+            url: 'https://ark.cn-beijing.volces.com/api/v3/chat/completions'
+        };
+        if (streamCall) axiosParams.responseType = 'stream';
+        return axiosParams;
+    }
+    /**
+     * Streaming chat mode
+     * @param chatText
+     * @param _paramOption
+     * @param axiosOption
+     */
+    override async chatRequestInStream(chatText: string | Array<any>, callChatOption: any, attach?: any, axiosOption?: any): Promise<any> {
+        if (!chatText) this.emit('chaterror', { successed: false, error: 'no text in chat' });
+        axiosOption = Object.assign({}, axiosOption || { timeout: 10000 })
+        const callParams = this.assembleApiParams(chatText, true, callChatOption, axiosOption);
+        let requestid = Math.ceil(Math.random() * (new Date().getTime() * Math.random()) / 1000), replytext: string[] = [];
+        try {
+            requestStream(callParams, (chunk: any) => {
+                let streamText = chunk.toString().replace('[DONE]', '').replace(/[\r\n]+/gm, '')
+                this.processChunkData(streamText.split(/data: /), requestid, replytext, attach)
+            })
+            return { successed: true, requestid }
+        } catch (error) {
+            this.emit('requesterror', { successed: false, requestid, error: 'call axios faied ' + error });
+            return { successed: false, requestid }
+        }
+    }
+    /**
+     * Process each streamed reply chunk
+     * @param chunks
+     * @param requestid
+     * @param replytext
+     * @param attach
+     */
+    processChunkData(chunks: string[], requestid: Number, replytext: string[], attach: any) {
+        let has_tool_calls = 0, currentIndex, previous_index = -1, tool_calls: any[] = [];// use an array to accumulate tool calls
+        for (const splitString of chunks) {
+            if (!splitString) continue;
+            const chunk = JSON.parse(splitString);
+            const [choice] = chunk.choices,
+                { finish_reason: finishreason, index, usage } = choice,
+                { content, tool_calls: toolCalls } = choice.delta;
+            if (toolCalls && toolCalls.length) {
+                currentIndex = toolCalls[0].index;
+                has_tool_calls = 1;
+                if (currentIndex !== previous_index) {
+                    tool_calls.push({
+                        id: toolCalls[0].id,
+                        type: 'function',
+                        function: {
+                            name: toolCalls[0].function.name,
+                            arguments: toolCalls[0].function.arguments
+                        }
+                    });
+                    // update previous_index for the next comparison
+                    previous_index = currentIndex;
+                } else {
+                    tool_calls[previous_index].function.arguments += toolCalls[0].function.arguments
+                }
+            } else replytext.push(content);
+            let output = { successed: true, requestid, segment: content, text: replytext.join(''), finish_reason: finishreason, index, usage, has_tool_calls: has_tool_calls, tool_calls: tool_calls };
+            if (attach) output = Object.assign({}, output, attach);
+            this.emit(finishreason ? 'chatdone' : 'chattext', output)
+        }
+    }
+}
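
Taken together with the dist output above, DouBaoAI supports two call styles: `chatRequest` resolves with `{ successed, message, usage }`, while `chatRequestInStream` pushes partial results out through `chattext` / `chatdone` events, with `chaterror` and `requesterror` on failure. A rough usage sketch, assuming `GptBase` is an EventEmitter (the class calls `this.emit`) and treating the API key and model id as placeholders:

```ts
import DouBaoAI from './doubaoai';

async function demo() {
    // Placeholder credentials; the apiOption fields mirror the ones read in assembleApiParams.
    const doubao = new DouBaoAI('YOUR_ARK_API_KEY', { model: 'your-endpoint-id', maxtoken: 1024 });

    // Blocking call: resolves once the whole completion is available.
    const reply = await doubao.chatRequest('Introduce the Doubao model in one sentence.', { temperature: 0.7 });
    if (reply.successed) console.log(reply.message);

    // Streaming call: text segments arrive through events emitted by processChunkData.
    // .on() is assumed to come from the EventEmitter base class.
    doubao.on('chattext', (out: any) => process.stdout.write(out.segment ?? ''));
    doubao.on('chatdone', (out: any) => console.log('\nfinish_reason:', out.finish_reason));
    doubao.on('chaterror', (err: any) => console.error(err));
    await doubao.chatRequestInStream('Tell me a short joke.', {});
}

demo();
```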
package/src/gptprovider.ts
CHANGED
@@ -8,6 +8,7 @@ import AzureAI from './azureai'
 import StabilityAI from './stabilityai'
 import StabilityPlusAI from './stabilityplusai'
 import BaiduWenXinAI, { ApiCredential } from './baiduai'
+import DouBaoAI from './doubaoai'
 import GptBase from './gptbase';
 /**
  * OpenAI/NLP service providers: OpenAI, Microsoft, Baidu Wenxin (pending integration), Google (pending integration)
@@ -17,6 +18,7 @@ export const GptProviderEnum = {
     OPENAIPROXY:'openaiproxy',
     MICROSOFT: 'microsoft',
     BAIDU: 'baidu',
+    DOUBAO:'doubao',
     GOOGLE:'google',
     STABILITY:'stability',
     STABILITY2: 'stability2',
@@ -41,6 +43,8 @@ export function createGpt(provider: GptProviderEnum, apikey: string|ApiCredentia
         case GptProviderEnum.BAIDU:
             let cred: ApiCredential = typeof (apikey) === 'string' ? { apikey, securitykey: apikey } : apikey
             return new BaiduWenXinAI(cred);
+        case GptProviderEnum.DOUBAO:
+            return new DouBaoAI(apikey + '', { model, maxtoken, temperature, top_p, presence_penalty, frequency_penalty })
         case GptProviderEnum.STABILITY:
            return new StabilityAI(apikey + '', { endpoint, engine }, setting);
         case GptProviderEnum.STABILITY2: