doomiaichat 6.1.0 → 7.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/aimp.d.ts +34 -0
- package/dist/aimp.js +133 -0
- package/dist/declare.d.ts +6 -0
- package/dist/declare.js +17 -1
- package/dist/doubaoai.d.ts +34 -0
- package/dist/doubaoai.js +149 -0
- package/dist/gptprovider.d.ts +2 -0
- package/dist/gptprovider.js +9 -1
- package/package.json +1 -1
- package/src/aimp.ts +100 -0
- package/src/declare.ts +22 -7
- package/src/doubaoai.ts +129 -0
- package/src/gptprovider.ts +10 -2
package/dist/aimp.d.ts
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import GptBase from "./gptbase";
|
|
2
|
+
import { ChatReponse } from './declare';
|
|
3
|
+
export default class AIMiddlePlatform extends GptBase {
|
|
4
|
+
protected apikey: string;
|
|
5
|
+
protected agent: {
|
|
6
|
+
endpoint: string;
|
|
7
|
+
agentid: string;
|
|
8
|
+
};
|
|
9
|
+
/**
|
|
10
|
+
*
|
|
11
|
+
* @param apikey 调用AI中台 的key
|
|
12
|
+
* @param agent 智能体信息
|
|
13
|
+
*/
|
|
14
|
+
constructor(apikey: string, agent: {
|
|
15
|
+
endpoint: string;
|
|
16
|
+
agentid: string;
|
|
17
|
+
});
|
|
18
|
+
/**
|
|
19
|
+
* 非流式传输聊天请求
|
|
20
|
+
* @param chatText
|
|
21
|
+
* @param callChatOption
|
|
22
|
+
* @param axiosOption
|
|
23
|
+
*/
|
|
24
|
+
chatRequest(chatText: string, callChatOption: any, axiosOption?: any): Promise<ChatReponse>;
|
|
25
|
+
/**
|
|
26
|
+
* 流式传输聊天请求
|
|
27
|
+
* @param chatText
|
|
28
|
+
* @param callChatOption
|
|
29
|
+
* @param attach
|
|
30
|
+
* @param axiosOption
|
|
31
|
+
* @returns
|
|
32
|
+
*/
|
|
33
|
+
chatRequestInStream(chatText: string | Array<any>, callChatOption: any, attach?: any, axiosOption?: any): Promise<any>;
|
|
34
|
+
}
|
package/dist/aimp.js
ADDED
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
3
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
4
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
|
5
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
6
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
7
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
8
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
9
|
+
});
|
|
10
|
+
};
|
|
11
|
+
var __asyncValues = (this && this.__asyncValues) || function (o) {
|
|
12
|
+
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
|
|
13
|
+
var m = o[Symbol.asyncIterator], i;
|
|
14
|
+
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
|
|
15
|
+
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
|
|
16
|
+
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
|
|
17
|
+
};
|
|
18
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
19
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
20
|
+
};
|
|
21
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
22
|
+
/**
|
|
23
|
+
* 接入AI平台的中间层
|
|
24
|
+
*/
|
|
25
|
+
const axios_1 = __importDefault(require("axios"));
|
|
26
|
+
const declare_1 = require("./declare");
|
|
27
|
+
const gptbase_1 = __importDefault(require("./gptbase"));
|
|
28
|
+
const stream_1 = require("stream");
|
|
29
|
+
class AIMiddlePlatform extends gptbase_1.default {
|
|
30
|
+
/**
|
|
31
|
+
*
|
|
32
|
+
* @param apikey 调用AI中台 的key
|
|
33
|
+
* @param agent 智能体信息
|
|
34
|
+
*/
|
|
35
|
+
constructor(apikey, agent) {
|
|
36
|
+
super();
|
|
37
|
+
this.apikey = apikey;
|
|
38
|
+
this.agent = agent;
|
|
39
|
+
}
|
|
40
|
+
/**
|
|
41
|
+
* 非流式传输聊天请求
|
|
42
|
+
* @param chatText
|
|
43
|
+
* @param callChatOption
|
|
44
|
+
* @param axiosOption
|
|
45
|
+
*/
|
|
46
|
+
chatRequest(chatText, callChatOption, axiosOption = {}) {
|
|
47
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
48
|
+
if (!chatText)
|
|
49
|
+
this.emit('chaterror', { successed: false, error: 'no text in chat' });
|
|
50
|
+
axiosOption = Object.assign({}, axiosOption || { timeout: 60000 });
|
|
51
|
+
const opts = Object.assign({ headers: {
|
|
52
|
+
'Content-Type': 'application/json',
|
|
53
|
+
'authorization': `Bearer ${this.apikey}`
|
|
54
|
+
}, method: 'post', url: `${this.agent.endpoint}/api/v1/agents/${this.agent.agentid}/completions`, data: {
|
|
55
|
+
question: chatText,
|
|
56
|
+
session_id: callChatOption.session_id,
|
|
57
|
+
stream: false
|
|
58
|
+
} }, axiosOption);
|
|
59
|
+
const response = yield (0, declare_1.request)(opts);
|
|
60
|
+
if (!response.successed)
|
|
61
|
+
return { successed: false, error: 'failed' };
|
|
62
|
+
return { successed: true, message: response.data.answer };
|
|
63
|
+
});
|
|
64
|
+
}
|
|
65
|
+
/**
|
|
66
|
+
* 流式传输聊天请求
|
|
67
|
+
* @param chatText
|
|
68
|
+
* @param callChatOption
|
|
69
|
+
* @param attach
|
|
70
|
+
* @param axiosOption
|
|
71
|
+
* @returns
|
|
72
|
+
*/
|
|
73
|
+
chatRequestInStream(chatText, callChatOption, attach, axiosOption) {
|
|
74
|
+
var _a, e_1, _b, _c;
|
|
75
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
76
|
+
if (!chatText)
|
|
77
|
+
this.emit('chaterror', { successed: false, error: 'no text in chat' });
|
|
78
|
+
axiosOption = Object.assign({}, axiosOption || { timeout: 60000 });
|
|
79
|
+
let requestid = Math.ceil(Math.random() * (new Date().getTime() * Math.random()) / 1000);
|
|
80
|
+
try {
|
|
81
|
+
const opts = Object.assign({ headers: {
|
|
82
|
+
'Content-Type': 'application/json',
|
|
83
|
+
'authorization': `Bearer ${this.apikey}`
|
|
84
|
+
}, method: 'post', url: `${this.agent.endpoint}/api/v1/agents/${this.agent.agentid}/completions`, data: {
|
|
85
|
+
question: chatText,
|
|
86
|
+
session_id: callChatOption.session_id,
|
|
87
|
+
stream: true,
|
|
88
|
+
optional: callChatOption.optional,
|
|
89
|
+
}, responseType: 'stream' }, axiosOption);
|
|
90
|
+
// console.log('opts', opts)
|
|
91
|
+
const response = yield (0, axios_1.default)(opts);
|
|
92
|
+
const readableStream = stream_1.Readable.from(response.data);
|
|
93
|
+
let index = 0, session_id;
|
|
94
|
+
try {
|
|
95
|
+
for (var _d = true, readableStream_1 = __asyncValues(readableStream), readableStream_1_1; readableStream_1_1 = yield readableStream_1.next(), _a = readableStream_1_1.done, !_a;) {
|
|
96
|
+
_c = readableStream_1_1.value;
|
|
97
|
+
_d = false;
|
|
98
|
+
try {
|
|
99
|
+
const chunk = _c;
|
|
100
|
+
const answerData = JSON.parse(chunk.toString().replace('data:', ''));
|
|
101
|
+
const { answer, running_status } = answerData.data;
|
|
102
|
+
if (running_status === true)
|
|
103
|
+
continue;
|
|
104
|
+
if (!session_id)
|
|
105
|
+
session_id = answerData.data.session_id;
|
|
106
|
+
const finished = answerData.data === true;
|
|
107
|
+
let output = { successed: true, requestid, segment: answer, text: answer, finish_reason: finished ? 'stop' : null, index: index++, session_id };
|
|
108
|
+
if (attach)
|
|
109
|
+
output = Object.assign({}, output, attach);
|
|
110
|
+
this.emit(finished ? 'chatdone' : 'chattext', output);
|
|
111
|
+
}
|
|
112
|
+
finally {
|
|
113
|
+
_d = true;
|
|
114
|
+
}
|
|
115
|
+
}
|
|
116
|
+
}
|
|
117
|
+
catch (e_1_1) { e_1 = { error: e_1_1 }; }
|
|
118
|
+
finally {
|
|
119
|
+
try {
|
|
120
|
+
if (!_d && !_a && (_b = readableStream_1.return)) yield _b.call(readableStream_1);
|
|
121
|
+
}
|
|
122
|
+
finally { if (e_1) throw e_1.error; }
|
|
123
|
+
}
|
|
124
|
+
return { successed: true, requestid };
|
|
125
|
+
}
|
|
126
|
+
catch (error) {
|
|
127
|
+
this.emit('requesterror', { successed: false, requestid, error: 'call axios faied ' + error });
|
|
128
|
+
return { successed: false, requestid };
|
|
129
|
+
}
|
|
130
|
+
});
|
|
131
|
+
}
|
|
132
|
+
}
|
|
133
|
+
exports.default = AIMiddlePlatform;
|
package/dist/declare.d.ts
CHANGED
|
@@ -73,6 +73,12 @@ export interface RpcResult extends ApiResult {
|
|
|
73
73
|
* @returns
|
|
74
74
|
*/
|
|
75
75
|
export declare function request(opts?: any): Promise<RpcResult>;
|
|
76
|
+
/**
|
|
77
|
+
*
|
|
78
|
+
* @param opts
|
|
79
|
+
* @returns
|
|
80
|
+
*/
|
|
81
|
+
export declare function requestStream(opts: any, processChunkData: Function): void;
|
|
76
82
|
/**
|
|
77
83
|
* 数据缓存提供者接口
|
|
78
84
|
*/
|
package/dist/declare.js
CHANGED
|
@@ -12,7 +12,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
|
12
12
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
13
13
|
};
|
|
14
14
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
15
|
-
exports.request = void 0;
|
|
15
|
+
exports.requestStream = exports.request = void 0;
|
|
16
16
|
const axios_1 = __importDefault(require("axios"));
|
|
17
17
|
/**
|
|
18
18
|
* Axios远程请求封装
|
|
@@ -33,3 +33,19 @@ function request(opts = {}) {
|
|
|
33
33
|
});
|
|
34
34
|
}
|
|
35
35
|
exports.request = request;
|
|
36
|
+
/**
|
|
37
|
+
*
|
|
38
|
+
* @param opts
|
|
39
|
+
* @returns
|
|
40
|
+
*/
|
|
41
|
+
/**
 * Issue a streaming HTTP request and feed every received data chunk to the
 * supplied callback.
 *
 * @param opts axios request options; `opts.body` is accepted as an alias for `opts.data`
 * @param processChunkData called once per raw chunk emitted by the response stream
 * @returns the underlying axios promise, so callers can await/catch transport
 *          errors (previously a failed request produced an unhandled rejection)
 */
function requestStream(opts = {}, processChunkData) {
    if (!opts.data)
        opts.data = opts.body;
    // FIX: return the promise — the original fire-and-forget call left HTTP
    // failures as unhandled promise rejections.
    return (0, axios_1.default)(opts)
        .then((res) => {
        res.data.on('data', (chunk) => {
            processChunkData(chunk);
        });
    });
}
|
|
51
|
+
exports.requestStream = requestStream;
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import GptBase from "./gptbase";
|
|
2
|
+
export default class DouBaoAI extends GptBase {
|
|
3
|
+
protected apiKey: string;
|
|
4
|
+
protected apiOption: any;
|
|
5
|
+
/**
|
|
6
|
+
* 构造函数
|
|
7
|
+
*/
|
|
8
|
+
constructor(apiKey: string, apiOption?: any);
|
|
9
|
+
/**
|
|
10
|
+
* 请求接口
|
|
11
|
+
*/
|
|
12
|
+
chatRequest(chatText: string | Array<any>, callChatOption: any, axiosOption?: any): Promise<any>;
|
|
13
|
+
/**
|
|
14
|
+
* 组装最后的调用参数
|
|
15
|
+
* @param callChatOption
|
|
16
|
+
* @returns
|
|
17
|
+
*/
|
|
18
|
+
private assembleApiParams;
|
|
19
|
+
/**
|
|
20
|
+
* 流式的聊天模式
|
|
21
|
+
* @param chatText
|
|
22
|
+
* @param _paramOption
|
|
23
|
+
* @param axiosOption
|
|
24
|
+
*/
|
|
25
|
+
chatRequestInStream(chatText: string | Array<any>, callChatOption: any, attach?: any, axiosOption?: any): Promise<any>;
|
|
26
|
+
/**
|
|
27
|
+
* 处理每次流式返回的对话片段
|
|
28
|
+
* @param chunks
|
|
29
|
+
* @param requestid
|
|
30
|
+
* @param replytext
|
|
31
|
+
* @param attach
|
|
32
|
+
*/
|
|
33
|
+
processChunkData(chunks: string[], requestid: Number, replytext: string[], attach: any): void;
|
|
34
|
+
}
|
package/dist/doubaoai.js
ADDED
|
@@ -0,0 +1,149 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
3
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
4
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
|
5
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
6
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
7
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
8
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
9
|
+
});
|
|
10
|
+
};
|
|
11
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
12
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
13
|
+
};
|
|
14
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
15
|
+
/**
|
|
16
|
+
* 火山方舟-豆包大模型引擎
|
|
17
|
+
*/
|
|
18
|
+
const declare_1 = require("./declare");
|
|
19
|
+
const gptbase_1 = __importDefault(require("./gptbase"));
|
|
20
|
+
class DouBaoAI extends gptbase_1.default {
|
|
21
|
+
/**
|
|
22
|
+
* 构造函数
|
|
23
|
+
*/
|
|
24
|
+
constructor(apiKey, apiOption = {}) {
|
|
25
|
+
super();
|
|
26
|
+
this.apiOption = {};
|
|
27
|
+
this.apiKey = apiKey;
|
|
28
|
+
this.apiOption = apiOption;
|
|
29
|
+
}
|
|
30
|
+
/**
|
|
31
|
+
* 请求接口
|
|
32
|
+
*/
|
|
33
|
+
chatRequest(chatText, callChatOption, axiosOption = {}) {
|
|
34
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
35
|
+
if (!chatText)
|
|
36
|
+
return { successed: false, error: { errcode: 2, errmsg: '缺失聊天的内容' } };
|
|
37
|
+
const callParams = this.assembleApiParams(chatText, false, callChatOption, axiosOption);
|
|
38
|
+
try {
|
|
39
|
+
const response = yield (0, declare_1.request)(callParams);
|
|
40
|
+
if (response.successed && !response.data.code)
|
|
41
|
+
return { successed: true, message: response.data.choices, usage: response.data.usage };
|
|
42
|
+
return Object.assign({ successed: false }, response.data);
|
|
43
|
+
}
|
|
44
|
+
catch (error) {
|
|
45
|
+
console.log('result is error ', error);
|
|
46
|
+
return { successed: false, error };
|
|
47
|
+
}
|
|
48
|
+
});
|
|
49
|
+
}
|
|
50
|
+
/**
|
|
51
|
+
* 组装最后的调用参数
|
|
52
|
+
* @param callChatOption
|
|
53
|
+
* @returns
|
|
54
|
+
*/
|
|
55
|
+
assembleApiParams(chatText, streamCall = false, callChatOption, axiosOption = {}) {
|
|
56
|
+
let messages = typeof (chatText) == 'string' ? [{ role: 'user', content: chatText }] : chatText;
|
|
57
|
+
let params = {};
|
|
58
|
+
if ((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.temperature) || this.apiOption.temperature)
|
|
59
|
+
params.temperature = Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.temperature) || this.apiOption.temperature);
|
|
60
|
+
params.max_tokens = Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.maxtoken) || this.apiOption.maxtoken);
|
|
61
|
+
if ((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.top_p) || this.apiOption.top_p)
|
|
62
|
+
params.top_p = Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.top_p) || this.apiOption.top_p);
|
|
63
|
+
if ((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.presence_penalty) || this.apiOption.presence_penalty)
|
|
64
|
+
params.presence_penalty = Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.presence_penalty) || this.apiOption.presence_penalty);
|
|
65
|
+
if ((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.frequency_penalty) || this.apiOption.frequency_penalty)
|
|
66
|
+
params.frequency_penalty = Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.frequency_penalty) || this.apiOption.frequency_penalty);
|
|
67
|
+
if ((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.top_logprobs) || this.apiOption.top_logprobs) {
|
|
68
|
+
params.logprobs = true;
|
|
69
|
+
params.top_logprobs = Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.top_logprobs) || this.apiOption.top_logprobs);
|
|
70
|
+
}
|
|
71
|
+
params.tools = ((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.enableToolCall) === 1 && (callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.tools)) ? callChatOption.tools : undefined;
|
|
72
|
+
params.tool_choice = (callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.enableToolCall) === 1 ? 'auto' : undefined;
|
|
73
|
+
const axiosParams = Object.assign(Object.assign({}, axiosOption), { method: "post", headers: {
|
|
74
|
+
'Content-Type': 'application/json',
|
|
75
|
+
'authorization': `Bearer ${this.apiKey}`
|
|
76
|
+
}, data: Object.assign(Object.assign({ model: (callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.model) || this.apiOption.model }, params), { messages, stream: streamCall }), url: 'https://ark.cn-beijing.volces.com/api/v3/chat/completions' });
|
|
77
|
+
if (streamCall)
|
|
78
|
+
axiosParams.responseType = 'stream';
|
|
79
|
+
return axiosParams;
|
|
80
|
+
}
|
|
81
|
+
/**
|
|
82
|
+
* 流式的聊天模式
|
|
83
|
+
* @param chatText
|
|
84
|
+
* @param _paramOption
|
|
85
|
+
* @param axiosOption
|
|
86
|
+
*/
|
|
87
|
+
chatRequestInStream(chatText, callChatOption, attach, axiosOption) {
|
|
88
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
89
|
+
if (!chatText)
|
|
90
|
+
this.emit('chaterror', { successed: false, error: 'no text in chat' });
|
|
91
|
+
axiosOption = Object.assign({}, axiosOption || { timeout: 10000 });
|
|
92
|
+
const callParams = this.assembleApiParams(chatText, true, callChatOption, axiosOption);
|
|
93
|
+
let requestid = Math.ceil(Math.random() * (new Date().getTime() * Math.random()) / 1000), replytext = [];
|
|
94
|
+
try {
|
|
95
|
+
(0, declare_1.requestStream)(callParams, (chunk) => {
|
|
96
|
+
let streamText = chunk.toString().replace('[DONE]', '').replace(/[\r\n]+/gm, '');
|
|
97
|
+
this.processChunkData(streamText.split(/data: /), requestid, replytext, attach);
|
|
98
|
+
});
|
|
99
|
+
return { successed: true, requestid };
|
|
100
|
+
}
|
|
101
|
+
catch (error) {
|
|
102
|
+
this.emit('requesterror', { successed: false, requestid, error: 'call axios faied ' + error });
|
|
103
|
+
return { successed: false, requestid };
|
|
104
|
+
}
|
|
105
|
+
});
|
|
106
|
+
}
|
|
107
|
+
/**
|
|
108
|
+
* 处理每次流式返回的对话片段
|
|
109
|
+
* @param chunks
|
|
110
|
+
* @param requestid
|
|
111
|
+
* @param replytext
|
|
112
|
+
* @param attach
|
|
113
|
+
*/
|
|
114
|
+
processChunkData(chunks, requestid, replytext, attach) {
|
|
115
|
+
let has_tool_calls = 0, currentIndex, previous_index = -1, tool_calls = []; // 使用数组来存储工具调用
|
|
116
|
+
for (const splitString of chunks) {
|
|
117
|
+
if (!splitString)
|
|
118
|
+
continue;
|
|
119
|
+
const chunk = JSON.parse(splitString);
|
|
120
|
+
const [choice] = chunk.choices, { finish_reason: finishreason, index, usage } = choice, { content, tool_calls: toolCalls } = choice.delta;
|
|
121
|
+
if (toolCalls && toolCalls.length) {
|
|
122
|
+
currentIndex = toolCalls[0].index;
|
|
123
|
+
has_tool_calls = 1;
|
|
124
|
+
if (currentIndex !== previous_index) {
|
|
125
|
+
tool_calls.push({
|
|
126
|
+
id: toolCalls[0].id,
|
|
127
|
+
type: 'function',
|
|
128
|
+
function: {
|
|
129
|
+
name: toolCalls[0].function.name,
|
|
130
|
+
arguments: toolCalls[0].function.arguments
|
|
131
|
+
}
|
|
132
|
+
});
|
|
133
|
+
// 更新previousIndex以供下次比较
|
|
134
|
+
previous_index = currentIndex;
|
|
135
|
+
}
|
|
136
|
+
else {
|
|
137
|
+
tool_calls[previous_index].function.arguments += toolCalls[0].function.arguments;
|
|
138
|
+
}
|
|
139
|
+
}
|
|
140
|
+
else
|
|
141
|
+
replytext.push(content);
|
|
142
|
+
let output = { successed: true, requestid, segment: content, text: replytext.join(''), finish_reason: finishreason, index, usage, has_tool_calls: has_tool_calls, tool_calls: tool_calls };
|
|
143
|
+
if (attach)
|
|
144
|
+
output = Object.assign({}, output, attach);
|
|
145
|
+
this.emit(finishreason ? 'chatdone' : 'chattext', output);
|
|
146
|
+
}
|
|
147
|
+
}
|
|
148
|
+
}
|
|
149
|
+
exports.default = DouBaoAI;
|
package/dist/gptprovider.d.ts
CHANGED
|
@@ -7,7 +7,9 @@ export declare const GptProviderEnum: {
|
|
|
7
7
|
readonly OPENAI: "openai";
|
|
8
8
|
readonly OPENAIPROXY: "openaiproxy";
|
|
9
9
|
readonly MICROSOFT: "microsoft";
|
|
10
|
+
readonly AIMP: "aimp";
|
|
10
11
|
readonly BAIDU: "baidu";
|
|
12
|
+
readonly DOUBAO: "doubao";
|
|
11
13
|
readonly GOOGLE: "google";
|
|
12
14
|
readonly STABILITY: "stability";
|
|
13
15
|
readonly STABILITY2: "stability2";
|
package/dist/gptprovider.js
CHANGED
|
@@ -14,6 +14,8 @@ const azureai_1 = __importDefault(require("./azureai"));
|
|
|
14
14
|
const stabilityai_1 = __importDefault(require("./stabilityai"));
|
|
15
15
|
const stabilityplusai_1 = __importDefault(require("./stabilityplusai"));
|
|
16
16
|
const baiduai_1 = __importDefault(require("./baiduai"));
|
|
17
|
+
const aimp_1 = __importDefault(require("./aimp"));
|
|
18
|
+
const doubaoai_1 = __importDefault(require("./doubaoai"));
|
|
17
19
|
/**
|
|
18
20
|
* OpenAI/NLP 的服务提供商 OpenAI,微软,百度文心(待接入),google(待接入)
|
|
19
21
|
*/
|
|
@@ -21,7 +23,9 @@ exports.GptProviderEnum = {
|
|
|
21
23
|
OPENAI: 'openai',
|
|
22
24
|
OPENAIPROXY: 'openaiproxy',
|
|
23
25
|
MICROSOFT: 'microsoft',
|
|
26
|
+
AIMP: 'aimp',
|
|
24
27
|
BAIDU: 'baidu',
|
|
28
|
+
DOUBAO: 'doubao',
|
|
25
29
|
GOOGLE: 'google',
|
|
26
30
|
STABILITY: 'stability',
|
|
27
31
|
STABILITY2: 'stability2',
|
|
@@ -34,7 +38,7 @@ exports.GptProviderEnum = {
|
|
|
34
38
|
* @returns
|
|
35
39
|
*/
|
|
36
40
|
function createGpt(provider, apikey, setting) {
|
|
37
|
-
let { model, maxtoken, temperature, serviceurl, endpoint, engine, version, embedding, top_p, presence_penalty, frequency_penalty } = setting || {};
|
|
41
|
+
let { model, agentid, maxtoken, temperature, serviceurl, endpoint, engine, version, embedding, top_p, presence_penalty, frequency_penalty } = setting || {};
|
|
38
42
|
switch (provider) {
|
|
39
43
|
case exports.GptProviderEnum.OPENAI:
|
|
40
44
|
return new openai_1.default(apikey + '', { model, maxtoken, temperature, embedding, top_p, presence_penalty, frequency_penalty });
|
|
@@ -45,6 +49,10 @@ function createGpt(provider, apikey, setting) {
|
|
|
45
49
|
case exports.GptProviderEnum.BAIDU:
|
|
46
50
|
let cred = typeof (apikey) === 'string' ? { apikey, securitykey: apikey } : apikey;
|
|
47
51
|
return new baiduai_1.default(cred);
|
|
52
|
+
case exports.GptProviderEnum.AIMP:
|
|
53
|
+
return new aimp_1.default(apikey + '', { endpoint, agentid });
|
|
54
|
+
case exports.GptProviderEnum.DOUBAO:
|
|
55
|
+
return new doubaoai_1.default(apikey + '', { model, maxtoken, temperature, top_p, presence_penalty, frequency_penalty });
|
|
48
56
|
case exports.GptProviderEnum.STABILITY:
|
|
49
57
|
return new stabilityai_1.default(apikey + '', { endpoint, engine }, setting);
|
|
50
58
|
case exports.GptProviderEnum.STABILITY2:
|
package/package.json
CHANGED
package/src/aimp.ts
ADDED
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* 接入AI平台的中间层
|
|
3
|
+
*/
|
|
4
|
+
import axios from 'axios';
|
|
5
|
+
import { request } from "./declare";
|
|
6
|
+
import GptBase from "./gptbase"
|
|
7
|
+
import { Readable } from 'stream';
|
|
8
|
+
import { ChatReponse } from './declare';
|
|
9
|
+
export default class AIMiddlePlatform extends GptBase {
|
|
10
|
+
|
|
11
|
+
protected apikey: string;
|
|
12
|
+
protected agent: { endpoint: string, agentid: string };
|
|
13
|
+
/**
|
|
14
|
+
*
|
|
15
|
+
* @param apikey 调用AI中台 的key
|
|
16
|
+
* @param agent 智能体信息
|
|
17
|
+
*/
|
|
18
|
+
constructor(apikey: string, agent: { endpoint: string, agentid: string }) {
|
|
19
|
+
super();
|
|
20
|
+
this.apikey = apikey;
|
|
21
|
+
this.agent = agent;
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
/**
|
|
25
|
+
* 非流式传输聊天请求
|
|
26
|
+
* @param chatText
|
|
27
|
+
* @param callChatOption
|
|
28
|
+
* @param axiosOption
|
|
29
|
+
*/
|
|
30
|
+
public async chatRequest(chatText: string, callChatOption: any, axiosOption: any = {}): Promise<ChatReponse> {
|
|
31
|
+
if (!chatText) this.emit('chaterror', { successed: false, error: 'no text in chat' });
|
|
32
|
+
axiosOption = Object.assign({}, axiosOption || { timeout: 60000 })
|
|
33
|
+
const opts: any = {
|
|
34
|
+
headers: {
|
|
35
|
+
'Content-Type': 'application/json',
|
|
36
|
+
'authorization': `Bearer ${this.apikey}`
|
|
37
|
+
},
|
|
38
|
+
method: 'post',
|
|
39
|
+
url: `${this.agent.endpoint}/api/v1/agents/${this.agent.agentid}/completions`,
|
|
40
|
+
data: {
|
|
41
|
+
question: chatText,
|
|
42
|
+
session_id: callChatOption.session_id,
|
|
43
|
+
stream: false
|
|
44
|
+
},
|
|
45
|
+
...axiosOption
|
|
46
|
+
}
|
|
47
|
+
const response = await request(opts);
|
|
48
|
+
if (!response.successed) return { successed: false, error: 'failed' };
|
|
49
|
+
return { successed: true, message: response.data.answer };
|
|
50
|
+
}
|
|
51
|
+
/**
|
|
52
|
+
* 流式传输聊天请求
|
|
53
|
+
* @param chatText
|
|
54
|
+
* @param callChatOption
|
|
55
|
+
* @param attach
|
|
56
|
+
* @param axiosOption
|
|
57
|
+
* @returns
|
|
58
|
+
*/
|
|
59
|
+
override async chatRequestInStream(chatText: string | Array<any>, callChatOption: any, attach?: any, axiosOption?: any): Promise<any> {
|
|
60
|
+
if (!chatText) this.emit('chaterror', { successed: false, error: 'no text in chat' });
|
|
61
|
+
axiosOption = Object.assign({}, axiosOption || { timeout: 60000 })
|
|
62
|
+
let requestid = Math.ceil(Math.random() * (new Date().getTime() * Math.random()) / 1000);
|
|
63
|
+
try {
|
|
64
|
+
const opts: any = {
|
|
65
|
+
headers: {
|
|
66
|
+
'Content-Type': 'application/json',
|
|
67
|
+
'authorization': `Bearer ${this.apikey}`
|
|
68
|
+
},
|
|
69
|
+
method: 'post',
|
|
70
|
+
url: `${this.agent.endpoint}/api/v1/agents/${this.agent.agentid}/completions`,
|
|
71
|
+
data: {
|
|
72
|
+
question: chatText,
|
|
73
|
+
session_id: callChatOption.session_id,
|
|
74
|
+
stream: true,
|
|
75
|
+
optional: callChatOption.optional,
|
|
76
|
+
},
|
|
77
|
+
responseType: 'stream',
|
|
78
|
+
...axiosOption
|
|
79
|
+
}
|
|
80
|
+
// console.log('opts', opts)
|
|
81
|
+
const response = await axios(opts);
|
|
82
|
+
const readableStream = Readable.from(response.data);
|
|
83
|
+
let index = 0, session_id;
|
|
84
|
+
for await (const chunk of readableStream) {
|
|
85
|
+
const answerData = JSON.parse(chunk.toString().replace('data:',''));
|
|
86
|
+
const { answer, running_status } = answerData.data;
|
|
87
|
+
if (running_status === true) continue;
|
|
88
|
+
if (!session_id) session_id = answerData.data.session_id;
|
|
89
|
+
const finished = answerData.data === true;
|
|
90
|
+
let output = { successed: true, requestid, segment: answer, text: answer, finish_reason: finished ? 'stop' : null, index: index++, session_id };
|
|
91
|
+
if (attach) output = Object.assign({}, output, attach);
|
|
92
|
+
this.emit(finished ? 'chatdone' : 'chattext', output)
|
|
93
|
+
}
|
|
94
|
+
return { successed: true, requestid }
|
|
95
|
+
} catch (error) {
|
|
96
|
+
this.emit('requesterror', { successed: false, requestid, error: 'call axios faied ' + error });
|
|
97
|
+
return { successed: false, requestid }
|
|
98
|
+
}
|
|
99
|
+
}
|
|
100
|
+
}
|
package/src/declare.ts
CHANGED
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
|
|
2
2
|
import { EmbeddingItem } from '@azure/openai';
|
|
3
3
|
import axios from 'axios';
|
|
4
|
+
|
|
4
5
|
export interface ApiResult {
|
|
5
6
|
/**
|
|
6
7
|
* return the result of api called
|
|
@@ -25,29 +26,29 @@ export interface ChatReponse extends ApiResult {
|
|
|
25
26
|
* @memberof ChatReponse
|
|
26
27
|
*/
|
|
27
28
|
'message'?: Array<any>;
|
|
28
|
-
'usage'?:any;
|
|
29
|
+
'usage'?: any;
|
|
29
30
|
}
|
|
30
31
|
/**
|
|
31
32
|
* 调用OpenAI Api的参数约定
|
|
32
33
|
*/
|
|
33
34
|
export interface OpenAIApiParameters {
|
|
34
|
-
'embedding'?:string, ///模型引擎,兼容Azure
|
|
35
|
+
'embedding'?: string, ///模型引擎,兼容Azure
|
|
35
36
|
'model'?: string, ///模型名称
|
|
36
37
|
'maxtoken'?: number; ///返回的最大token
|
|
37
38
|
'temperature'?: number;
|
|
38
|
-
'top_p'?:number;
|
|
39
|
+
'top_p'?: number;
|
|
39
40
|
'presence_penalty'?: number;
|
|
40
41
|
'frequency_penalty'?: number;
|
|
41
42
|
'replyCounts'?: number; ///返回多少答案
|
|
42
|
-
'tools'?:Array<any>,
|
|
43
|
+
'tools'?: Array<any>,
|
|
43
44
|
'tool_choice'?: string,
|
|
44
45
|
'enableToolCall'?: number ///是否允许调用toolfunction
|
|
45
46
|
}
|
|
46
47
|
/**
|
|
47
48
|
* Azure 上的OpenAI的链接参数
|
|
48
49
|
*/
|
|
49
|
-
export interface ProxyPatameters{
|
|
50
|
-
'serviceurl':string; ///端点
|
|
50
|
+
export interface ProxyPatameters {
|
|
51
|
+
'serviceurl': string; ///端点
|
|
51
52
|
}
|
|
52
53
|
|
|
53
54
|
/**
|
|
@@ -84,9 +85,23 @@ export async function request(opts: any = {}): Promise<RpcResult> {
|
|
|
84
85
|
let result = await axios(opts);
|
|
85
86
|
return { successed: true, data: result.data }
|
|
86
87
|
} catch (err) {
|
|
87
|
-
return { successed: false, error: err,data:err }
|
|
88
|
+
return { successed: false, error: err, data: err }
|
|
88
89
|
}
|
|
89
90
|
}
|
|
91
|
+
/**
|
|
92
|
+
*
|
|
93
|
+
* @param opts
|
|
94
|
+
* @returns
|
|
95
|
+
*/
|
|
96
|
+
export function requestStream(opts: any = {},processChunkData:Function) {
|
|
97
|
+
if (!opts.data) opts.data = opts.body;
|
|
98
|
+
axios(opts)
|
|
99
|
+
.then((res: any) => {
|
|
100
|
+
res.data.on('data', (chunk: any) => {
|
|
101
|
+
processChunkData(chunk);
|
|
102
|
+
});
|
|
103
|
+
})
|
|
104
|
+
}
|
|
90
105
|
|
|
91
106
|
|
|
92
107
|
/**
|
package/src/doubaoai.ts
ADDED
|
@@ -0,0 +1,129 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* 火山方舟-豆包大模型引擎
|
|
3
|
+
*/
|
|
4
|
+
import { request, requestStream } from "./declare";
|
|
5
|
+
import GptBase from "./gptbase"
|
|
6
|
+
export default class DouBaoAI extends GptBase {
|
|
7
|
+
protected apiKey: string;
|
|
8
|
+
protected apiOption: any = {}
|
|
9
|
+
/**
|
|
10
|
+
* 构造函数
|
|
11
|
+
*/
|
|
12
|
+
/**
 * @param apiKey    Volcano Ark API key used as the Bearer token
 * @param apiOption instance-level defaults (model, maxtoken, temperature, ...)
 */
constructor(apiKey: string, apiOption: any = {}) {
    super();
    this.apiKey = apiKey;
    this.apiOption = apiOption;
}
|
|
17
|
+
/**
|
|
18
|
+
* 请求接口
|
|
19
|
+
*/
|
|
20
|
+
public async chatRequest(chatText: string | Array<any>, callChatOption: any, axiosOption: any = {}): Promise<any> {
|
|
21
|
+
if (!chatText) return { successed: false, error: { errcode: 2, errmsg: '缺失聊天的内容' } };
|
|
22
|
+
const callParams = this.assembleApiParams(chatText, false, callChatOption, axiosOption);
|
|
23
|
+
try {
|
|
24
|
+
const response = await request(callParams)
|
|
25
|
+
if (response.successed && !response.data.code) return { successed: true, message: response.data.choices, usage: response.data.usage }
|
|
26
|
+
return { successed: false, ...response.data };
|
|
27
|
+
} catch (error) {
|
|
28
|
+
console.log('result is error ', error)
|
|
29
|
+
return { successed: false, error };
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
/**
|
|
33
|
+
* 组装最后的调用参数
|
|
34
|
+
* @param callChatOption
|
|
35
|
+
* @returns
|
|
36
|
+
*/
|
|
37
|
+
/**
 * Assemble the final axios call parameters for the Doubao completions API.
 * Per-call options in `callChatOption` take precedence over the instance-level
 * defaults in `this.apiOption`.
 *
 * @param chatText       plain string (wrapped as one user message) or message array
 * @param streamCall     when true, requests SSE streaming and a stream response type
 * @param callChatOption per-call tuning options
 * @param axiosOption    extra axios options merged in (method/headers/data/url win)
 * @returns the axios request options object
 */
private assembleApiParams(chatText: string | Array<any>, streamCall: boolean = false, callChatOption: any, axiosOption: any = {}): any {
    const messages: Array<any> = typeof (chatText) == 'string' ? [{ role: 'user', content: chatText }] : chatText;
    // Per-call value wins; falls back to the instance default (|| semantics kept
    // from the original, so falsy per-call values defer to the instance value).
    const pick = (key: string) => callChatOption?.[key] || this.apiOption[key];
    let params: any = {};
    if (pick('temperature')) params.temperature = Number(pick('temperature'));
    // FIX: max_tokens was assigned unconditionally, producing NaN (serialized
    // as null in the request body) whenever neither the call nor the instance
    // configured `maxtoken`; guard it like every other tunable.
    if (pick('maxtoken')) params.max_tokens = Number(pick('maxtoken'));
    if (pick('top_p')) params.top_p = Number(pick('top_p'));
    if (pick('presence_penalty')) params.presence_penalty = Number(pick('presence_penalty'));
    if (pick('frequency_penalty')) params.frequency_penalty = Number(pick('frequency_penalty'));
    if (pick('top_logprobs')) {
        // Requesting top_logprobs implies enabling logprobs.
        params.logprobs = true;
        params.top_logprobs = Number(pick('top_logprobs'));
    }
    // Tool calling is opt-in via enableToolCall === 1.
    params.tools = (callChatOption?.enableToolCall === 1 && callChatOption?.tools) ? callChatOption.tools : undefined;
    params.tool_choice = callChatOption?.enableToolCall === 1 ? 'auto' : undefined;
    const axiosParams: any = {
        ...axiosOption,
        method: "post",
        headers: {
            'Content-Type': 'application/json',
            'authorization': `Bearer ${this.apiKey}`
        },
        data: {
            model: callChatOption?.model || this.apiOption.model,
            ...params,
            messages,
            stream: streamCall
        },
        url: 'https://ark.cn-beijing.volces.com/api/v3/chat/completions'
    };
    if (streamCall) axiosParams.responseType = 'stream';
    return axiosParams;
}
|
|
69
|
+
/**
|
|
70
|
+
* 流式的聊天模式
|
|
71
|
+
* @param chatText
|
|
72
|
+
* @param _paramOption
|
|
73
|
+
* @param axiosOption
|
|
74
|
+
*/
|
|
75
|
+
override async chatRequestInStream(chatText: string | Array<any>, callChatOption: any, attach?: any, axiosOption?: any): Promise<any> {
|
|
76
|
+
if (!chatText) this.emit('chaterror', { successed: false, error: 'no text in chat' });
|
|
77
|
+
axiosOption = Object.assign({}, axiosOption || { timeout: 10000 })
|
|
78
|
+
const callParams = this.assembleApiParams(chatText, true, callChatOption, axiosOption);
|
|
79
|
+
let requestid = Math.ceil(Math.random() * (new Date().getTime() * Math.random()) / 1000), replytext: string[] = [];
|
|
80
|
+
try {
|
|
81
|
+
requestStream(callParams, (chunk: any) => {
|
|
82
|
+
let streamText = chunk.toString().replace('[DONE]', '').replace(/[\r\n]+/gm, '')
|
|
83
|
+
this.processChunkData(streamText.split(/data: /), requestid, replytext, attach)
|
|
84
|
+
})
|
|
85
|
+
return { successed: true, requestid }
|
|
86
|
+
} catch (error) {
|
|
87
|
+
this.emit('requesterror', { successed: false, requestid, error: 'call axios faied ' + error });
|
|
88
|
+
return { successed: false, requestid }
|
|
89
|
+
}
|
|
90
|
+
}
|
|
91
|
+
/**
|
|
92
|
+
* 处理每次流式返回的对话片段
|
|
93
|
+
* @param chunks
|
|
94
|
+
* @param requestid
|
|
95
|
+
* @param replytext
|
|
96
|
+
* @param attach
|
|
97
|
+
*/
|
|
98
|
+
/**
 * Process one batch of streamed chat fragments (SSE "data:" frames).
 * Accumulates plain text into `replytext` and tool-call deltas into a local
 * array, emitting 'chattext' per fragment and 'chatdone' when a frame carries
 * a finish_reason.
 *
 * @param chunks    JSON frame strings (already split on "data: "; empty entries skipped)
 * @param requestid correlation id echoed into every emitted event
 * @param replytext shared accumulator of text segments across calls (mutated)
 * @param attach    optional extra fields merged into every emitted event
 */
processChunkData(chunks: string[], requestid: Number, replytext: string[], attach: any) {
    // NOTE(review): this state is reset on every invocation, so tool-call
    // fragments that span separate network chunks will not be merged — and a
    // continuation arriving first would index tool_calls[-1]; verify upstream
    // chunking guarantees a tool call is delivered within one invocation.
    let has_tool_calls = 0, currentIndex, previous_index = -1, tool_calls: any[] = [];// 使用数组来存储工具调用 (array accumulating assembled tool calls)
    for (const splitString of chunks) {
        if (!splitString) continue;
        // NOTE(review): throws if a JSON frame was split across network reads —
        // confirm requestStream delivers whole frames per chunk.
        const chunk = JSON.parse(splitString);
        const [choice] = chunk.choices,
            // presumably `usage` lives on the choice here — OpenAI-style APIs
            // usually put it at the top level; verify against the Ark response.
            { finish_reason: finishreason, index, usage } = choice,
            { content, tool_calls: toolCalls } = choice.delta;
        if (toolCalls && toolCalls.length) {
            currentIndex = toolCalls[0].index;
            has_tool_calls = 1;
            if (currentIndex !== previous_index) {
                // New tool call: start a fresh entry with its id/name and the
                // first argument fragment.
                tool_calls.push({
                    id: toolCalls[0].id,
                    type: 'function',
                    function: {
                        name: toolCalls[0].function.name,
                        arguments: toolCalls[0].function.arguments
                    }
                });
                // 更新previousIndex以供下次比较 (remember index for the next comparison)
                previous_index = currentIndex;
            } else {
                // Continuation of the current tool call: append the argument
                // fragment. Assumes stream indices start at 0 and are
                // sequential so previous_index matches the array position —
                // TODO confirm.
                tool_calls[previous_index].function.arguments += toolCalls[0].function.arguments
            }
        } else replytext.push(content); // content may be undefined (role-only delta); join('') renders it as ''
        // Emit a snapshot: the new segment plus the full text accumulated so far.
        let output = { successed: true, requestid, segment: content, text: replytext.join(''), finish_reason: finishreason, index, usage, has_tool_calls: has_tool_calls, tool_calls: tool_calls };
        if (attach) output = Object.assign({}, output, attach);
        this.emit(finishreason ? 'chatdone' : 'chattext', output)
    }
}
|
|
128
|
+
}
|
|
129
|
+
}
|
package/src/gptprovider.ts
CHANGED
|
@@ -8,6 +8,8 @@ import AzureAI from './azureai'
|
|
|
8
8
|
import StabilityAI from './stabilityai'
|
|
9
9
|
import StabilityPlusAI from './stabilityplusai'
|
|
10
10
|
import BaiduWenXinAI, { ApiCredential } from './baiduai'
|
|
11
|
+
import AIMiddlePlatform from './aimp';
|
|
12
|
+
import DouBaoAI from './doubaoai'
|
|
11
13
|
import GptBase from './gptbase';
|
|
12
14
|
/**
|
|
13
15
|
* OpenAI/NLP 的服务提供商 OpenAI,微软,百度文心(待接入),google(待接入)
|
|
@@ -16,7 +18,9 @@ export const GptProviderEnum = {
|
|
|
16
18
|
OPENAI: 'openai',
|
|
17
19
|
OPENAIPROXY:'openaiproxy',
|
|
18
20
|
MICROSOFT: 'microsoft',
|
|
21
|
+
AIMP: 'aimp', ///AI 中台业务
|
|
19
22
|
BAIDU: 'baidu',
|
|
23
|
+
DOUBAO:'doubao',
|
|
20
24
|
GOOGLE:'google',
|
|
21
25
|
STABILITY:'stability',
|
|
22
26
|
STABILITY2: 'stability2',
|
|
@@ -30,7 +34,7 @@ export type GptProviderEnum = typeof GptProviderEnum[keyof typeof GptProviderEnu
|
|
|
30
34
|
* @returns
|
|
31
35
|
*/
|
|
32
36
|
export function createGpt(provider: GptProviderEnum, apikey: string|ApiCredential, setting: any): GptBase | null {
|
|
33
|
-
let { model, maxtoken, temperature, serviceurl,endpoint, engine, version, embedding, top_p, presence_penalty, frequency_penalty } = setting || {};
|
|
37
|
+
let { model,agentid, maxtoken, temperature, serviceurl,endpoint, engine, version, embedding, top_p, presence_penalty, frequency_penalty } = setting || {};
|
|
34
38
|
switch (provider) {
|
|
35
39
|
case GptProviderEnum.OPENAI:
|
|
36
40
|
return new OpenAIGpt(apikey + '', { model, maxtoken, temperature, embedding, top_p, presence_penalty, frequency_penalty });
|
|
@@ -39,8 +43,12 @@ export function createGpt(provider: GptProviderEnum, apikey: string|ApiCredentia
|
|
|
39
43
|
case GptProviderEnum.MICROSOFT:
|
|
40
44
|
return new AzureAI(apikey + '', { endpoint, engine, version }, { model, maxtoken, temperature, embedding, top_p, presence_penalty, frequency_penalty }, );
|
|
41
45
|
case GptProviderEnum.BAIDU:
|
|
42
|
-
let cred: ApiCredential = typeof (apikey) === 'string' ? { apikey, securitykey: apikey } : apikey
|
|
46
|
+
let cred: ApiCredential = typeof (apikey) === 'string' ? { apikey, securitykey: apikey } : apikey;
|
|
43
47
|
return new BaiduWenXinAI(cred);
|
|
48
|
+
case GptProviderEnum.AIMP:
|
|
49
|
+
return new AIMiddlePlatform(apikey+'',{ endpoint,agentid });
|
|
50
|
+
case GptProviderEnum.DOUBAO:
|
|
51
|
+
return new DouBaoAI(apikey + '', { model, maxtoken, temperature, top_p, presence_penalty, frequency_penalty })
|
|
44
52
|
case GptProviderEnum.STABILITY:
|
|
45
53
|
return new StabilityAI(apikey + '', { endpoint, engine }, setting);
|
|
46
54
|
case GptProviderEnum.STABILITY2:
|