doomiaichat 7.1.29 → 7.1.30
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +3 -2
- package/src/aimp.ts +0 -125
- package/src/azureai.ts +0 -180
- package/src/baiduai.ts +0 -86
- package/src/corzauthorization.ts +0 -59
- package/src/corzbot.ts +0 -434
- package/src/declare.ts +0 -152
- package/src/deepseek.ts +0 -11
- package/src/doubaoai.ts +0 -129
- package/src/gptbase.ts +0 -52
- package/src/gptprovider.ts +0 -74
- package/src/index.ts +0 -2
- package/src/openai.ts +0 -136
- package/src/openaibase.ts +0 -30
- package/src/openaiproxy.ts +0 -97
- package/src/stabilityai.ts +0 -67
- package/src/stabilityplusai.ts +0 -77
- package/tsconfig.json +0 -31
package/src/doubaoai.ts
DELETED
@@ -1,129 +0,0 @@
- /**
-  * Volcengine Ark - Doubao large-model engine
-  */
- import { request, requestStream } from "./declare";
- import GptBase from "./gptbase"
- export default class DouBaoAI extends GptBase {
-     protected apiKey: string;
-     protected apiOption: any = {}
-     /**
-      * Constructor
-      */
-     constructor(apiKey: string, apiOption: any = {}) {
-         super();
-         this.apiKey = apiKey;
-         this.apiOption = apiOption;
-     }
-     /**
-      * Call the chat API
-      */
-     public async chatRequest(chatText: string | Array<any>, callChatOption: any, axiosOption: any = {}): Promise<any> {
-         if (!chatText) return { successed: false, error: { errcode: 2, errmsg: '缺失聊天的内容' } };
-         const callParams = this.assembleApiParams(chatText, false, callChatOption, axiosOption);
-         try {
-             const response = await request(callParams)
-             if (response.successed && !response.data.code) return { successed: true, message: response.data.choices, usage: response.data.usage }
-             return { successed: false, ...response.data };
-         } catch (error) {
-             console.log('result is error ', error)
-             return { successed: false, error };
-         }
-     }
-     /**
-      * Assemble the final call parameters
-      * @param callChatOption
-      * @returns
-      */
-     private assembleApiParams(chatText: string | Array<any>, streamCall: boolean = false, callChatOption: any, axiosOption: any = {}): any {
-         let messages: Array<any> = typeof (chatText) == 'string' ? [{ role: 'user', content: chatText }] : chatText;
-         let params: any = {};
-         if (callChatOption?.temperature || this.apiOption.temperature) params.temperature = Number(callChatOption?.temperature || this.apiOption.temperature);
-         params.max_tokens = Number(callChatOption?.maxtoken || this.apiOption.maxtoken);
-         if (callChatOption?.top_p || this.apiOption.top_p) params.top_p = Number(callChatOption?.top_p || this.apiOption.top_p);
-         if (callChatOption?.presence_penalty || this.apiOption.presence_penalty) params.presence_penalty = Number(callChatOption?.presence_penalty || this.apiOption.presence_penalty);
-         if (callChatOption?.frequency_penalty || this.apiOption.frequency_penalty) params.frequency_penalty = Number(callChatOption?.frequency_penalty || this.apiOption.frequency_penalty);
-         if (callChatOption?.top_logprobs || this.apiOption.top_logprobs) {
-             params.logprobs = true;
-             params.top_logprobs = Number(callChatOption?.top_logprobs || this.apiOption.top_logprobs);
-         }
-         params.tools = (callChatOption?.enableToolCall === 1 && callChatOption?.tools) ? callChatOption.tools : undefined;
-         params.tool_choice = callChatOption?.enableToolCall === 1 ? 'auto' : undefined;
-         const axiosParams = {
-             ...axiosOption,
-             method: "post",
-             headers: {
-                 'Content-Type': 'application/json',
-                 'authorization': `Bearer ${this.apiKey}`
-             },
-             data: {
-                 model: callChatOption?.model || this.apiOption.model,
-                 ...params,
-                 messages,
-                 stream: streamCall
-             },
-             url: 'https://ark.cn-beijing.volces.com/api/v3/chat/completions'
-         };
-         if (streamCall) axiosParams.responseType = 'stream';
-         return axiosParams;
-     }
-     /**
-      * Streaming chat mode
-      * @param chatText
-      * @param _paramOption
-      * @param axiosOption
-      */
-     override async chatRequestInStream(chatText: string | Array<any>, callChatOption: any, attach?: any, axiosOption?: any): Promise<any> {
-         if (!chatText) this.emit('chaterror', { successed: false, error: 'no text in chat' });
-         axiosOption = Object.assign({}, axiosOption || { timeout: 10000 })
-         const callParams = this.assembleApiParams(chatText, true, callChatOption, axiosOption);
-         let requestid = Math.ceil(Math.random() * (new Date().getTime() * Math.random()) / 1000), replytext: string[] = [];
-         try {
-             requestStream(callParams, (chunk: any) => {
-                 let streamText = chunk.toString().replace('[DONE]', '').replace(/[\r\n]+/gm, '')
-                 this.processChunkData(streamText.split(/data: /), requestid, replytext, attach)
-             })
-             return { successed: true, requestid }
-         } catch (error) {
-             this.emit('requesterror', { successed: false, requestid, error: 'call axios faied ' + error });
-             return { successed: false, requestid }
-         }
-     }
-     /**
-      * Process each streamed chat segment
-      * @param chunks
-      * @param requestid
-      * @param replytext
-      * @param attach
-      */
-     processChunkData(chunks: string[], requestid: Number, replytext: string[], attach: any) {
-         let has_tool_calls = 0, currentIndex, previous_index = -1, tool_calls: any[] = [];// use an array to store the tool calls
-         for (const splitString of chunks) {
-             if (!splitString) continue;
-             const chunk = JSON.parse(splitString);
-             const [choice] = chunk.choices,
-                 { finish_reason: finishreason, index, usage } = choice,
-                 { content, tool_calls: toolCalls } = choice.delta;
-             if (toolCalls && toolCalls.length) {
-                 currentIndex = toolCalls[0].index;
-                 has_tool_calls = 1;
-                 if (currentIndex !== previous_index) {
-                     tool_calls.push({
-                         id: toolCalls[0].id,
-                         type: 'function',
-                         function: {
-                             name: toolCalls[0].function.name,
-                             arguments: toolCalls[0].function.arguments
-                         }
-                     });
-                     // update previousIndex for the next comparison
-                     previous_index = currentIndex;
-                 } else {
-                     tool_calls[previous_index].function.arguments += toolCalls[0].function.arguments
-                 }
-             } else replytext.push(content);
-             let output = { successed: true, requestid, segment: content, text: replytext.join(''), finish_reason: finishreason, index, usage, has_tool_calls: has_tool_calls, tool_calls: tool_calls };
-             if (attach) output = Object.assign({}, output, attach);
-             this.emit(finishreason ? 'chatdone' : 'chattext', output)
-         }
-     }
- }
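For reference, a minimal sketch (not part of the package diff) of how the deleted DouBaoAI class could be called directly, based only on the constructor and chatRequest shown above; the API key and model id are placeholders, not values from the package:

import DouBaoAI from './doubaoai';

// Placeholder credentials and model id, purely illustrative.
const doubao = new DouBaoAI('ARK_API_KEY', { model: 'doubao-pro-32k', maxtoken: 1024 });

async function ask(question: string) {
    // chatRequest resolves to { successed, message, usage } on success,
    // or { successed: false, error } on failure (see chatRequest above).
    const reply = await doubao.chatRequest(question, {});
    if (reply.successed) console.log(reply.message, reply.usage);
    else console.error(reply.error);
}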
package/src/gptbase.ts
DELETED
@@ -1,52 +0,0 @@
- import { EventEmitter } from "events"
- import { ApiResult } from './declare'
-
- export default abstract class GptBase extends EventEmitter {
-     /**
-      * Constructor
-      */
-     constructor() {
-         super();
-     }
-     /**
-      * Get the embedding vector of a text
-      * @param text
-      */
-     getTextEmbedding(_text: string, _axiosOption: any):any {return null;}
-     /**
-      * Free-form chat mode
-      * @param chatText
-      * @param _paramOption
-      * @param axiosOption
-      */
-     abstract chatRequest(chatText: string | Array<any>, _paramOption: any, axiosOption?: any): Promise<ApiResult>;
-
-     /**
-      * Create a conversation topic id
-      * @returns
-      */
-     async createCoversation(_client?:any):Promise<string|null> {
-         return null;
-     }
-
-     /**
-      * Set the agent's variables
-      * @returns
-      */
-     async setVariables(_params: any):Promise<ApiResult> {
-         return { successed: true };
-     }
-     /**
-      * Get the agent's variables
-      * @returns
-      */
-     async getVariables(_params: any): Promise<any> { return {successed: false}}
-     /**
-      * Streaming chat mode
-      * @param chatText
-      * @param _paramOption
-      * @param axiosOption
-      */
-     chatRequestInStream(_chatText: string | Array<any>, _paramOption: any, _attach?: any, _axiosOption?: any): any { return null; }
- }
-
package/src/gptprovider.ts
DELETED
@@ -1,74 +0,0 @@
- //ts check
- /**
-  * Speech-to-text service provider factory
-  */
- import OpenAIGpt from './openai';
- import DeepSeek from './deepseek';
- import CorzBot from './corzbot'
- import OpenAIProxt from './openaiproxy';
- import AzureAI from './azureai'
- import StabilityAI from './stabilityai'
- import StabilityPlusAI from './stabilityplusai'
- import BaiduWenXinAI, { ApiCredential } from './baiduai'
- import AIMiddlePlatform from './aimp';
- import DouBaoAI from './doubaoai'
- import GptBase from './gptbase';
- import { CorzAuthorization } from './corzauthorization'
- // Coze identity-authorization applications
- const corzAuth: Map<string, CorzAuthorization> = new Map();
- /**
-  * OpenAI/NLP service providers: OpenAI, Microsoft, Baidu Wenxin (to be integrated), Google (to be integrated)
-  */
- export const GptProviderEnum = {
-     OPENAI: 'openai',
-     OPENAIPROXY:'openaiproxy',
-     MICROSOFT: 'microsoft',
-     AIMP: 'aimp', /// AI middle-platform service
-     COZE:'coze',
-     BAIDU: 'baidu',
-     DOUBAO:'doubao',
-     DEEPSEEK:'deepseek',
-     GOOGLE:'google',
-     STABILITY:'stability',
-     STABILITY2: 'stability2',
- } as const;
- export type GptProviderEnum = typeof GptProviderEnum[keyof typeof GptProviderEnum];
- /**
-  * Create a different TTS engine object according to the provider type
-  * @param {*} provider
-  * @param {*} apikey
-  * @param {*} setting
-  * @returns
-  */
- export function createGpt(provider: GptProviderEnum, apikey: string|ApiCredential, setting: any): GptBase | null {
-     let { model,agentid, maxtoken, temperature, serviceurl,endpoint, engine, version, embedding, top_p, presence_penalty, frequency_penalty } = setting || {};
-     switch (provider) {
-         case GptProviderEnum.OPENAI:
-             return new OpenAIGpt(apikey + '', { model, maxtoken, temperature, embedding, top_p, presence_penalty, frequency_penalty });
-         case GptProviderEnum.DEEPSEEK:
-             return new DeepSeek(apikey + '', { model, maxtoken, temperature, embedding, top_p, presence_penalty, frequency_penalty });
-         case GptProviderEnum.OPENAIPROXY:
-             return new OpenAIProxt(apikey + '', { serviceurl}, { model, maxtoken, temperature, embedding, top_p, presence_penalty, frequency_penalty });
-         case GptProviderEnum.MICROSOFT:
-             return new AzureAI(apikey + '', { endpoint, engine, version }, { model, maxtoken, temperature, embedding, top_p, presence_penalty, frequency_penalty }, );
-         case GptProviderEnum.BAIDU:
-             let cred: ApiCredential = typeof (apikey) === 'string' ? { apikey, securitykey: apikey } : apikey;
-             return new BaiduWenXinAI(cred);
-         case GptProviderEnum.AIMP:
-             return new AIMiddlePlatform(apikey+'',{ endpoint,agentid });
-         case GptProviderEnum.COZE:
-             let authorizationProvider = corzAuth.get(apikey + '');
-             if (!authorizationProvider) {
-                 authorizationProvider = new CorzAuthorization(apikey + '',setting);
-                 corzAuth.set(apikey + '', authorizationProvider);
-             }
-             return new CorzBot(authorizationProvider!, setting);
-         case GptProviderEnum.DOUBAO:
-             return new DouBaoAI(apikey + '', { model, maxtoken, temperature, top_p, presence_penalty, frequency_penalty })
-         case GptProviderEnum.STABILITY:
-             return new StabilityAI(apikey + '', { endpoint, engine }, setting);
-         case GptProviderEnum.STABILITY2:
-             return new StabilityPlusAI(apikey + '', { endpoint, engine }, setting);
-         default: return null;
-     }
- };
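For reference, a minimal sketch (not part of the package diff) of how the deleted createGpt factory and the event-based streaming contract inherited from GptBase fit together, based only on the sources shown above; the key, model id, and setting values are placeholders:

import { createGpt, GptProviderEnum } from './gptprovider';

// Placeholder key and settings, purely illustrative.
const bot = createGpt(GptProviderEnum.DOUBAO, 'ARK_API_KEY', { model: 'doubao-pro-32k', maxtoken: 1024 });

if (bot) {
    // GptBase extends EventEmitter; the streaming implementations emit
    // 'chattext' per segment, 'chatdone' once finish_reason is set,
    // and 'requesterror' / 'chaterror' on failures.
    bot.on('chattext', (msg: any) => process.stdout.write(msg.segment ?? ''));
    bot.on('chatdone', (msg: any) => console.log('\ndone:', msg.finish_reason));
    bot.on('requesterror', (err: any) => console.error('request failed:', err.error));
    bot.chatRequestInStream('Hello', { model: 'doubao-pro-32k' });
}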
package/src/index.ts
DELETED
package/src/openai.ts
DELETED
@@ -1,136 +0,0 @@
- // import { Configuration, OpenAIApi, ChatCompletionRequestMessage } from "azure-openai"
- /**
-  * OpenAI
-  */
- import OpenAIBase from "./openaibase"
- import { OpenAIApiParameters, ChatReponse, EmbeddingResult } from './declare'
- import OpenAI from "openai";
- // import { ChatCompletionToolChoiceOption } from "openai/resources";
- export default class OpenAIGpt extends OpenAIBase<OpenAI> {
-     /**
-      * Initialize the OpenAI chat API object
-      */
-     createOpenAI(apiKey: string): OpenAI {
-         return new OpenAI({ apiKey })
-     }
-
-     /**
-      * Get the embedding vector of a text
-      * @param text
-      */
-     override async getTextEmbedding(text: string|string[], axiosOption: any): Promise<EmbeddingResult> {
-         if (!text) return { successed: false, error: { errcode: 2, errmsg: 'content required' } };
-         if (!this.aiApi) {
-             this.aiApi = this.createOpenAI(this.apiKey);
-         }
-         try {
-             const response: any = await this.aiApi.embeddings.create({
-                 model: this.embeddingmodel,
-                 input: text,
-             }, axiosOption);
-             return { successed: true, embedding: response.data.data};//[0].embedding };
-         } catch (error) {
-             return { successed: false, error };
-         }
-     }
-     /**
-      * Send a chat request to OpenAI
-      * @param {*} chatText
-      */
-     public async chatRequest(chatText: string | Array<any>, callChatOption: OpenAIApiParameters, axiosOption: any = {}): Promise<ChatReponse> {
-         if (!chatText) return { successed: false, error: { errcode: 2, errmsg: '缺失聊天的内容' } };
-         if (!this.aiApi) this.aiApi = this.createOpenAI(this.apiKey);
-
-         let message: Array<any> = typeof (chatText) == 'string' ?
-             [{ role: 'user', content: chatText }] : chatText;
-         try {
-             // const response: any = await this.aiApi.createChatCompletion({
-             const response: any = await this.aiApi.chat.completions.create(
-                 {
-                     model:callChatOption?.model || this.chatModel,
-                     messages:message,
-                     temperature: Number(callChatOption?.temperature || this.temperature),
-                     max_tokens: Number(callChatOption?.maxtoken || this.maxtoken),
-                     top_p: Number(callChatOption?.top_p || this.top_p),
-                     presence_penalty: Number(callChatOption?.presence_penalty || this.presence_penalty),
-                     frequency_penalty: Number(callChatOption?.frequency_penalty || this.frequency_penalty),
-                     n: Number(callChatOption?.replyCounts || 1) || 1,
-                     // tools: (callChatOption?.enableToolCall === 1 && callChatOption?.tools) ? callChatOption.tools : undefined,
-                     // tool_choice: callChatOption?.enableToolCall === 1 ? 'auto' : undefined,
-                 }, axiosOption);
-             // console.log('response.data', response)
-             return { successed: true, message: response.choices, usage: response.usage };
-         } catch (error) {
-             console.log('result is error ', error)
-             return { successed: false, error };
-         }
-
-     }
-     /**
-      * Streaming chat mode
-      * @param chatText
-      * @param _paramOption
-      * @param axiosOption
-      */
-     override async chatRequestInStream(chatText: string | Array<any>, callChatOption: OpenAIApiParameters, attach?: any, axiosOption?: any): Promise<any> {
-         if (!chatText) this.emit('chaterror', { successed: false, error: 'no text in chat' });
-         if (!this.aiApi) {
-             this.aiApi = this.createOpenAI(this.apiKey);
-         }
-         let message: Array<any> = typeof (chatText) == 'string' ? [{ role: 'user', content: chatText }] : chatText;
-         axiosOption = Object.assign({}, axiosOption || { timeout: 60000 })
-         let requestid = Math.ceil(Math.random() * (new Date().getTime() * Math.random()) / 1000);
-         try {
-             const response: any = await this.aiApi.chat.completions.create(
-                 {
-                     model: callChatOption?.model || this.chatModel,
-                     messages: message,
-                     temperature: Number(callChatOption?.temperature || this.temperature),
-                     max_tokens: Number(callChatOption?.maxtoken || this.maxtoken),
-                     top_p: Number(callChatOption?.top_p || this.top_p),
-                     presence_penalty: Number(callChatOption?.presence_penalty || this.presence_penalty),
-                     frequency_penalty: Number(callChatOption?.frequency_penalty || this.frequency_penalty),
-                     n: Number(callChatOption?.replyCounts || 1) || 1,
-                     tools: (callChatOption?.enableToolCall === 1 && callChatOption?.tools) ? callChatOption.tools : undefined,
-                     tool_choice: callChatOption?.enableToolCall === 1 ? 'auto' : undefined,
-                     stream:true
-                 }, axiosOption);
-             let replytext: string[] = [];
-             let has_tool_calls = 0, currentIndex, previous_index = -1, tool_calls: any[] = [];// use an array to store the tool calls
-             for await (const chunk of response) {
-                 const [choice] = chunk.choices,
-                     { finish_reason:finishreason, index, usage } = choice,
-                     { content, tool_calls:toolCalls } = choice.delta;
-                 if (toolCalls && toolCalls.length) {
-                     currentIndex = toolCalls[0].index;
-                     has_tool_calls = 1;
-                     // check whether the index has changed
-                     //console.log('currentIndex,previous_index', currentIndex, previous_index)
-                     if (currentIndex !== previous_index) {
-                         tool_calls.push({
-                             id: toolCalls[0].id,
-                             type: 'function',
-                             function: {
-                                 name: toolCalls[0].function.name,
-                                 arguments: toolCalls[0].function.arguments
-                             }
-                         });
-                         // update previousIndex for the next comparison
-                         previous_index = currentIndex;
-                     } else {
-                         tool_calls[previous_index].function.arguments += toolCalls[0].function.arguments
-                     }
-                 } else {
-                     replytext.push(content);
-                 }
-                 let output = { successed: true, requestid, segment: content, text: replytext.join(''), finish_reason: finishreason, index, usage, has_tool_calls: has_tool_calls, tool_calls: tool_calls };
-                 if (attach) output = Object.assign({}, output, attach);
-                 this.emit(finishreason ? 'chatdone' : 'chattext', output)
-             }
-             return { successed: true, requestid }
-         } catch (error) {
-             this.emit('requesterror', { successed: false, requestid, error: 'call axios faied ' + error });
-             return { successed: false, requestid }
-         }
-     }
- }
package/src/openaibase.ts
DELETED
@@ -1,30 +0,0 @@
- // import { Configuration, OpenAIApi, ChatCompletionRequestMessage } from "azure-openai"
- import { OpenAIApiParameters } from "./declare";
- import GptBase from "./gptbase"
- export default abstract class OpenAIBase<T> extends GptBase {
-     protected readonly apiKey: string;
-     protected readonly chatModel: string;
-     protected readonly maxtoken: number;
-     protected readonly top_p: number;
-     protected readonly presence_penalty: number;
-     protected readonly frequency_penalty: number;
-     protected readonly temperature: number;
-     protected readonly embeddingmodel: string;
-     protected aiApi: T | undefined;//OpenAIApi | undefined;
-     constructor(apiKey: string, apiOption: OpenAIApiParameters = {}) {
-         super();
-         this.apiKey = apiKey;
-         this.chatModel = apiOption.model || 'gpt-3.5-turbo';
-         this.maxtoken = apiOption.maxtoken || 2048;
-         this.top_p = apiOption.top_p || 0.95;
-         this.temperature = apiOption.temperature || 0.9;
-         this.presence_penalty = apiOption.presence_penalty || 0;
-         this.frequency_penalty = apiOption.frequency_penalty || 0;
-         this.embeddingmodel = apiOption.embedding || 'text-embedding-ada-002';
-     }
-     /**
-      * Initialize the OpenAI chat API object
-      */
-     abstract createOpenAI(apiKey: string): T ;
-
- }
package/src/openaiproxy.ts
DELETED
@@ -1,97 +0,0 @@
- import { ApiResult, OpenAIApiParameters, ProxyPatameters } from './declare';
- import OpenAIGpt from './openai';
- import axios from 'axios';
- const ERROR_RESPONSE: string[] = ['[AUTHORIZATION NEEDED]', '[AUTHORIZATION ERROR]', '[BODY ERROR]','[REQUEST ERROR]']
- export default class OpenAIProxy extends OpenAIGpt {
-     protected readonly proxySetting: ProxyPatameters;
-     constructor(apiKey: string, proxyOption: ProxyPatameters, apiOption: OpenAIApiParameters = {}) {
-         super(apiKey, apiOption);
-         this.proxySetting = proxyOption;
-     }
-     /**
-      * Override the chatRequest method
-      * @param chatText
-      * @param callChatOption
-      * @param axiosOption
-      */
-     override async chatRequest(chatText: string | any[], _paramOption: any, axiosOption?: any): Promise<ApiResult> {
-         const opts: any = {
-             headers: {
-                 'Content-Type': 'application/json',
-                 'authorization': `Bearer ${this.apiKey}`
-             },
-             method: 'post',
-             url: this.proxySetting.serviceurl,
-             data: {
-                 chatText,
-                 option: _paramOption
-             },
-             ...axiosOption
-         }
-         const requestResult = await axios(opts);
-         return requestResult.data as ApiResult;
-     }
-     /**
-      * Override the chatRequestInStream method
-      * @param chatText
-      * @param callChatOption
-      * @param attach
-      * @param axiosOption
-      */
-     override async chatRequestInStream(chatText:Array<any>, callChatOption: OpenAIApiParameters, attach?: any, axiosOption?: any): Promise<any> {
-         // const decoder = new TextDecoder();
-         //overContent,
-         let streamText,requestid = Math.ceil(Math.random() * (new Date().getTime() * Math.random()) / 1000);
-         const opts:any = {
-             headers: {
-                 'Content-Type': 'application/json',
-                 'authorization': `Bearer ${this.apiKey}`
-             },
-             method: 'post',
-             url: this.proxySetting.serviceurl +'/stream',
-             data: {
-                 messages:chatText,
-                 option: callChatOption,
-                 axiosOption
-             },
-             responseType: 'stream',
-         }
-         let unCompleteSegment:string = '';
-         axios(opts)
-             .then(res => {
-                 res.data.on('data', (chunk:any) => {
-                     streamText = chunk.toString(); //decoder.decode(chunk);
-                     if (streamText){
-                         /// the response to the request reported an error
-                         if (ERROR_RESPONSE.includes(streamText)) {
-                             return this.emit('requesterror', { successed: false, requestid, error: 'Request Remote OpenAI Error : ' + streamText });
-                         }
-                         const fullData = (unCompleteSegment +streamText).split('*&$')
-                         unCompleteSegment = '';
-                         // console.log('fullData', fullData.length);
-                         for (const segment of fullData){
-                             if (!segment) continue;
-                             try {
-                                 //// if the received segment is not a complete JSON fragment, carry it over as the start of the next chunk
-                                 if (!segment.endsWith('}')) {
-                                     unCompleteSegment = segment;
-                                     break;
-                                 }
-                                 const objData = Object.assign(JSON.parse(segment), attach);
-                                 this.emit(objData.finish_reason?'chatdone':'chattext', objData);
-                             } catch (errParse) {
-
-                                 break;
-                                 //this.emit('chaterror', { successed: false, requestid, error: 'JSON parse stream message' + errParse });
-                             }
-                         }
-
-                     }
-                     return;
-                 });
-                 //res.data.on('end', () => { this.emit('chatdone', Object.assign(streamText, attach)); });
-             }).catch(err=>{
-                 this.emit('requesterror', { successed: false, requestid, error: 'Axios Error : ' + err });
-             })
-     }
- }
package/src/stabilityai.ts
DELETED
@@ -1,67 +0,0 @@
- import { request, AzureOpenAIPatameters, StabilityOption, StabilityResult } from "./declare";
- import GptBase from "./gptbase"
- export default class StabilityAI extends GptBase {
-
-     protected readonly apiKey: string;
-     protected readonly apiSetting: AzureOpenAIPatameters
-     protected readonly apiOption:StabilityOption;
-     /**
-      *
-      * @param apiKey the key used to call the API
-      * @param azureOption used as the accesstoken cache
-      * @param apiOption used as the accesstoken cache
-      */
-     constructor(apiKey: string, urlOption: AzureOpenAIPatameters, apiOption: StabilityOption = {}) {
-         super();
-         this.apiKey = apiKey;
-         this.apiSetting = urlOption;
-         this.apiOption = apiOption;
-         if (!this.apiSetting.endpoint.toLowerCase().startsWith('http')) {
-             this.apiSetting.endpoint = 'https://' + this.apiSetting.endpoint;
-         }
-     }
-     /**
-      * Call the Stability text-to-image API
-      */
-     public async chatRequest(chatText: string, paramOption: StabilityOption, axiosOption: any = {}): Promise<StabilityResult> {
-         if (!chatText) return { successed: false, error: { errcode: 2, errmsg: '缺失聊天的内容' } };
-         axiosOption = Object.assign({}, axiosOption,{
-             headers:{
-                 "Content-Type": "application/json",
-                 "Accept": "application/json",
-                 "Authorization": `Bearer ${this.apiKey}`
-             }
-         })
-         try {
-             let param = {
-                 ...axiosOption,
-                 method: "post",
-                 url: `${this.apiSetting.endpoint}/v1/generation/${this.apiSetting.engine}/text-to-image`,
-                 data: {
-                     text_prompts: [
-                         {
-                             text: chatText
-                         }
-                     ],
-                     cfg_scale: paramOption.cfg_scale || this.apiOption.cfg_scale || 7,
-                     clip_guidance_preset: paramOption.clip_guidance_preset || this.apiOption.clip_guidance_preset || "FAST_BLUE",
-                     height: paramOption.height || this.apiOption.height || 512,
-                     width: paramOption.width || this.apiOption.width || 512,
-                     samples: paramOption.samples || this.apiOption.samples || 1,
-                     steps: paramOption.steps || this.apiOption.steps || 30,
-                 },
-
-             };
-             const response:any = await request(param)
-             if (response.successed) {
-                 let data = response.data;
-                 return { successed: true, type: 'image', data: data.artifacts, };
-             }
-             return { successed: false, ...response.data };
-         } catch (error) {
-             console.log('result is error ', error)
-             return { successed: false, error };
-         }
-
-     }
- }
package/src/stabilityplusai.ts
DELETED
@@ -1,77 +0,0 @@
-
- import { AzureOpenAIPatameters, StabilityOption, StabilityResult, request } from "./declare";
- import GptBase from "./gptbase";
- export default class StabilityPlusAI extends GptBase {
-     protected readonly apiKey: string;
-     // protected readonly apiSetting: AzureOpenAIPatameters
-     protected readonly apiOption: StabilityOption;
-     constructor(apiKey: string, _urlOption: AzureOpenAIPatameters, apiOption: StabilityOption = {}) {
-         super();
-         this.apiKey = apiKey;
-         // this.apiSetting = urlOption;
-         this.apiOption = apiOption;
-         // if (!this.apiSetting.endpoint.toLowerCase().startsWith('http')) {
-         //     this.apiSetting.endpoint = 'https://' + this.apiSetting.endpoint;
-         // }
-     }
-     /**
-      * Call the Stable Diffusion text-to-image API
-      */
-     public async chatRequest(chatText: string, paramOption: StabilityOption, axiosOption: any = {}): Promise<StabilityResult> {
-         if (!chatText) return { successed: false, error: { errcode: 2, errmsg: '缺失聊天的内容' } };
-         axiosOption = Object.assign({}, axiosOption, {
-             headers: {
-                 "Content-Type": "application/json",
-                 "Accept": "application/json",
-             }
-         })
-         try {
-             const requestOption = {
-                 ...axiosOption,
-                 method: "POST",
-                 data: {
-                     "enable_hr": false,
-                     "denoising_strength": paramOption.denoising_strength || this.apiOption.denoising_strength || 0.5,
-                     "firstphase_width": 0,
-                     "firstphase_height": 0,
-                     "hr_scale": paramOption.hr_scale || this.apiOption.hr_scale || 2,
-                     "hr_upscaler": "string",
-                     "hr_second_pass_steps": 0,
-                     "hr_resize_x": 0,
-                     "hr_resize_y": 0,
-                     "prompt": chatText,
-                     "styles":["string"], //[paramOption.engine || this.apiSetting.engine || "bra_v5"], //["bra_v5"], // model
-                     "seed": paramOption.seed || this.apiOption.seed || -1,
-                     "subseed": -1,
-                     "subseed_strength": 0,
-                     "seed_resize_from_h": -1,
-                     "seed_resize_from_w": -1,
-                     "sampler_name": paramOption.sampler || this.apiOption.sampler || "Euler a", //"Euler",
-                     "batch_size": 1,
-                     "n_iter": paramOption.samples || this.apiOption.samples || 1, // number of images to generate
-                     "steps": paramOption.steps || this.apiOption.steps || 20,
-                     "cfg_scale": paramOption.cfg_scale || this.apiOption.cfg_scale || 7,
-                     "width": paramOption.width || this.apiOption.width || 512,
-                     "height": paramOption.height || this.apiOption.height || 512,
-                     "restore_faces": false,
-                     "tiling": false,
-                     "do_not_save_samples": false,
-                     "do_not_save_grid": false,
-                     "negative_prompt": paramOption.negative || ''
-                 },
-                 url: `${paramOption.endpoint}/sdapi/v1/txt2img`,
-             };
-             // console.log('stablity param', requestOption);
-             const response: any = await request(requestOption)
-             if (response.successed) {
-                 return { successed: true, type: 'image', data: response.data.images, };
-             }
-             // console.log('response result ', response.data)
-             return { successed: false, ...response.data };
-         } catch (error) {
-             console.log('result is error ', error)
-             return { successed: false, error };
-         }
-
-     }
- }