doomiaichat 5.1.0 → 6.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/azureai.d.ts +22 -8
- package/dist/azureai.js +121 -17
- package/dist/declare.d.ts +27 -0
- package/dist/gptprovider.d.ts +1 -0
- package/dist/gptprovider.js +5 -1
- package/dist/openai.d.ts +7 -19
- package/dist/openai.js +87 -74
- package/dist/openaibase.d.ts +18 -0
- package/dist/openaibase.js +20 -0
- package/dist/openaiproxy.d.ts +21 -0
- package/dist/openaiproxy.js +102 -0
- package/dist/stabilityai.d.ts +1 -20
- package/dist/stabilityplusai.d.ts +6 -2
- package/dist/stabilityplusai.js +17 -17
- package/package.json +4 -3
- package/src/azureai.ts +98 -19
- package/src/declare.ts +35 -4
- package/src/gptprovider.ts +5 -1
- package/src/openai.ts +81 -617
- package/src/openaibase.ts +30 -0
- package/src/openaiproxy.ts +89 -0
- package/src/stabilityai.ts +1 -22
- package/src/stabilityplusai.ts +23 -21
package/src/openaibase.ts
ADDED
@@ -0,0 +1,30 @@
+ // import { Configuration, OpenAIApi, ChatCompletionRequestMessage } from "azure-openai"
+ import { OpenAIApiParameters } from "./declare";
+ import GptBase from "./gptbase"
+ export default abstract class OpenAIBase<T> extends GptBase {
+     protected readonly apiKey: string;
+     protected readonly chatModel: string;
+     protected readonly maxtoken: number;
+     protected readonly top_p: number;
+     protected readonly presence_penalty: number;
+     protected readonly frequency_penalty: number;
+     protected readonly temperature: number;
+     protected readonly embeddingmodel: string;
+     protected aiApi: T | undefined; // OpenAIApi | undefined;
+     constructor(apiKey: string, apiOption: OpenAIApiParameters = {}) {
+         super();
+         this.apiKey = apiKey;
+         this.chatModel = apiOption.model || 'gpt-3.5-turbo';
+         this.maxtoken = apiOption.maxtoken || 2048;
+         this.top_p = apiOption.top_p || 0.95;
+         this.temperature = apiOption.temperature || 0.9;
+         this.presence_penalty = apiOption.presence_penalty || 0;
+         this.frequency_penalty = apiOption.frequency_penalty || 0;
+         this.embeddingmodel = apiOption.embedding || 'text-embedding-ada-002';
+     }
+     /**
+      * Initializes the OpenAI chat API object
+      */
+     abstract createOpenAI(apiKey: string): T;
+
+ }
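The new OpenAIBase&lt;T&gt; class factors the shared OpenAI settings out of the concrete providers: a subclass picks its client type through the generic parameter and builds it in createOpenAI. A minimal sketch of such a subclass, assuming the v3-style openai SDK (Configuration/OpenAIApi) hinted at by the commented-out import and assuming GptBase adds no further abstract members; the ExampleOpenAI class is hypothetical and not part of the package:

import { Configuration, OpenAIApi } from "openai";   // assumed: openai v3-style SDK
import { OpenAIApiParameters } from "./declare";
import OpenAIBase from "./openaibase";

// Hypothetical concrete provider, for illustration only.
class ExampleOpenAI extends OpenAIBase<OpenAIApi> {
    constructor(apiKey: string, apiOption: OpenAIApiParameters = {}) {
        super(apiKey, apiOption);
        this.aiApi = this.createOpenAI(apiKey);   // aiApi is typed as OpenAIApi | undefined here
    }
    createOpenAI(apiKey: string): OpenAIApi {
        return new OpenAIApi(new Configuration({ apiKey }));
    }
}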
package/src/openaiproxy.ts
ADDED
@@ -0,0 +1,89 @@
+ import { ApiResult, OpenAIApiParameters, ProxyPatameters } from './declare';
+ import OpenAIGpt from './openai';
+ import axios from 'axios';
+ const ERROR_RESPONSE: string[] = ['[AUTHORIZATION NEEDED]', '[AUTHORIZATION ERROR]', '[BODY ERROR]', '[REQUEST ERROR]']
+ export default class OpenAIProxy extends OpenAIGpt {
+     protected readonly proxySetting: ProxyPatameters;
+     constructor(apiKey: string, proxyOption: ProxyPatameters, apiOption: OpenAIApiParameters = {}) {
+         super(apiKey, apiOption);
+         this.proxySetting = proxyOption;
+     }
+     /**
+      * Overrides the chatRequest method
+      * @param chatText
+      * @param callChatOption
+      * @param axiosOption
+      */
+     override async chatRequest(chatText: string | any[], _paramOption: any, axiosOption?: any): Promise<ApiResult> {
+         const opts: any = {
+             headers: {
+                 'Content-Type': 'application/json',
+                 'authorization': `Bearer ${this.apiKey}`
+             },
+             method: 'post',
+             url: this.proxySetting.serviceurl,
+             data: {
+                 chatText,
+                 option: _paramOption
+             },
+             ...axiosOption
+         }
+         const requestResult = await axios(opts);
+         return requestResult.data as ApiResult;
+     }
+     /**
+      * Overrides the chatRequestInStream method
+      * @param chatText
+      * @param callChatOption
+      * @param attach
+      * @param axiosOption
+      */
+     override async chatRequestInStream(chatText: Array<any>, callChatOption: OpenAIApiParameters, attach?: any, axiosOption?: any): Promise<any> {
+         // const decoder = new TextDecoder();
+         // overContent,
+         let streamText, requestid = Math.ceil(Math.random() * (new Date().getTime() * Math.random()) / 1000);
+         const opts: any = {
+             headers: {
+                 'Content-Type': 'application/json',
+                 'authorization': `Bearer ${this.apiKey}`
+             },
+             method: 'post',
+             url: this.proxySetting.serviceurl + '/stream',
+             data: {
+                 messages: chatText,
+                 option: callChatOption,
+                 axiosOption
+             },
+             responseType: 'stream',
+         }
+         axios(opts)
+             .then(res => {
+                 res.data.on('data', (chunk: any) => {
+                     streamText = chunk.toString(); // decoder.decode(chunk);
+                     if (streamText) {
+                         /// The response to the request reported an error
+                         if (ERROR_RESPONSE.includes(streamText)) {
+                             return this.emit('requesterror', { successed: false, requestid, error: 'Request Remote OpenAI Error : ' + streamText });
+                         }
+                         /// The stream has fully finished
+                         if (streamText === '[END]') {
+                             return this.emit('chatdone', streamText || {})
+                         } else {
+                             /// Text is still streaming
+                             try {
+                                 // streamText = JSON.parse(streamText);
+                                 // overContent = JSON.parse(streamText);
+                                 return this.emit('chattext', Object.assign(streamText, attach));
+                             } catch (errParse) {
+                                 return this.emit('chaterror', { successed: false, requestid, error: 'JSON parse stream message', streamText });
+                             }
+                         }
+                     }
+                     return;
+                 });
+                 // res.data.on('end', () => { this.emit('chatdone', Object.assign(streamText, attach)); });
+             }).catch(err => {
+                 this.emit('requesterror', { successed: false, requestid, error: 'Axios Error : ' + err });
+             })
+     }
+ }
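OpenAIProxy forwards chat requests to a relay service at proxySetting.serviceurl instead of calling OpenAI directly, and streams results back through the events emitted above. A rough usage sketch, assuming GptBase is an EventEmitter (it calls this.emit) and that ProxyPatameters needs only serviceurl; the key and URL are placeholders:

import OpenAIProxy from "./openaiproxy";

// Placeholder key and relay endpoint.
const proxy = new OpenAIProxy("sk-placeholder", { serviceurl: "https://example.invalid/openai" });

// Event names match the emit() calls in chatRequestInStream.
proxy.on("chattext", (chunk: any) => process.stdout.write(String(chunk)));
proxy.on("chatdone", () => console.log("\n[stream finished]"));
proxy.on("chaterror", (err: any) => console.error("parse error", err));
proxy.on("requesterror", (err: any) => console.error("request error", err));

// Non-streaming calls go through the overridden chatRequest instead.
proxy.chatRequestInStream([{ role: "user", content: "Hello" }], {});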
package/src/stabilityai.ts
CHANGED
@@ -1,4 +1,4 @@
- import {
+ import { request, AzureOpenAIPatameters, StabilityOption, StabilityResult } from "./declare";
  import GptBase from "./gptbase"
  export default class StabilityAI extends GptBase {
 
@@ -65,24 +65,3 @@ export default class StabilityAI extends GptBase {
 
      }
  }
-
- export interface StabilityOption {
-     'cfg_scale'?: number,
-     'clip_guidance_preset'?: string,
-     'height'?: number,
-     'width'?: number,
-     'samples'?: number,
-     'seed'?:number,
-     'steps'?: number,
-     'sampler'?:string,
-     'negative'?:string,
-     'engine'?:string,
-     'endpoint'?:string
-     'denoising_strength'?:number,
-     'hr_scale'?:number
- }
-
- export interface StabilityResult extends ApiResult {
-     'data'?:any;
-     'type'?:string;
- }
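The StabilityOption and StabilityResult interfaces are no longer declared in stabilityai.ts; the new import at the top of the file pulls them from the shared declare module instead. A small, hypothetical consumer inside the package would now reference them like this (field names come from the interface removed above; the values are placeholders):

import { StabilityOption, StabilityResult } from "./declare";

const drawOptions: StabilityOption = { width: 512, height: 512, steps: 20, negative: "blurry" };

function describeResult(result: StabilityResult): string {
    return result.successed ? `got ${result.type ?? "unknown"} data` : "request failed";
}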
package/src/stabilityplusai.ts
CHANGED
@@ -1,11 +1,23 @@
 
- import { request } from "./declare";
- import
- export default class StabilityPlusAI extends
+ import { AzureOpenAIPatameters, StabilityOption, StabilityResult, request } from "./declare";
+ import GptBase from "./gptbase";
+ export default class StabilityPlusAI extends GptBase {
+     protected readonly apiKey: string;
+     // protected readonly apiSetting: AzureOpenAIPatameters
+     protected readonly apiOption: StabilityOption;
+     constructor(apiKey: string, _urlOption: AzureOpenAIPatameters, apiOption: StabilityOption = {}) {
+         super();
+         this.apiKey = apiKey;
+         // this.apiSetting = urlOption;
+         this.apiOption = apiOption;
+         // if (!this.apiSetting.endpoint.toLowerCase().startsWith('http')) {
+         //     this.apiSetting.endpoint = 'https://' + this.apiSetting.endpoint;
+         // }
+     }
      /**
       * Requests the Stable image-generation API
       */
-     public
+     public async chatRequest(chatText: string, paramOption: StabilityOption, axiosOption: any = {}): Promise<StabilityResult> {
          if (!chatText) return { successed: false, error: { errcode: 2, errmsg: '缺失聊天的内容' } };
          axiosOption = Object.assign({}, axiosOption, {
              headers: {
@@ -14,10 +26,9 @@ export default class StabilityPlusAI extends StabilityAI {
              }
          })
          try {
-
+             const requestOption = {
                  ...axiosOption,
-                 method: "
-                 url: `${paramOption.endpoint || this.apiSetting.endpoint}/sdapi/v1/txt2img`,
+                 method: "POST",
                  data: {
                      "enable_hr": false,
                      "denoising_strength": paramOption.denoising_strength || this.apiOption.denoising_strength || 0.5,
@@ -46,25 +57,16 @@ export default class StabilityPlusAI extends StabilityAI {
                      "tiling": false,
                      "do_not_save_samples": false,
                      "do_not_save_grid": false,
-                     "negative_prompt": paramOption.negative || ''
-                     // "eta": 0,
-                     // "s_churn": 0,
-                     // "s_tmax": 0,
-                     // "s_tmin": 0,
-                     // "s_noise": 1,
-                     // "script_args": [],
-                     // "sampler_index": paramOption.sampler || this.apiOption.sampler || "Euler", //"Euler",
-                     // "send_images": true,
-                     // "save_images": false,
-                     // "alwayson_scripts": {}
+                     "negative_prompt": paramOption.negative || ''
                  },
-
+                 url: `${paramOption.endpoint}/sdapi/v1/txt2img`,
              };
-
+             // console.log('stablity param', requestOption);
+             const response: any = await request(requestOption)
              if (response.successed) {
                  return { successed: true, type: 'image', data: response.data.images, };
              }
-             console.log('response result ', response.data)
+             // console.log('response result ', response.data)
              return { successed: false, ...response.data };
          } catch (error) {
              console.log('result is error ', error)
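As rewritten, StabilityPlusAI.chatRequest posts to the /sdapi/v1/txt2img route of whatever endpoint is supplied in paramOption, which is the stable-diffusion-webui txt2img API shape. A hedged usage sketch; the key, endpoint, and option values are placeholders, the unused second constructor argument is passed as an empty cast object, and the assumption that result.data is an array of images follows from the response.data.images mapping above:

import StabilityPlusAI from "./stabilityplusai";
import { AzureOpenAIPatameters } from "./declare";

// The second argument is received as _urlOption and is not used by the new implementation.
const sd = new StabilityPlusAI("placeholder-key", {} as AzureOpenAIPatameters, { steps: 20, width: 512, height: 512 });

async function draw(): Promise<void> {
    const result = await sd.chatRequest("a lighthouse at dusk, oil painting", {
        endpoint: "http://127.0.0.1:7860",        // placeholder txt2img host
        negative: "blurry, low quality",
    });
    if (result.successed) {
        // response.data.images from the txt2img call comes back as result.data
        console.log("image count:", (result.data as any[]).length);
    } else {
        console.error("generation failed:", result.error ?? result);
    }
}
draw();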