doomiaichat 5.1.0 → 6.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/azureai.d.ts +22 -8
- package/dist/azureai.js +121 -17
- package/dist/declare.d.ts +27 -0
- package/dist/gptprovider.d.ts +1 -0
- package/dist/gptprovider.js +5 -1
- package/dist/openai.d.ts +7 -19
- package/dist/openai.js +87 -74
- package/dist/openaibase.d.ts +18 -0
- package/dist/openaibase.js +20 -0
- package/dist/openaiproxy.d.ts +21 -0
- package/dist/openaiproxy.js +102 -0
- package/dist/stabilityai.d.ts +1 -20
- package/dist/stabilityplusai.d.ts +6 -2
- package/dist/stabilityplusai.js +17 -17
- package/package.json +4 -3
- package/src/azureai.ts +98 -19
- package/src/declare.ts +35 -4
- package/src/gptprovider.ts +5 -1
- package/src/openai.ts +81 -617
- package/src/openaibase.ts +30 -0
- package/src/openaiproxy.ts +89 -0
- package/src/stabilityai.ts +1 -22
- package/src/stabilityplusai.ts +23 -21
package/dist/azureai.d.ts
CHANGED
@@ -1,20 +1,34 @@
-
-
-
-
+/**
+ * Microsoft Azure OpenAI
+ */
+import OpenAIBase from "./openaibase";
+import { AzureOpenAIPatameters, ChatReponse, EmbeddingResult, OpenAIApiParameters } from "./declare";
+import { OpenAIClient } from "@azure/openai";
+export default class AzureAI extends OpenAIBase<OpenAIClient> {
     protected readonly azureSetting: AzureOpenAIPatameters;
     constructor(apiKey: string, azureOption: AzureOpenAIPatameters, apiOption?: OpenAIApiParameters);
     /**
      * Initialize the OpenAI chat API object
      */
-    createOpenAI(apiKey: string):
-    /**
-     * The latest Azure OpenAI URL
-     */
+    createOpenAI(apiKey: string): OpenAIClient;
     get EmbeddingUrl(): string;
     /**
      * Get the embedding vector for a text
      * @param text
      */
     getTextEmbedding(text: string, axiosOption?: any): Promise<EmbeddingResult>;
+    /**
+     * Non-streaming chat request
+     * @param _chatText
+     * @param _paramOption
+     * @param _axiosOption
+     */
+    chatRequest(chatText: string | Array<any>, callChatOption: OpenAIApiParameters, _axiosOption?: any): Promise<ChatReponse>;
+    /**
+     * Streaming chat mode
+     * @param chatText
+     * @param _paramOption
+     * @param axiosOption
+     */
+    chatRequestInStream(chatText: string | Array<any>, callChatOption: OpenAIApiParameters, attach?: any, axiosOption?: any): Promise<any>;
 }
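In the streaming path the new surface is event-driven rather than promise-of-text: the implementation (dist/azureai.js below) emits 'chattext' for each delta and 'chatdone' when a finish reason arrives. A minimal usage sketch, assuming the class is importable from the package's dist output and that GptBase (not part of this diff) is an EventEmitter:

import AzureAI from 'doomiaichat/dist/azureai'; // assumed import path

const ai = new AzureAI(process.env.AZURE_OPENAI_KEY ?? '',
    // AzureOpenAIPatameters: resource endpoint plus deployment ("engine") name
    { endpoint: 'https://my-resource.openai.azure.com', engine: 'gpt-35-turbo' });

ai.on('chattext', (msg: any) => process.stdout.write(msg.segment)); // incremental delta
ai.on('chatdone', (msg: any) => console.log('\ndone:', msg.finish_reason));
ai.chatRequestInStream('Hello!', { model: 'gpt-35-turbo' });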
package/dist/azureai.js
CHANGED
@@ -8,15 +8,24 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
         step((generator = generator.apply(thisArg, _arguments || [])).next());
     });
 };
+var __asyncValues = (this && this.__asyncValues) || function (o) {
+    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+    var m = o[Symbol.asyncIterator], i;
+    return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
+    function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
+    function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
+};
 var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-
+/**
+ * Microsoft Azure OpenAI
+ */
+const openaibase_1 = __importDefault(require("./openaibase"));
 const declare_1 = require("./declare");
-
-
-class AzureAI extends openai_1.default {
+const openai_1 = require("@azure/openai");
+class AzureAI extends openaibase_1.default {
     constructor(apiKey, azureOption, apiOption = {}) {
         super(apiKey, apiOption);
         this.azureSetting = azureOption;
@@ -28,19 +37,8 @@ class AzureAI extends openai_1.default {
      * Initialize the OpenAI chat API object
      */
     createOpenAI(apiKey) {
-        return new
-            azure: {
-                apiKey,
-                endpoint: this.azureSetting.endpoint,
-                deploymentName: this.azureSetting.engine
-            } }));
+        return new openai_1.OpenAIClient(this.azureSetting.endpoint, new openai_1.AzureKeyCredential(apiKey));
     }
-    /**
-     * The latest Azure OpenAI URL
-     */
-    // get BaseUrl(): string {
-    //     return `${this.azureSetting.endpoint}/openai/deployments/${this.azureSetting.engine}/chat/completions?api-version=${this.azureSetting.version || '2023-03-15-preview'}`
-    // }
     get EmbeddingUrl() {
         return `${this.azureSetting.endpoint}/openai/deployments/${this.embeddingmodel || 'openai-embedding-ada-002'}/embeddings?api-version=2022-12-01`;
     }
@@ -63,7 +61,7 @@ class AzureAI extends openai_1.default {
                 input: text
             }, url: this.EmbeddingUrl });
             const response = yield (0, declare_1.request)(param);
-            if (response.data) {
+            if (response.successed && response.data) {
                 return { successed: true, embedding: response.data.data[0].embedding };
             }
             return Object.assign({ successed: false }, response.data);
@@ -73,5 +71,111 @@ class AzureAI extends openai_1.default {
             }
         });
     }
+    /**
+     * Non-streaming chat request
+     * @param _chatText
+     * @param _paramOption
+     * @param _axiosOption
+     */
+    chatRequest(chatText, callChatOption, _axiosOption = {}) {
+        return __awaiter(this, void 0, void 0, function* () {
+            if (!chatText)
+                return { successed: false, error: { errcode: 2, errmsg: '缺失聊天的内容' } };
+            if (!this.aiApi)
+                this.aiApi = this.createOpenAI(this.apiKey);
+            let message = typeof (chatText) == 'string' ?
+                [{ role: 'user', content: chatText }] : chatText;
+            try {
+                const response = yield this.aiApi.getChatCompletions((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.model) || this.chatModel, message, {
+                    temperature: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.temperature) || this.temperature),
+                    maxTokens: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.maxtoken) || this.maxtoken),
+                    topP: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.top_p) || this.top_p),
+                    presencePenalty: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.presence_penalty) || this.presence_penalty),
+                    frequencyPenalty: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.frequency_penalty) || this.frequency_penalty),
+                    n: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.replyCounts) || 1) || 1,
+                    tools: callChatOption.tools,
+                    toolChoice: (callChatOption.tool_choice || 'none'),
+                });
+                const { promptTokens: prompt_tokens, completionTokens: completion_tokens, totalTokens: total_tokens } = response.usage;
+                let rebuildChoice = [];
+                for (const msg of response.choices) {
+                    ///, contentFilterResults: content_filter
+                    const { index, finishReason: finish_reason, message } = msg;
+                    rebuildChoice.push({ index, finish_reason, message });
+                }
+                return { successed: true, message: rebuildChoice, usage: { prompt_tokens, completion_tokens, total_tokens } };
+            }
+            catch (error) {
+                console.log('result is error ', error);
+                return { successed: false, error };
+            }
+        });
+    }
+    /**
+     * Streaming chat mode
+     * @param chatText
+     * @param _paramOption
+     * @param axiosOption
+     */
+    chatRequestInStream(chatText, callChatOption, attach, axiosOption) {
+        var _a, e_1, _b, _c;
+        var _d;
+        return __awaiter(this, void 0, void 0, function* () {
+            if (!chatText)
+                this.emit('chaterror', { successed: false, error: 'no text in chat' });
+            if (!this.aiApi) {
+                this.aiApi = this.createOpenAI(this.apiKey);
+            }
+            let message = typeof (chatText) == 'string' ? [{ role: 'user', content: chatText }] : chatText;
+            axiosOption = Object.assign({}, axiosOption || { timeout: 60000 });
+            let requestid = Math.ceil(Math.random() * (new Date().getTime() * Math.random()) / 1000);
+            try {
+                const response = yield this.aiApi.streamChatCompletions((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.model) || this.chatModel, message, {
+                    temperature: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.temperature) || this.temperature),
+                    maxTokens: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.maxtoken) || this.maxtoken),
+                    topP: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.top_p) || this.top_p),
+                    presencePenalty: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.presence_penalty) || this.presence_penalty),
+                    frequencyPenalty: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.frequency_penalty) || this.frequency_penalty),
+                    n: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.replyCounts) || 1) || 1
+                });
+                let replytext = [];
+                try {
+                    for (var _e = true, response_1 = __asyncValues(response), response_1_1; response_1_1 = yield response_1.next(), _a = response_1_1.done, !_a;) {
+                        _c = response_1_1.value;
+                        _e = false;
+                        try {
+                            const event = _c;
+                            for (const choice of event.choices) {
+                                const { finishReason: finishreason, index } = choice;
+                                const content = (_d = choice.delta) === null || _d === void 0 ? void 0 : _d.content;
+                                if (!content)
+                                    continue;
+                                replytext.push(content);
+                                let output = { successed: true, requestid, segment: content, text: replytext.join(''), finish_reason: finishreason, index }; //, usage };
+                                if (attach)
+                                    output = Object.assign({}, output, attach);
+                                this.emit(finishreason ? 'chatdone' : 'chattext', output);
+                            }
+                        }
+                        finally {
+                            _e = true;
+                        }
+                    }
+                }
+                catch (e_1_1) { e_1 = { error: e_1_1 }; }
+                finally {
+                    try {
+                        if (!_e && !_a && (_b = response_1.return)) yield _b.call(response_1);
+                    }
+                    finally { if (e_1) throw e_1.error; }
+                }
+                return { successed: true, requestid };
+            }
+            catch (error) {
+                this.emit('requesterror', { successed: false, requestid, error: 'call axios faied ' + error });
+                return { successed: false, requestid };
+            }
+        });
+    }
 }
 exports.default = AzureAI;
package/dist/declare.d.ts
CHANGED
@@ -35,10 +35,18 @@ export interface OpenAIApiParameters {
     'presence_penalty'?: number;
     'frequency_penalty'?: number;
     'replyCounts'?: number;
+    'tools'?: Array<any>;
+    'tool_choice'?: string;
 }
 /**
  * Connection parameters for OpenAI on Azure
  */
+export interface ProxyPatameters {
+    'serviceurl': string;
+}
+/**
+ * OpenAI Proxy connection parameters
+ */
 export interface AzureOpenAIPatameters {
     'endpoint': string;
     'engine': string;
@@ -84,3 +92,22 @@ export interface CacheProvider {
      */
     delete(key: string): void;
 }
+export interface StabilityOption {
+    'cfg_scale'?: number;
+    'clip_guidance_preset'?: string;
+    'height'?: number;
+    'width'?: number;
+    'samples'?: number;
+    'seed'?: number;
+    'steps'?: number;
+    'sampler'?: string;
+    'negative'?: string;
+    'engine'?: string;
+    'endpoint'?: string;
+    'denoising_strength'?: number;
+    'hr_scale'?: number;
+}
+export interface StabilityResult extends ApiResult {
+    'data'?: any;
+    'type'?: string;
+}
package/dist/gptprovider.d.ts
CHANGED
package/dist/gptprovider.js
CHANGED
@@ -9,6 +9,7 @@ exports.createGpt = exports.GptProviderEnum = void 0;
  * Speech-to-text provider factory
  */
 const openai_1 = __importDefault(require("./openai"));
+const openaiproxy_1 = __importDefault(require("./openaiproxy"));
 const azureai_1 = __importDefault(require("./azureai"));
 const stabilityai_1 = __importDefault(require("./stabilityai"));
 const stabilityplusai_1 = __importDefault(require("./stabilityplusai"));
@@ -18,6 +19,7 @@ const baiduai_1 = __importDefault(require("./baiduai"));
  */
 exports.GptProviderEnum = {
     OPENAI: 'openai',
+    OPENAIPROXY: 'openaiproxy',
     MICROSOFT: 'microsoft',
     BAIDU: 'baidu',
     GOOGLE: 'google',
@@ -32,10 +34,12 @@ exports.GptProviderEnum = {
  * @returns
  */
 function createGpt(provider, apikey, setting) {
-    let { model, maxtoken, temperature, endpoint, engine, version, embedding, top_p, presence_penalty, frequency_penalty } = setting || {};
+    let { model, maxtoken, temperature, serviceurl, endpoint, engine, version, embedding, top_p, presence_penalty, frequency_penalty } = setting || {};
     switch (provider) {
         case exports.GptProviderEnum.OPENAI:
             return new openai_1.default(apikey + '', { model, maxtoken, temperature, embedding, top_p, presence_penalty, frequency_penalty });
+        case exports.GptProviderEnum.OPENAIPROXY:
+            return new openaiproxy_1.default(apikey + '', { serviceurl }, { model, maxtoken, temperature, embedding, top_p, presence_penalty, frequency_penalty });
         case exports.GptProviderEnum.MICROSOFT:
             return new azureai_1.default(apikey + '', { endpoint, engine, version }, { model, maxtoken, temperature, embedding, top_p, presence_penalty, frequency_penalty });
         case exports.GptProviderEnum.BAIDU:
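A sketch of selecting the new proxy provider through this factory, assuming createGpt and GptProviderEnum are re-exported from the package entry point (the index file is not part of this diff); the relay URL is a placeholder:

import { createGpt, GptProviderEnum } from 'doomiaichat'; // assumed re-export

// 'openaiproxy' reuses the OpenAI option set but routes traffic via `serviceurl`.
const gpt = createGpt(GptProviderEnum.OPENAIPROXY, process.env.OPENAI_KEY, {
    serviceurl: 'https://my-relay.example.com', // hypothetical relay endpoint
    model: 'gpt-3.5-turbo',
    maxtoken: 2048
});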
package/dist/openai.d.ts
CHANGED
@@ -1,26 +1,14 @@
-
-
+/**
+ * OpenAI
+ */
+import OpenAIBase from "./openaibase";
 import { OpenAIApiParameters, ChatReponse, EmbeddingResult } from './declare';
-
-
-    protected aiApi: OpenAIApi | undefined;
-    protected readonly chatModel: string;
-    protected readonly maxtoken: number;
-    protected readonly top_p: number;
-    protected readonly presence_penalty: number;
-    protected readonly frequency_penalty: number;
-    protected readonly temperature: number;
-    protected readonly embeddingmodel: string;
-    /**
-     *
-     * @param apiKey the key used to call OpenAI
-     * @param apiOption
-     */
-    constructor(apiKey: string, apiOption?: OpenAIApiParameters);
+import OpenAI from "openai";
+export default class OpenAIGpt extends OpenAIBase<OpenAI> {
     /**
      * Initialize the OpenAI chat API object
      */
-    createOpenAI(apiKey: string):
+    createOpenAI(apiKey: string): OpenAI;
     /**
      * Get the embedding vector for a text
      * @param text
package/dist/openai.js
CHANGED
@@ -8,34 +8,29 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
         step((generator = generator.apply(thisArg, _arguments || [])).next());
     });
 };
+var __asyncValues = (this && this.__asyncValues) || function (o) {
+    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+    var m = o[Symbol.asyncIterator], i;
+    return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
+    function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
+    function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
+};
 var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-
-
-
-
-
-
-
-    */
-    constructor(apiKey, apiOption = {}) {
-        super();
-        this.apiKey = apiKey;
-        this.chatModel = apiOption.model || 'gpt-3.5-turbo';
-        this.maxtoken = apiOption.maxtoken || 2048;
-        this.top_p = apiOption.top_p || 0.95;
-        this.temperature = apiOption.temperature || 0.9;
-        this.presence_penalty = apiOption.presence_penalty || 0;
-        this.frequency_penalty = apiOption.frequency_penalty || 0;
-        this.embeddingmodel = apiOption.embedding || 'text-embedding-ada-002';
-    }
+// import { Configuration, OpenAIApi, ChatCompletionRequestMessage } from "azure-openai"
+/**
+ * OpenAI
+ */
+const openaibase_1 = __importDefault(require("./openaibase"));
+const openai_1 = __importDefault(require("openai"));
+class OpenAIGpt extends openaibase_1.default {
     /**
      * Initialize the OpenAI chat API object
      */
     createOpenAI(apiKey) {
-        return new
+        return new openai_1.default({ apiKey });
     }
     /**
      * Get the embedding vector for a text
@@ -49,7 +44,8 @@ class OpenAIGpt extends gptbase_1.default {
             this.aiApi = this.createOpenAI(this.apiKey);
         }
         try {
-            const response =
+            //const response: any = await this.aiApi.createEmbedding({
+            const response = yield this.aiApi.embeddings.create({
                 model: this.embeddingmodel,
                 input: text,
             }, axiosOption);
@@ -69,15 +65,13 @@ class OpenAIGpt extends gptbase_1.default {
         return __awaiter(this, void 0, void 0, function* () {
             if (!chatText)
                 return { successed: false, error: { errcode: 2, errmsg: '缺失聊天的内容' } };
-            if (!this.aiApi)
+            if (!this.aiApi)
                 this.aiApi = this.createOpenAI(this.apiKey);
-                //return { successed: false, error: { errcode: 1, errmsg: '聊天机器人无效' } };
-            }
             let message = typeof (chatText) == 'string' ?
                 [{ role: 'user', content: chatText }] : chatText;
-            // console.log('message', message)
             try {
-                const response =
+                // const response: any = await this.aiApi.createChatCompletion({
+                const response = yield this.aiApi.chat.completions.create({
                     model: (callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.model) || this.chatModel,
                     messages: message,
                     temperature: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.temperature) || this.temperature),
@@ -85,14 +79,10 @@ class OpenAIGpt extends gptbase_1.default {
                     top_p: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.top_p) || this.top_p),
                     presence_penalty: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.presence_penalty) || this.presence_penalty),
                     frequency_penalty: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.frequency_penalty) || this.frequency_penalty),
-                    n: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.replyCounts) || 1) || 1
+                    n: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.replyCounts) || 1) || 1,
+                    tools: callChatOption.tools,
+                    tool_choice: (callChatOption.tool_choice || 'none'),
                 }, axiosOption);
-                // console.log('finish_reason==>', response.data.choices)
-                ////output content failed moderation
-                if (response.data.choices[0].finish_reason === 'content_filter') {
-                    console.log('content_filter');
-                    return { successed: false, error: 'content_filter' };
-                }
                 return { successed: true, message: response.data.choices, usage: response.data.usage };
             }
             catch (error) {
@@ -108,6 +98,7 @@ class OpenAIGpt extends gptbase_1.default {
      * @param axiosOption
      */
     chatRequestInStream(chatText, callChatOption, attach, axiosOption) {
+        var _a, e_1, _b, _c;
         return __awaiter(this, void 0, void 0, function* () {
             if (!chatText)
                 this.emit('chaterror', { successed: false, error: 'no text in chat' });
@@ -117,65 +108,87 @@ class OpenAIGpt extends gptbase_1.default {
             // const DATA_END_TAG = `"usage":null}`
             let message = typeof (chatText) == 'string' ?
                 [{ role: 'user', content: chatText }] : chatText;
-            axiosOption = Object.assign({}, axiosOption || { timeout: 60000 }, { responseType: 'stream' })
+            //axiosOption = Object.assign({}, axiosOption || { timeout: 60000 }, { responseType: 'stream' })
+            axiosOption = Object.assign({}, axiosOption || { timeout: 60000 });
             let requestid = Math.ceil(Math.random() * (new Date().getTime() * Math.random()) / 1000);
             try {
-                let finishreason = null, usage = null,
+                // let finishreason: any = null, usage: any = null,errtxt = '';
                 ///so the caller can match the returned requestid
                 // console.log('model', callChatOption?.model || this.chatModel,)
-                const response =
+                //const response: any = await this.aiApi.chat.completions.create({
+                const response = yield this.aiApi.chat.completions.create({
                     model: (callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.model) || this.chatModel,
                     messages: message,
-
+                    temperature: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.temperature) || this.temperature),
+                    max_tokens: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.maxtoken) || this.maxtoken),
                     top_p: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.top_p) || this.top_p),
                     presence_penalty: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.presence_penalty) || this.presence_penalty),
                     frequency_penalty: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.frequency_penalty) || this.frequency_penalty),
-
-
+                    n: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.replyCounts) || 1) || 1,
+                    stream: true
                 }, axiosOption);
                 let replytext = [];
-
-
-
-
-                return;
-                // console.log('before add', lines.join('').split('data:'));
-                let alltext = (errtxt + lines.join('')).split('data:');
-                // console.log('alltext',alltext)
-                errtxt = '';
-                for (const line of alltext) {
-                    let txt = line.trim();
-                    if (!txt)
-                        continue;
-                    if (txt === '[DONE]') {
-                        let output = { successed: true, requestid, text: replytext.join(''), finish_reason: 'stop', usage };
-                        if (attach)
-                            output = Object.assign({}, output, attach);
-                        this.emit('chatdone', output);
-                        return; // Stream finished
-                    }
+                try {
+                    for (var _d = true, response_1 = __asyncValues(response), response_1_1; response_1_1 = yield response_1.next(), _a = response_1_1.done, !_a;) {
+                        _c = response_1_1.value;
+                        _d = false;
                     try {
-
-
-
-
-
-
-                        let streamtext = parsed.choices[0].delta.content;
-                        replytext.push(streamtext);
-                        let output = { successed: true, requestid, text: replytext.join(''), finish_reason: finishreason, index: parsed.choices[0].index, usage };
+                        const chunk = _c;
+                        const [choice] = chunk.choices, { content, finishreason, index, usage } = choice.delta;
+                        if (!content)
+                            continue;
+                        replytext.push(content);
+                        let output = { successed: true, requestid, segment: content, text: replytext.join(''), finish_reason: finishreason, index, usage };
                         if (attach)
                             output = Object.assign({}, output, attach);
                         this.emit(finishreason ? 'chatdone' : 'chattext', output);
-                        if (finishreason)
-                            return;
                     }
-
-
-                        this.emit('chaterror', { successed: false, requestid, error: 'JSON parse stream message', errtxt });
+                    finally {
+                        _d = true;
+                    }
                     }
                 }
-            }
+                }
+                catch (e_1_1) { e_1 = { error: e_1_1 }; }
+                finally {
+                    try {
+                        if (!_d && !_a && (_b = response_1.return)) yield _b.call(response_1);
+                    }
+                    finally { if (e_1) throw e_1.error; }
+                }
+                // response.data.on('data', (data: any) => {
+                //     const lines = data.toString().split('\n').filter((line: string) => line.trim() !== '');
+                //     ///the finish reason has already been returned
+                //     if (finishreason) return;
+                //     let alltext = (errtxt +lines.join('')).split('data:');
+                //     errtxt = '';
+                //     for (const line of alltext) {
+                //         let txt = line.trim();
+                //         if (!txt) continue;
+                //         if (txt === '[DONE]') {
+                //             let output = { successed: true, requestid, text: replytext.join(''), finish_reason: 'stop', usage };
+                //             if (attach) output = Object.assign({}, output, attach);
+                //             this.emit('chatdone', output)
+                //             return; // Stream finished
+                //         }
+                //         try {
+                //             ///{ delta: { content: '$\\' }, index: 0, finish_reason: null }
+                //             ///send it out
+                //             const parsed = JSON.parse(txt);
+                //             ///a valid chunk has come back, so this variable can be reset
+                //             finishreason = parsed.choices[0].finish_reason;
+                //             usage = parsed.usage;
+                //             let streamtext = parsed.choices[0].delta.content;
+                //             replytext.push(streamtext);
+                //             let output = { successed: true, requestid, segment: streamtext, text: replytext.join(''), finish_reason: finishreason, index: parsed.choices[0].index, usage };
+                //             if (attach) output = Object.assign({}, output, attach);
+                //             this.emit(finishreason ? 'chatdone' : 'chattext', output)
+                //             if (finishreason) return;
+                //         } catch (error) {
+                //             errtxt+=txt; ///this JSON fragment is incomplete; keep it for when the next chunk streams in
+                //             this.emit('chaterror', { successed: false, requestid, error: 'JSON parse stream message', errtxt });
+                //         }
+                //     }
+                // });
                 return { successed: true, requestid };
             }
             catch (error) {
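The __asyncValues/for-await scaffolding above is tsc's downleveling of the openai v4 SDK's async-iterable streaming. Roughly the source shape it compiles from (a sketch, not the package's literal src/openai.ts):

import OpenAI from 'openai';

const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
const stream = await client.chat.completions.create({
    model: 'gpt-3.5-turbo',
    messages: [{ role: 'user', content: 'Hello' }],
    stream: true, // returns an async iterable of chunks
});
for await (const chunk of stream) {
    const content = chunk.choices[0]?.delta?.content;
    if (content) process.stdout.write(content);
}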
package/dist/openaibase.d.ts
ADDED
@@ -0,0 +1,18 @@
+import { OpenAIApiParameters } from "./declare";
+import GptBase from "./gptbase";
+export default abstract class OpenAIBase<T> extends GptBase {
+    protected readonly apiKey: string;
+    protected readonly chatModel: string;
+    protected readonly maxtoken: number;
+    protected readonly top_p: number;
+    protected readonly presence_penalty: number;
+    protected readonly frequency_penalty: number;
+    protected readonly temperature: number;
+    protected readonly embeddingmodel: string;
+    protected aiApi: T | undefined;
+    constructor(apiKey: string, apiOption?: OpenAIApiParameters);
+    /**
+     * Initialize the OpenAI chat API object
+     */
+    abstract createOpenAI(apiKey: string): T;
+}
package/dist/openaibase.js
ADDED
@@ -0,0 +1,20 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const gptbase_1 = __importDefault(require("./gptbase"));
+class OpenAIBase extends gptbase_1.default {
+    constructor(apiKey, apiOption = {}) {
+        super();
+        this.apiKey = apiKey;
+        this.chatModel = apiOption.model || 'gpt-3.5-turbo';
+        this.maxtoken = apiOption.maxtoken || 2048;
+        this.top_p = apiOption.top_p || 0.95;
+        this.temperature = apiOption.temperature || 0.9;
+        this.presence_penalty = apiOption.presence_penalty || 0;
+        this.frequency_penalty = apiOption.frequency_penalty || 0;
+        this.embeddingmodel = apiOption.embedding || 'text-embedding-ada-002';
+    }
+}
+exports.default = OpenAIBase;
package/dist/openaiproxy.d.ts
ADDED
@@ -0,0 +1,21 @@
+import { ApiResult, OpenAIApiParameters, ProxyPatameters } from './declare';
+import OpenAIGpt from './openai';
+export default class OpenAIProxy extends OpenAIGpt {
+    protected readonly proxySetting: ProxyPatameters;
+    constructor(apiKey: string, proxyOption: ProxyPatameters, apiOption?: OpenAIApiParameters);
+    /**
+     * Override of the chatRequest method
+     * @param chatText
+     * @param callChatOption
+     * @param axiosOption
+     */
+    chatRequest(chatText: string | any[], _paramOption: any, axiosOption?: any): Promise<ApiResult>;
+    /**
+     * Override of the chatRequestInStream method
+     * @param chatText
+     * @param callChatOption
+     * @param attach
+     * @param axiosOption
+     */
+    chatRequestInStream(chatText: Array<any>, callChatOption: OpenAIApiParameters, attach?: any, axiosOption?: any): Promise<any>;
+}
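OpenAIProxy keeps OpenAIGpt's interface while targeting the relay named in ProxyPatameters. A usage sketch based only on the signatures above; the import path is assumed, and the implementation lives in dist/openaiproxy.js:

import OpenAIProxy from 'doomiaichat/dist/openaiproxy'; // assumed path

const proxy = new OpenAIProxy(process.env.OPENAI_KEY ?? '',
    { serviceurl: 'https://my-relay.example.com' }, // hypothetical relay
    { model: 'gpt-3.5-turbo' });
const reply = await proxy.chatRequest('ping', null);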