doomiaichat 2.6.0 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/azureai.d.ts +11 -2
- package/dist/azureai.js +66 -36
- package/dist/baiduai.d.ts +1 -0
- package/dist/baiduai.js +3 -0
- package/dist/declare.d.ts +2 -0
- package/dist/gptbase.d.ts +7 -0
- package/dist/gptprovider.js +3 -3
- package/dist/openai.d.ts +12 -4
- package/dist/openai.js +78 -5
- package/package.json +2 -2
- package/src/azureai.ts +75 -47
- package/src/baiduai.ts +5 -0
- package/src/declare.ts +2 -0
- package/src/gptbase.ts +7 -0
- package/src/gptprovider.ts +3 -3
- package/src/openai.ts +70 -7
package/dist/azureai.d.ts
CHANGED
@@ -1,12 +1,16 @@
+import { OpenAIApi } from "azure-openai";
 import { AzureOpenAIPatameters, ChatReponse, EmbeddingResult, OpenAIApiParameters } from "./declare";
 import OpenAIGpt from "./openai";
 export default class AzureAI extends OpenAIGpt {
     protected readonly azureSetting: AzureOpenAIPatameters;
     constructor(apiKey: string, azureOption: AzureOpenAIPatameters, apiOption?: OpenAIApiParameters);
+    /**
+     * 初始化OpenAI 的聊天对象Api
+     */
+    createOpenAI(apiKey: string): OpenAIApi;
     /**
      * ZAure OpenAI 最新的URL地址
      */
-    get BaseUrl(): string;
     get EmbeddingUrl(): string;
     /**
      * 获得文字的向量
@@ -16,7 +20,12 @@ export default class AzureAI extends OpenAIGpt {
     /**
      * 请求GPT接口
      */
-
+    /**
+     * 流式的聊天模式
+     * @param chatText
+     * @param _paramOption
+     * @param axiosOption
+     */
     /**
      * 获得一种内容的相似说法
      * 微软的AI参数中已经没有了n的参数
package/dist/azureai.js
CHANGED
@@ -12,7 +12,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
+const azure_openai_1 = require("azure-openai");
 const declare_1 = require("./declare");
+// import { Configuration, OpenAIApi, ChatCompletionRequestMessage } from "azure-openai"
 const openai_1 = __importDefault(require("./openai"));
 class AzureAI extends openai_1.default {
     constructor(apiKey, azureOption, apiOption = {}) {
@@ -24,13 +26,24 @@ class AzureAI extends openai_1.default {
         }
     }
     /**
-     *
+     * 初始化OpenAI 的聊天对象Api
      */
-
-    return
+    createOpenAI(apiKey) {
+        return new azure_openai_1.OpenAIApi(new azure_openai_1.Configuration({ apiKey,
+            azure: {
+                apiKey,
+                endpoint: this.azureSetting.endpoint,
+                deploymentName: this.azureSetting.engine
+            } }));
     }
+    /**
+     * ZAure OpenAI 最新的URL地址
+     */
+    // get BaseUrl(): string {
+    //     return `${this.azureSetting.endpoint}/openai/deployments/${this.azureSetting.engine}/chat/completions?api-version=${this.azureSetting.version || '2023-03-15-preview'}`
+    // }
     get EmbeddingUrl() {
-        return `${this.azureSetting.endpoint}/openai/deployments/${this.
+        return `${this.azureSetting.endpoint}/openai/deployments/${this.embeddingmodel || 'openai-embedding-ada-002'}/embeddings?api-version=2022-12-01`;
     }
     /**
      * 获得文字的向量
@@ -50,7 +63,6 @@ class AzureAI extends openai_1.default {
             let param = Object.assign(Object.assign({}, axiosOption), { method: "post", data: {
                     input: text
                 }, url: this.EmbeddingUrl });
-            console.log('param', param);
             const response = yield (0, declare_1.request)(param);
             if (response.data) {
                 return { successed: true, embedding: response.data.data[0].embedding };
@@ -65,37 +77,55 @@ class AzureAI extends openai_1.default {
     /**
      * 请求GPT接口
      */
-    chatRequest(chatText
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    // public override async chatRequest(chatText: string | Array<any>, paramOption: OpenAIApiParameters, axiosOption: any = {}): Promise<ChatReponse> {
+    //     if (!chatText) return { successed: false, error: { errcode: 2, errmsg: '缺失聊天的内容' } };
+    //     // if (!axiosOption.headers)
+    //     //     axiosOption.headers = { 'api-key': this.apiKey, 'Content-Type': 'application/json' };
+    //     // else {
+    //     //     axiosOption.headers['api-key'] = this.apiKey;
+    //     //     axiosOption.headers['Content-Type'] = 'application/json';
+    //     // }
+    //     if (!this.azureApi) {
+    //         this.azureApi = this.createAzureAI(this.apiKey);
+    //     }
+    //     let messages: Array<ChatCompletionRequestMessage> = typeof (chatText) == 'string' ?
+    //         [{ role: 'user', content: chatText }] : chatText;
+    //     try {
+    //         // let param = {
+    //         //     ...axiosOption,
+    //         //     method: "post",
+    //         //     data: {
+    //         //         messages,
+    //         //         temperature: Number(paramOption?.temperature || this.temperature),
+    //         //         max_tokens: Number(paramOption?.maxtoken || this.maxtoken),
+    //         //     },
+    //         //     url: this.BaseUrl
+    //         //     };
+    //         // // console.log('axiosOption', param)
+    //         // const response = await request(param)
+    //         const response: any = await this.azureApi.createChatCompletion({
+    //             model: this.azureSetting.engine,
+    //             messages: messages,
+    //             temperature: Number(paramOption?.temperature || this.temperature),
+    //             max_tokens: Number(paramOption?.maxtoken || this.maxtoken),
+    //             n: Number(paramOption?.replyCounts || 1) || 1
+    //         }, axiosOption);
+    //         console.log('response.data', JSON.stringify(response.data))
+    //         if (response.data.choices) {
+    //             return { successed: true, message: response.data.choices, usage: response.data.usage };
+    //         }
+    //         return { successed: false, ...response.data };
+    //     } catch (error) {
+    //         console.log('result is error ', error)
+    //         return { successed: false, error };
+    //     }
+    // }
+    /**
+     * 流式的聊天模式
+     * @param chatText
+     * @param _paramOption
+     * @param axiosOption
+     */
     /**
      * 获得一种内容的相似说法
      * 微软的AI参数中已经没有了n的参数
package/dist/baiduai.d.ts
CHANGED
@@ -23,6 +23,7 @@ export default class BaiduWenXinAI extends GptBase {
      * 请求GPT接口
      */
     chatRequest(chatText: string | Array<any>, _paramOption: any, axiosOption?: any): Promise<ApiResult>;
+    chatRequestInStream(_chatText: string | any[], _paramOption: any, _axiosOption: any): void;
     commentQuestionAnswer(_question: string, _answer: string, _axiosOption: any): Promise<CommentResult>;
     getScentenceEmotional(_s1: string, _axiosOption: any): Promise<EmotionResult>;
     getScentenseSimilarity(_s1: string, _s2: string, _axiosOption: any): Promise<SimilarityResult>;
package/dist/baiduai.js
CHANGED
@@ -97,6 +97,9 @@ class BaiduWenXinAI extends gptbase_1.default {
             }
         });
     }
+    chatRequestInStream(_chatText, _paramOption, _axiosOption) {
+        throw new Error("Method not implemented.");
+    }
     commentQuestionAnswer(_question, _answer, _axiosOption) {
         throw new Error("Method not implemented.");
     }
package/dist/declare.d.ts
CHANGED
@@ -21,6 +21,7 @@ export interface ChatReponse extends ApiResult {
      * @memberof ChatReponse
      */
     'message'?: Array<any>;
+    'usage'?: any;
 }
 export interface OutlineSummaryItem {
     /**
@@ -46,6 +47,7 @@ export interface SummaryReponse extends ApiResult {
  * 调用OpenAI Api的参数约定
  */
 export interface OpenAIApiParameters {
+    'embedding'?: string;
     'model'?: string;
     'maxtoken'?: number;
     'temperature'?: number;
package/dist/gptbase.d.ts
CHANGED
@@ -18,6 +18,13 @@ export default abstract class GptBase extends EventEmitter {
      * @param axiosOption
      */
     abstract chatRequest(chatText: string | Array<any>, _paramOption: any, axiosOption: any): Promise<ApiResult>;
+    /**
+     * 流式的聊天模式
+     * @param chatText
+     * @param _paramOption
+     * @param axiosOption
+     */
+    abstract chatRequestInStream(chatText: string | Array<any>, _paramOption: any, axiosOption: any): void;
     /**
      * 点评问题回答的评价
      * @param question 问题题干
package/dist/gptprovider.js
CHANGED
@@ -28,12 +28,12 @@ exports.GptProviderEnum = {
  * @returns
  */
 function createGpt(provider, apikey, setting) {
-    let { model, maxtoken, temperature, endpoint, engine, version } = setting || {};
+    let { model, maxtoken, temperature, endpoint, engine, version, embedding } = setting || {};
     switch (provider) {
         case exports.GptProviderEnum.OPENAI:
-            return new openai_1.default(apikey + '', { model, maxtoken, temperature });
+            return new openai_1.default(apikey + '', { model, maxtoken, temperature, embedding });
         case exports.GptProviderEnum.MICROSOFT:
-            return new azureai_1.default(apikey + '', { endpoint, engine, version }, { model, maxtoken, temperature });
+            return new azureai_1.default(apikey + '', { endpoint, engine, version }, { model, maxtoken, temperature, embedding });
         case exports.GptProviderEnum.BAIDU:
             let cred = typeof (apikey) === 'string' ? { apikey, securitykey: apikey } : apikey;
             return new baiduai_1.default(cred);
package/dist/openai.d.ts
CHANGED
@@ -1,12 +1,13 @@
-import { OpenAIApi, ChatCompletionRequestMessage } from "openai";
+import { OpenAIApi, ChatCompletionRequestMessage } from "azure-openai";
 import GptBase from "./gptbase";
-import { OpenAIApiParameters, ChatReponse, SummaryReponse, FaqItem, ExaminationPaperResult, EmotionResult, SimilarityResult, QuestionItem, CommentResult } from './declare';
+import { OpenAIApiParameters, ChatReponse, SummaryReponse, FaqItem, ExaminationPaperResult, EmotionResult, SimilarityResult, QuestionItem, CommentResult, EmbeddingResult } from './declare';
 export default class OpenAIGpt extends GptBase {
     protected readonly apiKey: string;
-
+    protected aiApi: OpenAIApi | undefined;
     protected readonly chatModel: string;
     protected readonly maxtoken: number;
     protected readonly temperature: number;
+    protected readonly embeddingmodel: string;
     /**
      *
      * @param apiKey 调用OpenAI 的key
@@ -21,12 +22,19 @@ export default class OpenAIGpt extends GptBase {
      * 获得文字的向量
      * @param text
      */
-    getTextEmbedding(
+    getTextEmbedding(text: string, axiosOption: any): Promise<EmbeddingResult>;
     /**
      * 向OpenAI发送一个聊天请求
      * @param {*} chatText
      */
     chatRequest(chatText: string | Array<any>, callChatOption: OpenAIApiParameters, axiosOption?: any): Promise<ChatReponse>;
+    /**
+     * 流式的聊天模式
+     * @param chatText
+     * @param _paramOption
+     * @param axiosOption
+     */
+    chatRequestInStream(chatText: string | Array<any>, callChatOption: OpenAIApiParameters, axiosOption: any): Promise<void>;
     /**
      * 点评问题回答的评价
      * @param question
package/dist/openai.js
CHANGED
@@ -12,7 +12,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-const
+const azure_openai_1 = require("azure-openai");
 // import { EventEmitter } from "events";
 const gptbase_1 = __importDefault(require("./gptbase"));
 const SECTION_LENGTH = 1600; ///每2400个字符分成一组
@@ -37,20 +37,36 @@ class OpenAIGpt extends gptbase_1.default {
         this.chatModel = apiOption.model || 'gpt-3.5-turbo';
         this.maxtoken = apiOption.maxtoken || 2048;
         this.temperature = apiOption.temperature || 0.9;
+        this.embeddingmodel = apiOption.embedding || 'text-embedding-ada-002';
     }
     /**
     * 初始化OpenAI 的聊天对象Api
     */
     createOpenAI(apiKey) {
-        return new
+        return new azure_openai_1.OpenAIApi(new azure_openai_1.Configuration({ apiKey }));
     }
     /**
     * 获得文字的向量
    * @param text
    */
-    getTextEmbedding(
+    getTextEmbedding(text, axiosOption) {
         return __awaiter(this, void 0, void 0, function* () {
-
+            if (!text)
+                return { successed: false, error: { errcode: 2, errmsg: 'content required' } };
+            if (!this.aiApi) {
+                this.aiApi = this.createOpenAI(this.apiKey);
+            }
+            try {
+                const response = yield this.aiApi.createEmbedding({
+                    model: this.embeddingmodel,
+                    input: text,
+                }, axiosOption);
+                return { successed: true, embedding: response.data.data[0].embedding };
+            }
+            catch (error) {
+                // console.log('result is error ', error)
+                return { successed: false, error };
+            }
         });
     }
     /**
@@ -76,7 +92,7 @@ class OpenAIGpt extends gptbase_1.default {
                 max_tokens: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.maxtoken) || this.maxtoken),
                 n: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.replyCounts) || 1) || 1
             }, axiosOption);
-            return { successed: true, message: response.data.choices };
+            return { successed: true, message: response.data.choices, usage: response.data.usage };
         }
         catch (error) {
             console.log('result is error ', error);
@@ -84,6 +100,63 @@ class OpenAIGpt extends gptbase_1.default {
             }
         });
     }
+    /**
+     * 流式的聊天模式
+     * @param chatText
+     * @param _paramOption
+     * @param axiosOption
+     */
+    chatRequestInStream(chatText, callChatOption, axiosOption) {
+        return __awaiter(this, void 0, void 0, function* () {
+            if (!chatText)
+                this.emit('chaterror', { successed: false, error: 'no text in chat' });
+            if (!this.aiApi) {
+                this.aiApi = this.createOpenAI(this.apiKey);
+            }
+            let message = typeof (chatText) == 'string' ?
+                [{ role: 'user', content: chatText }] : chatText;
+            axiosOption = Object.assign({}, axiosOption || { timeout: 60000 }, { responseType: 'stream' });
+            try {
+                let finishreason = null, usage = null;
+                const response = yield this.aiApi.createChatCompletion({
+                    // engine: callChatOption?.engine||this.engine,
+                    model: (callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.model) || this.chatModel,
+                    messages: message,
+                    stream: true,
+                    temperature: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.temperature) || this.temperature),
+                    max_tokens: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.maxtoken) || this.maxtoken)
+                }, axiosOption);
+                response.data.on('data', (data) => {
+                    const lines = data.toString().split('\n').filter((line) => line.trim() !== '');
+                    for (const line of lines) {
+                        const message = line.replace(/^data: /, '');
+                        // console.log('message', message)
+                        if (message === '[DONE]') {
+                            this.emit('chatdone', { successed: true, finish_reason: finishreason, usage });
+                            return; // Stream finished
+                        }
+                        try {
+                            ///{ delta: { content: '$\\' }, index: 0, finish_reason: null }
+                            ///发送出去
+                            const parsed = JSON.parse(message);
+                            finishreason = parsed.choices[0].finish_reason;
+                            usage = parsed.usage;
+                            this.emit('chattext', { successed: true, text: parsed.choices[0].delta.content, finish_reason: parsed.choices[0].finish_reason, index: parsed.choices[0].index, usage });
+                        }
+                        catch (error) {
+                            this.emit('chaterror', { successed: false, error: 'no text in chat', message });
+                            //console.error('Could not JSON parse stream message', message, error);
+                        }
+                    }
+                });
+            }
+            catch (error) {
+                this.emit('error', { successed: false, error: 'call axios faied ' + error });
+                //console.log('result is error ', error)
+                // return { successed: false, error };
+            }
+        });
+    }
     /**
      * 点评问题回答的评价
      * @param question
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "doomiaichat",
-  "version": "2.6.0",
+  "version": "3.0.0",
   "description": "Doomisoft OpenAI",
   "main": "dist/index.js",
   "scripts": {
@@ -17,6 +17,6 @@
   },
   "dependencies": {
     "axios": "^1.3.4",
-    "openai": "^
+    "azure-openai": "^0.9.4"
   }
 }
package/src/azureai.ts
CHANGED
@@ -1,32 +1,44 @@
+import { Configuration, OpenAIApi } from "azure-openai";
 import { AzureOpenAIPatameters, ChatReponse, EmbeddingResult, OpenAIApiParameters, request } from "./declare";
-import
-import
+// import { Configuration, OpenAIApi, ChatCompletionRequestMessage } from "azure-openai"
+import OpenAIGpt from "./openai"
 export default class AzureAI extends OpenAIGpt {
     protected readonly azureSetting: AzureOpenAIPatameters;
-    constructor(apiKey: string, azureOption:AzureOpenAIPatameters, apiOption: OpenAIApiParameters = {}) {
+    constructor(apiKey: string, azureOption: AzureOpenAIPatameters, apiOption: OpenAIApiParameters = {}) {
         super(apiKey, apiOption);
         this.azureSetting = azureOption;
         if (!this.azureSetting.endpoint.toLowerCase().startsWith('https://') &&
-            !this.azureSetting.endpoint.toLowerCase().startsWith('https://')){
+            !this.azureSetting.endpoint.toLowerCase().startsWith('https://')) {
             this.azureSetting.endpoint = 'https://' + this.azureSetting.endpoint;
         }
     }
     /**
-     *
+     * 初始化OpenAI 的聊天对象Api
     */
-
-    return
+    override createOpenAI(apiKey: string): OpenAIApi {
+        return new OpenAIApi(new Configuration({ apiKey,
+            azure:{
+                apiKey,
+                endpoint: this.azureSetting.endpoint,
+                deploymentName: this.azureSetting.engine
+            } }))
     }
+    /**
+     * ZAure OpenAI 最新的URL地址
+     */
+    // get BaseUrl(): string {
+    //     return `${this.azureSetting.endpoint}/openai/deployments/${this.azureSetting.engine}/chat/completions?api-version=${this.azureSetting.version || '2023-03-15-preview'}`
+    // }

     get EmbeddingUrl(): string {
-        return `${this.azureSetting.endpoint}/openai/deployments/${this.
+        return `${this.azureSetting.endpoint}/openai/deployments/${this.embeddingmodel || 'openai-embedding-ada-002'}/embeddings?api-version=2022-12-01`
     }

     /**
      * 获得文字的向量
      * @param text
      */
-    override async getTextEmbedding(text: string,axiosOption: any = {}): Promise<EmbeddingResult> {
+    override async getTextEmbedding(text: string, axiosOption: any = {}): Promise<EmbeddingResult> {
         if (!text) return { successed: false, error: { errcode: 2, errmsg: 'content required' } };
         if (!axiosOption.headers)
             axiosOption.headers = { 'api-key': this.apiKey, 'Content-Type': 'application/json' };
@@ -44,7 +56,6 @@ export default class AzureAI extends OpenAIGpt {
             },
             url: this.EmbeddingUrl
         };
-        console.log('param', param)
         const response = await request(param)
         if (response.data) {
             return { successed: true, embedding: response.data.data[0].embedding };
@@ -57,40 +68,57 @@ export default class AzureAI extends OpenAIGpt {
     /**
      * 请求GPT接口
      */
-    public override async chatRequest(chatText: string | Array<any>, paramOption: OpenAIApiParameters, axiosOption: any = {}): Promise<ChatReponse> {
-
-
-
-
-
-
-
+    // public override async chatRequest(chatText: string | Array<any>, paramOption: OpenAIApiParameters, axiosOption: any = {}): Promise<ChatReponse> {
+    //     if (!chatText) return { successed: false, error: { errcode: 2, errmsg: '缺失聊天的内容' } };
+    //     // if (!axiosOption.headers)
+    //     //     axiosOption.headers = { 'api-key': this.apiKey, 'Content-Type': 'application/json' };
+    //     // else {
+    //     //     axiosOption.headers['api-key'] = this.apiKey;
+    //     //     axiosOption.headers['Content-Type'] = 'application/json';
+    //     // }
+    //     if (!this.azureApi) {
+    //         this.azureApi = this.createAzureAI(this.apiKey);
+    //     }
+    //     let messages: Array<ChatCompletionRequestMessage> = typeof (chatText) == 'string' ?
+    //         [{ role: 'user', content: chatText }] : chatText;
+    //     try {
+    //         // let param = {
+    //         //     ...axiosOption,
+    //         //     method: "post",
+    //         //     data: {
+    //         //         messages,
+    //         //         temperature: Number(paramOption?.temperature || this.temperature),
+    //         //         max_tokens: Number(paramOption?.maxtoken || this.maxtoken),
+    //         //     },
+    //         //     url: this.BaseUrl
+    //         //     };
+    //         // // console.log('axiosOption', param)
+    //         // const response = await request(param)
+    //         const response: any = await this.azureApi.createChatCompletion({
+    //             model: this.azureSetting.engine,
+    //             messages: messages,
+    //             temperature: Number(paramOption?.temperature || this.temperature),
+    //             max_tokens: Number(paramOption?.maxtoken || this.maxtoken),
+    //             n: Number(paramOption?.replyCounts || 1) || 1
+    //         }, axiosOption);
+    //         console.log('response.data', JSON.stringify(response.data))
+    //         if (response.data.choices) {
+    //             return { successed: true, message: response.data.choices, usage: response.data.usage };
+    //         }
+    //         return { successed: false, ...response.data };
+    //     } catch (error) {
+    //         console.log('result is error ', error)
+    //         return { successed: false, error };
+    //     }
+
+    // }

-
-
-
-
-
-
-            data: {
-                messages,
-                temperature: Number(paramOption?.temperature || this.temperature),
-                max_tokens: Number(paramOption?.maxtoken || this.maxtoken),
-            },
-            url: this.BaseUrl
-        };
-        // console.log('axiosOption', param)
-        const response =await request(param)
-        if (response.data.choices){
-            return { successed: true, message: response.data.choices };
-        }
-        return { successed: false, ...response.data };
-    } catch (error) {
-        console.log('result is error ', error)
-        return { successed: false, error };
-    }
-
-    }
+    /**
+     * 流式的聊天模式
+     * @param chatText
+     * @param _paramOption
+     * @param axiosOption
+     */
     /**
      * 获得一种内容的相似说法
      * 微软的AI参数中已经没有了n的参数
@@ -110,19 +138,19 @@ export default class AzureAI extends OpenAIGpt {
         const messages = [
             { role: 'system', content: '忘记我们的聊天,你现在是一名专业的语言大师' },
             { role: 'user', content: text },
-            { role: 'user', content: '最终结果按照["意思相同语句","意思相同语句"]的JSON数组的格式输出。'}//如:"今天天气真好"的相同2句内容,输出结果为:["今天晴空万里无云","今天的天气适合出游"]。' },
+            { role: 'user', content: '最终结果按照["意思相同语句","意思相同语句"]的JSON数组的格式输出。' }//如:"今天天气真好"的相同2句内容,输出结果为:["今天晴空万里无云","今天的天气适合出游"]。' },
         ]
         let result = await this.chatRequest(messages, {}, axiosOption);
         if (!result.successed || !result.message) return result;
         let value = result.message[0].message.content.trim();
         let replyJson = this.fixedJsonString(value);
         ///能够提取到内容
-        if (replyJson.length) return { successed: true, message:replyJson }
+        if (replyJson.length) return { successed: true, message: replyJson }
         ///回答的内容非JSON格式,自己来提取算了
-
+
         console.log('自己组装', value);
         let sentences = value.split(/",|\n/g) ///用换行或",来割分文本内容
-        sentences = sentences.map((str: string)=>{
+        sentences = sentences.map((str: string) => {
             return str.replace(/(\[|"|\]|\{|\})/g, '')
         })
         // let matched = value.match(/\d+分/g), score = 0;
package/src/baiduai.ts
CHANGED
@@ -2,6 +2,7 @@ import { EmotionResult, SimilarityResult, ChatReponse, SummaryReponse, Examinati
 import GptBase from "./gptbase"
 const TOKEN_CACHE_KEY = "key:_doomisoft:baiduwenxin:"
 export default class BaiduWenXinAI extends GptBase {
+
     protected credential: ApiCredential;
     private Cacher: CacheProvider|undefined;
     /**
@@ -81,6 +82,10 @@ export default class BaiduWenXinAI extends GptBase {
         }

     }
+
+    chatRequestInStream(_chatText: string | any[], _paramOption: any, _axiosOption: any): void {
+        throw new Error("Method not implemented.");
+    }
     commentQuestionAnswer(_question: string, _answer: string, _axiosOption: any): Promise<CommentResult>{
         throw new Error("Method not implemented.");
     }
package/src/declare.ts
CHANGED
@@ -24,6 +24,7 @@ export interface ChatReponse extends ApiResult {
      * @memberof ChatReponse
      */
     'message'?: Array<any>;
+    'usage'?:any;
 }
 export interface OutlineSummaryItem {

@@ -51,6 +52,7 @@ export interface SummaryReponse extends ApiResult {
  * 调用OpenAI Api的参数约定
  */
 export interface OpenAIApiParameters {
+    'embedding'?:string, ///模型引擎,兼容Azure
     'model'?: string, ///模型名称
     'maxtoken'?: number; ///返回的最大token
     'temperature'?: number;
package/src/gptbase.ts
CHANGED
@@ -20,6 +20,13 @@ export default abstract class GptBase extends EventEmitter {
      * @param axiosOption
      */
     abstract chatRequest(chatText: string | Array<any>, _paramOption: any, axiosOption: any): Promise<ApiResult>;
+    /**
+     * 流式的聊天模式
+     * @param chatText
+     * @param _paramOption
+     * @param axiosOption
+     */
+    abstract chatRequestInStream(chatText: string | Array<any>, _paramOption: any, axiosOption: any):void;
     /**
      * 点评问题回答的评价
      * @param question 问题题干
package/src/gptprovider.ts
CHANGED
@@ -24,12 +24,12 @@ export type GptProviderEnum = typeof GptProviderEnum[keyof typeof GptProviderEnu
  * @returns
  */
 export function createGpt(provider: GptProviderEnum, apikey: string|ApiCredential, setting: any): GptBase | null {
-    let { model, maxtoken, temperature,endpoint,engine,version } = setting || {};
+    let { model, maxtoken, temperature, endpoint, engine, version, embedding } = setting || {};
     switch (provider) {
         case GptProviderEnum.OPENAI:
-            return new OpenAIGpt(apikey+'', { model, maxtoken, temperature });
+            return new OpenAIGpt(apikey + '', { model, maxtoken, temperature, embedding });
         case GptProviderEnum.MICROSOFT:
-            return new AzureAI(apikey+'', { endpoint, engine, version }, { model, maxtoken, temperature }, );
+            return new AzureAI(apikey + '', { endpoint, engine, version }, { model, maxtoken, temperature, embedding }, );
         case GptProviderEnum.BAIDU:
             let cred: ApiCredential = typeof (apikey) === 'string' ? { apikey, securitykey: apikey } : apikey
             return new BaiduWenXinAI(cred);
package/src/openai.ts
CHANGED
@@ -1,7 +1,7 @@
-import { Configuration, OpenAIApi, ChatCompletionRequestMessage } from "openai"
+import { Configuration, OpenAIApi, ChatCompletionRequestMessage } from "azure-openai"
 // import { EventEmitter } from "events";
 import GptBase from "./gptbase"
-import { OpenAIApiParameters, ChatReponse, OutlineSummaryItem, SummaryReponse, FaqItem, ExaminationPaperResult, EmotionResult, SimilarityResult, QuestionItem, CommentResult } from './declare'
+import { OpenAIApiParameters, ChatReponse, OutlineSummaryItem, SummaryReponse, FaqItem, ExaminationPaperResult, EmotionResult, SimilarityResult, QuestionItem, CommentResult, EmbeddingResult } from './declare'
 const SECTION_LENGTH = 1600; ///每2400个字符分成一组
 const MESSAGE_LENGTH = 1; ///每次送8句话给openai 进行解析,送多了,会报错
 //请将答案放在最后,标记为答案:()
@@ -16,10 +16,11 @@ const QUESTION_TYPE: string[] = ['singlechoice', 'multiplechoice', 'trueorfalse'

 export default class OpenAIGpt extends GptBase {
     protected readonly apiKey: string;
-
+    protected aiApi: OpenAIApi | undefined;
     protected readonly chatModel: string;
     protected readonly maxtoken: number;
     protected readonly temperature: number;
+    protected readonly embeddingmodel:string;
     /**
      *
      * @param apiKey 调用OpenAI 的key
@@ -27,10 +28,12 @@ export default class OpenAIGpt extends GptBase {
      */
     constructor(apiKey: string, apiOption: OpenAIApiParameters = {}) {
         super();
+
         this.apiKey = apiKey;
         this.chatModel = apiOption.model || 'gpt-3.5-turbo';
         this.maxtoken = apiOption.maxtoken || 2048;
         this.temperature = apiOption.temperature || 0.9;
+        this.embeddingmodel = apiOption.embedding || 'text-embedding-ada-002';
     }
     /**
      * 初始化OpenAI 的聊天对象Api
@@ -43,8 +46,21 @@ export default class OpenAIGpt extends GptBase {
      * 获得文字的向量
      * @param text
      */
-    async getTextEmbedding(
-        return
+    async getTextEmbedding(text: string, axiosOption: any): Promise<EmbeddingResult>{
+        if (!text) return { successed: false, error: { errcode: 2, errmsg: 'content required' } };
+        if (!this.aiApi) {
+            this.aiApi = this.createOpenAI(this.apiKey);
+        }
+        try {
+            const response:any = await this.aiApi.createEmbedding({
+                model: this.embeddingmodel,
+                input: text,
+            }, axiosOption);
+            return { successed: true, embedding: response.data.data[0].embedding };
+        } catch (error) {
+            // console.log('result is error ', error)
+            return { successed: false, error };
+        }
     }
     /**
      * 向OpenAI发送一个聊天请求
@@ -61,20 +77,67 @@ export default class OpenAIGpt extends GptBase {
             [{ role: 'user', content: chatText }] : chatText;
         // console.log('message', message)
         try {
-            const response = await this.aiApi.createChatCompletion({
+            const response:any = await this.aiApi.createChatCompletion({
                 model: callChatOption?.model || this.chatModel,
                 messages: message,
                 temperature: Number(callChatOption?.temperature || this.temperature),
                 max_tokens: Number(callChatOption?.maxtoken || this.maxtoken),
                 n: Number(callChatOption?.replyCounts || 1) || 1
             }, axiosOption);
-            return { successed: true, message: response.data.choices };
+            return { successed: true, message: response.data.choices, usage: response.data.usage };
         } catch (error) {
             console.log('result is error ', error)
             return { successed: false, error };
         }

     }
+    /**
+     * 流式的聊天模式
+     * @param chatText
+     * @param _paramOption
+     * @param axiosOption
+     */
+    async chatRequestInStream(chatText: string | Array<any>, callChatOption: OpenAIApiParameters, axiosOption: any):Promise<void>{
+        if (!chatText) this.emit('chaterror', { successed: false, error:'no text in chat'});
+        if (!this.aiApi) {
+            this.aiApi = this.createOpenAI(this.apiKey);
+        }
+        let message: Array<ChatCompletionRequestMessage> = typeof (chatText) == 'string' ?
+            [{ role: 'user', content: chatText }] : chatText;
+        axiosOption = Object.assign({}, axiosOption || { timeout: 60000 }, { responseType: 'stream' })
+        try {
+            let finishreason:any = null,usage:any = null;
+            const response: any = await this.aiApi.createChatCompletion({
+                model: callChatOption?.model || this.chatModel,
+                messages: message,
+                stream:true,
+                temperature: Number(callChatOption?.temperature || this.temperature),
+                max_tokens: Number(callChatOption?.maxtoken || this.maxtoken)
+            }, axiosOption);
+            response.data.on('data', (data:any) => {
+                const lines = data.toString().split('\n').filter((line:string) => line.trim() !== '');
+                for (const line of lines) {
+                    const message = line.replace(/^data: /, '');
+                    if (message === '[DONE]') {
+                        this.emit('chatdone', { successed: true, finish_reason: finishreason, usage })
+                        return; // Stream finished
+                    }
+                    try {
+                        ///{ delta: { content: '$\\' }, index: 0, finish_reason: null }
+                        ///发送出去
+                        const parsed = JSON.parse(message);
+                        finishreason = parsed.choices[0].finish_reason;
+                        usage = parsed.usage;
+                        this.emit('chattext', { successed: true, text: parsed.choices[0].delta.content, finish_reason: parsed.choices[0].finish_reason, index: parsed.choices[0].index, usage})
+                    } catch (error) {
+                        this.emit('chaterror', { successed: false, error: 'JSON parse stream message', message });
+                    }
+                }
+            });
+        } catch (error) {
+            this.emit('error', { successed: false, error: 'call axios faied ' + error });
+        }
+    }

     /**
      * 点评问题回答的评价