doomiaichat 3.0.2 → 3.2.0

This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
package/dist/gptbase.d.ts CHANGED
@@ -24,7 +24,7 @@ export default abstract class GptBase extends EventEmitter {
      * @param _paramOption
      * @param axiosOption
      */
-    abstract chatRequestInStream(chatText: string | Array<any>, _paramOption: any, axiosOption: any): void;
+    abstract chatRequestInStream(chatText: string | Array<any>, _paramOption: any, axiosOption: any): any;
     /**
      * Evaluate the answer to a question
      * @param question the question text
package/dist/openai.d.ts CHANGED
@@ -34,7 +34,7 @@ export default class OpenAIGpt extends GptBase {
      * @param _paramOption
      * @param axiosOption
      */
-    chatRequestInStream(chatText: string | Array<any>, callChatOption: OpenAIApiParameters, axiosOption: any): Promise<void>;
+    chatRequestInStream(chatText: string | Array<any>, callChatOption: OpenAIApiParameters, axiosOption: any): Promise<any>;
     /**
      * Evaluate the answer to a question
      * @param question
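
In both declaration files the only change is the return type of chatRequestInStream, widened from void / Promise<void> to any / Promise<any> so the method can hand a result object back to the caller. A minimal consumer sketch follows; it assumes nothing beyond the event names and the { successed, requestid } return shape visible in the dist/openai.js hunks below (the import path and constructor of OpenAIGpt are not part of this diff, so the instance is typed structurally):

import { EventEmitter } from 'events';

// Structural type matching the 3.2.0 signature; GptBase extends EventEmitter per the diff.
type StreamingGpt = EventEmitter & {
  chatRequestInStream(chatText: string | Array<any>, callChatOption: any, axiosOption: any): Promise<any>;
};

async function streamChat(gpt: StreamingGpt, prompt: string): Promise<void> {
  gpt.on('chattext', (chunk: any) => process.stdout.write(chunk.text ?? '')); // per-delta text
  gpt.on('chatdone', (done: any) => console.log('\nfull reply:', done.text)); // accumulated text, new in 3.2.0
  gpt.on('chaterror', (err: any) => console.error('stream error:', err));

  // 3.0.2 resolved to void; 3.2.0 resolves to { successed, requestid }, so the
  // caller can keep the id and match it against the event payloads above.
  const result = await gpt.chatRequestInStream(prompt, {}, { timeout: 60000 });
  console.log('request id:', result.requestid, 'accepted:', result.successed);
}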
package/dist/openai.js CHANGED
@@ -116,8 +116,10 @@ class OpenAIGpt extends gptbase_1.default {
             let message = typeof (chatText) == 'string' ?
                 [{ role: 'user', content: chatText }] : chatText;
             axiosOption = Object.assign({}, axiosOption || { timeout: 60000 }, { responseType: 'stream' });
+            let requestid = Math.ceil(Math.random() * (new Date().getTime() * Math.random()) / 1000);
             try {
                 let finishreason = null, usage = null;
+                /// so the caller can tell which requestid the reply belongs to
                 const response = yield this.aiApi.createChatCompletion({
                     model: (callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.model) || this.chatModel,
                     messages: message,
@@ -125,12 +127,13 @@ class OpenAIGpt extends gptbase_1.default {
                     temperature: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.temperature) || this.temperature),
                     max_tokens: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.maxtoken) || this.maxtoken)
                 }, axiosOption);
+                let replytext = [];
                 response.data.on('data', (data) => {
                     const lines = data.toString().split('\n').filter((line) => line.trim() !== '');
                     for (const line of lines) {
                         const message = line.replace(/^data: /, '');
                         if (message === '[DONE]') {
-                            this.emit('chatdone', { successed: true, finish_reason: finishreason, usage });
+                            this.emit('chatdone', { successed: true, requestid, text: replytext.join(''), finish_reason: finishreason, usage });
                             return; // Stream finished
                         }
                         try {
@@ -139,16 +142,20 @@ class OpenAIGpt extends gptbase_1.default {
                             const parsed = JSON.parse(message);
                             finishreason = parsed.choices[0].finish_reason;
                             usage = parsed.usage;
-                            this.emit('chattext', { successed: true, text: parsed.choices[0].delta.content, finish_reason: parsed.choices[0].finish_reason, index: parsed.choices[0].index, usage });
+                            let streamtext = parsed.choices[0].delta.content;
+                            replytext.push(streamtext);
+                            this.emit('chattext', { successed: true, requestid, text: streamtext, finish_reason: parsed.choices[0].finish_reason, index: parsed.choices[0].index, usage });
                         }
                         catch (error) {
-                            this.emit('chaterror', { successed: false, error: 'JSON parse stream message', message });
+                            this.emit('chaterror', { successed: false, requestid, error: 'JSON parse stream message', message });
                         }
                     }
                 });
+                return { successed: true, requestid };
             }
             catch (error) {
-                this.emit('error', { successed: false, error: 'call axios faied ' + error });
+                // this.emit('error', { successed: false, requestid, error: 'call axios faied ' + error });
+                return { successed: false, requestid };
             }
         });
     }
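
Functionally, 3.2.0 generates a numeric requestid per call, echoes it in every chattext / chatdone / chaterror payload, accumulates the streamed deltas into replytext so that chatdone carries the full reply text, and resolves the promise with { successed, requestid } as soon as the stream listener is attached; a failure before streaming now resolves with successed: false rather than emitting 'error'. The id scheme is reproduced below for inspection only; it is random but not guaranteed unique, which matters if callers key state off it:

// Same expression as in the hunk above, extracted for illustration.
// Both Math.random() factors are in [0, 1), so the result is at most about the
// current epoch time in seconds (on the order of 1e9) and collisions are possible.
function makeRequestId(): number {
  return Math.ceil(Math.random() * (new Date().getTime() * Math.random()) / 1000);
}

console.log(makeRequestId()); // e.g. 412083756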
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "doomiaichat",
-  "version": "3.0.2",
+  "version": "3.2.0",
   "description": "Doomisoft OpenAI",
   "main": "dist/index.js",
   "scripts": {
package/src/gptbase.ts CHANGED
@@ -26,7 +26,7 @@ export default abstract class GptBase extends EventEmitter {
      * @param _paramOption
      * @param axiosOption
      */
-    abstract chatRequestInStream(chatText: string | Array<any>, _paramOption: any, axiosOption: any):void;
+    abstract chatRequestInStream(chatText: string | Array<any>, _paramOption: any, axiosOption: any):any;
     /**
      * Evaluate the answer to a question
      * @param question the question text
package/src/openai.ts CHANGED
@@ -97,7 +97,7 @@ export default class OpenAIGpt extends GptBase {
      * @param _paramOption
      * @param axiosOption
      */
-    async chatRequestInStream(chatText: string | Array<any>, callChatOption: OpenAIApiParameters, axiosOption: any):Promise<void>{
+    async chatRequestInStream(chatText: string | Array<any>, callChatOption: OpenAIApiParameters, axiosOption: any):Promise<any>{
         if (!chatText) this.emit('chaterror', { successed: false, error:'no text in chat'});
         if (!this.aiApi) {
             this.aiApi = this.createOpenAI(this.apiKey);
@@ -105,8 +105,11 @@ export default class OpenAIGpt extends GptBase {
         let message: Array<ChatCompletionRequestMessage> = typeof (chatText) == 'string' ?
             [{ role: 'user', content: chatText }] : chatText;
         axiosOption = Object.assign({}, axiosOption || { timeout: 60000 }, { responseType: 'stream' })
+        let requestid = Math.ceil(Math.random() * (new Date().getTime() * Math.random()) / 1000);
         try {
             let finishreason:any = null,usage:any = null;
+            /// so the caller can tell which requestid the reply belongs to
+
             const response: any = await this.aiApi.createChatCompletion({
                 model: callChatOption?.model || this.chatModel,
                 messages: message,
@@ -114,12 +117,13 @@ export default class OpenAIGpt extends GptBase {
                 temperature: Number(callChatOption?.temperature || this.temperature),
                 max_tokens: Number(callChatOption?.maxtoken || this.maxtoken)
             }, axiosOption);
+            let replytext:string[] = [];
             response.data.on('data', (data:any) => {
                 const lines = data.toString().split('\n').filter((line:string) => line.trim() !== '');
                 for (const line of lines) {
                     const message = line.replace(/^data: /, '');
                     if (message === '[DONE]') {
-                        this.emit('chatdone', { successed: true, finish_reason: finishreason, usage })
+                        this.emit('chatdone', { successed: true, requestid,text: replytext.join(''), finish_reason: finishreason, usage })
                         return; // Stream finished
                     }
                     try {
@@ -128,14 +132,18 @@ export default class OpenAIGpt extends GptBase {
                         const parsed = JSON.parse(message);
                         finishreason = parsed.choices[0].finish_reason;
                         usage = parsed.usage;
-                        this.emit('chattext', { successed: true, text: parsed.choices[0].delta.content, finish_reason: parsed.choices[0].finish_reason, index: parsed.choices[0].index, usage})
+                        let streamtext = parsed.choices[0].delta.content;
+                        replytext.push(streamtext)
+                        this.emit('chattext', { successed: true, requestid, text: streamtext, finish_reason: parsed.choices[0].finish_reason, index: parsed.choices[0].index, usage})
                     } catch (error) {
-                        this.emit('chaterror', { successed: false, error: 'JSON parse stream message', message });
+                        this.emit('chaterror', { successed: false, requestid, error: 'JSON parse stream message', message });
                     }
                 }
             });
+            return { successed: true, requestid }
         } catch (error) {
-            this.emit('error', { successed: false, error: 'call axios faied ' + error });
+            // this.emit('error', { successed: false, requestid, error: 'call axios faied ' + error });
+            return { successed: false, requestid }
         }
     }
 
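
The src/openai.ts hunks mirror the compiled dist/openai.js changes above. One practical consequence for callers: with the 'error' emit in the outer catch commented out, a request that fails before the stream starts is reported only through the resolved value. A small illustrative sketch (the gpt parameter type is an assumption, reduced to the one method this diff shows):

// Illustrative only: detect a pre-stream failure from the resolved value,
// since 3.2.0 no longer emits an 'error' event from the outer catch block.
async function startChat(
  gpt: { chatRequestInStream(text: string, options: any, axiosOptions: any): Promise<any> },
  prompt: string
): Promise<number> {
  const result = await gpt.chatRequestInStream(prompt, {}, { timeout: 60000 });
  if (!result.successed) {
    console.warn(`request ${result.requestid} failed before streaming started`);
  }
  return result.requestid;
}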