doomiaichat 6.0.4 → 6.0.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/azureai.js CHANGED
@@ -93,16 +93,19 @@ class AzureAI extends openaibase_1.default {
  presencePenalty: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.presence_penalty) || this.presence_penalty),
  frequencyPenalty: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.frequency_penalty) || this.frequency_penalty),
  n: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.replyCounts) || 1) || 1,
- tools: callChatOption.tools,
- toolChoice: (callChatOption.tool_choice || 'none'),
+ tools: ((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.enableToolCall) === 1 && (callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.tools)) ? callChatOption.tools : undefined,
+ toolChoice: (callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.enableToolCall) === 1 ? 'auto' : undefined
  });
  const { promptTokens: prompt_tokens, completionTokens: completion_tokens, totalTokens: total_tokens } = response.usage;
  let rebuildChoice = [];
  for (const msg of response.choices) {
- ///, contentFilterResults: content_filter
  const { index, finishReason: finish_reason, message } = msg;
  rebuildChoice.push({ index, finish_reason, message });
  }
+ // if (response.data.choices[0].finish_reason === 'content_filter') {
+ // console.log('content_filter');
+ // return { successed: false, error: 'content_filter' };
+ // }
  return { successed: true, message: rebuildChoice, usage: { prompt_tokens, completion_tokens, total_tokens } };
  }
  catch (error) {
@@ -157,7 +160,7 @@ class AzureAI extends openaibase_1.default {
  currentIndex = toolCalls[0].index;
  has_tool_calls = 1;
  // check whether the index has changed
- console.log('currentIndex,previous_index', currentIndex, previous_index);
+ //console.log('currentIndex,previous_index', currentIndex, previous_index)
  if (currentIndex !== previous_index) {
  tool_calls.push({
  id: toolCalls[0].id,
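
Note on the Azure change above: tools and toolChoice are no longer passed through unconditionally; both are sent only when the caller sets enableToolCall to 1, and the previous pass-through of tool_choice is replaced by a hard-coded 'auto'. A minimal TypeScript sketch of that gating, using a guessed CallChatOption shape (the package's real option type lives in src/declare.ts and is not part of this diff):

// Sketch only; the CallChatOption fields here are assumptions, not the package's declared type.
interface ToolDefinition {
  type: 'function';
  function: { name: string; description?: string; parameters?: Record<string, unknown> };
}
interface CallChatOption {
  enableToolCall?: number;   // 1 switches tool calling on
  tools?: ToolDefinition[];  // forwarded only when enabled
}
function buildToolOptions(callChatOption?: CallChatOption) {
  const enabled = callChatOption?.enableToolCall === 1;
  return {
    // send tools only when tool calling is enabled and tools were actually provided
    tools: enabled && callChatOption?.tools ? callChatOption.tools : undefined,
    // the diff hard-codes 'auto' instead of passing callChatOption.tool_choice through
    toolChoice: enabled ? ('auto' as const) : undefined,
  };
}
// With enableToolCall unset, both fields stay undefined and the request carries no tool options.
console.log(buildToolOptions({ enableToolCall: 1, tools: [{ type: 'function', function: { name: 'getWeather' } }] }));
console.log(buildToolOptions({ tools: [{ type: 'function', function: { name: 'getWeather' } }] }));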
package/dist/openai.js CHANGED
@@ -81,10 +81,11 @@ class OpenAIGpt extends openaibase_1.default {
  presence_penalty: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.presence_penalty) || this.presence_penalty),
  frequency_penalty: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.frequency_penalty) || this.frequency_penalty),
  n: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.replyCounts) || 1) || 1,
- tools: ((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.enableToolCall) === 1 && (callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.tools)) ? callChatOption.tools : undefined,
- tool_choice: (callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.enableToolCall) === 1 ? 'auto' : undefined,
+ // tools: (callChatOption?.enableToolCall === 1 && callChatOption?.tools) ? callChatOption.tools : undefined,
+ // tool_choice: callChatOption?.enableToolCall === 1 ? 'auto' : undefined,
  }, axiosOption);
- return { successed: true, message: response.data.choices, usage: response.data.usage };
+ // console.log('response.data', response)
+ return { successed: true, message: response.choices, usage: response.usage };
  }
  catch (error) {
  console.log('result is error ', error);
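
The non-streaming return above switches from response.data.choices to response.choices, consistent with the call resolving to the completion object itself (the official openai v4 SDK returns choices and usage at the top level) rather than an axios-style { data } wrapper; the tools / tool_choice options are also commented out on this OpenAI path in 6.0.6. A hedged sketch of the resulting shape, with a placeholder model name:

// Sketch only; assumes the official openai v4 SDK, where chat.completions.create
// resolves directly to the completion object (no .data wrapper).
import OpenAI from 'openai';

async function chatOnce(client: OpenAI, content: string) {
  const response = await client.chat.completions.create({
    model: 'gpt-4o-mini', // placeholder model name, not taken from the package
    messages: [{ role: 'user', content }],
  });
  // choices and usage live at the top level of the resolved value,
  // which is why the diff drops the .data indirection.
  return { successed: true, message: response.choices, usage: response.usage };
}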
@@ -106,17 +107,10 @@ class OpenAIGpt extends openaibase_1.default {
  if (!this.aiApi) {
  this.aiApi = this.createOpenAI(this.apiKey);
  }
- // const DATA_END_TAG = `"usage":null}`
- let message = typeof (chatText) == 'string' ?
- [{ role: 'user', content: chatText }] : chatText;
- //axiosOption = Object.assign({}, axiosOption || { timeout: 60000 }, { responseType: 'stream' })
+ let message = typeof (chatText) == 'string' ? [{ role: 'user', content: chatText }] : chatText;
  axiosOption = Object.assign({}, axiosOption || { timeout: 60000 });
  let requestid = Math.ceil(Math.random() * (new Date().getTime() * Math.random()) / 1000);
  try {
- // let finishreason: any = null, usage: any = null,errtxt = '';
- /// so the returned requestid is easy to identify
- // console.log('model', callChatOption?.model,this.chatModel,)
- //const response: any = await this.aiApi.chat.completions.create({
  const response = yield this.aiApi.chat.completions.create({
  model: (callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.model) || this.chatModel,
  messages: message,
@@ -131,15 +125,39 @@ class OpenAIGpt extends openaibase_1.default {
  stream: true
  }, axiosOption);
  let replytext = [];
+ let has_tool_calls = 0, currentIndex, previous_index = -1, tool_calls = []; // use an array to store the tool calls
  try {
  for (var _d = true, response_1 = __asyncValues(response), response_1_1; response_1_1 = yield response_1.next(), _a = response_1_1.done, !_a;) {
  _c = response_1_1.value;
  _d = false;
  try {
  const chunk = _c;
- const [choice] = chunk.choices, { finish_reason: finishreason, index, usage } = choice, { content } = choice.delta;
- replytext.push(content);
- let output = { successed: true, requestid, segment: content, text: replytext.join(''), finish_reason: finishreason, index, usage };
+ const [choice] = chunk.choices, { finish_reason: finishreason, index, usage } = choice, { content, tool_calls: toolCalls } = choice.delta;
+ if (toolCalls && toolCalls.length) {
+ currentIndex = toolCalls[0].index;
+ has_tool_calls = 1;
+ // check whether the index has changed
+ //console.log('currentIndex,previous_index', currentIndex, previous_index)
+ if (currentIndex !== previous_index) {
+ tool_calls.push({
+ id: toolCalls[0].id,
+ type: 'function',
+ function: {
+ name: toolCalls[0].function.name,
+ arguments: toolCalls[0].function.arguments
+ }
+ });
+ // update previous_index for the next comparison
+ previous_index = currentIndex;
+ }
+ else {
+ tool_calls[previous_index].function.arguments += toolCalls[0].function.arguments;
+ }
+ }
+ else {
+ replytext.push(content);
+ }
+ let output = { successed: true, requestid, segment: content, text: replytext.join(''), finish_reason: finishreason, index, usage, has_tool_calls: has_tool_calls, tool_calls: tool_calls };
  if (attach)
  output = Object.assign({}, output, attach);
  this.emit(finishreason ? 'chatdone' : 'chattext', output);
@@ -156,40 +174,6 @@ class OpenAIGpt extends openaibase_1.default {
  }
  finally { if (e_1) throw e_1.error; }
  }
- // response.data.on('data', (data: any) => {
- // const lines = data.toString().split('\n').filter((line: string) => line.trim() !== '');
- // /// the finish reason has already been returned
- // if (finishreason) return;
- // let alltext = (errtxt +lines.join('')).split('data:');
- // errtxt = '';
- // for (const line of alltext) {
- // let txt = line.trim();
- // if (!txt) continue;
- // if (txt === '[DONE]') {
- // let output = { successed: true, requestid, text: replytext.join(''), finish_reason: 'stop', usage };
- // if (attach) output = Object.assign({}, output, attach);
- // this.emit('chatdone', output)
- // return; // Stream finished
- // }
- // try {
- // ///{ delta: { content: '$\\' }, index: 0, finish_reason: null }
- // /// send it out
- // const parsed = JSON.parse(txt);
- // /// a valid chunk has been returned, so this variable can be reset
- // finishreason = parsed.choices[0].finish_reason;
- // usage = parsed.usage;
- // let streamtext = parsed.choices[0].delta.content;
- // replytext.push(streamtext);
- // let output = { successed: true, requestid, segment: streamtext, text: replytext.join(''), finish_reason: finishreason, index: parsed.choices[0].index, usage };
- // if (attach) output = Object.assign({}, output, attach);
- // this.emit(finishreason ? 'chatdone' : 'chattext', output)
- // if (finishreason) return;
- // } catch (error) {
- // errtxt+=txt; /// this JSON fragment is incomplete; carry it over for when the next chunk of the stream arrives
- // this.emit('chaterror', { successed: false, requestid, error: 'JSON parse stream message', errtxt });
- // }
- // }
- // });
  return { successed: true, requestid };
  }
  catch (error) {
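
The streaming change above accumulates tool calls from delta chunks: the first chunk for a given tool-call index carries the id and function name, and later chunks with the same index only append fragments of the JSON arguments string. A self-contained TypeScript sketch of that accumulation pattern (the chunk shapes follow OpenAI streaming tool_call deltas; the helper name and simplified types are illustrative, and, like the diff, it assumes tool-call indexes start at 0 and arrive contiguously):

// Sketch only: the delta-accumulation pattern the streaming hunk above appears to implement.
interface ToolCallDelta {
  index: number;
  id?: string;
  function?: { name?: string; arguments?: string };
}
interface AccumulatedToolCall {
  id?: string;
  type: 'function';
  function: { name?: string; arguments: string };
}
function accumulateToolCalls(chunks: ToolCallDelta[][]): AccumulatedToolCall[] {
  const toolCalls: AccumulatedToolCall[] = [];
  let previousIndex = -1;
  for (const deltas of chunks) {
    if (!deltas.length) continue;
    const delta = deltas[0]; // like the diff, only the first delta per chunk is inspected
    if (delta.index !== previousIndex) {
      // a new index opens a new tool call entry
      toolCalls.push({
        id: delta.id,
        type: 'function',
        function: { name: delta.function?.name, arguments: delta.function?.arguments ?? '' },
      });
      previousIndex = delta.index;
    } else {
      // same index: argument fragments are concatenated onto the existing entry
      toolCalls[previousIndex].function.arguments += delta.function?.arguments ?? '';
    }
  }
  return toolCalls;
}
// Two chunks describing one call; the second only extends the arguments string.
console.log(accumulateToolCalls([
  [{ index: 0, id: 'call_1', function: { name: 'getWeather', arguments: '{"city":' } }],
  [{ index: 0, function: { arguments: '"Paris"}' } }],
]));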
@@ -65,7 +65,7 @@ class OpenAIProxy extends openai_1.default {
  },
  responseType: 'stream',
  };
- // let lastResponse:any = null;
+ let unCompleteSegment = '';
  (0, axios_1.default)(opts)
  .then(res => {
  res.data.on('data', (chunk) => {
@@ -75,17 +75,24 @@ class OpenAIProxy extends openai_1.default {
  if (ERROR_RESPONSE.includes(streamText)) {
  return this.emit('requesterror', { successed: false, requestid, error: 'Request Remote OpenAI Error : ' + streamText });
  }
- const fullData = streamText.split('*&$');
+ const fullData = (unCompleteSegment + streamText).split('*&$');
+ unCompleteSegment = '';
  // console.log('fullData', fullData.length);
  for (const segment of fullData) {
  if (!segment)
  continue;
  try {
+ //// if the received segment is not a complete JSON fragment, keep it as the start of the next data chunk
+ if (!segment.endsWith('}')) {
+ unCompleteSegment = segment;
+ break;
+ }
  const objData = Object.assign(JSON.parse(segment), attach);
  this.emit(objData.finish_reason ? 'chatdone' : 'chattext', objData);
  }
  catch (errParse) {
- this.emit('chaterror', { successed: false, requestid, error: 'JSON parse stream message' + errParse });
+ break;
+ //this.emit('chaterror', { successed: false, requestid, error: 'JSON parse stream message' + errParse });
  }
  }
  }
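
The proxy change above stops treating a truncated stream segment as a fatal parse error: the buffer is split on the '*&$' delimiter the proxy already uses, a trailing segment that does not end with '}' is held in unCompleteSegment and prepended to the next data event, and JSON parse failures now break out silently instead of emitting 'chaterror'. A small sketch of that carry-over buffering (function names here are illustrative, not the package's API):

// Sketch only: carry-over buffering for '*&$'-delimited JSON segments, as in the hunk above.
function createSegmentParser(onMessage: (obj: unknown) => void) {
  let unCompleteSegment = '';
  return function handleChunk(chunk: string): void {
    const fullData = (unCompleteSegment + chunk).split('*&$');
    unCompleteSegment = '';
    for (const segment of fullData) {
      if (!segment) continue;
      // a segment that does not close with '}' is assumed to be cut off mid-JSON;
      // keep it and prepend it to the next chunk instead of raising a parse error
      if (!segment.endsWith('}')) {
        unCompleteSegment = segment;
        break;
      }
      try {
        onMessage(JSON.parse(segment));
      } catch {
        break; // parse failures are swallowed, mirroring the commented-out 'chaterror' emit
      }
    }
  };
}
// A JSON object split across two chunks is reassembled and parsed on the second event.
const feed = createSegmentParser(obj => console.log(obj));
feed('{"segment":"Hel');
feed('lo"}*&${"segment":"world","finish_reason":"stop"}');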
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "doomiaichat",
- "version": "6.0.4",
+ "version": "6.0.6",
  "description": "Doomisoft OpenAI",
  "main": "dist/index.js",
  "scripts": {
package/src/azureai.ts CHANGED
@@ -3,7 +3,7 @@
  */
  import OpenAIBase from "./openaibase"
  import { AzureOpenAIPatameters, ChatReponse, EmbeddingResult, OpenAIApiParameters, request } from "./declare";
- import { OpenAIClient, AzureKeyCredential, ChatCompletionsNamedToolSelection } from "@azure/openai";
+ import { OpenAIClient, AzureKeyCredential } from "@azure/openai";
  export default class AzureAI extends OpenAIBase<OpenAIClient> {

  protected readonly azureSetting: AzureOpenAIPatameters;
@@ -80,16 +80,19 @@ export default class AzureAI extends OpenAIBase<OpenAIClient> {
  presencePenalty: Number(callChatOption?.presence_penalty || this.presence_penalty),
  frequencyPenalty: Number(callChatOption?.frequency_penalty || this.frequency_penalty),
  n: Number(callChatOption?.replyCounts || 1) || 1,
- tools: callChatOption.tools,
- toolChoice: (callChatOption.tool_choice || 'none') as ChatCompletionsNamedToolSelection,
+ tools: (callChatOption?.enableToolCall === 1 && callChatOption?.tools) ? callChatOption.tools : undefined,
+ toolChoice: callChatOption?.enableToolCall === 1 ? 'auto' : undefined
  });
  const { promptTokens: prompt_tokens, completionTokens: completion_tokens, totalTokens: total_tokens } = response.usage
  let rebuildChoice = [];
  for (const msg of response.choices) {
- ///, contentFilterResults: content_filter
  const { index, finishReason: finish_reason, message } = msg
  rebuildChoice.push({ index, finish_reason, message })
  }
+ // if (response.data.choices[0].finish_reason === 'content_filter') {
+ // console.log('content_filter');
+ // return { successed: false, error: 'content_filter' };
+ // }
  return { successed: true, message: rebuildChoice, usage: { prompt_tokens, completion_tokens, total_tokens } };

  } catch (error) {
@@ -139,7 +142,7 @@ export default class AzureAI extends OpenAIBase<OpenAIClient> {
  currentIndex = toolCalls[0].index;
  has_tool_calls = 1;
  // check whether the index has changed
- console.log('currentIndex,previous_index', currentIndex, previous_index)
+ //console.log('currentIndex,previous_index', currentIndex, previous_index)
  if (currentIndex !== previous_index) {
  tool_calls.push({
  id: toolCalls[0].id,
package/src/openai.ts CHANGED
@@ -57,10 +57,11 @@ export default class OpenAIGpt extends OpenAIBase<OpenAI> {
  presence_penalty: Number(callChatOption?.presence_penalty || this.presence_penalty),
  frequency_penalty: Number(callChatOption?.frequency_penalty || this.frequency_penalty),
  n: Number(callChatOption?.replyCounts || 1) || 1,
- tools: (callChatOption?.enableToolCall === 1 && callChatOption?.tools) ? callChatOption.tools : undefined,
- tool_choice: callChatOption?.enableToolCall === 1 ? 'auto' : undefined,
+ // tools: (callChatOption?.enableToolCall === 1 && callChatOption?.tools) ? callChatOption.tools : undefined,
+ // tool_choice: callChatOption?.enableToolCall === 1 ? 'auto' : undefined,
  }, axiosOption);
- return { successed: true, message: response.data.choices, usage: response.data.usage };
+ // console.log('response.data', response)
+ return { successed: true, message: response.choices, usage: response.usage };
  } catch (error) {
  console.log('result is error ', error)
  return { successed: false, error };
@@ -78,17 +79,10 @@ export default class OpenAIGpt extends OpenAIBase<OpenAI> {
  if (!this.aiApi) {
  this.aiApi = this.createOpenAI(this.apiKey);
  }
- // const DATA_END_TAG = `"usage":null}`
- let message: Array<any> = typeof (chatText) == 'string' ?
- [{ role: 'user', content: chatText }] : chatText;
- //axiosOption = Object.assign({}, axiosOption || { timeout: 60000 }, { responseType: 'stream' })
+ let message: Array<any> = typeof (chatText) == 'string' ? [{ role: 'user', content: chatText }] : chatText;
  axiosOption = Object.assign({}, axiosOption || { timeout: 60000 })
  let requestid = Math.ceil(Math.random() * (new Date().getTime() * Math.random()) / 1000);
  try {
- // let finishreason: any = null, usage: any = null,errtxt = '';
- /// so the returned requestid is easy to identify
- // console.log('model', callChatOption?.model,this.chatModel,)
- //const response: any = await this.aiApi.chat.completions.create({
  const response: any = await this.aiApi.chat.completions.create(
  {
  model: callChatOption?.model || this.chatModel,
@@ -104,49 +98,37 @@ export default class OpenAIGpt extends OpenAIBase<OpenAI> {
  stream:true
  }, axiosOption);
  let replytext: string[] = [];
+ let has_tool_calls = 0, currentIndex, previous_index = -1, tool_calls: any[] = [];// use an array to store the tool calls
  for await (const chunk of response) {
  const [choice] = chunk.choices,
  { finish_reason:finishreason, index, usage } = choice,
- { content} = choice.delta;
- replytext.push(content);
- let output = { successed: true, requestid, segment: content, text: replytext.join(''), finish_reason: finishreason, index, usage };
+ { content, tool_calls:toolCalls } = choice.delta;
+ if (toolCalls && toolCalls.length) {
+ currentIndex = toolCalls[0].index;
+ has_tool_calls = 1;
+ // check whether the index has changed
+ //console.log('currentIndex,previous_index', currentIndex, previous_index)
+ if (currentIndex !== previous_index) {
+ tool_calls.push({
+ id: toolCalls[0].id,
+ type: 'function',
+ function: {
+ name: toolCalls[0].function.name,
+ arguments: toolCalls[0].function.arguments
+ }
+ });
+ // update previous_index for the next comparison
+ previous_index = currentIndex;
+ } else {
+ tool_calls[previous_index].function.arguments += toolCalls[0].function.arguments
+ }
+ } else {
+ replytext.push(content);
+ }
+ let output = { successed: true, requestid, segment: content, text: replytext.join(''), finish_reason: finishreason, index, usage, has_tool_calls: has_tool_calls, tool_calls: tool_calls };
  if (attach) output = Object.assign({}, output, attach);
  this.emit(finishreason ? 'chatdone' : 'chattext', output)
  }
- // response.data.on('data', (data: any) => {
- // const lines = data.toString().split('\n').filter((line: string) => line.trim() !== '');
- // /// the finish reason has already been returned
- // if (finishreason) return;
- // let alltext = (errtxt +lines.join('')).split('data:');
- // errtxt = '';
- // for (const line of alltext) {
- // let txt = line.trim();
- // if (!txt) continue;
- // if (txt === '[DONE]') {
- // let output = { successed: true, requestid, text: replytext.join(''), finish_reason: 'stop', usage };
- // if (attach) output = Object.assign({}, output, attach);
- // this.emit('chatdone', output)
- // return; // Stream finished
- // }
- // try {
- // ///{ delta: { content: '$\\' }, index: 0, finish_reason: null }
- // /// send it out
- // const parsed = JSON.parse(txt);
- // /// a valid chunk has been returned, so this variable can be reset
- // finishreason = parsed.choices[0].finish_reason;
- // usage = parsed.usage;
- // let streamtext = parsed.choices[0].delta.content;
- // replytext.push(streamtext);
- // let output = { successed: true, requestid, segment: streamtext, text: replytext.join(''), finish_reason: finishreason, index: parsed.choices[0].index, usage };
- // if (attach) output = Object.assign({}, output, attach);
- // this.emit(finishreason ? 'chatdone' : 'chattext', output)
- // if (finishreason) return;
- // } catch (error) {
- // errtxt+=txt; /// this JSON fragment is incomplete; carry it over for when the next chunk of the stream arrives
- // this.emit('chaterror', { successed: false, requestid, error: 'JSON parse stream message', errtxt });
- // }
- // }
- // });
  return { successed: true, requestid }
  } catch (error) {
  this.emit('requesterror', { successed: false, requestid, error: 'call axios faied ' + error });
@@ -56,7 +56,7 @@ export default class OpenAIProxy extends OpenAIGpt {
  },
  responseType: 'stream',
  }
- // let lastResponse:any = null;
+ let unCompleteSegment:string = '';
  axios(opts)
  .then(res => {
  res.data.on('data', (chunk:any) => {
@@ -66,15 +66,23 @@ export default class OpenAIProxy extends OpenAIGpt {
  if (ERROR_RESPONSE.includes(streamText)) {
  return this.emit('requesterror', { successed: false, requestid, error: 'Request Remote OpenAI Error : ' + streamText });
  }
- const fullData = streamText.split('*&$')
+ const fullData = (unCompleteSegment +streamText).split('*&$')
+ unCompleteSegment = '';
  // console.log('fullData', fullData.length);
  for (const segment of fullData){
  if (!segment) continue;
  try {
+ //// if the received segment is not a complete JSON fragment, keep it as the start of the next data chunk
+ if (!segment.endsWith('}')) {
+ unCompleteSegment = segment;
+ break;
+ }
  const objData = Object.assign(JSON.parse(segment), attach);
  this.emit(objData.finish_reason?'chatdone':'chattext', objData);
  } catch (errParse) {
+
+ break;
+ //this.emit('chaterror', { successed: false, requestid, error: 'JSON parse stream message' + errParse });
  }
  }