doomiaichat 5.0.0 → 5.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/declare.d.ts CHANGED
@@ -31,6 +31,9 @@ export interface OpenAIApiParameters {
  'model'?: string;
  'maxtoken'?: number;
  'temperature'?: number;
+ 'top_p'?: number;
+ 'presence_penalty'?: number;
+ 'frequency_penalty'?: number;
  'replyCounts'?: number;
  }
  /**
@@ -32,12 +32,12 @@ exports.GptProviderEnum = {
  * @returns
  */
  function createGpt(provider, apikey, setting) {
- let { model, maxtoken, temperature, endpoint, engine, version, embedding } = setting || {};
+ let { model, maxtoken, temperature, endpoint, engine, version, embedding, top_p, presence_penalty, frequency_penalty } = setting || {};
  switch (provider) {
  case exports.GptProviderEnum.OPENAI:
- return new openai_1.default(apikey + '', { model, maxtoken, temperature, embedding });
+ return new openai_1.default(apikey + '', { model, maxtoken, temperature, embedding, top_p, presence_penalty, frequency_penalty });
  case exports.GptProviderEnum.MICROSOFT:
- return new azureai_1.default(apikey + '', { endpoint, engine, version }, { model, maxtoken, temperature, embedding });
+ return new azureai_1.default(apikey + '', { endpoint, engine, version }, { model, maxtoken, temperature, embedding, top_p, presence_penalty, frequency_penalty });
  case exports.GptProviderEnum.BAIDU:
  let cred = typeof (apikey) === 'string' ? { apikey, securitykey: apikey } : apikey;
  return new baiduai_1.default(cred);
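
For orientation, here is a minimal sketch of how the new sampling options flow through the factory shown above. It assumes the package's main entry (dist/index.js) re-exports createGpt and GptProviderEnum; the import path and the API key source are placeholders.

  import { createGpt, GptProviderEnum } from 'doomiaichat'; // assumed re-export from the main entry

  // All three new fields are optional; anything left unset falls back to the
  // defaults assigned in the OpenAIGpt constructor (see dist/openai.js below).
  const gpt = createGpt(GptProviderEnum.OPENAI, process.env.OPENAI_API_KEY ?? '', {
      model: 'gpt-3.5-turbo',
      maxtoken: 2048,
      temperature: 0.9,
      top_p: 0.95,
      presence_penalty: 0.5,
      frequency_penalty: 0.5,
  });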
package/dist/openai.d.ts CHANGED
@@ -6,6 +6,9 @@ export default class OpenAIGpt extends GptBase {
  protected aiApi: OpenAIApi | undefined;
  protected readonly chatModel: string;
  protected readonly maxtoken: number;
+ protected readonly top_p: number;
+ protected readonly presence_penalty: number;
+ protected readonly frequency_penalty: number;
  protected readonly temperature: number;
  protected readonly embeddingmodel: string;
  /**
package/dist/openai.js CHANGED
@@ -25,7 +25,10 @@ class OpenAIGpt extends gptbase_1.default {
  this.apiKey = apiKey;
  this.chatModel = apiOption.model || 'gpt-3.5-turbo';
  this.maxtoken = apiOption.maxtoken || 2048;
+ this.top_p = apiOption.top_p || 0.95;
  this.temperature = apiOption.temperature || 0.9;
+ this.presence_penalty = apiOption.presence_penalty || 0;
+ this.frequency_penalty = apiOption.frequency_penalty || 0;
  this.embeddingmodel = apiOption.embedding || 'text-embedding-ada-002';
  }
  /**
@@ -79,6 +82,9 @@ class OpenAIGpt extends gptbase_1.default {
  messages: message,
  temperature: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.temperature) || this.temperature),
  max_tokens: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.maxtoken) || this.maxtoken),
+ top_p: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.top_p) || this.top_p),
+ presence_penalty: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.presence_penalty) || this.presence_penalty),
+ frequency_penalty: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.frequency_penalty) || this.frequency_penalty),
  n: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.replyCounts) || 1) || 1
  }, axiosOption);
  // console.log('finish_reason==>', response.data.choices)
@@ -121,6 +127,9 @@ class OpenAIGpt extends gptbase_1.default {
  model: (callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.model) || this.chatModel,
  messages: message,
  stream: true,
+ top_p: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.top_p) || this.top_p),
+ presence_penalty: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.presence_penalty) || this.presence_penalty),
+ frequency_penalty: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.frequency_penalty) || this.frequency_penalty),
  temperature: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.temperature) || this.temperature),
  max_tokens: Number((callChatOption === null || callChatOption === void 0 ? void 0 : callChatOption.maxtoken) || this.maxtoken)
  }, axiosOption);
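
The constructor hunk above sets the instance-level defaults (top_p 0.95, temperature 0.9, both penalties 0), and the two request-building hunks let a per-call option object override them. A short sketch of direct construction, assuming OpenAIGpt is the default export of dist/openai as the factory code suggests:

  import OpenAIGpt from 'doomiaichat/dist/openai'; // path assumed from the compiled layout in this diff

  const gpt = new OpenAIGpt('sk-...', {
      model: 'gpt-3.5-turbo',
      top_p: 0.8,            // overrides the 0.95 default
      presence_penalty: 0.2, // default would be 0
      // temperature, maxtoken, frequency_penalty, embedding fall back to the constructor defaults
  });

Note that the fallbacks use || rather than ??, so passing an explicit 0 for top_p or temperature resolves to the default (0.95 or 0.9) instead of zero; the two penalties already default to 0, so the distinction only matters for the sampling knobs.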
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "doomiaichat",
- "version": "5.0.0",
+ "version": "5.1.0",
  "description": "Doomisoft OpenAI",
  "main": "dist/index.js",
  "scripts": {
package/src/declare.ts CHANGED
@@ -34,6 +34,9 @@ export interface OpenAIApiParameters {
  'model'?: string, ///model name
  'maxtoken'?: number; ///maximum number of tokens to return
  'temperature'?: number;
+ 'top_p'?:number;
+ 'presence_penalty'?: number;
+ 'frequency_penalty'?: number;
  'replyCounts'?: number; ///how many answers to return
  }
  /**
@@ -28,12 +28,12 @@ export type GptProviderEnum = typeof GptProviderEnum[keyof typeof GptProviderEnu
  * @returns
  */
  export function createGpt(provider: GptProviderEnum, apikey: string|ApiCredential, setting: any): GptBase | null {
- let { model, maxtoken, temperature, endpoint, engine, version, embedding } = setting || {};
+ let { model, maxtoken, temperature, endpoint, engine, version, embedding, top_p, presence_penalty, frequency_penalty } = setting || {};
  switch (provider) {
  case GptProviderEnum.OPENAI:
- return new OpenAIGpt(apikey + '', { model, maxtoken, temperature, embedding });
+ return new OpenAIGpt(apikey + '', { model, maxtoken, temperature, embedding, top_p, presence_penalty, frequency_penalty });
  case GptProviderEnum.MICROSOFT:
- return new AzureAI(apikey + '', { endpoint, engine, version }, { model, maxtoken, temperature, embedding }, );
+ return new AzureAI(apikey + '', { endpoint, engine, version }, { model, maxtoken, temperature, embedding, top_p, presence_penalty, frequency_penalty }, );
  case GptProviderEnum.BAIDU:
  let cred: ApiCredential = typeof (apikey) === 'string' ? { apikey, securitykey: apikey } : apikey
  return new BaiduWenXinAI(cred);
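
Because OpenAIApiParameters is exported, a settings object can be typed against it so the three new keys are checked by the compiler. The import path below is an assumption based on the file layout in this diff:

  import { OpenAIApiParameters } from 'doomiaichat/dist/declare'; // path assumed

  const setting: OpenAIApiParameters = {
      model: 'gpt-3.5-turbo',
      maxtoken: 1024,
      temperature: 0.7,
      top_p: 0.9,
      presence_penalty: 0.1,
      frequency_penalty: 0.1,
      replyCounts: 1,
  };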
package/src/openai.ts CHANGED
@@ -7,6 +7,9 @@ export default class OpenAIGpt extends GptBase {
  protected aiApi: OpenAIApi | undefined;
  protected readonly chatModel: string;
  protected readonly maxtoken: number;
+ protected readonly top_p: number;
+ protected readonly presence_penalty:number;
+ protected readonly frequency_penalty: number;
  protected readonly temperature: number;
  protected readonly embeddingmodel: string;
  /**
@@ -20,7 +23,10 @@ export default class OpenAIGpt extends GptBase {
  this.apiKey = apiKey;
  this.chatModel = apiOption.model || 'gpt-3.5-turbo';
  this.maxtoken = apiOption.maxtoken || 2048;
+ this.top_p = apiOption.top_p || 0.95;
  this.temperature = apiOption.temperature || 0.9;
+ this.presence_penalty = apiOption.presence_penalty || 0;
+ this.frequency_penalty = apiOption.frequency_penalty || 0;
  this.embeddingmodel = apiOption.embedding || 'text-embedding-ada-002';
  }
  /**
@@ -70,6 +76,9 @@ export default class OpenAIGpt extends GptBase {
  messages: message,
  temperature: Number(callChatOption?.temperature || this.temperature),
  max_tokens: Number(callChatOption?.maxtoken || this.maxtoken),
+ top_p: Number(callChatOption?.top_p || this.top_p),
+ presence_penalty: Number(callChatOption?.presence_penalty || this.presence_penalty),
+ frequency_penalty: Number(callChatOption?.frequency_penalty || this.frequency_penalty),
  n: Number(callChatOption?.replyCounts || 1) || 1
  }, axiosOption);
  // console.log('finish_reason==>', response.data.choices)
@@ -109,6 +118,9 @@ export default class OpenAIGpt extends GptBase {
  model: callChatOption?.model || this.chatModel,
  messages: message,
  stream: true,
+ top_p: Number(callChatOption?.top_p || this.top_p),
+ presence_penalty: Number(callChatOption?.presence_penalty || this.presence_penalty),
+ frequency_penalty: Number(callChatOption?.frequency_penalty || this.frequency_penalty),
  temperature: Number(callChatOption?.temperature || this.temperature),
  max_tokens: Number(callChatOption?.maxtoken || this.maxtoken)
  }, axiosOption);
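
The streaming and non-streaming request builders above read the same three options from callChatOption first and only then from the instance, so they can be tuned per call. A sketch of such an option object, using only field names that appear in the hunks above (the chat method that accepts it is not part of this diff, so no call is shown):

  // Per-call overrides; any field left undefined falls back to the instance value via ||.
  const callChatOption = {
      model: 'gpt-3.5-turbo',
      temperature: 0.4,
      top_p: 0.5,
      presence_penalty: 1.0,
      frequency_penalty: 0.6,
      maxtoken: 512,
      replyCounts: 1,
  };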