doomiaichat 5.1.0 → 6.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,102 @@
+ "use strict";
+ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+     function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+     return new (P || (P = Promise))(function (resolve, reject) {
+         function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+         function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+         function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+         step((generator = generator.apply(thisArg, _arguments || [])).next());
+     });
+ };
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+     return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const openai_1 = __importDefault(require("./openai"));
+ const axios_1 = __importDefault(require("axios"));
+ const ERROR_RESPONSE = ['[AUTHORIZATION NEEDED]', '[AUTHORIZATION ERROR]', '[BODY ERROR]', '[REQUEST ERROR]'];
+ class OpenAIProxy extends openai_1.default {
+     constructor(apiKey, proxyOption, apiOption = {}) {
+         super(apiKey, apiOption);
+         this.proxySetting = proxyOption;
+     }
+     /**
+      * Overrides the chatRequest method
+      * @param chatText
+      * @param callChatOption
+      * @param axiosOption
+      */
+     chatRequest(chatText, _paramOption, axiosOption) {
+         return __awaiter(this, void 0, void 0, function* () {
+             const opts = Object.assign({ headers: {
+                     'Content-Type': 'application/json',
+                     'authorization': `Bearer ${this.apiKey}`
+                 }, method: 'post', url: this.proxySetting.serviceurl, data: {
+                     chatText,
+                     option: _paramOption
+                 } }, axiosOption);
+             const requestResult = yield (0, axios_1.default)(opts);
+             return requestResult.data;
+         });
+     }
+     /**
+      * Overrides the chatRequestInStream method
+      * @param chatText
+      * @param callChatOption
+      * @param attach
+      * @param axiosOption
+      */
+     chatRequestInStream(chatText, callChatOption, attach, axiosOption) {
+         return __awaiter(this, void 0, void 0, function* () {
+             // const decoder = new TextDecoder();
+             //overContent,
+             let streamText, requestid = Math.ceil(Math.random() * (new Date().getTime() * Math.random()) / 1000);
+             const opts = {
+                 headers: {
+                     'Content-Type': 'application/json',
+                     'authorization': `Bearer ${this.apiKey}`
+                 },
+                 method: 'post',
+                 url: this.proxySetting.serviceurl + '/stream',
+                 data: {
+                     messages: chatText,
+                     option: callChatOption,
+                     axiosOption
+                 },
+                 responseType: 'stream',
+             };
+             (0, axios_1.default)(opts)
+                 .then(res => {
+                 res.data.on('data', (chunk) => {
+                     streamText = chunk.toString(); //decoder.decode(chunk);
+                     if (streamText) {
+                         /// The response reports a request error
+                         if (ERROR_RESPONSE.includes(streamText)) {
+                             return this.emit('requesterror', { successed: false, requestid, error: 'Request Remote OpenAI Error : ' + streamText });
+                         }
+                         /// The stream has fully finished
+                         if (streamText === '[END]') {
+                             return this.emit('chatdone', streamText || {});
+                         }
+                         else {
+                             /// Text is still being streamed
+                             try {
+                                 streamText = JSON.parse(streamText);
+                                 // overContent = JSON.parse(streamText);
+                                 return this.emit('chattext', Object.assign(streamText, attach));
+                             }
+                             catch (errParse) {
+                                 return this.emit('chaterror', { successed: false, requestid, error: 'JSON parse stream message', streamText });
+                             }
+                         }
+                     }
+                     return;
+                 });
+                 //res.data.on('end', () => { this.emit('chatdone', Object.assign(streamText, attach)); });
+             }).catch(err => {
+                 this.emit('requesterror', { successed: false, requestid, error: 'Axios Error : ' + err });
+             });
+         });
+     }
+ }
+ exports.default = OpenAIProxy;
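The new file above adds an event-emitting proxy client that forwards chat requests to a relay service instead of calling OpenAI directly. A minimal usage sketch, assuming the compiled module is reachable at dist/openaiproxy and that the parent class exposes Node's EventEmitter interface (the emit calls above suggest it does); the import path, API key, and serviceurl below are placeholders:

import OpenAIProxy from 'doomiaichat/dist/openaiproxy';

const proxy = new OpenAIProxy('sk-...', { serviceurl: 'https://relay.example.com/openai' });

// Streaming replies arrive as events rather than as a return value.
proxy.on('chattext', (msg) => console.log('chunk:', msg));
proxy.on('chatdone', () => console.log('stream finished'));
proxy.on('requesterror', (err) => console.error('request failed:', err));
proxy.on('chaterror', (err) => console.error('bad stream payload:', err));

proxy.chatRequestInStream([{ role: 'user', content: 'Hello' }], { model: 'gpt-3.5-turbo' });

// The non-streaming variant POSTs { chatText, option } to serviceurl and resolves with the response body.
proxy.chatRequest('Hello', { model: 'gpt-3.5-turbo' }).then((reply) => console.log(reply));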
@@ -1,4 +1,4 @@
- import { ApiResult, AzureOpenAIPatameters } from "./declare";
+ import { AzureOpenAIPatameters, StabilityOption, StabilityResult } from "./declare";
  import GptBase from "./gptbase";
  export default class StabilityAI extends GptBase {
      protected readonly apiKey: string;
@@ -16,22 +16,3 @@ export default class StabilityAI extends GptBase {
       */
      chatRequest(chatText: string, paramOption: StabilityOption, axiosOption?: any): Promise<StabilityResult>;
  }
- export interface StabilityOption {
-     'cfg_scale'?: number;
-     'clip_guidance_preset'?: string;
-     'height'?: number;
-     'width'?: number;
-     'samples'?: number;
-     'seed'?: number;
-     'steps'?: number;
-     'sampler'?: string;
-     'negative'?: string;
-     'engine'?: string;
-     'endpoint'?: string;
-     'denoising_strength'?: number;
-     'hr_scale'?: number;
- }
- export interface StabilityResult extends ApiResult {
-     'data'?: any;
-     'type'?: string;
- }
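This hunk removes StabilityOption and StabilityResult from the stabilityai module; the later declare.ts hunk re-adds them there. Consumers that imported the types from stabilityai would need an import change along these lines (a sketch; the published sub-paths are assumptions):

// 5.1.0: the types came from the StabilityAI module.
// import StabilityAI, { StabilityOption, StabilityResult } from 'doomiaichat/dist/stabilityai';

// 6.0.1: the types now live in the shared declarations module.
import StabilityAI from 'doomiaichat/dist/stabilityai';
import { StabilityOption, StabilityResult } from 'doomiaichat/dist/declare';

const option: StabilityOption = { width: 512, height: 512, steps: 20 };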
@@ -1,5 +1,9 @@
- import StabilityAI, { StabilityOption, StabilityResult } from "./stabilityai";
- export default class StabilityPlusAI extends StabilityAI {
+ import { AzureOpenAIPatameters, StabilityOption, StabilityResult } from "./declare";
+ import GptBase from "./gptbase";
+ export default class StabilityPlusAI extends GptBase {
+     protected readonly apiKey: string;
+     protected readonly apiOption: StabilityOption;
+     constructor(apiKey: string, _urlOption: AzureOpenAIPatameters, apiOption?: StabilityOption);
      /**
       * Interface for requesting Stable Diffusion image generation
       */
@@ -13,8 +13,17 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
  };
  Object.defineProperty(exports, "__esModule", { value: true });
  const declare_1 = require("./declare");
- const stabilityai_1 = __importDefault(require("./stabilityai"));
- class StabilityPlusAI extends stabilityai_1.default {
+ const gptbase_1 = __importDefault(require("./gptbase"));
+ class StabilityPlusAI extends gptbase_1.default {
+     constructor(apiKey, _urlOption, apiOption = {}) {
+         super();
+         this.apiKey = apiKey;
+         // this.apiSetting = urlOption;
+         this.apiOption = apiOption;
+         // if (!this.apiSetting.endpoint.toLowerCase().startsWith('http')) {
+         //     this.apiSetting.endpoint = 'https://' + this.apiSetting.endpoint;
+         // }
+     }
      /**
       * Interface for requesting Stable Diffusion image generation
       */
@@ -29,7 +38,7 @@ class StabilityPlusAI extends stabilityai_1.default {
          }
      });
      try {
-         let param = Object.assign(Object.assign({}, axiosOption), { method: "post", url: `${paramOption.endpoint || this.apiSetting.endpoint}/sdapi/v1/txt2img`, data: {
+         const requestOption = Object.assign(Object.assign({}, axiosOption), { method: "POST", data: {
              "enable_hr": false,
              "denoising_strength": paramOption.denoising_strength || this.apiOption.denoising_strength || 0.5,
              "firstphase_width": 0,
@@ -57,23 +66,14 @@ class StabilityPlusAI extends stabilityai_1.default {
              "tiling": false,
              "do_not_save_samples": false,
              "do_not_save_grid": false,
-             "negative_prompt": paramOption.negative || '',
-             // "eta": 0,
-             // "s_churn": 0,
-             // "s_tmax": 0,
-             // "s_tmin": 0,
-             // "s_noise": 1,
-             // "script_args": [],
-             // "sampler_index": paramOption.sampler || this.apiOption.sampler || "Euler", //"Euler",
-             // "send_images": true,
-             // "save_images": false,
-             // "alwayson_scripts": {}
-         } });
-         const response = yield (0, declare_1.request)(param);
+             "negative_prompt": paramOption.negative || ''
+         }, url: `${paramOption.endpoint}/sdapi/v1/txt2img` });
+         // console.log('stablity param', requestOption);
+         const response = yield (0, declare_1.request)(requestOption);
          if (response.successed) {
              return { successed: true, type: 'image', data: response.data.images, };
          }
-         console.log('response result ', response.data);
+         // console.log('response result ', response.data)
          return Object.assign({ successed: false }, response.data);
      }
      catch (error) {
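Note the behavioral change in this hunk: the txt2img URL is now built solely from paramOption.endpoint, and the old fallback to this.apiSetting.endpoint is gone, so the Stable Diffusion endpoint has to be supplied per request. A hedged sketch of a call under the new shape (import path, key, endpoint, and the chatRequest signature mirroring StabilityAI are assumptions):

import StabilityPlusAI from 'doomiaichat/dist/stabilityplusai';

const sd = new StabilityPlusAI('api-key', { endpoint: '', engine: '' }); // _urlOption is currently unused by the constructor
sd.chatRequest('a watercolor fox in the snow', {
    endpoint: 'http://127.0.0.1:7860', // required in 6.0.1: no instance-level fallback
    width: 512,
    height: 512,
    steps: 20,
    negative: 'blurry, low quality',
}).then((result) => {
    if (result.successed) {
        console.log('base64 images returned:', result.data.length); // data carries response.data.images per the hunk above
    }
});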
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "doomiaichat",
-   "version": "5.1.0",
+   "version": "6.0.1",
    "description": "Doomisoft OpenAI",
    "main": "dist/index.js",
    "scripts": {
@@ -12,11 +12,12 @@
    "author": "Stephen.Shen",
    "license": "ISC",
    "devDependencies": {
-     "@types/node": "^18.15.11",
+     "@types/node": "^18.19.1",
      "typescript": "^4.9.5"
    },
    "dependencies": {
+     "@azure/openai": "^1.0.0-beta.10",
      "axios": "^1.3.4",
-     "azure-openai": "^0.9.4"
+     "openai": "^4.24.1"
    }
  }
package/src/azureai.ts CHANGED
@@ -1,8 +1,11 @@
- import { Configuration, OpenAIApi } from "azure-openai";
- import { AzureOpenAIPatameters, EmbeddingResult, OpenAIApiParameters, request } from "./declare";
- // import { Configuration, OpenAIApi, ChatCompletionRequestMessage } from "azure-openai"
- import OpenAIGpt from "./openai"
- export default class AzureAI extends OpenAIGpt {
+ /**
+  * Microsoft Azure OpenAI
+  */
+ import OpenAIBase from "./openaibase"
+ import { AzureOpenAIPatameters, ChatReponse, EmbeddingResult, OpenAIApiParameters, request } from "./declare";
+ import { OpenAIClient, AzureKeyCredential, ChatCompletionsNamedToolSelection } from "@azure/openai";
+ export default class AzureAI extends OpenAIBase<OpenAIClient> {
+
      protected readonly azureSetting: AzureOpenAIPatameters;
      constructor(apiKey: string, azureOption: AzureOpenAIPatameters, apiOption: OpenAIApiParameters = {}) {
          super(apiKey, apiOption);
@@ -14,20 +17,9 @@ export default class AzureAI extends OpenAIGpt {
      /**
       * Initialize the OpenAI chat client API
       */
-     override createOpenAI(apiKey: string): OpenAIApi {
-         return new OpenAIApi(new Configuration({ apiKey,
-             azure:{
-                 apiKey,
-                 endpoint: this.azureSetting.endpoint,
-                 deploymentName: this.azureSetting.engine
-             } }))
+     createOpenAI(apiKey: string): OpenAIClient {
+         return new OpenAIClient(this.azureSetting.endpoint, new AzureKeyCredential(apiKey));
      }
-     /**
-      * The latest Azure OpenAI URL
-      */
-     // get BaseUrl(): string {
-     //     return `${this.azureSetting.endpoint}/openai/deployments/${this.azureSetting.engine}/chat/completions?api-version=${this.azureSetting.version || '2023-03-15-preview'}`
-     // }

      get EmbeddingUrl(): string {
          return `${this.azureSetting.endpoint}/openai/deployments/${this.embeddingmodel || 'openai-embedding-ada-002'}/embeddings?api-version=2022-12-01`
@@ -56,7 +48,7 @@ export default class AzureAI extends OpenAIGpt {
              url: this.EmbeddingUrl
          };
          const response = await request(param)
-         if (response.data) {
+         if (response.successed && response.data) {
              return { successed: true, embedding: response.data.data[0].embedding };
          }
          return { successed: false, ...response.data };
@@ -64,4 +56,90 @@ export default class AzureAI extends OpenAIGpt {
              return { successed: false, error };
          }
      }
+
+     /**
+      * Non-streaming chat request
+      * @param _chatText
+      * @param _paramOption
+      * @param _axiosOption
+      */
+     public async chatRequest(chatText: string | Array<any>, callChatOption: OpenAIApiParameters, _axiosOption: any = {}): Promise<ChatReponse> {
+         if (!chatText) return { successed: false, error: { errcode: 2, errmsg: '缺失聊天的内容' } };
+         if (!this.aiApi) this.aiApi = this.createOpenAI(this.apiKey);
+
+         let message: Array<any> = typeof (chatText) == 'string' ?
+             [{ role: 'user', content: chatText }] : chatText;
+         try {
+             const response: any = await this.aiApi.getChatCompletions(
+                 callChatOption?.model || this.chatModel,
+                 message,
+                 {
+                     temperature: Number(callChatOption?.temperature || this.temperature),
+                     maxTokens: Number(callChatOption?.maxtoken || this.maxtoken),
+                     topP: Number(callChatOption?.top_p || this.top_p),
+                     presencePenalty: Number(callChatOption?.presence_penalty || this.presence_penalty),
+                     frequencyPenalty: Number(callChatOption?.frequency_penalty || this.frequency_penalty),
+                     n: Number(callChatOption?.replyCounts || 1) || 1,
+                     tools: callChatOption.tools,
+                     toolChoice: (callChatOption.tool_choice || 'none') as ChatCompletionsNamedToolSelection,
+                 });
+             const { promptTokens: prompt_tokens, completionTokens: completion_tokens, totalTokens: total_tokens } = response.usage
+             let rebuildChoice = [];
+             for (const msg of response.choices) {
+                 ///, contentFilterResults: content_filter
+                 const { index, finishReason: finish_reason, message } = msg
+                 rebuildChoice.push({ index, finish_reason, message })
+             }
+             return { successed: true, message: rebuildChoice, usage: { prompt_tokens, completion_tokens, total_tokens } };
+
+         } catch (error) {
+             console.log('result is error ', error)
+             return { successed: false, error };
+         }
+
+     }
+
+     /**
+      * Streaming chat mode
+      * @param chatText
+      * @param _paramOption
+      * @param axiosOption
+      */
+     override async chatRequestInStream(chatText: string | Array<any>, callChatOption: OpenAIApiParameters, attach?: any, axiosOption?: any): Promise<any> {
+         if (!chatText) this.emit('chaterror', { successed: false, error: 'no text in chat' });
+         if (!this.aiApi) {
+             this.aiApi = this.createOpenAI(this.apiKey);
+         }
+         let message: Array<any> = typeof (chatText) == 'string' ?[{ role: 'user', content: chatText }] : chatText;
+         axiosOption = Object.assign({}, axiosOption || { timeout: 60000 })
+         let requestid = Math.ceil(Math.random() * (new Date().getTime() * Math.random()) / 1000);
+         try {
+             const response: any = await this.aiApi.streamChatCompletions(
+                 callChatOption?.model || this.chatModel,
+                 message,
+                 {
+                     temperature: Number(callChatOption?.temperature || this.temperature),
+                     maxTokens: Number(callChatOption?.maxtoken || this.maxtoken),
+                     topP: Number(callChatOption?.top_p || this.top_p),
+                     presencePenalty: Number(callChatOption?.presence_penalty || this.presence_penalty),
+                     frequencyPenalty: Number(callChatOption?.frequency_penalty || this.frequency_penalty),
+                     n: Number(callChatOption?.replyCounts || 1) || 1
+                 });
+             let replytext: string[] = [];
+             for await (const event of response) {
+                 for (const choice of event.choices) {
+                     const { finishReason: finishreason, index } = choice;
+                     const content = choice.delta?.content;
+                     replytext.push(content);
+                     let output = { successed: true, requestid, segment: content, text: replytext.join(''), finish_reason: finishreason, index };//, usage };
+                     if (attach) output = Object.assign({}, output, attach);
+                     this.emit(finishreason ? 'chatdone' : 'chattext', output)
+                 }
+             }
+             return { successed: true, requestid }
+         } catch (error) {
+             this.emit('requesterror', { successed: false, requestid, error: 'call axios faied ' + error });
+             return { successed: false, requestid }
+         }
+     }
  }
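The azureai.ts rewrite above swaps the old azure-openai wrapper for the official @azure/openai client and implements chat via getChatCompletions / streamChatCompletions. A rough usage sketch, assuming the base class exposes an EventEmitter interface and that the model setting doubles as the Azure deployment name (which the callChatOption?.model || this.chatModel argument suggests); endpoint, key, and deployment values are placeholders:

import AzureAI from 'doomiaichat/dist/azureai';

const azure = new AzureAI('<azure-api-key>', {
    endpoint: 'https://my-resource.openai.azure.com', // Azure OpenAI resource endpoint
    engine: 'gpt-35-turbo',                           // deployment name
}, { model: 'gpt-35-turbo', temperature: 0.7 });

// Non-streaming: resolves with { successed, message, usage } as assembled in chatRequest above.
azure.chatRequest([{ role: 'user', content: 'Hi there' }], { model: 'gpt-35-turbo' })
    .then((res) => console.log(res.message, res.usage));

// Streaming: chunks are emitted as 'chattext' events and the final chunk as 'chatdone'.
azure.on('chattext', (chunk) => process.stdout.write(chunk.segment || ''));
azure.on('chatdone', (chunk) => console.log('\nfinish_reason:', chunk.finish_reason));
azure.chatRequestInStream('Tell me a short joke', { model: 'gpt-35-turbo' });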
package/src/declare.ts CHANGED
@@ -38,15 +38,24 @@ export interface OpenAIApiParameters {
      'presence_penalty'?: number;
      'frequency_penalty'?: number;
      'replyCounts'?: number; /// how many replies to return
+     'tools'?:Array<any>,
+     'tool_choice'?: string
  }
  /**
   * Connection parameters for OpenAI on Azure
   */
- export interface AzureOpenAIPatameters{
-     'endpoint':string; /// endpoint
-     'engine':string; /// name of the GPT deployment
+ export interface ProxyPatameters{
+     'serviceurl':string; /// endpoint
+ }
+
+ /**
+  * OpenAI proxy connection parameters
+  */
+ export interface AzureOpenAIPatameters {
+     'endpoint': string; /// endpoint
+     'engine': string; /// name of the GPT deployment
      'embedding'?: string; /// name of the embedding deployment
-     'version'?:string; /// API version
+     'version'?: string; /// API version
  }

  /**
@@ -101,4 +110,26 @@ export interface CacheProvider {
      */
      delete(key: string): void;

+ }
+
+
+ export interface StabilityOption {
+     'cfg_scale'?: number,
+     'clip_guidance_preset'?: string,
+     'height'?: number,
+     'width'?: number,
+     'samples'?: number,
+     'seed'?: number,
+     'steps'?: number,
+     'sampler'?: string,
+     'negative'?: string,
+     'engine'?: string,
+     'endpoint'?: string
+     'denoising_strength'?: number,
+     'hr_scale'?: number
+ }
+
+ export interface StabilityResult extends ApiResult {
+     'data'?: any;
+     'type'?: string;
  }
@@ -3,6 +3,7 @@
   * Speech-to-text service provider factory
   */
  import OpenAIGpt from './openai';
+ import OpenAIProxt from './openaiproxy';
  import AzureAI from './azureai'
  import StabilityAI from './stabilityai'
  import StabilityPlusAI from './stabilityplusai'
@@ -13,6 +14,7 @@ import GptBase from './gptbase';
  */
  export const GptProviderEnum = {
      OPENAI: 'openai',
+     OPENAIPROXY:'openaiproxy',
      MICROSOFT: 'microsoft',
      BAIDU: 'baidu',
      GOOGLE:'google',
@@ -28,10 +30,12 @@ export type GptProviderEnum = typeof GptProviderEnum[keyof typeof GptProviderEnu
   * @returns
   */
  export function createGpt(provider: GptProviderEnum, apikey: string|ApiCredential, setting: any): GptBase | null {
-     let { model, maxtoken, temperature, endpoint, engine, version, embedding, top_p, presence_penalty, frequency_penalty } = setting || {};
+     let { model, maxtoken, temperature, serviceurl,endpoint, engine, version, embedding, top_p, presence_penalty, frequency_penalty } = setting || {};
      switch (provider) {
          case GptProviderEnum.OPENAI:
              return new OpenAIGpt(apikey + '', { model, maxtoken, temperature, embedding, top_p, presence_penalty, frequency_penalty });
+         case GptProviderEnum.OPENAIPROXY:
+             return new OpenAIProxt(apikey + '', { serviceurl}, { model, maxtoken, temperature, embedding, top_p, presence_penalty, frequency_penalty });
          case GptProviderEnum.MICROSOFT:
              return new AzureAI(apikey + '', { endpoint, engine, version }, { model, maxtoken, temperature, embedding, top_p, presence_penalty, frequency_penalty }, );
          case GptProviderEnum.BAIDU: