@leikeduntech/leiai-js 3.5.4 → 3.5.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/bin/cli.js +146 -146
  2. package/build/index.js +45 -24
  3. package/package.json +82 -82
package/bin/cli.js CHANGED
@@ -1,146 +1,146 @@
1
- #!/usr/bin/env node
2
- import crypto from 'node:crypto'
3
-
4
- import * as url from 'url'
5
- import { cac } from 'cac'
6
- import Conf from 'conf'
7
- import { readPackageUp } from 'read-pkg-up'
8
-
9
- import { ChatGPTAPI } from '../build/index.js'
10
-
11
- async function main() {
12
- const dirname = url.fileURLToPath(new URL('.', import.meta.url))
13
- const pkg = await readPackageUp({ cwd: dirname })
14
- const version = (pkg && pkg.packageJson && pkg.packageJson.version) || '4'
15
- const config = new Conf({ projectName: 'chatgpt' })
16
-
17
- const cli = cac('chatgpt')
18
- cli
19
- .command('<prompt>', 'Ask ChatGPT a question')
20
- .option('-c, --continue', 'Continue last conversation', {
21
- default: false
22
- })
23
- .option('-d, --debug', 'Enables debug logging', {
24
- default: false
25
- })
26
- .option('-s, --stream', 'Streams the response', {
27
- default: true
28
- })
29
- .option('-s, --store', 'Enables the local message cache', {
30
- default: true
31
- })
32
- .option('-t, --timeout <timeout>', 'Timeout in milliseconds')
33
- .option('-k, --apiKey <apiKey>', 'OpenAI API key')
34
- .option('-o, --apiOrg <apiOrg>', 'OpenAI API key')
35
- .option('-m, --model <model>', 'Model (gpt-3.5-turbo, gpt-4)', {
36
- default: 'gpt-3.5-turbo'
37
- })
38
- .option(
39
- '-n, --conversationName <conversationName>',
40
- 'Unique name for the conversation'
41
- )
42
- .action(async (prompt, options) => {
43
- const apiOrg = options.apiOrg || process.env.OPENAI_API_ORG
44
- const apiKey = options.apiKey || process.env.OPENAI_API_KEY
45
- if (!apiKey) {
46
- console.error('error: either set OPENAI_API_KEY or use --apiKey\n')
47
- cli.outputHelp()
48
- process.exit(1)
49
- }
50
-
51
- const apiKeyHash = hash(apiKey)
52
- const conversationName = options.conversationName || 'default'
53
- const conversationKey = `${conversationName}:${apiKeyHash}`
54
- const conversation =
55
- options.continue && options.store
56
- ? config.get(conversationKey, {}) || {}
57
- : {}
58
- const model = options.model
59
- let conversationId = undefined
60
- let parentMessageId = undefined
61
-
62
- if (conversation.lastMessageId) {
63
- const lastMessage = conversation[conversation.lastMessageId]
64
- if (lastMessage) {
65
- conversationId = lastMessage.conversationId
66
- parentMessageId = lastMessage.id
67
- }
68
- }
69
-
70
- if (options.debug) {
71
- console.log('using config', config.path)
72
- }
73
-
74
- const api = new ChatGPTAPI({
75
- apiKey,
76
- apiOrg,
77
- debug: options.debug,
78
- completionParams: {
79
- model
80
- },
81
- getMessageById: async (id) => {
82
- if (options.store) {
83
- return conversation[id]
84
- } else {
85
- return null
86
- }
87
- },
88
- upsertMessage: async (message) => {
89
- if (options.store) {
90
- conversation[message.id] = message
91
- conversation.lastMessageId = message.id
92
- config.set(conversationKey, conversation)
93
- }
94
- }
95
- })
96
-
97
- const res = await api.sendMessage(prompt, {
98
- conversationId,
99
- parentMessageId,
100
- timeoutMs: options.timeout || undefined,
101
- onProgress: options.stream
102
- ? (progress) => {
103
- if (progress.delta) {
104
- process.stdout.write(progress.delta)
105
- }
106
- }
107
- : undefined
108
- })
109
-
110
- if (options.stream) {
111
- process.stdout.write('\n')
112
- } else {
113
- console.log(res.text)
114
- }
115
- })
116
-
117
- cli.command('rm-cache', 'Clears the local message cache').action(() => {
118
- config.clear()
119
- console.log('cleared cache', config.path)
120
- })
121
-
122
- cli.command('ls-cache', 'Prints the local message cache path').action(() => {
123
- console.log(config.path)
124
- })
125
-
126
- cli.help()
127
- cli.version(version)
128
-
129
- try {
130
- cli.parse()
131
- } catch (err) {
132
- console.error(`error: ${err.message}\n`)
133
- cli.outputHelp()
134
- process.exit(1)
135
- }
136
- }
137
-
138
- function hash(d) {
139
- const buffer = Buffer.isBuffer(d) ? d : Buffer.from(d.toString())
140
- return crypto.createHash('sha256').update(buffer).digest('hex')
141
- }
142
-
143
- main().catch((err) => {
144
- console.error(err)
145
- process.exit(1)
146
- })
1
+ #!/usr/bin/env node
2
+ import crypto from 'node:crypto'
3
+
4
+ import * as url from 'url'
5
+ import { cac } from 'cac'
6
+ import Conf from 'conf'
7
+ import { readPackageUp } from 'read-pkg-up'
8
+
9
+ import { ChatGPTAPI } from '../build/index.js'
10
+
11
+ async function main() {
12
+ const dirname = url.fileURLToPath(new URL('.', import.meta.url))
13
+ const pkg = await readPackageUp({ cwd: dirname })
14
+ const version = (pkg && pkg.packageJson && pkg.packageJson.version) || '4'
15
+ const config = new Conf({ projectName: 'chatgpt' })
16
+
17
+ const cli = cac('chatgpt')
18
+ cli
19
+ .command('<prompt>', 'Ask ChatGPT a question')
20
+ .option('-c, --continue', 'Continue last conversation', {
21
+ default: false
22
+ })
23
+ .option('-d, --debug', 'Enables debug logging', {
24
+ default: false
25
+ })
26
+ .option('-s, --stream', 'Streams the response', {
27
+ default: true
28
+ })
29
+ .option('-s, --store', 'Enables the local message cache', {
30
+ default: true
31
+ })
32
+ .option('-t, --timeout <timeout>', 'Timeout in milliseconds')
33
+ .option('-k, --apiKey <apiKey>', 'OpenAI API key')
34
+ .option('-o, --apiOrg <apiOrg>', 'OpenAI API key')
35
+ .option('-m, --model <model>', 'Model (gpt-3.5-turbo, gpt-4)', {
36
+ default: 'gpt-3.5-turbo'
37
+ })
38
+ .option(
39
+ '-n, --conversationName <conversationName>',
40
+ 'Unique name for the conversation'
41
+ )
42
+ .action(async (prompt, options) => {
43
+ const apiOrg = options.apiOrg || process.env.OPENAI_API_ORG
44
+ const apiKey = options.apiKey || process.env.OPENAI_API_KEY
45
+ if (!apiKey) {
46
+ console.error('error: either set OPENAI_API_KEY or use --apiKey\n')
47
+ cli.outputHelp()
48
+ process.exit(1)
49
+ }
50
+
51
+ const apiKeyHash = hash(apiKey)
52
+ const conversationName = options.conversationName || 'default'
53
+ const conversationKey = `${conversationName}:${apiKeyHash}`
54
+ const conversation =
55
+ options.continue && options.store
56
+ ? config.get(conversationKey, {}) || {}
57
+ : {}
58
+ const model = options.model
59
+ let conversationId = undefined
60
+ let parentMessageId = undefined
61
+
62
+ if (conversation.lastMessageId) {
63
+ const lastMessage = conversation[conversation.lastMessageId]
64
+ if (lastMessage) {
65
+ conversationId = lastMessage.conversationId
66
+ parentMessageId = lastMessage.id
67
+ }
68
+ }
69
+
70
+ if (options.debug) {
71
+ console.log('using config', config.path)
72
+ }
73
+
74
+ const api = new ChatGPTAPI({
75
+ apiKey,
76
+ apiOrg,
77
+ debug: options.debug,
78
+ completionParams: {
79
+ model
80
+ },
81
+ getMessageById: async (id) => {
82
+ if (options.store) {
83
+ return conversation[id]
84
+ } else {
85
+ return null
86
+ }
87
+ },
88
+ upsertMessage: async (message) => {
89
+ if (options.store) {
90
+ conversation[message.id] = message
91
+ conversation.lastMessageId = message.id
92
+ config.set(conversationKey, conversation)
93
+ }
94
+ }
95
+ })
96
+
97
+ const res = await api.sendMessage(prompt, {
98
+ conversationId,
99
+ parentMessageId,
100
+ timeoutMs: options.timeout || undefined,
101
+ onProgress: options.stream
102
+ ? (progress) => {
103
+ if (progress.delta) {
104
+ process.stdout.write(progress.delta)
105
+ }
106
+ }
107
+ : undefined
108
+ })
109
+
110
+ if (options.stream) {
111
+ process.stdout.write('\n')
112
+ } else {
113
+ console.log(res.text)
114
+ }
115
+ })
116
+
117
+ cli.command('rm-cache', 'Clears the local message cache').action(() => {
118
+ config.clear()
119
+ console.log('cleared cache', config.path)
120
+ })
121
+
122
+ cli.command('ls-cache', 'Prints the local message cache path').action(() => {
123
+ console.log(config.path)
124
+ })
125
+
126
+ cli.help()
127
+ cli.version(version)
128
+
129
+ try {
130
+ cli.parse()
131
+ } catch (err) {
132
+ console.error(`error: ${err.message}\n`)
133
+ cli.outputHelp()
134
+ process.exit(1)
135
+ }
136
+ }
137
+
138
+ function hash(d) {
139
+ const buffer = Buffer.isBuffer(d) ? d : Buffer.from(d.toString())
140
+ return crypto.createHash('sha256').update(buffer).digest('hex')
141
+ }
142
+
143
+ main().catch((err) => {
144
+ console.error(err)
145
+ process.exit(1)
146
+ })
package/build/index.js CHANGED
@@ -153,6 +153,9 @@ function signTencentHunyuan(bodyData, url, keyList) {
153
153
  const signBase64 = CryptoJS.enc.Base64.stringify(hmac);
154
154
  return signBase64;
155
155
  }
156
+ function isPositiveNumber(str) {
157
+ return /^[1-9]\d*$/.test(str);
158
+ }
156
159
 
157
160
  // src/chatgpt-api.ts
158
161
  var CHATGPT_MODEL = "gpt-3.5-turbo";
@@ -184,8 +187,8 @@ var ChatGPTAPI = class {
184
187
  messageStore,
185
188
  completionParams,
186
189
  systemMessage,
187
- maxModelTokens = 4e3,
188
- maxResponseTokens = 2e3,
190
+ maxModelTokens = 0,
191
+ maxResponseTokens = -1e7,
189
192
  getMessageById,
190
193
  upsertMessage,
191
194
  fetch: fetch2 = fetch
@@ -198,7 +201,6 @@ var ChatGPTAPI = class {
198
201
  this._fetch = fetch2;
199
202
  this._completionParams = {
200
203
  model: CHATGPT_MODEL,
201
- temperature: ["baidu", "zhipu"].indexOf(manufacturer.toLowerCase()) > -1 ? 0.95 : ["xunfei"].indexOf(manufacturer.toLowerCase()) > -1 ? 0.5 : 1,
202
204
  ...completionParams
203
205
  };
204
206
  this._systemMessage = systemMessage;
@@ -389,6 +391,9 @@ var ChatGPTAPI = class {
389
391
  messages,
390
392
  stream
391
393
  };
394
+ if (!isPositiveNumber(maxTokens)) {
395
+ delete body.max_tokens;
396
+ }
392
397
  if (this._manufacturer.toLowerCase() === "baidu" && typeof pluginList === "string" && pluginList.indexOf("zhishiku") > -1) {
393
398
  if (body.model)
394
399
  delete body.model;
@@ -926,14 +931,16 @@ var ChatGPTAPI = class {
926
931
  });
927
932
  }
928
933
  let objText = [{ type: "text", text }];
929
- if (completionParams.fileList && completionParams.fileList.length > 0 && completionParams.model === "gpt-4-vision-preview") {
934
+ if (completionParams.fileList && completionParams.fileList.length > 0) {
930
935
  completionParams.fileList.forEach((item) => {
931
- objText.push({
932
- type: "image_url",
933
- image_url: {
934
- url: item.imgUrl
935
- }
936
- });
936
+ if ((item == null ? void 0 : item.type.indexOf("image")) > -1) {
937
+ objText.push({
938
+ type: "image_url",
939
+ image_url: {
940
+ url: item.imgUrl
941
+ }
942
+ });
943
+ }
937
944
  });
938
945
  if (["openai", "azure"].indexOf(this._manufacturer.toLowerCase()) > -1) {
939
946
  text = objText;
@@ -941,9 +948,15 @@ var ChatGPTAPI = class {
941
948
  }
942
949
  const systemMessageOffset = messages.length;
943
950
  let userMessage = null;
944
- if (["baidu", "zhipu", "xunfei", "aliyun", "tencent", "chatdoc", "anthropic"].indexOf(
945
- this._manufacturer.toLowerCase()
946
- ) > -1) {
951
+ if ([
952
+ "baidu",
953
+ "zhipu",
954
+ "xunfei",
955
+ "aliyun",
956
+ "tencent",
957
+ "chatdoc",
958
+ "anthropic"
959
+ ].indexOf(this._manufacturer.toLowerCase()) > -1) {
947
960
  userMessage = [{ role: "user", content: text }];
948
961
  } else if (pluginData) {
949
962
  userMessage = {
@@ -999,9 +1012,15 @@ ${message.content}`]);
999
1012
  }
1000
1013
  const parentMessageRole = parentMessage.role || "user";
1001
1014
  let parentMessageItem = null;
1002
- if (["baidu", "zhipu", "xunfei", "aliyun", "tencent", "chatdoc", "anthropic"].indexOf(
1003
- this._manufacturer.toLowerCase()
1004
- ) > -1) {
1015
+ if ([
1016
+ "baidu",
1017
+ "zhipu",
1018
+ "xunfei",
1019
+ "aliyun",
1020
+ "tencent",
1021
+ "chatdoc",
1022
+ "anthropic"
1023
+ ].indexOf(this._manufacturer.toLowerCase()) > -1) {
1005
1024
  parentMessageItem = {
1006
1025
  role: parentMessageRole,
1007
1026
  content: parentMessage.text
@@ -1031,14 +1050,16 @@ ${message.content}`]);
1031
1050
  parentMessageId = parentMessage.parentMessageId;
1032
1051
  } while (true);
1033
1052
  let maxTokens = this._maxModelTokens;
1034
- if (["openai", "azure"].indexOf(this._manufacturer.toLowerCase()) > -1) {
1035
- maxTokens = Math.max(
1036
- 1,
1037
- Math.min(this._maxModelTokens - numTokens, this._maxResponseTokens)
1038
- );
1039
- } else if (numTokens > this._maxModelTokens) {
1040
- maxTokens = this._maxModelTokens;
1041
- errorMessage = `${this._manufacturer}\uFF1A\u5F53\u524D\u63D0\u95EE\u4E0A\u4E0B\u6587\u5185\u5BB9\u957F\u5EA6${numTokens}tokns\u8D85\u957F\uFF0C\u8BE5\u6A21\u578B\u6700\u5927\u63D0\u95EE\u957F\u5EA6\u4E3A${this._maxModelTokens}tokens\uFF0C\u8BF7\u5207\u6362\u5176\u4ED6\u589E\u5F3AAI\u6A21\u578B\u6216\u51CF\u5C11\u5B57\u6570\u6216\u8005\u5173\u95ED\u4E0A\u4E0B\u6587\u5386\u53F2\u63D0\u9AD8\u5355\u6B21\u63D0\u95EE\u957F\u5EA6\uFF01`;
1053
+ if (isPositiveNumber(maxTokens)) {
1054
+ if (["openai", "azure"].indexOf(this._manufacturer.toLowerCase()) > -1) {
1055
+ maxTokens = Math.max(
1056
+ 1,
1057
+ Math.min(this._maxModelTokens - numTokens, this._maxResponseTokens)
1058
+ );
1059
+ } else if (numTokens > this._maxModelTokens) {
1060
+ maxTokens = this._maxModelTokens;
1061
+ errorMessage = `${this._manufacturer}\uFF1A\u5F53\u524D\u63D0\u95EE\u4E0A\u4E0B\u6587\u5185\u5BB9\u957F\u5EA6${numTokens}tokns\u8D85\u957F\uFF0C\u8BE5\u6A21\u578B\u6700\u5927\u63D0\u95EE\u957F\u5EA6\u4E3A${this._maxModelTokens}tokens\uFF0C\u8BF7\u5207\u6362\u5176\u4ED6\u589E\u5F3AAI\u6A21\u578B\u6216\u51CF\u5C11\u5B57\u6570\u6216\u8005\u5173\u95ED\u4E0A\u4E0B\u6587\u5386\u53F2\u63D0\u9AD8\u5355\u6B21\u63D0\u95EE\u957F\u5EA6\uFF01`;
1062
+ }
1042
1063
  }
1043
1064
  return { messages, maxTokens, numTokens, errorMessage };
1044
1065
  }
package/package.json CHANGED
@@ -1,82 +1,82 @@
1
- {
2
- "name": "@leikeduntech/leiai-js",
3
- "version": "3.5.4",
4
- "author": "liuhean",
5
- "repository": {
6
- "type": "git",
7
- "url": "git+https://github.com/liuhean2021/leiai-js.git"
8
- },
9
- "license": "UNLICENSED",
10
- "type": "module",
11
- "source": "./src/index.ts",
12
- "types": "./build/index.d.ts",
13
- "scripts": {
14
- "build": "tsup",
15
- "dev": "tsup --watch",
16
- "clean": "del build",
17
- "prebuild": "run-s clean",
18
- "predev": "run-s clean",
19
- "pretest": "run-s build",
20
- "docs": "typedoc",
21
- "prepare": "husky install",
22
- "pre-commit": "lint-staged",
23
- "test": "run-p test:*",
24
- "test:prettier": "prettier '**/*.{js,jsx,ts,tsx}' --check"
25
- },
26
- "exports": {
27
- ".": {
28
- "types": "./build/index.d.ts",
29
- "import": "./build/index.js",
30
- "default": "./build/index.js"
31
- }
32
- },
33
- "files": [
34
- "build",
35
- "bin"
36
- ],
37
- "bin": {
38
- "leiai-js": "bin/cli.js"
39
- },
40
- "engines": {
41
- "node": ">=14"
42
- },
43
- "dependencies": {
44
- "@leikeduntech/spark-nodejs": "1.1.0",
45
- "cac": "^6.7.14",
46
- "conf": "^11.0.1",
47
- "crypto-js": "^4.1.1",
48
- "eventsource-parser": "^1.0.0",
49
- "js-tiktoken": "^1.0.5",
50
- "keyv": "^4.5.2",
51
- "p-timeout": "^6.1.1",
52
- "quick-lru": "^6.1.1",
53
- "read-pkg-up": "^9.1.0",
54
- "uuid": "^9.0.0"
55
- },
56
- "devDependencies": {
57
- "@keyv/redis": "^2.5.7",
58
- "@trivago/prettier-plugin-sort-imports": "^4.1.1",
59
- "@types/node": "^18.16.3",
60
- "@types/uuid": "^9.0.1",
61
- "del-cli": "^5.0.0",
62
- "dotenv-safe": "^8.2.0",
63
- "husky": "^8.0.3",
64
- "lint-staged": "^13.2.2",
65
- "npm-run-all": "^4.1.5",
66
- "ora": "^6.3.0",
67
- "prettier": "^2.8.8",
68
- "tsup": "^6.7.0",
69
- "tsx": "^3.12.7",
70
- "typedoc": "^0.24.6",
71
- "typedoc-plugin-markdown": "^3.15.3",
72
- "typescript": "^5.0.4"
73
- },
74
- "lint-staged": {
75
- "*.{ts,tsx}": [
76
- "prettier --write"
77
- ]
78
- },
79
- "keywords": [
80
- "leiai"
81
- ]
82
- }
1
+ {
2
+ "name": "@leikeduntech/leiai-js",
3
+ "version": "3.5.5",
4
+ "author": "liuhean",
5
+ "repository": {
6
+ "type": "git",
7
+ "url": "git+https://github.com/liuhean2021/leiai-js.git"
8
+ },
9
+ "license": "UNLICENSED",
10
+ "type": "module",
11
+ "source": "./src/index.ts",
12
+ "types": "./build/index.d.ts",
13
+ "scripts": {
14
+ "build": "tsup",
15
+ "dev": "tsup --watch",
16
+ "clean": "del build",
17
+ "prebuild": "run-s clean",
18
+ "predev": "run-s clean",
19
+ "pretest": "run-s build",
20
+ "docs": "typedoc",
21
+ "prepare": "husky install",
22
+ "pre-commit": "lint-staged",
23
+ "test": "run-p test:*",
24
+ "test:prettier": "prettier '**/*.{js,jsx,ts,tsx}' --check"
25
+ },
26
+ "exports": {
27
+ ".": {
28
+ "types": "./build/index.d.ts",
29
+ "import": "./build/index.js",
30
+ "default": "./build/index.js"
31
+ }
32
+ },
33
+ "files": [
34
+ "build",
35
+ "bin"
36
+ ],
37
+ "bin": {
38
+ "leiai-js": "bin/cli.js"
39
+ },
40
+ "engines": {
41
+ "node": ">=14"
42
+ },
43
+ "dependencies": {
44
+ "@leikeduntech/spark-nodejs": "1.1.0",
45
+ "cac": "^6.7.14",
46
+ "conf": "^11.0.1",
47
+ "crypto-js": "^4.1.1",
48
+ "eventsource-parser": "^1.0.0",
49
+ "js-tiktoken": "^1.0.5",
50
+ "keyv": "^4.5.2",
51
+ "p-timeout": "^6.1.1",
52
+ "quick-lru": "^6.1.1",
53
+ "read-pkg-up": "^9.1.0",
54
+ "uuid": "^9.0.0"
55
+ },
56
+ "devDependencies": {
57
+ "@keyv/redis": "^2.5.7",
58
+ "@trivago/prettier-plugin-sort-imports": "^4.1.1",
59
+ "@types/node": "^18.16.3",
60
+ "@types/uuid": "^9.0.1",
61
+ "del-cli": "^5.0.0",
62
+ "dotenv-safe": "^8.2.0",
63
+ "husky": "^8.0.3",
64
+ "lint-staged": "^13.2.2",
65
+ "npm-run-all": "^4.1.5",
66
+ "ora": "^6.3.0",
67
+ "prettier": "^2.8.8",
68
+ "tsup": "^6.7.0",
69
+ "tsx": "^3.12.7",
70
+ "typedoc": "^0.24.6",
71
+ "typedoc-plugin-markdown": "^3.15.3",
72
+ "typescript": "^5.0.4"
73
+ },
74
+ "lint-staged": {
75
+ "*.{ts,tsx}": [
76
+ "prettier --write"
77
+ ]
78
+ },
79
+ "keywords": [
80
+ "leiai"
81
+ ]
82
+ }