@leikeduntech/leiai-js 3.5.3 → 3.5.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3):
  1. package/bin/cli.js +146 -146
  2. package/build/index.js +48 -25
  3. package/package.json +82 -82
package/bin/cli.js CHANGED
@@ -1,146 +1,146 @@
1
- #!/usr/bin/env node
2
- import crypto from 'node:crypto'
3
-
4
- import * as url from 'url'
5
- import { cac } from 'cac'
6
- import Conf from 'conf'
7
- import { readPackageUp } from 'read-pkg-up'
8
-
9
- import { ChatGPTAPI } from '../build/index.js'
10
-
11
- async function main() {
12
- const dirname = url.fileURLToPath(new URL('.', import.meta.url))
13
- const pkg = await readPackageUp({ cwd: dirname })
14
- const version = (pkg && pkg.packageJson && pkg.packageJson.version) || '4'
15
- const config = new Conf({ projectName: 'chatgpt' })
16
-
17
- const cli = cac('chatgpt')
18
- cli
19
- .command('<prompt>', 'Ask ChatGPT a question')
20
- .option('-c, --continue', 'Continue last conversation', {
21
- default: false
22
- })
23
- .option('-d, --debug', 'Enables debug logging', {
24
- default: false
25
- })
26
- .option('-s, --stream', 'Streams the response', {
27
- default: true
28
- })
29
- .option('-s, --store', 'Enables the local message cache', {
30
- default: true
31
- })
32
- .option('-t, --timeout <timeout>', 'Timeout in milliseconds')
33
- .option('-k, --apiKey <apiKey>', 'OpenAI API key')
34
- .option('-o, --apiOrg <apiOrg>', 'OpenAI API key')
35
- .option('-m, --model <model>', 'Model (gpt-3.5-turbo, gpt-4)', {
36
- default: 'gpt-3.5-turbo'
37
- })
38
- .option(
39
- '-n, --conversationName <conversationName>',
40
- 'Unique name for the conversation'
41
- )
42
- .action(async (prompt, options) => {
43
- const apiOrg = options.apiOrg || process.env.OPENAI_API_ORG
44
- const apiKey = options.apiKey || process.env.OPENAI_API_KEY
45
- if (!apiKey) {
46
- console.error('error: either set OPENAI_API_KEY or use --apiKey\n')
47
- cli.outputHelp()
48
- process.exit(1)
49
- }
50
-
51
- const apiKeyHash = hash(apiKey)
52
- const conversationName = options.conversationName || 'default'
53
- const conversationKey = `${conversationName}:${apiKeyHash}`
54
- const conversation =
55
- options.continue && options.store
56
- ? config.get(conversationKey, {}) || {}
57
- : {}
58
- const model = options.model
59
- let conversationId = undefined
60
- let parentMessageId = undefined
61
-
62
- if (conversation.lastMessageId) {
63
- const lastMessage = conversation[conversation.lastMessageId]
64
- if (lastMessage) {
65
- conversationId = lastMessage.conversationId
66
- parentMessageId = lastMessage.id
67
- }
68
- }
69
-
70
- if (options.debug) {
71
- console.log('using config', config.path)
72
- }
73
-
74
- const api = new ChatGPTAPI({
75
- apiKey,
76
- apiOrg,
77
- debug: options.debug,
78
- completionParams: {
79
- model
80
- },
81
- getMessageById: async (id) => {
82
- if (options.store) {
83
- return conversation[id]
84
- } else {
85
- return null
86
- }
87
- },
88
- upsertMessage: async (message) => {
89
- if (options.store) {
90
- conversation[message.id] = message
91
- conversation.lastMessageId = message.id
92
- config.set(conversationKey, conversation)
93
- }
94
- }
95
- })
96
-
97
- const res = await api.sendMessage(prompt, {
98
- conversationId,
99
- parentMessageId,
100
- timeoutMs: options.timeout || undefined,
101
- onProgress: options.stream
102
- ? (progress) => {
103
- if (progress.delta) {
104
- process.stdout.write(progress.delta)
105
- }
106
- }
107
- : undefined
108
- })
109
-
110
- if (options.stream) {
111
- process.stdout.write('\n')
112
- } else {
113
- console.log(res.text)
114
- }
115
- })
116
-
117
- cli.command('rm-cache', 'Clears the local message cache').action(() => {
118
- config.clear()
119
- console.log('cleared cache', config.path)
120
- })
121
-
122
- cli.command('ls-cache', 'Prints the local message cache path').action(() => {
123
- console.log(config.path)
124
- })
125
-
126
- cli.help()
127
- cli.version(version)
128
-
129
- try {
130
- cli.parse()
131
- } catch (err) {
132
- console.error(`error: ${err.message}\n`)
133
- cli.outputHelp()
134
- process.exit(1)
135
- }
136
- }
137
-
138
- function hash(d) {
139
- const buffer = Buffer.isBuffer(d) ? d : Buffer.from(d.toString())
140
- return crypto.createHash('sha256').update(buffer).digest('hex')
141
- }
142
-
143
- main().catch((err) => {
144
- console.error(err)
145
- process.exit(1)
146
- })
1
+ #!/usr/bin/env node
2
+ import crypto from 'node:crypto'
3
+
4
+ import * as url from 'url'
5
+ import { cac } from 'cac'
6
+ import Conf from 'conf'
7
+ import { readPackageUp } from 'read-pkg-up'
8
+
9
+ import { ChatGPTAPI } from '../build/index.js'
10
+
11
+ async function main() {
12
+ const dirname = url.fileURLToPath(new URL('.', import.meta.url))
13
+ const pkg = await readPackageUp({ cwd: dirname })
14
+ const version = (pkg && pkg.packageJson && pkg.packageJson.version) || '4'
15
+ const config = new Conf({ projectName: 'chatgpt' })
16
+
17
+ const cli = cac('chatgpt')
18
+ cli
19
+ .command('<prompt>', 'Ask ChatGPT a question')
20
+ .option('-c, --continue', 'Continue last conversation', {
21
+ default: false
22
+ })
23
+ .option('-d, --debug', 'Enables debug logging', {
24
+ default: false
25
+ })
26
+ .option('-s, --stream', 'Streams the response', {
27
+ default: true
28
+ })
29
+ .option('-s, --store', 'Enables the local message cache', {
30
+ default: true
31
+ })
32
+ .option('-t, --timeout <timeout>', 'Timeout in milliseconds')
33
+ .option('-k, --apiKey <apiKey>', 'OpenAI API key')
34
+ .option('-o, --apiOrg <apiOrg>', 'OpenAI API key')
35
+ .option('-m, --model <model>', 'Model (gpt-3.5-turbo, gpt-4)', {
36
+ default: 'gpt-3.5-turbo'
37
+ })
38
+ .option(
39
+ '-n, --conversationName <conversationName>',
40
+ 'Unique name for the conversation'
41
+ )
42
+ .action(async (prompt, options) => {
43
+ const apiOrg = options.apiOrg || process.env.OPENAI_API_ORG
44
+ const apiKey = options.apiKey || process.env.OPENAI_API_KEY
45
+ if (!apiKey) {
46
+ console.error('error: either set OPENAI_API_KEY or use --apiKey\n')
47
+ cli.outputHelp()
48
+ process.exit(1)
49
+ }
50
+
51
+ const apiKeyHash = hash(apiKey)
52
+ const conversationName = options.conversationName || 'default'
53
+ const conversationKey = `${conversationName}:${apiKeyHash}`
54
+ const conversation =
55
+ options.continue && options.store
56
+ ? config.get(conversationKey, {}) || {}
57
+ : {}
58
+ const model = options.model
59
+ let conversationId = undefined
60
+ let parentMessageId = undefined
61
+
62
+ if (conversation.lastMessageId) {
63
+ const lastMessage = conversation[conversation.lastMessageId]
64
+ if (lastMessage) {
65
+ conversationId = lastMessage.conversationId
66
+ parentMessageId = lastMessage.id
67
+ }
68
+ }
69
+
70
+ if (options.debug) {
71
+ console.log('using config', config.path)
72
+ }
73
+
74
+ const api = new ChatGPTAPI({
75
+ apiKey,
76
+ apiOrg,
77
+ debug: options.debug,
78
+ completionParams: {
79
+ model
80
+ },
81
+ getMessageById: async (id) => {
82
+ if (options.store) {
83
+ return conversation[id]
84
+ } else {
85
+ return null
86
+ }
87
+ },
88
+ upsertMessage: async (message) => {
89
+ if (options.store) {
90
+ conversation[message.id] = message
91
+ conversation.lastMessageId = message.id
92
+ config.set(conversationKey, conversation)
93
+ }
94
+ }
95
+ })
96
+
97
+ const res = await api.sendMessage(prompt, {
98
+ conversationId,
99
+ parentMessageId,
100
+ timeoutMs: options.timeout || undefined,
101
+ onProgress: options.stream
102
+ ? (progress) => {
103
+ if (progress.delta) {
104
+ process.stdout.write(progress.delta)
105
+ }
106
+ }
107
+ : undefined
108
+ })
109
+
110
+ if (options.stream) {
111
+ process.stdout.write('\n')
112
+ } else {
113
+ console.log(res.text)
114
+ }
115
+ })
116
+
117
+ cli.command('rm-cache', 'Clears the local message cache').action(() => {
118
+ config.clear()
119
+ console.log('cleared cache', config.path)
120
+ })
121
+
122
+ cli.command('ls-cache', 'Prints the local message cache path').action(() => {
123
+ console.log(config.path)
124
+ })
125
+
126
+ cli.help()
127
+ cli.version(version)
128
+
129
+ try {
130
+ cli.parse()
131
+ } catch (err) {
132
+ console.error(`error: ${err.message}\n`)
133
+ cli.outputHelp()
134
+ process.exit(1)
135
+ }
136
+ }
137
+
138
+ function hash(d) {
139
+ const buffer = Buffer.isBuffer(d) ? d : Buffer.from(d.toString())
140
+ return crypto.createHash('sha256').update(buffer).digest('hex')
141
+ }
142
+
143
+ main().catch((err) => {
144
+ console.error(err)
145
+ process.exit(1)
146
+ })
package/build/index.js CHANGED
@@ -153,6 +153,9 @@ function signTencentHunyuan(bodyData, url, keyList) {
153
153
  const signBase64 = CryptoJS.enc.Base64.stringify(hmac);
154
154
  return signBase64;
155
155
  }
156
+ function isPositiveNumber(str) {
157
+ return /^[1-9]\d*$/.test(str);
158
+ }
156
159
 
157
160
  // src/chatgpt-api.ts
158
161
  var CHATGPT_MODEL = "gpt-3.5-turbo";
@@ -184,8 +187,8 @@ var ChatGPTAPI = class {
184
187
  messageStore,
185
188
  completionParams,
186
189
  systemMessage,
187
- maxModelTokens = 4e3,
188
- maxResponseTokens = 2e3,
190
+ maxModelTokens = 0,
191
+ maxResponseTokens = -1e7,
189
192
  getMessageById,
190
193
  upsertMessage,
191
194
  fetch: fetch2 = fetch
@@ -198,7 +201,6 @@ var ChatGPTAPI = class {
198
201
  this._fetch = fetch2;
199
202
  this._completionParams = {
200
203
  model: CHATGPT_MODEL,
201
- temperature: ["baidu", "zhipu"].indexOf(manufacturer.toLowerCase()) > -1 ? 0.95 : ["xunfei"].indexOf(manufacturer.toLowerCase()) > -1 ? 0.5 : 1,
202
204
  ...completionParams
203
205
  };
204
206
  this._systemMessage = systemMessage;
@@ -371,6 +373,8 @@ var ChatGPTAPI = class {
371
373
  delete headers.Authorization;
372
374
  } else if (this._manufacturer.toLowerCase() === "chatdoc") {
373
375
  url = this._apiBaseUrl;
376
+ } else if (this._manufacturer.toLocaleLowerCase() === "anthropic") {
377
+ url = this._apiBaseUrl;
374
378
  }
375
379
  let pluginList;
376
380
  if (completionParams.pluginList) {
@@ -387,6 +391,9 @@ var ChatGPTAPI = class {
387
391
  messages,
388
392
  stream
389
393
  };
394
+ if (!isPositiveNumber(maxTokens)) {
395
+ delete body.max_tokens;
396
+ }
390
397
  if (this._manufacturer.toLowerCase() === "baidu" && typeof pluginList === "string" && pluginList.indexOf("zhishiku") > -1) {
391
398
  if (body.model)
392
399
  delete body.model;
@@ -646,7 +653,7 @@ var ChatGPTAPI = class {
646
653
  result.id = response.id;
647
654
  }
648
655
  }
649
- if (((_l = response.choices) == null ? void 0 : _l.length) && ["openai", "azure", "tencent"].indexOf(
656
+ if (((_l = response.choices) == null ? void 0 : _l.length) && ["openai", "azure", "tencent", "anthropic"].indexOf(
650
657
  this._manufacturer.toLowerCase()
651
658
  ) > -1) {
652
659
  const delta = response.choices[0].delta;
@@ -924,14 +931,16 @@ var ChatGPTAPI = class {
924
931
  });
925
932
  }
926
933
  let objText = [{ type: "text", text }];
927
- if (completionParams.fileList && completionParams.fileList.length > 0 && completionParams.model === "gpt-4-vision-preview") {
934
+ if (completionParams.fileList && completionParams.fileList.length > 0) {
928
935
  completionParams.fileList.forEach((item) => {
929
- objText.push({
930
- type: "image_url",
931
- image_url: {
932
- url: item.imgUrl
933
- }
934
- });
936
+ if ((item == null ? void 0 : item.type.indexOf("image")) > -1) {
937
+ objText.push({
938
+ type: "image_url",
939
+ image_url: {
940
+ url: item.imgUrl
941
+ }
942
+ });
943
+ }
935
944
  });
936
945
  if (["openai", "azure"].indexOf(this._manufacturer.toLowerCase()) > -1) {
937
946
  text = objText;
@@ -939,9 +948,15 @@ var ChatGPTAPI = class {
939
948
  }
940
949
  const systemMessageOffset = messages.length;
941
950
  let userMessage = null;
942
- if (["baidu", "zhipu", "xunfei", "aliyun", "tencent", "chatdoc"].indexOf(
943
- this._manufacturer.toLowerCase()
944
- ) > -1) {
951
+ if ([
952
+ "baidu",
953
+ "zhipu",
954
+ "xunfei",
955
+ "aliyun",
956
+ "tencent",
957
+ "chatdoc",
958
+ "anthropic"
959
+ ].indexOf(this._manufacturer.toLowerCase()) > -1) {
945
960
  userMessage = [{ role: "user", content: text }];
946
961
  } else if (pluginData) {
947
962
  userMessage = {
@@ -997,9 +1012,15 @@ ${message.content}`]);
997
1012
  }
998
1013
  const parentMessageRole = parentMessage.role || "user";
999
1014
  let parentMessageItem = null;
1000
- if (["baidu", "zhipu", "xunfei", "aliyun", "tencent", "chatdoc"].indexOf(
1001
- this._manufacturer.toLowerCase()
1002
- ) > -1) {
1015
+ if ([
1016
+ "baidu",
1017
+ "zhipu",
1018
+ "xunfei",
1019
+ "aliyun",
1020
+ "tencent",
1021
+ "chatdoc",
1022
+ "anthropic"
1023
+ ].indexOf(this._manufacturer.toLowerCase()) > -1) {
1003
1024
  parentMessageItem = {
1004
1025
  role: parentMessageRole,
1005
1026
  content: parentMessage.text
@@ -1029,14 +1050,16 @@ ${message.content}`]);
1029
1050
  parentMessageId = parentMessage.parentMessageId;
1030
1051
  } while (true);
1031
1052
  let maxTokens = this._maxModelTokens;
1032
- if (["openai", "azure"].indexOf(this._manufacturer.toLowerCase()) > -1) {
1033
- maxTokens = Math.max(
1034
- 1,
1035
- Math.min(this._maxModelTokens - numTokens, this._maxResponseTokens)
1036
- );
1037
- } else if (numTokens > this._maxModelTokens) {
1038
- maxTokens = this._maxModelTokens;
1039
- errorMessage = `${this._manufacturer}\uFF1A\u5F53\u524D\u63D0\u95EE\u4E0A\u4E0B\u6587\u5185\u5BB9\u957F\u5EA6${numTokens}tokns\u8D85\u957F\uFF0C\u8BE5\u6A21\u578B\u6700\u5927\u63D0\u95EE\u957F\u5EA6\u4E3A${this._maxModelTokens}tokens\uFF0C\u8BF7\u5207\u6362\u5176\u4ED6\u589E\u5F3AAI\u6A21\u578B\u6216\u51CF\u5C11\u5B57\u6570\u6216\u8005\u5173\u95ED\u4E0A\u4E0B\u6587\u5386\u53F2\u63D0\u9AD8\u5355\u6B21\u63D0\u95EE\u957F\u5EA6\uFF01`;
1053
+ if (isPositiveNumber(maxTokens)) {
1054
+ if (["openai", "azure"].indexOf(this._manufacturer.toLowerCase()) > -1) {
1055
+ maxTokens = Math.max(
1056
+ 1,
1057
+ Math.min(this._maxModelTokens - numTokens, this._maxResponseTokens)
1058
+ );
1059
+ } else if (numTokens > this._maxModelTokens) {
1060
+ maxTokens = this._maxModelTokens;
1061
+ errorMessage = `${this._manufacturer}\uFF1A\u5F53\u524D\u63D0\u95EE\u4E0A\u4E0B\u6587\u5185\u5BB9\u957F\u5EA6${numTokens}tokns\u8D85\u957F\uFF0C\u8BE5\u6A21\u578B\u6700\u5927\u63D0\u95EE\u957F\u5EA6\u4E3A${this._maxModelTokens}tokens\uFF0C\u8BF7\u5207\u6362\u5176\u4ED6\u589E\u5F3AAI\u6A21\u578B\u6216\u51CF\u5C11\u5B57\u6570\u6216\u8005\u5173\u95ED\u4E0A\u4E0B\u6587\u5386\u53F2\u63D0\u9AD8\u5355\u6B21\u63D0\u95EE\u957F\u5EA6\uFF01`;
1062
+ }
1040
1063
  }
1041
1064
  return { messages, maxTokens, numTokens, errorMessage };
1042
1065
  }
package/package.json CHANGED
@@ -1,82 +1,82 @@
1
- {
2
- "name": "@leikeduntech/leiai-js",
3
- "version": "3.5.3",
4
- "author": "liuhean",
5
- "repository": {
6
- "type": "git",
7
- "url": "git+https://github.com/liuhean2021/leiai-js.git"
8
- },
9
- "license": "UNLICENSED",
10
- "type": "module",
11
- "source": "./src/index.ts",
12
- "types": "./build/index.d.ts",
13
- "scripts": {
14
- "build": "tsup",
15
- "dev": "tsup --watch",
16
- "clean": "del build",
17
- "prebuild": "run-s clean",
18
- "predev": "run-s clean",
19
- "pretest": "run-s build",
20
- "docs": "typedoc",
21
- "prepare": "husky install",
22
- "pre-commit": "lint-staged",
23
- "test": "run-p test:*",
24
- "test:prettier": "prettier '**/*.{js,jsx,ts,tsx}' --check"
25
- },
26
- "exports": {
27
- ".": {
28
- "types": "./build/index.d.ts",
29
- "import": "./build/index.js",
30
- "default": "./build/index.js"
31
- }
32
- },
33
- "files": [
34
- "build",
35
- "bin"
36
- ],
37
- "bin": {
38
- "leiai-js": "bin/cli.js"
39
- },
40
- "engines": {
41
- "node": ">=14"
42
- },
43
- "dependencies": {
44
- "@leikeduntech/spark-nodejs": "1.1.0",
45
- "cac": "^6.7.14",
46
- "conf": "^11.0.1",
47
- "crypto-js": "^4.1.1",
48
- "eventsource-parser": "^1.0.0",
49
- "js-tiktoken": "^1.0.5",
50
- "keyv": "^4.5.2",
51
- "p-timeout": "^6.1.1",
52
- "quick-lru": "^6.1.1",
53
- "read-pkg-up": "^9.1.0",
54
- "uuid": "^9.0.0"
55
- },
56
- "devDependencies": {
57
- "@keyv/redis": "^2.5.7",
58
- "@trivago/prettier-plugin-sort-imports": "^4.1.1",
59
- "@types/node": "^18.16.3",
60
- "@types/uuid": "^9.0.1",
61
- "del-cli": "^5.0.0",
62
- "dotenv-safe": "^8.2.0",
63
- "husky": "^8.0.3",
64
- "lint-staged": "^13.2.2",
65
- "npm-run-all": "^4.1.5",
66
- "ora": "^6.3.0",
67
- "prettier": "^2.8.8",
68
- "tsup": "^6.7.0",
69
- "tsx": "^3.12.7",
70
- "typedoc": "^0.24.6",
71
- "typedoc-plugin-markdown": "^3.15.3",
72
- "typescript": "^5.0.4"
73
- },
74
- "lint-staged": {
75
- "*.{ts,tsx}": [
76
- "prettier --write"
77
- ]
78
- },
79
- "keywords": [
80
- "leiai"
81
- ]
82
- }
1
+ {
2
+ "name": "@leikeduntech/leiai-js",
3
+ "version": "3.5.5",
4
+ "author": "liuhean",
5
+ "repository": {
6
+ "type": "git",
7
+ "url": "git+https://github.com/liuhean2021/leiai-js.git"
8
+ },
9
+ "license": "UNLICENSED",
10
+ "type": "module",
11
+ "source": "./src/index.ts",
12
+ "types": "./build/index.d.ts",
13
+ "scripts": {
14
+ "build": "tsup",
15
+ "dev": "tsup --watch",
16
+ "clean": "del build",
17
+ "prebuild": "run-s clean",
18
+ "predev": "run-s clean",
19
+ "pretest": "run-s build",
20
+ "docs": "typedoc",
21
+ "prepare": "husky install",
22
+ "pre-commit": "lint-staged",
23
+ "test": "run-p test:*",
24
+ "test:prettier": "prettier '**/*.{js,jsx,ts,tsx}' --check"
25
+ },
26
+ "exports": {
27
+ ".": {
28
+ "types": "./build/index.d.ts",
29
+ "import": "./build/index.js",
30
+ "default": "./build/index.js"
31
+ }
32
+ },
33
+ "files": [
34
+ "build",
35
+ "bin"
36
+ ],
37
+ "bin": {
38
+ "leiai-js": "bin/cli.js"
39
+ },
40
+ "engines": {
41
+ "node": ">=14"
42
+ },
43
+ "dependencies": {
44
+ "@leikeduntech/spark-nodejs": "1.1.0",
45
+ "cac": "^6.7.14",
46
+ "conf": "^11.0.1",
47
+ "crypto-js": "^4.1.1",
48
+ "eventsource-parser": "^1.0.0",
49
+ "js-tiktoken": "^1.0.5",
50
+ "keyv": "^4.5.2",
51
+ "p-timeout": "^6.1.1",
52
+ "quick-lru": "^6.1.1",
53
+ "read-pkg-up": "^9.1.0",
54
+ "uuid": "^9.0.0"
55
+ },
56
+ "devDependencies": {
57
+ "@keyv/redis": "^2.5.7",
58
+ "@trivago/prettier-plugin-sort-imports": "^4.1.1",
59
+ "@types/node": "^18.16.3",
60
+ "@types/uuid": "^9.0.1",
61
+ "del-cli": "^5.0.0",
62
+ "dotenv-safe": "^8.2.0",
63
+ "husky": "^8.0.3",
64
+ "lint-staged": "^13.2.2",
65
+ "npm-run-all": "^4.1.5",
66
+ "ora": "^6.3.0",
67
+ "prettier": "^2.8.8",
68
+ "tsup": "^6.7.0",
69
+ "tsx": "^3.12.7",
70
+ "typedoc": "^0.24.6",
71
+ "typedoc-plugin-markdown": "^3.15.3",
72
+ "typescript": "^5.0.4"
73
+ },
74
+ "lint-staged": {
75
+ "*.{ts,tsx}": [
76
+ "prettier --write"
77
+ ]
78
+ },
79
+ "keywords": [
80
+ "leiai"
81
+ ]
82
+ }