@git-ai/cli 1.0.2 → 1.0.4

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
@@ -1,38 +1,34 @@
-import { config } from "../utils/Storage.mjs";
-import { OPENAI_MAX_TOKEN_DEFAULT } from "../const.mjs";
-import Logger from "../utils/Logger.mjs";
-import BaseAction from "./BaseAction.mjs";
-
-class SetMaxTokenAction extends BaseAction {
-  constructor(maxToken) {
-    super(maxToken);
-    this.maxToken = maxToken;
-  }
-
-  async execute() {
-    const newValue =
-      typeof this.maxToken === "string" && this.maxToken.trim()
-        ? this.maxToken.trim()
-        : "";
-    const numberReg = /^[1-9]\d*$/;
-
-    if (!numberReg.test(newValue)) {
-      Logger.error(`请输入正确的数字,当前值: "${this.maxToken}"`);
-      throw new Error("无效的数字格式");
-    }
-
-    if (parseInt(newValue) > OPENAI_MAX_TOKEN_DEFAULT) {
-      Logger.warn(`不建议 max-token 设置太大`);
-    }
-
-    config.set("maxToken", newValue);
-    Logger.success(
-      `最大 token 数已设置: ${newValue} (${Math.round(newValue / 1000)}k)`
-    );
-  }
-}
-
-export default async function (maxToken) {
-  const action = new SetMaxTokenAction(maxToken);
-  await action.run();
-}
+import { config } from '../utils/Storage.mjs';
+import { OPENAI_MAX_TOKEN_DEFAULT } from '../const.mjs';
+import Logger from '../utils/Logger.mjs';
+import BaseAction from './BaseAction.mjs';
+
+class SetMaxTokenAction extends BaseAction {
+  constructor(maxToken) {
+    super(maxToken);
+    this.maxToken = maxToken;
+  }
+
+  async execute() {
+    const newValue =
+      typeof this.maxToken === 'string' && this.maxToken.trim() ? this.maxToken.trim() : '';
+    const numberReg = /^[1-9]\d*$/;
+
+    if (!numberReg.test(newValue)) {
+      Logger.error(`请输入正确的数字,当前值: "${this.maxToken}"`);
+      throw new Error('无效的数字格式');
+    }
+
+    if (parseInt(newValue) > OPENAI_MAX_TOKEN_DEFAULT) {
+      Logger.warn(`不建议 max-token 设置太大`);
+    }
+
+    config.set('maxToken', newValue);
+    Logger.success(`最大 token 数已设置: ${newValue} (${Math.round(newValue / 1000)}k)`);
+  }
+}
+
+export default async function (maxToken) {
+  const action = new SetMaxTokenAction(maxToken);
+  await action.run();
+}
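
The change above is purely cosmetic: double quotes become single quotes and Prettier reflows a few statements, while SetMaxTokenAction's validation logic is identical in 1.0.2 and 1.0.4. As a minimal sketch of that rule, assuming only that OPENAI_MAX_TOKEN_DEFAULT is a positive integer exported by ../const.mjs (4096 below is a placeholder), the accept/reject behaviour can be isolated like this:

```js
// Standalone sketch of SetMaxTokenAction's input check (same in 1.0.2 and 1.0.4).
// Assumption: OPENAI_MAX_TOKEN_DEFAULT is a positive integer; 4096 is only a placeholder.
const OPENAI_MAX_TOKEN_DEFAULT = 4096;
const numberReg = /^[1-9]\d*$/;

function checkMaxToken(maxToken) {
  const newValue = typeof maxToken === 'string' && maxToken.trim() ? maxToken.trim() : '';
  if (!numberReg.test(newValue)) return 'rejected (not a positive integer)';
  if (parseInt(newValue) > OPENAI_MAX_TOKEN_DEFAULT) return 'accepted with warning (above default)';
  return 'accepted';
}

for (const input of ['4096', '8192', '0', '12a', ' 2048 ']) {
  console.log(JSON.stringify(input), '->', checkMaxToken(input));
}
```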
@@ -1,131 +1,122 @@
-import { chat, getModelList } from "../utils/OpenAI.mjs";
-import { config } from "../utils/Storage.mjs";
-import inquirer from "inquirer";
-import { BIN } from "../const.mjs";
-import Logger from "../utils/Logger.mjs";
-import Spinner from "../utils/Spinner.mjs";
-import BaseAction from "./BaseAction.mjs";
-class SetModelAction extends BaseAction {
-  constructor(modelId) {
-    super(modelId);
-    this.modelId = typeof modelId === "string" ? modelId.trim() : "";
-    this.defaultModel = config.get("model") || "";
-    this.key = config.get("key");
-    this.baseURL = config.get("baseURL");
-  }
-
-  async selectModels() {
-    const list = [];
-    Logger.info(`Base URL:${this.baseURL}`);
-    const spinner = new Spinner("正在加载模型列表...");
-    spinner.start();
-
-    try {
-      list.push(...(await getModelList()));
-
-      if (!list.length) {
-        throw "未找到可用的模型";
-      }
-      spinner.success(`可用模型:${list.length}个`);
-    } catch (error) {
-      spinner.error("加载模型列表失败");
-      throw error && error.message ? error.message : error;
-    }
-    try {
-      const answers = await inquirer.prompt([
-        {
-          type: "checkbox",
-          name: "models",
-          message: `请选择模型`,
-          choices: list,
-          loop: false,
-          validate(input) {
-            if (!input || input.length === 0) {
-              return "请至少选择 1 个模型";
-            }
-            return true;
-          },
-          default: [],
-        },
-      ]);
-      if (!answers.models.length) {
-        throw "最少选择 1 个模型";
-      }
-      this.modelId = answers.models.join(",");
-      config.set("model", this.modelId);
-    } catch (error) {
-      throw error && error.message ? error.message : error;
-    }
-  }
-  // 验证模型是否可用
-  async validateModel() {
-    const spinner = new Spinner("正在验证模型是否可用...");
-    spinner.start();
-
-    const startTimestamp = Date.now();
-    const configModel = config.get("model")
-      ? config.get("model").split(",")
-      : [];
-    if (!this.baseURL || !configModel.length) return;
-    const total = configModel.length;
-    const errTotal = [];
-    while (configModel.length) {
-      const model = configModel.shift();
-      try {
-        const result = await chat({
-          model: model,
-          messages: [
-            {
-              role: "user",
-              content: "1+1=?",
-            },
-          ],
-        });
-        formatCompletions(result);
-      } catch {
-        errTotal.push(model);
-      }
-    }
-    spinner.stop();
-    if (total - errTotal.length > 0) {
-      Logger.success(`模型验证通过 ${total - errTotal.length} 个`);
-    }
-    if (errTotal.length) {
-      Logger.error(
-        `模型验证失败 ${errTotal.length} 个,分别是:\n${errTotal.join(
-          "\n - "
-        )}`
-      );
-    }
-
-    const endTimestamp = Date.now();
-    const duration = endTimestamp - startTimestamp;
-
-    Logger.success(`本次检查模型用时: ${(duration / 1000).toFixed(3)} 秒`);
-  }
-  async execute() {
-    config.set("model", this.modelId);
-    if (!this.modelId && this.defaultModel) {
-      Logger.warn("已清空设置的模型");
-    }
-    if (!this.modelId) {
-      if (this.baseURL && !this.key) {
-        Logger.warn(
-          `检测到 api key 为空,可能获取列表失败。设置 \`${BIN} set-key <your key>\``
-        );
-      }
-      await this.selectModels();
-    }
-    if (this.modelId) {
-      Logger.success(
-        `已设置模型: \n - ${this.modelId.split(`,`).join("\n - ")}`
-      );
-    }
-    await this.validateModel();
-  }
-}
-
-export default async function (event) {
-  const action = new SetModelAction(event);
-  await action.run();
-}
+import { chat, getModelList } from '../utils/OpenAI.mjs';
+import { config } from '../utils/Storage.mjs';
+import inquirer from 'inquirer';
+import { BIN } from '../const.mjs';
+import Logger from '../utils/Logger.mjs';
+import Spinner from '../utils/Spinner.mjs';
+import BaseAction from './BaseAction.mjs';
+import { formatMessage } from '../utils/MessageUtils.mjs';
+class SetModelAction extends BaseAction {
+  constructor(modelId) {
+    super(modelId);
+    this.modelId = typeof modelId === 'string' ? modelId.trim() : '';
+    this.defaultModel = config.get('model') || '';
+    this.key = config.get('key');
+    this.baseURL = config.get('baseURL');
+  }
+
+  async selectModels() {
+    const list = [];
+    Logger.info(`Base URL:${this.baseURL}`);
+    const spinner = new Spinner('正在加载模型列表...');
+    spinner.start();
+
+    try {
+      list.push(...(await getModelList()));
+
+      if (!list.length) {
+        throw '未找到可用的模型';
+      }
+      spinner.success(`可用模型:${list.length}个`);
+    } catch (error) {
+      spinner.error('加载模型列表失败');
+      throw error && error.message ? error.message : error;
+    }
+    try {
+      const answers = await inquirer.prompt([
+        {
+          type: 'checkbox',
+          name: 'models',
+          message: `请选择模型`,
+          choices: list,
+          loop: false,
+          validate(input) {
+            if (!input || input.length === 0) {
+              return '请至少选择 1 个模型';
+            }
+            return true;
+          },
+          default: [],
+        },
+      ]);
+      if (!answers.models.length) {
+        throw '最少选择 1 个模型';
+      }
+      this.modelId = answers.models.join(',');
+      config.set('model', this.modelId);
+    } catch (error) {
+      throw error && error.message ? error.message : error;
+    }
+  }
+  // 验证模型是否可用
+  async validateModel() {
+    const spinner = new Spinner('正在验证模型是否可用...');
+    spinner.start();
+
+    const startTimestamp = Date.now();
+    const configModel = config.get('model') ? config.get('model').split(',') : [];
+    if (!this.baseURL || !configModel.length) return;
+    const total = configModel.length;
+    const errTotal = [];
+    while (configModel.length) {
+      const model = configModel.shift();
+      try {
+        const result = await chat({
+          model: model,
+          messages: [
+            {
+              role: 'user',
+              content: '1+1=?',
+            },
+          ],
+        });
+        formatMessage(result);
+      } catch {
+        errTotal.push(model);
+      }
+    }
+    spinner.stop();
+    if (total - errTotal.length > 0) {
+      Logger.success(`模型验证通过 ${total - errTotal.length} 个`);
+    }
+    if (errTotal.length) {
+      Logger.error(`模型验证失败 ${errTotal.length} 个,分别是:\n${errTotal.join('\n - ')}`);
+    }
+
+    const endTimestamp = Date.now();
+    const duration = endTimestamp - startTimestamp;
+
+    Logger.success(`本次检查模型用时: ${(duration / 1000).toFixed(3)} 秒`);
+  }
+  async execute() {
+    config.set('model', this.modelId);
+    if (!this.modelId && this.defaultModel) {
+      Logger.warn('已清空设置的模型');
+    }
+    if (!this.modelId) {
+      if (this.baseURL && !this.key) {
+        Logger.warn(`检测到 api key 为空,可能获取列表失败。设置 \`${BIN} set-key <your key>\``);
+      }
+      await this.selectModels();
+    }
+    if (this.modelId) {
+      Logger.success(`已设置模型: \n - ${this.modelId.split(`,`).join('\n - ')}`);
+    }
+    await this.validateModel();
+  }
+}
+
+export default async function (event) {
+  const action = new SetModelAction(event);
+  await action.run();
+}
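
Apart from the quote and formatting changes, this file carries the only functional change visible in this diff: 1.0.2 called formatCompletions(result) inside validateModel() without importing it, while 1.0.4 imports formatMessage from ../utils/MessageUtils.mjs and calls that instead. A minimal sketch of the per-model validation loop, with a stub standing in for the real chat() wrapper from ../utils/OpenAI.mjs so the control flow runs in isolation, looks like this:

```js
// Hedged sketch of SetModelAction.validateModel()'s loop: send a trivial prompt to each
// configured model and collect the ones that throw. `chatStub` only stands in for the
// package's chat() wrapper; model names are illustrative.
async function validateModels(models, chat) {
  const failed = [];
  for (const model of models) {
    try {
      await chat({ model, messages: [{ role: 'user', content: '1+1=?' }] });
    } catch {
      failed.push(model);
    }
  }
  return { passed: models.length - failed.length, failed };
}

const chatStub = async ({ model }) => {
  if (model === 'broken-model') throw new Error('unavailable');
  return { choices: [] };
};

validateModels(['model-a', 'broken-model'], chatStub).then((r) => console.log(r));
```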
@@ -1,161 +1,151 @@
-import axios from "axios";
-import { promises as fs } from "fs";
-import { isAbsolute, resolve } from "path";
-import { chat } from "../utils/OpenAI.mjs";
-import { config } from "../utils/Storage.mjs";
-import inquirer from "inquirer";
-import { formatCompletions } from "../utils/MessageUtils.mjs";
-import { OPENAI_MODEL_LIST_URL } from "../const.mjs";
-import Logger from "../utils/Logger.mjs";
-import Spinner from "../utils/Spinner.mjs";
-import BaseAction from "./BaseAction.mjs";
-
-class SelectModelAction extends BaseAction {
-  async execute() {
-    await this.setModel();
-  }
-
-  async getModelList() {
-    if (!OPENAI_MODEL_LIST_URL) {
-      Logger.notice(`需要配置环境变量 \`OPENAI_MODEL_LIST_URL\` 。
-此变量指定 OpenAI 模型列表的来源,支持 HTTP(S) URL 或本地文件路径。
-
-** OPENAI_MODEL_LIST_URL 示例:**
-* URL: \`https://raw.githubusercontent.com/xx025/carrot/main/model_list.json\`
-* Linux/macOS 路径: \`/path/to/your/local/model_list.json\`
-* Windows 路径: \`C:\\Path\\To\\Your\\Local\\model_list.json\`
-
-** JSON 配置示例:**
-模型id、baseURL、key 配置多个会随机取,必填(id、baseURL)、非必填(keys)
-\`\`\`json
-{
-  "data": [
-    {
-      "id": "modelId,modelId,modelId",
-      "baseURL": "baseURL,baseURL,baseURL",
-      "keys": "key,key,key"
-    }
-  ]
-}
-\`\`\`
-`);
-      throw "未配置环境变量 `OPENAI_MODEL_LIST_URL`";
-    }
-    const source = OPENAI_MODEL_LIST_URL;
-    const isHttpSource = /^https?:\/\//i.test(source);
-    const spinner = new Spinner("正在加载模型列表...");
-    spinner.start();
-    try {
-      if (isHttpSource) {
-        const { data } = await axios.get(source);
-        spinner.stop();
-        return data;
-      }
-      const filePath = isAbsolute(source)
-        ? source
-        : resolve(process.cwd(), source);
-      const content = await fs.readFile(filePath, "utf8");
-      spinner.stop();
-      return JSON.parse(content);
-    } catch (error) {
-      spinner.stop();
-      throw error && error.message ? error.message : error;
-    }
-  }
-
-  async selectModels() {
-    const modelConfig = {
-      list: [],
-      origin: [],
-    };
-
-    try {
-      modelConfig.origin = (await this.getModelList()).data || [];
-      modelConfig.list = modelConfig.origin.map((item) => item.id);
-      if (!modelConfig.list.length) {
-        throw "未找到可用的模型";
-      }
-    } catch (error) {
-      throw error && error.message ? error.message : error;
-    }
-    try {
-      const answers = await inquirer.prompt([
-        {
-          type: "rawlist",
-          name: "model",
-          message: `请选择模型`,
-          choices: modelConfig.list,
-          default: "",
-          loop: false,
-        },
-      ]);
-      const item = modelConfig.origin.find((item) => item.id === answers.model);
-      if (!item.baseURL || !item.id) {
-        throw `模型信息不完整: 请检查 \`baseURL\` 和 \`id\``;
-      }
-      config.set("baseURL", item.baseURL);
-      config.set("model", item.id);
-      config.set("key", item.keys);
-      Logger.success(
-        `已设置模型: \n - ${item.id.split(`,`).join("\n - ")}`
-      );
-    } catch (error) {
-      throw `设置模型失败: ${error && error.message ? error.message : error}`;
-    }
-  }
-  // 验证模型是否可用
-  async validateModel() {
-    const spinner = new Spinner("正在验证模型是否可用...");
-    spinner.start();
-
-    const startTimestamp = Date.now();
-    const configModel = config.get("model")
-      ? config.get("model").split(",")
-      : [];
-    const total = configModel.length;
-    const errTotal = [];
-    while (configModel.length) {
-      const model = configModel.shift();
-      try {
-        const result = await chat({
-          model: model,
-          messages: [
-            {
-              role: "user",
-              content: "1+1=?",
-            },
-          ],
-        });
-        formatCompletions(result);
-      } catch {
-        errTotal.push(model);
-      }
-    }
-    spinner.stop();
-    if (total - errTotal.length > 0) {
-      Logger.success(`模型验证通过 ${total - errTotal.length} 个`);
-    }
-    if (errTotal.length) {
-      Logger.error(
-        `模型验证失败 ${errTotal.length} 个,分别是:\n${errTotal.join(
-          "\n - "
-        )}`
-      );
-    }
-
-    const endTimestamp = Date.now();
-    const duration = endTimestamp - startTimestamp;
-
-    Logger.success(`本次检查模型用时: ${(duration / 1000).toFixed(3)} 秒`);
-  }
-
-  async setModel() {
-    await this.selectModels();
-    await this.validateModel();
-  }
-}
-
-export default async function () {
-  const action = new SelectModelAction();
-  await action.run();
-}
+import axios from 'axios';
+import { promises as fs } from 'fs';
+import { isAbsolute, resolve } from 'path';
+import { chat } from '../utils/OpenAI.mjs';
+import { config } from '../utils/Storage.mjs';
+import inquirer from 'inquirer';
+import { formatCompletions } from '../utils/MessageUtils.mjs';
+import { OPENAI_MODEL_LIST_URL } from '../const.mjs';
+import Logger from '../utils/Logger.mjs';
+import Spinner from '../utils/Spinner.mjs';
+import BaseAction from './BaseAction.mjs';
+
+class SelectModelAction extends BaseAction {
+  async execute() {
+    await this.setModel();
+  }
+
+  async getModelList() {
+    if (!OPENAI_MODEL_LIST_URL) {
+      Logger.notice(`需要配置环境变量 \`OPENAI_MODEL_LIST_URL\` 。
+此变量指定 OpenAI 模型列表的来源,支持 HTTP(S) URL 或本地文件路径。
+
+** OPENAI_MODEL_LIST_URL 示例:**
+* URL: \`https://raw.githubusercontent.com/xx025/carrot/main/model_list.json\`
+* Linux/macOS 路径: \`/path/to/your/local/model_list.json\`
+* Windows 路径: \`C:\\Path\\To\\Your\\Local\\model_list.json\`
+
+** JSON 配置示例:**
+模型id、baseURL、key 配置多个会随机取,必填(id、baseURL)、非必填(keys)
+\`\`\`json
+{
+  "data": [
+    {
+      "id": "modelId,modelId,modelId",
+      "baseURL": "baseURL,baseURL,baseURL",
+      "keys": "key,key,key"
+    }
+  ]
+}
+\`\`\`
+`);
+      throw '未配置环境变量 `OPENAI_MODEL_LIST_URL`';
+    }
+    const source = OPENAI_MODEL_LIST_URL;
+    const isHttpSource = /^https?:\/\//i.test(source);
+    const spinner = new Spinner('正在加载模型列表...');
+    spinner.start();
+    try {
+      if (isHttpSource) {
+        const { data } = await axios.get(source);
+        spinner.stop();
+        return data;
+      }
+      const filePath = isAbsolute(source) ? source : resolve(process.cwd(), source);
+      const content = await fs.readFile(filePath, 'utf8');
+      spinner.stop();
+      return JSON.parse(content);
+    } catch (error) {
+      spinner.stop();
+      throw error && error.message ? error.message : error;
+    }
+  }
+
+  async selectModels() {
+    const modelConfig = {
+      list: [],
+      origin: [],
+    };
+
+    try {
+      modelConfig.origin = (await this.getModelList()).data || [];
+      modelConfig.list = modelConfig.origin.map((item) => item.id);
+      if (!modelConfig.list.length) {
+        throw '未找到可用的模型';
+      }
+    } catch (error) {
+      throw error && error.message ? error.message : error;
+    }
+    try {
+      const answers = await inquirer.prompt([
+        {
+          type: 'rawlist',
+          name: 'model',
+          message: `请选择模型`,
+          choices: modelConfig.list,
+          default: '',
+          loop: false,
+        },
+      ]);
+      const item = modelConfig.origin.find((item) => item.id === answers.model);
+      if (!item.baseURL || !item.id) {
+        throw `模型信息不完整: 请检查 \`baseURL\` \`id\``;
+      }
+      config.set('baseURL', item.baseURL);
+      config.set('model', item.id);
+      config.set('key', item.keys);
+      Logger.success(`已设置模型: \n - ${item.id.split(`,`).join('\n - ')}`);
+    } catch (error) {
+      throw `设置模型失败: ${error && error.message ? error.message : error}`;
+    }
+  }
+  // 验证模型是否可用
+  async validateModel() {
+    const spinner = new Spinner('正在验证模型是否可用...');
+    spinner.start();
+
+    const startTimestamp = Date.now();
+    const configModel = config.get('model') ? config.get('model').split(',') : [];
+    const total = configModel.length;
+    const errTotal = [];
+    while (configModel.length) {
+      const model = configModel.shift();
+      try {
+        const result = await chat({
+          model: model,
+          messages: [
+            {
+              role: 'user',
+              content: '1+1=?',
+            },
+          ],
+        });
+        formatCompletions(result);
+      } catch {
+        errTotal.push(model);
+      }
+    }
+    spinner.stop();
+    if (total - errTotal.length > 0) {
+      Logger.success(`模型验证通过 ${total - errTotal.length} 个`);
+    }
+    if (errTotal.length) {
+      Logger.error(`模型验证失败 ${errTotal.length} 个,分别是:\n${errTotal.join('\n - ')}`);
+    }
+
+    const endTimestamp = Date.now();
+    const duration = endTimestamp - startTimestamp;
+
+    Logger.success(`本次检查模型用时: ${(duration / 1000).toFixed(3)} 秒`);
+  }
+
+  async setModel() {
+    await this.selectModels();
+    await this.validateModel();
+  }
+}
+
+export default async function () {
+  const action = new SelectModelAction();
+  await action.run();
+}
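
This last file changes only in formatting, plus one shortened error message (the 和 between `baseURL` and `id` is dropped). The notice embedded in getModelList() documents the expected OPENAI_MODEL_LIST_URL payload. A minimal standalone restatement of that loading logic, assuming the { "data": [{ "id", "baseURL", "keys" }] } shape from the notice and an illustrative ./model_list.json path, could look like this:

```js
// Hedged restatement of SelectModelAction.getModelList(): OPENAI_MODEL_LIST_URL may be an
// HTTP(S) URL or a local file path, and the payload is expected to look like
// { "data": [{ "id": "...", "baseURL": "...", "keys": "..." }] }.
import axios from 'axios';
import { promises as fs } from 'fs';
import { isAbsolute, resolve } from 'path';

async function loadModelList(source) {
  if (/^https?:\/\//i.test(source)) {
    const { data } = await axios.get(source);
    return data;
  }
  const filePath = isAbsolute(source) ? source : resolve(process.cwd(), source);
  return JSON.parse(await fs.readFile(filePath, 'utf8'));
}

// Example run; the fallback path is illustrative only.
loadModelList(process.env.OPENAI_MODEL_LIST_URL || './model_list.json')
  .then((cfg) => console.log((cfg.data || []).map((item) => item.id)))
  .catch((err) => console.error(err.message));
```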