1314mc-helper 0.2.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +160 -0
- package/bin/cli.js +328 -0
- package/package.json +56 -0
- package/src/index.js +359 -0
- package/src/tools/claude.js +200 -0
- package/src/tools/cline.js +103 -0
- package/src/tools/codex.js +154 -0
- package/src/tools/continue.js +149 -0
- package/src/tools/cursor.js +99 -0
- package/src/tools/droid.js +253 -0
- package/src/tools/openclaw.js +295 -0
- package/src/tools/opencode.js +227 -0
|
@@ -0,0 +1,154 @@
|
|
|
1
|
+
import fs from 'fs-extra';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import os from 'os';
|
|
4
|
+
import TOML from '@iarna/toml';
|
|
5
|
+
import { getApiBaseForProtocol } from '../index.js';
|
|
6
|
+
|
|
7
|
+
// Provider id registered under [model_providers.*] in Codex's config.toml.
const DUOJIE_PROVIDER_ID = 'duojie';
// Model id written as the default `model` entry when configuring Codex.
const DEFAULT_MODEL = 'gpt-5.4';
|
|
9
|
+
|
|
10
|
+
/**
 * Resolve the Codex CLI configuration locations under the user's home
 * directory.
 * @returns {{configDir: string, configFile: string}} the ~/.codex directory
 *   and the config.toml path inside it
 */
function getCodexConfigPaths() {
  const configDir = path.join(os.homedir(), '.codex');
  return {
    configDir,
    configFile: path.join(configDir, 'config.toml'),
  };
}
|
|
20
|
+
|
|
21
|
+
/**
 * Pick the Codex wire API for a model id: GPT models talk to the
 * Responses API, everything else uses Chat Completions.
 * @param {string} model - model id, e.g. 'gpt-5.4'
 * @returns {'responses'|'chat'}
 */
function getWireApi(model) {
  if (model.startsWith('gpt')) {
    return 'responses';
  }
  return 'chat';
}
|
|
24
|
+
|
|
25
|
+
/**
 * Read and parse a Codex config.toml file.
 * Returns an empty object when the file is missing or blank; parse
 * errors from TOML.parse propagate to the caller.
 * @param {string} configFile - absolute path to config.toml
 * @returns {Promise<object>} parsed TOML data
 */
async function readConfigToml(configFile) {
  const exists = await fs.pathExists(configFile);
  if (!exists) {
    return {};
  }

  const raw = await fs.readFile(configFile, 'utf-8');
  return raw.trim() ? TOML.parse(raw) : {};
}
|
|
37
|
+
|
|
38
|
+
/**
 * Configure Codex CLI to use the 1314mc API.
 * Writes the official config.toml structure (model, model_provider,
 * model_providers) under ~/.codex, merging with any existing config.
 * @param {string} apiKey - API key stored as the provider's bearer token
 * @returns {Promise<{success: boolean, message: string, configPath?: string, hint?: string}>}
 */
export async function configureCodex(apiKey) {
  const paths = getCodexConfigPaths();

  try {
    await fs.ensureDir(paths.configDir);

    // Best-effort read of the existing config; an unparseable file is
    // treated as empty (its previous contents are then overwritten below).
    let existingConfig = {};
    try {
      existingConfig = await readConfigToml(paths.configFile);
    } catch {
      existingConfig = {};
    }

    const wireApi = getWireApi(DEFAULT_MODEL);

    // Merge our provider entry, preserving other providers and any extra
    // keys the user added under our own provider id.
    const modelProviders = {
      ...(existingConfig.model_providers || {}),
      [DUOJIE_PROVIDER_ID]: {
        ...(existingConfig.model_providers?.[DUOJIE_PROVIDER_ID] || {}),
        name: '1314mc',
        base_url: getApiBaseForProtocol(wireApi),
        experimental_bearer_token: apiKey,
        wire_api: wireApi,
      },
    };

    // Spread existing config first so our defaults win on conflict.
    const newConfig = {
      ...existingConfig,
      model: DEFAULT_MODEL,
      model_provider: DUOJIE_PROVIDER_ID,
      model_providers: modelProviders,
    };

    const tomlContent = `# Codex CLI Configuration\n# Generated by duojie-helper\n# Docs: https://developers.openai.com/codex/config-reference/\n\n${TOML.stringify(newConfig)}`;

    await fs.writeFile(paths.configFile, tomlContent, 'utf-8');

    return {
      success: true,
      message: `已配置 → ${paths.configFile}`,
      configPath: paths.configFile,
      hint: '已写入 config.toml(纯配置,无需额外环境变量)',
    };
  } catch (error) {
    // Surface the failure to the caller instead of throwing.
    return {
      success: false,
      message: `配置失败: ${error.message}`,
    };
  }
}
|
|
92
|
+
|
|
93
|
+
/**
 * Inspect ~/.codex/config.toml and report whether Codex is configured,
 * distinguishing a 1314mc configuration (by provider base_url) from any
 * other provider setup.
 * @returns {Promise<{configured: boolean, message?: string}>}
 */
configureCodex.checkStatus = async function() {
  const paths = getCodexConfigPaths();

  if (!(await fs.pathExists(paths.configFile))) {
    return { configured: false };
  }

  try {
    const config = await readConfigToml(paths.configFile);
    const provider = config.model_providers?.[DUOJIE_PROVIDER_ID];
    const baseUrl = provider?.base_url || '';

    if (baseUrl.includes('duojie') || baseUrl.includes('1314mc')) {
      return { configured: true, message: '已配置 1314mc API' };
    }

    const hasProviders = Object.keys(config.model_providers || {}).length > 0;
    if (config.model_provider || hasProviders) {
      return { configured: true, message: '已配置(非 1314mc)' };
    }
  } catch {
    return { configured: true, message: '配置文件存在但格式无效' };
  }

  return { configured: false };
};
|
|
118
|
+
|
|
119
|
+
/**
 * Remove the 1314mc entries from ~/.codex/config.toml while keeping every
 * unrelated setting. Best-effort: read/parse/write failures are ignored.
 */
configureCodex.revoke = async function() {
  const paths = getCodexConfigPaths();

  if (!(await fs.pathExists(paths.configFile))) {
    return;
  }

  try {
    const config = await readConfigToml(paths.configFile);

    const providers = config.model_providers;
    if (providers?.[DUOJIE_PROVIDER_ID]) {
      delete providers[DUOJIE_PROVIDER_ID];
      if (Object.keys(providers).length === 0) {
        delete config.model_providers;
      }
    }

    // Only clear the defaults if they still point at our provider/model.
    if (config.model_provider === DUOJIE_PROVIDER_ID) {
      delete config.model_provider;
    }
    if (config.model === DEFAULT_MODEL) {
      delete config.model;
    }

    await fs.writeFile(paths.configFile, TOML.stringify(config), 'utf-8');
  } catch {
    // ignore — revoke is best-effort cleanup
  }
};
|
|
153
|
+
|
|
154
|
+
export default configureCodex;
|
|
@@ -0,0 +1,149 @@
|
|
|
1
|
+
import fs from 'fs-extra';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import os from 'os';
|
|
4
|
+
import { getModels, getApiBaseForProtocol } from '../index.js';
|
|
5
|
+
|
|
6
|
+
/**
 * Resolve the Continue extension's config locations (~/.continue).
 * @returns {{configDir: string, configFile: string}}
 */
function getContinueConfigPaths() {
  const configDir = path.join(os.homedir(), '.continue');
  return {
    configDir,
    configFile: path.join(configDir, 'config.json'),
  };
}
|
|
16
|
+
|
|
17
|
+
/**
 * Configure the Continue extension (~/.continue/config.json).
 * Every available model is added, one entry per supported protocol, and a
 * `_mc1314` marker records that the entries were generated by this tool
 * (checked later by checkStatus/revoke).
 * @param {string} apiKey - 1314mc API key written into each model entry
 * @returns {Promise<{success: boolean, message: string, configPath?: string, hint?: string}>}
 */
export async function configureContinue(apiKey) {
  const paths = getContinueConfigPaths();

  try {
    await fs.ensureDir(paths.configDir);

    // Load the current config; unparseable JSON is treated as empty
    // (its previous contents will be replaced on write).
    let config = {};
    if (await fs.pathExists(paths.configFile)) {
      try {
        config = await fs.readJson(paths.configFile);
      } catch {
        config = {};
      }
    }

    // Fetch the list of available models
    const models = getModels();
    const duojieModels = [];

    // Protocol names that map to Continue's 'openai' provider; anything
    // else falls back to 'anthropic'.
    const openaiProtocols = new Set([
      'openai', 'openai-response', 'openai-responses',
      'openai-completion', 'openai-completions', 'responses', 'chat',
    ]);

    // Pick Continue's provider id for a protocol string (case-insensitive).
    function providerForApi(api) {
      return openaiProtocols.has(`${api || ''}`.toLowerCase()) ? 'openai' : 'anthropic';
    }

    // Human-readable protocol label used in the entry title.
    function protocolLabel(api) {
      return providerForApi(api) === 'openai' ? 'OpenAI' : 'Anthropic';
    }

    // Walk every model's `apis` array — dual-protocol models yield two entries.
    const allCategories = [
      ...(models.claude || []),
      ...(models.gpt || []),
      ...(models.gemini || []),
      ...(models.other || []),
    ];

    for (const m of allCategories) {
      const apis = m.apis || ['anthropic-messages'];
      for (const api of apis) {
        const provider = providerForApi(api);
        duojieModels.push({
          title: `1314mc ${m.name} (${protocolLabel(api)})`,
          provider: provider,
          model: m.id,
          apiKey: apiKey,
          apiBase: getApiBaseForProtocol(api),
          completionOptions: {
            maxTokens: 16384,
          },
        });
      }
    }

    // Prefer gpt-5.4 as the first (default) entry when present; otherwise
    // keep the generated order.
    const gpt54 = duojieModels.find(m => m.model === 'gpt-5.4');
    if (gpt54) {
      const others = duojieModels.filter(m => m !== gpt54);
      duojieModels.length = 0;
      duojieModels.push(gpt54, ...others);
    }

    // Merge model lists — drop entries we wrote previously (titled "1314mc …")
    const existingModels = config.models || [];
    const filteredModels = existingModels.filter(m =>
      !m.title?.startsWith('1314mc')
    );

    const newConfig = {
      ...config,
      models: [...duojieModels, ...filteredModels],
      // Ownership marker consumed by checkStatus/revoke
      _mc1314: {
        configured: true,
        configuredAt: new Date().toISOString(),
      },
    };

    await fs.writeJson(paths.configFile, newConfig, { spaces: 2 });

    return {
      success: true,
      message: `已配置 ${duojieModels.length} 个模型 → ${paths.configFile}`,
      configPath: paths.configFile,
      hint: '重启 VS Code/JetBrains 使配置生效',
    };
  } catch (error) {
    // Surface the failure to the caller instead of throwing.
    return {
      success: false,
      message: `配置失败: ${error.message}`,
    };
  }
}
|
|
116
|
+
|
|
117
|
+
/**
 * Report whether Continue is configured, distinguishing our managed
 * config (marked with `_mc1314`) from a user-managed one.
 * A present-but-unparseable config.json is reported as configured with an
 * explanatory message — consistent with configureCodex.checkStatus —
 * instead of the previous behavior of reporting it unconfigured, which
 * could prompt a re-configure that clobbers the broken file.
 * @returns {Promise<{configured: boolean, message?: string}>}
 */
configureContinue.checkStatus = async function() {
  const paths = getContinueConfigPaths();

  if (await fs.pathExists(paths.configFile)) {
    try {
      const config = await fs.readJson(paths.configFile);
      if (config._mc1314?.configured) {
        return { configured: true, message: '已配置 1314mc API' };
      } else if (config.models?.length > 0) {
        return { configured: true, message: '已配置(非 1314mc)' };
      }
    } catch {
      // Invalid JSON: the file exists, so report it rather than hiding it.
      return { configured: true, message: '配置文件存在但格式无效' };
    }
  }
  return { configured: false };
};
|
|
132
|
+
|
|
133
|
+
/**
 * Best-effort removal of every 1314mc model entry (titled "1314mc …") and
 * our `_mc1314` marker from ~/.continue/config.json. Read/parse/write
 * failures are ignored.
 */
configureContinue.revoke = async function() {
  const paths = getContinueConfigPaths();

  if (!(await fs.pathExists(paths.configFile))) {
    return;
  }

  try {
    const config = await fs.readJson(paths.configFile);
    const models = config.models || [];
    // Keep only entries we did not create.
    config.models = models.filter(m => !m.title?.startsWith('1314mc'));
    delete config._mc1314;
    await fs.writeJson(paths.configFile, config, { spaces: 2 });
  } catch {
    // ignore — best-effort cleanup
  }
};
|
|
148
|
+
|
|
149
|
+
export default configureContinue;
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
import fs from 'fs-extra';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import os from 'os';
|
|
4
|
+
import chalk from 'chalk';
|
|
5
|
+
import { API_CONFIG } from '../index.js';
|
|
6
|
+
|
|
7
|
+
/**
 * Resolve Cursor's per-user settings directory for the current platform:
 * %APPDATA%/Cursor/User on Windows, ~/Library/Application Support/Cursor/User
 * on macOS, ~/.config/Cursor/User elsewhere.
 * @returns {{configDir: string, settingsFile: string}}
 */
function getCursorConfigPaths() {
  const home = os.homedir();

  let configDir;
  switch (os.platform()) {
    case 'win32':
      configDir = path.join(process.env.APPDATA || home, 'Cursor', 'User');
      break;
    case 'darwin':
      configDir = path.join(home, 'Library', 'Application Support', 'Cursor', 'User');
      break;
    default:
      configDir = path.join(home, '.config', 'Cursor', 'User');
  }

  return {
    configDir,
    settingsFile: path.join(configDir, 'settings.json'),
  };
}
|
|
28
|
+
|
|
29
|
+
/**
 * "Configure" Cursor: custom models can only be added through Cursor's own
 * UI (and require a Pro-or-higher subscription), so this returns printable
 * manual instructions plus the raw values to paste in — no files are written.
 * @param {string} apiKey - 1314mc API key shown in the instructions
 * @returns {Promise<{success: boolean, manual: boolean, message: string, instructions: string, configInfo: {protocol: string, baseUrl: string, apiKey: string}}>}
 */
export async function configureCursor(apiKey) {
  const baseUrl = API_CONFIG.baseUrl;

  // Build the step-by-step manual instructions (user-facing text, colored
  // with chalk; printed by the caller).
  const instructions = `
${chalk.yellow('Cursor 需要在编辑器中手动配置:')}

${chalk.red('⚠️ 注意: 需要 Cursor Pro 或更高级订阅才能使用自定义模型')}

${chalk.cyan('步骤 1:')} 打开 Cursor,进入 Settings → Models
${chalk.cyan('步骤 2:')} 点击 "Add Custom Model"
${chalk.cyan('步骤 3:')} 填入以下配置:

${chalk.green('Protocol:')} OpenAI
${chalk.green('OpenAI API Key:')} ${apiKey}
${chalk.green('Override OpenAI Base URL:')} ${baseUrl}
${chalk.green('Model Name:')} 输入模型名称(注意大小写)
例如: gpt-5.4
或: claude-sonnet-4-6

${chalk.cyan('步骤 4:')} 点击保存,然后在模型列表中选择刚添加的模型

${chalk.cyan('可选配置:')}
${chalk.green('Max Tokens:')} 16384(推荐)

${chalk.gray('提示: 如果遇到 "The model does not work with your current plan" 错误,')}
${chalk.gray(' 说明您的 Cursor 订阅不支持自定义模型')}
`;

  return {
    success: true,
    // manual=true tells the caller to print `instructions` instead of
    // treating this as an automatic configuration.
    manual: true,
    message: '需要手动配置(需 Cursor Pro)',
    instructions: instructions,
    configInfo: {
      protocol: 'OpenAI',
      baseUrl: baseUrl,
      apiKey: apiKey,
    }
  };
}
|
|
74
|
+
|
|
75
|
+
/**
 * Cursor keeps model settings in its internal database, so the most we can
 * detect from disk is whether Cursor itself is installed (its per-user
 * settings directory exists).
 * @returns {Promise<{configured: boolean, message?: string}>}
 */
configureCursor.checkStatus = async function() {
  const { configDir } = getCursorConfigPaths();

  const installed = await fs.pathExists(configDir);
  if (!installed) {
    return { configured: false };
  }

  return {
    configured: true,
    message: 'Cursor 已安装(配置状态需在 Cursor 中查看)',
  };
};
|
|
90
|
+
|
|
91
|
+
/**
 * Revoking is manual: Cursor stores custom-model config in its internal
 * database, which we cannot edit from files — we can only tell the user
 * where to delete it.
 */
configureCursor.revoke = async function() {
  const notice = 'Cursor 配置需要在 Cursor Settings → Models 中手动删除';
  console.log(chalk.yellow(notice));
};
|
|
98
|
+
|
|
99
|
+
export default configureCursor;
|
|
@@ -0,0 +1,253 @@
|
|
|
1
|
+
import fs from 'fs-extra';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import os from 'os';
|
|
4
|
+
import { API_CONFIG, getModels, getApiBaseForProtocol } from '../index.js';
|
|
5
|
+
|
|
6
|
+
/**
 * Coerce a value to a string (falsy values become '') and strip any
 * trailing slashes.
 * @param {string} [url]
 * @returns {string} normalized base URL without a trailing slash
 */
function normalizeBaseUrl(url) {
  const text = `${url || ''}`;
  return text.replace(/\/+$/, '');
}
|
|
9
|
+
|
|
10
|
+
/**
 * Resolve Droid (Factory) configuration locations under ~/.factory.
 * @returns {{configDir: string, configFile: string, settingsFile: string}}
 */
function getDroidConfigPaths() {
  const configDir = path.join(os.homedir(), '.factory');
  return {
    configDir,
    configFile: path.join(configDir, 'config.json'),
    settingsFile: path.join(configDir, 'settings.json'),
  };
}
|
|
21
|
+
|
|
22
|
+
// Tag that marks display names of custom-model entries created by this tool.
const DUOJIE_MODEL_TAG = '[1314mc]';

/**
 * Heuristically decide whether a Factory custom-model entry was created by
 * this tool: its base_url points at a duojie/1314mc host, or its display
 * name contains our tag.
 * NOTE(review): generateDisplayName produces suffixes like '[1314mc OpenAI]',
 * which do NOT contain '[1314mc]' — matching of generated entries relies on
 * the base_url check; confirm this is intended.
 * @param {{base_url?: string, model_display_name?: string}} m
 * @returns {boolean|string|undefined} truthy when the entry is ours
 */
function isDuojieModel(m) {
  const url = m.base_url;
  if (url && (url.includes('duojie') || url.includes('1314mc'))) {
    return true;
  }
  return m.model_display_name && m.model_display_name.includes(DUOJIE_MODEL_TAG);
}
|
|
30
|
+
|
|
31
|
+
/**
 * Build a human-readable display name for a model id, tagged with the
 * protocol, e.g. "GLM 5 Turbo [1314mc OpenAI]" / "GLM 5 Turbo [1314mc
 * Anthropic]". Dual-protocol models therefore yield two distinct entries.
 * @param {string} modelId - raw model id, e.g. 'gpt-5.4'
 * @param {string} api - protocol string, classified via providerForApi()
 * @returns {string} display name with the 1314mc protocol suffix
 */
function generateDisplayName(modelId, api) {
  let name = modelId;
  // Strip or replace well-known vendor prefixes before prettifying.
  if (name.startsWith('claude-')) {
    name = name.substring(7);
  } else if (name.startsWith('gpt-')) {
    name = 'GPT ' + name.substring(4);
  } else if (name.startsWith('gemini-')) {
    name = 'Gemini ' + name.substring(7);
  } else if (name.startsWith('glm-')) {
    name = 'GLM-' + name.substring(4);
  }

  // Title-case each dash-separated part, keeping purely numeric parts as-is.
  name = name
    .split('-')
    .map(part => {
      if (/^\d+$/.test(part)) return part;
      return part.charAt(0).toUpperCase() + part.slice(1);
    })
    .join(' ');

  // Re-join adjacent version digits split by the dash-to-space step:
  // "5 4" -> "5.4".
  name = name.replace(/(\d+)\s+(\d+)(?=\s|$)/g, '$1.$2');

  const provider = providerForApi(api);
  const suffix = provider === 'openai' ? 'OpenAI' : 'Anthropic';
  return `${name} [1314mc ${suffix}]`;
}
|
|
61
|
+
|
|
62
|
+
/**
 * Map a protocol string to the Factory provider type: any OpenAI-style
 * protocol name yields 'openai'; everything else (including undefined)
 * yields 'anthropic'. Matching is case-insensitive.
 * @param {string} [api] - protocol identifier
 * @returns {'openai'|'anthropic'}
 */
function providerForApi(api) {
  const normalized = `${api || ''}`.toLowerCase();
  const openaiProtocols = new Set([
    'openai', 'openai-response', 'openai-responses',
    'openai-completion', 'openai-completions', 'responses', 'chat',
  ]);
  return openaiProtocols.has(normalized) ? 'openai' : 'anthropic';
}
|
|
78
|
+
|
|
79
|
+
/**
 * Configure Droid (Factory) by writing every available 1314mc model into
 * custom_models in ~/.factory/config.json — one entry per supported
 * protocol — while preserving the user's own custom models.
 * @param {string} apiKey - 1314mc API key stored in each model entry
 * @returns {Promise<{success: boolean, message: string, configPath?: string, hint?: string}>}
 */
export async function configureDroid(apiKey) {
  const paths = getDroidConfigPaths();

  try {
    // Make sure the config directory exists
    await fs.ensureDir(paths.configDir);

    // 1. Read the existing config, if any (unparseable files treated as empty)
    let existingConfig = {};
    if (await fs.pathExists(paths.configFile)) {
      try {
        existingConfig = await fs.readJson(paths.configFile);
      } catch {
        existingConfig = {};
      }
    }

    // 2. Fetch the model lists
    const models = getModels();
    const rawModels = API_CONFIG.rawModels || [];

    // Map model id -> supported endpoint types.
    // NOTE(review): endpointsMap is built but never read below — confirm
    // whether it was meant to filter each model's `apis`.
    const endpointsMap = {};
    for (const m of rawModels) {
      endpointsMap[m.id] = m.supported_endpoint_types || [];
    }

    // 3. Build the 1314mc custom_models entries
    const duojieModels = [];

    // All model categories flattened into one list
    const allModels = [
      ...(models.claude || []),
      ...(models.gpt || []),
      ...(models.gemini || []),
      ...(models.other || []),
    ];

    // Put the preferred default model first so it's easy to pick in the UI
    const preferredId = 'gpt-5.4';
    const preferred = allModels.find(m => m.id === preferredId) || null;
    const orderedModels = preferred
      ? [preferred, ...allModels.filter(m => m.id !== preferredId)]
      : allModels;

    for (const m of orderedModels) {
      // One entry per protocol; default to Anthropic messages when unspecified
      const apis = m.apis || ['anthropic-messages'];
      for (const api of apis) {
        const provider = providerForApi(api);
        duojieModels.push({
          model_display_name: generateDisplayName(m.id, api),
          model: m.id,
          base_url: normalizeBaseUrl(getApiBaseForProtocol(api)),
          api_key: apiKey,
          provider: provider,
          supports_vision: true,
          max_tokens: 16384,
        });
      }
    }

    if (duojieModels.length === 0) {
      return {
        success: false,
        message: '没有可用的模型',
      };
    }

    // 4. Merge configs, keeping the user's other custom models
    let existingCustomModels = existingConfig.custom_models || [];

    // Drop previously-written 1314mc entries (matched by base_url or
    // display name — see isDuojieModel)
    existingCustomModels = existingCustomModels.filter(m => {
      return !isDuojieModel(m);
    });

    // 5. Combine old and new model lists
    const newConfig = {
      ...existingConfig,
      custom_models: [
        ...existingCustomModels,
        ...duojieModels,
      ],
    };

    // 6. Write the config file
    await fs.writeJson(paths.configFile, newConfig, { spaces: 2 });

    // 7. Clear customModels in settings.json if the file and field exist
    if (await fs.pathExists(paths.settingsFile)) {
      try {
        const settings = await fs.readJson(paths.settingsFile);
        if (Array.isArray(settings.customModels) && settings.customModels.length > 0) {
          settings.customModels = [];
          await fs.writeJson(paths.settingsFile, settings, { spaces: 2 });
        }
      } catch {
        // ignore — must not break the main flow
      }
    }

    return {
      success: true,
      message: `已配置 ${duojieModels.length} 个模型 → ${paths.configFile}`,
      configPath: paths.configFile,
      hint: '重启 Factory 使配置生效',
    };
  } catch (error) {
    // Surface the failure to the caller instead of throwing.
    return {
      success: false,
      message: `配置失败: ${error.message}`,
    };
  }
}
|
|
197
|
+
|
|
198
|
+
/**
 * Check ~/.factory/config.json for custom models and report whether any of
 * them were written by this tool (see isDuojieModel).
 * @returns {Promise<{configured: boolean, message?: string}>}
 */
configureDroid.checkStatus = async function() {
  const paths = getDroidConfigPaths();

  if (!(await fs.pathExists(paths.configFile))) {
    return { configured: false };
  }

  try {
    const config = await fs.readJson(paths.configFile);
    const customModels = config.custom_models || [];
    const ours = customModels.filter(m => isDuojieModel(m));

    if (ours.length > 0) {
      return {
        configured: true,
        message: `已配置 ${ours.length} 个 1314mc 模型`,
      };
    }
    if (customModels.length > 0) {
      return {
        configured: true,
        message: '已配置(非 1314mc)',
      };
    }
  } catch {
    // ignore — an unreadable config is reported as unconfigured
  }

  return { configured: false };
};
|
|
230
|
+
|
|
231
|
+
/**
 * Remove only the 1314mc entries from custom_models in config.json,
 * leaving the user's own custom models untouched. Best-effort: read/write
 * failures are ignored.
 */
configureDroid.revoke = async function() {
  const paths = getDroidConfigPaths();

  if (!(await fs.pathExists(paths.configFile))) {
    return;
  }

  try {
    const config = await fs.readJson(paths.configFile);
    if (config.custom_models) {
      config.custom_models = config.custom_models.filter(m => !isDuojieModel(m));
    }
    await fs.writeJson(paths.configFile, config, { spaces: 2 });
  } catch {
    // ignore — best-effort cleanup
  }
};
|
|
252
|
+
|
|
253
|
+
export default configureDroid;
|