@simonyea/holysheep-cli 1.0.6 → 1.0.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@simonyea/holysheep-cli",
3
- "version": "1.0.6",
3
+ "version": "1.0.9",
4
4
  "description": "一键配置所有 AI 编程工具接入 HolySheep API — Claude Code / Codex / Gemini CLI / OpenCode / OpenClaw / Aider / Cursor",
5
5
  "keywords": [
6
6
  "claude",
@@ -174,6 +174,11 @@ async function setup(options) {
174
174
  spinner.info(`${chalk.yellow(tool.name)} 需要手动配置:`)
175
175
  result.steps.forEach((s, i) => console.log(` ${chalk.gray(i + 1 + '.')} ${s}`))
176
176
  results.push({ tool, status: 'manual' })
177
+ } else if (result.warning) {
178
+ if (result.envVars) Object.assign(envVarsToWrite, result.envVars)
179
+ spinner.warn(`${chalk.yellow(tool.name)} ${chalk.gray(result.file ? `→ ${result.file}` : '')}`)
180
+ console.log(chalk.yellow(` ⚠️ ${result.warning}`))
181
+ results.push({ tool, status: 'warning', result })
177
182
  } else {
178
183
  if (result.envVars) Object.assign(envVarsToWrite, result.envVars)
179
184
  spinner.succeed(`${chalk.green(tool.name)} ${chalk.gray(result.file ? `→ ${result.file}` : '')}`)
@@ -1,43 +1,105 @@
1
1
  /**
2
- * Codex CLI 适配器 (@openai/codex v0.46+)
2
+ * Codex CLI 适配器 (@openai/codex v0.111+, Rust 版)
3
3
  *
4
- * 配置文件: ~/.codex/config.json(JSON 格式,不是 yaml!)
4
+ * ⚠️ 重要:v0.111.0 Codex 已切换到 Rust 实现(codex-rs)
5
+ * 配置文件变更:~/.codex/config.toml(TOML 格式,不是 JSON!)
5
6
  *
6
- * 正确格式:
7
- * {
8
- * "model": "claude-sonnet-4-5",
9
- * "provider": "holysheep", // 指定默认 provider
10
- * "providers": {
11
- * "holysheep": {
12
- * "name": "HolySheep",
13
- * "baseURL": "https://api.holysheep.ai/v1",
14
- * "envKey": "OPENAI_API_KEY"
15
- * }
16
- * }
17
- * }
7
+ * 正确格式(config.toml):
18
8
  *
19
- * 环境变量: OPENAI_API_KEY(通过 envKey 指定)
20
- * 注意: Codex 会优先使用账号登录,需要设置 provider 才能绕过
9
+ * model = "gpt-5.4"
10
+ * model_provider = "holysheep"
11
+ *
12
+ * [model_providers.holysheep]
13
+ * name = "HolySheep"
14
+ * base_url = "https://api.holysheep.ai/v1"
15
+ * env_key = "OPENAI_API_KEY"
16
+ *
17
+ * 注意:旧的 config.json 会被 Rust Codex 忽略!
21
18
  */
22
19
  const fs = require('fs')
23
20
  const path = require('path')
24
21
  const os = require('os')
25
22
 
26
23
  const CONFIG_DIR = path.join(os.homedir(), '.codex')
27
- const CONFIG_FILE = path.join(CONFIG_DIR, 'config.json')
24
+ const CONFIG_FILE = path.join(CONFIG_DIR, 'config.toml')
25
+ // 保留 JSON 兼容性(老版本 TypeScript Codex 用)
26
+ const CONFIG_FILE_JSON = path.join(CONFIG_DIR, 'config.json')
28
27
 
29
/**
 * Read the raw text of ~/.codex/config.toml.
 * Returns the empty string when the file is absent or unreadable
 * (a deliberate best-effort: callers treat '' as "no config yet").
 */
function readTomlConfig() {
  let raw = ''
  try {
    if (fs.existsSync(CONFIG_FILE)) {
      raw = fs.readFileSync(CONFIG_FILE, 'utf8')
    }
  } catch {}
  return raw
}
39
+
40
/**
 * True when config.toml already routes Codex through HolySheep:
 * the default provider is set AND a holysheep.ai base_url is present.
 */
function isConfiguredInToml() {
  const toml = readTomlConfig()
  const markers = [
    'model_provider = "holysheep"',
    'base_url',
    'holysheep.ai',
  ]
  return markers.every((marker) => toml.includes(marker))
}
49
+
50
/**
 * Write ~/.codex/config.toml by merging: existing user content is kept,
 * only the holysheep-related entries (`model`, `model_provider`,
 * `[model_providers.holysheep]`) are removed and re-inserted.
 *
 * @param {string} apiKey        unused here — the key is delivered via the
 *                               OPENAI_API_KEY env var that env_key points at;
 *                               kept for signature parity with the JSON writer
 * @param {string} baseUrlOpenAI OpenAI-compatible endpoint (with /v1)
 * @param {string} model         default model name; falls back to 'gpt-5.4'
 */
function writeTomlConfig(apiKey, baseUrlOpenAI, model) {
  if (!fs.existsSync(CONFIG_DIR)) {
    fs.mkdirSync(CONFIG_DIR, { recursive: true })
  }

  let content = readTomlConfig()

  // Strip any previous holysheep-related entries.
  // FIX: anchored with ^…$/m (as reset() already does) so a `model = …` or
  // `model_provider = …` line at the very FIRST line of the file is also
  // removed — the old \n-delimited patterns required a preceding newline,
  // which left a duplicate key at the top and produced invalid TOML.
  content = content
    .replace(/^model\s*=\s*"[^"]*"\s*$\n?/gm, '')
    .replace(/^model_provider\s*=\s*"holysheep"\s*$\n?/gm, '')
    // [^[]* already crosses newlines, so the `s` flag was a no-op; keep the
    // replacement table-aware: stop at the next table header, or eat to EOF.
    .replace(/\[model_providers\.holysheep\][^[]*(\[|$)/g, (m, end) => (end === '[' ? '[' : ''))
    .trim()

  // Rebuild: holysheep defaults first, preserved user content in the middle,
  // provider table at the end; collapse runs of blank lines.
  const newConfig = [
    `model = "${model || 'gpt-5.4'}"`,
    `model_provider = "holysheep"`,
    '',
    content,
    '',
    `[model_providers.holysheep]`,
    `name = "HolySheep"`,
    `base_url = "${baseUrlOpenAI}"`,
    `env_key = "OPENAI_API_KEY"`,
    '',
  ].join('\n').replace(/\n{3,}/g, '\n\n').trim() + '\n'

  fs.writeFileSync(CONFIG_FILE, newConfig, 'utf8')
}
37
83
 
38
/**
 * Best-effort update of the legacy ~/.codex/config.json, which only the old
 * TypeScript Codex reads.
 *
 * FIX: honor the "IfNeeded" in the name — only touch config.json when it
 * already exists. The previous code unconditionally CREATED the file, leaving
 * stale state behind for users who only ever had the Rust Codex.
 * All errors are swallowed on purpose: this is purely a compatibility shim.
 *
 * @param {string} apiKey        unused — the key travels via OPENAI_API_KEY
 *                               (see envKey below); kept for signature parity
 * @param {string} baseUrlOpenAI OpenAI-compatible endpoint (with /v1)
 * @param {string} model         default model name; falls back to 'gpt-5.4'
 */
function writeJsonConfigIfNeeded(apiKey, baseUrlOpenAI, model) {
  try {
    if (!fs.existsSync(CONFIG_FILE_JSON)) return // no legacy install to keep compatible
    const jsonConfig = JSON.parse(fs.readFileSync(CONFIG_FILE_JSON, 'utf8'))
    jsonConfig.model = model || 'gpt-5.4'
    jsonConfig.provider = 'holysheep'
    if (!jsonConfig.providers) jsonConfig.providers = {}
    jsonConfig.providers.holysheep = {
      name: 'HolySheep',
      baseURL: baseUrlOpenAI,
      envKey: 'OPENAI_API_KEY',
    }
    fs.writeFileSync(CONFIG_FILE_JSON, JSON.stringify(jsonConfig, null, 2), 'utf8')
  } catch {}
}
42
104
 
43
105
  module.exports = {
@@ -47,30 +109,20 @@ module.exports = {
47
109
  return require('../utils/which').commandExists('codex')
48
110
  },
49
111
  isConfigured() {
50
- const c = readConfig()
51
- return c.provider === 'holysheep' &&
52
- !!c.providers?.holysheep?.baseURL?.includes('holysheep')
112
+ return isConfiguredInToml()
53
113
  },
54
114
  configure(apiKey, _baseUrlAnthropicNoV1, baseUrlOpenAI) {
55
- const config = readConfig()
56
-
57
- // 设置 HolySheep 为默认 provider
58
- config.provider = 'holysheep'
59
- config.model = 'gpt-5.1-codex' // HolySheep 支持的 Codex 专属模型
115
+ const model = 'gpt-5.4'
60
116
 
61
- if (!config.providers) config.providers = {}
62
- config.providers.holysheep = {
63
- name: 'HolySheep',
64
- baseURL: baseUrlOpenAI, // https://api.holysheep.ai/v1
65
- envKey: 'OPENAI_API_KEY',
66
- }
117
+ // 写入 TOML(Rust Codex v0.111+ 使用)
118
+ writeTomlConfig(apiKey, baseUrlOpenAI, model)
67
119
 
68
- writeConfig(config)
120
+ // 同时写 JSON(兼容旧版 TypeScript Codex)
121
+ writeJsonConfigIfNeeded(apiKey, baseUrlOpenAI, model)
69
122
 
70
123
  return {
71
124
  file: CONFIG_FILE,
72
125
  hot: false,
73
- // 需要同时设置环境变量,供 envKey 读取
74
126
  envVars: {
75
127
  OPENAI_API_KEY: apiKey,
76
128
  OPENAI_BASE_URL: baseUrlOpenAI,
@@ -78,15 +130,30 @@ module.exports = {
78
130
  }
79
131
  },
80
132
  reset() {
81
- const config = readConfig()
82
- if (config.provider === 'holysheep') {
83
- delete config.provider
84
- delete config.providers?.holysheep
133
+ // 清理 TOML
134
+ if (fs.existsSync(CONFIG_FILE)) {
135
+ let content = readTomlConfig()
136
+ content = content
137
+ .replace(/^model\s*=\s*"[^"]*"\n/m, '')
138
+ .replace(/^model_provider\s*=\s*"holysheep"\n/m, '')
139
+ .replace(/\[model_providers\.holysheep\][^\[]*(\[|$)/gs, (m, end) => end === '[' ? '[' : '')
140
+ .trim() + '\n'
141
+ fs.writeFileSync(CONFIG_FILE, content, 'utf8')
142
+ }
143
+ // 清理 JSON
144
+ if (fs.existsSync(CONFIG_FILE_JSON)) {
145
+ try {
146
+ const c = JSON.parse(fs.readFileSync(CONFIG_FILE_JSON, 'utf8'))
147
+ if (c.provider === 'holysheep') {
148
+ delete c.provider
149
+ delete c.providers?.holysheep
150
+ }
151
+ fs.writeFileSync(CONFIG_FILE_JSON, JSON.stringify(c, null, 2), 'utf8')
152
+ } catch {}
85
153
  }
86
- writeConfig(config)
87
154
  },
88
155
  getConfigPath() { return CONFIG_FILE },
89
- hint: '切换后重开终端生效;用 codex --provider holysheep 指定',
156
+ hint: '切换后重开终端生效;Rust Codex (v0.111+) 使用 config.toml',
90
157
  installCmd: 'npm install -g @openai/codex',
91
158
  docsUrl: 'https://github.com/openai/codex',
92
159
  envVarFormat: 'openai',
@@ -1,23 +1,22 @@
1
1
  /**
2
2
  * Gemini CLI 适配器 (@google/gemini-cli)
3
3
  *
4
- * Gemini CLI 目前不原生支持自定义 base_url
5
- * 但可通过以下方式接入 OpenAI 兼容端点:
4
+ * ⚠️ 重要:Gemini CLI 不支持自定义 base_url/中继
5
+ * 它只能连接 Google 官方 Gemini API。
6
6
  *
7
- * 方式 A(推荐): 写入 ~/.gemini/settings.json 中的 otherAIProvider
8
- * 支持 openaiCompatible 类型,需要 Gemini CLI >= 0.20
7
+ * 配置方式:
8
+ * 1. settings.json 写入 selectedAuthType = "gemini-api-key"(跳过登录向导)
9
+ * 2. 设置环境变量 GEMINI_API_KEY(需要 Google Gemini API Key,从 aistudio.google.com 获取)
9
10
  *
10
- * 方式 B: 通过 GEMINI_API_KEY 环境变量 + GOOGLE_API_KEY 覆盖
11
- * (仅适用于少数版本)
12
- *
13
- * 实际测试: Gemini CLI 0.30.0 支持 otherAIProvider 配置
14
- * 参考: https://github.com/google-gemini/gemini-cli/blob/main/docs/configuration.md
11
+ * HolySheep 暂不支持 Gemini CLI 中继(Gemini CLI 使用 Google 专有协议,非 OpenAI 兼容格式)
12
+ * 建议用户使用 Claude Code / Codex / Aider 等支持中继的工具。
15
13
  */
16
- const fs = require('fs')
14
+ const fs = require('fs')
17
15
  const path = require('path')
18
- const os = require('os')
16
+ const os = require('os')
19
17
 
20
- const SETTINGS_FILE = path.join(os.homedir(), '.gemini', 'settings.json')
18
+ const GEMINI_DIR = path.join(os.homedir(), '.gemini')
19
+ const SETTINGS_FILE = path.join(GEMINI_DIR, 'settings.json')
21
20
 
22
21
  function readSettings() {
23
22
  try {
@@ -29,42 +28,55 @@ function readSettings() {
29
28
  }
30
29
 
31
30
function writeSettings(data) {
  // Create ~/.gemini on first use, then persist pretty-printed JSON.
  if (!fs.existsSync(GEMINI_DIR)) {
    fs.mkdirSync(GEMINI_DIR, { recursive: true })
  }
  fs.writeFileSync(SETTINGS_FILE, JSON.stringify(data, null, 2), 'utf8')
}
 
37
35
  module.exports = {
38
36
  name: 'Gemini CLI',
39
37
  id: 'gemini-cli',
38
+
40
39
  checkInstalled() {
41
40
  return require('../utils/which').commandExists('gemini')
42
41
  },
42
+
43
43
  isConfigured() {
44
+ // 检查是否设置了 GEMINI_API_KEY 环境变量
45
+ if (process.env.GEMINI_API_KEY) return true
46
+ // 检查 settings.json 是否已跳过向导
44
47
  const s = readSettings()
45
- return !!(s.otherAIProvider?.url?.includes('holysheep'))
48
+ return s.selectedAuthType === 'gemini-api-key'
46
49
  },
47
- configure(apiKey, baseUrlOpenAI) {
50
+
51
+ configure(apiKey, _baseUrlAnthropicNoV1, _baseUrlOpenAI) {
52
+ // Gemini CLI 不支持 HolySheep 中继,只能配置为使用官方 Gemini API Key 模式
53
+ // 写入 settings.json 跳过认证向导
48
54
  const settings = readSettings()
55
+ settings.selectedAuthType = 'gemini-api-key'
56
+ writeSettings(settings)
49
57
 
50
- // Gemini CLI otherAIProvider 支持 OpenAI 兼容格式
51
- settings.otherAIProvider = {
52
- url: baseUrlOpenAI, // 带 /v1
53
- apiKey: apiKey,
54
- model: 'claude-sonnet-4-5', // 默认推荐模型
58
+ // 环境变量:GEMINI_API_KEY 需要用户自己的 Google Gemini API Key
59
+ // HolySheep API Key (cr_xxx) 无法用于 Gemini CLI
60
+ return {
61
+ file: SETTINGS_FILE,
62
+ hot: false,
63
+ // 不注入 GEMINI_API_KEY,因为 HolySheep key 对 Gemini CLI 无效
64
+ // 用户需要手动设置真正的 Gemini API Key
65
+ envVars: {},
66
+ warning: 'Gemini CLI 需要 Google 官方 Gemini API Key,无法使用 HolySheep 中继。\n请从 https://aistudio.google.com/apikey 获取 API Key 后设置环境变量:\n export GEMINI_API_KEY="your-google-api-key"',
55
67
  }
56
-
57
- // 同时保留原有 general 配置
58
- writeSettings(settings)
59
- return { file: SETTINGS_FILE, hot: false }
60
68
  },
69
+
61
70
  reset() {
62
71
  const settings = readSettings()
63
- delete settings.otherAIProvider
72
+ delete settings.selectedAuthType
64
73
  writeSettings(settings)
65
74
  },
75
+
66
76
  getConfigPath() { return SETTINGS_FILE },
67
- hint: '使用 gemini -m claude-sonnet-4-5 指定模型',
77
+ hint: 'Gemini CLI 不支持 HolySheep 中继,需使用 Google 官方 Gemini API Key',
68
78
  installCmd: 'npm install -g @google/gemini-cli',
69
79
  docsUrl: 'https://github.com/google-gemini/gemini-cli',
80
+ envVarFormat: 'gemini',
81
+ unsupported: true, // 标记为不支持中继
70
82
  }