@simonyea/holysheep-cli 1.0.5 → 1.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/package.json +1 -1
  2. package/src/tools/codex.js +112 -45
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@simonyea/holysheep-cli",
3
- "version": "1.0.5",
3
+ "version": "1.0.8",
4
4
  "description": "一键配置所有 AI 编程工具接入 HolySheep API — Claude Code / Codex / Gemini CLI / OpenCode / OpenClaw / Aider / Cursor",
5
5
  "keywords": [
6
6
  "claude",
@@ -1,43 +1,105 @@
1
1
  /**
2
- * Codex CLI 适配器 (@openai/codex v0.46+)
2
+ * Codex CLI 适配器 (@openai/codex v0.111+, Rust 版)
3
3
  *
4
- * 配置文件: ~/.codex/config.json(JSON 格式,不是 yaml)
4
+ * ⚠️ 重要:v0.111.0 Codex 已切换到 Rust 实现(codex-rs)
5
+ * 配置文件变更:~/.codex/config.toml(TOML 格式,不是 JSON!)
5
6
  *
6
- * 正确格式:
7
- * {
8
- * "model": "claude-sonnet-4-5",
9
- * "provider": "holysheep", // 指定默认 provider
10
- * "providers": {
11
- * "holysheep": {
12
- * "name": "HolySheep",
13
- * "baseURL": "https://api.holysheep.ai/v1",
14
- * "envKey": "OPENAI_API_KEY"
15
- * }
16
- * }
17
- * }
7
+ * 正确格式(config.toml):
18
8
  *
19
- * 环境变量: OPENAI_API_KEY(通过 envKey 指定)
20
- * 注意: Codex 会优先使用账号登录,需要设置 provider 才能绕过
9
+ * model = "gpt-5.4"
10
+ * model_provider = "holysheep"
11
+ *
12
+ * [model_providers.holysheep]
13
+ * name = "HolySheep"
14
+ * base_url = "https://api.holysheep.ai/v1"
15
+ * env_key = "OPENAI_API_KEY"
16
+ *
17
+ * 注意:旧的 config.json 会被 Rust Codex 忽略!
21
18
  */
22
19
  const fs = require('fs')
23
20
  const path = require('path')
24
21
  const os = require('os')
25
22
 
26
23
  const CONFIG_DIR = path.join(os.homedir(), '.codex')
27
- const CONFIG_FILE = path.join(CONFIG_DIR, 'config.json')
24
+ const CONFIG_FILE = path.join(CONFIG_DIR, 'config.toml')
25
+ // 保留 JSON 兼容性(老版本 TypeScript Codex 用)
26
+ const CONFIG_FILE_JSON = path.join(CONFIG_DIR, 'config.json')
28
27
 
29
- function readConfig() {
28
+ /**
29
+ * 读取 TOML config(简单解析,不依赖 toml 库)
30
+ */
31
+ function readTomlConfig() {
30
32
  try {
31
33
  if (fs.existsSync(CONFIG_FILE)) {
32
- return JSON.parse(fs.readFileSync(CONFIG_FILE, 'utf8'))
34
+ return fs.readFileSync(CONFIG_FILE, 'utf8')
33
35
  }
34
36
  } catch {}
35
- return {}
37
+ return ''
38
+ }
39
+
40
+ /**
41
+ * 检查 TOML 里是否已配置 holysheep
42
+ */
43
+ function isConfiguredInToml() {
44
+ const content = readTomlConfig()
45
+ return content.includes('model_provider = "holysheep"') &&
46
+ content.includes('base_url') &&
47
+ content.includes('holysheep.ai')
48
+ }
49
+
50
+ /**
51
+ * 写入 TOML config(合并方式:保留已有内容,只更新 holysheep 部分)
52
+ */
53
+ function writeTomlConfig(apiKey, baseUrlOpenAI, model) {
54
+ if (!fs.existsSync(CONFIG_DIR)) {
55
+ fs.mkdirSync(CONFIG_DIR, { recursive: true })
56
+ }
57
+
58
+ let content = readTomlConfig()
59
+
60
+ // 移除旧的 holysheep 相关配置
61
+ content = content
62
+ .replace(/\nmodel\s*=\s*"[^"]*"\n/g, '\n')
63
+ .replace(/\nmodel_provider\s*=\s*"holysheep"\n/g, '\n')
64
+ .replace(/\[model_providers\.holysheep\][^\[]*(\[|$)/gs, (m, end) => end === '[' ? '[' : '')
65
+ .trim()
66
+
67
+ // 在开头插入 holysheep 配置
68
+ const newConfig = [
69
+ `model = "${model || 'gpt-5.4'}"`,
70
+ `model_provider = "holysheep"`,
71
+ '',
72
+ content,
73
+ '',
74
+ `[model_providers.holysheep]`,
75
+ `name = "HolySheep"`,
76
+ `base_url = "${baseUrlOpenAI}"`,
77
+ `env_key = "OPENAI_API_KEY"`,
78
+ '',
79
+ ].join('\n').replace(/\n{3,}/g, '\n\n').trim() + '\n'
80
+
81
+ fs.writeFileSync(CONFIG_FILE, newConfig, 'utf8')
36
82
  }
37
83
 
38
- function writeConfig(data) {
39
- if (!fs.existsSync(CONFIG_DIR)) fs.mkdirSync(CONFIG_DIR, { recursive: true })
40
- fs.writeFileSync(CONFIG_FILE, JSON.stringify(data, null, 2), 'utf8')
84
+ /**
85
+ * 同时写 JSON(兼容旧版 TypeScript Codex,如果存在的话)
86
+ */
87
+ function writeJsonConfigIfNeeded(apiKey, baseUrlOpenAI, model) {
88
+ try {
89
+ let jsonConfig = {}
90
+ if (fs.existsSync(CONFIG_FILE_JSON)) {
91
+ jsonConfig = JSON.parse(fs.readFileSync(CONFIG_FILE_JSON, 'utf8'))
92
+ }
93
+ jsonConfig.model = model || 'gpt-5.4'
94
+ jsonConfig.provider = 'holysheep'
95
+ if (!jsonConfig.providers) jsonConfig.providers = {}
96
+ jsonConfig.providers.holysheep = {
97
+ name: 'HolySheep',
98
+ baseURL: baseUrlOpenAI,
99
+ envKey: 'OPENAI_API_KEY',
100
+ }
101
+ fs.writeFileSync(CONFIG_FILE_JSON, JSON.stringify(jsonConfig, null, 2), 'utf8')
102
+ } catch {}
41
103
  }
42
104
 
43
105
  module.exports = {
@@ -47,30 +109,20 @@ module.exports = {
47
109
  return require('../utils/which').commandExists('codex')
48
110
  },
49
111
  isConfigured() {
50
- const c = readConfig()
51
- return c.provider === 'holysheep' &&
52
- !!c.providers?.holysheep?.baseURL?.includes('holysheep')
112
+ return isConfiguredInToml()
53
113
  },
54
114
  configure(apiKey, _baseUrlAnthropicNoV1, baseUrlOpenAI) {
55
- const config = readConfig()
56
-
57
- // 设置 HolySheep 为默认 provider
58
- config.provider = 'holysheep'
59
- config.model = config.model || 'claude-sonnet-4-5'
115
+ const model = 'gpt-5.4'
60
116
 
61
- if (!config.providers) config.providers = {}
62
- config.providers.holysheep = {
63
- name: 'HolySheep',
64
- baseURL: baseUrlOpenAI, // https://api.holysheep.ai/v1
65
- envKey: 'OPENAI_API_KEY',
66
- }
117
+ // 写入 TOML(Rust Codex v0.111+ 使用)
118
+ writeTomlConfig(apiKey, baseUrlOpenAI, model)
67
119
 
68
- writeConfig(config)
120
+ // 同时写 JSON(兼容旧版 TypeScript Codex)
121
+ writeJsonConfigIfNeeded(apiKey, baseUrlOpenAI, model)
69
122
 
70
123
  return {
71
124
  file: CONFIG_FILE,
72
125
  hot: false,
73
- // 需要同时设置环境变量,供 envKey 读取
74
126
  envVars: {
75
127
  OPENAI_API_KEY: apiKey,
76
128
  OPENAI_BASE_URL: baseUrlOpenAI,
@@ -78,15 +130,30 @@ module.exports = {
78
130
  }
79
131
  },
80
132
  reset() {
81
- const config = readConfig()
82
- if (config.provider === 'holysheep') {
83
- delete config.provider
84
- delete config.providers?.holysheep
133
+ // 清理 TOML
134
+ if (fs.existsSync(CONFIG_FILE)) {
135
+ let content = readTomlConfig()
136
+ content = content
137
+ .replace(/^model\s*=\s*"[^"]*"\n/m, '')
138
+ .replace(/^model_provider\s*=\s*"holysheep"\n/m, '')
139
+ .replace(/\[model_providers\.holysheep\][^\[]*(\[|$)/gs, (m, end) => end === '[' ? '[' : '')
140
+ .trim() + '\n'
141
+ fs.writeFileSync(CONFIG_FILE, content, 'utf8')
142
+ }
143
+ // 清理 JSON
144
+ if (fs.existsSync(CONFIG_FILE_JSON)) {
145
+ try {
146
+ const c = JSON.parse(fs.readFileSync(CONFIG_FILE_JSON, 'utf8'))
147
+ if (c.provider === 'holysheep') {
148
+ delete c.provider
149
+ delete c.providers?.holysheep
150
+ }
151
+ fs.writeFileSync(CONFIG_FILE_JSON, JSON.stringify(c, null, 2), 'utf8')
152
+ } catch {}
85
153
  }
86
- writeConfig(config)
87
154
  },
88
155
  getConfigPath() { return CONFIG_FILE },
89
- hint: '切换后重开终端生效;用 codex --provider holysheep 指定',
156
+ hint: '切换后重开终端生效;Rust Codex (v0.111+) 使用 config.toml',
90
157
  installCmd: 'npm install -g @openai/codex',
91
158
  docsUrl: 'https://github.com/openai/codex',
92
159
  envVarFormat: 'openai',