@simonyea/holysheep-cli 1.6.12 → 1.6.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -218,6 +218,8 @@ A: OpenClaw 需要 Node.js 20+,运行 `node --version` 确认版本后重试
218
218
 
219
219
  ## Changelog
220
220
 
221
+ - **v1.6.14** — OpenClaw 新增 `gpt-5.3-codex-spark` 模型,通过本地 bridge 路由到 HolySheep `/v1`
222
+ - **v1.6.13** — Codex 配置改为直接写 `api_key` 到 config.toml,不再依赖环境变量,修复 Windows 上 setup 后需重启终端才能生效的问题,现无需重启即可直接使用;同时精简工具列表,只保留 Claude Code / Codex / Droid / OpenClaw
221
223
  - **v1.6.12** — 修复 OpenClaw Bridge 对 GPT-5.4 的流式响应转换,避免 `holysheep/gpt-5.4` 在 OpenClaw 中报错;同时增强 Dashboard URL 解析,减少安装后浏览器打开黑屏/空白页
222
224
  - **v1.6.11** — OpenClaw 新增本地 HolySheep Bridge,统一暴露单一 `holysheep` provider 以支持自由切换 GPT / Claude / MiniMax;同时保留用户所选默认模型,不再强制 GPT-5.4 作为 primary
223
225
  - **v1.6.10** — 将可运行的 OpenClaw runtime(含 npx 回退)视为已安装,避免 Windows/Node 环境下重复提示安装;同时修复 Droid CLI 的 GPT `/v1` 接入地址并同步写入 `~/.factory/config.json`
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@simonyea/holysheep-cli",
3
- "version": "1.6.12",
3
+ "version": "1.6.14",
4
4
  "description": "Claude Code/Cursor/Cline API relay for China — ¥1=$1, WeChat/Alipay payment, no credit card, no VPN. One command setup for all AI coding tools.",
5
5
  "keywords": [
6
6
  "openai-china",
@@ -112,11 +112,12 @@ async function setup(options) {
112
112
 
113
113
  // Step 1.5: 选择要配置的模型
114
114
  const MODEL_CHOICES = [
115
- { name: 'gpt-5.4 (GPT 5.4, 通用编码)', value: 'gpt-5.4', checked: true },
116
- { name: 'claude-sonnet-4-6 (Sonnet 4.6, 均衡推荐)', value: 'claude-sonnet-4-6', checked: true },
117
- { name: 'claude-opus-4-6 (Opus 4.6, 强力旗舰)', value: 'claude-opus-4-6', checked: true },
118
- { name: 'MiniMax-M2.7-highspeed (高速经济版)', value: 'MiniMax-M2.7-highspeed', checked: true },
119
- { name: 'claude-haiku-4-5 (Haiku 4.5, 轻快便宜)', value: 'claude-haiku-4-5', checked: true },
115
+ { name: 'gpt-5.4 (GPT 5.4, 通用编码)', value: 'gpt-5.4', checked: true },
116
+ { name: 'gpt-5.3-codex-spark (GPT 5.3 Codex Spark, 编码)', value: 'gpt-5.3-codex-spark', checked: true },
117
+ { name: 'claude-sonnet-4-6 (Sonnet 4.6, 均衡推荐)', value: 'claude-sonnet-4-6', checked: true },
118
+ { name: 'claude-opus-4-6 (Opus 4.6, 强力旗舰)', value: 'claude-opus-4-6', checked: true },
119
+ { name: 'MiniMax-M2.7-highspeed (高速经济版)', value: 'MiniMax-M2.7-highspeed', checked: true },
120
+ { name: 'claude-haiku-4-5 (Haiku 4.5, 轻快便宜)', value: 'claude-haiku-4-5', checked: true },
120
121
  ]
121
122
  const { selectedModels } = await inquirer.prompt([{
122
123
  type: 'checkbox',
@@ -12,9 +12,10 @@
12
12
  * [model_providers.holysheep]
13
13
  * name = "HolySheep"
14
14
  * base_url = "https://api.holysheep.ai/v1"
15
- * env_key = "OPENAI_API_KEY"
15
+ * api_key = "cr_xxx"
16
16
  *
17
17
  * 注意:旧的 config.json 会被 Rust Codex 忽略!
18
+ * 注意:使用 api_key 而非 env_key,避免 Windows 上需要重启终端才能生效的问题
18
19
  */
19
20
  const fs = require('fs')
20
21
  const path = require('path')
@@ -110,7 +111,7 @@ function writeTomlConfig(apiKey, baseUrlOpenAI, model) {
110
111
  `[model_providers.holysheep]`,
111
112
  `name = "HolySheep"`,
112
113
  `base_url = "${baseUrlOpenAI}"`,
113
- `env_key = "OPENAI_API_KEY"`,
114
+ `api_key = "${apiKey}"`,
114
115
  '',
115
116
  ].join('\n')
116
117
 
@@ -134,12 +135,12 @@ function writeJsonConfigIfNeeded(apiKey, baseUrlOpenAI, model) {
134
135
  jsonConfig.model_providers.holysheep = {
135
136
  name: 'HolySheep',
136
137
  base_url: baseUrlOpenAI,
137
- env_key: 'OPENAI_API_KEY',
138
+ api_key: apiKey,
138
139
  }
139
140
  jsonConfig.providers.holysheep = {
140
141
  name: 'HolySheep',
141
142
  baseURL: baseUrlOpenAI,
142
- envKey: 'OPENAI_API_KEY',
143
+ apiKey,
143
144
  }
144
145
  fs.writeFileSync(CONFIG_FILE_JSON, JSON.stringify(jsonConfig, null, 2), 'utf8')
145
146
  } catch {}
@@ -5,10 +5,5 @@ module.exports = [
5
5
  require('./claude-code'),
6
6
  require('./codex'),
7
7
  require('./droid'),
8
- require('./gemini-cli'),
9
- require('./opencode'),
10
8
  require('./openclaw'),
11
- require('./aider'),
12
- require('./cursor'),
13
- require('./continue'),
14
9
  ]
@@ -18,6 +18,7 @@ const DEFAULT_BRIDGE_PORT = 18788
18
18
  const DEFAULT_GATEWAY_PORT = 18789
19
19
  const MAX_PORT_SCAN = 40
20
20
  const OPENCLAW_DEFAULT_MODEL = 'gpt-5.4'
21
+ const OPENCLAW_DEFAULT_CODEX_SPARK_MODEL = 'gpt-5.3-codex-spark'
21
22
  const OPENCLAW_DEFAULT_CLAUDE_MODEL = 'claude-sonnet-4-6'
22
23
  const OPENCLAW_DEFAULT_MINIMAX_MODEL = 'MiniMax-M2.7-highspeed'
23
24
  const OPENCLAW_PROVIDER_NAME = 'holysheep'
@@ -312,7 +313,7 @@ function buildModelEntry(id) {
312
313
  function normalizeRequestedModels(selectedModels) {
313
314
  const requestedModels = Array.isArray(selectedModels) && selectedModels.length > 0
314
315
  ? [...selectedModels]
315
- : [OPENCLAW_DEFAULT_MODEL, OPENCLAW_DEFAULT_CLAUDE_MODEL, OPENCLAW_DEFAULT_MINIMAX_MODEL]
316
+ : [OPENCLAW_DEFAULT_MODEL, OPENCLAW_DEFAULT_CODEX_SPARK_MODEL, OPENCLAW_DEFAULT_CLAUDE_MODEL, OPENCLAW_DEFAULT_MINIMAX_MODEL]
316
317
 
317
318
  if (!requestedModels.includes(OPENCLAW_DEFAULT_MODEL)) requestedModels.unshift(OPENCLAW_DEFAULT_MODEL)
318
319
  return Array.from(new Set(requestedModels))