@simonyea/holysheep-cli 1.7.20 → 1.7.22

This diff shows the changes between two publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@simonyea/holysheep-cli",
3
- "version": "1.7.20",
3
+ "version": "1.7.22",
4
4
  "description": "Claude Code/Cursor/Cline API relay for China — ¥1=$1, WeChat/Alipay payment, no credit card, no VPN. One command setup for all AI coding tools.",
5
5
  "keywords": [
6
6
  "openai-china",
@@ -6,6 +6,19 @@ const http = require('http')
6
6
  const path = require('path')
7
7
  const os = require('os')
8
8
  const fetch = global.fetch || require('node-fetch')
9
+ const _nodeFetch = require('node-fetch')
10
+
11
+ // Windows 上 api.holysheep.ai 有 IPv6 DNS 记录,但 Windows Server 普遍未启用 IPv6。
12
+ // Node.js 默认 IPv6 优先,会导致每次请求先卡在 IPv6 连接超时再降级 IPv4,
13
+ // 超过 OpenClaw embedded agent 的 timeout 阈值,触发 "LLM request timed out"。
14
+ // 解决方案:Windows 下强制用 node-fetch + https.Agent({family:4}) 只走 IPv4。
15
+ function upstreamFetch(url, options) {
16
+ if (process.platform === 'win32' && String(url).startsWith('https://')) {
17
+ const https = require('https')
18
+ return _nodeFetch(url, { ...options, agent: new https.Agent({ family: 4 }) })
19
+ }
20
+ return fetch(url, options)
21
+ }
9
22
 
10
23
  const OPENCLAW_DIR = path.join(os.homedir(), '.openclaw')
11
24
  const BRIDGE_CONFIG_FILE = path.join(OPENCLAW_DIR, 'holysheep-bridge.json')
@@ -516,7 +529,7 @@ async function relayOpenAIRequest(requestBody, config, res) {
516
529
  ...requestBody,
517
530
  stream: requestBody.stream === true,
518
531
  }
519
- const upstream = await fetch(`${config.baseUrlOpenAI.replace(/\/+$/, '')}/chat/completions`, {
532
+ const upstream = await upstreamFetch(`${config.baseUrlOpenAI.replace(/\/+$/, '')}/chat/completions`, {
520
533
  method: 'POST',
521
534
  headers: {
522
535
  'content-type': 'application/json',
@@ -584,7 +597,7 @@ async function relayAnthropicStream(requestBody, config, route, res) {
584
597
 
585
598
  let upstream
586
599
  try {
587
- upstream = await fetch(baseUrl, {
600
+ upstream = await upstreamFetch(baseUrl, {
588
601
  method: 'POST',
589
602
  headers: {
590
603
  'content-type': 'application/json',
@@ -707,7 +720,7 @@ async function relayAnthropicRequest(requestBody, config, route, res) {
707
720
  ? `${config.baseUrlAnthropic.replace(/\/+$/, '')}/minimax/v1/messages`
708
721
  : `${config.baseUrlAnthropic.replace(/\/+$/, '')}/v1/messages`
709
722
 
710
- const upstream = await fetch(baseUrl, {
723
+ const upstream = await upstreamFetch(baseUrl, {
711
724
  method: 'POST',
712
725
  headers: {
713
726
  'content-type': 'application/json',
@@ -205,10 +205,10 @@ function startBridge(port) {
205
205
  const scriptPath = path.join(__dirname, '..', 'index.js')
206
206
  // Windows: use shell+node command to avoid ERROR_FILE_NOT_FOUND with process.execPath
207
207
  // (Windows Store / nvm paths can be unresolvable when spawning detached)
208
+ // Windows: use 'node' (resolved via PATH by CreateProcess) without shell:true.
209
+ // shell:true spawns cmd.exe /c which exits after the command, breaking detach.
208
210
  const spawnCmd = isWin ? 'node' : process.execPath
209
- const spawnOpts = isWin
210
- ? { shell: true, detached: true, stdio: 'ignore', windowsHide: true }
211
- : { detached: true, stdio: 'ignore' }
211
+ const spawnOpts = { detached: true, stdio: 'ignore', windowsHide: true }
212
212
  const child = spawn(spawnCmd, [scriptPath, 'openclaw-bridge', '--port', String(port)], spawnOpts)
213
213
  child.unref()
214
214
  return waitForBridge(port)
@@ -416,7 +416,7 @@ function normalizeRequestedModels(selectedModels) {
416
416
  return Array.from(new Set(requestedModels))
417
417
  }
418
418
 
419
- function buildManagedPlan(baseUrlBridge, primaryModel, selectedModels) {
419
+ function buildManagedPlan(baseUrlBridge, apiKey, primaryModel, selectedModels) {
420
420
  const requestedModels = normalizeRequestedModels(selectedModels)
421
421
  const managedModelRefs = requestedModels.map((model) => `${OPENCLAW_PROVIDER_NAME}/${model}`)
422
422
  const fallbackPrimaryModel = pickPrimaryModel(primaryModel, requestedModels)
@@ -428,6 +428,7 @@ function buildManagedPlan(baseUrlBridge, primaryModel, selectedModels) {
428
428
  providers: {
429
429
  [OPENCLAW_PROVIDER_NAME]: {
430
430
  baseUrl: baseUrlBridge,
431
+ apiKey,
431
432
  api: 'openai-completions',
432
433
  models: requestedModels.map(buildModelEntry),
433
434
  },
@@ -445,10 +446,10 @@ function isHolySheepProvider(provider) {
445
446
  )
446
447
  }
447
448
 
448
- function writeManagedConfig(baseConfig, bridgeBaseUrl, primaryModel, selectedModels, gatewayPort) {
449
+ function writeManagedConfig(baseConfig, bridgeBaseUrl, apiKey, primaryModel, selectedModels, gatewayPort) {
449
450
  fs.mkdirSync(OPENCLAW_DIR, { recursive: true })
450
451
 
451
- const plan = buildManagedPlan(bridgeBaseUrl, primaryModel, selectedModels)
452
+ const plan = buildManagedPlan(bridgeBaseUrl, apiKey, primaryModel, selectedModels)
452
453
  const existingProviders = baseConfig?.models?.providers || {}
453
454
  const managedProviderIds = Object.entries(existingProviders)
454
455
  .filter(([providerId, provider]) => providerId === OPENCLAW_PROVIDER_NAME || isHolySheepProvider(provider))
@@ -692,6 +693,7 @@ module.exports = {
692
693
  const plan = writeManagedConfig(
693
694
  result.status === 0 ? readConfig() : {},
694
695
  bridgeBaseUrl,
696
+ apiKey,
695
697
  resolvedPrimaryModel,
696
698
  selectedModels,
697
699
  gatewayPort,