@simonyea/holysheep-cli 1.7.17 → 1.7.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@simonyea/holysheep-cli",
3
- "version": "1.7.17",
3
+ "version": "1.7.19",
4
4
  "description": "Claude Code/Cursor/Cline API relay for China — ¥1=$1, WeChat/Alipay payment, no credit card, no VPN. One command setup for all AI coding tools.",
5
5
  "keywords": [
6
6
  "openai-china",
@@ -256,13 +256,13 @@ function convertToolChoice(toolChoice) {
256
256
  return { type: 'auto' }
257
257
  }
258
258
 
259
- function buildAnthropicPayload(requestBody) {
259
+ function buildAnthropicPayload(requestBody, stream = false) {
260
260
  const converted = convertOpenAIToAnthropicMessages(requestBody.messages)
261
261
  const payload = {
262
262
  model: requestBody.model,
263
263
  max_tokens: requestBody.max_tokens || requestBody.max_completion_tokens || requestBody.max_output_tokens || 4096,
264
264
  messages: converted.messages,
265
- stream: false,
265
+ stream: Boolean(stream),
266
266
  }
267
267
 
268
268
  if (converted.system) payload.system = converted.system
@@ -540,7 +540,156 @@ async function relayOpenAIRequest(requestBody, config, res) {
540
540
  res.end(text)
541
541
  }
542
542
 
543
// Reads a response body that may be either a WHATWG ReadableStream (native
// fetch) or an async-iterable Node.js stream (node-fetch v2), invoking
// onChunk(text) once per decoded piece.
//
// Bytes are decoded with a streaming TextDecoder in BOTH branches so that a
// multi-byte UTF-8 character split across chunk boundaries is reassembled
// correctly. (The original Node-stream branch used chunk.toString(), which
// emits U+FFFD replacement characters when a character straddles two chunks.)
//
// @param {ReadableStream|AsyncIterable|null|undefined} body - upstream response body
// @param {(text: string) => void} onChunk - receives each decoded string fragment
async function pipeStream(body, onChunk) {
  if (body == null) return
  if (typeof body.getReader === 'function') {
    // Native fetch: WHATWG ReadableStream of Uint8Array chunks.
    const reader = body.getReader()
    const decoder = new TextDecoder()
    try {
      while (true) {
        const { done, value } = await reader.read()
        if (done) break
        onChunk(decoder.decode(value, { stream: true }))
      }
      onChunk(decoder.decode()) // flush any buffered partial character
    } finally {
      reader.releaseLock()
    }
  } else {
    // node-fetch v2: async-iterable Node.js stream yielding Buffers (or strings
    // if an encoding was set on the stream).
    const decoder = new TextDecoder()
    for await (const chunk of body) {
      if (typeof chunk === 'string') {
        onChunk(chunk)
      } else {
        // Buffer is a Uint8Array subclass; stream:true buffers incomplete
        // multi-byte sequences until the next chunk arrives.
        onChunk(decoder.decode(chunk, { stream: true }))
      }
    }
    onChunk(decoder.decode()) // flush trailing partial character, if any
  }
}
565
+
566
// Anthropic SSE → OpenAI SSE real-time passthrough (avoids buffering the whole
// response, which caused OpenClaw timeouts).
//
// Converts the OpenAI-style requestBody into an Anthropic /v1/messages streaming
// request, then translates each upstream Anthropic SSE event into an OpenAI
// `chat.completion.chunk` event written directly to `res`.
//
// Params:
//   requestBody - OpenAI-format chat request (messages, model, tools, ...)
//   config      - must provide baseUrlAnthropic and apiKey
//   route       - 'minimax' selects the /minimax/v1/messages path; anything else
//                 uses the plain /v1/messages path
//   res         - Node HTTP response; written as text/event-stream
// Side effects: writes headers + SSE chunks to res and ends it; on upstream
// failure, responds via sendJson instead (no stream is started).
async function relayAnthropicStream(requestBody, config, route, res) {
  const payload = buildAnthropicPayload(requestBody, true)
  const baseUrl = route === 'minimax'
    ? `${config.baseUrlAnthropic.replace(/\/+$/, '')}/minimax/v1/messages`
    : `${config.baseUrlAnthropic.replace(/\/+$/, '')}/v1/messages`

  let upstream
  try {
    upstream = await fetch(baseUrl, {
      method: 'POST',
      headers: {
        'content-type': 'application/json',
        'x-api-key': config.apiKey,
        'anthropic-version': '2023-06-01',
        'user-agent': 'holysheep-openclaw-bridge/1.0',
      },
      body: JSON.stringify(payload),
    })
  } catch (err) {
    // Network-level failure before any SSE bytes were sent — safe to JSON-error.
    return sendJson(res, 500, { error: { message: err.message || 'Bridge upstream error' } })
  }

  if (!upstream.ok) {
    // Pass the upstream error payload through verbatim when it parses as JSON.
    let errBody
    try { errBody = JSON.parse(await upstream.text()) } catch { errBody = { error: { message: 'Upstream error' } } }
    return sendJson(res, upstream.status, errBody)
  }

  res.writeHead(200, {
    'content-type': 'text/event-stream; charset=utf-8',
    'cache-control': 'no-cache, no-transform',
    connection: 'keep-alive',
  })

  const msgId = `chatcmpl_${Date.now()}`
  const created = Math.floor(Date.now() / 1000)
  const model = requestBody.model
  // First chunk must carry the assistant role exactly once; headerSent guards
  // that across every event type that could arrive first.
  let headerSent = false
  let inputTokens = 0
  // tool_use streaming: accumulate per content-block index.
  const toolBlocks = {} // index → {id, name, argsBuf}

  // Emits one OpenAI chat.completion.chunk SSE frame on res.
  function writeChunk(delta, finishReason, usage) {
    const chunk = {
      id: msgId,
      object: 'chat.completion.chunk',
      created,
      model,
      choices: [{ index: 0, delta, finish_reason: finishReason || null }],
    }
    if (usage) chunk.usage = usage
    res.write(`data: ${JSON.stringify(chunk)}\n\n`)
  }

  // Translates a single Anthropic SSE event into zero or more OpenAI chunks.
  // Unknown events (e.g. ping, message_stop) fall through and are ignored.
  function handleEvent(event, data) {
    if (data === '[DONE]') return
    let obj
    try { obj = JSON.parse(data) } catch { return }

    if (event === 'message_start') {
      inputTokens = obj.message?.usage?.input_tokens || 0
      if (!headerSent) {
        writeChunk({ role: 'assistant', content: '' }, null, null)
        headerSent = true
      }
    } else if (event === 'content_block_start') {
      if (!headerSent) { writeChunk({ role: 'assistant', content: '' }, null, null); headerSent = true }
      const block = obj.content_block || {}
      if (block.type === 'tool_use') {
        // Announce the tool call with its id/name; arguments stream in later
        // via input_json_delta events.
        toolBlocks[obj.index] = { id: block.id, name: block.name, argsBuf: '' }
        writeChunk({
          tool_calls: [{ index: obj.index, id: block.id, type: 'function', function: { name: block.name, arguments: '' } }],
        }, null, null)
      }
    } else if (event === 'content_block_delta') {
      if (!headerSent) { writeChunk({ role: 'assistant', content: '' }, null, null); headerSent = true }
      const delta = obj.delta || {}
      if (delta.type === 'text_delta' && typeof delta.text === 'string') {
        writeChunk({ content: delta.text }, null, null)
      } else if (delta.type === 'input_json_delta' && typeof delta.partial_json === 'string') {
        if (toolBlocks[obj.index]) toolBlocks[obj.index].argsBuf += delta.partial_json
        writeChunk({ tool_calls: [{ index: obj.index, function: { arguments: delta.partial_json } }] }, null, null)
      }
    } else if (event === 'message_delta') {
      // Final event carrying stop_reason + output token count; emit the
      // terminating chunk with finish_reason and OpenAI-shaped usage.
      const stopReason = obj.delta?.stop_reason
      const finishReason = mapFinishReason(stopReason)
      const outputTokens = obj.usage?.output_tokens || 0
      const usage = {
        prompt_tokens: inputTokens,
        completion_tokens: outputTokens,
        total_tokens: inputTokens + outputTokens,
      }
      if (!headerSent) { writeChunk({ role: 'assistant', content: '' }, null, null); headerSent = true }
      writeChunk({}, finishReason, usage)
    }
  }

  // Minimal line-buffered SSE parser: `buf` holds the trailing partial line
  // between chunks; `curEvent` is the most recent `event:` field, consumed by
  // the next `data:` line.
  let buf = ''
  let curEvent = ''

  function processBuffer(text) {
    buf += text
    const lines = buf.split('\n')
    buf = lines.pop() ?? '' // keep incomplete last line for the next chunk
    for (const line of lines) {
      const trimmed = line.trimEnd()
      if (trimmed.startsWith('event:')) {
        curEvent = trimmed.slice(6).trim()
      } else if (trimmed.startsWith('data:')) {
        handleEvent(curEvent, trimmed.slice(5).trim())
        curEvent = ''
      }
    }
  }

  try {
    await pipeStream(upstream.body, processBuffer)
  } catch {}
  // NOTE(review): stream read errors are deliberately swallowed here —
  // best-effort delivery; the [DONE] sentinel below still closes the client
  // stream cleanly.

  if (!res.writableEnded) res.end('data: [DONE]\n\n')
}
688
+
543
689
  async function relayAnthropicRequest(requestBody, config, route, res) {
690
+ if (requestBody.stream === true) {
691
+ return relayAnthropicStream(requestBody, config, route, res)
692
+ }
544
693
  const payload = buildAnthropicPayload(requestBody)
545
694
  const baseUrl = route === 'minimax'
546
695
  ? `${config.baseUrlAnthropic.replace(/\/+$/, '')}/minimax/v1/messages`
@@ -613,9 +762,8 @@ async function checkGatewayHealth(config) {
613
762
 
614
763
  try {
615
764
  const response = await fetch(`http://${host}:${gatewayPort}/`, { method: 'GET', timeout })
616
- return response.ok
617
- ? { ok: true, reason: 'gateway_http_ok' }
618
- : { ok: false, reason: `gateway_http_${response.status}` }
765
+ // 任何 HTTP 响应都说明 Gateway 进程存活(4xx 可能是正常的路由/auth 行为)
766
+ return { ok: true, reason: `gateway_http_${response.status}` }
619
767
  } catch {
620
768
  return { ok: false, reason: 'gateway_http_unreachable' }
621
769
  }