@simonyea/holysheep-cli 1.6.14 → 1.6.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/tools/openclaw-bridge.js +127 -24
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@simonyea/holysheep-cli",
|
|
3
|
-
"version": "1.6.
|
|
3
|
+
"version": "1.6.15",
|
|
4
4
|
"description": "Claude Code/Cursor/Cline API relay for China — ¥1=$1, WeChat/Alipay payment, no credit card, no VPN. One command setup for all AI coding tools.",
|
|
5
5
|
"keywords": [
|
|
6
6
|
"openai-china",
|
|
@@ -59,6 +59,7 @@ function sendOpenAIStream(res, payload) {
|
|
|
59
59
|
const choice = payload.choices?.[0] || {}
|
|
60
60
|
const message = choice.message || {}
|
|
61
61
|
const created = payload.created || Math.floor(Date.now() / 1000)
|
|
62
|
+
const messageContent = extractOpenAITextContent(message.content)
|
|
62
63
|
|
|
63
64
|
res.writeHead(200, {
|
|
64
65
|
'content-type': 'text/event-stream; charset=utf-8',
|
|
@@ -75,7 +76,7 @@ function sendOpenAIStream(res, payload) {
|
|
|
75
76
|
index: 0,
|
|
76
77
|
delta: {
|
|
77
78
|
role: 'assistant',
|
|
78
|
-
...(
|
|
79
|
+
...(messageContent ? { content: messageContent } : {}),
|
|
79
80
|
...(message.tool_calls ? { tool_calls: message.tool_calls } : {}),
|
|
80
81
|
},
|
|
81
82
|
finish_reason: null,
|
|
@@ -101,11 +102,30 @@ function normalizeText(value) {
|
|
|
101
102
|
if (Array.isArray(value)) return value.map(normalizeText).filter(Boolean).join('\n')
|
|
102
103
|
if (value && typeof value === 'object') {
|
|
103
104
|
if (typeof value.text === 'string') return value.text
|
|
105
|
+
if (typeof value.output_text === 'string') return value.output_text
|
|
104
106
|
if (typeof value.content === 'string') return value.content
|
|
107
|
+
if (typeof value.value === 'string') return value.value
|
|
105
108
|
}
|
|
106
109
|
return value == null ? '' : String(value)
|
|
107
110
|
}
|
|
108
111
|
|
|
112
|
+
// Collapse an OpenAI-style message `content` field — either a plain string
// or an array of typed parts — into a single flat string. Non-array,
// non-string values are delegated to normalizeText.
function extractOpenAITextContent(content) {
  if (typeof content === 'string') return content
  if (!Array.isArray(content)) return normalizeText(content)

  const textPartTypes = ['text', 'output_text', 'input_text']
  const pieces = []
  for (const part of content) {
    if (typeof part === 'string') {
      pieces.push(part)
      continue
    }
    if (!part || typeof part !== 'object') {
      pieces.push('')
      continue
    }
    if (textPartTypes.includes(part.type)) {
      pieces.push(normalizeText(part.text))
      continue
    }
    // Unknown part shape: try the common text-carrying fields.
    pieces.push(normalizeText(part.text || part.content || part.value))
  }
  return pieces.filter(Boolean).join('')
}
|
|
128
|
+
|
|
109
129
|
function parseDataUrl(url) {
|
|
110
130
|
const match = String(url || '').match(/^data:([^;]+);base64,(.+)$/)
|
|
111
131
|
if (!match) return null
|
|
@@ -314,10 +334,102 @@ function pickRoute(model) {
|
|
|
314
334
|
return 'openai'
|
|
315
335
|
}
|
|
316
336
|
|
|
317
|
-
function
|
|
337
|
+
// Flatten a Responses-API `output` array into the assistant's plain text.
// Accepts both wrapped items (`type: 'message'` with nested `content`) and
// bare items that carry their own `content`, keeping only textual parts.
function responseOutputToText(output) {
  return (output || [])
    .flatMap((item) => {
      if (item?.type === 'message') return item.content || []
      if (item?.content) return item.content
      return []
    })
    .filter((item) => item?.type === 'output_text' || item?.type === 'text')
    // Fall back to '' (not the item itself): passing the whole object would
    // reach normalizeText's String(...) fallback and inject "[object Object]"
    // into the assistant output when a part has no text/content field.
    .map((item) => extractOpenAITextContent(item.text || item.content || ''))
    .filter(Boolean)
    .join('')
}
|
|
349
|
+
|
|
350
|
+
// Convert Responses-API `function_call` output items into OpenAI
// chat-completion `tool_calls` entries. Items without a name are skipped;
// missing ids are synthesized as call_1, call_2, … over the kept items.
function responseOutputToToolCalls(output) {
  const calls = []
  for (const item of output || []) {
    if (item?.type !== 'function_call' || !item.name) continue
    const args = typeof item.arguments === 'string'
      ? item.arguments
      : JSON.stringify(item.arguments || {})
    calls.push({
      id: item.call_id || item.id || `call_${calls.length + 1}`,
      type: 'function',
      function: {
        name: item.name,
        arguments: args,
      },
    })
  }
  return calls
}
|
|
364
|
+
|
|
365
|
+
// Convert a Responses-API body (optionally wrapped under `.response`) into
// an OpenAI chat.completion object, mapping output text, tool calls, status
// and token usage onto the chat-completion shape.
function responseToChatCompletion(responseBody, requestedModel) {
  const hasWrapper = responseBody?.response && typeof responseBody.response === 'object'
  const response = hasWrapper ? responseBody.response : responseBody

  const text = responseOutputToText(response.output)
  const toolCalls = responseOutputToToolCalls(response.output)
  const status = String(response.status || '').toLowerCase()

  const message = { role: 'assistant', content: text || null }
  if (toolCalls.length) message.tool_calls = toolCalls

  const result = {
    id: response.id || `chatcmpl_${Date.now()}`,
    object: 'chat.completion',
    created: response.created_at || Math.floor(Date.now() / 1000),
    model: requestedModel || response.model,
    choices: [{
      index: 0,
      message,
      // Any non-completed status (incomplete, failed, …) is surfaced as
      // 'length'; an absent status is treated as a normal stop.
      finish_reason: (status === 'completed' || status === '') ? 'stop' : 'length',
    }],
    usage: undefined,
  }

  if (response.usage) {
    // Accept both Responses-API (input/output_tokens) and chat-completion
    // (prompt/completion_tokens) usage field names.
    const promptTokens = response.usage.input_tokens || response.usage.prompt_tokens || 0
    const outputTokens = response.usage.output_tokens || response.usage.completion_tokens || 0
    result.usage = {
      prompt_tokens: promptTokens,
      completion_tokens: outputTokens,
      total_tokens: response.usage.total_tokens || (promptTokens + outputTokens),
    }
  }

  return result
}
|
|
400
|
+
|
|
401
|
+
// Normalize any upstream JSON body to OpenAI chat-completion conventions:
// Responses-API bodies are converted outright; chat.completion(.chunk)
// bodies get their first choice's content flattened to a plain string.
// Anything else passes through untouched.
function normalizeOpenAICompatibleResponse(parsed, requestedModel) {
  if (!parsed || typeof parsed !== 'object') return parsed

  const looksLikeResponsesApi = parsed.object === 'response' || Array.isArray(parsed.output)
  if (looksLikeResponsesApi) return responseToChatCompletion(parsed, requestedModel)

  const isChatShape = parsed.object === 'chat.completion' || parsed.object === 'chat.completion.chunk'
  if (!isChatShape) return parsed

  const choice = parsed.choices?.[0]
  if (choice?.message) {
    const flattened = extractOpenAITextContent(choice.message.content)
    choice.message = { ...choice.message, content: flattened || null }
  }
  if (choice?.delta?.content != null) {
    choice.delta = { ...choice.delta, content: extractOpenAITextContent(choice.delta.content) }
  }
  return parsed
}
|
|
426
|
+
|
|
427
|
+
function parseOpenAIStreamText(text, requestedModel) {
|
|
318
428
|
try {
|
|
319
429
|
const parsed = JSON.parse(String(text || ''))
|
|
320
|
-
if (parsed && typeof parsed === 'object')
|
|
430
|
+
if (parsed && typeof parsed === 'object') {
|
|
431
|
+
return normalizeOpenAICompatibleResponse(parsed, requestedModel)
|
|
432
|
+
}
|
|
321
433
|
} catch {}
|
|
322
434
|
|
|
323
435
|
const blocks = String(text || '').split(/\r?\n\r?\n+/).filter(Boolean)
|
|
@@ -356,11 +468,7 @@ function parseOpenAIStreamText(text) {
|
|
|
356
468
|
if (eventName === 'response.completed' && chunk.response) {
|
|
357
469
|
responseCompleted = chunk.response
|
|
358
470
|
if (!content) {
|
|
359
|
-
const outputText = (chunk.response.output
|
|
360
|
-
.flatMap((item) => item?.content || [])
|
|
361
|
-
.filter((item) => item?.type === 'output_text' && typeof item.text === 'string')
|
|
362
|
-
.map((item) => item.text)
|
|
363
|
-
.join('')
|
|
471
|
+
const outputText = responseOutputToText(chunk.response.output)
|
|
364
472
|
if (outputText) content = outputText
|
|
365
473
|
}
|
|
366
474
|
continue
|
|
@@ -369,28 +477,23 @@ function parseOpenAIStreamText(text) {
|
|
|
369
477
|
finalChunk = chunk
|
|
370
478
|
const choice = chunk.choices?.[0] || {}
|
|
371
479
|
const delta = choice.delta || {}
|
|
372
|
-
|
|
373
|
-
|
|
480
|
+
const deltaContent = extractOpenAITextContent(delta.content)
|
|
481
|
+
const messageContent = extractOpenAITextContent(choice.message?.content)
|
|
482
|
+
if (deltaContent) content += deltaContent
|
|
483
|
+
else if (messageContent) content += messageContent
|
|
374
484
|
}
|
|
375
485
|
|
|
376
486
|
if (responseCompleted) {
|
|
377
|
-
|
|
378
|
-
|
|
379
|
-
|
|
380
|
-
created: responseCompleted.created_at || Math.floor(Date.now() / 1000),
|
|
381
|
-
model: responseCompleted.model,
|
|
382
|
-
choices: [{
|
|
383
|
-
index: 0,
|
|
384
|
-
message: { role: 'assistant', content: content || null },
|
|
385
|
-
finish_reason: responseCompleted.status === 'completed' ? 'stop' : 'length',
|
|
386
|
-
}],
|
|
387
|
-
usage: responseCompleted.usage,
|
|
487
|
+
const completion = responseToChatCompletion(responseCompleted, requestedModel || responseCompleted.model)
|
|
488
|
+
if (!completion.choices?.[0]?.message?.content && content) {
|
|
489
|
+
completion.choices[0].message.content = content
|
|
388
490
|
}
|
|
491
|
+
return completion
|
|
389
492
|
}
|
|
390
493
|
|
|
391
494
|
if (!finalChunk) return null
|
|
392
495
|
|
|
393
|
-
return {
|
|
496
|
+
return normalizeOpenAICompatibleResponse({
|
|
394
497
|
id: finalChunk.id || `chatcmpl_${Date.now()}`,
|
|
395
498
|
object: 'chat.completion',
|
|
396
499
|
created: finalChunk.created || Math.floor(Date.now() / 1000),
|
|
@@ -401,7 +504,7 @@ function parseOpenAIStreamText(text) {
|
|
|
401
504
|
finish_reason: finalChunk.choices?.[0]?.finish_reason || 'stop',
|
|
402
505
|
}],
|
|
403
506
|
usage: finalChunk.usage,
|
|
404
|
-
}
|
|
507
|
+
}, requestedModel)
|
|
405
508
|
}
|
|
406
509
|
|
|
407
510
|
async function relayOpenAIRequest(requestBody, config, res) {
|
|
@@ -420,7 +523,7 @@ async function relayOpenAIRequest(requestBody, config, res) {
|
|
|
420
523
|
})
|
|
421
524
|
|
|
422
525
|
const text = await upstream.text()
|
|
423
|
-
const parsed = parseOpenAIStreamText(text)
|
|
526
|
+
const parsed = parseOpenAIStreamText(text, requestBody.model)
|
|
424
527
|
if (upstream.ok && parsed) {
|
|
425
528
|
if (requestBody.stream) return sendOpenAIStream(res, parsed)
|
|
426
529
|
return sendJson(res, upstream.status, parsed)
|