@kenkaiiii/gg-ai 4.2.90 → 4.2.92
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +283 -211
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +13 -9
- package/dist/index.d.ts +13 -9
- package/dist/index.js +283 -211
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
@@ -58,44 +58,62 @@ var EventStream = class {
   }
 };
 var StreamResult = class {
-  events;
   response;
+  buffer = [];
+  done = false;
+  error = null;
   resolveResponse;
   rejectResponse;
-  …
-  constructor() {
-    this.events = new EventStream();
+  resolveWait = null;
+  constructor(generator) {
    this.response = new Promise((resolve, reject) => {
      this.resolveResponse = resolve;
      this.rejectResponse = reject;
    });
+    this.pump(generator);
  }
-  …
+  async pump(generator) {
+    try {
+      let next = await generator.next();
+      while (!next.done) {
+        this.buffer.push(next.value);
+        this.resolveWait?.();
+        this.resolveWait = null;
+        next = await generator.next();
+      }
+      this.done = true;
+      this.resolveResponse(next.value);
+      this.resolveWait?.();
+      this.resolveWait = null;
+    } catch (err) {
+      const error = err instanceof Error ? err : new Error(String(err));
+      this.error = error;
+      this.done = true;
+      this.rejectResponse(error);
+      this.resolveWait?.();
+      this.resolveWait = null;
+    }
  }
-  [Symbol.asyncIterator]() {
-    …
+  async *[Symbol.asyncIterator]() {
+    let index = 0;
+    while (true) {
+      while (index < this.buffer.length) {
+        yield this.buffer[index++];
+      }
+      if (this.error) throw this.error;
+      if (this.done) return;
+      await new Promise((r) => {
+        this.resolveWait = r;
+        if (this.buffer.length > index || this.done || this.error) {
+          this.resolveWait = null;
+          r();
+        }
+      });
+    }
  }
  then(onfulfilled, onrejected) {
-    this.drainEvents().catch(() => {
-    });
    return this.response.then(onfulfilled, onrejected);
  }
-  async drainEvents() {
-    if (this.hasConsumer) return;
-    this.hasConsumer = true;
-    for await (const _ of this.events) {
-    }
-  }
 };

 // src/providers/anthropic.ts
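Editor's note: the rewritten StreamResult wraps an async generator. pump() eagerly drains the generator into buffer, [Symbol.asyncIterator]() replays that buffer to a consumer, and then() makes the object awaitable for the generator's return value. A minimal consumer sketch under those semantics (the options shown here are placeholders, not from the diff):

    // Any of the package's stream functions now returns a StreamResult.
    const result = streamAnthropic({ apiKey: myKey, model: myModel, messages: [] });

    // Iterate events; pump() keeps buffering even if iteration starts late,
    // so no deltas are lost between creation and consumption.
    for await (const event of result) {
      if (event.type === "text_delta") process.stdout.write(event.text);
    }

    // The same object is thenable: awaiting it resolves to the final response.
    const response = await result;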
@@ -337,8 +355,10 @@ function toOpenAIMessages(messages, options) {
       content: parts || textParts || null,
       ...toolCalls?.length ? { tool_calls: toolCalls } : {}
     };
-    if (thinkingParts) {
-      assistantMsg.reasoning_content = thinkingParts;
+    if (thinkingParts) {
+      assistantMsg.reasoning_content = thinkingParts;
+    } else if (options?.provider === "moonshot" && toolCalls?.length) {
+      assistantMsg.reasoning_content = " ";
     }
     out.push(assistantMsg);
     continue;
@@ -405,11 +425,9 @@ function normalizeOpenAIStopReason(reason) {

 // src/providers/anthropic.ts
 function streamAnthropic(options) {
-  const result = new StreamResult();
-  runStream(options, result).catch((err) => result.abort(toError(err)));
-  return result;
+  return new StreamResult(runStream(options));
 }
-async function runStream(options, result) {
+async function* runStream(options) {
   const isOAuth = options.apiKey?.startsWith("sk-ant-oat");
   const client = new Anthropic({
     ...isOAuth ? { apiKey: null, authToken: options.apiKey } : { apiKey: options.apiKey },
@@ -479,116 +497,185 @@ async function runStream(options, result) {
     ...betaHeaders.length ? { headers: { "anthropic-beta": betaHeaders.join(",") } } : {}
   });
   const contentParts = [];
-  …
-  let …
-  …
-    result.push({ type: "thinking_delta", text: thinkingDelta });
-  });
-  stream2.on("streamEvent", (event) => {
-    if (event.type === "content_block_start") {
-      if (event.content_block.type === "tool_use") {
-        currentToolId = event.content_block.id;
-        currentToolName = event.content_block.name;
-      }
-      if (event.content_block.type === "server_tool_use") {
-        currentToolId = event.content_block.id;
-        currentToolName = event.content_block.name;
-      }
-    }
-  });
-  stream2.on("inputJson", (delta) => {
-    result.push({
-      type: "toolcall_delta",
-      id: currentToolId,
-      name: currentToolName,
-      argsJson: delta
-    });
-  });
-  stream2.on("contentBlock", (block) => {
-    if (block.type === "text") {
-      contentParts.push({ type: "text", text: block.text });
-    } else if (block.type === "thinking") {
-      contentParts.push({ type: "thinking", text: block.thinking, signature: block.signature });
-    } else if (block.type === "tool_use") {
-      const tc = {
-        type: "tool_call",
-        id: block.id,
-        name: block.name,
-        args: block.input
-      };
-      contentParts.push(tc);
-      result.push({
-        type: "toolcall_done",
-        id: tc.id,
-        name: tc.name,
-        args: tc.args
-      });
-    } else if (block.type === "server_tool_use") {
-      const stc = {
-        type: "server_tool_call",
-        id: block.id,
-        name: block.name,
-        input: block.input
-      };
-      contentParts.push(stc);
-      result.push({
-        type: "server_toolcall",
-        id: stc.id,
-        name: stc.name,
-        input: stc.input
-      });
-    } else {
-      const raw = block;
-      const blockType = raw.type;
-      if (blockType === "web_search_tool_result") {
-        const str = {
-          type: "server_tool_result",
-          toolUseId: raw.tool_use_id,
-          resultType: blockType,
-          data: raw
-        };
-        contentParts.push(str);
-        result.push({
-          type: "server_toolresult",
-          toolUseId: str.toolUseId,
-          resultType: str.resultType,
-          data: str.data
-        });
-      } else {
-        contentParts.push({ type: "raw", data: raw });
-      }
-    }
-  });
+  const blocks = /* @__PURE__ */ new Map();
+  let inputTokens = 0;
+  let outputTokens = 0;
+  let cacheRead;
+  let cacheWrite;
+  let stopReason = null;
   try {
-    const …
-    …
-    }
-    …
+    for await (const event of stream2) {
+      switch (event.type) {
+        case "message_start": {
+          const usage = event.message.usage;
+          inputTokens = usage.input_tokens;
+          const usageAny = usage;
+          if (usageAny.cache_read_input_tokens != null) {
+            cacheRead = usageAny.cache_read_input_tokens;
+          }
+          if (usageAny.cache_creation_input_tokens != null) {
+            cacheWrite = usageAny.cache_creation_input_tokens;
+          }
+          break;
+        }
+        case "content_block_start": {
+          const block = event.content_block;
+          const idx = event.index;
+          const accum = {
+            type: block.type,
+            text: "",
+            thinking: "",
+            signature: "",
+            toolId: "",
+            toolName: "",
+            argsJson: "",
+            input: void 0,
+            raw: null
+          };
+          if (block.type === "tool_use") {
+            accum.toolId = block.id;
+            accum.toolName = block.name;
+          } else if (block.type === "server_tool_use") {
+            accum.toolId = block.id;
+            accum.toolName = block.name;
+          } else if (block.type === "redacted_thinking") {
+            accum.raw = block;
+          }
+          blocks.set(idx, accum);
+          break;
+        }
+        case "content_block_delta": {
+          const accum = blocks.get(event.index);
+          if (!accum) break;
+          const delta = event.delta;
+          const deltaType = delta.type;
+          if (deltaType === "text_delta") {
+            const text = delta.text;
+            accum.text += text;
+            yield { type: "text_delta", text };
+          } else if (deltaType === "thinking_delta") {
+            const text = delta.thinking;
+            accum.thinking += text;
+            yield { type: "thinking_delta", text };
+          } else if (deltaType === "input_json_delta") {
+            const partialJson = delta.partial_json;
+            accum.argsJson += partialJson;
+            yield {
+              type: "toolcall_delta",
+              id: accum.toolId,
+              name: accum.toolName,
+              argsJson: partialJson
+            };
+          } else if (deltaType === "signature_delta") {
+            accum.signature = delta.signature;
+          }
+          break;
+        }
+        case "content_block_stop": {
+          const accum = blocks.get(event.index);
+          if (!accum) break;
+          if (accum.type === "text") {
+            contentParts.push({ type: "text", text: accum.text });
+          } else if (accum.type === "thinking") {
+            contentParts.push({
+              type: "thinking",
+              text: accum.thinking,
+              signature: accum.signature
+            });
+          } else if (accum.type === "tool_use") {
+            let args = {};
+            try {
+              args = JSON.parse(accum.argsJson);
+            } catch {
+            }
+            const tc = {
+              type: "tool_call",
+              id: accum.toolId,
+              name: accum.toolName,
+              args
+            };
+            contentParts.push(tc);
+            yield {
+              type: "toolcall_done",
+              id: tc.id,
+              name: tc.name,
+              args: tc.args
+            };
+          } else if (accum.type === "server_tool_use") {
+            const stc = {
+              type: "server_tool_call",
+              id: accum.toolId,
+              name: accum.toolName,
+              input: accum.input
+            };
+            contentParts.push(stc);
+            yield {
+              type: "server_toolcall",
+              id: stc.id,
+              name: stc.name,
+              input: stc.input
+            };
+          } else if (accum.type === "redacted_thinking" && accum.raw) {
+            contentParts.push({ type: "raw", data: accum.raw });
+          } else {
+            const msg = stream2.currentMessage;
+            const rawBlock = msg?.content[event.index];
+            if (rawBlock) {
+              const blockType = rawBlock.type;
+              if (blockType === "web_search_tool_result") {
+                const str = {
+                  type: "server_tool_result",
+                  toolUseId: rawBlock.tool_use_id,
+                  resultType: blockType,
+                  data: rawBlock
+                };
+                contentParts.push(str);
+                yield {
+                  type: "server_toolresult",
+                  toolUseId: str.toolUseId,
+                  resultType: str.resultType,
+                  data: str.data
+                };
+              } else {
+                contentParts.push({ type: "raw", data: rawBlock });
+              }
+            }
+          }
+          blocks.delete(event.index);
+          break;
+        }
+        case "message_delta": {
+          const delta = event.delta;
+          if (delta.stop_reason) {
+            stopReason = delta.stop_reason;
+          }
+          const usage = event.usage;
+          if (usage?.output_tokens != null) {
+            outputTokens = usage.output_tokens;
+          }
+          break;
        }
      }
-    }
-    result.push({ type: "done", stopReason });
-    result.complete(response);
+    }
   } catch (err) {
-    …
-    result.push({ type: "error", error });
-    result.abort(error);
+    throw toError(err);
   }
+  const normalizedStop = normalizeAnthropicStopReason(stopReason);
+  const response = {
+    message: {
+      role: "assistant",
+      content: contentParts.length > 0 ? contentParts : ""
+    },
+    stopReason: normalizedStop,
+    usage: {
+      inputTokens,
+      outputTokens,
+      ...cacheRead != null && { cacheRead },
+      ...cacheWrite != null && { cacheWrite }
+    }
+  };
+  yield { type: "done", stopReason: normalizedStop };
+  return response;
 }
 function toError(err) {
   if (err instanceof Anthropic.APIError) {
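Editor's note: the Anthropic provider drops the SDK's .on("streamEvent")/.on("contentBlock") callbacks and drives everything from one for-await loop, keeping one accumulator per content-block index in a Map. A reduced sketch of that per-index accumulation pattern (event shapes trimmed to the text case; not the package's full logic):

    // Minimal sketch of Map-keyed block accumulation, assuming events shaped
    // like Anthropic's content_block_start/delta/stop stream events.
    async function* collectText(events) {
      const blocks = new Map();
      for await (const event of events) {
        if (event.type === "content_block_start") {
          blocks.set(event.index, { text: "" });
        } else if (event.type === "content_block_delta" && event.delta.type === "text_delta") {
          const accum = blocks.get(event.index);
          if (!accum) continue;
          accum.text += event.delta.text;
          yield { type: "text_delta", text: event.delta.text };
        } else if (event.type === "content_block_stop") {
          blocks.delete(event.index); // block finished; accumulator retired
        }
      }
    }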
@@ -606,12 +693,10 @@ function toError(err) {
 // src/providers/openai.ts
 import OpenAI from "openai";
 function streamOpenAI(options) {
-  const result = new StreamResult();
-  const providerName = options.provider ?? "openai";
-  runStream2(options, result).catch((err) => result.abort(toError2(err, providerName)));
-  return result;
+  return new StreamResult(runStream2(options));
 }
-async function runStream2(options, result) {
+async function* runStream2(options) {
+  const providerName = options.provider ?? "openai";
   const client = new OpenAI({
     apiKey: options.apiKey,
     ...options.baseUrl ? { baseURL: options.baseUrl } : {},
@@ -645,9 +730,14 @@ async function runStream2(options, result) {
   if (usesThinkingParam) {
     params.thinking = options.thinking ? { type: "enabled" } : { type: "disabled" };
   }
-  const stream2 = await client.chat.completions.create(params, {
-    signal: options.signal ?? void 0
-  });
+  let stream2;
+  try {
+    stream2 = await client.chat.completions.create(params, {
+      signal: options.signal ?? void 0
+    });
+  } catch (err) {
+    throw toError2(err, providerName);
+  }
   const contentParts = [];
   const toolCallAccum = /* @__PURE__ */ new Map();
   let textAccum = "";
@@ -674,11 +764,11 @@ async function runStream2(options, result) {
       const reasoningContent = delta.reasoning_content;
       if (typeof reasoningContent === "string" && reasoningContent) {
         thinkingAccum += reasoningContent;
-        result.push({ type: "thinking_delta", text: reasoningContent });
+        yield { type: "thinking_delta", text: reasoningContent };
       }
       if (delta.content) {
         textAccum += delta.content;
-        result.push({ type: "text_delta", text: delta.content });
+        yield { type: "text_delta", text: delta.content };
       }
       if (delta.tool_calls) {
         for (const tc of delta.tool_calls) {
@@ -695,12 +785,12 @@ async function runStream2(options, result) {
           if (tc.function?.name) accum.name = tc.function.name;
           if (tc.function?.arguments) {
             accum.argsJson += tc.function.arguments;
-            result.push({
+            yield {
               type: "toolcall_delta",
               id: accum.id,
               name: accum.name,
               argsJson: tc.function.arguments
-            });
+            };
           }
         }
       }
@@ -724,12 +814,12 @@ async function runStream2(options, result) {
       args
     };
     contentParts.push(toolCall);
-    result.push({
+    yield {
       type: "toolcall_done",
       id: tc.id,
       name: tc.name,
       args
-    });
+    };
   }
   const stopReason = normalizeOpenAIStopReason(finishReason);
   const response = {
@@ -740,8 +830,8 @@ async function runStream2(options, result) {
     stopReason,
     usage: { inputTokens, outputTokens, ...cacheRead > 0 && { cacheRead } }
   };
-  result.push({ type: "done", stopReason });
-  result.complete(response);
+  yield { type: "done", stopReason };
+  return response;
 }
 function toError2(err, provider = "openai") {
   if (err instanceof OpenAI.APIError) {
@@ -765,11 +855,9 @@ function toError2(err, provider = "openai") {
 import os from "os";
 var DEFAULT_BASE_URL = "https://chatgpt.com/backend-api";
 function streamOpenAICodex(options) {
-  const result = new StreamResult();
-  runStream3(options, result).catch((err) => result.abort(toError3(err)));
-  return result;
+  return new StreamResult(runStream3(options));
 }
-async function runStream3(options, result) {
+async function* runStream3(options) {
   const baseUrl = (options.baseUrl || DEFAULT_BASE_URL).replace(/\/+$/, "");
   const url = `${baseUrl}/codex/responses`;
   const { system, input } = toCodexInput(options.messages);
@@ -846,11 +934,11 @@ Hint: Codex models require a ChatGPT Plus ($20/mo) or Pro ($200/mo) subscription
     if (type === "response.output_text.delta") {
       const delta = event.delta;
       textAccum += delta;
-      result.push({ type: "text_delta", text: delta });
+      yield { type: "text_delta", text: delta };
     }
     if (type === "response.reasoning_summary_text.delta") {
       const delta = event.delta;
-      result.push({ type: "thinking_delta", text: delta });
+      yield { type: "thinking_delta", text: delta };
     }
     if (type === "response.output_item.added") {
       const item = event.item;
@@ -868,12 +956,12 @@ Hint: Codex models require a ChatGPT Plus ($20/mo) or Pro ($200/mo) subscription
       for (const [key, tc] of toolCalls) {
         if (key.endsWith(`|${itemId}`)) {
           tc.argsJson += delta;
-          result.push({
+          yield {
             type: "toolcall_delta",
             id: tc.id,
             name: tc.name,
             argsJson: delta
-          });
+          };
           break;
         }
       }
@@ -901,12 +989,12 @@ Hint: Codex models require a ChatGPT Plus ($20/mo) or Pro ($200/mo) subscription
           args = JSON.parse(tc.argsJson);
         } catch {
         }
-        result.push({
+        yield {
          type: "toolcall_done",
          id: tc.id,
          name: tc.name,
          args
-        });
+        };
       }
     }
   }
@@ -946,8 +1034,8 @@ Hint: Codex models require a ChatGPT Plus ($20/mo) or Pro ($200/mo) subscription
     stopReason,
     usage: { inputTokens, outputTokens }
   };
-  result.push({ type: "done", stopReason });
-  result.complete(streamResponse);
+  yield { type: "done", stopReason };
+  return streamResponse;
 }
 async function* parseSSE(body) {
   const reader = body.getReader();
@@ -1061,13 +1149,6 @@ function toCodexTools(tools) {
     strict: null
   }));
 }
-function toError3(err) {
-  if (err instanceof ProviderError) return err;
-  if (err instanceof Error) {
-    return new ProviderError("openai", err.message, { cause: err });
-  }
-  return new ProviderError("openai", String(err));
-}

 // src/provider-registry.ts
 var ProviderRegistryImpl = class {
@@ -1141,32 +1222,28 @@ function stream(options) {
   return entry.stream(options);
 }
 function streamGLMWithFallback(options) {
-  const result = new StreamResult();
-  runGLMWithFallback(options, result).catch((err) => {
-    result.abort(err instanceof Error ? err : new Error(String(err)));
-  });
-  return result;
+  return new StreamResult(runGLMWithFallback(options));
 }
-async function runGLMWithFallback(options, result) {
-  const …
-  …
+async function* runGLMWithFallback(options) {
+  const coding = streamOpenAI({ ...options, baseUrl: GLM_CODING_BASE_URL });
+  coding.response.catch(() => {
   });
   try {
-    for await (const event of …
-    …
+    for await (const event of coding) {
+      yield event;
     }
-    …
+    return await coding.response;
   } catch {
-    const …
-    …
+    const regular = streamOpenAI({ ...options, baseUrl: GLM_REGULAR_BASE_URL });
+    regular.response.catch(() => {
     });
     try {
-      for await (const event of …
-      …
+      for await (const event of regular) {
+        yield event;
       }
-      …
+      return await regular.response;
     } catch (fallbackErr) {
-      …
+      throw fallbackErr instanceof Error ? fallbackErr : new Error(String(fallbackErr));
     }
   }
 }
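Editor's note: with generators, the GLM fallback becomes plain composition: iterate the primary stream and, if it throws, iterate the fallback. One consequence visible in the diff is that events already yielded from the failed primary are not retracted, so a consumer may see a partial primary stream followed by the full fallback stream. A generic sketch of that shape (primary/fallback are placeholder factories, not the package's endpoints):

    // Sketch of generator-based fallback, assuming both arguments are
    // zero-argument functions returning async iterables of events.
    async function* withFallback(primary, fallback) {
      try {
        for await (const event of primary()) yield event;
      } catch {
        // Anything already yielded from the primary has been delivered;
        // the fallback restarts from its own first event.
        for await (const event of fallback()) yield event;
      }
    }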
@@ -1197,31 +1274,29 @@ function chunkText(text, size) {
   }
   return chunks.length > 0 ? chunks : [""];
 }
-function simulateStream(message, stopReason, result, signal, cacheUsage) {
+async function* simulateStream(message, stopReason, signal, cacheUsage) {
   if (signal?.aborted) {
-    …
-    return;
+    throw new Error("aborted");
   }
   const content = typeof message.content === "string" ? message.content ? [{ type: "text", text: message.content }] : [] : message.content;
   let outputChars = 0;
   for (const part of content) {
     if (signal?.aborted) {
-      …
-      return;
+      throw new Error("aborted");
     }
     if (part.type === "text") {
       const chunks = chunkText(part.text, DEFAULT_CHUNK_SIZE);
       for (const chunk of chunks) {
-        result.push({ type: "text_delta", text: chunk });
+        yield { type: "text_delta", text: chunk };
         outputChars += chunk.length;
       }
     } else if (part.type === "thinking") {
-      result.push({ type: "thinking_delta", text: part.text });
+      yield { type: "thinking_delta", text: part.text };
       outputChars += part.text.length;
     } else if (part.type === "tool_call") {
       const argsJson = JSON.stringify(part.args);
-      result.push({ type: "toolcall_delta", id: part.id, name: part.name, argsJson });
-      result.push({ type: "toolcall_done", id: part.id, name: part.name, args: part.args });
+      yield { type: "toolcall_delta", id: part.id, name: part.name, argsJson };
+      yield { type: "toolcall_done", id: part.id, name: part.name, args: part.args };
       outputChars += argsJson.length;
     }
   }
@@ -1232,8 +1307,8 @@ function simulateStream(message, stopReason, result, signal, cacheUsage) {
     ...cacheUsage?.cacheRead ? { cacheRead: cacheUsage.cacheRead } : {},
     ...cacheUsage?.cacheWrite ? { cacheWrite: cacheUsage.cacheWrite } : {}
   };
-  result.push({ type: "done", stopReason });
-  result.complete({ message, stopReason, usage });
+  yield { type: "done", stopReason };
+  return { message, stopReason, usage };
 }
 function computeCacheUsage(current, previous) {
   if (!previous) {
@@ -1305,24 +1380,21 @@ function registerPalsuProvider(config) {
       state.callCount++;
       const ms = modelStates.get(options.model);
       const responseDef = (ms && ms.responses.length > 0 ? ms.responses.shift() : void 0) ?? (responses.length > 0 ? responses.shift() : void 0) ?? ms?.defaultResponse ?? defaultResponse;
-      const result = new StreamResult();
       let cacheUsage;
       if (enableCache) {
         const serialized = JSON.stringify(options.messages);
         cacheUsage = computeCacheUsage(serialized, lastMessagesSerialized);
         lastMessagesSerialized = serialized;
       }
-      const …
-      …
-      );
-      return result;
+      const gen = (async function* () {
+        const rawMessage = typeof responseDef === "function" ? responseDef(options.messages, options, state) : responseDef;
+        const message = await Promise.resolve(rawMessage);
+        const hasToolCalls = Array.isArray(message.content) && message.content.some((p) => p.type === "tool_call");
+        const explicitStop = message._stopReason;
+        const stopReason = explicitStop ?? (hasToolCalls ? "tool_use" : "end_turn");
+        return yield* simulateStream(message, stopReason, options.signal, cacheUsage);
+      })();
+      return new StreamResult(gen);
     }
   });
   return handle;