@corbat-tech/coco 2.8.1 → 2.8.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/index.js +59 -8
- package/dist/cli/index.js.map +1 -1
- package/dist/index.d.ts +1 -0
- package/dist/index.js +39 -8
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.d.ts
CHANGED
|
@@ -1032,6 +1032,7 @@ interface StreamChunk {
|
|
|
1032
1032
|
type: "text" | "tool_use_start" | "tool_use_delta" | "tool_use_end" | "done";
|
|
1033
1033
|
text?: string;
|
|
1034
1034
|
toolCall?: Partial<ToolCall>;
|
|
1035
|
+
stopReason?: "end_turn" | "max_tokens" | "stop_sequence" | "tool_use";
|
|
1035
1036
|
}
|
|
1036
1037
|
/**
|
|
1037
1038
|
* LLM Provider interface
|
package/dist/index.js
CHANGED
|
@@ -11803,6 +11803,7 @@ var AnthropicProvider = class {
|
|
|
11803
11803
|
};
|
|
11804
11804
|
const timeoutInterval = setInterval(checkTimeout, 5e3);
|
|
11805
11805
|
try {
|
|
11806
|
+
let streamStopReason;
|
|
11806
11807
|
for await (const event of stream) {
|
|
11807
11808
|
lastActivityTime = Date.now();
|
|
11808
11809
|
if (event.type === "content_block_delta") {
|
|
@@ -11810,9 +11811,14 @@ var AnthropicProvider = class {
|
|
|
11810
11811
|
if (delta.type === "text_delta" && delta.text) {
|
|
11811
11812
|
yield { type: "text", text: delta.text };
|
|
11812
11813
|
}
|
|
11814
|
+
} else if (event.type === "message_delta") {
|
|
11815
|
+
const delta = event.delta;
|
|
11816
|
+
if (delta.stop_reason) {
|
|
11817
|
+
streamStopReason = this.mapStopReason(delta.stop_reason);
|
|
11818
|
+
}
|
|
11813
11819
|
}
|
|
11814
11820
|
}
|
|
11815
|
-
yield { type: "done" };
|
|
11821
|
+
yield { type: "done", stopReason: streamStopReason };
|
|
11816
11822
|
} finally {
|
|
11817
11823
|
clearInterval(timeoutInterval);
|
|
11818
11824
|
}
|
|
@@ -11849,9 +11855,15 @@ var AnthropicProvider = class {
|
|
|
11849
11855
|
};
|
|
11850
11856
|
const timeoutInterval = setInterval(checkTimeout, 5e3);
|
|
11851
11857
|
try {
|
|
11858
|
+
let streamStopReason;
|
|
11852
11859
|
for await (const event of stream) {
|
|
11853
11860
|
lastActivityTime = Date.now();
|
|
11854
|
-
if (event.type === "content_block_start") {
|
|
11861
|
+
if (event.type === "message_delta") {
|
|
11862
|
+
const delta = event.delta;
|
|
11863
|
+
if (delta.stop_reason) {
|
|
11864
|
+
streamStopReason = this.mapStopReason(delta.stop_reason);
|
|
11865
|
+
}
|
|
11866
|
+
} else if (event.type === "content_block_start") {
|
|
11855
11867
|
const contentBlock = event.content_block;
|
|
11856
11868
|
if (contentBlock.type === "tool_use") {
|
|
11857
11869
|
if (currentToolCall) {
|
|
@@ -11922,7 +11934,7 @@ var AnthropicProvider = class {
|
|
|
11922
11934
|
}
|
|
11923
11935
|
}
|
|
11924
11936
|
}
|
|
11925
|
-
yield { type: "done" };
|
|
11937
|
+
yield { type: "done", stopReason: streamStopReason };
|
|
11926
11938
|
} finally {
|
|
11927
11939
|
clearInterval(timeoutInterval);
|
|
11928
11940
|
}
|
|
@@ -12418,13 +12430,18 @@ var OpenAIProvider = class {
|
|
|
12418
12430
|
stream: true,
|
|
12419
12431
|
...supportsTemp && { temperature: options?.temperature ?? this.config.temperature ?? 0 }
|
|
12420
12432
|
});
|
|
12433
|
+
let streamStopReason;
|
|
12421
12434
|
for await (const chunk of stream) {
|
|
12422
12435
|
const delta = chunk.choices[0]?.delta;
|
|
12423
12436
|
if (delta?.content) {
|
|
12424
12437
|
yield { type: "text", text: delta.content };
|
|
12425
12438
|
}
|
|
12439
|
+
const finishReason = chunk.choices[0]?.finish_reason;
|
|
12440
|
+
if (finishReason) {
|
|
12441
|
+
streamStopReason = this.mapFinishReason(finishReason);
|
|
12442
|
+
}
|
|
12426
12443
|
}
|
|
12427
|
-
yield { type: "done" };
|
|
12444
|
+
yield { type: "done", stopReason: streamStopReason };
|
|
12428
12445
|
} catch (error) {
|
|
12429
12446
|
throw this.handleError(error);
|
|
12430
12447
|
}
|
|
@@ -12489,6 +12506,7 @@ var OpenAIProvider = class {
|
|
|
12489
12506
|
return input;
|
|
12490
12507
|
};
|
|
12491
12508
|
try {
|
|
12509
|
+
let streamStopReason;
|
|
12492
12510
|
for await (const chunk of stream) {
|
|
12493
12511
|
const delta = chunk.choices[0]?.delta;
|
|
12494
12512
|
if (delta?.content || delta?.tool_calls) {
|
|
@@ -12535,6 +12553,9 @@ var OpenAIProvider = class {
|
|
|
12535
12553
|
}
|
|
12536
12554
|
}
|
|
12537
12555
|
const finishReason = chunk.choices[0]?.finish_reason;
|
|
12556
|
+
if (finishReason) {
|
|
12557
|
+
streamStopReason = this.mapFinishReason(finishReason);
|
|
12558
|
+
}
|
|
12538
12559
|
if (finishReason && toolCallBuilders.size > 0) {
|
|
12539
12560
|
for (const [, builder] of toolCallBuilders) {
|
|
12540
12561
|
yield {
|
|
@@ -12559,7 +12580,7 @@ var OpenAIProvider = class {
|
|
|
12559
12580
|
}
|
|
12560
12581
|
};
|
|
12561
12582
|
}
|
|
12562
|
-
yield { type: "done" };
|
|
12583
|
+
yield { type: "done", stopReason: streamStopReason };
|
|
12563
12584
|
} finally {
|
|
12564
12585
|
clearInterval(timeoutInterval);
|
|
12565
12586
|
}
|
|
@@ -13322,7 +13343,7 @@ var CodexProvider = class {
|
|
|
13322
13343
|
}
|
|
13323
13344
|
}
|
|
13324
13345
|
}
|
|
13325
|
-
yield { type: "done" };
|
|
13346
|
+
yield { type: "done", stopReason: response.stopReason };
|
|
13326
13347
|
}
|
|
13327
13348
|
/**
|
|
13328
13349
|
* Stream a chat response with tool use
|
|
@@ -13475,13 +13496,18 @@ var GeminiProvider = class {
|
|
|
13475
13496
|
const { history, lastMessage } = this.convertMessages(messages);
|
|
13476
13497
|
const chat = model.startChat({ history });
|
|
13477
13498
|
const result = await chat.sendMessageStream(lastMessage);
|
|
13499
|
+
let streamStopReason;
|
|
13478
13500
|
for await (const chunk of result.stream) {
|
|
13479
13501
|
const text = chunk.text();
|
|
13480
13502
|
if (text) {
|
|
13481
13503
|
yield { type: "text", text };
|
|
13482
13504
|
}
|
|
13505
|
+
const finishReason = chunk.candidates?.[0]?.finishReason;
|
|
13506
|
+
if (finishReason) {
|
|
13507
|
+
streamStopReason = this.mapFinishReason(finishReason);
|
|
13508
|
+
}
|
|
13483
13509
|
}
|
|
13484
|
-
yield { type: "done" };
|
|
13510
|
+
yield { type: "done", stopReason: streamStopReason };
|
|
13485
13511
|
} catch (error) {
|
|
13486
13512
|
throw this.handleError(error);
|
|
13487
13513
|
}
|
|
@@ -13516,11 +13542,16 @@ var GeminiProvider = class {
|
|
|
13516
13542
|
const chat = model.startChat({ history });
|
|
13517
13543
|
const result = await chat.sendMessageStream(lastMessage);
|
|
13518
13544
|
const emittedToolCalls = /* @__PURE__ */ new Set();
|
|
13545
|
+
let streamStopReason;
|
|
13519
13546
|
for await (const chunk of result.stream) {
|
|
13520
13547
|
const text = chunk.text();
|
|
13521
13548
|
if (text) {
|
|
13522
13549
|
yield { type: "text", text };
|
|
13523
13550
|
}
|
|
13551
|
+
const finishReason = chunk.candidates?.[0]?.finishReason;
|
|
13552
|
+
if (finishReason) {
|
|
13553
|
+
streamStopReason = this.mapFinishReason(finishReason);
|
|
13554
|
+
}
|
|
13524
13555
|
const candidate = chunk.candidates?.[0];
|
|
13525
13556
|
if (candidate?.content?.parts) {
|
|
13526
13557
|
for (const part of candidate.content.parts) {
|
|
@@ -13554,7 +13585,7 @@ var GeminiProvider = class {
|
|
|
13554
13585
|
}
|
|
13555
13586
|
}
|
|
13556
13587
|
}
|
|
13557
|
-
yield { type: "done" };
|
|
13588
|
+
yield { type: "done", stopReason: streamStopReason };
|
|
13558
13589
|
} catch (error) {
|
|
13559
13590
|
throw this.handleError(error);
|
|
13560
13591
|
}
|