mini-coder 0.0.21 → 0.0.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/settings.local.json +8 -1
- package/dist/mc.js +1175 -561
- package/package.json +3 -2
package/dist/mc.js
CHANGED
|
@@ -1791,7 +1791,7 @@ async function renderTurn(events, spinner, opts) {
|
|
|
1791
1791
|
|
|
1792
1792
|
// src/cli/output.ts
|
|
1793
1793
|
var HOME = homedir3();
|
|
1794
|
-
var PACKAGE_VERSION = "0.0.
|
|
1794
|
+
var PACKAGE_VERSION = "0.0.22";
|
|
1795
1795
|
function tildePath(p) {
|
|
1796
1796
|
return p.startsWith(HOME) ? `~${p.slice(HOME.length)}` : p;
|
|
1797
1797
|
}
|
|
@@ -4283,6 +4283,17 @@ var SUPPORTED_PROVIDERS = [
|
|
|
4283
4283
|
"ollama"
|
|
4284
4284
|
];
|
|
4285
4285
|
var ZEN_BASE2 = "https://opencode.ai/zen/v1";
|
|
4286
|
+
var REDACTED_HEADERS = new Set(["authorization"]);
|
|
4287
|
+
function redactHeaders(headers) {
|
|
4288
|
+
if (!headers)
|
|
4289
|
+
return;
|
|
4290
|
+
const h = new Headers(headers);
|
|
4291
|
+
const out = {};
|
|
4292
|
+
h.forEach((v, k) => {
|
|
4293
|
+
out[k] = REDACTED_HEADERS.has(k) ? "[REDACTED]" : v;
|
|
4294
|
+
});
|
|
4295
|
+
return out;
|
|
4296
|
+
}
|
|
4286
4297
|
function createFetchWithLogging() {
|
|
4287
4298
|
const customFetch = async (input, init) => {
|
|
4288
4299
|
if (init?.body) {
|
|
@@ -4292,14 +4303,14 @@ function createFetchWithLogging() {
|
|
|
4292
4303
|
logApiEvent("Provider Request", {
|
|
4293
4304
|
url: input.toString(),
|
|
4294
4305
|
method: init.method,
|
|
4295
|
-
headers: init.headers,
|
|
4306
|
+
headers: redactHeaders(init.headers),
|
|
4296
4307
|
body: bodyJson
|
|
4297
4308
|
});
|
|
4298
4309
|
} catch {
|
|
4299
4310
|
logApiEvent("Provider Request", {
|
|
4300
4311
|
url: input.toString(),
|
|
4301
4312
|
method: init.method,
|
|
4302
|
-
headers: init.headers,
|
|
4313
|
+
headers: redactHeaders(init.headers),
|
|
4303
4314
|
body: init.body
|
|
4304
4315
|
});
|
|
4305
4316
|
}
|
|
@@ -4309,6 +4320,59 @@ function createFetchWithLogging() {
|
|
|
4309
4320
|
return customFetch;
|
|
4310
4321
|
}
|
|
4311
4322
|
var fetchWithLogging = createFetchWithLogging();
|
|
4323
|
+
var OAUTH_STRIP_BETAS = new Set(["structured-outputs-2025-11-13"]);
|
|
4324
|
+
function sanitizeOAuthBody(body) {
|
|
4325
|
+
try {
|
|
4326
|
+
const json = JSON.parse(body);
|
|
4327
|
+
if (Array.isArray(json.tools)) {
|
|
4328
|
+
for (const tool of json.tools) {
|
|
4329
|
+
if (tool.input_schema) {
|
|
4330
|
+
tool.input_schema = {
|
|
4331
|
+
type: "object",
|
|
4332
|
+
properties: tool.input_schema.properties ?? {},
|
|
4333
|
+
required: tool.input_schema.required ?? []
|
|
4334
|
+
};
|
|
4335
|
+
}
|
|
4336
|
+
}
|
|
4337
|
+
}
|
|
4338
|
+
if (Array.isArray(json.messages)) {
|
|
4339
|
+
for (const msg of json.messages) {
|
|
4340
|
+
if (!Array.isArray(msg.content))
|
|
4341
|
+
continue;
|
|
4342
|
+
for (const block of msg.content) {
|
|
4343
|
+
if (block.type === "tool_use") {
|
|
4344
|
+
delete block.caller;
|
|
4345
|
+
}
|
|
4346
|
+
}
|
|
4347
|
+
}
|
|
4348
|
+
}
|
|
4349
|
+
return JSON.stringify(json);
|
|
4350
|
+
} catch {
|
|
4351
|
+
return body;
|
|
4352
|
+
}
|
|
4353
|
+
}
|
|
4354
|
+
function createOAuthFetch() {
|
|
4355
|
+
const baseFetch = createFetchWithLogging();
|
|
4356
|
+
const oauthFetch = async (input, init) => {
|
|
4357
|
+
let opts = init;
|
|
4358
|
+
if (opts?.headers) {
|
|
4359
|
+
const h = new Headers(opts.headers);
|
|
4360
|
+
const beta = h.get("anthropic-beta");
|
|
4361
|
+
if (beta) {
|
|
4362
|
+
const filtered = beta.split(",").filter((b) => !OAUTH_STRIP_BETAS.has(b)).join(",");
|
|
4363
|
+
h.set("anthropic-beta", filtered);
|
|
4364
|
+
}
|
|
4365
|
+
h.set("user-agent", "claude-cli/2.1.75");
|
|
4366
|
+
h.set("x-app", "cli");
|
|
4367
|
+
opts = { ...opts, headers: Object.fromEntries(h.entries()) };
|
|
4368
|
+
}
|
|
4369
|
+
if (opts?.body) {
|
|
4370
|
+
opts = { ...opts, body: sanitizeOAuthBody(opts.body.toString()) };
|
|
4371
|
+
}
|
|
4372
|
+
return baseFetch(input, opts);
|
|
4373
|
+
};
|
|
4374
|
+
return oauthFetch;
|
|
4375
|
+
}
|
|
4312
4376
|
function requireEnv(name) {
|
|
4313
4377
|
const value = process.env[name];
|
|
4314
4378
|
if (!value)
|
|
@@ -4399,7 +4463,7 @@ async function resolveAnthropicModel(modelId) {
|
|
|
4399
4463
|
oauthAnthropicCache = {
|
|
4400
4464
|
token,
|
|
4401
4465
|
provider: createAnthropic({
|
|
4402
|
-
fetch:
|
|
4466
|
+
fetch: createOAuthFetch(),
|
|
4403
4467
|
authToken: token,
|
|
4404
4468
|
headers: {
|
|
4405
4469
|
"anthropic-beta": "claude-code-20250219,oauth-2025-04-20"
|
|
@@ -4434,6 +4498,9 @@ async function resolveModel(modelString) {
|
|
|
4434
4498
|
}
|
|
4435
4499
|
return PROVIDER_MODEL_RESOLVERS[provider](modelId);
|
|
4436
4500
|
}
|
|
4501
|
+
function isAnthropicOAuth() {
|
|
4502
|
+
return isLoggedIn("anthropic");
|
|
4503
|
+
}
|
|
4437
4504
|
function autoDiscoverModel() {
|
|
4438
4505
|
if (process.env.OPENCODE_API_KEY)
|
|
4439
4506
|
return "zen/claude-sonnet-4-6";
|
|
@@ -4498,6 +4565,10 @@ function normalizeUnknownError(error) {
|
|
|
4498
4565
|
return new Error(stringifyUnknown(error));
|
|
4499
4566
|
}
|
|
4500
4567
|
|
|
4568
|
+
// src/llm-api/turn-anthropic-oauth.ts
|
|
4569
|
+
import Anthropic from "@anthropic-ai/sdk";
|
|
4570
|
+
import { zodSchema } from "ai";
|
|
4571
|
+
|
|
4501
4572
|
// src/llm-api/turn-context.ts
|
|
4502
4573
|
import { pruneMessages } from "ai";
|
|
4503
4574
|
var DEFAULT_TOOL_RESULT_PAYLOAD_CAP_BYTES = 16 * 1024;
|
|
@@ -4698,629 +4769,1172 @@ function annotateAnthropicCacheBreakpoints(turnMessages, systemPrompt) {
|
|
|
4698
4769
|
};
|
|
4699
4770
|
}
|
|
4700
4771
|
|
|
4701
|
-
// src/llm-api/turn-
|
|
4702
|
-
import {
|
|
4772
|
+
// src/llm-api/turn-request.ts
|
|
4773
|
+
import { stepCountIs } from "ai";
|
|
4703
4774
|
|
|
4704
|
-
// src/llm-api/
|
|
4705
|
-
|
|
4706
|
-
|
|
4707
|
-
|
|
4708
|
-
|
|
4709
|
-
|
|
4710
|
-
}
|
|
4711
|
-
|
|
4712
|
-
|
|
4713
|
-
|
|
4714
|
-
|
|
4715
|
-
|
|
4716
|
-
|
|
4717
|
-
|
|
4718
|
-
|
|
4719
|
-
|
|
4720
|
-
|
|
4775
|
+
// src/llm-api/provider-options.ts
|
|
4776
|
+
var ANTHROPIC_BUDGET = {
|
|
4777
|
+
low: 4096,
|
|
4778
|
+
medium: 8192,
|
|
4779
|
+
high: 16384,
|
|
4780
|
+
xhigh: 32768
|
|
4781
|
+
};
|
|
4782
|
+
var GEMINI_BUDGET = {
|
|
4783
|
+
low: 4096,
|
|
4784
|
+
medium: 8192,
|
|
4785
|
+
high: 16384,
|
|
4786
|
+
xhigh: 24575
|
|
4787
|
+
};
|
|
4788
|
+
function clampEffort(effort, max) {
|
|
4789
|
+
const ORDER = ["low", "medium", "high", "xhigh"];
|
|
4790
|
+
const effortIdx = ORDER.indexOf(effort);
|
|
4791
|
+
const maxIdx = ORDER.indexOf(max);
|
|
4792
|
+
return ORDER[Math.min(effortIdx, maxIdx)];
|
|
4721
4793
|
}
|
|
4722
|
-
function
|
|
4723
|
-
|
|
4724
|
-
|
|
4725
|
-
|
|
4726
|
-
|
|
4727
|
-
|
|
4728
|
-
delta
|
|
4729
|
-
};
|
|
4730
|
-
}
|
|
4731
|
-
case "reasoning-delta":
|
|
4732
|
-
case "reasoning": {
|
|
4733
|
-
const delta = getReasoningDeltaFromStreamChunk(c11);
|
|
4734
|
-
if (delta === null)
|
|
4735
|
-
return null;
|
|
4736
|
-
return {
|
|
4737
|
-
type: "reasoning-delta",
|
|
4738
|
-
delta
|
|
4739
|
-
};
|
|
4740
|
-
}
|
|
4741
|
-
case "tool-input-start": {
|
|
4742
|
-
const args = extractToolArgs(c11);
|
|
4743
|
-
const hasStableToolCallId = typeof c11.toolCallId === "string" && c11.toolCallId.trim().length > 0;
|
|
4744
|
-
if (hasStableToolCallId && !hasRenderableToolArgs(args))
|
|
4745
|
-
return null;
|
|
4746
|
-
return {
|
|
4747
|
-
type: "tool-call-start",
|
|
4748
|
-
toolCallId: String(c11.toolCallId ?? ""),
|
|
4749
|
-
toolName: String(c11.toolName ?? ""),
|
|
4750
|
-
args
|
|
4751
|
-
};
|
|
4752
|
-
}
|
|
4753
|
-
case "tool-call": {
|
|
4754
|
-
return {
|
|
4755
|
-
type: "tool-call-start",
|
|
4756
|
-
toolCallId: String(c11.toolCallId ?? ""),
|
|
4757
|
-
toolName: String(c11.toolName ?? ""),
|
|
4758
|
-
args: extractToolArgs(c11)
|
|
4759
|
-
};
|
|
4760
|
-
}
|
|
4761
|
-
case "tool-result": {
|
|
4762
|
-
return {
|
|
4763
|
-
type: "tool-result",
|
|
4764
|
-
toolCallId: String(c11.toolCallId ?? ""),
|
|
4765
|
-
toolName: String(c11.toolName ?? ""),
|
|
4766
|
-
result: "output" in c11 ? c11.output : ("result" in c11) ? c11.result : undefined,
|
|
4767
|
-
isError: "isError" in c11 ? Boolean(c11.isError) : false
|
|
4768
|
-
};
|
|
4769
|
-
}
|
|
4770
|
-
case "tool-error":
|
|
4771
|
-
return {
|
|
4772
|
-
type: "tool-result",
|
|
4773
|
-
toolCallId: String(c11.toolCallId ?? ""),
|
|
4774
|
-
toolName: String(c11.toolName ?? ""),
|
|
4775
|
-
result: c11.error ?? "Tool execution failed",
|
|
4776
|
-
isError: true
|
|
4777
|
-
};
|
|
4778
|
-
case "error": {
|
|
4779
|
-
throw normalizeUnknownError(c11.error);
|
|
4780
|
-
}
|
|
4781
|
-
default:
|
|
4782
|
-
return null;
|
|
4794
|
+
function getAnthropicThinkingOptions(modelId, effort) {
|
|
4795
|
+
const isAdaptive = /^claude-3-7/.test(modelId) || /^claude-sonnet-4/.test(modelId) || /^claude-opus-4/.test(modelId);
|
|
4796
|
+
if (isAdaptive) {
|
|
4797
|
+
const isOpus = /^claude-opus-4/.test(modelId);
|
|
4798
|
+
const mapped = effort === "xhigh" ? isOpus ? "max" : "high" : effort;
|
|
4799
|
+
return { anthropic: { thinking: { type: "adaptive" }, effort: mapped } };
|
|
4783
4800
|
}
|
|
4801
|
+
return {
|
|
4802
|
+
anthropic: {
|
|
4803
|
+
thinking: { type: "enabled", budgetTokens: ANTHROPIC_BUDGET[effort] },
|
|
4804
|
+
betas: ["interleaved-thinking-2025-05-14"]
|
|
4805
|
+
}
|
|
4806
|
+
};
|
|
4784
4807
|
}
|
|
4785
|
-
|
|
4786
|
-
|
|
4787
|
-
|
|
4788
|
-
return
|
|
4808
|
+
function getOpenAIThinkingOptions(modelId, effort) {
|
|
4809
|
+
const supportsXhigh = /^gpt-5\.[2-9]/.test(modelId) || /^o4/.test(modelId);
|
|
4810
|
+
const clamped = supportsXhigh ? effort : clampEffort(effort, "high");
|
|
4811
|
+
return { openai: { reasoningEffort: clamped, reasoningSummary: "auto" } };
|
|
4789
4812
|
}
|
|
4790
|
-
function
|
|
4791
|
-
|
|
4792
|
-
|
|
4793
|
-
|
|
4794
|
-
|
|
4795
|
-
|
|
4796
|
-
|
|
4797
|
-
|
|
4798
|
-
} catch (err) {
|
|
4799
|
-
throw normalizeUnknownError(err);
|
|
4813
|
+
function getGeminiThinkingOptions(modelId, effort) {
|
|
4814
|
+
if (/^gemini-3/.test(modelId)) {
|
|
4815
|
+
return {
|
|
4816
|
+
google: {
|
|
4817
|
+
thinkingConfig: {
|
|
4818
|
+
includeThoughts: true,
|
|
4819
|
+
thinkingLevel: clampEffort(effort, "high")
|
|
4820
|
+
}
|
|
4800
4821
|
}
|
|
4801
|
-
}
|
|
4802
|
-
});
|
|
4803
|
-
}
|
|
4804
|
-
function buildToolSet(tools) {
|
|
4805
|
-
const toolSet = {};
|
|
4806
|
-
for (const def of tools) {
|
|
4807
|
-
toolSet[def.name] = toCoreTool(def);
|
|
4822
|
+
};
|
|
4808
4823
|
}
|
|
4809
|
-
return toolSet;
|
|
4810
|
-
}
|
|
4811
|
-
function createTurnStepTracker(opts) {
|
|
4812
|
-
let stepCount = 0;
|
|
4813
|
-
let inputTokens = 0;
|
|
4814
|
-
let outputTokens = 0;
|
|
4815
|
-
let contextTokens = 0;
|
|
4816
|
-
let partialMessages = [];
|
|
4817
4824
|
return {
|
|
4818
|
-
|
|
4819
|
-
|
|
4820
|
-
|
|
4821
|
-
|
|
4822
|
-
|
|
4823
|
-
|
|
4824
|
-
inputTokens += step.usage?.inputTokens ?? 0;
|
|
4825
|
-
outputTokens += step.usage?.outputTokens ?? 0;
|
|
4826
|
-
contextTokens = step.usage?.inputTokens ?? contextTokens;
|
|
4827
|
-
stepCount += 1;
|
|
4828
|
-
const s = step;
|
|
4829
|
-
partialMessages = s.response?.messages ?? s.messages ?? partialMessages;
|
|
4830
|
-
},
|
|
4831
|
-
getState: () => ({
|
|
4832
|
-
stepCount,
|
|
4833
|
-
inputTokens,
|
|
4834
|
-
outputTokens,
|
|
4835
|
-
contextTokens,
|
|
4836
|
-
partialMessages
|
|
4837
|
-
})
|
|
4825
|
+
google: {
|
|
4826
|
+
thinkingConfig: {
|
|
4827
|
+
includeThoughts: true,
|
|
4828
|
+
thinkingBudget: GEMINI_BUDGET[effort]
|
|
4829
|
+
}
|
|
4830
|
+
}
|
|
4838
4831
|
};
|
|
4839
4832
|
}
|
|
4840
|
-
var
|
|
4841
|
-
|
|
4842
|
-
|
|
4833
|
+
var THINKING_STRATEGIES = [
|
|
4834
|
+
{
|
|
4835
|
+
supports: isAnthropicModelFamily,
|
|
4836
|
+
build: getAnthropicThinkingOptions
|
|
4837
|
+
},
|
|
4838
|
+
{
|
|
4839
|
+
supports: isOpenAIReasoningModelFamily,
|
|
4840
|
+
build: getOpenAIThinkingOptions
|
|
4841
|
+
},
|
|
4842
|
+
{
|
|
4843
|
+
supports: isGeminiModelFamily,
|
|
4844
|
+
build: getGeminiThinkingOptions
|
|
4845
|
+
}
|
|
4846
|
+
];
|
|
4847
|
+
function getThinkingProviderOptions(modelString, effort) {
|
|
4848
|
+
if (!supportsThinking(modelString))
|
|
4843
4849
|
return null;
|
|
4844
|
-
const
|
|
4845
|
-
|
|
4846
|
-
|
|
4847
|
-
|
|
4848
|
-
|
|
4849
|
-
|
|
4850
|
-
|
|
4851
|
-
return trimmed || "tool";
|
|
4850
|
+
const { modelId } = parseModelString(modelString);
|
|
4851
|
+
for (const strategy of THINKING_STRATEGIES) {
|
|
4852
|
+
if (!strategy.supports(modelString))
|
|
4853
|
+
continue;
|
|
4854
|
+
return strategy.build(modelId, effort);
|
|
4855
|
+
}
|
|
4856
|
+
return null;
|
|
4852
4857
|
}
|
|
4853
|
-
|
|
4854
|
-
|
|
4858
|
+
var CACHE_FAMILY_RULES = [
|
|
4859
|
+
[isAnthropicModelFamily, "anthropic"],
|
|
4860
|
+
[isGeminiModelFamily, "google"]
|
|
4861
|
+
];
|
|
4862
|
+
function getCacheFamily(modelString) {
|
|
4863
|
+
for (const [match, family] of CACHE_FAMILY_RULES) {
|
|
4864
|
+
if (match(modelString))
|
|
4865
|
+
return family;
|
|
4866
|
+
}
|
|
4867
|
+
return "none";
|
|
4855
4868
|
}
|
|
4856
|
-
function
|
|
4857
|
-
if (
|
|
4869
|
+
function getCachingProviderOptions(modelString, opts) {
|
|
4870
|
+
if (!opts.enabled)
|
|
4858
4871
|
return null;
|
|
4859
|
-
const
|
|
4860
|
-
|
|
4861
|
-
}
|
|
4862
|
-
|
|
4863
|
-
|
|
4864
|
-
if (!providerData)
|
|
4865
|
-
return null;
|
|
4866
|
-
const openai = providerData.openai;
|
|
4867
|
-
if (!isRecord5(openai))
|
|
4868
|
-
return null;
|
|
4869
|
-
return openai.phase === "commentary" || openai.phase === "final_answer" ? openai.phase : null;
|
|
4872
|
+
const family = getCacheFamily(modelString);
|
|
4873
|
+
if (family === "google" && opts.googleCachedContent && opts.googleExplicitCachingCompatible !== false) {
|
|
4874
|
+
return { google: { cachedContent: opts.googleCachedContent } };
|
|
4875
|
+
}
|
|
4876
|
+
return null;
|
|
4870
4877
|
}
|
|
4871
|
-
|
|
4872
|
-
|
|
4873
|
-
|
|
4874
|
-
|
|
4875
|
-
|
|
4876
|
-
|
|
4877
|
-
|
|
4878
|
-
|
|
4878
|
+
|
|
4879
|
+
// src/llm-api/turn-prepare-messages.ts
|
|
4880
|
+
function prepareTurnMessages(input) {
|
|
4881
|
+
const {
|
|
4882
|
+
messages,
|
|
4883
|
+
modelString,
|
|
4884
|
+
toolCount,
|
|
4885
|
+
systemPrompt,
|
|
4886
|
+
pruningMode,
|
|
4887
|
+
toolResultPayloadCapBytes,
|
|
4888
|
+
promptCachingEnabled
|
|
4889
|
+
} = input;
|
|
4890
|
+
const apiLogOn = isApiLogEnabled();
|
|
4891
|
+
const strippedRuntimeToolFields = stripToolRuntimeInputFields(messages);
|
|
4892
|
+
if (strippedRuntimeToolFields !== messages && apiLogOn) {
|
|
4893
|
+
logApiEvent("runtime tool input fields stripped", { modelString });
|
|
4894
|
+
}
|
|
4895
|
+
const geminiResult = sanitizeGeminiToolMessagesWithMetadata(strippedRuntimeToolFields, modelString, toolCount > 0);
|
|
4896
|
+
if (geminiResult.repaired && apiLogOn) {
|
|
4897
|
+
logApiEvent("gemini tool history repaired", {
|
|
4898
|
+
modelString,
|
|
4899
|
+
reason: geminiResult.reason,
|
|
4900
|
+
repairedFromIndex: geminiResult.repairedFromIndex,
|
|
4901
|
+
droppedMessageCount: geminiResult.droppedMessageCount,
|
|
4902
|
+
tailOnlyAffected: geminiResult.tailOnlyAffected
|
|
4903
|
+
});
|
|
4904
|
+
}
|
|
4905
|
+
const openaiStripped = stripOpenAIHistoryTransforms(geminiResult.messages, modelString);
|
|
4906
|
+
if (openaiStripped !== geminiResult.messages && apiLogOn) {
|
|
4907
|
+
logApiEvent("openai history transforms applied", { modelString });
|
|
4908
|
+
}
|
|
4909
|
+
const normalised = normalizeOpenAICompatibleToolCallInputs(openaiStripped, modelString);
|
|
4910
|
+
if (normalised !== openaiStripped && apiLogOn) {
|
|
4911
|
+
logApiEvent("openai-compatible tool input normalized", { modelString });
|
|
4912
|
+
}
|
|
4913
|
+
const preStats = apiLogOn ? getMessageDiagnostics(normalised) : getMessageStats(normalised);
|
|
4914
|
+
if (apiLogOn)
|
|
4915
|
+
logApiEvent("turn context pre-prune", preStats);
|
|
4916
|
+
const pruned = applyContextPruning(normalised, pruningMode);
|
|
4917
|
+
const postStats = apiLogOn ? getMessageDiagnostics(pruned) : getMessageStats(pruned);
|
|
4918
|
+
if (apiLogOn)
|
|
4919
|
+
logApiEvent("turn context post-prune", postStats);
|
|
4920
|
+
const compacted = compactToolResultPayloads(pruned, toolResultPayloadCapBytes);
|
|
4921
|
+
if (compacted !== pruned && apiLogOn) {
|
|
4922
|
+
logApiEvent("turn context post-compaction", {
|
|
4923
|
+
capBytes: toolResultPayloadCapBytes,
|
|
4924
|
+
diagnostics: getMessageDiagnostics(compacted)
|
|
4925
|
+
});
|
|
4926
|
+
}
|
|
4927
|
+
let finalMessages = compacted;
|
|
4928
|
+
let finalSystemPrompt = systemPrompt;
|
|
4929
|
+
const cacheFamily = getCacheFamily(modelString);
|
|
4930
|
+
if (cacheFamily === "anthropic" && promptCachingEnabled) {
|
|
4931
|
+
const annotated = annotateAnthropicCacheBreakpoints(compacted, systemPrompt);
|
|
4932
|
+
finalMessages = annotated.messages;
|
|
4933
|
+
finalSystemPrompt = annotated.systemPrompt;
|
|
4934
|
+
if (apiLogOn) {
|
|
4935
|
+
logApiEvent("Anthropic prompt caching", annotated.diagnostics);
|
|
4936
|
+
}
|
|
4937
|
+
}
|
|
4938
|
+
if (isAnthropicModelFamily(modelString) && isAnthropicOAuth()) {
|
|
4939
|
+
const prefix = `You are Claude Code, Anthropic's official CLI for Claude.
|
|
4940
|
+
`;
|
|
4941
|
+
if (finalSystemPrompt) {
|
|
4942
|
+
finalSystemPrompt = prefix + finalSystemPrompt;
|
|
4943
|
+
} else {
|
|
4944
|
+
const sysMsg = finalMessages.find((m) => m.role === "system");
|
|
4945
|
+
if (sysMsg && typeof sysMsg.content === "string") {
|
|
4946
|
+
const idx = finalMessages.indexOf(sysMsg);
|
|
4947
|
+
finalMessages[idx] = { ...sysMsg, content: prefix + sysMsg.content };
|
|
4948
|
+
}
|
|
4949
|
+
}
|
|
4950
|
+
}
|
|
4951
|
+
const wasPruned = (pruningMode === "balanced" || pruningMode === "aggressive") && (postStats.messageCount < preStats.messageCount || postStats.totalBytes < preStats.totalBytes);
|
|
4952
|
+
return {
|
|
4953
|
+
messages: finalMessages,
|
|
4954
|
+
systemPrompt: finalSystemPrompt,
|
|
4955
|
+
pruned: wasPruned,
|
|
4956
|
+
prePruneMessageCount: preStats.messageCount,
|
|
4957
|
+
prePruneTotalBytes: preStats.totalBytes,
|
|
4958
|
+
postPruneMessageCount: postStats.messageCount,
|
|
4959
|
+
postPruneTotalBytes: postStats.totalBytes
|
|
4960
|
+
};
|
|
4879
4961
|
}
|
|
4880
4962
|
|
|
4881
|
-
|
|
4882
|
-
|
|
4883
|
-
|
|
4884
|
-
|
|
4885
|
-
|
|
4886
|
-
|
|
4887
|
-
|
|
4888
|
-
|
|
4889
|
-
|
|
4963
|
+
// src/llm-api/turn-provider-options.ts
|
|
4964
|
+
function isRecord5(value) {
|
|
4965
|
+
return value !== null && typeof value === "object";
|
|
4966
|
+
}
|
|
4967
|
+
function mergeDeep(target, source) {
|
|
4968
|
+
const output = { ...target };
|
|
4969
|
+
for (const key in source) {
|
|
4970
|
+
const sVal = source[key];
|
|
4971
|
+
const tVal = target[key];
|
|
4972
|
+
output[key] = isRecord5(sVal) && isRecord5(tVal) ? { ...tVal, ...sVal } : sVal;
|
|
4973
|
+
}
|
|
4974
|
+
return output;
|
|
4975
|
+
}
|
|
4976
|
+
function buildTurnProviderOptions(input) {
|
|
4977
|
+
const {
|
|
4978
|
+
modelString,
|
|
4979
|
+
thinkingEffort,
|
|
4980
|
+
promptCachingEnabled,
|
|
4981
|
+
openaiPromptCacheRetention,
|
|
4982
|
+
googleCachedContent,
|
|
4983
|
+
toolCount,
|
|
4984
|
+
hasSystemPrompt
|
|
4985
|
+
} = input;
|
|
4986
|
+
const thinkingOpts = thinkingEffort ? getThinkingProviderOptions(modelString, thinkingEffort) : null;
|
|
4987
|
+
const reasoningSummaryRequested = isRecord5(thinkingOpts) && isRecord5(thinkingOpts.openai) && typeof thinkingOpts.openai.reasoningSummary === "string";
|
|
4988
|
+
const cacheFamily = getCacheFamily(modelString);
|
|
4989
|
+
const cacheOpts = getCachingProviderOptions(modelString, {
|
|
4990
|
+
enabled: promptCachingEnabled,
|
|
4991
|
+
openaiRetention: openaiPromptCacheRetention,
|
|
4992
|
+
googleCachedContent,
|
|
4993
|
+
googleExplicitCachingCompatible: toolCount === 0 && !hasSystemPrompt
|
|
4994
|
+
});
|
|
4995
|
+
const baseProviderOpts = {
|
|
4996
|
+
...thinkingOpts ?? {},
|
|
4997
|
+
...isOpenAIGPT(modelString) ? {
|
|
4998
|
+
openai: {
|
|
4999
|
+
store: false,
|
|
5000
|
+
...isRecord5(thinkingOpts?.openai) ? thinkingOpts.openai : {}
|
|
5001
|
+
}
|
|
5002
|
+
} : {}
|
|
5003
|
+
};
|
|
5004
|
+
const providerOptions = cacheOpts ? mergeDeep(baseProviderOpts, cacheOpts) : baseProviderOpts;
|
|
5005
|
+
return {
|
|
5006
|
+
cacheFamily,
|
|
5007
|
+
thinkingOpts,
|
|
5008
|
+
cacheOpts,
|
|
5009
|
+
providerOptions,
|
|
5010
|
+
reasoningSummaryRequested
|
|
5011
|
+
};
|
|
5012
|
+
}
|
|
5013
|
+
|
|
5014
|
+
// src/llm-api/turn-request.ts
|
|
5015
|
+
function buildTurnPreparation(input) {
|
|
5016
|
+
const providerOptionsResult = buildTurnProviderOptions({
|
|
5017
|
+
modelString: input.modelString,
|
|
5018
|
+
thinkingEffort: input.thinkingEffort,
|
|
5019
|
+
promptCachingEnabled: input.promptCachingEnabled,
|
|
5020
|
+
openaiPromptCacheRetention: input.openaiPromptCacheRetention,
|
|
5021
|
+
googleCachedContent: input.googleCachedContent,
|
|
5022
|
+
toolCount: input.toolCount,
|
|
5023
|
+
hasSystemPrompt: Boolean(input.systemPrompt)
|
|
5024
|
+
});
|
|
5025
|
+
const prepared = prepareTurnMessages({
|
|
5026
|
+
messages: input.messages,
|
|
5027
|
+
modelString: input.modelString,
|
|
5028
|
+
toolCount: input.toolCount,
|
|
5029
|
+
systemPrompt: input.systemPrompt,
|
|
5030
|
+
pruningMode: input.pruningMode,
|
|
5031
|
+
toolResultPayloadCapBytes: input.toolResultPayloadCapBytes,
|
|
5032
|
+
promptCachingEnabled: input.promptCachingEnabled
|
|
5033
|
+
});
|
|
5034
|
+
return { providerOptionsResult, prepared };
|
|
5035
|
+
}
|
|
5036
|
+
function buildStreamTextRequest(input) {
|
|
5037
|
+
return {
|
|
5038
|
+
model: input.model,
|
|
5039
|
+
maxOutputTokens: 16384,
|
|
5040
|
+
messages: input.prepared.messages,
|
|
5041
|
+
tools: input.toolSet,
|
|
5042
|
+
stopWhen: stepCountIs(input.maxSteps),
|
|
5043
|
+
onStepFinish: input.onStepFinish,
|
|
5044
|
+
prepareStep: ({ stepNumber }) => {
|
|
5045
|
+
if (stepNumber >= input.maxSteps - 1) {
|
|
5046
|
+
return { activeTools: [] };
|
|
5047
|
+
}
|
|
5048
|
+
return;
|
|
5049
|
+
},
|
|
5050
|
+
...input.prepared.systemPrompt ? { system: input.prepared.systemPrompt } : {},
|
|
5051
|
+
...Object.keys(input.providerOptions).length > 0 ? {
|
|
5052
|
+
providerOptions: input.providerOptions
|
|
5053
|
+
} : {},
|
|
5054
|
+
...input.signal ? { abortSignal: input.signal } : {},
|
|
5055
|
+
onError: () => {},
|
|
5056
|
+
timeout: { chunkMs: 120000 }
|
|
5057
|
+
};
|
|
5058
|
+
}
|
|
5059
|
+
|
|
5060
|
+
// src/llm-api/turn-anthropic-oauth.ts
|
|
5061
|
+
var MAX_STEPS = 50;
|
|
5062
|
+
var MAX_OUTPUT_TOKENS = 16384;
|
|
5063
|
+
var CC_VERSION = "2.1.75";
|
|
5064
|
+
var cachedClient = null;
|
|
5065
|
+
function getClient(token) {
|
|
5066
|
+
if (cachedClient?.token === token && cachedClient.client)
|
|
5067
|
+
return cachedClient.client;
|
|
5068
|
+
const client = new Anthropic({
|
|
5069
|
+
apiKey: null,
|
|
5070
|
+
authToken: token,
|
|
5071
|
+
maxRetries: 5,
|
|
5072
|
+
dangerouslyAllowBrowser: true,
|
|
5073
|
+
defaultHeaders: {
|
|
5074
|
+
accept: "application/json",
|
|
5075
|
+
"anthropic-dangerous-direct-browser-access": "true",
|
|
5076
|
+
"anthropic-beta": "claude-code-20250219,oauth-2025-04-20",
|
|
5077
|
+
"user-agent": `claude-cli/${CC_VERSION}`,
|
|
5078
|
+
"x-app": "cli"
|
|
5079
|
+
}
|
|
5080
|
+
});
|
|
5081
|
+
cachedClient = { token, client };
|
|
5082
|
+
return client;
|
|
5083
|
+
}
|
|
5084
|
+
function supportsAdaptiveThinking(modelId) {
|
|
5085
|
+
return modelId.includes("opus-4-6") || modelId.includes("sonnet-4-6");
|
|
5086
|
+
}
|
|
5087
|
+
function mapEffort(effort, modelId) {
|
|
5088
|
+
if (!effort)
|
|
5089
|
+
return;
|
|
5090
|
+
const map = {
|
|
5091
|
+
low: "low",
|
|
5092
|
+
medium: "medium",
|
|
5093
|
+
high: "high",
|
|
5094
|
+
xhigh: modelId.includes("opus-4-6") ? "max" : "high"
|
|
5095
|
+
};
|
|
5096
|
+
return map[effort];
|
|
5097
|
+
}
|
|
5098
|
+
function coreToAnthropicMessages(messages) {
|
|
5099
|
+
let systemPrompt;
|
|
5100
|
+
const params = [];
|
|
5101
|
+
const toolUseIds = new Set;
|
|
5102
|
+
for (const msg of messages) {
|
|
5103
|
+
if (msg.role !== "assistant" || !Array.isArray(msg.content))
|
|
5104
|
+
continue;
|
|
5105
|
+
for (const part of msg.content) {
|
|
5106
|
+
if (part.type === "tool-call" && part.toolCallId) {
|
|
5107
|
+
toolUseIds.add(part.toolCallId);
|
|
5108
|
+
}
|
|
5109
|
+
}
|
|
5110
|
+
}
|
|
5111
|
+
for (const msg of messages) {
|
|
5112
|
+
if (msg.role === "system") {
|
|
5113
|
+
systemPrompt = typeof msg.content === "string" ? msg.content : Array.isArray(msg.content) ? msg.content.filter((p) => p.type === "text").map((p) => p.text).join(`
|
|
5114
|
+
`) : undefined;
|
|
5115
|
+
continue;
|
|
5116
|
+
}
|
|
5117
|
+
if (msg.role === "user") {
|
|
5118
|
+
if (typeof msg.content === "string") {
|
|
5119
|
+
if (msg.content.trim())
|
|
5120
|
+
params.push({ role: "user", content: msg.content });
|
|
5121
|
+
continue;
|
|
5122
|
+
}
|
|
5123
|
+
if (Array.isArray(msg.content)) {
|
|
5124
|
+
const blocks = [];
|
|
5125
|
+
for (const part of msg.content) {
|
|
5126
|
+
if (part.type === "text" && part.text?.trim()) {
|
|
5127
|
+
blocks.push({ type: "text", text: part.text });
|
|
5128
|
+
} else if (part.type === "tool-result") {
|
|
5129
|
+
if (!toolUseIds.has(part.toolCallId))
|
|
5130
|
+
continue;
|
|
5131
|
+
blocks.push({
|
|
5132
|
+
type: "tool_result",
|
|
5133
|
+
tool_use_id: part.toolCallId,
|
|
5134
|
+
content: typeof part.result === "string" ? part.result : JSON.stringify(part.result ?? part.output ?? ""),
|
|
5135
|
+
is_error: part.isError ?? false
|
|
5136
|
+
});
|
|
5137
|
+
} else if (part.type === "image") {
|
|
5138
|
+
blocks.push({
|
|
5139
|
+
type: "image",
|
|
5140
|
+
source: {
|
|
5141
|
+
type: "base64",
|
|
5142
|
+
media_type: part.mimeType ?? "image/png",
|
|
5143
|
+
data: part.data
|
|
5144
|
+
}
|
|
5145
|
+
});
|
|
5146
|
+
}
|
|
5147
|
+
}
|
|
5148
|
+
if (blocks.length > 0)
|
|
5149
|
+
params.push({ role: "user", content: blocks });
|
|
5150
|
+
}
|
|
5151
|
+
continue;
|
|
5152
|
+
}
|
|
5153
|
+
if (msg.role === "assistant") {
|
|
5154
|
+
if (typeof msg.content === "string") {
|
|
5155
|
+
if (msg.content.trim())
|
|
5156
|
+
params.push({ role: "assistant", content: msg.content });
|
|
5157
|
+
continue;
|
|
5158
|
+
}
|
|
5159
|
+
if (Array.isArray(msg.content)) {
|
|
5160
|
+
const blocks = [];
|
|
5161
|
+
for (const part of msg.content) {
|
|
5162
|
+
if (part.type === "text" && part.text?.trim()) {
|
|
5163
|
+
blocks.push({ type: "text", text: part.text });
|
|
5164
|
+
} else if (part.type === "tool-call") {
|
|
5165
|
+
blocks.push({
|
|
5166
|
+
type: "tool_use",
|
|
5167
|
+
id: part.toolCallId,
|
|
5168
|
+
name: part.toolName,
|
|
5169
|
+
input: part.args ?? {}
|
|
5170
|
+
});
|
|
5171
|
+
} else if (part.type === "thinking") {
|
|
5172
|
+
if (part.redacted && part.signature) {
|
|
5173
|
+
blocks.push({
|
|
5174
|
+
type: "redacted_thinking",
|
|
5175
|
+
data: part.signature
|
|
5176
|
+
});
|
|
5177
|
+
} else if (part.text?.trim() && part.signature?.trim()) {
|
|
5178
|
+
blocks.push({
|
|
5179
|
+
type: "thinking",
|
|
5180
|
+
thinking: part.text,
|
|
5181
|
+
signature: part.signature
|
|
5182
|
+
});
|
|
5183
|
+
}
|
|
5184
|
+
}
|
|
5185
|
+
}
|
|
5186
|
+
if (blocks.length > 0)
|
|
5187
|
+
params.push({ role: "assistant", content: blocks });
|
|
5188
|
+
}
|
|
5189
|
+
}
|
|
5190
|
+
}
|
|
5191
|
+
return { system: systemPrompt, params };
|
|
5192
|
+
}
|
|
5193
|
+
function convertTools(tools) {
|
|
5194
|
+
return tools.map((tool) => {
|
|
5195
|
+
const schema = zodSchema(tool.schema).jsonSchema;
|
|
5196
|
+
return {
|
|
5197
|
+
name: tool.name,
|
|
5198
|
+
description: tool.description,
|
|
5199
|
+
input_schema: {
|
|
5200
|
+
type: "object",
|
|
5201
|
+
properties: schema.properties ?? {},
|
|
5202
|
+
required: schema.required ?? []
|
|
5203
|
+
}
|
|
5204
|
+
};
|
|
5205
|
+
});
|
|
5206
|
+
}
|
|
5207
|
+
function buildCoreMessages(assistantText, thinkingBlocks, toolCalls, toolResults) {
|
|
5208
|
+
const messages = [];
|
|
5209
|
+
const parts = [];
|
|
5210
|
+
for (const tb of thinkingBlocks) {
|
|
5211
|
+
if (tb.redacted) {
|
|
5212
|
+
parts.push({
|
|
5213
|
+
type: "thinking",
|
|
5214
|
+
text: "[Reasoning redacted]",
|
|
5215
|
+
signature: tb.signature,
|
|
5216
|
+
redacted: true
|
|
5217
|
+
});
|
|
5218
|
+
} else {
|
|
5219
|
+
parts.push({
|
|
5220
|
+
type: "thinking",
|
|
5221
|
+
text: tb.text,
|
|
5222
|
+
signature: tb.signature
|
|
5223
|
+
});
|
|
5224
|
+
}
|
|
5225
|
+
}
|
|
5226
|
+
if (assistantText.trim()) {
|
|
5227
|
+
parts.push({ type: "text", text: assistantText });
|
|
5228
|
+
}
|
|
5229
|
+
for (const tc of toolCalls) {
|
|
5230
|
+
parts.push({
|
|
5231
|
+
type: "tool-call",
|
|
5232
|
+
toolCallId: tc.id,
|
|
5233
|
+
toolName: tc.name,
|
|
5234
|
+
args: tc.args
|
|
5235
|
+
});
|
|
5236
|
+
}
|
|
5237
|
+
if (parts.length > 0) {
|
|
5238
|
+
messages.push({
|
|
5239
|
+
role: "assistant",
|
|
5240
|
+
content: parts
|
|
5241
|
+
});
|
|
5242
|
+
}
|
|
5243
|
+
if (toolResults.length > 0) {
|
|
5244
|
+
const resultParts = toolResults.map((tr) => ({
|
|
5245
|
+
type: "tool-result",
|
|
5246
|
+
toolCallId: tr.toolCallId,
|
|
5247
|
+
toolName: tr.toolName,
|
|
5248
|
+
result: tr.result,
|
|
5249
|
+
isError: tr.isError
|
|
5250
|
+
}));
|
|
5251
|
+
messages.push({
|
|
5252
|
+
role: "user",
|
|
5253
|
+
content: resultParts
|
|
5254
|
+
});
|
|
5255
|
+
}
|
|
5256
|
+
return messages;
|
|
5257
|
+
}
|
|
5258
|
+
function parseStreamingJson(partial) {
|
|
5259
|
+
try {
|
|
5260
|
+
return JSON.parse(partial);
|
|
5261
|
+
} catch {
|
|
5262
|
+
try {
|
|
5263
|
+
let fixed = partial;
|
|
5264
|
+
const opens = (fixed.match(/{/g) || []).length;
|
|
5265
|
+
const closes = (fixed.match(/}/g) || []).length;
|
|
5266
|
+
for (let i = 0;i < opens - closes; i++)
|
|
5267
|
+
fixed += "}";
|
|
5268
|
+
return JSON.parse(fixed);
|
|
5269
|
+
} catch {
|
|
5270
|
+
return {};
|
|
5271
|
+
}
|
|
5272
|
+
}
|
|
5273
|
+
}
|
|
5274
|
+
async function* runTurnAnthropicOAuth(options) {
|
|
5275
|
+
const {
|
|
5276
|
+
token,
|
|
5277
|
+
modelString,
|
|
5278
|
+
messages,
|
|
5279
|
+
tools,
|
|
5280
|
+
systemPrompt,
|
|
5281
|
+
signal,
|
|
5282
|
+
thinkingEffort,
|
|
5283
|
+
pruningMode = "balanced",
|
|
5284
|
+
promptCachingEnabled = true,
|
|
5285
|
+
toolResultPayloadCapBytes = DEFAULT_TOOL_RESULT_PAYLOAD_CAP_BYTES
|
|
5286
|
+
} = options;
|
|
5287
|
+
const modelId = modelString.replace(/^anthropic\//, "");
|
|
5288
|
+
const client = getClient(token);
|
|
5289
|
+
const anthropicTools = convertTools(tools);
|
|
5290
|
+
const toolExecutors = new Map(tools.map((t) => [t.name, t.execute]));
|
|
5291
|
+
let totalInputTokens = 0;
|
|
5292
|
+
let totalOutputTokens = 0;
|
|
5293
|
+
let contextTokens = 0;
|
|
5294
|
+
let stepCount = 0;
|
|
5295
|
+
const allNewMessages = [];
|
|
5296
|
+
try {
|
|
5297
|
+
const { prepared } = buildTurnPreparation({
|
|
5298
|
+
modelString,
|
|
5299
|
+
messages,
|
|
5300
|
+
thinkingEffort,
|
|
5301
|
+
promptCachingEnabled,
|
|
5302
|
+
openaiPromptCacheRetention: "in_memory",
|
|
5303
|
+
googleCachedContent: null,
|
|
5304
|
+
toolCount: tools.length,
|
|
5305
|
+
systemPrompt,
|
|
5306
|
+
pruningMode,
|
|
5307
|
+
toolResultPayloadCapBytes
|
|
5308
|
+
});
|
|
5309
|
+
logApiEvent("turn start", {
|
|
5310
|
+
modelString,
|
|
5311
|
+
messageCount: messages.length,
|
|
5312
|
+
reasoningSummaryRequested: false,
|
|
5313
|
+
pruningMode,
|
|
5314
|
+
toolResultPayloadCapBytes
|
|
5315
|
+
});
|
|
5316
|
+
if (prepared.pruned) {
|
|
5317
|
+
yield {
|
|
5318
|
+
type: "context-pruned",
|
|
5319
|
+
mode: pruningMode,
|
|
5320
|
+
beforeMessageCount: prepared.prePruneMessageCount,
|
|
5321
|
+
afterMessageCount: prepared.postPruneMessageCount,
|
|
5322
|
+
removedMessageCount: prepared.prePruneMessageCount - prepared.postPruneMessageCount,
|
|
5323
|
+
beforeTotalBytes: prepared.prePruneTotalBytes,
|
|
5324
|
+
afterTotalBytes: prepared.postPruneTotalBytes,
|
|
5325
|
+
removedBytes: prepared.prePruneTotalBytes - prepared.postPruneTotalBytes
|
|
5326
|
+
};
|
|
5327
|
+
}
|
|
5328
|
+
const { system: extractedSystem, params: anthropicMessages } = coreToAnthropicMessages(prepared.messages);
|
|
5329
|
+
const ccPrefix = "You are Claude Code, Anthropic's official CLI for Claude.";
|
|
5330
|
+
const sysText = prepared.systemPrompt ?? extractedSystem;
|
|
5331
|
+
const systemBlocks = [
|
|
5332
|
+
{
|
|
5333
|
+
type: "text",
|
|
5334
|
+
text: ccPrefix,
|
|
5335
|
+
cache_control: { type: "ephemeral" }
|
|
4890
5336
|
}
|
|
4891
|
-
|
|
4892
|
-
|
|
4893
|
-
|
|
4894
|
-
|
|
4895
|
-
|
|
4896
|
-
|
|
5337
|
+
];
|
|
5338
|
+
if (sysText) {
|
|
5339
|
+
const clean = sysText.startsWith(ccPrefix) ? sysText.slice(ccPrefix.length).replace(/^\n/, "") : sysText;
|
|
5340
|
+
if (clean.trim()) {
|
|
5341
|
+
systemBlocks.push({
|
|
5342
|
+
type: "text",
|
|
5343
|
+
text: clean,
|
|
5344
|
+
cache_control: { type: "ephemeral" }
|
|
5345
|
+
});
|
|
4897
5346
|
}
|
|
4898
|
-
|
|
4899
|
-
|
|
4900
|
-
|
|
4901
|
-
|
|
4902
|
-
|
|
5347
|
+
}
|
|
5348
|
+
const currentMessages = [...anthropicMessages];
|
|
5349
|
+
while (stepCount < MAX_STEPS) {
|
|
5350
|
+
stepCount++;
|
|
5351
|
+
const isLastStep = stepCount >= MAX_STEPS;
|
|
5352
|
+
const params = {
|
|
5353
|
+
model: modelId,
|
|
5354
|
+
max_tokens: MAX_OUTPUT_TOKENS,
|
|
5355
|
+
system: systemBlocks,
|
|
5356
|
+
messages: currentMessages,
|
|
5357
|
+
tools: isLastStep ? [] : anthropicTools,
|
|
5358
|
+
stream: true
|
|
5359
|
+
};
|
|
5360
|
+
if (thinkingEffort && supportsAdaptiveThinking(modelId)) {
|
|
5361
|
+
params.thinking = { type: "adaptive" };
|
|
5362
|
+
const effort = mapEffort(thinkingEffort, modelId);
|
|
5363
|
+
if (effort) {
|
|
5364
|
+
params.output_config = {
|
|
5365
|
+
effort
|
|
5366
|
+
};
|
|
5367
|
+
}
|
|
4903
5368
|
}
|
|
4904
|
-
|
|
4905
|
-
if (
|
|
4906
|
-
|
|
4907
|
-
|
|
5369
|
+
for (const m of currentMessages) {
|
|
5370
|
+
if (!Array.isArray(m.content))
|
|
5371
|
+
continue;
|
|
5372
|
+
for (const block of m.content) {
|
|
5373
|
+
if (typeof block === "object" && block !== null) {
|
|
5374
|
+
delete block.cache_control;
|
|
5375
|
+
}
|
|
5376
|
+
}
|
|
4908
5377
|
}
|
|
4909
|
-
|
|
4910
|
-
|
|
4911
|
-
|
|
4912
|
-
if (
|
|
4913
|
-
|
|
5378
|
+
const lastMsg = currentMessages.length > 0 ? currentMessages[currentMessages.length - 1] : undefined;
|
|
5379
|
+
if (lastMsg && lastMsg.role === "user" && Array.isArray(lastMsg.content)) {
|
|
5380
|
+
const lastBlock = lastMsg.content[lastMsg.content.length - 1];
|
|
5381
|
+
if (lastBlock && typeof lastBlock === "object") {
|
|
5382
|
+
lastBlock.cache_control = {
|
|
5383
|
+
type: "ephemeral"
|
|
5384
|
+
};
|
|
4914
5385
|
}
|
|
4915
|
-
return this.sawExplicitReasoningThisStep ? "skip" : "reasoning";
|
|
4916
5386
|
}
|
|
4917
|
-
|
|
4918
|
-
|
|
5387
|
+
if (isApiLogEnabled()) {
|
|
5388
|
+
logApiEvent("Provider Request", {
|
|
5389
|
+
url: "https://api.anthropic.com/v1/messages",
|
|
5390
|
+
method: "POST",
|
|
5391
|
+
model: modelId,
|
|
5392
|
+
messageCount: currentMessages.length,
|
|
5393
|
+
toolCount: params.tools?.length ?? 0
|
|
5394
|
+
});
|
|
5395
|
+
}
|
|
5396
|
+
const stream = client.messages.stream(params, { signal });
|
|
5397
|
+
let assistantText = "";
|
|
5398
|
+
const thinkingBlocks = [];
|
|
5399
|
+
const toolCalls = [];
|
|
5400
|
+
let partialJson = "";
|
|
5401
|
+
let currentToolId = "";
|
|
5402
|
+
let currentToolName = "";
|
|
5403
|
+
let stepInputTokens = 0;
|
|
5404
|
+
let stepOutputTokens = 0;
|
|
5405
|
+
let stopReason;
|
|
5406
|
+
for await (const event of stream) {
|
|
5407
|
+
if (event.type === "message_start") {
|
|
5408
|
+
stepInputTokens = event.message.usage.input_tokens || 0;
|
|
5409
|
+
stepOutputTokens = event.message.usage.output_tokens || 0;
|
|
5410
|
+
} else if (event.type === "content_block_start") {
|
|
5411
|
+
if (event.content_block.type === "text") {} else if (event.content_block.type === "thinking") {
|
|
5412
|
+
thinkingBlocks.push({
|
|
5413
|
+
text: "",
|
|
5414
|
+
signature: ""
|
|
5415
|
+
});
|
|
5416
|
+
} else if (event.content_block.type === "redacted_thinking") {
|
|
5417
|
+
thinkingBlocks.push({
|
|
5418
|
+
text: "[Reasoning redacted]",
|
|
5419
|
+
signature: event.content_block.data,
|
|
5420
|
+
redacted: true
|
|
5421
|
+
});
|
|
5422
|
+
} else if (event.content_block.type === "tool_use") {
|
|
5423
|
+
currentToolId = event.content_block.id;
|
|
5424
|
+
currentToolName = event.content_block.name;
|
|
5425
|
+
partialJson = "";
|
|
5426
|
+
}
|
|
5427
|
+
} else if (event.type === "content_block_delta") {
|
|
5428
|
+
if (event.delta.type === "text_delta") {
|
|
5429
|
+
assistantText += event.delta.text;
|
|
5430
|
+
yield { type: "text-delta", delta: event.delta.text };
|
|
5431
|
+
} else if (event.delta.type === "thinking_delta") {
|
|
5432
|
+
const tb = thinkingBlocks[thinkingBlocks.length - 1];
|
|
5433
|
+
if (tb)
|
|
5434
|
+
tb.text += event.delta.thinking;
|
|
5435
|
+
yield {
|
|
5436
|
+
type: "reasoning-delta",
|
|
5437
|
+
delta: event.delta.thinking
|
|
5438
|
+
};
|
|
5439
|
+
} else if (event.delta.type === "input_json_delta") {
|
|
5440
|
+
partialJson += event.delta.partial_json;
|
|
5441
|
+
} else if (event.delta.type === "signature_delta") {
|
|
5442
|
+
const tb = thinkingBlocks[thinkingBlocks.length - 1];
|
|
5443
|
+
if (tb)
|
|
5444
|
+
tb.signature += event.delta.signature;
|
|
5445
|
+
}
|
|
5446
|
+
} else if (event.type === "content_block_stop") {
|
|
5447
|
+
if (currentToolId && currentToolName) {
|
|
5448
|
+
const args = parseStreamingJson(partialJson);
|
|
5449
|
+
toolCalls.push({
|
|
5450
|
+
id: currentToolId,
|
|
5451
|
+
name: currentToolName,
|
|
5452
|
+
args
|
|
5453
|
+
});
|
|
5454
|
+
yield {
|
|
5455
|
+
type: "tool-call-start",
|
|
5456
|
+
toolCallId: currentToolId,
|
|
5457
|
+
toolName: currentToolName,
|
|
5458
|
+
args
|
|
5459
|
+
};
|
|
5460
|
+
currentToolId = "";
|
|
5461
|
+
currentToolName = "";
|
|
5462
|
+
partialJson = "";
|
|
5463
|
+
}
|
|
5464
|
+
} else if (event.type === "message_delta") {
|
|
5465
|
+
if (event.delta.stop_reason) {
|
|
5466
|
+
stopReason = event.delta.stop_reason;
|
|
5467
|
+
}
|
|
5468
|
+
if (event.usage.output_tokens != null) {
|
|
5469
|
+
stepOutputTokens = event.usage.output_tokens;
|
|
5470
|
+
}
|
|
5471
|
+
}
|
|
5472
|
+
}
|
|
5473
|
+
totalInputTokens += stepInputTokens;
|
|
5474
|
+
totalOutputTokens += stepOutputTokens;
|
|
5475
|
+
contextTokens = stepInputTokens;
|
|
5476
|
+
logApiEvent("step finish", {
|
|
5477
|
+
stepNumber: stepCount,
|
|
5478
|
+
finishReason: stopReason,
|
|
5479
|
+
usage: {
|
|
5480
|
+
inputTokens: stepInputTokens,
|
|
5481
|
+
outputTokens: stepOutputTokens
|
|
5482
|
+
}
|
|
5483
|
+
});
|
|
5484
|
+
const toolResults = [];
|
|
5485
|
+
if (stopReason === "tool_use" && toolCalls.length > 0) {
|
|
5486
|
+
for (const tc of toolCalls) {
|
|
5487
|
+
const executor = toolExecutors.get(tc.name);
|
|
5488
|
+
let result;
|
|
5489
|
+
let isError = false;
|
|
5490
|
+
if (!executor) {
|
|
5491
|
+
result = `Unknown tool: ${tc.name}`;
|
|
5492
|
+
isError = true;
|
|
5493
|
+
} else {
|
|
5494
|
+
try {
|
|
5495
|
+
result = await executor(tc.args);
|
|
5496
|
+
} catch (err) {
|
|
5497
|
+
result = normalizeUnknownError(err).message;
|
|
5498
|
+
isError = true;
|
|
5499
|
+
}
|
|
5500
|
+
}
|
|
5501
|
+
toolResults.push({
|
|
5502
|
+
toolCallId: tc.id,
|
|
5503
|
+
toolName: tc.name,
|
|
5504
|
+
result,
|
|
5505
|
+
isError
|
|
5506
|
+
});
|
|
5507
|
+
yield {
|
|
5508
|
+
type: "tool-result",
|
|
5509
|
+
toolCallId: tc.id,
|
|
5510
|
+
toolName: tc.name,
|
|
5511
|
+
result,
|
|
5512
|
+
isError
|
|
5513
|
+
};
|
|
5514
|
+
}
|
|
5515
|
+
}
|
|
5516
|
+
const stepMessages = buildCoreMessages(assistantText, thinkingBlocks, toolCalls, toolResults);
|
|
5517
|
+
allNewMessages.push(...stepMessages);
|
|
5518
|
+
if (stopReason !== "tool_use" || toolCalls.length === 0) {
|
|
5519
|
+
break;
|
|
5520
|
+
}
|
|
5521
|
+
const assistantBlocks = [];
|
|
5522
|
+
for (const tb of thinkingBlocks) {
|
|
5523
|
+
if (tb.redacted) {
|
|
5524
|
+
assistantBlocks.push({
|
|
5525
|
+
type: "redacted_thinking",
|
|
5526
|
+
data: tb.signature
|
|
5527
|
+
});
|
|
5528
|
+
} else if (tb.text.trim() && tb.signature.trim()) {
|
|
5529
|
+
assistantBlocks.push({
|
|
5530
|
+
type: "thinking",
|
|
5531
|
+
thinking: tb.text,
|
|
5532
|
+
signature: tb.signature
|
|
5533
|
+
});
|
|
5534
|
+
}
|
|
5535
|
+
}
|
|
5536
|
+
if (assistantText.trim()) {
|
|
5537
|
+
assistantBlocks.push({ type: "text", text: assistantText });
|
|
5538
|
+
}
|
|
5539
|
+
for (const tc of toolCalls) {
|
|
5540
|
+
assistantBlocks.push({
|
|
5541
|
+
type: "tool_use",
|
|
5542
|
+
id: tc.id,
|
|
5543
|
+
name: tc.name,
|
|
5544
|
+
input: tc.args
|
|
5545
|
+
});
|
|
5546
|
+
}
|
|
5547
|
+
currentMessages.push({
|
|
5548
|
+
role: "assistant",
|
|
5549
|
+
content: assistantBlocks
|
|
5550
|
+
});
|
|
5551
|
+
const resultBlocks = toolResults.map((tr) => ({
|
|
5552
|
+
type: "tool_result",
|
|
5553
|
+
tool_use_id: tr.toolCallId,
|
|
5554
|
+
content: typeof tr.result === "string" ? tr.result : JSON.stringify(tr.result ?? ""),
|
|
5555
|
+
is_error: tr.isError
|
|
5556
|
+
}));
|
|
5557
|
+
currentMessages.push({ role: "user", content: resultBlocks });
|
|
4919
5558
|
}
|
|
5559
|
+
logApiEvent("turn complete", {
|
|
5560
|
+
newMessagesCount: allNewMessages.length,
|
|
5561
|
+
inputTokens: totalInputTokens,
|
|
5562
|
+
outputTokens: totalOutputTokens
|
|
5563
|
+
});
|
|
5564
|
+
yield {
|
|
5565
|
+
type: "turn-complete",
|
|
5566
|
+
inputTokens: totalInputTokens,
|
|
5567
|
+
outputTokens: totalOutputTokens,
|
|
5568
|
+
contextTokens,
|
|
5569
|
+
messages: allNewMessages
|
|
5570
|
+
};
|
|
5571
|
+
} catch (err) {
|
|
5572
|
+
const normalizedError = normalizeUnknownError(err);
|
|
5573
|
+
logApiEvent("turn error", normalizedError);
|
|
5574
|
+
yield {
|
|
5575
|
+
type: "turn-error",
|
|
5576
|
+
error: normalizedError,
|
|
5577
|
+
partialMessages: allNewMessages
|
|
5578
|
+
};
|
|
4920
5579
|
}
|
|
4921
5580
|
}
|
|
4922
|
-
function mapCommentaryChunkToTurnEvent(chunk) {
|
|
4923
|
-
if (chunk.type !== "text-delta")
|
|
4924
|
-
return null;
|
|
4925
|
-
return {
|
|
4926
|
-
type: "reasoning-delta",
|
|
4927
|
-
delta: extractTextDelta(chunk)
|
|
4928
|
-
};
|
|
4929
|
-
}
|
|
4930
5581
|
|
|
4931
|
-
|
|
4932
|
-
|
|
4933
|
-
|
|
4934
|
-
|
|
4935
|
-
|
|
4936
|
-
|
|
4937
|
-
|
|
4938
|
-
|
|
4939
|
-
|
|
4940
|
-
|
|
4941
|
-
|
|
4942
|
-
|
|
4943
|
-
|
|
4944
|
-
|
|
4945
|
-
|
|
4946
|
-
|
|
4947
|
-
|
|
4948
|
-
|
|
4949
|
-
|
|
4950
|
-
|
|
5582
|
+
// src/llm-api/turn-execution.ts
|
|
5583
|
+
import { dynamicTool, jsonSchema } from "ai";
|
|
5584
|
+
|
|
5585
|
+
// src/llm-api/turn-stream-events.ts
|
|
5586
|
+
function shouldLogStreamChunk(c11) {
|
|
5587
|
+
return c11.type !== "text-delta" && c11.type !== "reasoning" && c11.type !== "reasoning-delta";
|
|
5588
|
+
}
|
|
5589
|
+
function extractToolArgs(c11) {
|
|
5590
|
+
return c11.input ?? c11.args;
|
|
5591
|
+
}
|
|
5592
|
+
function hasRenderableToolArgs(args) {
|
|
5593
|
+
if (args === null || args === undefined)
|
|
5594
|
+
return false;
|
|
5595
|
+
if (typeof args === "string")
|
|
5596
|
+
return args.trim().length > 0;
|
|
5597
|
+
if (Array.isArray(args))
|
|
5598
|
+
return args.length > 0;
|
|
5599
|
+
if (typeof args === "object")
|
|
5600
|
+
return Object.keys(args).length > 0;
|
|
5601
|
+
return true;
|
|
5602
|
+
}
|
|
5603
|
+
function mapStreamChunkToTurnEvent(c11) {
|
|
5604
|
+
switch (c11.type) {
|
|
5605
|
+
case "text-delta": {
|
|
5606
|
+
const delta = typeof c11.text === "string" ? c11.text : "";
|
|
4951
5607
|
return {
|
|
4952
|
-
|
|
4953
|
-
|
|
5608
|
+
type: "text-delta",
|
|
5609
|
+
delta
|
|
4954
5610
|
};
|
|
4955
5611
|
}
|
|
4956
|
-
|
|
4957
|
-
|
|
4958
|
-
|
|
5612
|
+
case "reasoning-delta":
|
|
5613
|
+
case "reasoning": {
|
|
5614
|
+
const delta = getReasoningDeltaFromStreamChunk(c11);
|
|
5615
|
+
if (delta === null)
|
|
5616
|
+
return null;
|
|
4959
5617
|
return {
|
|
4960
|
-
|
|
4961
|
-
|
|
5618
|
+
type: "reasoning-delta",
|
|
5619
|
+
delta
|
|
4962
5620
|
};
|
|
4963
5621
|
}
|
|
4964
|
-
|
|
4965
|
-
const
|
|
4966
|
-
const
|
|
4967
|
-
if (
|
|
4968
|
-
|
|
4969
|
-
return { chunk, suppressTurnEvent: false };
|
|
4970
|
-
}
|
|
4971
|
-
const nextToolCallId = this.consumeNextTracked(toolName) ?? this.nextSyntheticToolCallId();
|
|
5622
|
+
case "tool-input-start": {
|
|
5623
|
+
const args = extractToolArgs(c11);
|
|
5624
|
+
const hasStableToolCallId = typeof c11.toolCallId === "string" && c11.toolCallId.trim().length > 0;
|
|
5625
|
+
if (hasStableToolCallId && !hasRenderableToolArgs(args))
|
|
5626
|
+
return null;
|
|
4972
5627
|
return {
|
|
4973
|
-
|
|
4974
|
-
|
|
5628
|
+
type: "tool-call-start",
|
|
5629
|
+
toolCallId: String(c11.toolCallId ?? ""),
|
|
5630
|
+
toolName: String(c11.toolName ?? ""),
|
|
5631
|
+
args
|
|
4975
5632
|
};
|
|
4976
5633
|
}
|
|
4977
|
-
|
|
4978
|
-
|
|
4979
|
-
|
|
4980
|
-
|
|
4981
|
-
|
|
4982
|
-
|
|
4983
|
-
|
|
4984
|
-
return { ...chunk, toolCallId };
|
|
4985
|
-
}
|
|
4986
|
-
nextSyntheticToolCallId() {
|
|
4987
|
-
this.syntheticCount += 1;
|
|
4988
|
-
return `synthetic-tool-call-${this.syntheticCount}`;
|
|
4989
|
-
}
|
|
4990
|
-
trackStart(toolName, toolCallId) {
|
|
4991
|
-
const pending = this.pendingByTool.get(toolName) ?? [];
|
|
4992
|
-
pending.push(toolCallId);
|
|
4993
|
-
this.pendingByTool.set(toolName, pending);
|
|
4994
|
-
}
|
|
4995
|
-
trackDeferredStart(toolName) {
|
|
4996
|
-
this.deferredStartsByTool.set(toolName, (this.deferredStartsByTool.get(toolName) ?? 0) + 1);
|
|
4997
|
-
}
|
|
4998
|
-
consumeDeferredStart(toolName) {
|
|
4999
|
-
const count = this.deferredStartsByTool.get(toolName) ?? 0;
|
|
5000
|
-
if (count <= 0)
|
|
5001
|
-
return;
|
|
5002
|
-
if (count === 1) {
|
|
5003
|
-
this.deferredStartsByTool.delete(toolName);
|
|
5004
|
-
return;
|
|
5634
|
+
case "tool-call": {
|
|
5635
|
+
return {
|
|
5636
|
+
type: "tool-call-start",
|
|
5637
|
+
toolCallId: String(c11.toolCallId ?? ""),
|
|
5638
|
+
toolName: String(c11.toolName ?? ""),
|
|
5639
|
+
args: extractToolArgs(c11)
|
|
5640
|
+
};
|
|
5005
5641
|
}
|
|
5006
|
-
|
|
5007
|
-
|
|
5008
|
-
|
|
5009
|
-
|
|
5010
|
-
|
|
5011
|
-
|
|
5012
|
-
|
|
5013
|
-
|
|
5014
|
-
return;
|
|
5015
|
-
pending.splice(idx, 1);
|
|
5016
|
-
if (pending.length === 0)
|
|
5017
|
-
this.pendingByTool.delete(toolName);
|
|
5018
|
-
}
|
|
5019
|
-
consumeNextTracked(toolName) {
|
|
5020
|
-
const pending = this.pendingByTool.get(toolName);
|
|
5021
|
-
if (!pending || pending.length === 0)
|
|
5022
|
-
return null;
|
|
5023
|
-
const toolCallId = pending.shift() ?? null;
|
|
5024
|
-
if (pending.length === 0)
|
|
5025
|
-
this.pendingByTool.delete(toolName);
|
|
5026
|
-
return toolCallId;
|
|
5027
|
-
}
|
|
5028
|
-
}
|
|
5029
|
-
async function* mapFullStreamToTurnEvents(stream, opts) {
|
|
5030
|
-
const toolCallTracker = new StreamToolCallTracker;
|
|
5031
|
-
const textPhaseTracker = new StreamTextPhaseTracker;
|
|
5032
|
-
for await (const originalChunk of stream) {
|
|
5033
|
-
const prepared = toolCallTracker.prepare(originalChunk);
|
|
5034
|
-
const chunk = prepared.chunk;
|
|
5035
|
-
const route = textPhaseTracker.route(chunk);
|
|
5036
|
-
if (!prepared.suppressTurnEvent && route !== "skip" && shouldLogStreamChunk(chunk)) {
|
|
5037
|
-
opts.onChunk?.(chunk);
|
|
5642
|
+
case "tool-result": {
|
|
5643
|
+
return {
|
|
5644
|
+
type: "tool-result",
|
|
5645
|
+
toolCallId: String(c11.toolCallId ?? ""),
|
|
5646
|
+
toolName: String(c11.toolName ?? ""),
|
|
5647
|
+
result: "output" in c11 ? c11.output : ("result" in c11) ? c11.result : undefined,
|
|
5648
|
+
isError: "isError" in c11 ? Boolean(c11.isError) : false
|
|
5649
|
+
};
|
|
5038
5650
|
}
|
|
5039
|
-
|
|
5040
|
-
|
|
5041
|
-
|
|
5042
|
-
|
|
5043
|
-
|
|
5651
|
+
case "tool-error":
|
|
5652
|
+
return {
|
|
5653
|
+
type: "tool-result",
|
|
5654
|
+
toolCallId: String(c11.toolCallId ?? ""),
|
|
5655
|
+
toolName: String(c11.toolName ?? ""),
|
|
5656
|
+
result: c11.error ?? "Tool execution failed",
|
|
5657
|
+
isError: true
|
|
5658
|
+
};
|
|
5659
|
+
case "error": {
|
|
5660
|
+
throw normalizeUnknownError(c11.error);
|
|
5661
|
+
}
|
|
5662
|
+
default:
|
|
5663
|
+
return null;
|
|
5044
5664
|
}
|
|
5045
5665
|
}
|
|
5046
5666
|
|
|
5047
|
-
// src/llm-api/turn-
|
|
5048
|
-
|
|
5049
|
-
|
|
5050
|
-
// src/llm-api/provider-options.ts
|
|
5051
|
-
var ANTHROPIC_BUDGET = {
|
|
5052
|
-
low: 4096,
|
|
5053
|
-
medium: 8192,
|
|
5054
|
-
high: 16384,
|
|
5055
|
-
xhigh: 32768
|
|
5056
|
-
};
|
|
5057
|
-
var GEMINI_BUDGET = {
|
|
5058
|
-
low: 4096,
|
|
5059
|
-
medium: 8192,
|
|
5060
|
-
high: 16384,
|
|
5061
|
-
xhigh: 24575
|
|
5062
|
-
};
|
|
5063
|
-
function clampEffort(effort, max) {
|
|
5064
|
-
const ORDER = ["low", "medium", "high", "xhigh"];
|
|
5065
|
-
const effortIdx = ORDER.indexOf(effort);
|
|
5066
|
-
const maxIdx = ORDER.indexOf(max);
|
|
5067
|
-
return ORDER[Math.min(effortIdx, maxIdx)];
|
|
5667
|
+
// src/llm-api/turn-execution.ts
|
|
5668
|
+
function isZodSchema(s) {
|
|
5669
|
+
return s !== null && typeof s === "object" && "_def" in s;
|
|
5068
5670
|
}
|
|
5069
|
-
function
|
|
5070
|
-
const
|
|
5071
|
-
|
|
5072
|
-
|
|
5073
|
-
|
|
5074
|
-
|
|
5075
|
-
|
|
5076
|
-
|
|
5077
|
-
|
|
5078
|
-
|
|
5079
|
-
|
|
5671
|
+
function toCoreTool(def) {
|
|
5672
|
+
const schema = isZodSchema(def.schema) ? def.schema : jsonSchema(def.schema);
|
|
5673
|
+
return dynamicTool({
|
|
5674
|
+
description: def.description,
|
|
5675
|
+
inputSchema: schema,
|
|
5676
|
+
execute: async (input) => {
|
|
5677
|
+
try {
|
|
5678
|
+
return await def.execute(input);
|
|
5679
|
+
} catch (err) {
|
|
5680
|
+
throw normalizeUnknownError(err);
|
|
5681
|
+
}
|
|
5080
5682
|
}
|
|
5081
|
-
};
|
|
5082
|
-
}
|
|
5083
|
-
function getOpenAIThinkingOptions(modelId, effort) {
|
|
5084
|
-
const supportsXhigh = /^gpt-5\.[2-9]/.test(modelId) || /^o4/.test(modelId);
|
|
5085
|
-
const clamped = supportsXhigh ? effort : clampEffort(effort, "high");
|
|
5086
|
-
return { openai: { reasoningEffort: clamped, reasoningSummary: "auto" } };
|
|
5683
|
+
});
|
|
5087
5684
|
}
|
|
5088
|
-
function
|
|
5089
|
-
|
|
5090
|
-
|
|
5091
|
-
|
|
5092
|
-
thinkingConfig: {
|
|
5093
|
-
includeThoughts: true,
|
|
5094
|
-
thinkingLevel: clampEffort(effort, "high")
|
|
5095
|
-
}
|
|
5096
|
-
}
|
|
5097
|
-
};
|
|
5685
|
+
function buildToolSet(tools) {
|
|
5686
|
+
const toolSet = {};
|
|
5687
|
+
for (const def of tools) {
|
|
5688
|
+
toolSet[def.name] = toCoreTool(def);
|
|
5098
5689
|
}
|
|
5690
|
+
return toolSet;
|
|
5691
|
+
}
|
|
5692
|
+
function createTurnStepTracker(opts) {
|
|
5693
|
+
let stepCount = 0;
|
|
5694
|
+
let inputTokens = 0;
|
|
5695
|
+
let outputTokens = 0;
|
|
5696
|
+
let contextTokens = 0;
|
|
5697
|
+
let partialMessages = [];
|
|
5099
5698
|
return {
|
|
5100
|
-
|
|
5101
|
-
|
|
5102
|
-
|
|
5103
|
-
|
|
5104
|
-
|
|
5105
|
-
|
|
5699
|
+
onStepFinish: (step) => {
|
|
5700
|
+
opts.onStepLog({
|
|
5701
|
+
stepNumber: stepCount + 1,
|
|
5702
|
+
finishReason: step.finishReason,
|
|
5703
|
+
usage: step.usage
|
|
5704
|
+
});
|
|
5705
|
+
inputTokens += step.usage?.inputTokens ?? 0;
|
|
5706
|
+
outputTokens += step.usage?.outputTokens ?? 0;
|
|
5707
|
+
contextTokens = step.usage?.inputTokens ?? contextTokens;
|
|
5708
|
+
stepCount += 1;
|
|
5709
|
+
const s = step;
|
|
5710
|
+
partialMessages = s.response?.messages ?? s.messages ?? partialMessages;
|
|
5711
|
+
},
|
|
5712
|
+
getState: () => ({
|
|
5713
|
+
stepCount,
|
|
5714
|
+
inputTokens,
|
|
5715
|
+
outputTokens,
|
|
5716
|
+
contextTokens,
|
|
5717
|
+
partialMessages
|
|
5718
|
+
})
|
|
5106
5719
|
};
|
|
5107
5720
|
}
|
|
5108
|
-
var
|
|
5109
|
-
|
|
5110
|
-
|
|
5111
|
-
build: getAnthropicThinkingOptions
|
|
5112
|
-
},
|
|
5113
|
-
{
|
|
5114
|
-
supports: isOpenAIReasoningModelFamily,
|
|
5115
|
-
build: getOpenAIThinkingOptions
|
|
5116
|
-
},
|
|
5117
|
-
{
|
|
5118
|
-
supports: isGeminiModelFamily,
|
|
5119
|
-
build: getGeminiThinkingOptions
|
|
5120
|
-
}
|
|
5121
|
-
];
|
|
5122
|
-
function getThinkingProviderOptions(modelString, effort) {
|
|
5123
|
-
if (!supportsThinking(modelString))
|
|
5721
|
+
var TOOL_RESULT_CHUNK_TYPES = new Set(["tool-result", "tool-error"]);
|
|
5722
|
+
function normalizeToolCallId(raw) {
|
|
5723
|
+
if (typeof raw !== "string")
|
|
5124
5724
|
return null;
|
|
5125
|
-
const
|
|
5126
|
-
|
|
5127
|
-
if (!strategy.supports(modelString))
|
|
5128
|
-
continue;
|
|
5129
|
-
return strategy.build(modelId, effort);
|
|
5130
|
-
}
|
|
5131
|
-
return null;
|
|
5725
|
+
const trimmed = raw.trim();
|
|
5726
|
+
return trimmed ? trimmed : null;
|
|
5132
5727
|
}
|
|
5133
|
-
|
|
5134
|
-
|
|
5135
|
-
|
|
5136
|
-
|
|
5137
|
-
|
|
5138
|
-
for (const [match, family] of CACHE_FAMILY_RULES) {
|
|
5139
|
-
if (match(modelString))
|
|
5140
|
-
return family;
|
|
5141
|
-
}
|
|
5142
|
-
return "none";
|
|
5728
|
+
function normalizeToolName(raw) {
|
|
5729
|
+
if (typeof raw !== "string")
|
|
5730
|
+
return "tool";
|
|
5731
|
+
const trimmed = raw.trim();
|
|
5732
|
+
return trimmed || "tool";
|
|
5143
5733
|
}
|
|
5144
|
-
function
|
|
5145
|
-
|
|
5734
|
+
function isRecord6(value) {
|
|
5735
|
+
return value !== null && typeof value === "object";
|
|
5736
|
+
}
|
|
5737
|
+
function normalizeTextPartId(raw) {
|
|
5738
|
+
if (typeof raw !== "string")
|
|
5146
5739
|
return null;
|
|
5147
|
-
const
|
|
5148
|
-
|
|
5149
|
-
return { google: { cachedContent: opts.googleCachedContent } };
|
|
5150
|
-
}
|
|
5151
|
-
return null;
|
|
5740
|
+
const trimmed = raw.trim();
|
|
5741
|
+
return trimmed ? trimmed : null;
|
|
5152
5742
|
}
|
|
5153
|
-
|
|
5154
|
-
|
|
5155
|
-
|
|
5156
|
-
|
|
5157
|
-
|
|
5158
|
-
|
|
5159
|
-
|
|
5160
|
-
|
|
5161
|
-
pruningMode,
|
|
5162
|
-
toolResultPayloadCapBytes,
|
|
5163
|
-
promptCachingEnabled
|
|
5164
|
-
} = input;
|
|
5165
|
-
const apiLogOn = isApiLogEnabled();
|
|
5166
|
-
const strippedRuntimeToolFields = stripToolRuntimeInputFields(messages);
|
|
5167
|
-
if (strippedRuntimeToolFields !== messages && apiLogOn) {
|
|
5168
|
-
logApiEvent("runtime tool input fields stripped", { modelString });
|
|
5169
|
-
}
|
|
5170
|
-
const geminiResult = sanitizeGeminiToolMessagesWithMetadata(strippedRuntimeToolFields, modelString, toolCount > 0);
|
|
5171
|
-
if (geminiResult.repaired && apiLogOn) {
|
|
5172
|
-
logApiEvent("gemini tool history repaired", {
|
|
5173
|
-
modelString,
|
|
5174
|
-
reason: geminiResult.reason,
|
|
5175
|
-
repairedFromIndex: geminiResult.repairedFromIndex,
|
|
5176
|
-
droppedMessageCount: geminiResult.droppedMessageCount,
|
|
5177
|
-
tailOnlyAffected: geminiResult.tailOnlyAffected
|
|
5178
|
-
});
|
|
5179
|
-
}
|
|
5180
|
-
const openaiStripped = stripOpenAIHistoryTransforms(geminiResult.messages, modelString);
|
|
5181
|
-
if (openaiStripped !== geminiResult.messages && apiLogOn) {
|
|
5182
|
-
logApiEvent("openai history transforms applied", { modelString });
|
|
5183
|
-
}
|
|
5184
|
-
const normalised = normalizeOpenAICompatibleToolCallInputs(openaiStripped, modelString);
|
|
5185
|
-
if (normalised !== openaiStripped && apiLogOn) {
|
|
5186
|
-
logApiEvent("openai-compatible tool input normalized", { modelString });
|
|
5187
|
-
}
|
|
5188
|
-
const preStats = apiLogOn ? getMessageDiagnostics(normalised) : getMessageStats(normalised);
|
|
5189
|
-
if (apiLogOn)
|
|
5190
|
-
logApiEvent("turn context pre-prune", preStats);
|
|
5191
|
-
const pruned = applyContextPruning(normalised, pruningMode);
|
|
5192
|
-
const postStats = apiLogOn ? getMessageDiagnostics(pruned) : getMessageStats(pruned);
|
|
5193
|
-
if (apiLogOn)
|
|
5194
|
-
logApiEvent("turn context post-prune", postStats);
|
|
5195
|
-
const compacted = compactToolResultPayloads(pruned, toolResultPayloadCapBytes);
|
|
5196
|
-
if (compacted !== pruned && apiLogOn) {
|
|
5197
|
-
logApiEvent("turn context post-compaction", {
|
|
5198
|
-
capBytes: toolResultPayloadCapBytes,
|
|
5199
|
-
diagnostics: getMessageDiagnostics(compacted)
|
|
5200
|
-
});
|
|
5201
|
-
}
|
|
5202
|
-
let finalMessages = compacted;
|
|
5203
|
-
let finalSystemPrompt = systemPrompt;
|
|
5204
|
-
const cacheFamily = getCacheFamily(modelString);
|
|
5205
|
-
if (cacheFamily === "anthropic" && promptCachingEnabled) {
|
|
5206
|
-
const annotated = annotateAnthropicCacheBreakpoints(compacted, systemPrompt);
|
|
5207
|
-
finalMessages = annotated.messages;
|
|
5208
|
-
finalSystemPrompt = annotated.systemPrompt;
|
|
5209
|
-
if (apiLogOn) {
|
|
5210
|
-
logApiEvent("Anthropic prompt caching", annotated.diagnostics);
|
|
5211
|
-
}
|
|
5212
|
-
}
|
|
5213
|
-
const wasPruned = (pruningMode === "balanced" || pruningMode === "aggressive") && (postStats.messageCount < preStats.messageCount || postStats.totalBytes < preStats.totalBytes);
|
|
5214
|
-
return {
|
|
5215
|
-
messages: finalMessages,
|
|
5216
|
-
systemPrompt: finalSystemPrompt,
|
|
5217
|
-
pruned: wasPruned,
|
|
5218
|
-
prePruneMessageCount: preStats.messageCount,
|
|
5219
|
-
prePruneTotalBytes: preStats.totalBytes,
|
|
5220
|
-
postPruneMessageCount: postStats.messageCount,
|
|
5221
|
-
postPruneTotalBytes: postStats.totalBytes
|
|
5222
|
-
};
|
|
5743
|
+
function getOpenAITextPhase2(chunk) {
|
|
5744
|
+
const providerData = isRecord6(chunk.providerMetadata) ? chunk.providerMetadata : isRecord6(chunk.providerOptions) ? chunk.providerOptions : null;
|
|
5745
|
+
if (!providerData)
|
|
5746
|
+
return null;
|
|
5747
|
+
const openai = providerData.openai;
|
|
5748
|
+
if (!isRecord6(openai))
|
|
5749
|
+
return null;
|
|
5750
|
+
return openai.phase === "commentary" || openai.phase === "final_answer" ? openai.phase : null;
|
|
5223
5751
|
}
|
|
5224
|
-
|
|
5225
|
-
|
|
5226
|
-
|
|
5227
|
-
|
|
5752
|
+
function extractTextDelta(chunk) {
|
|
5753
|
+
if (typeof chunk.text === "string")
|
|
5754
|
+
return chunk.text;
|
|
5755
|
+
if (typeof chunk.textDelta === "string")
|
|
5756
|
+
return chunk.textDelta;
|
|
5757
|
+
if (typeof chunk.delta === "string")
|
|
5758
|
+
return chunk.delta;
|
|
5759
|
+
return "";
|
|
5228
5760
|
}
|
|
5229
|
-
|
|
5230
|
-
|
|
5231
|
-
|
|
5232
|
-
|
|
5233
|
-
|
|
5234
|
-
|
|
5761
|
+
|
|
5762
|
+
class StreamTextPhaseTracker {
|
|
5763
|
+
phaseByTextPartId = new Map;
|
|
5764
|
+
sawExplicitReasoningThisStep = false;
|
|
5765
|
+
route(chunk) {
|
|
5766
|
+
const textPartId = normalizeTextPartId(chunk.id);
|
|
5767
|
+
switch (chunk.type) {
|
|
5768
|
+
case "start-step": {
|
|
5769
|
+
this.sawExplicitReasoningThisStep = false;
|
|
5770
|
+
return "text";
|
|
5771
|
+
}
|
|
5772
|
+
case "reasoning-start":
|
|
5773
|
+
case "reasoning-delta":
|
|
5774
|
+
case "reasoning-end":
|
|
5775
|
+
case "reasoning": {
|
|
5776
|
+
this.sawExplicitReasoningThisStep = true;
|
|
5777
|
+
return "text";
|
|
5778
|
+
}
|
|
5779
|
+
case "text-start": {
|
|
5780
|
+
const phase = getOpenAITextPhase2(chunk);
|
|
5781
|
+
if (textPartId && phase)
|
|
5782
|
+
this.phaseByTextPartId.set(textPartId, phase);
|
|
5783
|
+
return "skip";
|
|
5784
|
+
}
|
|
5785
|
+
case "text-end": {
|
|
5786
|
+
if (textPartId)
|
|
5787
|
+
this.phaseByTextPartId.delete(textPartId);
|
|
5788
|
+
return "skip";
|
|
5789
|
+
}
|
|
5790
|
+
case "text-delta": {
|
|
5791
|
+
if (!textPartId)
|
|
5792
|
+
return "text";
|
|
5793
|
+
if (this.phaseByTextPartId.get(textPartId) !== "commentary") {
|
|
5794
|
+
return "text";
|
|
5795
|
+
}
|
|
5796
|
+
return this.sawExplicitReasoningThisStep ? "skip" : "reasoning";
|
|
5797
|
+
}
|
|
5798
|
+
default:
|
|
5799
|
+
return "text";
|
|
5800
|
+
}
|
|
5235
5801
|
}
|
|
5236
|
-
return output;
|
|
5237
5802
|
}
|
|
5238
|
-
function
|
|
5239
|
-
|
|
5240
|
-
|
|
5241
|
-
thinkingEffort,
|
|
5242
|
-
promptCachingEnabled,
|
|
5243
|
-
openaiPromptCacheRetention,
|
|
5244
|
-
googleCachedContent,
|
|
5245
|
-
toolCount,
|
|
5246
|
-
hasSystemPrompt
|
|
5247
|
-
} = input;
|
|
5248
|
-
const thinkingOpts = thinkingEffort ? getThinkingProviderOptions(modelString, thinkingEffort) : null;
|
|
5249
|
-
const reasoningSummaryRequested = isRecord6(thinkingOpts) && isRecord6(thinkingOpts.openai) && typeof thinkingOpts.openai.reasoningSummary === "string";
|
|
5250
|
-
const cacheFamily = getCacheFamily(modelString);
|
|
5251
|
-
const cacheOpts = getCachingProviderOptions(modelString, {
|
|
5252
|
-
enabled: promptCachingEnabled,
|
|
5253
|
-
openaiRetention: openaiPromptCacheRetention,
|
|
5254
|
-
googleCachedContent,
|
|
5255
|
-
googleExplicitCachingCompatible: toolCount === 0 && !hasSystemPrompt
|
|
5256
|
-
});
|
|
5257
|
-
const baseProviderOpts = {
|
|
5258
|
-
...thinkingOpts ?? {},
|
|
5259
|
-
...isOpenAIGPT(modelString) ? {
|
|
5260
|
-
openai: {
|
|
5261
|
-
store: false,
|
|
5262
|
-
...isRecord6(thinkingOpts?.openai) ? thinkingOpts.openai : {}
|
|
5263
|
-
}
|
|
5264
|
-
} : {}
|
|
5265
|
-
};
|
|
5266
|
-
const providerOptions = cacheOpts ? mergeDeep(baseProviderOpts, cacheOpts) : baseProviderOpts;
|
|
5803
|
+
function mapCommentaryChunkToTurnEvent(chunk) {
|
|
5804
|
+
if (chunk.type !== "text-delta")
|
|
5805
|
+
return null;
|
|
5267
5806
|
return {
|
|
5268
|
-
|
|
5269
|
-
|
|
5270
|
-
cacheOpts,
|
|
5271
|
-
providerOptions,
|
|
5272
|
-
reasoningSummaryRequested
|
|
5807
|
+
type: "reasoning-delta",
|
|
5808
|
+
delta: extractTextDelta(chunk)
|
|
5273
5809
|
};
|
|
5274
5810
|
}
|
|
5275
5811
|
|
|
5276
|
-
|
|
5277
|
-
|
|
5278
|
-
|
|
5279
|
-
|
|
5280
|
-
|
|
5281
|
-
|
|
5282
|
-
|
|
5283
|
-
|
|
5284
|
-
|
|
5285
|
-
|
|
5286
|
-
|
|
5287
|
-
|
|
5288
|
-
|
|
5289
|
-
|
|
5290
|
-
|
|
5291
|
-
|
|
5292
|
-
|
|
5293
|
-
|
|
5294
|
-
|
|
5295
|
-
|
|
5296
|
-
|
|
5297
|
-
|
|
5298
|
-
|
|
5299
|
-
|
|
5300
|
-
|
|
5301
|
-
|
|
5302
|
-
|
|
5303
|
-
|
|
5304
|
-
|
|
5305
|
-
|
|
5306
|
-
|
|
5307
|
-
|
|
5812
|
+
// Tracks tool-call lifecycles across a provider stream so every tool result
// can be paired with a tool-call id, minting synthetic ids when the provider
// omits them. `prepare` is called once per chunk, in stream order.
class StreamToolCallTracker {
  // Counter used to mint unique synthetic tool-call ids.
  syntheticCount = 0;
  // toolName -> FIFO list of tool-call ids that have started but not yet
  // been matched to a result chunk.
  pendingByTool = new Map;
  // toolName -> count of "tool-input-start" chunks that arrived with neither
  // renderable args nor an id; each is consumed by a later "tool-call".
  deferredStartsByTool = new Map;
  // Normalizes one stream chunk. Returns the (possibly rewritten) chunk plus
  // a flag telling the caller to suppress turn-event mapping for it.
  prepare(chunk) {
    const type = chunk.type;
    if (!type) {
      // Untyped chunk: pass through untouched.
      return { chunk, suppressTurnEvent: false };
    }
    if (type === "tool-input-start") {
      const toolName = normalizeToolName(chunk.toolName);
      const toolCallId = normalizeToolCallId(chunk.toolCallId);
      const args = extractToolArgs(chunk);
      if (!hasRenderableToolArgs(args)) {
        if (!toolCallId) {
          // Nothing to render and no id yet: remember the start and hide the
          // chunk; the matching "tool-call" chunk will account for it later.
          this.trackDeferredStart(toolName);
          return { chunk, suppressTurnEvent: true };
        }
        // Has an id but no renderable args: emit as-is without tracking.
        return { chunk, suppressTurnEvent: false };
      }
      return {
        chunk: this.trackRenderableStart(chunk, toolName, toolCallId),
        suppressTurnEvent: false
      };
    }
    if (type === "tool-call") {
      const toolName = normalizeToolName(chunk.toolName);
      // A deferred (suppressed) start, if any, is satisfied by this chunk.
      this.consumeDeferredStart(toolName);
      return {
        chunk: this.trackRenderableStart(chunk, toolName, normalizeToolCallId(chunk.toolCallId)),
        suppressTurnEvent: false
      };
    }
    if (TOOL_RESULT_CHUNK_TYPES.has(type)) {
      const toolName = normalizeToolName(chunk.toolName);
      const existingToolCallId = normalizeToolCallId(chunk.toolCallId);
      if (existingToolCallId) {
        // Result already carries an id: just retire the matching pending call.
        this.consumeTracked(toolName, existingToolCallId);
        return { chunk, suppressTurnEvent: false };
      }
      // Result arrived without an id: pair it with the oldest pending call
      // for this tool, or mint a synthetic id as a last resort.
      const nextToolCallId = this.consumeNextTracked(toolName) ?? this.nextSyntheticToolCallId();
      return {
        chunk: { ...chunk, toolCallId: nextToolCallId },
        suppressTurnEvent: false
      };
    }
    return { chunk, suppressTurnEvent: false };
  }
  // Records a renderable tool start and ensures the chunk carries an id,
  // rewriting the chunk only when an id had to be minted.
  trackRenderableStart(chunk, toolName, existingToolCallId) {
    const toolCallId = existingToolCallId ?? this.nextSyntheticToolCallId();
    this.trackStart(toolName, toolCallId);
    if (toolCallId === chunk.toolCallId)
      return chunk;
    return { ...chunk, toolCallId };
  }
  // Mints a new unique synthetic tool-call id.
  nextSyntheticToolCallId() {
    this.syntheticCount += 1;
    return `synthetic-tool-call-${this.syntheticCount}`;
  }
  // Appends a started tool-call id to the tool's pending FIFO.
  trackStart(toolName, toolCallId) {
    const pending = this.pendingByTool.get(toolName) ?? [];
    pending.push(toolCallId);
    this.pendingByTool.set(toolName, pending);
  }
  // Increments the deferred-start counter for a tool.
  trackDeferredStart(toolName) {
    this.deferredStartsByTool.set(toolName, (this.deferredStartsByTool.get(toolName) ?? 0) + 1);
  }
  // Decrements the deferred-start counter, removing the entry at zero.
  consumeDeferredStart(toolName) {
    const count = this.deferredStartsByTool.get(toolName) ?? 0;
    if (count <= 0)
      return;
    if (count === 1) {
      this.deferredStartsByTool.delete(toolName);
      return;
    }
    this.deferredStartsByTool.set(toolName, count - 1);
  }
  // Removes a specific pending tool-call id for the tool, if tracked.
  consumeTracked(toolName, toolCallId) {
    const pending = this.pendingByTool.get(toolName);
    if (!pending || pending.length === 0)
      return;
    const idx = pending.indexOf(toolCallId);
    if (idx === -1)
      return;
    pending.splice(idx, 1);
    if (pending.length === 0)
      this.pendingByTool.delete(toolName);
  }
  // Pops and returns the oldest pending tool-call id for the tool, or null
  // when none is tracked.
  consumeNextTracked(toolName) {
    const pending = this.pendingByTool.get(toolName);
    if (!pending || pending.length === 0)
      return null;
    const toolCallId = pending.shift() ?? null;
    if (pending.length === 0)
      this.pendingByTool.delete(toolName);
    return toolCallId;
  }
}
|
|
5910
|
+
// Converts a provider fullStream into turn events.
// Each chunk is first normalized by the tool-call tracker (which may rewrite
// ids or ask for the chunk to be suppressed), then routed by the text-phase
// tracker: "skip" drops it, "reasoning" maps it via the commentary mapper,
// anything else via the regular stream-chunk mapper. opts.onChunk, when
// provided, observes every non-suppressed, non-skipped, loggable chunk.
async function* mapFullStreamToTurnEvents(stream, opts) {
  const tools = new StreamToolCallTracker();
  const phases = new StreamTextPhaseTracker();
  for await (const rawChunk of stream) {
    const { chunk, suppressTurnEvent } = tools.prepare(rawChunk);
    const route = phases.route(chunk);
    const dropped = suppressTurnEvent || route === "skip";
    if (!dropped && shouldLogStreamChunk(chunk)) {
      opts.onChunk?.(chunk);
    }
    if (dropped) {
      continue;
    }
    const event = route === "reasoning"
      ? mapCommentaryChunkToTurnEvent(chunk)
      : mapStreamChunkToTurnEvent(chunk);
    if (event) {
      yield event;
    }
  }
}
|
|
5320
5927
|
|
|
5321
5928
|
// src/llm-api/turn.ts
|
|
5322
|
-
// Hard cap on model/tool steps executed in a single turn (passed to the
// stream call as maxSteps).
var MAX_STEPS2 = 50;
|
|
5323
5930
|
async function* runTurn(options) {
|
|
5931
|
+
if (options.modelString.startsWith("anthropic/") && isLoggedIn("anthropic")) {
|
|
5932
|
+
const token = await getAccessToken("anthropic");
|
|
5933
|
+
if (token) {
|
|
5934
|
+
yield* runTurnAnthropicOAuth({ ...options, token });
|
|
5935
|
+
return;
|
|
5936
|
+
}
|
|
5937
|
+
}
|
|
5324
5938
|
const {
|
|
5325
5939
|
model,
|
|
5326
5940
|
modelString,
|
|
@@ -5392,7 +6006,7 @@ async function* runTurn(options) {
|
|
|
5392
6006
|
onStepFinish: stepTracker.onStepFinish,
|
|
5393
6007
|
signal,
|
|
5394
6008
|
providerOptions: providerOptionsResult.providerOptions,
|
|
5395
|
-
maxSteps:
|
|
6009
|
+
maxSteps: MAX_STEPS2
|
|
5396
6010
|
}));
|
|
5397
6011
|
result.response.catch(() => {});
|
|
5398
6012
|
for await (const event of mapFullStreamToTurnEvents(result.fullStream, {
|