extrait 0.6.1 → 0.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -790,36 +790,36 @@ function unwrap(schema) {
790
790
  let optional = false;
791
791
  let nullable = false;
792
792
  while (true) {
793
- const typeName = current?._def?.type;
793
+ const typeName = current?.def?.type;
794
794
  if (!typeName) {
795
795
  break;
796
796
  }
797
797
  if (typeName === "optional") {
798
798
  optional = true;
799
- current = current._def?.innerType ?? current;
799
+ current = current.def?.innerType ?? current;
800
800
  continue;
801
801
  }
802
802
  if (typeName === "default") {
803
803
  optional = true;
804
- current = current._def?.innerType ?? current;
804
+ current = current.def?.innerType ?? current;
805
805
  continue;
806
806
  }
807
807
  if (typeName === "nullable") {
808
808
  nullable = true;
809
- current = current._def?.innerType ?? current;
809
+ current = current.def?.innerType ?? current;
810
810
  continue;
811
811
  }
812
812
  if (typeName === "pipe") {
813
- const outType = current._def?.out?._def?.type;
813
+ const outType = current.def?.out?.def?.type;
814
814
  if (outType === "transform") {
815
- current = current._def?.in ?? current;
815
+ current = current.def?.in ?? current;
816
816
  } else {
817
- current = current._def?.out ?? current;
817
+ current = current.def?.out ?? current;
818
818
  }
819
819
  continue;
820
820
  }
821
821
  if (typeName === "catch" || typeName === "readonly") {
822
- current = current._def?.innerType ?? current;
822
+ current = current.def?.innerType ?? current;
823
823
  continue;
824
824
  }
825
825
  break;
@@ -835,7 +835,7 @@ function formatCore(schema, depth, seen) {
835
835
  return "unknown";
836
836
  }
837
837
  seen.add(schema);
838
- const typeName = schema?._def?.type;
838
+ const typeName = schema?.def?.type;
839
839
  switch (typeName) {
840
840
  case "string":
841
841
  return "string";
@@ -860,44 +860,44 @@ function formatCore(schema, depth, seen) {
860
860
  case "void":
861
861
  return "void";
862
862
  case "literal": {
863
- const value = schema._def?.values?.[0];
863
+ const value = schema.def?.values?.[0];
864
864
  return JSON.stringify(value);
865
865
  }
866
866
  case "enum": {
867
- const entries = schema._def?.entries;
867
+ const entries = schema.def?.entries;
868
868
  const values = Object.values(entries ?? {});
869
869
  const unique = [...new Set(values.filter((v) => typeof v !== "string" || Number.isNaN(Number(v))))];
870
870
  return unique.map((v) => JSON.stringify(v)).join(" | ") || "string";
871
871
  }
872
872
  case "array": {
873
- const inner = formatType(schema._def?.element ?? schema, depth, seen);
873
+ const inner = formatType(schema.def?.element ?? schema, depth, seen);
874
874
  return requiresParentheses(inner) ? `(${inner})[]` : `${inner}[]`;
875
875
  }
876
876
  case "tuple": {
877
- const items = (schema._def?.items ?? []).map((item) => formatType(item, depth, seen));
877
+ const items = (schema.def?.items ?? []).map((item) => formatType(item, depth, seen));
878
878
  return `[${items.join(", ")}]`;
879
879
  }
880
880
  case "union": {
881
- const options = (schema._def?.options ?? []).map((option) => formatType(option, depth, seen));
881
+ const options = (schema.def?.options ?? []).map((option) => formatType(option, depth, seen));
882
882
  return options.join(" | ") || "unknown";
883
883
  }
884
884
  case "intersection": {
885
- const left = formatType(schema._def?.left ?? schema, depth, seen);
886
- const right = formatType(schema._def?.right ?? schema, depth, seen);
885
+ const left = formatType(schema.def?.left ?? schema, depth, seen);
886
+ const right = formatType(schema.def?.right ?? schema, depth, seen);
887
887
  return `${left} & ${right}`;
888
888
  }
889
889
  case "record": {
890
- const keyType = formatType(schema._def?.keyType ?? schema, depth, seen);
891
- const valueType = formatType(schema._def?.valueType ?? schema, depth, seen);
890
+ const keyType = formatType(schema.def?.keyType ?? schema, depth, seen);
891
+ const valueType = formatType(schema.def?.valueType ?? schema, depth, seen);
892
892
  return `Record<${keyType}, ${valueType}>`;
893
893
  }
894
894
  case "map": {
895
- const keyType = formatType(schema._def?.keyType ?? schema, depth, seen);
896
- const valueType = formatType(schema._def?.valueType ?? schema, depth, seen);
895
+ const keyType = formatType(schema.def?.keyType ?? schema, depth, seen);
896
+ const valueType = formatType(schema.def?.valueType ?? schema, depth, seen);
897
897
  return `Map<${keyType}, ${valueType}>`;
898
898
  }
899
899
  case "set": {
900
- const valueType = formatType(schema._def?.valueType ?? schema, depth, seen);
900
+ const valueType = formatType(schema.def?.valueType ?? schema, depth, seen);
901
901
  return `Set<${valueType}>`;
902
902
  }
903
903
  case "object":
@@ -911,7 +911,7 @@ function formatCore(schema, depth, seen) {
911
911
  function formatObject(schema, depth, seen) {
912
912
  const indent = " ".repeat(depth);
913
913
  const innerIndent = " ".repeat(depth + 1);
914
- const rawShape = schema._def?.shape;
914
+ const rawShape = schema.def?.shape;
915
915
  const shape = typeof rawShape === "function" ? rawShape() : rawShape ?? {};
916
916
  const entries = Object.entries(shape);
917
917
  if (entries.length === 0) {
@@ -937,27 +937,27 @@ function requiresParentheses(typeText) {
937
937
  return typeText.includes(" | ") || typeText.includes(" & ");
938
938
  }
939
939
  function isIntegerNumber(schema) {
940
- const checks = schema._def?.checks ?? [];
940
+ const checks = schema.def?.checks ?? [];
941
941
  return checks.some((check) => check.isInt === true);
942
942
  }
943
943
  function readSchemaDescription(schema) {
944
944
  let current = schema;
945
- while (current?._def?.type) {
945
+ while (current?.def?.type) {
946
946
  const desc = current.description;
947
947
  if (typeof desc === "string" && desc.trim().length > 0) {
948
948
  return sanitizeDescription(desc);
949
949
  }
950
- const typeName = current._def.type;
950
+ const typeName = current.def.type;
951
951
  if (typeName === "optional" || typeName === "default" || typeName === "nullable") {
952
- current = current._def.innerType ?? current;
952
+ current = current.def.innerType ?? current;
953
953
  continue;
954
954
  }
955
955
  if (typeName === "pipe") {
956
- current = current._def.in ?? current;
956
+ current = current.def.in ?? current;
957
957
  continue;
958
958
  }
959
959
  if (typeName === "catch" || typeName === "readonly") {
960
- current = current._def.innerType ?? current;
960
+ current = current.def.innerType ?? current;
961
961
  continue;
962
962
  }
963
963
  break;
@@ -1220,7 +1220,7 @@ function findSSEBoundary(buffer) {
1220
1220
  }
1221
1221
 
1222
1222
  // src/providers/mcp-runtime.ts
1223
- var DEFAULT_MAX_TOOL_ROUNDS = 8;
1223
+ var DEFAULT_MAX_TOOL_ROUNDS = 100;
1224
1224
  async function resolveMCPToolset(clients) {
1225
1225
  if (!Array.isArray(clients) || clients.length === 0) {
1226
1226
  return {
@@ -1867,6 +1867,7 @@ async function completeWithChatCompletionsWithMCP(options, fetcher, path, reques
1867
1867
  let lastPayload;
1868
1868
  const toolCalls = [];
1869
1869
  const toolExecutions = [];
1870
+ const reasoningBlocks = [];
1870
1871
  for (let round = 1;round <= maxToolRounds + 1; round += 1) {
1871
1872
  const mcpToolset = await resolveMCPToolset(request.mcpClients);
1872
1873
  const transportTools = toProviderFunctionTools(mcpToolset);
@@ -1897,14 +1898,17 @@ async function completeWithChatCompletionsWithMCP(options, fetcher, path, reques
1897
1898
  finishReason = pickFinishReason(payload);
1898
1899
  const assistantMessage = pickAssistantMessage(payload);
1899
1900
  const calledTools = pickChatToolCalls(payload);
1901
+ const roundReasoning = pickAssistantReasoning(payload);
1902
+ pushReasoningBlock(reasoningBlocks, round, roundReasoning);
1900
1903
  if (!assistantMessage) {
1901
1904
  throw new Error("No assistant message in OpenAI-compatible response.");
1902
1905
  }
1903
1906
  if (calledTools.length === 0) {
1904
- const reasoning = pickAssistantReasoning(payload);
1907
+ const reasoning = joinReasoningBlocks(reasoningBlocks) || undefined;
1905
1908
  return {
1906
1909
  text: pickAssistantText(payload),
1907
- reasoning: reasoning.length > 0 ? reasoning : undefined,
1910
+ reasoning,
1911
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
1908
1912
  raw: payload,
1909
1913
  usage: aggregatedUsage,
1910
1914
  finishReason,
@@ -1932,10 +1936,8 @@ async function completeWithChatCompletionsWithMCP(options, fetcher, path, reques
1932
1936
  }
1933
1937
  return {
1934
1938
  text: pickAssistantText(lastPayload ?? {}),
1935
- reasoning: (() => {
1936
- const value = pickAssistantReasoning(lastPayload ?? {});
1937
- return value.length > 0 ? value : undefined;
1938
- })(),
1939
+ reasoning: joinReasoningBlocks(reasoningBlocks) || undefined,
1940
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
1939
1941
  raw: lastPayload,
1940
1942
  usage: aggregatedUsage,
1941
1943
  finishReason,
@@ -1983,6 +1985,7 @@ async function completeWithResponsesAPIWithMCP(options, fetcher, path, request)
1983
1985
  let lastPayload;
1984
1986
  const executedToolCalls = [];
1985
1987
  const toolExecutions = [];
1988
+ const reasoningBlocks = [];
1986
1989
  for (let round = 1;round <= maxToolRounds + 1; round += 1) {
1987
1990
  const mcpToolset = await resolveMCPToolset(request.mcpClients);
1988
1991
  const transportTools = toResponsesTools(toProviderFunctionTools(mcpToolset));
@@ -2012,12 +2015,15 @@ async function completeWithResponsesAPIWithMCP(options, fetcher, path, request)
2012
2015
  lastPayload = payload;
2013
2016
  aggregatedUsage = mergeUsage(aggregatedUsage, pickUsage(payload));
2014
2017
  finishReason = pickResponsesFinishReason(payload) ?? finishReason;
2018
+ pushReasoningBlock(reasoningBlocks, round, pickResponsesReasoning(payload));
2015
2019
  const providerToolCalls = pickResponsesToolCalls(payload);
2016
2020
  const functionCalls = providerToolCalls.filter((toolCall) => toolCall.type === "function" && typeof toolCall.id === "string" && typeof toolCall.name === "string");
2017
2021
  if (functionCalls.length === 0) {
2018
2022
  const text = pickResponsesText(payload) || pickAssistantText(payload);
2019
2023
  return {
2020
2024
  text,
2025
+ reasoning: joinReasoningBlocks(reasoningBlocks) || undefined,
2026
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
2021
2027
  raw: payload,
2022
2028
  usage: aggregatedUsage,
2023
2029
  finishReason,
@@ -2045,6 +2051,8 @@ async function completeWithResponsesAPIWithMCP(options, fetcher, path, request)
2045
2051
  }
2046
2052
  return {
2047
2053
  text: pickResponsesText(lastPayload ?? {}) || pickAssistantText(lastPayload ?? {}),
2054
+ reasoning: joinReasoningBlocks(reasoningBlocks) || undefined,
2055
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
2048
2056
  raw: lastPayload,
2049
2057
  usage: aggregatedUsage,
2050
2058
  finishReason,
@@ -2060,9 +2068,9 @@ async function streamWithChatCompletionsWithMCP(options, fetcher, path, request,
2060
2068
  let lastPayload;
2061
2069
  const executedToolCalls = [];
2062
2070
  const toolExecutions = [];
2071
+ const reasoningBlocks = [];
2063
2072
  callbacks.onStart?.();
2064
2073
  let lastRoundText = "";
2065
- let lastRoundReasoning = "";
2066
2074
  for (let round = 1;round <= maxToolRounds + 1; round += 1) {
2067
2075
  const mcpToolset = await resolveMCPToolset(request.mcpClients);
2068
2076
  const transportTools = toProviderFunctionTools(mcpToolset);
@@ -2124,6 +2132,7 @@ async function streamWithChatCompletionsWithMCP(options, fetcher, path, request,
2124
2132
  const chunk = {
2125
2133
  textDelta: delta,
2126
2134
  reasoningDelta: reasoningDelta || undefined,
2135
+ turnIndex: round,
2127
2136
  raw: json,
2128
2137
  usage: chunkUsage,
2129
2138
  finishReason: chunkFinishReason
@@ -2136,22 +2145,41 @@ async function streamWithChatCompletionsWithMCP(options, fetcher, path, request,
2136
2145
  finishReason = roundFinishReason;
2137
2146
  }
2138
2147
  const calledTools = buildOpenAIStreamToolCalls(streamedToolCalls);
2148
+ pushReasoningBlock(reasoningBlocks, round, roundReasoning);
2149
+ request.onTurnTransition?.({
2150
+ turnIndex: round,
2151
+ kind: "reasoningComplete",
2152
+ reasoningText: roundReasoning
2153
+ });
2139
2154
  if (calledTools.length === 0) {
2140
2155
  const out2 = {
2141
2156
  text: roundText,
2142
- reasoning: roundReasoning.length > 0 ? roundReasoning : undefined,
2157
+ reasoning: joinReasoningBlocks(reasoningBlocks) || undefined,
2158
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
2143
2159
  raw: lastPayload,
2144
2160
  usage: aggregatedUsage,
2145
2161
  finishReason,
2146
2162
  toolCalls: executedToolCalls.length > 0 ? executedToolCalls : undefined,
2147
2163
  toolExecutions: toolExecutions.length > 0 ? toolExecutions : undefined
2148
2164
  };
2165
+ request.onTurnTransition?.({ turnIndex: round, kind: "streamEnd" });
2149
2166
  callbacks.onComplete?.(out2);
2150
2167
  return out2;
2151
2168
  }
2152
2169
  if (round > maxToolRounds) {
2153
2170
  throw new Error(`Tool call loop exceeded maxToolRounds (${maxToolRounds}).`);
2154
2171
  }
2172
+ request.onTurnTransition?.({
2173
+ turnIndex: round,
2174
+ kind: "toolCallsEmit",
2175
+ toolCalls: calledTools
2176
+ });
2177
+ callbacks.onChunk?.({
2178
+ textDelta: "",
2179
+ turnIndex: round,
2180
+ toolCalls: calledTools,
2181
+ finishReason: roundFinishReason
2182
+ });
2155
2183
  const outputs = await executeMCPToolCalls(calledTools, mcpToolset, {
2156
2184
  round,
2157
2185
  request,
@@ -2160,8 +2188,8 @@ async function streamWithChatCompletionsWithMCP(options, fetcher, path, request,
2160
2188
  });
2161
2189
  executedToolCalls.push(...outputs.map((entry) => entry.call));
2162
2190
  toolExecutions.push(...outputs.map((entry) => entry.execution));
2191
+ request.onTurnTransition?.({ turnIndex: round, kind: "toolResultsReceived" });
2163
2192
  lastRoundText = roundText;
2164
- lastRoundReasoning = roundReasoning;
2165
2193
  const assistantMessage = buildOpenAIAssistantToolMessage(roundText, calledTools, {
2166
2194
  reasoning: roundReasoning,
2167
2195
  reasoningFieldName
@@ -2175,13 +2203,15 @@ async function streamWithChatCompletionsWithMCP(options, fetcher, path, request,
2175
2203
  }
2176
2204
  const out = {
2177
2205
  text: lastRoundText,
2178
- reasoning: lastRoundReasoning.length > 0 ? lastRoundReasoning : undefined,
2206
+ reasoning: joinReasoningBlocks(reasoningBlocks) || undefined,
2207
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
2179
2208
  raw: lastPayload,
2180
2209
  usage: aggregatedUsage,
2181
2210
  finishReason,
2182
2211
  toolCalls: executedToolCalls.length > 0 ? executedToolCalls : undefined,
2183
2212
  toolExecutions: toolExecutions.length > 0 ? toolExecutions : undefined
2184
2213
  };
2214
+ request.onTurnTransition?.({ turnIndex: maxToolRounds + 1, kind: "streamEnd" });
2185
2215
  callbacks.onComplete?.(out);
2186
2216
  return out;
2187
2217
  }
@@ -2264,6 +2294,7 @@ async function streamWithResponsesAPIWithMCP(options, fetcher, path, request, ca
2264
2294
  let lastPayload;
2265
2295
  const executedToolCalls = [];
2266
2296
  const toolExecutions = [];
2297
+ const reasoningBlocks = [];
2267
2298
  callbacks.onStart?.();
2268
2299
  for (let round = 1;round <= maxToolRounds + 1; round += 1) {
2269
2300
  const mcpToolset = await resolveMCPToolset(request.mcpClients);
@@ -2292,6 +2323,7 @@ async function streamWithResponsesAPIWithMCP(options, fetcher, path, request, ca
2292
2323
  throw new Error(`HTTP ${response.status}: ${message}`);
2293
2324
  }
2294
2325
  let roundText = "";
2326
+ let roundReasoning = "";
2295
2327
  let roundUsage;
2296
2328
  let roundFinishReason;
2297
2329
  let roundPayload;
@@ -2310,6 +2342,7 @@ async function streamWithResponsesAPIWithMCP(options, fetcher, path, request, ca
2310
2342
  lastPayload = payload;
2311
2343
  }
2312
2344
  const delta = pickResponsesStreamTextDelta(json);
2345
+ const reasoningDelta = pickResponsesStreamReasoningDelta(json);
2313
2346
  const chunkUsage = pickResponsesStreamUsage(json);
2314
2347
  const chunkFinishReason = pickResponsesStreamFinishReason(json);
2315
2348
  collectResponsesStreamToolCalls(json, streamedToolCalls);
@@ -2321,9 +2354,14 @@ async function streamWithResponsesAPIWithMCP(options, fetcher, path, request, ca
2321
2354
  roundText += delta;
2322
2355
  callbacks.onToken?.(delta);
2323
2356
  }
2324
- if (delta || chunkUsage || chunkFinishReason) {
2357
+ if (reasoningDelta) {
2358
+ roundReasoning += reasoningDelta;
2359
+ }
2360
+ if (delta || reasoningDelta || chunkUsage || chunkFinishReason) {
2325
2361
  const chunk = {
2326
2362
  textDelta: delta,
2363
+ reasoningDelta: reasoningDelta || undefined,
2364
+ turnIndex: round,
2327
2365
  raw: json,
2328
2366
  usage: chunkUsage,
2329
2367
  finishReason: chunkFinishReason
@@ -2339,25 +2377,48 @@ async function streamWithResponsesAPIWithMCP(options, fetcher, path, request, ca
2339
2377
  finishReason = pickResponsesFinishReason(roundPayload) ?? finishReason;
2340
2378
  }
2341
2379
  const payloadToolCalls = roundPayload ? pickResponsesToolCalls(roundPayload) : [];
2380
+ if (roundPayload && roundReasoning.length === 0) {
2381
+ roundReasoning = pickResponsesReasoning(roundPayload);
2382
+ }
2342
2383
  const streamedCalls = buildResponsesStreamToolCalls(streamedToolCalls);
2343
2384
  const providerToolCalls = payloadToolCalls.length > 0 ? payloadToolCalls : streamedCalls;
2344
2385
  const functionCalls = providerToolCalls.filter((toolCall) => toolCall.type === "function" && typeof toolCall.id === "string" && typeof toolCall.name === "string");
2386
+ pushReasoningBlock(reasoningBlocks, round, roundReasoning);
2387
+ request.onTurnTransition?.({
2388
+ turnIndex: round,
2389
+ kind: "reasoningComplete",
2390
+ reasoningText: roundReasoning
2391
+ });
2345
2392
  if (functionCalls.length === 0) {
2346
2393
  const finalText = roundText.length > 0 ? roundText : roundPayload ? pickResponsesText(roundPayload) || pickAssistantText(roundPayload) : "";
2347
2394
  const out2 = {
2348
2395
  text: finalText,
2396
+ reasoning: joinReasoningBlocks(reasoningBlocks) || undefined,
2397
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
2349
2398
  raw: roundPayload ?? lastPayload,
2350
2399
  usage: aggregatedUsage,
2351
2400
  finishReason,
2352
2401
  toolCalls: executedToolCalls.length > 0 ? executedToolCalls : undefined,
2353
2402
  toolExecutions: toolExecutions.length > 0 ? toolExecutions : undefined
2354
2403
  };
2404
+ request.onTurnTransition?.({ turnIndex: round, kind: "streamEnd" });
2355
2405
  callbacks.onComplete?.(out2);
2356
2406
  return out2;
2357
2407
  }
2358
2408
  if (round > maxToolRounds) {
2359
2409
  throw new Error(`Tool call loop exceeded maxToolRounds (${maxToolRounds}).`);
2360
2410
  }
2411
+ request.onTurnTransition?.({
2412
+ turnIndex: round,
2413
+ kind: "toolCallsEmit",
2414
+ toolCalls: functionCalls
2415
+ });
2416
+ callbacks.onChunk?.({
2417
+ textDelta: "",
2418
+ turnIndex: round,
2419
+ toolCalls: functionCalls,
2420
+ finishReason: roundFinishReason
2421
+ });
2361
2422
  const outputs = await executeMCPToolCalls(functionCalls, mcpToolset, {
2362
2423
  round,
2363
2424
  request,
@@ -2366,6 +2427,7 @@ async function streamWithResponsesAPIWithMCP(options, fetcher, path, request, ca
2366
2427
  });
2367
2428
  executedToolCalls.push(...outputs.map((entry) => entry.call));
2368
2429
  toolExecutions.push(...outputs.map((entry) => entry.execution));
2430
+ request.onTurnTransition?.({ turnIndex: round, kind: "toolResultsReceived" });
2369
2431
  input = outputs.map((entry) => ({
2370
2432
  type: "function_call_output",
2371
2433
  call_id: entry.call.id,
@@ -2375,12 +2437,15 @@ async function streamWithResponsesAPIWithMCP(options, fetcher, path, request, ca
2375
2437
  }
2376
2438
  const out = {
2377
2439
  text: pickResponsesText(lastPayload ?? {}) || pickAssistantText(lastPayload ?? {}),
2440
+ reasoning: joinReasoningBlocks(reasoningBlocks) || undefined,
2441
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
2378
2442
  raw: lastPayload,
2379
2443
  usage: aggregatedUsage,
2380
2444
  finishReason,
2381
2445
  toolCalls: executedToolCalls.length > 0 ? executedToolCalls : undefined,
2382
2446
  toolExecutions: toolExecutions.length > 0 ? toolExecutions : undefined
2383
2447
  };
2448
+ request.onTurnTransition?.({ turnIndex: maxToolRounds + 1, kind: "streamEnd" });
2384
2449
  callbacks.onComplete?.(out);
2385
2450
  return out;
2386
2451
  }
@@ -2665,6 +2730,20 @@ function pickResponsesStreamTextDelta(payload) {
2665
2730
  }
2666
2731
  return "";
2667
2732
  }
2733
+ function pickResponsesStreamReasoningDelta(payload) {
2734
+ const eventType = pickString(payload.type) ?? "";
2735
+ if (!eventType.includes("reasoning") && !eventType.includes("thinking")) {
2736
+ return "";
2737
+ }
2738
+ const direct = pickString(payload.delta);
2739
+ if (direct) {
2740
+ return direct;
2741
+ }
2742
+ if (isRecord2(payload.delta)) {
2743
+ return pickReasoningText(payload.delta) || pickString(payload.delta.text) || pickString(payload.delta.summary_text) || "";
2744
+ }
2745
+ return "";
2746
+ }
2668
2747
  function pickResponsesStreamUsage(payload) {
2669
2748
  const direct = pickUsage(payload);
2670
2749
  if (direct) {
@@ -2799,6 +2878,30 @@ function pickResponsesText(payload) {
2799
2878
  }).join("");
2800
2879
  }).join("");
2801
2880
  }
2881
+ function pickResponsesReasoning(payload) {
2882
+ const direct = pickReasoningText(payload);
2883
+ if (direct) {
2884
+ return direct;
2885
+ }
2886
+ const output = payload.output;
2887
+ if (!Array.isArray(output)) {
2888
+ return "";
2889
+ }
2890
+ return output.map((item) => {
2891
+ if (!isRecord2(item)) {
2892
+ return "";
2893
+ }
2894
+ const itemReasoning = pickReasoningText(item);
2895
+ if (itemReasoning) {
2896
+ return itemReasoning;
2897
+ }
2898
+ const itemType = pickString(item.type) ?? "";
2899
+ if ((itemType.includes("reasoning") || itemType.includes("thinking")) && Array.isArray(item.content)) {
2900
+ return item.content.map((part) => isRecord2(part) ? pickTextLike(part) : "").join("");
2901
+ }
2902
+ return "";
2903
+ }).join("");
2904
+ }
2802
2905
  function pickAssistantText(payload) {
2803
2906
  const message = pickAssistantMessage(payload);
2804
2907
  if (message) {
@@ -2819,6 +2922,18 @@ function pickAssistantText(payload) {
2819
2922
  function pickReasoningText(value) {
2820
2923
  return pickTextLike(value.reasoning) || pickTextLike(value.reasoning_content);
2821
2924
  }
2925
+ function pushReasoningBlock(blocks, turnIndex, text) {
2926
+ const clean = text?.replace(/<\/?think\s*>/gi, "").trim();
2927
+ if (!clean) {
2928
+ return;
2929
+ }
2930
+ blocks.push({ turnIndex, text: clean });
2931
+ }
2932
+ function joinReasoningBlocks(blocks) {
2933
+ return blocks.map((block) => block.text).filter(Boolean).join(`
2934
+
2935
+ `);
2936
+ }
2822
2937
  function pickTextFromOpenAIContent(value) {
2823
2938
  return pickTextLike(value);
2824
2939
  }
@@ -2999,6 +3114,7 @@ async function completeWithMCPToolLoop(options, fetcher, path, request) {
2999
3114
  let lastPayload;
3000
3115
  const toolCalls = [];
3001
3116
  const toolExecutions = [];
3117
+ const reasoningBlocks = [];
3002
3118
  for (let round = 1;round <= maxToolRounds + 1; round += 1) {
3003
3119
  const mcpToolset = await resolveMCPToolset(request.mcpClients);
3004
3120
  const tools = toAnthropicTools(toProviderFunctionTools(mcpToolset));
@@ -3028,9 +3144,12 @@ async function completeWithMCPToolLoop(options, fetcher, path, request) {
3028
3144
  finishReason = pickFinishReason2(payload);
3029
3145
  const content = Array.isArray(payload.content) ? payload.content : [];
3030
3146
  const calledTools = pickAnthropicToolCalls(payload).filter((call) => call.type === "function");
3147
+ pushReasoningBlock2(reasoningBlocks, round, extractAnthropicReasoning(payload));
3031
3148
  if (calledTools.length === 0) {
3032
3149
  return {
3033
3150
  text: extractAnthropicText(payload),
3151
+ reasoning: joinReasoningBlocks2(reasoningBlocks) || undefined,
3152
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
3034
3153
  raw: payload,
3035
3154
  usage: aggregatedUsage,
3036
3155
  finishReason,
@@ -3066,6 +3185,8 @@ async function completeWithMCPToolLoop(options, fetcher, path, request) {
3066
3185
  }
3067
3186
  return {
3068
3187
  text: extractAnthropicText(lastPayload ?? {}),
3188
+ reasoning: joinReasoningBlocks2(reasoningBlocks) || undefined,
3189
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
3069
3190
  raw: lastPayload,
3070
3191
  usage: aggregatedUsage,
3071
3192
  finishReason,
@@ -3082,6 +3203,7 @@ async function streamWithMCPToolLoop(options, fetcher, path, request, callbacks)
3082
3203
  let lastPayload;
3083
3204
  const toolCalls = [];
3084
3205
  const toolExecutions = [];
3206
+ const reasoningBlocks = [];
3085
3207
  callbacks.onStart?.();
3086
3208
  for (let round = 1;round <= maxToolRounds + 1; round += 1) {
3087
3209
  const mcpToolset = await resolveMCPToolset(request.mcpClients);
@@ -3107,6 +3229,7 @@ async function streamWithMCPToolLoop(options, fetcher, path, request, callbacks)
3107
3229
  throw new Error(`HTTP ${response.status}: ${message}`);
3108
3230
  }
3109
3231
  let roundText = "";
3232
+ let roundReasoning = "";
3110
3233
  let roundUsage;
3111
3234
  let roundFinishReason;
3112
3235
  const streamedToolCalls = new Map;
@@ -3120,6 +3243,7 @@ async function streamWithMCPToolLoop(options, fetcher, path, request, callbacks)
3120
3243
  }
3121
3244
  lastPayload = json;
3122
3245
  const delta = pickAnthropicDelta(json);
3246
+ const reasoningDelta = pickAnthropicReasoningDelta(json);
3123
3247
  const chunkUsage = pickUsage2(json);
3124
3248
  const chunkFinishReason = pickFinishReason2(json);
3125
3249
  collectAnthropicStreamToolCalls(json, streamedToolCalls);
@@ -3131,9 +3255,14 @@ async function streamWithMCPToolLoop(options, fetcher, path, request, callbacks)
3131
3255
  roundText += delta;
3132
3256
  callbacks.onToken?.(delta);
3133
3257
  }
3134
- if (delta || chunkUsage || chunkFinishReason) {
3258
+ if (reasoningDelta) {
3259
+ roundReasoning += reasoningDelta;
3260
+ }
3261
+ if (delta || reasoningDelta || chunkUsage || chunkFinishReason) {
3135
3262
  const chunk = {
3136
3263
  textDelta: delta,
3264
+ reasoningDelta: reasoningDelta || undefined,
3265
+ turnIndex: round,
3137
3266
  raw: json,
3138
3267
  usage: chunkUsage,
3139
3268
  finishReason: chunkFinishReason
@@ -3146,21 +3275,41 @@ async function streamWithMCPToolLoop(options, fetcher, path, request, callbacks)
3146
3275
  finishReason = roundFinishReason;
3147
3276
  }
3148
3277
  const calledTools = buildAnthropicStreamToolCalls(streamedToolCalls);
3278
+ pushReasoningBlock2(reasoningBlocks, round, roundReasoning);
3279
+ request.onTurnTransition?.({
3280
+ turnIndex: round,
3281
+ kind: "reasoningComplete",
3282
+ reasoningText: roundReasoning
3283
+ });
3149
3284
  if (calledTools.length === 0) {
3150
3285
  const out2 = {
3151
3286
  text: roundText,
3287
+ reasoning: joinReasoningBlocks2(reasoningBlocks) || undefined,
3288
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
3152
3289
  raw: lastPayload,
3153
3290
  usage: aggregatedUsage,
3154
3291
  finishReason,
3155
3292
  toolCalls: toolCalls.length > 0 ? toolCalls : undefined,
3156
3293
  toolExecutions: toolExecutions.length > 0 ? toolExecutions : undefined
3157
3294
  };
3295
+ request.onTurnTransition?.({ turnIndex: round, kind: "streamEnd" });
3158
3296
  callbacks.onComplete?.(out2);
3159
3297
  return out2;
3160
3298
  }
3161
3299
  if (round > maxToolRounds) {
3162
3300
  throw new Error(`Tool call loop exceeded maxToolRounds (${maxToolRounds}).`);
3163
3301
  }
3302
+ request.onTurnTransition?.({
3303
+ turnIndex: round,
3304
+ kind: "toolCallsEmit",
3305
+ toolCalls: calledTools
3306
+ });
3307
+ callbacks.onChunk?.({
3308
+ textDelta: "",
3309
+ turnIndex: round,
3310
+ toolCalls: calledTools,
3311
+ finishReason: roundFinishReason
3312
+ });
3164
3313
  const toolResultContent = [];
3165
3314
  const outputs = await executeMCPToolCalls(calledTools, mcpToolset, {
3166
3315
  round,
@@ -3170,6 +3319,7 @@ async function streamWithMCPToolLoop(options, fetcher, path, request, callbacks)
3170
3319
  });
3171
3320
  toolCalls.push(...outputs.map((entry) => entry.call));
3172
3321
  toolExecutions.push(...outputs.map((entry) => entry.execution));
3322
+ request.onTurnTransition?.({ turnIndex: round, kind: "toolResultsReceived" });
3173
3323
  for (const entry of outputs) {
3174
3324
  toolResultContent.push({
3175
3325
  type: "tool_result",
@@ -3186,12 +3336,15 @@ async function streamWithMCPToolLoop(options, fetcher, path, request, callbacks)
3186
3336
  }
3187
3337
  const out = {
3188
3338
  text: "",
3339
+ reasoning: joinReasoningBlocks2(reasoningBlocks) || undefined,
3340
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
3189
3341
  raw: lastPayload,
3190
3342
  usage: aggregatedUsage,
3191
3343
  finishReason,
3192
3344
  toolCalls: toolCalls.length > 0 ? toolCalls : undefined,
3193
3345
  toolExecutions: toolExecutions.length > 0 ? toolExecutions : undefined
3194
3346
  };
3347
+ request.onTurnTransition?.({ turnIndex: maxToolRounds + 1, kind: "streamEnd" });
3195
3348
  callbacks.onComplete?.(out);
3196
3349
  return out;
3197
3350
  }
@@ -3312,6 +3465,22 @@ function extractAnthropicText(payload) {
3312
3465
  return typeof text === "string" ? text : "";
3313
3466
  }).join("");
3314
3467
  }
3468
+ function extractAnthropicReasoning(payload) {
3469
+ const content = payload.content;
3470
+ if (!Array.isArray(content)) {
3471
+ return "";
3472
+ }
3473
+ return content.map((part) => {
3474
+ if (!isRecord2(part)) {
3475
+ return "";
3476
+ }
3477
+ const type = pickString(part.type) ?? "";
3478
+ if (type !== "thinking" && type !== "reasoning") {
3479
+ return "";
3480
+ }
3481
+ return pickString(part.thinking) ?? pickString(part.text) ?? pickString(part.reasoning) ?? "";
3482
+ }).join("");
3483
+ }
3315
3484
  function pickAnthropicToolCalls(payload) {
3316
3485
  const content = payload.content;
3317
3486
  if (!Array.isArray(content)) {
@@ -3342,6 +3511,35 @@ function pickAnthropicDelta(payload) {
3342
3511
  }
3343
3512
  return "";
3344
3513
  }
3514
+ function pickAnthropicReasoningDelta(payload) {
3515
+ const deltaObject = payload.delta;
3516
+ if (isRecord2(deltaObject)) {
3517
+ const type = pickString(deltaObject.type) ?? "";
3518
+ if (type === "thinking_delta" || type === "reasoning_delta") {
3519
+ return pickString(deltaObject.thinking) ?? pickString(deltaObject.text) ?? "";
3520
+ }
3521
+ }
3522
+ const contentBlock = payload.content_block;
3523
+ if (isRecord2(contentBlock)) {
3524
+ const type = pickString(contentBlock.type) ?? "";
3525
+ if (type === "thinking" || type === "reasoning") {
3526
+ return pickString(contentBlock.thinking) ?? pickString(contentBlock.text) ?? "";
3527
+ }
3528
+ }
3529
+ return "";
3530
+ }
3531
+ function pushReasoningBlock2(blocks, turnIndex, text) {
3532
+ const clean = text?.replace(/<\/?think\s*>/gi, "").trim();
3533
+ if (!clean) {
3534
+ return;
3535
+ }
3536
+ blocks.push({ turnIndex, text: clean });
3537
+ }
3538
+ function joinReasoningBlocks2(blocks) {
3539
+ return blocks.map((block) => block.text).filter(Boolean).join(`
3540
+
3541
+ `);
3542
+ }
3345
3543
  function collectAnthropicStreamToolCalls(payload, state) {
3346
3544
  const eventType = pickString(payload.type);
3347
3545
  if (!eventType) {
@@ -3815,6 +4013,7 @@ function normalizeStreamConfig(option) {
3815
4013
  return {
3816
4014
  enabled: option.enabled ?? true,
3817
4015
  onData: option.onData,
4016
+ onTurnTransition: option.onTurnTransition,
3818
4017
  to: option.to
3819
4018
  };
3820
4019
  }
@@ -3882,6 +4081,7 @@ async function callModel(adapter, options) {
3882
4081
  transformToolCallParams: options.request?.transformToolCallParams,
3883
4082
  unknownToolError: options.request?.unknownToolError,
3884
4083
  toolDebug: options.request?.toolDebug,
4084
+ onTurnTransition: options.stream.onTurnTransition,
3885
4085
  body: options.request?.body,
3886
4086
  signal: requestSignal
3887
4087
  };
@@ -3909,13 +4109,21 @@ async function callModel(adapter, options) {
3909
4109
  let latestFinishReason;
3910
4110
  let streamedProviderText = "";
3911
4111
  let streamedDedicatedReasoning = "";
4112
+ let currentTurnIndex;
4113
+ let currentToolCalls;
4114
+ let streamedReasoningBlocks;
3912
4115
  let lastSnapshotFingerprint;
3913
4116
  let previousSnapshotText = "";
3914
4117
  let previousSnapshotReasoning = "";
3915
4118
  const emitStreamingData = (done, usage2, finishReason2) => {
3916
- const normalized2 = normalizeModelOutput(streamedProviderText, streamedDedicatedReasoning);
4119
+ const normalized2 = normalizeModelOutput(streamedProviderText, streamedDedicatedReasoning, streamedReasoningBlocks);
3917
4120
  const snapshot = options.buildSnapshot(normalized2);
3918
- const fingerprint = toStreamDataFingerprint(snapshot);
4121
+ const fingerprint = toStreamDataFingerprint({
4122
+ snapshot,
4123
+ done,
4124
+ turnIndex: currentTurnIndex,
4125
+ toolCalls: currentToolCalls
4126
+ });
3919
4127
  if (!done && fingerprint === lastSnapshotFingerprint) {
3920
4128
  return;
3921
4129
  }
@@ -3931,7 +4139,9 @@ async function callModel(adapter, options) {
3931
4139
  snapshot,
3932
4140
  done,
3933
4141
  usage: usage2,
3934
- finishReason: finishReason2
4142
+ finishReason: finishReason2,
4143
+ turnIndex: currentTurnIndex,
4144
+ toolCalls: currentToolCalls
3935
4145
  });
3936
4146
  if (options.stream.to === "stdout" && delta.text) {
3937
4147
  process.stdout.write(delta.text);
@@ -3966,8 +4176,21 @@ async function callModel(adapter, options) {
3966
4176
  streamedDedicatedReasoning += delta;
3967
4177
  emitStreamingData(false);
3968
4178
  };
3969
- const response2 = await adapter.stream(requestPayload, {
4179
+ const streamRequestPayload = {
4180
+ ...requestPayload,
4181
+ onTurnTransition: (transition) => {
4182
+ if (transition.kind === "reasoningComplete") {
4183
+ streamedReasoningBlocks = appendReasoningBlock(streamedReasoningBlocks, transition);
4184
+ }
4185
+ options.stream.onTurnTransition?.(transition);
4186
+ }
4187
+ };
4188
+ const response2 = await adapter.stream(streamRequestPayload, {
3970
4189
  onChunk: (chunk) => {
4190
+ if (chunk.turnIndex !== undefined) {
4191
+ currentTurnIndex = chunk.turnIndex;
4192
+ }
4193
+ currentToolCalls = chunk.toolCalls;
3971
4194
  if (chunk.textDelta) {
3972
4195
  handleTextDelta(chunk.textDelta);
3973
4196
  }
@@ -3980,11 +4203,15 @@ async function callModel(adapter, options) {
3980
4203
  if (chunk.finishReason) {
3981
4204
  latestFinishReason = chunk.finishReason;
3982
4205
  }
4206
+ if (!chunk.textDelta && !chunk.reasoningDelta && (chunk.turnIndex !== undefined || chunk.toolCalls)) {
4207
+ emitStreamingData(false, chunk.usage, chunk.finishReason);
4208
+ }
3983
4209
  }
3984
4210
  });
3985
4211
  streamedProviderText = typeof response2.text === "string" ? response2.text : streamedProviderText;
3986
4212
  streamedDedicatedReasoning = typeof response2.reasoning === "string" ? response2.reasoning : streamedDedicatedReasoning;
3987
- const finalNormalized = normalizeModelOutput(streamedProviderText, streamedDedicatedReasoning);
4213
+ streamedReasoningBlocks = response2.reasoningBlocks ?? streamedReasoningBlocks;
4214
+ const finalNormalized = normalizeModelOutput(streamedProviderText, streamedDedicatedReasoning, streamedReasoningBlocks);
3988
4215
  const usage = preferLatestUsage(latestUsage, response2.usage);
3989
4216
  const finishReason = response2.finishReason ?? latestFinishReason;
3990
4217
  emitStreamingData(true, usage, finishReason);
@@ -4016,11 +4243,12 @@ async function callModel(adapter, options) {
4016
4243
  parseSource: finalNormalized.parseSource,
4017
4244
  via: "stream",
4018
4245
  usage,
4019
- finishReason
4246
+ finishReason,
4247
+ reasoningBlocks: finalNormalized.reasoningBlocks
4020
4248
  };
4021
4249
  }
4022
4250
  const response = await adapter.complete(requestPayload);
4023
- const normalized = normalizeModelOutput(response.text, response.reasoning);
4251
+ const normalized = normalizeModelOutput(response.text, response.reasoning, response.reasoningBlocks);
4024
4252
  options.observe?.(options.buildEvent({
4025
4253
  stage: "llm.response",
4026
4254
  message: "Completion response received.",
@@ -4049,10 +4277,11 @@ async function callModel(adapter, options) {
4049
4277
  parseSource: normalized.parseSource,
4050
4278
  via: "complete",
4051
4279
  usage: response.usage,
4052
- finishReason: response.finishReason
4280
+ finishReason: response.finishReason,
4281
+ reasoningBlocks: normalized.reasoningBlocks
4053
4282
  };
4054
4283
  }
4055
- function normalizeModelOutput(text, dedicatedReasoning) {
4284
+ function normalizeModelOutput(text, dedicatedReasoning, reasoningBlocks) {
4056
4285
  const sanitized = sanitizeThink(text);
4057
4286
  const visibleText = stripThinkBlocks(text, sanitized.thinkBlocks);
4058
4287
  const reasoning = joinReasoningSegments([
@@ -4062,10 +4291,29 @@ function normalizeModelOutput(text, dedicatedReasoning) {
4062
4291
  return {
4063
4292
  text: visibleText,
4064
4293
  reasoning,
4294
+ reasoningBlocks: normalizeReasoningBlocks(reasoningBlocks),
4065
4295
  thinkBlocks: sanitized.thinkBlocks,
4066
4296
  parseSource: composeParseSource(visibleText, reasoning)
4067
4297
  };
4068
4298
  }
4299
// Sanitizes an optional list of reasoning blocks: strips think tags
// (RE_THINK_TAGS) from each block's text, drops entries whose turnIndex
// is not a finite number or whose text becomes empty, and returns
// undefined when the input is not an array or nothing usable remains.
function normalizeReasoningBlocks(blocks) {
  if (!Array.isArray(blocks)) {
    return undefined;
  }
  const cleaned = [];
  for (const block of blocks) {
    const text = block.text.replace(RE_THINK_TAGS, "").trim();
    if (Number.isFinite(block.turnIndex) && text.length > 0) {
      cleaned.push({ turnIndex: block.turnIndex, text });
    }
  }
  return cleaned.length > 0 ? cleaned : undefined;
}
4309
// Returns `blocks` extended with the reasoning text carried by a turn
// transition, normalized via normalizeReasoningBlocks. When the
// transition's reasoningText is missing or empty after stripping think
// tags, the original `blocks` value is returned untouched.
function appendReasoningBlock(blocks, transition) {
  const raw = transition.reasoningText;
  const text = raw == null ? undefined : raw.replace(RE_THINK_TAGS, "").trim();
  if (!text) {
    return blocks;
  }
  const extended = (blocks ?? []).concat({ turnIndex: transition.turnIndex, text });
  return normalizeReasoningBlocks(extended);
}
4069
4317
  function composeParseSource(text, reasoning) {
4070
4318
  if (typeof reasoning !== "string" || reasoning.length === 0) {
4071
4319
  return text;
@@ -4225,7 +4473,8 @@ async function generate(adapter, promptOrOptions, callOptions) {
4225
4473
  }),
4226
4474
  buildSnapshot: (model) => ({
4227
4475
  text: model.text,
4228
- reasoning: model.reasoning
4476
+ reasoning: model.reasoning,
4477
+ ...model.reasoningBlocks ? { reasoningBlocks: model.reasoningBlocks } : {}
4229
4478
  }),
4230
4479
  debug: debugConfig,
4231
4480
  debugLabel: "generate",
@@ -4240,7 +4489,8 @@ async function generate(adapter, promptOrOptions, callOptions) {
4240
4489
  text: response.text,
4241
4490
  reasoning: response.reasoning,
4242
4491
  usage: response.usage,
4243
- finishReason: response.finishReason
4492
+ finishReason: response.finishReason,
4493
+ ...response.reasoningBlocks ? { reasoningBlocks: response.reasoningBlocks } : {}
4244
4494
  };
4245
4495
  const attempts = [attempt];
4246
4496
  normalized.observe?.({
@@ -4257,7 +4507,8 @@ async function generate(adapter, promptOrOptions, callOptions) {
4257
4507
  reasoning: attempt.reasoning,
4258
4508
  attempts,
4259
4509
  usage: aggregateUsage(attempts),
4260
- finishReason: attempt.finishReason
4510
+ finishReason: attempt.finishReason,
4511
+ ...attempt.reasoningBlocks ? { reasoningBlocks: attempt.reasoningBlocks } : {}
4261
4512
  };
4262
4513
  }
4263
4514
  function normalizeGenerateInput(promptOrOptions, callOptions) {
@@ -5077,6 +5328,7 @@ async function executeAttempt(adapter, input) {
5077
5328
  success: parsed.success,
5078
5329
  usage: response.usage,
5079
5330
  finishReason: response.finishReason,
5331
+ ...response.reasoningBlocks ? { reasoningBlocks: response.reasoningBlocks } : {},
5080
5332
  parsed
5081
5333
  };
5082
5334
  return {
@@ -5097,6 +5349,7 @@ async function callModel2(adapter, options) {
5097
5349
  buildSnapshot: (normalized) => ({
5098
5350
  text: normalized.text,
5099
5351
  reasoning: normalized.reasoning,
5352
+ ...normalized.reasoningBlocks ? { reasoningBlocks: normalized.reasoningBlocks } : {},
5100
5353
  data: parseStreamingStructuredData(normalized.parseSource) ?? null
5101
5354
  }),
5102
5355
  debugLabel: "structured"
@@ -5197,7 +5450,8 @@ function buildSuccessResult(data, attempts) {
5197
5450
  json: final?.json ?? null,
5198
5451
  attempts,
5199
5452
  usage: aggregateUsage(attempts),
5200
- finishReason: final?.finishReason
5453
+ finishReason: final?.finishReason,
5454
+ ...final?.reasoningBlocks ? { reasoningBlocks: final.reasoningBlocks } : {}
5201
5455
  };
5202
5456
  }
5203
5457
  function toStructuredError(attempt) {
@@ -5660,11 +5914,11 @@ function inferSchemaExample(schema) {
5660
5914
  }
5661
5915
  function getObjectShape(schema) {
5662
5916
  const unwrapped = unwrap2(schema).schema;
5663
- const typeName = unwrapped._def?.type;
5917
+ const typeName = unwrapped.def?.type;
5664
5918
  if (typeName !== "object") {
5665
5919
  return null;
5666
5920
  }
5667
- const rawShape = unwrapped._def?.shape;
5921
+ const rawShape = unwrapped.def?.shape;
5668
5922
  if (typeof rawShape === "function") {
5669
5923
  return rawShape();
5670
5924
  }
@@ -5672,11 +5926,11 @@ function getObjectShape(schema) {
5672
5926
  }
5673
5927
  function readDefaultValue(schema) {
5674
5928
  let current = schema;
5675
- while (current?._def?.type) {
5676
- const typeName = current._def.type;
5929
+ while (current?.def?.type) {
5930
+ const typeName = current.def.type;
5677
5931
  if (typeName === "default") {
5678
5932
  try {
5679
- const raw = current._def.defaultValue;
5933
+ const raw = current.def.defaultValue;
5680
5934
  if (typeof raw === "function") {
5681
5935
  return raw();
5682
5936
  }
@@ -5686,11 +5940,11 @@ function readDefaultValue(schema) {
5686
5940
  }
5687
5941
  }
5688
5942
  if (typeName === "optional" || typeName === "nullable" || typeName === "catch" || typeName === "readonly") {
5689
- current = current._def.innerType ?? current;
5943
+ current = current.def.innerType ?? current;
5690
5944
  continue;
5691
5945
  }
5692
5946
  if (typeName === "pipe") {
5693
- current = current._def.in ?? current;
5947
+ current = current.def.in ?? current;
5694
5948
  continue;
5695
5949
  }
5696
5950
  return;
@@ -5699,22 +5953,22 @@ function readDefaultValue(schema) {
5699
5953
  }
5700
5954
  function readSchemaDescription2(schema) {
5701
5955
  let current = schema;
5702
- while (current?._def?.type) {
5956
+ while (current?.def?.type) {
5703
5957
  const desc = current.description;
5704
5958
  if (typeof desc === "string" && desc.trim().length > 0) {
5705
5959
  return desc.trim();
5706
5960
  }
5707
- const typeName = current._def.type;
5961
+ const typeName = current.def.type;
5708
5962
  if (typeName === "optional" || typeName === "default" || typeName === "nullable") {
5709
- current = current._def.innerType ?? current;
5963
+ current = current.def.innerType ?? current;
5710
5964
  continue;
5711
5965
  }
5712
5966
  if (typeName === "catch" || typeName === "readonly") {
5713
- current = current._def.innerType ?? current;
5967
+ current = current.def.innerType ?? current;
5714
5968
  continue;
5715
5969
  }
5716
5970
  if (typeName === "pipe") {
5717
- current = current._def.in ?? current;
5971
+ current = current.def.in ?? current;
5718
5972
  continue;
5719
5973
  }
5720
5974
  break;
@@ -5724,19 +5978,19 @@ function readSchemaDescription2(schema) {
5724
5978
  function unwrap2(schema) {
5725
5979
  let current = schema;
5726
5980
  let optional = false;
5727
- while (current?._def?.type) {
5728
- const typeName = current._def.type;
5981
+ while (current?.def?.type) {
5982
+ const typeName = current.def.type;
5729
5983
  if (typeName === "optional" || typeName === "default") {
5730
5984
  optional = true;
5731
- current = current._def.innerType ?? current;
5985
+ current = current.def.innerType ?? current;
5732
5986
  continue;
5733
5987
  }
5734
5988
  if (typeName === "nullable" || typeName === "catch" || typeName === "readonly") {
5735
- current = current._def.innerType ?? current;
5989
+ current = current.def.innerType ?? current;
5736
5990
  continue;
5737
5991
  }
5738
5992
  if (typeName === "pipe") {
5739
- current = current._def.in ?? current;
5993
+ current = current.def.in ?? current;
5740
5994
  continue;
5741
5995
  }
5742
5996
  break;