kimi-proxy 0.1.2 → 0.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1018,6 +1018,77 @@ var ResponseSchema = z3.object({
  synthetic: z3.boolean().optional()
  }).optional()
  });
+ function jsonSchemaToZod(schema) {
+ if (!schema || typeof schema !== "object" || Array.isArray(schema))
+ return z3.any();
+ const s = schema;
+ switch (s.type) {
+ case "object": {
+ const shape = {};
+ const properties = s.properties || {};
+ const required = s.required || [];
+ for (const key in properties) {
+ let propSchema = jsonSchemaToZod(properties[key]);
+ if (!required.includes(key)) {
+ propSchema = propSchema.optional();
+ }
+ shape[key] = propSchema;
+ }
+ const zodObj = z3.object(shape);
+ if (s.additionalProperties === false) {
+ return zodObj.strict();
+ } else {
+ return zodObj.passthrough();
+ }
+ }
+ case "array":
+ return z3.array(jsonSchemaToZod(s.items || {}));
+ case "string":
+ if (Array.isArray(s.enum) && s.enum.length > 0 && s.enum.every((e) => typeof e === "string")) {
+ return z3.enum(s.enum);
+ }
+ return z3.string();
+ case "number":
+ case "integer":
+ return z3.number();
+ case "boolean":
+ return z3.boolean();
+ case "null":
+ return z3.null();
+ default:
+ return z3.any();
+ }
+ }
+ var toolSchemaCache = new WeakMap;
+ function getToolSchemas(request) {
+ let schemas = toolSchemaCache.get(request);
+ if (schemas)
+ return schemas;
+ schemas = {};
+ for (const tool of request.tools ?? []) {
+ schemas[tool.name] = jsonSchemaToZod(tool.parameters);
+ }
+ toolSchemaCache.set(request, schemas);
+ return schemas;
+ }
+ function safeParseJson(str) {
+ try {
+ return JSON.parse(str);
+ } catch {
+ return str;
+ }
+ }
+ function findMatchingTool(request, args) {
+ const schemas = getToolSchemas(request);
+ const matches = [];
+ const parsedArgs = typeof args === "string" ? safeParseJson(args) : args;
+ for (const [name, schema] of Object.entries(schemas)) {
+ if (schema.safeParse(parsedArgs).success) {
+ matches.push(name);
+ }
+ }
+ return matches.length === 1 ? matches[0] : null;
+ }

  // src/core/ensureToolCall.ts
  var ENSURE_TOOL_CALL_STATE_KEY = "__ensureToolCall";
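
For reference, the conversion above maps a tool's JSON Schema parameters onto an equivalent Zod validator. A minimal sketch of the correspondence, using a hypothetical tool schema (not taken from the package):

// Hypothetical JSON Schema input:
//   { type: "object",
//     properties: { city: { type: "string" }, days: { type: "integer" } },
//     required: ["city"],
//     additionalProperties: false }
// Equivalent Zod validator under the rules of jsonSchemaToZod above:
import { z } from "zod";

const validator = z.object({
  city: z.string(),
  days: z.number().optional(),   // "integer" maps to z.number(); not in "required", so optional
}).strict();                     // additionalProperties: false maps to .strict()

console.log(validator.safeParse({ city: "Berlin" }).success);          // true
console.log(validator.safeParse({ city: "Berlin", foo: 1 }).success);  // false
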
@@ -1483,33 +1554,36 @@ class OpenAIChatClientAdapter {
  };
  }
  fromUlx(ulxResponse, _ulxRequest) {
- const contentBlocks = ulxResponse.output.find((entry) => entry.type === "message");
- let content = null;
- let reasoning_content = undefined;
- if (contentBlocks?.type === "message") {
- const textParts = [];
- const reasoningParts = [];
- for (const entry of contentBlocks.content) {
- if (entry.type === "text") {
- textParts.push(entry.text ?? "");
- } else if (entry.type === "reasoning") {
- reasoningParts.push(entry.text ?? "");
+ const textParts = [];
+ const reasoningParts = [];
+ let toolCalls = undefined;
+ for (const block of ulxResponse.output) {
+ if (block.type === "message") {
+ for (const entry of block.content) {
+ if (entry.type === "reasoning") {
+ reasoningParts.push(entry.text ?? "");
+ }
+ }
+ const meaningfulContent = block.content.filter((c) => c.type !== "reasoning");
+ if (meaningfulContent.length > 0) {
+ const mixed = contentToText(meaningfulContent);
+ textParts.push(typeof mixed === "string" ? mixed : JSON.stringify(mixed));
+ }
+ if (block.tool_calls) {
+ toolCalls = [...toolCalls ?? [], ...block.tool_calls];
+ }
+ } else if (block.type === "reasoning") {
+ for (const entry of block.content) {
+ if (entry.type === "reasoning") {
+ reasoningParts.push(entry.text ?? "");
+ }
  }
- }
- if (textParts.length > 0) {
- content = textParts.join("");
- } else if (contentBlocks.content.some((c) => c.type === "image_url" || c.type === "json")) {
- const mixed = contentToText(contentBlocks.content.filter((c) => c.type !== "reasoning"));
- content = typeof mixed === "string" ? mixed : JSON.stringify(mixed);
- } else {
- content = null;
- }
- if (reasoningParts.length > 0) {
- reasoning_content = reasoningParts.join(`
-
- `);
  }
  }
+ const content = textParts.join("") || null;
+ const reasoning_content = reasoningParts.join(`
+
+ `) || undefined;
  return {
  id: ulxResponse.id,
  object: "chat.completion",
@@ -1520,9 +1594,9 @@ class OpenAIChatClientAdapter {
  index: 0,
  message: {
  role: "assistant",
- content,
  ...reasoning_content ? { reasoning_content } : {},
- tool_calls: contentBlocks?.type === "message" && contentBlocks.tool_calls ? contentBlocks.tool_calls.map((tc) => ({
+ content,
+ tool_calls: toolCalls ? toolCalls.map((tc) => ({
  id: tc.id,
  type: "function",
  function: {
@@ -1612,7 +1686,11 @@ class AnthropicMessagesClientAdapter {
  function anthropicBlockToUlxContent(block) {
  const declaredType = typeof block.type === "string" ? block.type : "";
  if (declaredType === "thinking" || declaredType === "redacted_thinking" || typeof block.thinking === "string") {
- return;
+ return {
+ type: "reasoning",
+ text: block.thinking ?? block.text ?? "",
+ data: block.signature ? { signature: block.signature } : declaredType === "redacted_thinking" ? { redacted: true } : undefined
+ };
  }
  if (typeof block.text === "string") {
  return { type: "text", text: block.text };
@@ -1728,37 +1806,57 @@ class AnthropicMessagesClientAdapter {
  metadata: { clientFormat: this.clientFormat, headers }
  };
  }
- fromUlx(ulxResponse) {
- const contentBlocks = ulxResponse.output.find((entry) => entry.type === "message");
- const messageContent = contentBlocks?.type === "message" ? contentToText(contentBlocks.content) : "";
- const content = [];
- if (typeof messageContent === "string") {
- if (messageContent)
- content.push({ type: "text", text: messageContent });
- } else if (Array.isArray(messageContent)) {
- for (const item of messageContent) {
- if (item && typeof item === "object") {
- const block = item;
- if (block.type === "thinking") {
- content.push(block);
- } else if (block.type === "text") {
- content.push(block);
+ fromUlx(ulxResponse, _ulxRequest) {
+ const thinkingBlocks = [];
+ const textBlocks = [];
+ const toolUseBlocks = [];
+ for (const block of ulxResponse.output) {
+ if (block.type === "reasoning") {
+ for (const entry of block.content) {
+ if (entry.type === "reasoning") {
+ thinkingBlocks.push({
+ type: "thinking",
+ thinking: entry.text ?? "",
+ signature: entry.data?.signature
+ });
+ }
+ }
+ } else if (block.type === "message") {
+ const messageContent = contentToText(block.content);
+ if (typeof messageContent === "string") {
+ if (messageContent)
+ textBlocks.push({ type: "text", text: messageContent });
+ } else if (Array.isArray(messageContent)) {
+ for (const item of messageContent) {
+ if (item && typeof item === "object") {
+ const b = item;
+ if (b.type === "thinking") {
+ thinkingBlocks.push(b);
+ } else if (b.type === "text") {
+ textBlocks.push(b);
+ }
+ }
+ }
+ } else if (typeof messageContent === "object" && messageContent !== null && "text" in messageContent) {
+ textBlocks.push(messageContent);
+ }
+ if (block.tool_calls) {
+ for (const tc of block.tool_calls) {
+ toolUseBlocks.push({
+ type: "tool_use",
+ id: tc.id,
+ name: tc.name,
+ input: JSON.parse(tc.arguments)
+ });
  }
  }
- }
- } else if (typeof messageContent === "object" && messageContent !== null && "text" in messageContent) {
- content.push(messageContent);
- }
- if (contentBlocks?.type === "message" && contentBlocks.tool_calls) {
- for (const tc of contentBlocks.tool_calls) {
- content.push({
- type: "tool_use",
- id: tc.id,
- name: tc.name,
- input: JSON.parse(tc.arguments)
- });
  }
  }
+ const content = [
+ ...thinkingBlocks,
+ ...textBlocks,
+ ...toolUseBlocks
+ ];
  return {
  id: ulxResponse.id,
  type: "message",
@@ -2018,18 +2116,19 @@ class OpenAIResponsesClientAdapter {
  }
  fromUlx(ulxResponse, ulxRequest) {
  const outputBlocks = ulxResponse.output;
- const output = [];
  const textParts = [];
  const createdAt = Math.floor(Date.now() / 1000);
  let messageIndex = 0;
  let functionCallIndex = 0;
  let reasoningIndex = 0;
+ const collectedReasoning = [];
+ const collectedMessages = [];
+ const collectedFunctionCalls = [];
  for (const block of outputBlocks) {
  if (block.type === "message") {
  const messageId = `msg_${ulxResponse.id}_${messageIndex++}`;
  const status = block.status === "incomplete" ? "incomplete" : "completed";
  const content = [];
- const reasoningContent = [];
  for (const entry of block.content) {
  if (entry.type === "text") {
  const text = entry.text ?? "";
@@ -2048,22 +2147,16 @@ class OpenAIResponsesClientAdapter {
  }
  content.push({ type: "output_text", text, annotations: [] });
  } else if (entry.type === "reasoning") {
- reasoningContent.push({
- type: "reasoning_text",
- text: entry.text ?? ""
+ collectedReasoning.push({
+ type: "reasoning",
+ id: `rsn_${ulxResponse.id}_${reasoningIndex++}`,
+ status: "completed",
+ content: [{ type: "reasoning_text", text: entry.text ?? "" }],
+ summary: []
  });
  }
  }
- if (reasoningContent.length > 0) {
- output.push({
- type: "reasoning",
- id: `rsn_${ulxResponse.id}_${reasoningIndex++}`,
- status: "completed",
- content: reasoningContent,
- summary: []
- });
- }
- output.push({
+ collectedMessages.push({
  type: "message",
  id: messageId,
  role: "assistant",
@@ -2072,7 +2165,7 @@ class OpenAIResponsesClientAdapter {
  });
  if (block.tool_calls) {
  for (const call of block.tool_calls) {
- output.push({
+ collectedFunctionCalls.push({
  type: "function_call",
  id: `fc_${ulxResponse.id}_${functionCallIndex++}`,
  call_id: call.id,
@@ -2083,7 +2176,7 @@ class OpenAIResponsesClientAdapter {
  }
  }
  } else if (block.type === "tool_call") {
- output.push({
+ collectedFunctionCalls.push({
  type: "function_call",
  id: `fc_${ulxResponse.id}_${functionCallIndex++}`,
  call_id: block.call_id,
@@ -2092,7 +2185,7 @@ class OpenAIResponsesClientAdapter {
  status: block.status === "pending" ? "in_progress" : "completed"
  });
  } else if (block.type === "reasoning") {
- output.push({
+ collectedReasoning.push({
  type: "reasoning",
  id: `rsn_${ulxResponse.id}_${reasoningIndex++}`,
  status: "completed",
@@ -2107,6 +2200,11 @@ class OpenAIResponsesClientAdapter {
  });
  }
  }
+ const output = [
+ ...collectedReasoning,
+ ...collectedMessages,
+ ...collectedFunctionCalls
+ ];
  const inputTokens = ulxResponse.usage?.input_tokens ?? 0;
  const outputTokens = ulxResponse.usage?.output_tokens ?? 0;
  const totalTokens = ulxResponse.usage?.total_tokens ?? inputTokens + outputTokens;
@@ -2316,7 +2414,9 @@ function createCapturingFetch(originalFetch = globalThis.fetch) {
  // src/core/providers/anthropic.ts
  var AnthropicContentSchema = z6.object({
  type: z6.string(),
- text: z6.string().optional()
+ text: z6.string().optional(),
+ thinking: z6.string().optional(),
+ signature: z6.string().optional()
  }).passthrough();
  var AnthropicResponseSchema = z6.object({
  id: z6.string(),
@@ -2357,6 +2457,13 @@ function toAnthropicContent(blocks) {
  return blocks.map((entry) => {
  if (entry.type === "text")
  return { type: "text", text: entry.text ?? "" };
+ if (entry.type === "reasoning") {
+ return {
+ type: "thinking",
+ thinking: entry.text ?? "",
+ signature: entry.data?.signature
+ };
+ }
  if (entry.type === "image_url") {
  if (typeof entry.url === "string") {
  const match = entry.url.match(/^data:([^;]+);base64,(.+)$/);
@@ -2397,7 +2504,8 @@ function anthropicResponseToUlx(body, request) {
  type: "reasoning",
  content: reasoning.map((part) => ({
  type: "reasoning",
- text: part.text ?? ""
+ text: part.thinking ?? part.text ?? "",
+ data: part.signature ? { signature: part.signature } : undefined
  })),
  summary: []
  });
@@ -2635,13 +2743,39 @@ function parseToolCalls(section) {
  }
  return toolCalls;
  }
- function fixKimiResponse(response) {
+ function repairToolNames(toolCalls, request) {
+ const schemas = getToolSchemas(request);
+ const toolNames = new Set(Object.keys(schemas));
+ let repairedCount = 0;
+ for (const call of toolCalls) {
+ if (call.type !== "function" || !isJsonObject(call.function))
+ continue;
+ const fn = call.function;
+ if (typeof fn.name === "number") {
+ fn.name = String(fn.name);
+ }
+ const name = fn.name;
+ if (typeof name !== "string")
+ continue;
+ if (toolNames.has(name))
+ continue;
+ const match = findMatchingTool(request, fn.arguments);
+ if (match) {
+ logger.debug(`[KimiFixer] Repaired tool name: ${name} -> ${match}`);
+ fn.name = match;
+ repairedCount++;
+ }
+ }
+ return repairedCount;
+ }
+ function fixKimiResponse(response, request) {
  const metadata = {
  extractedToolCalls: 0,
  extractedFromReasoning: 0,
  extractedFromContent: 0,
  cleanedReasoningContent: false,
- cleanedMessageContent: false
+ cleanedMessageContent: false,
+ repairedToolNames: 0
  };
  try {
  const choices = response?.choices;
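
repairToolNames above only rewrites a call's name when the arguments validate against exactly one declared tool (via findMatchingTool). A minimal standalone sketch of that repair idea, with an invented tool and arguments:

import { z } from "zod";

// Assumed declared tool; kimi-proxy builds these validators from request.tools.
const declared: Record<string, z.ZodTypeAny> = {
  get_weather: z.object({ city: z.string() }).strict(),
};

// A tool call whose name is not a declared tool (here, a numeric name).
const call = { type: "function", function: { name: 0 as string | number, arguments: '{"city":"Berlin"}' } };

const name = String(call.function.name);
if (!(name in declared)) {
  const parsed = JSON.parse(call.function.arguments);
  const matches = Object.entries(declared).filter(([, s]) => s.safeParse(parsed).success);
  if (matches.length === 1) call.function.name = matches[0][0]; // unambiguous match: adopt that name
}
console.log(call.function.name); // "get_weather"
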
@@ -2698,6 +2832,10 @@ ${thinkingContent}` : thinkingContent;
  }
  }
  if (aggregatedToolCalls.length) {
+ const repaired = repairToolNames(aggregatedToolCalls, request);
+ if (repaired > 0) {
+ metadata.repairedToolNames = (metadata.repairedToolNames || 0) + repaired;
+ }
  message.tool_calls = aggregatedToolCalls;
  choice.finish_reason = "tool_calls";
  } else if ("tool_calls" in message) {
@@ -2741,7 +2879,7 @@ var OpenAIToolCallSchema = z7.object({
  id: z7.string().optional(),
  type: z7.literal("function").optional(),
  function: z7.object({
- name: z7.string(),
+ name: z7.union([z7.string(), z7.number()]).transform(String),
  arguments: z7.union([z7.string(), z7.record(z7.unknown())]).optional()
  }).passthrough()
  }).passthrough();
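
The name field above now tolerates a numeric function name and coerces it to a string; a two-line Zod sketch of that behavior:

import { z } from "zod";
const NameSchema = z.union([z.string(), z.number()]).transform(String);
console.log(NameSchema.parse("search")); // "search"
console.log(NameSchema.parse(3));        // "3"
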
@@ -2771,13 +2909,14 @@ function normalizeToolCalls(toolCalls) {
  const normalized = [];
  for (const call of toolCalls) {
  const fn = call?.function;
- if (!fn || typeof fn.name !== "string")
+ if (!fn || typeof fn.name !== "string" && typeof fn.name !== "number")
  continue;
- const id = typeof call.id === "string" && call.id.length ? call.id : fn.name;
+ const name = String(fn.name);
+ const id = typeof call.id === "string" && call.id.length ? call.id : `${name}_call_${Math.random().toString(36).substring(2, 10)}`;
  normalized.push({
  id,
  type: "function",
- name: fn.name,
+ name,
  arguments: safeJsonString(fn.arguments)
  });
  }
@@ -2851,35 +2990,34 @@ function toOpenAITool(tool) {
  }
  function toOpenAIMessages(messages) {
  return messages.map((msg) => {
+ const reasoningBlocks = msg.content.filter((c) => c.type === "reasoning");
+ const nonReasoningBlocks = msg.content.filter((c) => c.type !== "reasoning");
+ const res = {
+ role: msg.role === "assistant" ? "assistant" : msg.role,
+ content: toOpenAIContent(nonReasoningBlocks)
+ };
+ if (reasoningBlocks.length > 0) {
+ res.reasoning_content = reasoningBlocks.map((b) => b.text).join(`
+
+ `);
+ }
  if (msg.role === "tool") {
- return {
- role: "tool",
- tool_call_id: msg.tool_call_id,
- content: toOpenAIContent(msg.content)
- };
+ res.tool_call_id = msg.tool_call_id;
  }
  if (msg.tool_calls) {
- return {
- role: msg.role,
- content: toOpenAIContent(msg.content),
- tool_calls: msg.tool_calls.map((call) => ({
- id: call.id,
- type: "function",
- function: {
- name: call.name,
- arguments: call.arguments
- }
- }))
- };
+ res.tool_calls = msg.tool_calls.map((call) => ({
+ id: call.id,
+ type: "function",
+ function: {
+ name: call.name,
+ arguments: call.arguments
+ }
+ }));
  }
- return {
- role: msg.role === "assistant" ? "assistant" : msg.role,
- content: toOpenAIContent(msg.content),
- name: undefined
- };
+ return res;
  });
  }
- function normalizeOpenAIProviderResponse(payload) {
+ function normalizeOpenAIProviderResponse(payload, request) {
  const parsed = OpenAIResponseSchema.safeParse(payload.body);
  if (!parsed.success) {
  const issue = parsed.error.issues[0]?.message ?? "Invalid provider payload";
@@ -2887,7 +3025,7 @@ function normalizeOpenAIProviderResponse(payload) {
  return { error: issue };
  }
  const cloned = structuredClone(parsed.data);
- const { response, metadata } = fixKimiResponse(cloned);
+ const { response, metadata } = fixKimiResponse(cloned, request);
  const normalized = OpenAIResponseSchema.safeParse(response);
  if (!normalized.success) {
  const issue = normalized.error.issues[0]?.message ?? "Provider payload invalid after normalization";
@@ -3002,7 +3140,7 @@ class OpenAIProviderAdapter {
  if (payload.status >= 400) {
  return toUlxErrorResponse(payload, request);
  }
- const normalized = normalizeOpenAIProviderResponse(payload);
+ const normalized = normalizeOpenAIProviderResponse(payload, request);
  if ("error" in normalized) {
  return {
  id: request.id,
@@ -3090,7 +3228,7 @@ class OpenRouterProviderAdapter {
  if (payload.status >= 400) {
  return toUlxErrorResponse(payload, request);
  }
- const normalized = normalizeOpenAIProviderResponse(payload);
+ const normalized = normalizeOpenAIProviderResponse(payload, request);
  if ("error" in normalized) {
  return {
  id: request.id,
@@ -3422,7 +3560,7 @@ class VertexProviderAdapter {
  return toUlxErrorResponse(payload, request);
  }
  if (request.model && MAAS_MODEL_PATTERN.test(request.model)) {
- const normalized = normalizeOpenAIProviderResponse(payload);
+ const normalized = normalizeOpenAIProviderResponse(payload, request);
  if ("error" in normalized) {
  return {
  id: request.id,
@@ -3715,13 +3853,53 @@ class VertexProviderAdapter {
  toVertexTools(tools) {
  if (!tools?.length)
  return;
+ function flattenJsonSchema(schema) {
+ if (!schema || typeof schema !== "object" || Array.isArray(schema)) {
+ return schema;
+ }
+ const obj = { ...schema };
+ const definitions = obj.definitions;
+ delete obj.definitions;
+ function resolveRef(target, path4) {
+ if (!target || typeof target !== "object" || Array.isArray(target)) {
+ return target;
+ }
+ const copy = { ...target };
+ for (const key in copy) {
+ const value = copy[key];
+ if (key === "$ref" && typeof value === "string" && value.startsWith("#/definitions/")) {
+ const defName = value.slice(14);
+ const definition = definitions?.[defName];
+ if (definition) {
+ delete copy.$ref;
+ const resolved = resolveRef(definition, [...path4, defName]);
+ if (resolved && typeof resolved === "object" && !Array.isArray(resolved)) {
+ Object.assign(copy, resolved);
+ }
+ }
+ } else if (key === "$ref" && typeof value === "string") {
+ delete copy.$ref;
+ } else {
+ copy[key] = resolveRef(value, path4);
+ }
+ }
+ return copy;
+ }
+ return resolveRef(obj, []);
+ }
  return [
  {
- functionDeclarations: tools.map((tool) => ({
- name: tool.name,
- description: tool.description,
- parameters: tool.parameters
- }))
+ functionDeclarations: tools.map((tool) => {
+ let parameters = tool.parameters;
+ if (parameters && typeof parameters === "object" && !Array.isArray(parameters)) {
+ parameters = flattenJsonSchema(parameters);
+ }
+ return {
+ name: tool.name,
+ description: tool.description,
+ parameters
+ };
+ })
  }
  ];
  }
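
flattenJsonSchema above inlines local #/definitions/ references (and drops the definitions key) before tool parameters are passed as Vertex functionDeclarations. A hypothetical before/after pair:

// Hypothetical tool.parameters containing a local $ref:
const before = {
  type: "object",
  properties: { address: { $ref: "#/definitions/Address" } },
  definitions: {
    Address: { type: "object", properties: { city: { type: "string" } } },
  },
};
// Result of flattening: the $ref is replaced by the resolved definition and
// the top-level "definitions" key is removed.
const after = {
  type: "object",
  properties: { address: { type: "object", properties: { city: { type: "string" } } } },
};
console.log(before, after);
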
@@ -3977,7 +4155,7 @@ ${content}`
  logger.warn({ requestId: request.id }, "Detected prior assistant termination; requesting synthetic response");
  return true;
  }
- if (toolCalls.length === 1 && this.checkTerminationHeuristic(message, toolCalls[0])) {
+ if (toolCalls.length === 1 && this.checkTerminationHeuristic(message, toolCalls[0], request)) {
  requestSyntheticResponse(request.state);
  logger.warn({ requestId: request.id }, "Detected TodoWrite termination heuristic; requesting synthetic response");
  return true;
@@ -3985,7 +4163,10 @@ ${content}`
  }
  return false;
  }
- checkTerminationHeuristic(message, toolCall) {
+ checkTerminationHeuristic(message, toolCall, request) {
+ if (!request.model.toLowerCase().includes("kimi")) {
+ return false;
+ }
  if (!hasCaseInsensitiveTerminationKeywords(message.content)) {
  return false;
  }
@@ -4014,7 +4195,7 @@ class EnsureToolCallResponseTransform {
  const response = context.response;
  if (!ensureState || !response)
  return;
- if (this.checkTerminationHeuristic(response)) {
+ if (this.checkTerminationHeuristic(response, context.request)) {
  ensureState.pendingReminder = false;
  logger.info({ requestId: context.request.id }, "EnsureToolCall satisfied by TodoWrite + keyword heuristics");
  return;
@@ -4088,7 +4269,10 @@ class EnsureToolCallResponseTransform {
  }
  return false;
  }
- checkTerminationHeuristic(response) {
+ checkTerminationHeuristic(response, request) {
+ if (!request.model.toLowerCase().includes("kimi")) {
+ return false;
+ }
  const messageBlock = response.output.find((block) => block.type === "message");
  if (!messageBlock || messageBlock.type !== "message")
  return false;
@@ -5666,6 +5850,31 @@ async function createServer(config) {
  timestamp: meta.timestamp
  });
  });
+ server.get("/api/config", (_req, reply) => {
+ reply.send({
+ blobRoot: config.logging.blobRoot
+ });
+ });
+ server.get("/api/logs/:id/path", (req, reply) => {
+ const { id } = req.params;
+ const meta = logStore.readMetadata(Number(id));
+ if (!meta) {
+ reply.status(404).send({ error: { message: "Log not found" } });
+ return;
+ }
+ const date = new Date(meta.timestamp);
+ const year = date.getUTCFullYear();
+ const month = String(date.getUTCMonth() + 1).padStart(2, "0");
+ const day = String(date.getUTCDate()).padStart(2, "0");
+ const dirPath = `${config.logging.blobRoot}/${year}/${month}/${day}/${meta.request_id}/`;
+ reply.send({
+ directory: dirPath,
+ request: meta.request_path ? `${config.logging.blobRoot}/${meta.request_path}` : null,
+ response: meta.response_path ? `${config.logging.blobRoot}/${meta.response_path}` : null,
+ providerRequest: meta.provider_request_path ? `${config.logging.blobRoot}/${meta.provider_request_path}` : null,
+ providerResponse: meta.provider_response_path ? `${config.logging.blobRoot}/${meta.provider_response_path}` : null
+ });
+ });
  return server;
  }
  async function handleRequest(req, reply, body, modelRegistry, pipeline, logStore, liveStoreRuntime, config, options) {
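
The new /api/logs/:id/path route resolves a log entry to its on-disk blob locations. A hypothetical client call (host, port, and paths are examples; the response shape follows the handler above):

// Illustrative only; assumes the proxy is listening locally.
const res = await fetch("http://localhost:3000/api/logs/42/path");
const paths = await res.json();
// paths resembles:
// {
//   directory: "<blobRoot>/<yyyy>/<mm>/<dd>/<request_id>/",
//   request: "<blobRoot>/<request_path>" | null,
//   response: "<blobRoot>/<response_path>" | null,
//   providerRequest: "<blobRoot>/<provider_request_path>" | null,
//   providerResponse: "<blobRoot>/<provider_response_path>" | null
// }
console.log(paths.directory);
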
@@ -5699,13 +5908,15 @@ async function handleRequest(req, reply, body, modelRegistry, pipeline, logStore
  method: req.method,
  url: req.url,
  statusCode: 400,
+ provider: "schema_validation_failed",
  startedAt,
  finishedAt: Date.now(),
  requestBody: req.body,
  responseBody: errorBody,
  operation: options.operation,
  clientFormat: options.clientFormat,
- profile: options.profile
+ profile: options.profile,
+ summary: summarizeError(error, "schema_validation")
  });
  reply.status(400).send(errorBody);
  return;
@@ -5730,13 +5941,15 @@ async function handleRequest(req, reply, body, modelRegistry, pipeline, logStore
  url: req.url,
  statusCode: 400,
  model,
+ provider: "resolution_failed",
  startedAt,
  finishedAt: Date.now(),
  requestBody: parsedBody,
  responseBody: errorBody,
  operation: options.operation,
  clientFormat: options.clientFormat,
- profile: options.profile
+ profile: options.profile,
+ summary: summarizeError(error, "model_resolution")
  });
  reply.status(400).send(errorBody);
  return;
@@ -5813,7 +6026,8 @@ async function handleRequest(req, reply, body, modelRegistry, pipeline, logStore
  requestBody: upstreamModel ? { ...parsedBody, model: upstreamModel } : parsedBody,
  responseBody: { error: errorDetails },
  providerRequestBody: null,
- providerResponseBody: null
+ providerResponseBody: null,
+ summary: summarizeError(error, "pipeline_execution")
  });
  reply.status(500).send({ error: { message: "Internal proxy error" } });
  }
@@ -5902,6 +6116,13 @@ function summarize(response) {
  preview
  });
  }
+ function summarizeError(error, errorType) {
+ const errorMessage = error instanceof Error ? error.message : String(error);
+ return JSON.stringify({
+ error_type: errorType,
+ error_message: errorMessage.slice(0, 500)
+ });
+ }
  function isPrematureCloseError(error) {
  if (!error || typeof error !== "object")
  return false;
@@ -5925,5 +6146,5 @@ async function bootstrap() {
  }
  bootstrap();

- //# debugId=C3410C5B307F3A9164756E2164756E21
+ //# debugId=58CB00416A249CAB64756E2164756E21
  //# sourceMappingURL=index.js.map