@aituber-onair/chat 0.28.0 → 0.30.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. package/README.ja.md +9 -5
  2. package/README.md +9 -5
  3. package/dist/cjs/constants/openrouter.d.ts +10 -0
  4. package/dist/cjs/constants/openrouter.d.ts.map +1 -1
  5. package/dist/cjs/constants/openrouter.js +20 -1
  6. package/dist/cjs/constants/openrouter.js.map +1 -1
  7. package/dist/cjs/constants/xai.d.ts +1 -0
  8. package/dist/cjs/constants/xai.d.ts.map +1 -1
  9. package/dist/cjs/constants/xai.js +3 -1
  10. package/dist/cjs/constants/xai.js.map +1 -1
  11. package/dist/cjs/services/ChatService.d.ts +2 -0
  12. package/dist/cjs/services/ChatService.d.ts.map +1 -1
  13. package/dist/cjs/services/providers/openai/OpenAIChatService.d.ts +0 -14
  14. package/dist/cjs/services/providers/openai/OpenAIChatService.d.ts.map +1 -1
  15. package/dist/cjs/services/providers/openai/OpenAIChatService.js +5 -192
  16. package/dist/cjs/services/providers/openai/OpenAIChatService.js.map +1 -1
  17. package/dist/cjs/services/providers/openai/responsesParser.d.ts +10 -0
  18. package/dist/cjs/services/providers/openai/responsesParser.d.ts.map +1 -0
  19. package/dist/cjs/services/providers/openai/responsesParser.js +166 -0
  20. package/dist/cjs/services/providers/openai/responsesParser.js.map +1 -0
  21. package/dist/cjs/services/providers/openrouter/OpenRouterChatServiceProvider.d.ts.map +1 -1
  22. package/dist/cjs/services/providers/openrouter/OpenRouterChatServiceProvider.js +11 -0
  23. package/dist/cjs/services/providers/openrouter/OpenRouterChatServiceProvider.js.map +1 -1
  24. package/dist/cjs/services/providers/xai/XAIChatServiceProvider.d.ts.map +1 -1
  25. package/dist/cjs/services/providers/xai/XAIChatServiceProvider.js +1 -0
  26. package/dist/cjs/services/providers/xai/XAIChatServiceProvider.js.map +1 -1
  27. package/dist/esm/constants/openrouter.d.ts +10 -0
  28. package/dist/esm/constants/openrouter.d.ts.map +1 -1
  29. package/dist/esm/constants/openrouter.js +19 -0
  30. package/dist/esm/constants/openrouter.js.map +1 -1
  31. package/dist/esm/constants/xai.d.ts +1 -0
  32. package/dist/esm/constants/xai.d.ts.map +1 -1
  33. package/dist/esm/constants/xai.js +2 -0
  34. package/dist/esm/constants/xai.js.map +1 -1
  35. package/dist/esm/services/ChatService.d.ts +2 -0
  36. package/dist/esm/services/ChatService.d.ts.map +1 -1
  37. package/dist/esm/services/providers/openai/OpenAIChatService.d.ts +0 -14
  38. package/dist/esm/services/providers/openai/OpenAIChatService.d.ts.map +1 -1
  39. package/dist/esm/services/providers/openai/OpenAIChatService.js +5 -192
  40. package/dist/esm/services/providers/openai/OpenAIChatService.js.map +1 -1
  41. package/dist/esm/services/providers/openai/responsesParser.d.ts +10 -0
  42. package/dist/esm/services/providers/openai/responsesParser.d.ts.map +1 -0
  43. package/dist/esm/services/providers/openai/responsesParser.js +162 -0
  44. package/dist/esm/services/providers/openai/responsesParser.js.map +1 -0
  45. package/dist/esm/services/providers/openrouter/OpenRouterChatServiceProvider.d.ts.map +1 -1
  46. package/dist/esm/services/providers/openrouter/OpenRouterChatServiceProvider.js +12 -1
  47. package/dist/esm/services/providers/openrouter/OpenRouterChatServiceProvider.js.map +1 -1
  48. package/dist/esm/services/providers/xai/XAIChatServiceProvider.d.ts.map +1 -1
  49. package/dist/esm/services/providers/xai/XAIChatServiceProvider.js +2 -1
  50. package/dist/esm/services/providers/xai/XAIChatServiceProvider.js.map +1 -1
  51. package/dist/umd/aituber-onair-chat.js +205 -181
  52. package/dist/umd/aituber-onair-chat.min.js +8 -8
  53. package/package.json +1 -1
@@ -57,8 +57,10 @@ var AITuberOnAirChat = (() => {
57
57
  MODEL_ANTHROPIC_CLAUDE_3_5_SONNET: () => MODEL_ANTHROPIC_CLAUDE_3_5_SONNET,
58
58
  MODEL_ANTHROPIC_CLAUDE_3_7_SONNET: () => MODEL_ANTHROPIC_CLAUDE_3_7_SONNET,
59
59
  MODEL_ANTHROPIC_CLAUDE_4_5_HAIKU: () => MODEL_ANTHROPIC_CLAUDE_4_5_HAIKU,
60
+ MODEL_ANTHROPIC_CLAUDE_HAIKU_LATEST: () => MODEL_ANTHROPIC_CLAUDE_HAIKU_LATEST,
60
61
  MODEL_ANTHROPIC_CLAUDE_OPUS_4: () => MODEL_ANTHROPIC_CLAUDE_OPUS_4,
61
62
  MODEL_ANTHROPIC_CLAUDE_SONNET_4: () => MODEL_ANTHROPIC_CLAUDE_SONNET_4,
63
+ MODEL_ANTHROPIC_CLAUDE_SONNET_LATEST: () => MODEL_ANTHROPIC_CLAUDE_SONNET_LATEST,
62
64
  MODEL_CLAUDE_3_5_HAIKU: () => MODEL_CLAUDE_3_5_HAIKU,
63
65
  MODEL_CLAUDE_3_5_SONNET: () => MODEL_CLAUDE_3_5_SONNET,
64
66
  MODEL_CLAUDE_3_7_SONNET: () => MODEL_CLAUDE_3_7_SONNET,
@@ -96,6 +98,8 @@ var AITuberOnAirChat = (() => {
96
98
  MODEL_GOOGLE_GEMINI_2_5_FLASH: () => MODEL_GOOGLE_GEMINI_2_5_FLASH,
97
99
  MODEL_GOOGLE_GEMINI_2_5_FLASH_LITE_PREVIEW_09_2025: () => MODEL_GOOGLE_GEMINI_2_5_FLASH_LITE_PREVIEW_09_2025,
98
100
  MODEL_GOOGLE_GEMINI_2_5_PRO: () => MODEL_GOOGLE_GEMINI_2_5_PRO,
101
+ MODEL_GOOGLE_GEMINI_FLASH_LATEST: () => MODEL_GOOGLE_GEMINI_FLASH_LATEST,
102
+ MODEL_GOOGLE_GEMINI_PRO_LATEST: () => MODEL_GOOGLE_GEMINI_PRO_LATEST,
99
103
  MODEL_GPT_4O: () => MODEL_GPT_4O,
100
104
  MODEL_GPT_4O_MINI: () => MODEL_GPT_4O_MINI,
101
105
  MODEL_GPT_4_1: () => MODEL_GPT_4_1,
@@ -115,9 +119,11 @@ var AITuberOnAirChat = (() => {
115
119
  MODEL_GROK_4_1_FAST_REASONING: () => MODEL_GROK_4_1_FAST_REASONING,
116
120
  MODEL_GROK_4_20_NON_REASONING: () => MODEL_GROK_4_20_NON_REASONING,
117
121
  MODEL_GROK_4_20_REASONING: () => MODEL_GROK_4_20_REASONING,
122
+ MODEL_GROK_4_3: () => MODEL_GROK_4_3,
118
123
  MODEL_KIMI_K2_5: () => MODEL_KIMI_K2_5,
119
124
  MODEL_KIMI_K2_6: () => MODEL_KIMI_K2_6,
120
125
  MODEL_MOONSHOTAI_KIMI_K2_5: () => MODEL_MOONSHOTAI_KIMI_K2_5,
126
+ MODEL_MOONSHOTAI_KIMI_LATEST: () => MODEL_MOONSHOTAI_KIMI_LATEST,
121
127
  MODEL_O1: () => MODEL_O1,
122
128
  MODEL_O1_MINI: () => MODEL_O1_MINI,
123
129
  MODEL_O3_MINI: () => MODEL_O3_MINI,
@@ -126,8 +132,13 @@ var AITuberOnAirChat = (() => {
126
132
  MODEL_OPENAI_GPT_4_1_NANO: () => MODEL_OPENAI_GPT_4_1_NANO,
127
133
  MODEL_OPENAI_GPT_5_1_CHAT: () => MODEL_OPENAI_GPT_5_1_CHAT,
128
134
  MODEL_OPENAI_GPT_5_1_CODEX: () => MODEL_OPENAI_GPT_5_1_CODEX,
135
+ MODEL_OPENAI_GPT_5_5: () => MODEL_OPENAI_GPT_5_5,
136
+ MODEL_OPENAI_GPT_5_5_PRO: () => MODEL_OPENAI_GPT_5_5_PRO,
129
137
  MODEL_OPENAI_GPT_5_MINI: () => MODEL_OPENAI_GPT_5_MINI,
130
138
  MODEL_OPENAI_GPT_5_NANO: () => MODEL_OPENAI_GPT_5_NANO,
139
+ MODEL_OPENAI_GPT_LATEST: () => MODEL_OPENAI_GPT_LATEST,
140
+ MODEL_OPENAI_GPT_MINI_LATEST: () => MODEL_OPENAI_GPT_MINI_LATEST,
141
+ MODEL_OPENROUTER_AUTO: () => MODEL_OPENROUTER_AUTO,
131
142
  MODEL_ZAI_GLM_4_5_AIR: () => MODEL_ZAI_GLM_4_5_AIR,
132
143
  MODEL_ZAI_GLM_4_5_AIR_FREE: () => MODEL_ZAI_GLM_4_5_AIR_FREE,
133
144
  MODEL_ZAI_GLM_4_7_FLASH: () => MODEL_ZAI_GLM_4_7_FLASH,
@@ -309,8 +320,14 @@ var AITuberOnAirChat = (() => {
309
320
 
310
321
  // src/constants/openrouter.ts
311
322
  var ENDPOINT_OPENROUTER_API = "https://openrouter.ai/api/v1/chat/completions";
323
+ var MODEL_OPENROUTER_AUTO = "openrouter/auto";
312
324
  var MODEL_GPT_OSS_20B_FREE = "openai/gpt-oss-20b:free";
313
325
  var MODEL_MOONSHOTAI_KIMI_K2_5 = "moonshotai/kimi-k2.5";
326
+ var MODEL_MOONSHOTAI_KIMI_LATEST = "~moonshotai/kimi-latest";
327
+ var MODEL_OPENAI_GPT_LATEST = "~openai/gpt-latest";
328
+ var MODEL_OPENAI_GPT_MINI_LATEST = "~openai/gpt-mini-latest";
329
+ var MODEL_OPENAI_GPT_5_5_PRO = "openai/gpt-5.5-pro";
330
+ var MODEL_OPENAI_GPT_5_5 = "openai/gpt-5.5";
314
331
  var MODEL_OPENAI_GPT_5_1_CHAT = "openai/gpt-5.1-chat";
315
332
  var MODEL_OPENAI_GPT_5_1_CODEX = "openai/gpt-5.1-codex";
316
333
  var MODEL_OPENAI_GPT_5_MINI = "openai/gpt-5-mini";
@@ -318,11 +335,15 @@ var AITuberOnAirChat = (() => {
318
335
  var MODEL_OPENAI_GPT_4O = "openai/gpt-4o";
319
336
  var MODEL_OPENAI_GPT_4_1_MINI = "openai/gpt-4.1-mini";
320
337
  var MODEL_OPENAI_GPT_4_1_NANO = "openai/gpt-4.1-nano";
338
+ var MODEL_ANTHROPIC_CLAUDE_SONNET_LATEST = "~anthropic/claude-sonnet-latest";
339
+ var MODEL_ANTHROPIC_CLAUDE_HAIKU_LATEST = "~anthropic/claude-haiku-latest";
321
340
  var MODEL_ANTHROPIC_CLAUDE_OPUS_4 = "anthropic/claude-opus-4";
322
341
  var MODEL_ANTHROPIC_CLAUDE_SONNET_4 = "anthropic/claude-sonnet-4";
323
342
  var MODEL_ANTHROPIC_CLAUDE_3_7_SONNET = "anthropic/claude-3.7-sonnet";
324
343
  var MODEL_ANTHROPIC_CLAUDE_3_5_SONNET = "anthropic/claude-3.5-sonnet";
325
344
  var MODEL_ANTHROPIC_CLAUDE_4_5_HAIKU = "anthropic/claude-haiku-4.5";
345
+ var MODEL_GOOGLE_GEMINI_PRO_LATEST = "~google/gemini-pro-latest";
346
+ var MODEL_GOOGLE_GEMINI_FLASH_LATEST = "~google/gemini-flash-latest";
326
347
  var MODEL_GOOGLE_GEMINI_2_5_PRO = "google/gemini-2.5-pro";
327
348
  var MODEL_GOOGLE_GEMINI_2_5_FLASH = "google/gemini-2.5-flash";
328
349
  var MODEL_GOOGLE_GEMINI_2_5_FLASH_LITE_PREVIEW_09_2025 = "google/gemini-2.5-flash-lite-preview-09-2025";
@@ -334,6 +355,11 @@ var AITuberOnAirChat = (() => {
334
355
  MODEL_ZAI_GLM_4_5_AIR_FREE
335
356
  ];
336
357
  var OPENROUTER_VISION_SUPPORTED_MODELS = [
358
+ MODEL_MOONSHOTAI_KIMI_LATEST,
359
+ MODEL_OPENAI_GPT_LATEST,
360
+ MODEL_OPENAI_GPT_MINI_LATEST,
361
+ MODEL_OPENAI_GPT_5_5_PRO,
362
+ MODEL_OPENAI_GPT_5_5,
337
363
  MODEL_OPENAI_GPT_5_1_CHAT,
338
364
  MODEL_OPENAI_GPT_5_1_CODEX,
339
365
  MODEL_OPENAI_GPT_5_MINI,
@@ -341,10 +367,14 @@ var AITuberOnAirChat = (() => {
341
367
  MODEL_OPENAI_GPT_4O,
342
368
  MODEL_OPENAI_GPT_4_1_MINI,
343
369
  MODEL_OPENAI_GPT_4_1_NANO,
370
+ MODEL_ANTHROPIC_CLAUDE_SONNET_LATEST,
371
+ MODEL_ANTHROPIC_CLAUDE_HAIKU_LATEST,
344
372
  MODEL_ANTHROPIC_CLAUDE_OPUS_4,
345
373
  MODEL_ANTHROPIC_CLAUDE_SONNET_4,
346
374
  MODEL_ANTHROPIC_CLAUDE_3_7_SONNET,
347
375
  MODEL_ANTHROPIC_CLAUDE_4_5_HAIKU,
376
+ MODEL_GOOGLE_GEMINI_PRO_LATEST,
377
+ MODEL_GOOGLE_GEMINI_FLASH_LATEST,
348
378
  MODEL_GOOGLE_GEMINI_2_5_PRO,
349
379
  MODEL_GOOGLE_GEMINI_2_5_FLASH,
350
380
  MODEL_GOOGLE_GEMINI_2_5_FLASH_LITE_PREVIEW_09_2025,
@@ -388,11 +418,13 @@ var AITuberOnAirChat = (() => {
388
418
 
389
419
  // src/constants/xai.ts
390
420
  var ENDPOINT_XAI_CHAT_COMPLETIONS_API = "https://api.x.ai/v1/chat/completions";
421
+ var MODEL_GROK_4_3 = "grok-4.3";
391
422
  var MODEL_GROK_4_20_REASONING = "grok-4.20-0309-reasoning";
392
423
  var MODEL_GROK_4_20_NON_REASONING = "grok-4.20-0309-non-reasoning";
393
424
  var MODEL_GROK_4_1_FAST_REASONING = "grok-4-1-fast-reasoning";
394
425
  var MODEL_GROK_4_1_FAST_NON_REASONING = "grok-4-1-fast-non-reasoning";
395
426
  var XAI_VISION_SUPPORTED_MODELS = [
427
+ MODEL_GROK_4_3,
396
428
  MODEL_GROK_4_20_REASONING,
397
429
  MODEL_GROK_4_20_NON_REASONING,
398
430
  MODEL_GROK_4_1_FAST_REASONING,
@@ -2826,6 +2858,164 @@ If it's in another language, summarize in that language.
2826
2858
  }
2827
2859
  };
2828
2860
 
2861
+ // src/services/providers/openai/responsesParser.ts
2862
+ async function parseOpenAIResponsesStream(res, onPartial) {
2863
+ const reader = res.body.getReader();
2864
+ const dec = new TextDecoder();
2865
+ const textBlocks = [];
2866
+ const toolCallsMap = /* @__PURE__ */ new Map();
2867
+ let responseStatus;
2868
+ let incompleteDetails;
2869
+ let usage;
2870
+ let buf = "";
2871
+ while (true) {
2872
+ const { done, value } = await reader.read();
2873
+ if (done) break;
2874
+ buf += dec.decode(value, { stream: true });
2875
+ let eventType = "";
2876
+ let eventData = "";
2877
+ const lines = buf.split("\n");
2878
+ buf = lines.pop() || "";
2879
+ for (let i = 0; i < lines.length; i++) {
2880
+ const line = lines[i].trim();
2881
+ if (line.startsWith("event:")) {
2882
+ eventType = line.slice(6).trim();
2883
+ } else if (line.startsWith("data:")) {
2884
+ eventData = line.slice(5).trim();
2885
+ } else if (line === "" && eventType && eventData) {
2886
+ try {
2887
+ const json = JSON.parse(eventData);
2888
+ handleResponsesSSEEvent(
2889
+ eventType,
2890
+ json,
2891
+ onPartial,
2892
+ textBlocks,
2893
+ toolCallsMap,
2894
+ (metadata) => {
2895
+ if (metadata.responseStatus !== void 0) {
2896
+ responseStatus = metadata.responseStatus;
2897
+ }
2898
+ if (metadata.incompleteDetails !== void 0) {
2899
+ incompleteDetails = metadata.incompleteDetails;
2900
+ }
2901
+ if (metadata.usage !== void 0) {
2902
+ usage = metadata.usage;
2903
+ }
2904
+ }
2905
+ );
2906
+ } catch {
2907
+ console.warn("Failed to parse SSE data:", eventData);
2908
+ }
2909
+ eventType = "";
2910
+ eventData = "";
2911
+ }
2912
+ }
2913
+ }
2914
+ const toolBlocks = Array.from(toolCallsMap.values()).map(
2915
+ (tool) => ({
2916
+ type: "tool_use",
2917
+ id: tool.id,
2918
+ name: tool.name,
2919
+ input: tool.input || {}
2920
+ })
2921
+ );
2922
+ return {
2923
+ blocks: [...textBlocks, ...toolBlocks],
2924
+ stop_reason: toolBlocks.length ? "tool_use" : "end",
2925
+ truncated: responseStatus === "incomplete",
2926
+ response_status: responseStatus,
2927
+ incomplete_details: incompleteDetails,
2928
+ usage
2929
+ };
2930
+ }
2931
+ function handleResponsesSSEEvent(eventType, data, onPartial, textBlocks, toolCallsMap, onMetadata) {
2932
+ switch (eventType) {
2933
+ case "response.output_item.added":
2934
+ if (data.item?.type === "message" && Array.isArray(data.item.content)) {
2935
+ data.item.content.forEach((c) => {
2936
+ if (c.type === "output_text" && c.text) {
2937
+ onPartial(c.text);
2938
+ StreamTextAccumulator.append(textBlocks, c.text);
2939
+ }
2940
+ });
2941
+ } else if (data.item?.type === "function_call") {
2942
+ toolCallsMap.set(data.item.id, {
2943
+ id: data.item.id,
2944
+ name: data.item.name,
2945
+ input: data.item.arguments ? JSON.parse(data.item.arguments) : {}
2946
+ });
2947
+ }
2948
+ break;
2949
+ case "response.content_part.added":
2950
+ if (data.part?.type === "output_text" && typeof data.part.text === "string") {
2951
+ onPartial(data.part.text);
2952
+ StreamTextAccumulator.append(textBlocks, data.part.text);
2953
+ }
2954
+ break;
2955
+ case "response.output_text.delta":
2956
+ case "response.content_part.delta": {
2957
+ const deltaText = typeof data.delta === "string" ? data.delta : data.delta?.text ?? "";
2958
+ if (deltaText) {
2959
+ onPartial(deltaText);
2960
+ StreamTextAccumulator.append(textBlocks, deltaText);
2961
+ }
2962
+ break;
2963
+ }
2964
+ case "response.output_text.done":
2965
+ case "response.content_part.done":
2966
+ case "response.reasoning.started":
2967
+ case "response.reasoning.delta":
2968
+ case "response.reasoning.done":
2969
+ break;
2970
+ case "response.completed":
2971
+ onMetadata(extractResponsesMetadata(data, "completed"));
2972
+ break;
2973
+ case "response.incomplete":
2974
+ onMetadata(extractResponsesMetadata(data, "incomplete"));
2975
+ break;
2976
+ default:
2977
+ break;
2978
+ }
2979
+ }
2980
+ function extractResponsesMetadata(data, fallbackStatus) {
2981
+ const response = data?.response ?? data;
2982
+ return {
2983
+ responseStatus: response?.status ?? fallbackStatus,
2984
+ incompleteDetails: response?.incomplete_details ?? null,
2985
+ usage: response?.usage
2986
+ };
2987
+ }
2988
+ function parseOpenAIResponsesOneShot(data) {
2989
+ const blocks = [];
2990
+ if (data.output && Array.isArray(data.output)) {
2991
+ data.output.forEach((outputItem) => {
2992
+ if (outputItem.type === "message" && outputItem.content) {
2993
+ outputItem.content.forEach((content) => {
2994
+ if (content.type === "output_text" && content.text) {
2995
+ blocks.push({ type: "text", text: content.text });
2996
+ }
2997
+ });
2998
+ }
2999
+ if (outputItem.type === "function_call") {
3000
+ blocks.push({
3001
+ type: "tool_use",
3002
+ id: outputItem.id,
3003
+ name: outputItem.name,
3004
+ input: outputItem.arguments ? JSON.parse(outputItem.arguments) : {}
3005
+ });
3006
+ }
3007
+ });
3008
+ }
3009
+ return {
3010
+ blocks,
3011
+ stop_reason: blocks.some((b) => b.type === "tool_use") ? "tool_use" : "end",
3012
+ truncated: data?.status === "incomplete",
3013
+ response_status: data?.status,
3014
+ incomplete_details: data?.incomplete_details ?? null,
3015
+ usage: data?.usage
3016
+ };
3017
+ }
3018
+
2829
3019
  // src/services/providers/openai/OpenAIChatService.ts
2830
3020
  var GPT5_RESPONSE_LENGTH_MIN_TOKENS = {
2831
3021
  [CHAT_RESPONSE_LENGTH.VERY_SHORT]: 800,
@@ -2896,7 +3086,7 @@ If it's in another language, summarize in that language.
2896
3086
  const isResponsesAPI = this.endpoint === ENDPOINT_OPENAI_RESPONSES_API;
2897
3087
  try {
2898
3088
  if (isResponsesAPI) {
2899
- const result = await this.parseResponsesStream(
3089
+ const result = await parseOpenAIResponsesStream(
2900
3090
  res,
2901
3091
  onPartialResponse
2902
3092
  );
@@ -2929,7 +3119,7 @@ If it's in another language, summarize in that language.
2929
3119
  const isResponsesAPI = this.endpoint === ENDPOINT_OPENAI_RESPONSES_API;
2930
3120
  try {
2931
3121
  if (isResponsesAPI) {
2932
- const result = await this.parseResponsesStream(
3122
+ const result = await parseOpenAIResponsesStream(
2933
3123
  res,
2934
3124
  onPartialResponse
2935
3125
  );
@@ -2990,7 +3180,7 @@ If it's in another language, summarize in that language.
2990
3180
  async parseResponse(res, stream, onPartialResponse) {
2991
3181
  const isResponsesAPI = this.endpoint === ENDPOINT_OPENAI_RESPONSES_API;
2992
3182
  if (isResponsesAPI) {
2993
- return stream ? this.parseResponsesStream(res, onPartialResponse) : this.parseResponsesOneShot(await res.json());
3183
+ return stream ? parseOpenAIResponsesStream(res, onPartialResponse) : parseOpenAIResponsesOneShot(await res.json());
2994
3184
  }
2995
3185
  return stream ? this.parseStream(res, onPartialResponse) : this.parseOneShot(await res.json());
2996
3186
  }
@@ -3184,184 +3374,6 @@ If it's in another language, summarize in that language.
3184
3374
  parseOneShot(data) {
3185
3375
  return parseOpenAICompatibleOneShot(data);
3186
3376
  }
3187
- /**
3188
- * Parse streaming response from Responses API (SSE format)
3189
- */
3190
- async parseResponsesStream(res, onPartial) {
3191
- const reader = res.body.getReader();
3192
- const dec = new TextDecoder();
3193
- const textBlocks = [];
3194
- const toolCallsMap = /* @__PURE__ */ new Map();
3195
- let responseStatus;
3196
- let incompleteDetails;
3197
- let usage;
3198
- let buf = "";
3199
- while (true) {
3200
- const { done, value } = await reader.read();
3201
- if (done) break;
3202
- buf += dec.decode(value, { stream: true });
3203
- let eventType = "";
3204
- let eventData = "";
3205
- const lines = buf.split("\n");
3206
- buf = lines.pop() || "";
3207
- for (let i = 0; i < lines.length; i++) {
3208
- const line = lines[i].trim();
3209
- if (line.startsWith("event:")) {
3210
- eventType = line.slice(6).trim();
3211
- } else if (line.startsWith("data:")) {
3212
- eventData = line.slice(5).trim();
3213
- } else if (line === "" && eventType && eventData) {
3214
- try {
3215
- const json = JSON.parse(eventData);
3216
- const completionResult = this.handleResponsesSSEEvent(
3217
- eventType,
3218
- json,
3219
- onPartial,
3220
- textBlocks,
3221
- toolCallsMap,
3222
- (metadata) => {
3223
- if (metadata.responseStatus !== void 0) {
3224
- responseStatus = metadata.responseStatus;
3225
- }
3226
- if (metadata.incompleteDetails !== void 0) {
3227
- incompleteDetails = metadata.incompleteDetails;
3228
- }
3229
- if (metadata.usage !== void 0) {
3230
- usage = metadata.usage;
3231
- }
3232
- }
3233
- );
3234
- if (completionResult === "completed") {
3235
- }
3236
- } catch (e) {
3237
- console.warn("Failed to parse SSE data:", eventData);
3238
- }
3239
- eventType = "";
3240
- eventData = "";
3241
- }
3242
- }
3243
- }
3244
- const toolBlocks = Array.from(toolCallsMap.values()).map(
3245
- (tool) => ({
3246
- type: "tool_use",
3247
- id: tool.id,
3248
- name: tool.name,
3249
- input: tool.input || {}
3250
- })
3251
- );
3252
- const blocks = [...textBlocks, ...toolBlocks];
3253
- return {
3254
- blocks,
3255
- stop_reason: toolBlocks.length ? "tool_use" : "end",
3256
- truncated: responseStatus === "incomplete",
3257
- response_status: responseStatus,
3258
- incomplete_details: incompleteDetails,
3259
- usage
3260
- };
3261
- }
3262
- /**
3263
- * Handle specific SSE events from Responses API
3264
- * @returns 'completed' if the response is completed, undefined otherwise
3265
- */
3266
- handleResponsesSSEEvent(eventType, data, onPartial, textBlocks, toolCallsMap, onMetadata) {
3267
- switch (eventType) {
3268
- // Item addition events
3269
- case "response.output_item.added":
3270
- if (data.item?.type === "message" && Array.isArray(data.item.content)) {
3271
- data.item.content.forEach((c) => {
3272
- if (c.type === "output_text" && c.text) {
3273
- onPartial(c.text);
3274
- StreamTextAccumulator.append(textBlocks, c.text);
3275
- }
3276
- });
3277
- } else if (data.item?.type === "function_call") {
3278
- toolCallsMap.set(data.item.id, {
3279
- id: data.item.id,
3280
- name: data.item.name,
3281
- input: data.item.arguments ? JSON.parse(data.item.arguments) : {}
3282
- });
3283
- }
3284
- break;
3285
- // Initial content part events
3286
- case "response.content_part.added":
3287
- if (data.part?.type === "output_text" && typeof data.part.text === "string") {
3288
- onPartial(data.part.text);
3289
- StreamTextAccumulator.append(textBlocks, data.part.text);
3290
- }
3291
- break;
3292
- // Text delta events
3293
- case "response.output_text.delta":
3294
- case "response.content_part.delta":
3295
- {
3296
- const deltaText = typeof data.delta === "string" ? data.delta : data.delta?.text ?? "";
3297
- if (deltaText) {
3298
- onPartial(deltaText);
3299
- StreamTextAccumulator.append(textBlocks, deltaText);
3300
- }
3301
- }
3302
- break;
3303
- // Text completion events - do not add text here as it's already accumulated via delta events
3304
- case "response.output_text.done":
3305
- case "response.content_part.done":
3306
- break;
3307
- // Response completion events
3308
- case "response.completed":
3309
- onMetadata(this.extractResponsesMetadata(data, "completed"));
3310
- return "completed";
3311
- case "response.incomplete":
3312
- onMetadata(this.extractResponsesMetadata(data, "incomplete"));
3313
- return "completed";
3314
- // GPT-5 reasoning token events (not visible but counted for billing)
3315
- case "response.reasoning.started":
3316
- case "response.reasoning.delta":
3317
- case "response.reasoning.done":
3318
- break;
3319
- default:
3320
- break;
3321
- }
3322
- return void 0;
3323
- }
3324
- extractResponsesMetadata(data, fallbackStatus) {
3325
- const response = data?.response ?? data;
3326
- return {
3327
- responseStatus: response?.status ?? fallbackStatus,
3328
- incompleteDetails: response?.incomplete_details ?? null,
3329
- usage: response?.usage
3330
- };
3331
- }
3332
- /**
3333
- * Parse non-streaming response from Responses API
3334
- */
3335
- parseResponsesOneShot(data) {
3336
- const blocks = [];
3337
- if (data.output && Array.isArray(data.output)) {
3338
- data.output.forEach((outputItem) => {
3339
- if (outputItem.type === "message" && outputItem.content) {
3340
- outputItem.content.forEach((content) => {
3341
- if (content.type === "output_text" && content.text) {
3342
- blocks.push({ type: "text", text: content.text });
3343
- }
3344
- });
3345
- }
3346
- if (outputItem.type === "function_call") {
3347
- blocks.push({
3348
- type: "tool_use",
3349
- id: outputItem.id,
3350
- name: outputItem.name,
3351
- input: outputItem.arguments ? JSON.parse(outputItem.arguments) : {}
3352
- });
3353
- }
3354
- });
3355
- }
3356
- return {
3357
- blocks,
3358
- stop_reason: blocks.some((b) => b.type === "tool_use") ? "tool_use" : "end",
3359
- truncated: data?.status === "incomplete",
3360
- response_status: data?.status,
3361
- incomplete_details: data?.incomplete_details ?? null,
3362
- usage: data?.usage
3363
- };
3364
- }
3365
3377
  };
3366
3378
 
3367
3379
  // src/services/providers/openaiCompatible/OpenAICompatibleChatServiceProvider.ts
@@ -3883,10 +3895,16 @@ If it's in another language, summarize in that language.
3883
3895
  */
3884
3896
  getSupportedModels() {
3885
3897
  return [
3898
+ // OpenRouter routing
3899
+ MODEL_OPENROUTER_AUTO,
3886
3900
  // Free models
3887
3901
  MODEL_GPT_OSS_20B_FREE,
3888
3902
  MODEL_ZAI_GLM_4_5_AIR_FREE,
3889
3903
  // OpenAI models
3904
+ MODEL_OPENAI_GPT_LATEST,
3905
+ MODEL_OPENAI_GPT_MINI_LATEST,
3906
+ MODEL_OPENAI_GPT_5_5_PRO,
3907
+ MODEL_OPENAI_GPT_5_5,
3890
3908
  MODEL_OPENAI_GPT_5_1_CHAT,
3891
3909
  MODEL_OPENAI_GPT_5_1_CODEX,
3892
3910
  MODEL_OPENAI_GPT_5_MINI,
@@ -3895,12 +3913,16 @@ If it's in another language, summarize in that language.
3895
3913
  MODEL_OPENAI_GPT_4_1_MINI,
3896
3914
  MODEL_OPENAI_GPT_4_1_NANO,
3897
3915
  // Anthropic models
3916
+ MODEL_ANTHROPIC_CLAUDE_SONNET_LATEST,
3917
+ MODEL_ANTHROPIC_CLAUDE_HAIKU_LATEST,
3898
3918
  MODEL_ANTHROPIC_CLAUDE_OPUS_4,
3899
3919
  MODEL_ANTHROPIC_CLAUDE_SONNET_4,
3900
3920
  MODEL_ANTHROPIC_CLAUDE_3_7_SONNET,
3901
3921
  MODEL_ANTHROPIC_CLAUDE_3_5_SONNET,
3902
3922
  MODEL_ANTHROPIC_CLAUDE_4_5_HAIKU,
3903
3923
  // Gemini models
3924
+ MODEL_GOOGLE_GEMINI_PRO_LATEST,
3925
+ MODEL_GOOGLE_GEMINI_FLASH_LATEST,
3904
3926
  MODEL_GOOGLE_GEMINI_2_5_PRO,
3905
3927
  MODEL_GOOGLE_GEMINI_2_5_FLASH,
3906
3928
  MODEL_GOOGLE_GEMINI_2_5_FLASH_LITE_PREVIEW_09_2025,
@@ -3908,6 +3930,7 @@ If it's in another language, summarize in that language.
3908
3930
  MODEL_ZAI_GLM_4_7_FLASH,
3909
3931
  MODEL_ZAI_GLM_4_5_AIR,
3910
3932
  // Other models
3933
+ MODEL_MOONSHOTAI_KIMI_LATEST,
3911
3934
  MODEL_MOONSHOTAI_KIMI_K2_5
3912
3935
  ];
3913
3936
  }
@@ -4139,6 +4162,7 @@ If it's in another language, summarize in that language.
4139
4162
  */
4140
4163
  getSupportedModels() {
4141
4164
  return [
4165
+ MODEL_GROK_4_3,
4142
4166
  MODEL_GROK_4_20_REASONING,
4143
4167
  MODEL_GROK_4_20_NON_REASONING,
4144
4168
  MODEL_GROK_4_1_FAST_REASONING,