@openrouter/ai-sdk-provider 1.2.8 → 1.3.0

This diff shows the changes between publicly available package versions as released to their respective public registries. It is provided for informational purposes only.
@@ -950,7 +950,7 @@ var OpenRouterProviderMetadataSchema = z3.object({
  cost: z3.number().optional(),
  costDetails: z3.object({
  upstreamInferenceCost: z3.number()
- }).passthrough()
+ }).passthrough().optional()
  }).passthrough()
  }).passthrough();
  var OpenRouterProviderOptionsSchema = z3.object({
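Note: `costDetails` on the metadata usage object is now optional, so provider metadata without upstream cost details no longer fails validation. A minimal standalone zod sketch of the relaxed pattern (simplified field set, not the package's full schema):

```ts
import { z } from "zod";

// Simplified sketch of the relaxed shape: costDetails may be absent entirely.
const UsageSchema = z
  .object({
    cost: z.number().optional(),
    costDetails: z
      .object({ upstreamInferenceCost: z.number() })
      .passthrough()
      .optional(),
  })
  .passthrough();

// Both parse; before 1.3.0 the analogous schema required costDetails to be present.
UsageSchema.parse({ cost: 0.002, costDetails: { upstreamInferenceCost: 0.0015 } });
UsageSchema.parse({ cost: 0.002 });
```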
@@ -1069,9 +1069,8 @@ function getCacheControl(providerMetadata) {
  return (_c = (_b = (_a15 = openrouter == null ? void 0 : openrouter.cacheControl) != null ? _a15 : openrouter == null ? void 0 : openrouter.cache_control) != null ? _b : anthropic == null ? void 0 : anthropic.cacheControl) != null ? _c : anthropic == null ? void 0 : anthropic.cache_control;
  }
  function convertToOpenRouterChatMessages(prompt) {
- var _a15, _b, _c, _d, _e, _f;
+ var _a15, _b, _c, _d, _e, _f, _g, _h;
  const messages = [];
- const accumulatedReasoningDetails = [];
  for (const { role, content, providerOptions } of prompt) {
  switch (role) {
  case "system": {
@@ -1101,7 +1100,7 @@ function convertToOpenRouterChatMessages(prompt) {
  const messageCacheControl = getCacheControl(providerOptions);
  const contentParts = content.map(
  (part) => {
- var _a16, _b2, _c2, _d2, _e2, _f2, _g;
+ var _a16, _b2, _c2, _d2, _e2, _f2, _g2;
  const cacheControl = (_a16 = getCacheControl(part.providerOptions)) != null ? _a16 : messageCacheControl;
  switch (part.type) {
  case "text":
@@ -1134,7 +1133,7 @@ function convertToOpenRouterChatMessages(prompt) {
  };
  }
  const fileName = String(
- (_g = (_f2 = (_e2 = (_d2 = part.providerOptions) == null ? void 0 : _d2.openrouter) == null ? void 0 : _e2.filename) != null ? _f2 : part.filename) != null ? _g : ""
+ (_g2 = (_f2 = (_e2 = (_d2 = part.providerOptions) == null ? void 0 : _d2.openrouter) == null ? void 0 : _e2.filename) != null ? _f2 : part.filename) != null ? _g2 : ""
  );
  const fileData = getFileUrl({
  part,
@@ -1181,6 +1180,7 @@ function convertToOpenRouterChatMessages(prompt) {
  let text = "";
  let reasoning = "";
  const toolCalls = [];
+ const accumulatedReasoningDetails = [];
  for (const part of content) {
  switch (part.type) {
  case "text": {
@@ -1206,6 +1206,12 @@ function convertToOpenRouterChatMessages(prompt) {
  }
  case "reasoning": {
  reasoning += part.text;
+ const parsedPartProviderOptions = OpenRouterProviderOptionsSchema.safeParse(part.providerOptions);
+ if (parsedPartProviderOptions.success && ((_e = (_d = parsedPartProviderOptions.data) == null ? void 0 : _d.openrouter) == null ? void 0 : _e.reasoning_details)) {
+ accumulatedReasoningDetails.push(
+ ...parsedPartProviderOptions.data.openrouter.reasoning_details
+ );
+ }
  break;
  }
  case "file":
@@ -1216,7 +1222,7 @@ function convertToOpenRouterChatMessages(prompt) {
  }
  }
  const parsedProviderOptions = OpenRouterProviderOptionsSchema.safeParse(providerOptions);
- const messageReasoningDetails = parsedProviderOptions.success ? (_e = (_d = parsedProviderOptions.data) == null ? void 0 : _d.openrouter) == null ? void 0 : _e.reasoning_details : void 0;
+ const messageReasoningDetails = parsedProviderOptions.success ? (_g = (_f = parsedProviderOptions.data) == null ? void 0 : _f.openrouter) == null ? void 0 : _g.reasoning_details : void 0;
  const finalReasoningDetails = messageReasoningDetails && Array.isArray(messageReasoningDetails) && messageReasoningDetails.length > 0 ? messageReasoningDetails : accumulatedReasoningDetails.length > 0 ? accumulatedReasoningDetails : void 0;
  messages.push({
  role: "assistant",
@@ -1235,7 +1241,7 @@ function convertToOpenRouterChatMessages(prompt) {
  role: "tool",
  tool_call_id: toolResponse.toolCallId,
  content: content2,
- cache_control: (_f = getCacheControl(providerOptions)) != null ? _f : getCacheControl(toolResponse.providerOptions)
+ cache_control: (_h = getCacheControl(providerOptions)) != null ? _h : getCacheControl(toolResponse.providerOptions)
  });
  }
  break;
@@ -1551,7 +1557,9 @@ var OpenRouterChatLanguageModel = class {
  plugins: this.settings.plugins,
  web_search_options: this.settings.web_search_options,
  // Provider routing settings:
- provider: this.settings.provider
+ provider: this.settings.provider,
+ // Debug settings:
+ debug: this.settings.debug
  }, this.config.extraBody), this.settings.extraBody);
  if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null) {
  return __spreadProps(__spreadValues({}, baseArgs), {
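The chat model now forwards `settings.debug` into the request body next to the provider routing settings. A sketch of passing it through; `createOpenRouter`, `openrouter.chat`, and `generateText` are the package's and AI SDK's existing entry points, but the accepted shape of the `debug` value is not visible in this diff, so the literal below is a placeholder (hence the cast):

```ts
import { createOpenRouter } from "@openrouter/ai-sdk-provider";
import { generateText } from "ai";

const openrouter = createOpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });

// `debug` is copied into the request body as-is; consult the package's
// settings type / OpenRouter docs for the values it actually accepts.
const model = openrouter.chat("openai/gpt-4o-mini", {
  debug: true,
} as never);

const { text } = await generateText({ model, prompt: "ping" });
console.log(text);
```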
@@ -1586,7 +1594,7 @@ var OpenRouterChatLanguageModel = class {
  return baseArgs;
  }
  async doGenerate(options) {
- var _a15, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y;
+ var _a15, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v;
  const providerOptions = options.providerOptions || {};
  const openrouterOptions = providerOptions.openrouter || {};
  const args = __spreadValues(__spreadValues({}, this.getArgs(options)), openrouterOptions);
@@ -1753,21 +1761,24 @@ var OpenRouterChatLanguageModel = class {
  openrouter: OpenRouterProviderMetadataSchema.parse({
  provider: (_k = response.provider) != null ? _k : "",
  reasoning_details: (_l = choice.message.reasoning_details) != null ? _l : [],
- usage: {
+ usage: __spreadValues(__spreadValues(__spreadValues({
  promptTokens: (_m = usageInfo.inputTokens) != null ? _m : 0,
  completionTokens: (_n = usageInfo.outputTokens) != null ? _n : 0,
  totalTokens: (_o = usageInfo.totalTokens) != null ? _o : 0,
- cost: (_p = response.usage) == null ? void 0 : _p.cost,
+ cost: (_p = response.usage) == null ? void 0 : _p.cost
+ }, ((_r = (_q = response.usage) == null ? void 0 : _q.prompt_tokens_details) == null ? void 0 : _r.cached_tokens) != null ? {
  promptTokensDetails: {
- cachedTokens: (_s = (_r = (_q = response.usage) == null ? void 0 : _q.prompt_tokens_details) == null ? void 0 : _r.cached_tokens) != null ? _s : 0
- },
+ cachedTokens: response.usage.prompt_tokens_details.cached_tokens
+ }
+ } : {}), ((_t = (_s = response.usage) == null ? void 0 : _s.completion_tokens_details) == null ? void 0 : _t.reasoning_tokens) != null ? {
  completionTokensDetails: {
- reasoningTokens: (_v = (_u = (_t = response.usage) == null ? void 0 : _t.completion_tokens_details) == null ? void 0 : _u.reasoning_tokens) != null ? _v : 0
- },
+ reasoningTokens: response.usage.completion_tokens_details.reasoning_tokens
+ }
+ } : {}), ((_v = (_u = response.usage) == null ? void 0 : _u.cost_details) == null ? void 0 : _v.upstream_inference_cost) != null ? {
  costDetails: {
- upstreamInferenceCost: (_y = (_x = (_w = response.usage) == null ? void 0 : _w.cost_details) == null ? void 0 : _x.upstream_inference_cost) != null ? _y : 0
+ upstreamInferenceCost: response.usage.cost_details.upstream_inference_cost
  }
- }
+ } : {})
  })
  },
  request: { body: args },
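With this change, `promptTokensDetails`, `completionTokensDetails`, and `costDetails` only appear in the OpenRouter usage metadata when the upstream response actually reports them (previously their inner values were defaulted to 0). Reading code should treat them as optional; a sketch using the standard AI SDK call, with field names taken from the metadata assembled above and the narrowing type written here only for illustration:

```ts
import { createOpenRouter } from "@openrouter/ai-sdk-provider";
import { generateText } from "ai";

const openrouter = createOpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });

const result = await generateText({
  model: openrouter.chat("openai/gpt-4o-mini"),
  prompt: "Say hi",
});

// Provider metadata is plain JSON, so narrow before use; the detail objects
// are present only when the upstream response reported them.
const usage = result.providerMetadata?.openrouter?.usage as
  | {
      cost?: number;
      promptTokensDetails?: { cachedTokens: number };
      completionTokensDetails?: { reasoningTokens: number };
      costDetails?: { upstreamInferenceCost: number };
    }
  | undefined;

console.log("cost:", usage?.cost);
console.log("cached prompt tokens:", usage?.promptTokensDetails?.cachedTokens);
console.log("reasoning tokens:", usage?.completionTokensDetails?.reasoningTokens);
console.log("upstream inference cost:", usage?.costDetails?.upstreamInferenceCost);
```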
@@ -1824,7 +1835,7 @@ var OpenRouterChatLanguageModel = class {
  stream: response.pipeThrough(
  new TransformStream({
  transform(chunk, controller) {
- var _a16, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
+ var _a16, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o;
  if (!chunk.success) {
  finishReason = "error";
  controller.enqueue({ type: "error", error: chunk.error });
@@ -1874,6 +1885,12 @@ var OpenRouterChatLanguageModel = class {
  }
  openrouterUsage.cost = value.usage.cost;
  openrouterUsage.totalTokens = value.usage.total_tokens;
+ const upstreamInferenceCost = (_c = value.usage.cost_details) == null ? void 0 : _c.upstream_inference_cost;
+ if (upstreamInferenceCost != null) {
+ openrouterUsage.costDetails = {
+ upstreamInferenceCost
+ };
+ }
  }
  const choice = value.choices[0];
  if ((choice == null ? void 0 : choice.finish_reason) != null) {
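The streaming path now mirrors the generate path: when the final usage chunk includes `cost_details.upstream_inference_cost`, it is exposed as `costDetails.upstreamInferenceCost` on the accumulated OpenRouter usage. A sketch of observing it after a `streamText` call, assuming the accumulated usage continues to surface through the stream's provider metadata as in 1.2.x:

```ts
import { createOpenRouter } from "@openrouter/ai-sdk-provider";
import { streamText } from "ai";

const openrouter = createOpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });

const result = streamText({
  model: openrouter.chat("openai/gpt-4o-mini"),
  prompt: "Write one sentence about rivers.",
});

for await (const textPart of result.textStream) {
  process.stdout.write(textPart);
}

// Resolves once the stream finishes; costDetails is only set when the final
// usage chunk carried cost_details.upstream_inference_cost.
const metadata = await result.providerMetadata;
console.log(metadata?.openrouter?.usage);
```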
@@ -1883,16 +1900,18 @@ var OpenRouterChatLanguageModel = class {
  return;
  }
  const delta = choice.delta;
- const emitReasoningChunk = (chunkText) => {
+ const emitReasoningChunk = (chunkText, providerMetadata) => {
  if (!reasoningStarted) {
  reasoningId = openrouterResponseId || generateId();
  controller.enqueue({
+ providerMetadata,
  type: "reasoning-start",
  id: reasoningId
  });
  reasoningStarted = true;
  }
  controller.enqueue({
+ providerMetadata,
  type: "reasoning-delta",
  delta: chunkText,
  id: reasoningId || generateId()
@@ -1913,23 +1932,28 @@ var OpenRouterChatLanguageModel = class {
  accumulatedReasoningDetails.push(detail);
  }
  }
+ const reasoningMetadata = {
+ openrouter: {
+ reasoning_details: delta.reasoning_details
+ }
+ };
  for (const detail of delta.reasoning_details) {
  switch (detail.type) {
  case "reasoning.text" /* Text */: {
  if (detail.text) {
- emitReasoningChunk(detail.text);
+ emitReasoningChunk(detail.text, reasoningMetadata);
  }
  break;
  }
  case "reasoning.encrypted" /* Encrypted */: {
  if (detail.data) {
- emitReasoningChunk("[REDACTED]");
+ emitReasoningChunk("[REDACTED]", reasoningMetadata);
  }
  break;
  }
  case "reasoning.summary" /* Summary */: {
  if (detail.summary) {
- emitReasoningChunk(detail.summary);
+ emitReasoningChunk(detail.summary, reasoningMetadata);
  }
  break;
  }
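Streamed `reasoning-start` and `reasoning-delta` parts now carry provider metadata with the raw `reasoning_details` from the current delta, so consumers can collect them (for example, to replay them on later turns as sketched earlier). A sketch of reading them from the full stream; the model id is only an example, and it assumes the AI SDK forwards `providerMetadata` on reasoning stream parts:

```ts
import { createOpenRouter } from "@openrouter/ai-sdk-provider";
import { streamText } from "ai";

const openrouter = createOpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });

const result = streamText({
  model: openrouter.chat("anthropic/claude-3.7-sonnet"), // example reasoning-capable model
  prompt: "Think step by step: what is 17 * 23?",
});

const reasoningDetails: unknown[] = [];

for await (const part of result.fullStream) {
  if (part.type === "reasoning-delta") {
    // New in 1.3.0: the delta carries the reasoning_details it was derived from.
    const details = part.providerMetadata?.openrouter?.reasoning_details;
    if (Array.isArray(details)) {
      reasoningDetails.push(...details);
    }
  }
}

console.log(`collected ${reasoningDetails.length} reasoning detail entries`);
```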
@@ -1984,7 +2008,7 @@ var OpenRouterChatLanguageModel = class {
  }
  if (delta.tool_calls != null) {
  for (const toolCallDelta of delta.tool_calls) {
- const index = (_c = toolCallDelta.index) != null ? _c : toolCalls.length - 1;
+ const index = (_d = toolCallDelta.index) != null ? _d : toolCalls.length - 1;
  if (toolCalls[index] == null) {
  if (toolCallDelta.type !== "function") {
  throw new InvalidResponseDataError({
@@ -1998,7 +2022,7 @@ var OpenRouterChatLanguageModel = class {
  message: `Expected 'id' to be a string.`
  });
  }
- if (((_d = toolCallDelta.function) == null ? void 0 : _d.name) == null) {
+ if (((_e = toolCallDelta.function) == null ? void 0 : _e.name) == null) {
  throw new InvalidResponseDataError({
  data: toolCallDelta,
  message: `Expected 'function.name' to be a string.`
@@ -2009,7 +2033,7 @@ var OpenRouterChatLanguageModel = class {
  type: "function",
  function: {
  name: toolCallDelta.function.name,
- arguments: (_e = toolCallDelta.function.arguments) != null ? _e : ""
+ arguments: (_f = toolCallDelta.function.arguments) != null ? _f : ""
  },
  inputStarted: false,
  sent: false
@@ -2021,7 +2045,7 @@ var OpenRouterChatLanguageModel = class {
  message: `Tool call at index ${index} is missing after creation.`
  });
  }
- if (((_f = toolCall2.function) == null ? void 0 : _f.name) != null && ((_g = toolCall2.function) == null ? void 0 : _g.arguments) != null && isParsableJson(toolCall2.function.arguments)) {
+ if (((_g = toolCall2.function) == null ? void 0 : _g.name) != null && ((_h = toolCall2.function) == null ? void 0 : _h.arguments) != null && isParsableJson(toolCall2.function.arguments)) {
  toolCall2.inputStarted = true;
  controller.enqueue({
  type: "tool-input-start",
@@ -2071,18 +2095,18 @@ var OpenRouterChatLanguageModel = class {
  toolName: toolCall.function.name
  });
  }
- if (((_h = toolCallDelta.function) == null ? void 0 : _h.arguments) != null) {
- toolCall.function.arguments += (_j = (_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null ? _j : "";
+ if (((_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null) {
+ toolCall.function.arguments += (_k = (_j = toolCallDelta.function) == null ? void 0 : _j.arguments) != null ? _k : "";
  }
  controller.enqueue({
  type: "tool-input-delta",
  id: toolCall.id,
- delta: (_k = toolCallDelta.function.arguments) != null ? _k : ""
+ delta: (_l = toolCallDelta.function.arguments) != null ? _l : ""
  });
- if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && isParsableJson(toolCall.function.arguments)) {
+ if (((_m = toolCall.function) == null ? void 0 : _m.name) != null && ((_n = toolCall.function) == null ? void 0 : _n.arguments) != null && isParsableJson(toolCall.function.arguments)) {
  controller.enqueue({
  type: "tool-call",
- toolCallId: (_n = toolCall.id) != null ? _n : generateId(),
+ toolCallId: (_o = toolCall.id) != null ? _o : generateId(),
  toolName: toolCall.function.name,
  input: toolCall.function.arguments,
  providerMetadata: {
@@ -2298,7 +2322,10 @@ var OpenRouterCompletionChunkSchema = z8.union([
  reasoning_tokens: z8.number()
  }).passthrough().nullish(),
  total_tokens: z8.number(),
- cost: z8.number().optional()
+ cost: z8.number().optional(),
+ cost_details: z8.object({
+ upstream_inference_cost: z8.number().nullish()
+ }).passthrough().nullish()
  }).passthrough().nullish()
  }).passthrough(),
  OpenRouterErrorResponseSchema
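The completion chunk schema gains the same optional `cost_details` object on `usage` that the chat schema has, so `upstream_inference_cost` is validated and typed rather than only passing through untyped. A standalone zod sketch restricted to the fields visible in this hunk:

```ts
import { z } from "zod";

// Only the fields shown in the hunk; the real schema has more.
const UsageChunkSchema = z
  .object({
    total_tokens: z.number(),
    cost: z.number().optional(),
    cost_details: z
      .object({ upstream_inference_cost: z.number().nullish() })
      .passthrough()
      .nullish(),
  })
  .passthrough();

// cost_details may be absent, null, or populated.
UsageChunkSchema.parse({ total_tokens: 8 });
UsageChunkSchema.parse({ total_tokens: 8, cost: 0.0004, cost_details: null });
UsageChunkSchema.parse({ total_tokens: 8, cost: 0.0004, cost_details: { upstream_inference_cost: 0.0003 } });
```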
@@ -2471,7 +2498,7 @@ var OpenRouterCompletionLanguageModel = class {
  stream: response.pipeThrough(
  new TransformStream({
  transform(chunk, controller) {
- var _a15, _b;
+ var _a15, _b, _c;
  if (!chunk.success) {
  finishReason = "error";
  controller.enqueue({ type: "error", error: chunk.error });
@@ -2505,6 +2532,12 @@ var OpenRouterCompletionLanguageModel = class {
  }
  openrouterUsage.cost = value.usage.cost;
  openrouterUsage.totalTokens = value.usage.total_tokens;
+ const upstreamInferenceCost = (_c = value.usage.cost_details) == null ? void 0 : _c.upstream_inference_cost;
+ if (upstreamInferenceCost != null) {
+ openrouterUsage.costDetails = {
+ upstreamInferenceCost
+ };
+ }
  }
  const choice = value.choices[0];
  if ((choice == null ? void 0 : choice.finish_reason) != null) {