@openrouter/ai-sdk-provider 2.5.1 → 2.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -537,6 +537,40 @@ type OpenRouterSharedSettings = OpenRouterProviderOptions & {
537
537
  */
538
538
  include: boolean;
539
539
  };
540
+ /**
541
+ * Default temperature for model calls. Controls randomness in the output.
542
+ * Can be overridden at call time via generateText/streamText options.
543
+ * Range: 0 to 2, where 0 is deterministic and higher values are more random.
544
+ */
545
+ temperature?: number;
546
+ /**
547
+ * Default top-p (nucleus sampling) for model calls.
548
+ * Can be overridden at call time via generateText/streamText options.
549
+ * Range: 0 to 1.
550
+ */
551
+ topP?: number;
552
+ /**
553
+ * Default top-k sampling for model calls.
554
+ * Can be overridden at call time via generateText/streamText options.
555
+ */
556
+ topK?: number;
557
+ /**
558
+ * Default frequency penalty for model calls.
559
+ * Can be overridden at call time via generateText/streamText options.
560
+ * Range: -2 to 2.
561
+ */
562
+ frequencyPenalty?: number;
563
+ /**
564
+ * Default presence penalty for model calls.
565
+ * Can be overridden at call time via generateText/streamText options.
566
+ * Range: -2 to 2.
567
+ */
568
+ presencePenalty?: number;
569
+ /**
570
+ * Default maximum number of tokens to generate.
571
+ * Can be overridden at call time via generateText/streamText options.
572
+ */
573
+ maxTokens?: number;
540
574
  };
541
575
  /**
542
576
  * Usage accounting response
@@ -537,6 +537,40 @@ type OpenRouterSharedSettings (duplicate hunk — identical to the hunk shown above; repeated here for a second emitted bundle of the same declaration)
@@ -2441,6 +2441,29 @@ function withStreamErrorHandling(source, onError) {
2441
2441
  });
2442
2442
  }
2443
2443
 
2444
+ // src/utils/deterministic-stringify.ts
2445
+ function deterministicStringify(value) {
2446
+ return JSON.stringify(sortKeys(value));
2447
+ }
2448
+ function sortKeys(value) {
2449
+ if (value === null || value === void 0) {
2450
+ return value;
2451
+ }
2452
+ if (Array.isArray(value)) {
2453
+ return value.map(sortKeys);
2454
+ }
2455
+ if (typeof value === "object") {
2456
+ const sorted = {};
2457
+ const entries = Object.entries(value);
2458
+ entries.sort(([a], [b]) => a.localeCompare(b));
2459
+ for (const [key, val] of entries) {
2460
+ sorted[key] = sortKeys(val);
2461
+ }
2462
+ return sorted;
2463
+ }
2464
+ return value;
2465
+ }
2466
+
2444
2467
  // src/utils/reasoning-details-duplicate-tracker.ts
2445
2468
  var _seenKeys;
2446
2469
  var ReasoningDetailsDuplicateTracker = class {
@@ -2772,7 +2795,7 @@ function convertToOpenRouterChatMessages(prompt) {
2772
2795
  type: "function",
2773
2796
  function: {
2774
2797
  name: part.toolName,
2775
- arguments: JSON.stringify(part.input)
2798
+ arguments: deterministicStringify(part.input)
2776
2799
  }
2777
2800
  });
2778
2801
  break;
@@ -3288,7 +3311,7 @@ var OpenRouterChatLanguageModel = class {
3288
3311
  tools,
3289
3312
  toolChoice
3290
3313
  }) {
3291
- var _a16;
3314
+ var _a16, _b16;
3292
3315
  const baseArgs = __spreadValues(__spreadValues({
3293
3316
  // model id:
3294
3317
  model: this.modelId,
@@ -3299,12 +3322,12 @@ var OpenRouterChatLanguageModel = class {
3299
3322
  top_logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
3300
3323
  user: this.settings.user,
3301
3324
  parallel_tool_calls: this.settings.parallelToolCalls,
3302
- // standardized settings:
3303
- max_tokens: maxOutputTokens,
3304
- temperature,
3305
- top_p: topP,
3306
- frequency_penalty: frequencyPenalty,
3307
- presence_penalty: presencePenalty,
3325
+ // standardized settings (call-level options override model-level settings):
3326
+ max_tokens: maxOutputTokens != null ? maxOutputTokens : this.settings.maxTokens,
3327
+ temperature: temperature != null ? temperature : this.settings.temperature,
3328
+ top_p: topP != null ? topP : this.settings.topP,
3329
+ frequency_penalty: frequencyPenalty != null ? frequencyPenalty : this.settings.frequencyPenalty,
3330
+ presence_penalty: presencePenalty != null ? presencePenalty : this.settings.presencePenalty,
3308
3331
  seed,
3309
3332
  stop: stopSequences,
3310
3333
  response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? responseFormat.schema != null ? {
@@ -3317,7 +3340,7 @@ var OpenRouterChatLanguageModel = class {
3317
3340
  description: responseFormat.description
3318
3341
  })
3319
3342
  } : { type: "json_object" } : void 0,
3320
- top_k: topK,
3343
+ top_k: topK != null ? topK : this.settings.topK,
3321
3344
  // messages:
3322
3345
  messages: convertToOpenRouterChatMessages(prompt),
3323
3346
  // OpenRouter specific settings:
@@ -3335,16 +3358,25 @@ var OpenRouterChatLanguageModel = class {
3335
3358
  cache_control: this.settings.cache_control
3336
3359
  }, this.config.extraBody), this.settings.extraBody);
3337
3360
  if (tools && tools.length > 0) {
3338
- const mappedTools = tools.filter(
3339
- (tool) => tool.type === "function"
3340
- ).map((tool) => ({
3341
- type: "function",
3342
- function: {
3343
- name: tool.name,
3344
- description: tool.description,
3345
- parameters: tool.inputSchema
3361
+ const mappedTools = [];
3362
+ for (const tool of tools) {
3363
+ if (tool.type === "function") {
3364
+ const openrouterOptions = (_b16 = tool.providerOptions) == null ? void 0 : _b16.openrouter;
3365
+ const eagerInputStreaming = openrouterOptions == null ? void 0 : openrouterOptions.eager_input_streaming;
3366
+ mappedTools.push(__spreadValues({
3367
+ type: "function",
3368
+ function: {
3369
+ name: tool.name,
3370
+ description: tool.description,
3371
+ parameters: tool.inputSchema
3372
+ }
3373
+ }, eagerInputStreaming != null && {
3374
+ eager_input_streaming: eagerInputStreaming
3375
+ }));
3376
+ } else if (tool.type === "provider") {
3377
+ mappedTools.push(mapProviderTool(tool));
3346
3378
  }
3347
- }));
3379
+ }
3348
3380
  return __spreadProps(__spreadValues({}, baseArgs), {
3349
3381
  tools: mappedTools,
3350
3382
  tool_choice: toolChoice ? getChatCompletionToolChoice(toolChoice) : void 0
@@ -3353,7 +3385,7 @@ var OpenRouterChatLanguageModel = class {
3353
3385
  return baseArgs;
3354
3386
  }
3355
3387
  async doGenerate(options) {
3356
- var _b16, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w;
3388
+ var _b16, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v;
3357
3389
  const providerOptions = options.providerOptions || {};
3358
3390
  const openrouterOptions = providerOptions.openrouter || {};
3359
3391
  const _a16 = openrouterOptions, { cacheControl } = _a16, restOpenrouterOptions = __objRest(_a16, ["cacheControl"]);
@@ -3449,12 +3481,18 @@ var OpenRouterChatLanguageModel = class {
3449
3481
  }
3450
3482
  if (choice.message.tool_calls) {
3451
3483
  let reasoningDetailsAttachedToToolCall = false;
3484
+ const seenToolCallIds = /* @__PURE__ */ new Set();
3452
3485
  for (const toolCall of choice.message.tool_calls) {
3486
+ let toolCallId = toolCall.id;
3487
+ if (!toolCallId || seenToolCallIds.has(toolCallId)) {
3488
+ toolCallId = generateId();
3489
+ }
3490
+ seenToolCallIds.add(toolCallId);
3453
3491
  content.push({
3454
3492
  type: "tool-call",
3455
- toolCallId: (_c = toolCall.id) != null ? _c : generateId(),
3493
+ toolCallId,
3456
3494
  toolName: toolCall.function.name,
3457
- input: (_d = toolCall.function.arguments) != null ? _d : "{}",
3495
+ input: (_c = toolCall.function.arguments) != null ? _c : "{}",
3458
3496
  providerMetadata: !reasoningDetailsAttachedToToolCall ? {
3459
3497
  openrouter: {
3460
3498
  reasoning_details: reasoningDetails
@@ -3481,19 +3519,19 @@ var OpenRouterChatLanguageModel = class {
3481
3519
  sourceType: "url",
3482
3520
  id: annotation.url_citation.url,
3483
3521
  url: annotation.url_citation.url,
3484
- title: (_e = annotation.url_citation.title) != null ? _e : "",
3522
+ title: (_d = annotation.url_citation.title) != null ? _d : "",
3485
3523
  providerMetadata: {
3486
3524
  openrouter: {
3487
- content: (_f = annotation.url_citation.content) != null ? _f : "",
3488
- startIndex: (_g = annotation.url_citation.start_index) != null ? _g : 0,
3489
- endIndex: (_h = annotation.url_citation.end_index) != null ? _h : 0
3525
+ content: (_e = annotation.url_citation.content) != null ? _e : "",
3526
+ startIndex: (_f = annotation.url_citation.start_index) != null ? _f : 0,
3527
+ endIndex: (_g = annotation.url_citation.end_index) != null ? _g : 0
3490
3528
  }
3491
3529
  }
3492
3530
  });
3493
3531
  }
3494
3532
  }
3495
3533
  }
3496
- const fileAnnotations = (_i = choice.message.annotations) == null ? void 0 : _i.filter(
3534
+ const fileAnnotations = (_h = choice.message.annotations) == null ? void 0 : _h.filter(
3497
3535
  (a) => a.type === "file"
3498
3536
  );
3499
3537
  const hasToolCalls = choice.message.tool_calls && choice.message.tool_calls.length > 0;
@@ -3501,7 +3539,7 @@ var OpenRouterChatLanguageModel = class {
3501
3539
  (d) => d.type === "reasoning.encrypted" /* Encrypted */ && d.data
3502
3540
  );
3503
3541
  const shouldOverrideFinishReason = hasToolCalls && hasEncryptedReasoning && choice.finish_reason === "stop";
3504
- const mappedFinishReason = shouldOverrideFinishReason ? createFinishReason("tool-calls", (_j = choice.finish_reason) != null ? _j : void 0) : mapOpenRouterFinishReason(choice.finish_reason);
3542
+ const mappedFinishReason = shouldOverrideFinishReason ? createFinishReason("tool-calls", (_i = choice.finish_reason) != null ? _i : void 0) : mapOpenRouterFinishReason(choice.finish_reason);
3505
3543
  const effectiveFinishReason = hasToolCalls && mappedFinishReason.unified === "other" ? createFinishReason("tool-calls", mappedFinishReason.raw) : mappedFinishReason;
3506
3544
  return {
3507
3545
  content,
@@ -3510,22 +3548,22 @@ var OpenRouterChatLanguageModel = class {
3510
3548
  warnings: [],
3511
3549
  providerMetadata: {
3512
3550
  openrouter: OpenRouterProviderMetadataSchema.parse({
3513
- provider: (_k = response.provider) != null ? _k : "",
3514
- reasoning_details: (_l = choice.message.reasoning_details) != null ? _l : [],
3551
+ provider: (_j = response.provider) != null ? _j : "",
3552
+ reasoning_details: (_k = choice.message.reasoning_details) != null ? _k : [],
3515
3553
  annotations: fileAnnotations && fileAnnotations.length > 0 ? fileAnnotations : void 0,
3516
3554
  usage: __spreadValues(__spreadValues(__spreadValues(__spreadValues({
3517
- promptTokens: (_m = usageInfo.inputTokens.total) != null ? _m : 0,
3518
- completionTokens: (_n = usageInfo.outputTokens.total) != null ? _n : 0,
3519
- totalTokens: ((_o = usageInfo.inputTokens.total) != null ? _o : 0) + ((_p = usageInfo.outputTokens.total) != null ? _p : 0)
3520
- }, ((_q = response.usage) == null ? void 0 : _q.cost) != null ? { cost: response.usage.cost } : {}), ((_s = (_r = response.usage) == null ? void 0 : _r.prompt_tokens_details) == null ? void 0 : _s.cached_tokens) != null ? {
3555
+ promptTokens: (_l = usageInfo.inputTokens.total) != null ? _l : 0,
3556
+ completionTokens: (_m = usageInfo.outputTokens.total) != null ? _m : 0,
3557
+ totalTokens: ((_n = usageInfo.inputTokens.total) != null ? _n : 0) + ((_o = usageInfo.outputTokens.total) != null ? _o : 0)
3558
+ }, ((_p = response.usage) == null ? void 0 : _p.cost) != null ? { cost: response.usage.cost } : {}), ((_r = (_q = response.usage) == null ? void 0 : _q.prompt_tokens_details) == null ? void 0 : _r.cached_tokens) != null ? {
3521
3559
  promptTokensDetails: {
3522
3560
  cachedTokens: response.usage.prompt_tokens_details.cached_tokens
3523
3561
  }
3524
- } : {}), ((_u = (_t = response.usage) == null ? void 0 : _t.completion_tokens_details) == null ? void 0 : _u.reasoning_tokens) != null ? {
3562
+ } : {}), ((_t = (_s = response.usage) == null ? void 0 : _s.completion_tokens_details) == null ? void 0 : _t.reasoning_tokens) != null ? {
3525
3563
  completionTokensDetails: {
3526
3564
  reasoningTokens: response.usage.completion_tokens_details.reasoning_tokens
3527
3565
  }
3528
- } : {}), ((_w = (_v = response.usage) == null ? void 0 : _v.cost_details) == null ? void 0 : _w.upstream_inference_cost) != null ? {
3566
+ } : {}), ((_v = (_u = response.usage) == null ? void 0 : _u.cost_details) == null ? void 0 : _v.upstream_inference_cost) != null ? {
3529
3567
  costDetails: {
3530
3568
  upstreamInferenceCost: response.usage.cost_details.upstream_inference_cost
3531
3569
  }
@@ -3572,6 +3610,7 @@ var OpenRouterChatLanguageModel = class {
3572
3610
  streamError = err;
3573
3611
  });
3574
3612
  const toolCalls = [];
3613
+ const seenToolCallIds = /* @__PURE__ */ new Set();
3575
3614
  let finishReason = createFinishReason("other");
3576
3615
  const usage = {
3577
3616
  inputTokens: {
@@ -3671,18 +3710,16 @@ var OpenRouterChatLanguageModel = class {
3671
3710
  return;
3672
3711
  }
3673
3712
  const delta = choice.delta;
3674
- const emitReasoningChunk = (chunkText, providerMetadata) => {
3713
+ const emitReasoningChunk = (chunkText) => {
3675
3714
  if (!reasoningStarted) {
3676
3715
  reasoningId = generateId();
3677
3716
  controller.enqueue({
3678
- providerMetadata,
3679
3717
  type: "reasoning-start",
3680
3718
  id: reasoningId
3681
3719
  });
3682
3720
  reasoningStarted = true;
3683
3721
  }
3684
3722
  controller.enqueue({
3685
- providerMetadata,
3686
3723
  type: "reasoning-delta",
3687
3724
  delta: chunkText,
3688
3725
  id: reasoningId || generateId()
@@ -3704,15 +3741,10 @@ var OpenRouterChatLanguageModel = class {
3704
3741
  }
3705
3742
  }
3706
3743
  if (!textStarted) {
3707
- const reasoningMetadata = {
3708
- openrouter: {
3709
- reasoning_details: accumulatedReasoningDetails.map((d) => __spreadValues({}, d))
3710
- }
3711
- };
3712
3744
  for (const detail of delta.reasoning_details) {
3713
3745
  switch (detail.type) {
3714
3746
  case "reasoning.text" /* Text */: {
3715
- emitReasoningChunk(detail.text || "", reasoningMetadata);
3747
+ emitReasoningChunk(detail.text || "");
3716
3748
  break;
3717
3749
  }
3718
3750
  case "reasoning.encrypted" /* Encrypted */: {
@@ -3720,7 +3752,7 @@ var OpenRouterChatLanguageModel = class {
3720
3752
  }
3721
3753
  case "reasoning.summary" /* Summary */: {
3722
3754
  if (detail.summary) {
3723
- emitReasoningChunk(detail.summary, reasoningMetadata);
3755
+ emitReasoningChunk(detail.summary);
3724
3756
  }
3725
3757
  break;
3726
3758
  }
@@ -3802,24 +3834,23 @@ var OpenRouterChatLanguageModel = class {
3802
3834
  message: `Expected 'function' type.`
3803
3835
  });
3804
3836
  }
3805
- if (toolCallDelta.id == null) {
3806
- throw new InvalidResponseDataError({
3807
- data: toolCallDelta,
3808
- message: `Expected 'id' to be a string.`
3809
- });
3810
- }
3811
3837
  if (((_k = toolCallDelta.function) == null ? void 0 : _k.name) == null) {
3812
3838
  throw new InvalidResponseDataError({
3813
3839
  data: toolCallDelta,
3814
3840
  message: `Expected 'function.name' to be a string.`
3815
3841
  });
3816
3842
  }
3843
+ let toolCallId = (_l = toolCallDelta.id) != null ? _l : "";
3844
+ if (!toolCallId || seenToolCallIds.has(toolCallId)) {
3845
+ toolCallId = generateId();
3846
+ }
3847
+ seenToolCallIds.add(toolCallId);
3817
3848
  toolCalls[index] = {
3818
- id: toolCallDelta.id,
3849
+ id: toolCallId,
3819
3850
  type: "function",
3820
3851
  function: {
3821
3852
  name: toolCallDelta.function.name,
3822
- arguments: (_l = toolCallDelta.function.arguments) != null ? _l : ""
3853
+ arguments: (_m = toolCallDelta.function.arguments) != null ? _m : ""
3823
3854
  },
3824
3855
  inputStarted: false,
3825
3856
  sent: false
@@ -3831,7 +3862,7 @@ var OpenRouterChatLanguageModel = class {
3831
3862
  message: `Tool call at index ${index} is missing after creation.`
3832
3863
  });
3833
3864
  }
3834
- if (((_m = toolCall2.function) == null ? void 0 : _m.name) != null && ((_n = toolCall2.function) == null ? void 0 : _n.arguments) != null && isParsableJson(toolCall2.function.arguments)) {
3865
+ if (((_n = toolCall2.function) == null ? void 0 : _n.name) != null && ((_o = toolCall2.function) == null ? void 0 : _o.arguments) != null && isParsableJson(toolCall2.function.arguments)) {
3835
3866
  toolCall2.inputStarted = true;
3836
3867
  controller.enqueue({
3837
3868
  type: "tool-input-start",
@@ -3889,22 +3920,22 @@ var OpenRouterChatLanguageModel = class {
3889
3920
  });
3890
3921
  }
3891
3922
  }
3892
- if (((_o = toolCallDelta.function) == null ? void 0 : _o.arguments) != null) {
3893
- toolCall.function.arguments += (_q = (_p = toolCallDelta.function) == null ? void 0 : _p.arguments) != null ? _q : "";
3923
+ if (((_p = toolCallDelta.function) == null ? void 0 : _p.arguments) != null) {
3924
+ toolCall.function.arguments += (_r = (_q = toolCallDelta.function) == null ? void 0 : _q.arguments) != null ? _r : "";
3894
3925
  }
3895
3926
  controller.enqueue({
3896
3927
  type: "tool-input-delta",
3897
3928
  id: toolCall.id,
3898
- delta: (_r = toolCallDelta.function.arguments) != null ? _r : ""
3929
+ delta: (_s = toolCallDelta.function.arguments) != null ? _s : ""
3899
3930
  });
3900
- if (((_s = toolCall.function) == null ? void 0 : _s.name) != null && ((_t = toolCall.function) == null ? void 0 : _t.arguments) != null && isParsableJson(toolCall.function.arguments)) {
3931
+ if (((_t = toolCall.function) == null ? void 0 : _t.name) != null && ((_u = toolCall.function) == null ? void 0 : _u.arguments) != null && isParsableJson(toolCall.function.arguments)) {
3901
3932
  controller.enqueue({
3902
3933
  type: "tool-input-end",
3903
3934
  id: toolCall.id
3904
3935
  });
3905
3936
  controller.enqueue({
3906
3937
  type: "tool-call",
3907
- toolCallId: (_u = toolCall.id) != null ? _u : generateId(),
3938
+ toolCallId: toolCall.id,
3908
3939
  toolName: toolCall.function.name,
3909
3940
  input: toolCall.function.arguments,
3910
3941
  providerMetadata: !reasoningDetailsAttachedToToolCall ? {
@@ -3929,7 +3960,6 @@ var OpenRouterChatLanguageModel = class {
3929
3960
  }
3930
3961
  },
3931
3962
  flush(controller) {
3932
- var _a17;
3933
3963
  const hasToolCalls = toolCalls.length > 0;
3934
3964
  if (streamError != null) {
3935
3965
  finishReason = createFinishReason("error");
@@ -3966,7 +3996,7 @@ var OpenRouterChatLanguageModel = class {
3966
3996
  });
3967
3997
  controller.enqueue({
3968
3998
  type: "tool-call",
3969
- toolCallId: (_a17 = toolCall.id) != null ? _a17 : generateId(),
3999
+ toolCallId: toolCall.id,
3970
4000
  toolName: toolCall.function.name,
3971
4001
  input,
3972
4002
  providerMetadata: !reasoningDetailsAttachedToToolCall ? {
@@ -4011,6 +4041,12 @@ var OpenRouterChatLanguageModel = class {
4011
4041
  if (accumulatedFileAnnotations.length > 0) {
4012
4042
  openrouterMetadata.annotations = accumulatedFileAnnotations;
4013
4043
  }
4044
+ if (usage.inputTokens.total === void 0 && openrouterUsage.promptTokens !== void 0) {
4045
+ usage.inputTokens.total = openrouterUsage.promptTokens;
4046
+ }
4047
+ if (usage.outputTokens.total === void 0 && openrouterUsage.completionTokens !== void 0) {
4048
+ usage.outputTokens.total = openrouterUsage.completionTokens;
4049
+ }
4014
4050
  usage.raw = rawUsage;
4015
4051
  controller.enqueue({
4016
4052
  type: "finish",
@@ -4029,6 +4065,22 @@ var OpenRouterChatLanguageModel = class {
4029
4065
  };
4030
4066
  }
4031
4067
  };
4068
+ function mapProviderTool(tool) {
4069
+ const [provider, toolName] = tool.id.split(".");
4070
+ const apiToolType = `${provider}:${toolName}`;
4071
+ const mappedArgs = {};
4072
+ for (const [key, value] of Object.entries(tool.args)) {
4073
+ if (value !== void 0) {
4074
+ mappedArgs[camelToSnake(key)] = value;
4075
+ }
4076
+ }
4077
+ return __spreadValues({
4078
+ type: apiToolType
4079
+ }, mappedArgs);
4080
+ }
4081
+ function camelToSnake(str) {
4082
+ return str.replace(/[A-Z]/g, (letter) => `_${letter.toLowerCase()}`);
4083
+ }
4032
4084
 
4033
4085
  // src/completion/convert-to-openrouter-completion-prompt.ts
4034
4086
  function convertToOpenRouterCompletionPrompt({
@@ -4230,16 +4282,16 @@ var OpenRouterCompletionLanguageModel = class {
4230
4282
  logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
4231
4283
  suffix: this.settings.suffix,
4232
4284
  user: this.settings.user,
4233
- // standardized settings:
4234
- max_tokens: maxOutputTokens,
4235
- temperature,
4236
- top_p: topP,
4237
- frequency_penalty: frequencyPenalty,
4238
- presence_penalty: presencePenalty,
4285
+ // standardized settings (call-level options override model-level settings):
4286
+ max_tokens: maxOutputTokens != null ? maxOutputTokens : this.settings.maxTokens,
4287
+ temperature: temperature != null ? temperature : this.settings.temperature,
4288
+ top_p: topP != null ? topP : this.settings.topP,
4289
+ frequency_penalty: frequencyPenalty != null ? frequencyPenalty : this.settings.frequencyPenalty,
4290
+ presence_penalty: presencePenalty != null ? presencePenalty : this.settings.presencePenalty,
4239
4291
  seed,
4240
4292
  stop: stopSequences,
4241
4293
  response_format: responseFormat,
4242
- top_k: topK,
4294
+ top_k: topK != null ? topK : this.settings.topK,
4243
4295
  // prompt:
4244
4296
  prompt: completionPrompt,
4245
4297
  // OpenRouter specific settings: