@xyd-js/ask-ai-widget 0.0.0-build-2acf05c-20251207022018 → 0.0.0-build-8a0317f-20251214165542

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these package versions as they appear in their respective public registries.
@@ -40260,7 +40260,7 @@ var optionalType = ZodOptional2.create;
40260
40260
  var nullableType = ZodNullable2.create;
40261
40261
  var preprocessType = ZodEffects.createWithPreprocess;
40262
40262
  var pipelineType = ZodPipeline.create;
40263
- // ../../node_modules/.pnpm/@ai-sdk+provider-utils@3.0.18_zod@3.25.76/node_modules/@ai-sdk/provider-utils/dist/index.mjs
40263
+ // ../../node_modules/.pnpm/@ai-sdk+provider-utils@3.0.19_zod@3.25.76/node_modules/@ai-sdk/provider-utils/dist/index.mjs
40264
40264
  function combineHeaders(...headers) {
40265
40265
  return headers.reduce((combinedHeaders, currentHeaders) => ({
40266
40266
  ...combinedHeaders,
@@ -40450,7 +40450,7 @@ function withUserAgentSuffix(headers, ...userAgentSuffixParts) {
40450
40450
  normalizedHeaders.set("user-agent", [currentUserAgentHeader, ...userAgentSuffixParts].filter(Boolean).join(" "));
40451
40451
  return Object.fromEntries(normalizedHeaders.entries());
40452
40452
  }
40453
- var VERSION = "3.0.18";
40453
+ var VERSION = "3.0.19";
40454
40454
  var getOriginalFetch = () => globalThis.fetch;
40455
40455
  var getFromApi = async ({
40456
40456
  url: url2,
@@ -40889,9 +40889,6 @@ var postToApi = async ({
40889
40889
  function tool(tool2) {
40890
40890
  return tool2;
40891
40891
  }
40892
- function dynamicTool(tool2) {
40893
- return { ...tool2, type: "dynamic" };
40894
- }
40895
40892
  function createProviderDefinedToolFactory({
40896
40893
  id,
40897
40894
  name: name14,
@@ -41073,6 +41070,25 @@ var createBinaryResponseHandler = () => async ({ response, url: url2, requestBod
41073
41070
  });
41074
41071
  }
41075
41072
  };
41073
+ function addAdditionalPropertiesToJsonSchema(jsonSchema2) {
41074
+ if (jsonSchema2.type === "object") {
41075
+ jsonSchema2.additionalProperties = false;
41076
+ const properties = jsonSchema2.properties;
41077
+ if (properties != null) {
41078
+ for (const property in properties) {
41079
+ properties[property] = addAdditionalPropertiesToJsonSchema(properties[property]);
41080
+ }
41081
+ }
41082
+ }
41083
+ if (jsonSchema2.type === "array" && jsonSchema2.items != null) {
41084
+ if (Array.isArray(jsonSchema2.items)) {
41085
+ jsonSchema2.items = jsonSchema2.items.map((item) => addAdditionalPropertiesToJsonSchema(item));
41086
+ } else {
41087
+ jsonSchema2.items = addAdditionalPropertiesToJsonSchema(jsonSchema2.items);
41088
+ }
41089
+ }
41090
+ return jsonSchema2;
41091
+ }
41076
41092
  var getRelativePath = (pathA, pathB) => {
41077
41093
  let i = 0;
41078
41094
  for (;i < pathA.length && i < pathB.length; i++) {
@@ -42082,11 +42098,11 @@ function zod3Schema(zodSchema2, options) {
42082
42098
  function zod4Schema(zodSchema2, options) {
42083
42099
  var _a15;
42084
42100
  const useReferences = (_a15 = options == null ? undefined : options.useReferences) != null ? _a15 : false;
42085
- return jsonSchema(() => toJSONSchema(zodSchema2, {
42101
+ return jsonSchema(() => addAdditionalPropertiesToJsonSchema(toJSONSchema(zodSchema2, {
42086
42102
  target: "draft-7",
42087
- io: "output",
42103
+ io: "input",
42088
42104
  reused: useReferences ? "ref" : "inline"
42089
- }), {
42105
+ })), {
42090
42106
  validate: async (value) => {
42091
42107
  const result = await safeParseAsync2(zodSchema2, value);
42092
42108
  return result.success ? { success: true, value: result.data } : { success: false, error: result.error };
@@ -42178,7 +42194,7 @@ async function* executeTool({
42178
42194
  }
42179
42195
  }
42180
42196
 
42181
- // ../../node_modules/.pnpm/@ai-sdk+gateway@2.0.18_zod@3.25.76/node_modules/@ai-sdk/gateway/dist/index.mjs
42197
+ // ../../node_modules/.pnpm/@ai-sdk+gateway@2.0.21_zod@3.25.76/node_modules/@ai-sdk/gateway/dist/index.mjs
42182
42198
  var import_oidc = __toESM(require_dist(), 1);
42183
42199
  var import_oidc2 = __toESM(require_dist(), 1);
42184
42200
  var marker15 = "vercel.ai.gateway.error";
@@ -42816,7 +42832,7 @@ async function getVercelRequestId() {
42816
42832
  var _a82;
42817
42833
  return (_a82 = import_oidc.getContext().headers) == null ? undefined : _a82["x-vercel-id"];
42818
42834
  }
42819
- var VERSION2 = "2.0.18";
42835
+ var VERSION2 = "2.0.21";
42820
42836
  var AI_GATEWAY_PROTOCOL_VERSION = "0.0.1";
42821
42837
  function createGatewayProvider(options = {}) {
42822
42838
  var _a82, _b8;
@@ -42952,7 +42968,7 @@ async function getGatewayAuthToken(options) {
42952
42968
  }
42953
42969
  }
42954
42970
 
42955
- // ../../node_modules/.pnpm/ai@5.0.108_zod@3.25.76/node_modules/ai/dist/index.mjs
42971
+ // ../../node_modules/.pnpm/ai@5.0.113_zod@3.25.76/node_modules/ai/dist/index.mjs
42956
42972
  var import_api2 = __toESM(require_src2(), 1);
42957
42973
  var import_api3 = __toESM(require_src2(), 1);
42958
42974
  var __defProp2 = Object.defineProperty;
@@ -43356,7 +43372,7 @@ function detectMediaType({
43356
43372
  }
43357
43373
  return;
43358
43374
  }
43359
- var VERSION3 = "5.0.108";
43375
+ var VERSION3 = "5.0.113";
43360
43376
  var download = async ({ url: url2 }) => {
43361
43377
  var _a16;
43362
43378
  const urlText = url2.toString();
@@ -43948,14 +43964,25 @@ async function standardizePrompt(prompt) {
43948
43964
  };
43949
43965
  }
43950
43966
  function wrapGatewayError(error40) {
43951
- if (GatewayAuthenticationError.isInstance(error40) || GatewayModelNotFoundError.isInstance(error40)) {
43967
+ if (!GatewayAuthenticationError.isInstance(error40))
43968
+ return error40;
43969
+ const isProductionEnv = (process == null ? undefined : "production") === "production";
43970
+ const moreInfoURL = "https://ai-sdk.dev/unauthenticated-ai-gateway";
43971
+ if (isProductionEnv) {
43952
43972
  return new AISDKError({
43953
43973
  name: "GatewayError",
43954
- message: "Vercel AI Gateway access failed. If you want to use AI SDK providers directly, use the providers, e.g. @ai-sdk/openai, or register a different global default provider.",
43955
- cause: error40
43974
+ message: `Unauthenticated. Configure AI_GATEWAY_API_KEY or use a provider module. Learn more: ${moreInfoURL}`
43956
43975
  });
43957
43976
  }
43958
- return error40;
43977
+ return Object.assign(new Error(`\x1B[1m\x1B[31mUnauthenticated request to AI Gateway.\x1B[0m
43978
+
43979
+ To authenticate, set the \x1B[33mAI_GATEWAY_API_KEY\x1B[0m environment variable with your API key.
43980
+
43981
+ Alternatively, you can use a provider module instead of the AI Gateway.
43982
+
43983
+ Learn more: \x1B[34m${moreInfoURL}\x1B[0m
43984
+
43985
+ `), { name: "GatewayAuthenticationError" });
43959
43986
  }
43960
43987
  function assembleOperationName({
43961
43988
  operationId,
@@ -47634,6 +47661,102 @@ var uiMessagesSchema = lazyValidator(() => zodSchema(exports_external.array(expo
47634
47661
  })
47635
47662
  ])).nonempty("Message must contain at least one part")
47636
47663
  })).nonempty("Messages array must not be empty")));
47664
+ // ../../node_modules/.pnpm/@ai-sdk+provider-utils@3.0.18_zod@3.25.76/node_modules/@ai-sdk/provider-utils/dist/index.mjs
47665
+ var createIdGenerator2 = ({
47666
+ prefix,
47667
+ size = 16,
47668
+ alphabet = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
47669
+ separator = "-"
47670
+ } = {}) => {
47671
+ const generator = () => {
47672
+ const alphabetLength = alphabet.length;
47673
+ const chars = new Array(size);
47674
+ for (let i = 0;i < size; i++) {
47675
+ chars[i] = alphabet[Math.random() * alphabetLength | 0];
47676
+ }
47677
+ return chars.join("");
47678
+ };
47679
+ if (prefix == null) {
47680
+ return generator;
47681
+ }
47682
+ if (alphabet.includes(separator)) {
47683
+ throw new InvalidArgumentError({
47684
+ argument: "separator",
47685
+ message: `The separator "${separator}" must not be part of the alphabet "${alphabet}".`
47686
+ });
47687
+ }
47688
+ return () => `${prefix}${separator}${generator()}`;
47689
+ };
47690
+ var generateId2 = createIdGenerator2();
47691
+ function getRuntimeEnvironmentUserAgent2(globalThisAny = globalThis) {
47692
+ var _a16, _b8, _c;
47693
+ if (globalThisAny.window) {
47694
+ return `runtime/browser`;
47695
+ }
47696
+ if ((_a16 = globalThisAny.navigator) == null ? undefined : _a16.userAgent) {
47697
+ return `runtime/${globalThisAny.navigator.userAgent.toLowerCase()}`;
47698
+ }
47699
+ if ((_c = (_b8 = globalThisAny.process) == null ? undefined : _b8.versions) == null ? undefined : _c.node) {
47700
+ return `runtime/node.js/${globalThisAny.process.version.substring(0)}`;
47701
+ }
47702
+ if (globalThisAny.EdgeRuntime) {
47703
+ return `runtime/vercel-edge`;
47704
+ }
47705
+ return "runtime/unknown";
47706
+ }
47707
+ function normalizeHeaders2(headers) {
47708
+ if (headers == null) {
47709
+ return {};
47710
+ }
47711
+ const normalized = {};
47712
+ if (headers instanceof Headers) {
47713
+ headers.forEach((value, key) => {
47714
+ normalized[key.toLowerCase()] = value;
47715
+ });
47716
+ } else {
47717
+ if (!Array.isArray(headers)) {
47718
+ headers = Object.entries(headers);
47719
+ }
47720
+ for (const [key, value] of headers) {
47721
+ if (value != null) {
47722
+ normalized[key.toLowerCase()] = value;
47723
+ }
47724
+ }
47725
+ }
47726
+ return normalized;
47727
+ }
47728
+ function withUserAgentSuffix2(headers, ...userAgentSuffixParts) {
47729
+ const normalizedHeaders = new Headers(normalizeHeaders2(headers));
47730
+ const currentUserAgentHeader = normalizedHeaders.get("user-agent") || "";
47731
+ normalizedHeaders.set("user-agent", [currentUserAgentHeader, ...userAgentSuffixParts].filter(Boolean).join(" "));
47732
+ return Object.fromEntries(normalizedHeaders.entries());
47733
+ }
47734
+ var validatorSymbol2 = Symbol.for("vercel.ai.validator");
47735
+ function tool2(tool22) {
47736
+ return tool22;
47737
+ }
47738
+ function dynamicTool(tool22) {
47739
+ return { ...tool22, type: "dynamic" };
47740
+ }
47741
+ var ignoreOverride2 = Symbol("Let zodToJsonSchema decide on which parser to use");
47742
+ var ALPHA_NUMERIC2 = new Set("ABCDEFGHIJKLMNOPQRSTUVXYZabcdefghijklmnopqrstuvxyz0123456789");
47743
+ var schemaSymbol2 = Symbol.for("vercel.ai.schema");
47744
+ function jsonSchema2(jsonSchema22, {
47745
+ validate
47746
+ } = {}) {
47747
+ return {
47748
+ [schemaSymbol2]: true,
47749
+ _type: undefined,
47750
+ [validatorSymbol2]: true,
47751
+ get jsonSchema() {
47752
+ if (typeof jsonSchema22 === "function") {
47753
+ jsonSchema22 = jsonSchema22();
47754
+ }
47755
+ return jsonSchema22;
47756
+ },
47757
+ validate
47758
+ };
47759
+ }
47637
47760
 
47638
47761
  // ../../node_modules/.pnpm/pkce-challenge@5.0.1/node_modules/pkce-challenge/dist/index.node.js
47639
47762
  var crypto;
@@ -48564,7 +48687,7 @@ var SseMCPTransport = class {
48564
48687
  headers["Authorization"] = `Bearer ${tokens.access_token}`;
48565
48688
  }
48566
48689
  }
48567
- return withUserAgentSuffix(headers, `ai-sdk/${VERSION4}`, getRuntimeEnvironmentUserAgent());
48690
+ return withUserAgentSuffix2(headers, `ai-sdk/${VERSION4}`, getRuntimeEnvironmentUserAgent2());
48568
48691
  }
48569
48692
  async start() {
48570
48693
  return new Promise((resolve2, reject) => {
@@ -48766,7 +48889,7 @@ var HttpMCPTransport = class {
48766
48889
  headers["Authorization"] = `Bearer ${tokens.access_token}`;
48767
48890
  }
48768
48891
  }
48769
- return withUserAgentSuffix(headers, `ai-sdk/${VERSION4}`, getRuntimeEnvironmentUserAgent());
48892
+ return withUserAgentSuffix2(headers, `ai-sdk/${VERSION4}`, getRuntimeEnvironmentUserAgent2());
48770
48893
  }
48771
48894
  async start() {
48772
48895
  if (this.abortController) {
@@ -49348,13 +49471,13 @@ var DefaultMCPClient = class {
49348
49471
  };
49349
49472
  const toolWithExecute = schemas3 === "automatic" ? dynamicTool({
49350
49473
  description,
49351
- inputSchema: jsonSchema({
49474
+ inputSchema: jsonSchema2({
49352
49475
  ...inputSchema,
49353
49476
  properties: (_a34 = inputSchema.properties) != null ? _a34 : {},
49354
49477
  additionalProperties: false
49355
49478
  }),
49356
49479
  execute
49357
- }) : tool({
49480
+ }) : tool2({
49358
49481
  description,
49359
49482
  inputSchema: schemas3[name34].inputSchema,
49360
49483
  execute
@@ -49827,8 +49950,8 @@ function S(t2) {
49827
49950
  }, S(t2);
49828
49951
  }
49829
49952
 
49830
- // ../../node_modules/.pnpm/@ai-sdk+anthropic@2.0.53_zod@3.25.76/node_modules/@ai-sdk/anthropic/dist/index.mjs
49831
- var VERSION5 = "2.0.53";
49953
+ // ../../node_modules/.pnpm/@ai-sdk+anthropic@2.0.56_zod@3.25.76/node_modules/@ai-sdk/anthropic/dist/index.mjs
49954
+ var VERSION5 = "2.0.56";
49832
49955
  var anthropicErrorDataSchema = lazySchema(() => zodSchema(exports_external.object({
49833
49956
  type: exports_external.literal("error"),
49834
49957
  error: exports_external.object({
@@ -49907,11 +50030,18 @@ var anthropicMessagesResponseSchema = lazySchema(() => zodSchema(exports_externa
49907
50030
  type: exports_external.literal("document"),
49908
50031
  title: exports_external.string().nullable(),
49909
50032
  citations: exports_external.object({ enabled: exports_external.boolean() }).optional(),
49910
- source: exports_external.object({
49911
- type: exports_external.literal("text"),
49912
- media_type: exports_external.string(),
49913
- data: exports_external.string()
49914
- })
50033
+ source: exports_external.union([
50034
+ exports_external.object({
50035
+ type: exports_external.literal("base64"),
50036
+ media_type: exports_external.literal("application/pdf"),
50037
+ data: exports_external.string()
50038
+ }),
50039
+ exports_external.object({
50040
+ type: exports_external.literal("text"),
50041
+ media_type: exports_external.literal("text/plain"),
50042
+ data: exports_external.string()
50043
+ })
50044
+ ])
49915
50045
  })
49916
50046
  }),
49917
50047
  exports_external.object({
@@ -50074,11 +50204,18 @@ var anthropicMessagesChunkSchema = lazySchema(() => zodSchema(exports_external.d
50074
50204
  type: exports_external.literal("document"),
50075
50205
  title: exports_external.string().nullable(),
50076
50206
  citations: exports_external.object({ enabled: exports_external.boolean() }).optional(),
50077
- source: exports_external.object({
50078
- type: exports_external.literal("text"),
50079
- media_type: exports_external.string(),
50080
- data: exports_external.string()
50081
- })
50207
+ source: exports_external.union([
50208
+ exports_external.object({
50209
+ type: exports_external.literal("base64"),
50210
+ media_type: exports_external.literal("application/pdf"),
50211
+ data: exports_external.string()
50212
+ }),
50213
+ exports_external.object({
50214
+ type: exports_external.literal("text"),
50215
+ media_type: exports_external.literal("text/plain"),
50216
+ data: exports_external.string()
50217
+ })
50218
+ ])
50082
50219
  })
50083
50220
  }),
50084
50221
  exports_external.object({
@@ -50370,7 +50507,7 @@ var webSearch_20250305ArgsSchema = lazySchema(() => zodSchema(exports_external.o
50370
50507
  })));
50371
50508
  var webSearch_20250305OutputSchema = lazySchema(() => zodSchema(exports_external.array(exports_external.object({
50372
50509
  url: exports_external.string(),
50373
- title: exports_external.string(),
50510
+ title: exports_external.string().nullable(),
50374
50511
  pageAge: exports_external.string().nullable(),
50375
50512
  encryptedContent: exports_external.string(),
50376
50513
  type: exports_external.literal("web_search_result")
@@ -50399,7 +50536,7 @@ var webFetch_20250910OutputSchema = lazySchema(() => zodSchema(exports_external.
50399
50536
  url: exports_external.string(),
50400
50537
  content: exports_external.object({
50401
50538
  type: exports_external.literal("document"),
50402
- title: exports_external.string(),
50539
+ title: exports_external.string().nullable(),
50403
50540
  citations: exports_external.object({ enabled: exports_external.boolean() }).optional(),
50404
50541
  source: exports_external.union([
50405
50542
  exports_external.object({
@@ -50442,23 +50579,23 @@ async function prepareTools({
50442
50579
  return { tools: undefined, toolChoice: undefined, toolWarnings, betas };
50443
50580
  }
50444
50581
  const anthropicTools2 = [];
50445
- for (const tool2 of tools) {
50446
- switch (tool2.type) {
50582
+ for (const tool3 of tools) {
50583
+ switch (tool3.type) {
50447
50584
  case "function": {
50448
- const cacheControl = validator2.getCacheControl(tool2.providerOptions, {
50585
+ const cacheControl = validator2.getCacheControl(tool3.providerOptions, {
50449
50586
  type: "tool definition",
50450
50587
  canCache: true
50451
50588
  });
50452
50589
  anthropicTools2.push({
50453
- name: tool2.name,
50454
- description: tool2.description,
50455
- input_schema: tool2.inputSchema,
50590
+ name: tool3.name,
50591
+ description: tool3.description,
50592
+ input_schema: tool3.inputSchema,
50456
50593
  cache_control: cacheControl
50457
50594
  });
50458
50595
  break;
50459
50596
  }
50460
50597
  case "provider-defined": {
50461
- switch (tool2.id) {
50598
+ switch (tool3.id) {
50462
50599
  case "anthropic.code_execution_20250522": {
50463
50600
  betas.add("code-execution-2025-05-22");
50464
50601
  anthropicTools2.push({
@@ -50481,9 +50618,9 @@ async function prepareTools({
50481
50618
  anthropicTools2.push({
50482
50619
  name: "computer",
50483
50620
  type: "computer_20250124",
50484
- display_width_px: tool2.args.displayWidthPx,
50485
- display_height_px: tool2.args.displayHeightPx,
50486
- display_number: tool2.args.displayNumber,
50621
+ display_width_px: tool3.args.displayWidthPx,
50622
+ display_height_px: tool3.args.displayHeightPx,
50623
+ display_number: tool3.args.displayNumber,
50487
50624
  cache_control: undefined
50488
50625
  });
50489
50626
  break;
@@ -50493,9 +50630,9 @@ async function prepareTools({
50493
50630
  anthropicTools2.push({
50494
50631
  name: "computer",
50495
50632
  type: "computer_20241022",
50496
- display_width_px: tool2.args.displayWidthPx,
50497
- display_height_px: tool2.args.displayHeightPx,
50498
- display_number: tool2.args.displayNumber,
50633
+ display_width_px: tool3.args.displayWidthPx,
50634
+ display_height_px: tool3.args.displayHeightPx,
50635
+ display_number: tool3.args.displayNumber,
50499
50636
  cache_control: undefined
50500
50637
  });
50501
50638
  break;
@@ -50529,7 +50666,7 @@ async function prepareTools({
50529
50666
  }
50530
50667
  case "anthropic.text_editor_20250728": {
50531
50668
  const args = await validateTypes({
50532
- value: tool2.args,
50669
+ value: tool3.args,
50533
50670
  schema: textEditor_20250728ArgsSchema
50534
50671
  });
50535
50672
  anthropicTools2.push({
@@ -50569,7 +50706,7 @@ async function prepareTools({
50569
50706
  case "anthropic.web_fetch_20250910": {
50570
50707
  betas.add("web-fetch-2025-09-10");
50571
50708
  const args = await validateTypes({
50572
- value: tool2.args,
50709
+ value: tool3.args,
50573
50710
  schema: webFetch_20250910ArgsSchema
50574
50711
  });
50575
50712
  anthropicTools2.push({
@@ -50586,7 +50723,7 @@ async function prepareTools({
50586
50723
  }
50587
50724
  case "anthropic.web_search_20250305": {
50588
50725
  const args = await validateTypes({
50589
- value: tool2.args,
50726
+ value: tool3.args,
50590
50727
  schema: webSearch_20250305ArgsSchema
50591
50728
  });
50592
50729
  anthropicTools2.push({
@@ -50601,14 +50738,14 @@ async function prepareTools({
50601
50738
  break;
50602
50739
  }
50603
50740
  default: {
50604
- toolWarnings.push({ type: "unsupported-tool", tool: tool2 });
50741
+ toolWarnings.push({ type: "unsupported-tool", tool: tool3 });
50605
50742
  break;
50606
50743
  }
50607
50744
  }
50608
50745
  break;
50609
50746
  }
50610
50747
  default: {
50611
- toolWarnings.push({ type: "unsupported-tool", tool: tool2 });
50748
+ toolWarnings.push({ type: "unsupported-tool", tool: tool3 });
50612
50749
  break;
50613
50750
  }
50614
50751
  }
@@ -51525,7 +51662,7 @@ var AnthropicMessagesLanguageModel = class {
51525
51662
  betas.add("code-execution-2025-08-25");
51526
51663
  betas.add("skills-2025-10-02");
51527
51664
  betas.add("files-api-2025-04-14");
51528
- if (!(tools == null ? undefined : tools.some((tool2) => tool2.type === "provider-defined" && tool2.id === "anthropic.code_execution_20250825"))) {
51665
+ if (!(tools == null ? undefined : tools.some((tool3) => tool3.type === "provider-defined" && tool3.id === "anthropic.code_execution_20250825"))) {
51529
51666
  warnings.push({
51530
51667
  type: "other",
51531
51668
  message: "code execution tool is required when using skills"
@@ -52606,7 +52743,7 @@ function createAnthropic(options = {}) {
52606
52743
  }
52607
52744
  var anthropic = createAnthropic();
52608
52745
 
52609
- // ../../node_modules/.pnpm/@ai-sdk+openai@2.0.77_zod@3.25.76/node_modules/@ai-sdk/openai/dist/index.mjs
52746
+ // ../../node_modules/.pnpm/@ai-sdk+openai@2.0.86_zod@3.25.76/node_modules/@ai-sdk/openai/dist/index.mjs
52610
52747
  var openaiErrorDataSchema = exports_external.object({
52611
52748
  error: exports_external.object({
52612
52749
  message: exports_external.string(),
@@ -52619,6 +52756,20 @@ var openaiFailedResponseHandler = createJsonErrorResponseHandler({
52619
52756
  errorSchema: openaiErrorDataSchema,
52620
52757
  errorToMessage: (data) => data.error.message
52621
52758
  });
52759
+ function getOpenAILanguageModelCapabilities(modelId) {
52760
+ const supportsFlexProcessing = modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
52761
+ const supportsPriorityProcessing = modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
52762
+ const isReasoningModel = !(modelId.startsWith("gpt-3") || modelId.startsWith("gpt-4") || modelId.startsWith("chatgpt-4o") || modelId.startsWith("gpt-5-chat"));
52763
+ const supportsNonReasoningParameters = modelId.startsWith("gpt-5.1");
52764
+ const systemMessageMode = isReasoningModel ? "developer" : "system";
52765
+ return {
52766
+ supportsFlexProcessing,
52767
+ supportsPriorityProcessing,
52768
+ isReasoningModel,
52769
+ systemMessageMode,
52770
+ supportsNonReasoningParameters
52771
+ };
52772
+ }
52622
52773
  function convertToOpenAIChatMessages({
52623
52774
  prompt,
52624
52775
  systemMessageMode = "system"
@@ -52834,10 +52985,12 @@ var openaiChatResponseSchema = lazyValidator(() => zodSchema(exports_external.ob
52834
52985
  })).nullish(),
52835
52986
  annotations: exports_external.array(exports_external.object({
52836
52987
  type: exports_external.literal("url_citation"),
52837
- start_index: exports_external.number(),
52838
- end_index: exports_external.number(),
52839
- url: exports_external.string(),
52840
- title: exports_external.string()
52988
+ url_citation: exports_external.object({
52989
+ start_index: exports_external.number(),
52990
+ end_index: exports_external.number(),
52991
+ url: exports_external.string(),
52992
+ title: exports_external.string()
52993
+ })
52841
52994
  })).nullish()
52842
52995
  }),
52843
52996
  index: exports_external.number(),
@@ -52887,10 +53040,12 @@ var openaiChatChunkSchema = lazyValidator(() => zodSchema(exports_external.union
52887
53040
  })).nullish(),
52888
53041
  annotations: exports_external.array(exports_external.object({
52889
53042
  type: exports_external.literal("url_citation"),
52890
- start_index: exports_external.number(),
52891
- end_index: exports_external.number(),
52892
- url: exports_external.string(),
52893
- title: exports_external.string()
53043
+ url_citation: exports_external.object({
53044
+ start_index: exports_external.number(),
53045
+ end_index: exports_external.number(),
53046
+ url: exports_external.string(),
53047
+ title: exports_external.string()
53048
+ })
52894
53049
  })).nullish()
52895
53050
  }).nullish(),
52896
53051
  logprobs: exports_external.object({
@@ -52927,7 +53082,7 @@ var openaiChatLanguageModelOptions = lazyValidator(() => zodSchema(exports_exter
52927
53082
  logprobs: exports_external.union([exports_external.boolean(), exports_external.number()]).optional(),
52928
53083
  parallelToolCalls: exports_external.boolean().optional(),
52929
53084
  user: exports_external.string().optional(),
52930
- reasoningEffort: exports_external.enum(["none", "minimal", "low", "medium", "high"]).optional(),
53085
+ reasoningEffort: exports_external.enum(["none", "minimal", "low", "medium", "high", "xhigh"]).optional(),
52931
53086
  maxCompletionTokens: exports_external.number().optional(),
52932
53087
  store: exports_external.boolean().optional(),
52933
53088
  metadata: exports_external.record(exports_external.string().max(64), exports_external.string().max(512)).optional(),
@@ -52952,21 +53107,21 @@ function prepareChatTools({
52952
53107
  return { tools: undefined, toolChoice: undefined, toolWarnings };
52953
53108
  }
52954
53109
  const openaiTools2 = [];
52955
- for (const tool2 of tools) {
52956
- switch (tool2.type) {
53110
+ for (const tool3 of tools) {
53111
+ switch (tool3.type) {
52957
53112
  case "function":
52958
53113
  openaiTools2.push({
52959
53114
  type: "function",
52960
53115
  function: {
52961
- name: tool2.name,
52962
- description: tool2.description,
52963
- parameters: tool2.inputSchema,
53116
+ name: tool3.name,
53117
+ description: tool3.description,
53118
+ parameters: tool3.inputSchema,
52964
53119
  strict: structuredOutputs ? strictJsonSchema : undefined
52965
53120
  }
52966
53121
  });
52967
53122
  break;
52968
53123
  default:
52969
- toolWarnings.push({ type: "unsupported-tool", tool: tool2 });
53124
+ toolWarnings.push({ type: "unsupported-tool", tool: tool3 });
52970
53125
  break;
52971
53126
  }
52972
53127
  }
@@ -53033,6 +53188,7 @@ var OpenAIChatLanguageModel = class {
53033
53188
  schema: openaiChatLanguageModelOptions
53034
53189
  })) != null ? _a16 : {};
53035
53190
  const structuredOutputs = (_b9 = openaiOptions.structuredOutputs) != null ? _b9 : true;
53191
+ const modelCapabilities = getOpenAILanguageModelCapabilities(this.modelId);
53036
53192
  if (topK != null) {
53037
53193
  warnings.push({
53038
53194
  type: "unsupported-setting",
@@ -53048,7 +53204,7 @@ var OpenAIChatLanguageModel = class {
53048
53204
  }
53049
53205
  const { messages, warnings: messageWarnings } = convertToOpenAIChatMessages({
53050
53206
  prompt,
53051
- systemMessageMode: getSystemMessageMode(this.modelId)
53207
+ systemMessageMode: modelCapabilities.systemMessageMode
53052
53208
  });
53053
53209
  warnings.push(...messageWarnings);
53054
53210
  const strictJsonSchema = (_c = openaiOptions.strictJsonSchema) != null ? _c : false;
@@ -53087,22 +53243,31 @@ var OpenAIChatLanguageModel = class {
53087
53243
  safety_identifier: openaiOptions.safetyIdentifier,
53088
53244
  messages
53089
53245
  };
53090
- if (isReasoningModel(this.modelId)) {
53091
- if (baseArgs.temperature != null) {
53092
- baseArgs.temperature = undefined;
53093
- warnings.push({
53094
- type: "unsupported-setting",
53095
- setting: "temperature",
53096
- details: "temperature is not supported for reasoning models"
53097
- });
53098
- }
53099
- if (baseArgs.top_p != null) {
53100
- baseArgs.top_p = undefined;
53101
- warnings.push({
53102
- type: "unsupported-setting",
53103
- setting: "topP",
53104
- details: "topP is not supported for reasoning models"
53105
- });
53246
+ if (modelCapabilities.isReasoningModel) {
53247
+ if (openaiOptions.reasoningEffort !== "none" || !modelCapabilities.supportsNonReasoningParameters) {
53248
+ if (baseArgs.temperature != null) {
53249
+ baseArgs.temperature = undefined;
53250
+ warnings.push({
53251
+ type: "unsupported-setting",
53252
+ setting: "temperature",
53253
+ details: "temperature is not supported for reasoning models"
53254
+ });
53255
+ }
53256
+ if (baseArgs.top_p != null) {
53257
+ baseArgs.top_p = undefined;
53258
+ warnings.push({
53259
+ type: "unsupported-setting",
53260
+ setting: "topP",
53261
+ details: "topP is not supported for reasoning models"
53262
+ });
53263
+ }
53264
+ if (baseArgs.logprobs != null) {
53265
+ baseArgs.logprobs = undefined;
53266
+ warnings.push({
53267
+ type: "other",
53268
+ message: "logprobs is not supported for reasoning models"
53269
+ });
53270
+ }
53106
53271
  }
53107
53272
  if (baseArgs.frequency_penalty != null) {
53108
53273
  baseArgs.frequency_penalty = undefined;
@@ -53127,13 +53292,6 @@ var OpenAIChatLanguageModel = class {
53127
53292
  message: "logitBias is not supported for reasoning models"
53128
53293
  });
53129
53294
  }
53130
- if (baseArgs.logprobs != null) {
53131
- baseArgs.logprobs = undefined;
53132
- warnings.push({
53133
- type: "other",
53134
- message: "logprobs is not supported for reasoning models"
53135
- });
53136
- }
53137
53295
  if (baseArgs.top_logprobs != null) {
53138
53296
  baseArgs.top_logprobs = undefined;
53139
53297
  warnings.push({
@@ -53157,7 +53315,7 @@ var OpenAIChatLanguageModel = class {
53157
53315
  });
53158
53316
  }
53159
53317
  }
53160
- if (openaiOptions.serviceTier === "flex" && !supportsFlexProcessing(this.modelId)) {
53318
+ if (openaiOptions.serviceTier === "flex" && !modelCapabilities.supportsFlexProcessing) {
53161
53319
  warnings.push({
53162
53320
  type: "unsupported-setting",
53163
53321
  setting: "serviceTier",
@@ -53165,7 +53323,7 @@ var OpenAIChatLanguageModel = class {
53165
53323
  });
53166
53324
  baseArgs.service_tier = undefined;
53167
53325
  }
53168
- if (openaiOptions.serviceTier === "priority" && !supportsPriorityProcessing(this.modelId)) {
53326
+ if (openaiOptions.serviceTier === "priority" && !modelCapabilities.supportsPriorityProcessing) {
53169
53327
  warnings.push({
53170
53328
  type: "unsupported-setting",
53171
53329
  setting: "serviceTier",
@@ -53230,8 +53388,8 @@ var OpenAIChatLanguageModel = class {
53230
53388
  type: "source",
53231
53389
  sourceType: "url",
53232
53390
  id: generateId(),
53233
- url: annotation.url,
53234
- title: annotation.title
53391
+ url: annotation.url_citation.url,
53392
+ title: annotation.url_citation.title
53235
53393
  });
53236
53394
  }
53237
53395
  const completionTokenDetails = (_d = response.usage) == null ? undefined : _d.completion_tokens_details;
@@ -53457,8 +53615,8 @@ var OpenAIChatLanguageModel = class {
53457
53615
  type: "source",
53458
53616
  sourceType: "url",
53459
53617
  id: generateId(),
53460
- url: annotation.url,
53461
- title: annotation.title
53618
+ url: annotation.url_citation.url,
53619
+ title: annotation.url_citation.title
53462
53620
  });
53463
53621
  }
53464
53622
  }
@@ -53480,42 +53638,6 @@ var OpenAIChatLanguageModel = class {
53480
53638
  };
53481
53639
  }
53482
53640
  };
53483
- function isReasoningModel(modelId) {
53484
- return (modelId.startsWith("o") || modelId.startsWith("gpt-5")) && !modelId.startsWith("gpt-5-chat");
53485
- }
53486
- function supportsFlexProcessing(modelId) {
53487
- return modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
53488
- }
53489
- function supportsPriorityProcessing(modelId) {
53490
- return modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
53491
- }
53492
- function getSystemMessageMode(modelId) {
53493
- var _a16, _b9;
53494
- if (!isReasoningModel(modelId)) {
53495
- return "system";
53496
- }
53497
- return (_b9 = (_a16 = reasoningModels[modelId]) == null ? undefined : _a16.systemMessageMode) != null ? _b9 : "developer";
53498
- }
53499
- var reasoningModels = {
53500
- o3: {
53501
- systemMessageMode: "developer"
53502
- },
53503
- "o3-2025-04-16": {
53504
- systemMessageMode: "developer"
53505
- },
53506
- "o3-mini": {
53507
- systemMessageMode: "developer"
53508
- },
53509
- "o3-mini-2025-01-31": {
53510
- systemMessageMode: "developer"
53511
- },
53512
- "o4-mini": {
53513
- systemMessageMode: "developer"
53514
- },
53515
- "o4-mini-2025-04-16": {
53516
- systemMessageMode: "developer"
53517
- }
53518
- };
53519
53641
  function convertToOpenAICompletionPrompt({
53520
53642
  prompt,
53521
53643
  user = "user",
@@ -54083,8 +54205,8 @@ var codeInterpreter = (args = {}) => {
54083
54205
  };
54084
54206
  var comparisonFilterSchema = exports_external.object({
54085
54207
  key: exports_external.string(),
54086
- type: exports_external.enum(["eq", "ne", "gt", "gte", "lt", "lte"]),
54087
- value: exports_external.union([exports_external.string(), exports_external.number(), exports_external.boolean()])
54208
+ type: exports_external.enum(["eq", "ne", "gt", "gte", "lt", "lte", "in", "nin"]),
54209
+ value: exports_external.union([exports_external.string(), exports_external.number(), exports_external.boolean(), exports_external.array(exports_external.string())])
54088
54210
  });
54089
54211
  var compoundFilterSchema = exports_external.object({
54090
54212
  type: exports_external.enum(["and", "or"]),
@@ -54182,7 +54304,7 @@ var webSearchOutputSchema = lazySchema(() => zodSchema(exports_external.object({
54182
54304
  url: exports_external.string().nullish()
54183
54305
  }),
54184
54306
  exports_external.object({
54185
- type: exports_external.literal("find"),
54307
+ type: exports_external.literal("findInPage"),
54186
54308
  url: exports_external.string().nullish(),
54187
54309
  pattern: exports_external.string().nullish()
54188
54310
  })
@@ -54221,7 +54343,7 @@ var webSearchPreviewOutputSchema = lazySchema(() => zodSchema(exports_external.o
54221
54343
  url: exports_external.string().nullish()
54222
54344
  }),
54223
54345
  exports_external.object({
54224
- type: exports_external.literal("find"),
54346
+ type: exports_external.literal("findInPage"),
54225
54347
  url: exports_external.string().nullish(),
54226
54348
  pattern: exports_external.string().nullish()
54227
54349
  })
@@ -54674,7 +54796,7 @@ var openaiResponsesChunkSchema = lazyValidator(() => zodSchema(exports_external.
54674
54796
  url: exports_external.string().nullish()
54675
54797
  }),
54676
54798
  exports_external.object({
54677
- type: exports_external.literal("find"),
54799
+ type: exports_external.literal("find_in_page"),
54678
54800
  url: exports_external.string().nullish(),
54679
54801
  pattern: exports_external.string().nullish()
54680
54802
  })
@@ -54866,7 +54988,7 @@ var openaiResponsesResponseSchema = lazyValidator(() => zodSchema(exports_extern
54866
54988
  url: exports_external.string().nullish()
54867
54989
  }),
54868
54990
  exports_external.object({
54869
- type: exports_external.literal("find"),
54991
+ type: exports_external.literal("find_in_page"),
54870
54992
  url: exports_external.string().nullish(),
54871
54993
  pattern: exports_external.string().nullish()
54872
54994
  })
@@ -54971,7 +55093,11 @@ var openaiResponsesReasoningModelIds = [
54971
55093
  "gpt-5.1",
54972
55094
  "gpt-5.1-chat-latest",
54973
55095
  "gpt-5.1-codex-mini",
54974
- "gpt-5.1-codex"
55096
+ "gpt-5.1-codex",
55097
+ "gpt-5.1-codex-max",
55098
+ "gpt-5.2",
55099
+ "gpt-5.2-chat-latest",
55100
+ "gpt-5.2-pro"
54975
55101
  ];
54976
55102
  var openaiResponsesModelIds = [
54977
55103
  "gpt-4.1",
@@ -55045,22 +55171,22 @@ async function prepareResponsesTools({
55045
55171
  return { tools: undefined, toolChoice: undefined, toolWarnings };
55046
55172
  }
55047
55173
  const openaiTools2 = [];
55048
- for (const tool2 of tools) {
55049
- switch (tool2.type) {
55174
+ for (const tool3 of tools) {
55175
+ switch (tool3.type) {
55050
55176
  case "function":
55051
55177
  openaiTools2.push({
55052
55178
  type: "function",
55053
- name: tool2.name,
55054
- description: tool2.description,
55055
- parameters: tool2.inputSchema,
55179
+ name: tool3.name,
55180
+ description: tool3.description,
55181
+ parameters: tool3.inputSchema,
55056
55182
  strict: strictJsonSchema
55057
55183
  });
55058
55184
  break;
55059
55185
  case "provider-defined": {
55060
- switch (tool2.id) {
55186
+ switch (tool3.id) {
55061
55187
  case "openai.file_search": {
55062
55188
  const args = await validateTypes({
55063
- value: tool2.args,
55189
+ value: tool3.args,
55064
55190
  schema: fileSearchArgsSchema
55065
55191
  });
55066
55192
  openaiTools2.push({
@@ -55083,7 +55209,7 @@ async function prepareResponsesTools({
55083
55209
  }
55084
55210
  case "openai.web_search_preview": {
55085
55211
  const args = await validateTypes({
55086
- value: tool2.args,
55212
+ value: tool3.args,
55087
55213
  schema: webSearchPreviewArgsSchema
55088
55214
  });
55089
55215
  openaiTools2.push({
@@ -55095,7 +55221,7 @@ async function prepareResponsesTools({
55095
55221
  }
55096
55222
  case "openai.web_search": {
55097
55223
  const args = await validateTypes({
55098
- value: tool2.args,
55224
+ value: tool3.args,
55099
55225
  schema: webSearchArgsSchema
55100
55226
  });
55101
55227
  openaiTools2.push({
@@ -55109,7 +55235,7 @@ async function prepareResponsesTools({
55109
55235
  }
55110
55236
  case "openai.code_interpreter": {
55111
55237
  const args = await validateTypes({
55112
- value: tool2.args,
55238
+ value: tool3.args,
55113
55239
  schema: codeInterpreterArgsSchema
55114
55240
  });
55115
55241
  openaiTools2.push({
@@ -55120,7 +55246,7 @@ async function prepareResponsesTools({
55120
55246
  }
55121
55247
  case "openai.image_generation": {
55122
55248
  const args = await validateTypes({
55123
- value: tool2.args,
55249
+ value: tool3.args,
55124
55250
  schema: imageGenerationArgsSchema
55125
55251
  });
55126
55252
  openaiTools2.push({
@@ -55144,7 +55270,7 @@ async function prepareResponsesTools({
55144
55270
  break;
55145
55271
  }
55146
55272
  default:
55147
- toolWarnings.push({ type: "unsupported-tool", tool: tool2 });
55273
+ toolWarnings.push({ type: "unsupported-tool", tool: tool3 });
55148
55274
  break;
55149
55275
  }
55150
55276
  }
@@ -55201,7 +55327,7 @@ var OpenAIResponsesLanguageModel = class {
55201
55327
  }) {
55202
55328
  var _a16, _b9, _c, _d;
55203
55329
  const warnings = [];
55204
- const modelConfig = getResponsesModelConfig(this.modelId);
55330
+ const modelCapabilities = getOpenAILanguageModelCapabilities(this.modelId);
55205
55331
  if (topK != null) {
55206
55332
  warnings.push({ type: "unsupported-setting", setting: "topK" });
55207
55333
  }
@@ -55237,7 +55363,7 @@ var OpenAIResponsesLanguageModel = class {
55237
55363
  }
55238
55364
  const { input, warnings: inputWarnings } = await convertToOpenAIResponsesInput({
55239
55365
  prompt,
55240
- systemMessageMode: modelConfig.systemMessageMode,
55366
+ systemMessageMode: modelCapabilities.systemMessageMode,
55241
55367
  fileIdPrefixes: this.config.fileIdPrefixes,
55242
55368
  store: (_a16 = openaiOptions == null ? undefined : openaiOptions.store) != null ? _a16 : true,
55243
55369
  hasLocalShellTool: hasOpenAITool("openai.local_shell")
@@ -55253,13 +55379,13 @@ var OpenAIResponsesLanguageModel = class {
55253
55379
  }
55254
55380
  }
55255
55381
  function hasOpenAITool(id) {
55256
- return (tools == null ? undefined : tools.find((tool2) => tool2.type === "provider-defined" && tool2.id === id)) != null;
55382
+ return (tools == null ? undefined : tools.find((tool3) => tool3.type === "provider-defined" && tool3.id === id)) != null;
55257
55383
  }
55258
55384
  const topLogprobs = typeof (openaiOptions == null ? undefined : openaiOptions.logprobs) === "number" ? openaiOptions == null ? undefined : openaiOptions.logprobs : (openaiOptions == null ? undefined : openaiOptions.logprobs) === true ? TOP_LOGPROBS_MAX : undefined;
55259
55385
  if (topLogprobs) {
55260
55386
  addInclude("message.output_text.logprobs");
55261
55387
  }
55262
- const webSearchToolName = (_c = tools == null ? undefined : tools.find((tool2) => tool2.type === "provider-defined" && (tool2.id === "openai.web_search" || tool2.id === "openai.web_search_preview"))) == null ? undefined : _c.name;
55388
+ const webSearchToolName = (_c = tools == null ? undefined : tools.find((tool3) => tool3.type === "provider-defined" && (tool3.id === "openai.web_search" || tool3.id === "openai.web_search_preview"))) == null ? undefined : _c.name;
55263
55389
  if (webSearchToolName) {
55264
55390
  addInclude("web_search_call.action.sources");
55265
55391
  }
@@ -55267,7 +55393,7 @@ var OpenAIResponsesLanguageModel = class {
55267
55393
  addInclude("code_interpreter_call.outputs");
55268
55394
  }
55269
55395
  const store = openaiOptions == null ? undefined : openaiOptions.store;
55270
- if (store === false && modelConfig.isReasoningModel) {
55396
+ if (store === false && modelCapabilities.isReasoningModel) {
55271
55397
  addInclude("reasoning.encrypted_content");
55272
55398
  }
55273
55399
  const baseArgs = {
@@ -55307,7 +55433,7 @@ var OpenAIResponsesLanguageModel = class {
55307
55433
  safety_identifier: openaiOptions == null ? undefined : openaiOptions.safetyIdentifier,
55308
55434
  top_logprobs: topLogprobs,
55309
55435
  truncation: openaiOptions == null ? undefined : openaiOptions.truncation,
55310
- ...modelConfig.isReasoningModel && ((openaiOptions == null ? undefined : openaiOptions.reasoningEffort) != null || (openaiOptions == null ? undefined : openaiOptions.reasoningSummary) != null) && {
55436
+ ...modelCapabilities.isReasoningModel && ((openaiOptions == null ? undefined : openaiOptions.reasoningEffort) != null || (openaiOptions == null ? undefined : openaiOptions.reasoningSummary) != null) && {
55311
55437
  reasoning: {
55312
55438
  ...(openaiOptions == null ? undefined : openaiOptions.reasoningEffort) != null && {
55313
55439
  effort: openaiOptions.reasoningEffort
@@ -55318,22 +55444,24 @@ var OpenAIResponsesLanguageModel = class {
55318
55444
  }
55319
55445
  }
55320
55446
  };
55321
- if (modelConfig.isReasoningModel) {
55322
- if (baseArgs.temperature != null) {
55323
- baseArgs.temperature = undefined;
55324
- warnings.push({
55325
- type: "unsupported-setting",
55326
- setting: "temperature",
55327
- details: "temperature is not supported for reasoning models"
55328
- });
55329
- }
55330
- if (baseArgs.top_p != null) {
55331
- baseArgs.top_p = undefined;
55332
- warnings.push({
55333
- type: "unsupported-setting",
55334
- setting: "topP",
55335
- details: "topP is not supported for reasoning models"
55336
- });
55447
+ if (modelCapabilities.isReasoningModel) {
55448
+ if (!((openaiOptions == null ? undefined : openaiOptions.reasoningEffort) === "none" && modelCapabilities.supportsNonReasoningParameters)) {
55449
+ if (baseArgs.temperature != null) {
55450
+ baseArgs.temperature = undefined;
55451
+ warnings.push({
55452
+ type: "unsupported-setting",
55453
+ setting: "temperature",
55454
+ details: "temperature is not supported for reasoning models"
55455
+ });
55456
+ }
55457
+ if (baseArgs.top_p != null) {
55458
+ baseArgs.top_p = undefined;
55459
+ warnings.push({
55460
+ type: "unsupported-setting",
55461
+ setting: "topP",
55462
+ details: "topP is not supported for reasoning models"
55463
+ });
55464
+ }
55337
55465
  }
55338
55466
  } else {
55339
55467
  if ((openaiOptions == null ? undefined : openaiOptions.reasoningEffort) != null) {
@@ -55351,7 +55479,7 @@ var OpenAIResponsesLanguageModel = class {
55351
55479
  });
55352
55480
  }
55353
55481
  }
55354
- if ((openaiOptions == null ? undefined : openaiOptions.serviceTier) === "flex" && !modelConfig.supportsFlexProcessing) {
55482
+ if ((openaiOptions == null ? undefined : openaiOptions.serviceTier) === "flex" && !modelCapabilities.supportsFlexProcessing) {
55355
55483
  warnings.push({
55356
55484
  type: "unsupported-setting",
55357
55485
  setting: "serviceTier",
@@ -55359,7 +55487,7 @@ var OpenAIResponsesLanguageModel = class {
55359
55487
  });
55360
55488
  delete baseArgs.service_tier;
55361
55489
  }
55362
- if ((openaiOptions == null ? undefined : openaiOptions.serviceTier) === "priority" && !modelConfig.supportsPriorityProcessing) {
55490
+ if ((openaiOptions == null ? undefined : openaiOptions.serviceTier) === "priority" && !modelCapabilities.supportsPriorityProcessing) {
55363
55491
  warnings.push({
55364
55492
  type: "unsupported-setting",
55365
55493
  setting: "serviceTier",
@@ -56202,32 +56330,6 @@ function isResponseAnnotationAddedChunk(chunk) {
56202
56330
  function isErrorChunk(chunk) {
56203
56331
  return chunk.type === "error";
56204
56332
  }
56205
- function getResponsesModelConfig(modelId) {
56206
- const supportsFlexProcessing2 = modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
56207
- const supportsPriorityProcessing2 = modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
56208
- const defaults = {
56209
- systemMessageMode: "system",
56210
- supportsFlexProcessing: supportsFlexProcessing2,
56211
- supportsPriorityProcessing: supportsPriorityProcessing2
56212
- };
56213
- if (modelId.startsWith("gpt-5-chat")) {
56214
- return {
56215
- ...defaults,
56216
- isReasoningModel: false
56217
- };
56218
- }
56219
- if (modelId.startsWith("o") || modelId.startsWith("gpt-5") || modelId.startsWith("codex-") || modelId.startsWith("computer-use")) {
56220
- return {
56221
- ...defaults,
56222
- isReasoningModel: true,
56223
- systemMessageMode: "developer"
56224
- };
56225
- }
56226
- return {
56227
- ...defaults,
56228
- isReasoningModel: false
56229
- };
56230
- }
56231
56333
  function mapWebSearchOutput(action) {
56232
56334
  var _a16;
56233
56335
  switch (action.type) {
@@ -56238,9 +56340,13 @@ function mapWebSearchOutput(action) {
56238
56340
  };
56239
56341
  case "open_page":
56240
56342
  return { action: { type: "openPage", url: action.url } };
56241
- case "find":
56343
+ case "find_in_page":
56242
56344
  return {
56243
- action: { type: "find", url: action.url, pattern: action.pattern }
56345
+ action: {
56346
+ type: "findInPage",
56347
+ url: action.url,
56348
+ pattern: action.pattern
56349
+ }
56244
56350
  };
56245
56351
  }
56246
56352
  }
@@ -56533,7 +56639,7 @@ var OpenAITranscriptionModel = class {
56533
56639
  };
56534
56640
  }
56535
56641
  };
56536
- var VERSION6 = "2.0.77";
56642
+ var VERSION6 = "2.0.86";
56537
56643
  function createOpenAI(options = {}) {
56538
56644
  var _a16, _b9;
56539
56645
  const baseURL = (_a16 = withoutTrailingSlash(loadOptionalSetting({