@budibase/pro 3.20.7 → 3.20.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,4 +3,5 @@ import { OpenAI } from "./openai";
 import { AzureOpenAI as AzureOpenAIClient } from "openai";
 export declare class AzureOpenAI extends OpenAI {
   protected getClient(opts: LLMConfigOptions): AzureOpenAIClient;
+  protected getVerbosityForModel(): "low" | "medium" | undefined;
 }
@@ -5,7 +5,7 @@ import { LLMRequest } from "../llm";
 import { LLMConfigOptions, ResponseFormat } from "@budibase/types";
 import openai from "openai";
 import { Readable } from "node:stream";
-export type OpenAIModel = "gpt-4o-mini" | "gpt-4o" | "gpt-4" | "gpt-4-turbo" | "gpt-3.5-turbo" | "gpt-5" | "gpt-5-mini" | "gpt-5-nano";
+export type OpenAIModel = "gpt-4o-mini" | "gpt-4o" | "gpt-4" | "gpt-4.1" | "gpt-4.1-mini" | "gpt-4-turbo" | "gpt-3.5-turbo" | "gpt-5" | "gpt-5-mini" | "gpt-5-nano";
 export declare enum GPT_5_MODELS {
   GPT_5_MINI = "gpt-5-mini",
   GPT_5 = "gpt-5",
@@ -15,6 +15,7 @@ export declare function parseResponseFormat(responseFormat?: ResponseFormat): op
 export declare class OpenAI extends LLM {
   protected client: OpenAIClient;
   constructor(opts: LLMConfigOptions);
+  protected getVerbosityForModel(): "low" | "medium" | undefined;
   protected getClient(opts: LLMConfigOptions): OpenAIClient;
   uploadFile(data: Readable | Buffer, filename: string, contentType?: string): Promise<string>;
   protected chatCompletion(request: LLMRequest): Promise<LLMFullResponse>;