@budibase/pro 3.28.2 → 3.29.0

This diff shows the changes between two publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
package/dist/ai/llm.d.ts CHANGED
@@ -3,7 +3,9 @@ import openai from "openai";
3
3
  import { z } from "zod";
4
4
  import { LLM } from "./models/base";
5
5
  export declare function getLLMConfig(): Promise<LLMProviderConfig | undefined>;
6
- export declare function getLLM(options?: LLMConfigOptions): Promise<LLM | undefined>;
6
+ export declare function getLLM(options?: Omit<LLMConfigOptions, "model"> & {
7
+ model?: string;
8
+ }): Promise<LLM | undefined>;
7
9
  export declare function getLLMOrThrow(): Promise<LLM>;
8
10
  export declare function getOpenAIUsingLocalAPIKey(): Promise<LLM | undefined>;
9
11
  export declare class LLMRequest {
@@ -1,14 +1,14 @@
1
1
  import AnthropicClient from "@anthropic-ai/sdk";
2
- import { LLMConfigOptions, LLMStreamChunk } from "@budibase/types";
2
+ import { LLMConfigOptions } from "@budibase/types";
3
3
  import { LLMFullResponse } from "../../types/ai";
4
4
  import { LLMRequest } from "../llm";
5
5
  import { LLM } from "./base";
6
6
  export type AnthropicModel = "claude-3-5-sonnet-20240620" | "claude-3-sonnet-20240229" | "claude-3-opus-20240229" | "claude-3-haiku-20240307";
7
7
  export declare class Anthropic extends LLM {
8
+ supportsFiles: boolean;
8
9
  private client;
9
10
  constructor(opts: LLMConfigOptions);
10
11
  firstTextBlock(message: AnthropicClient.Messages.Message): string | undefined;
11
12
  uploadFile(_data?: any, _filename?: string, _contentType?: string): Promise<string>;
12
13
  protected chatCompletion(request: LLMRequest): Promise<LLMFullResponse>;
13
- protected chatCompletionStream(request: LLMRequest): AsyncGenerator<LLMStreamChunk, void, unknown>;
14
14
  }
@@ -2,6 +2,7 @@ import { LLMConfigOptions } from "@budibase/types";
2
2
  import { OpenAI } from "./openai";
3
3
  import { AzureOpenAI as AzureOpenAIClient } from "openai";
4
4
  export declare class AzureOpenAI extends OpenAI {
5
+ supportsFiles: boolean;
5
6
  protected getClient(opts: LLMConfigOptions): AzureOpenAIClient;
6
7
  protected getVerbosityForModel(): "low" | "medium" | undefined;
7
8
  }
@@ -1,4 +1,4 @@
1
- import { AIFieldMetadata, EnrichedBinding, LLMConfigOptions, LLMStreamChunk, Row, Snippet } from "@budibase/types";
1
+ import { AIFieldMetadata, EnrichedBinding, LLMConfigOptions, Row, Snippet } from "@budibase/types";
2
2
  import { Readable } from "node:stream";
3
3
  import { LLMFullResponse, LLMPromptResponse } from "../../types/ai";
4
4
  import { LLMRequest } from "../llm";
@@ -10,12 +10,11 @@ export declare abstract class LLM {
10
10
  get model(): string;
11
11
  get apiKey(): string | undefined;
12
12
  get maxTokens(): number;
13
+ abstract supportsFiles: boolean;
13
14
  protected abstract chatCompletion(request: LLMRequest): Promise<LLMFullResponse>;
14
- protected abstract chatCompletionStream(request: LLMRequest): AsyncGenerator<LLMStreamChunk, void, unknown>;
15
15
  prompt(requestOrString: string | LLMRequest): Promise<LLMPromptResponse>;
16
16
  abstract uploadFile(data: Readable | Buffer, filename: string, contentType?: string): Promise<string>;
17
17
  chat(request: LLMRequest): Promise<LLMFullResponse>;
18
- chatStream(request: LLMRequest): AsyncGenerator<LLMStreamChunk, void, unknown>;
19
18
  summarizeText(prompt: string): Promise<LLMPromptResponse>;
20
19
  generateCronExpression(prompt: string): Promise<LLMPromptResponse>;
21
20
  operation(schema: AIFieldMetadata, row: Row): Promise<LLMPromptResponse>;
@@ -1,9 +1,9 @@
1
- import { LLMStreamChunk } from "@budibase/types";
2
1
  import { Readable } from "node:stream";
3
2
  import { LLMFullResponse, LLMPromptResponse } from "../../types/ai";
4
3
  import { LLMRequest } from "../llm";
5
4
  import { LLM } from "./base";
6
5
  export declare class BudibaseAI extends LLM {
6
+ supportsFiles: boolean;
7
7
  prompt(prompt: string | LLMRequest): Promise<LLMPromptResponse>;
8
8
  chat(prompt: LLMRequest): Promise<LLMFullResponse>;
9
9
  uploadFile(data: Readable | Buffer, filename: string, contentType: string): Promise<string>;
@@ -12,7 +12,4 @@ export declare class BudibaseAI extends LLM {
12
12
  protected chatCompletion(prompt: LLMRequest): Promise<LLMFullResponse>;
13
13
  protected chatCompletionCloud(prompt: LLMRequest): Promise<LLMFullResponse>;
14
14
  protected chatCompletionSelfHost(prompt: LLMRequest): Promise<LLMFullResponse>;
15
- protected chatCompletionStream(request: LLMRequest): AsyncGenerator<LLMStreamChunk, void, unknown>;
16
- protected chatCompletionStreamCloud(request: LLMRequest): AsyncGenerator<LLMStreamChunk, void, unknown>;
17
- protected chatCompletionStreamSelfHost(request: LLMRequest): AsyncGenerator<LLMStreamChunk, void, unknown>;
18
15
  }
@@ -1,4 +1,4 @@
1
- import { LLMConfigOptions, LLMStreamChunk, ResponseFormat } from "@budibase/types";
1
+ import { LLMConfigOptions, ResponseFormat } from "@budibase/types";
2
2
  import { Readable } from "node:stream";
3
3
  import { default as openai, default as OpenAIClient } from "openai";
4
4
  import { LLMFullResponse } from "../../types/ai";
@@ -14,9 +14,9 @@ export declare function parseResponseFormat(responseFormat?: ResponseFormat): op
14
14
  export declare class OpenAI extends LLM {
15
15
  protected client: OpenAIClient;
16
16
  constructor(opts: LLMConfigOptions);
17
+ supportsFiles: boolean;
17
18
  protected getVerbosityForModel(): "low" | "medium" | undefined;
18
19
  protected getClient(opts: LLMConfigOptions): OpenAIClient;
19
20
  uploadFile(data: Readable | Buffer, filename: string, contentType?: string): Promise<string>;
20
21
  protected chatCompletion(request: LLMRequest): Promise<LLMFullResponse>;
21
- protected chatCompletionStream(request: LLMRequest): AsyncGenerator<LLMStreamChunk, void, unknown>;
22
22
  }
@@ -6,7 +6,7 @@ export interface AutomationAgentToolGuideline {
6
6
  guidelines: string;
7
7
  }
8
8
  export declare function summarizeText(text: string, length?: SummariseLength): LLMRequest;
9
- export declare function extractFileData(schema: Record<string, any>, fileIdOrDataUrl: string): LLMRequest;
9
+ export declare function extractFileData(schema: Record<string, any>, fileIdOrDataUrl: string, supportsFile: boolean): LLMRequest;
10
10
  export declare function classifyText(text: string, categories: string[]): LLMRequest;
11
11
  export declare function cleanData(text: string): LLMRequest;
12
12
  export declare function generateSQL(prompt: string, tableSchema: string): LLMRequest;
@@ -12,9 +12,9 @@ export declare const generationStructure: z.ZodObject<{
12
12
  number: FieldType.NUMBER;
13
13
  boolean: FieldType.BOOLEAN;
14
14
  }>;
15
- constraints: z.ZodNullable<z.ZodOptional<z.ZodObject<{
15
+ constraints: z.ZodNullable<z.ZodObject<{
16
16
  presence: z.ZodBoolean;
17
- }, z.core.$strip>>>;
17
+ }, z.core.$strip>>;
18
18
  }, z.core.$strip>, z.ZodObject<{
19
19
  name: z.ZodString;
20
20
  type: z.ZodLiteral<FieldType.LINK>;
package/dist/index.d.ts CHANGED
@@ -1,8 +1,8 @@
1
1
  export * from "./types";
2
2
  export * from "./sdk";
3
3
  export * as sdk from "./sdk";
4
+ export * as db from "./db";
4
5
  export * as constants from "./constants";
5
6
  export * as middleware from "./middleware";
6
- export * as api from "./api";
7
7
  export * as mappers from "./mappers";
8
8
  export * as ai from "./ai";