@budibase/pro 3.31.4 → 3.31.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ai/index.d.ts +1 -3
- package/dist/ai/llm.d.ts +2 -11
- package/dist/index.js +46 -49
- package/package.json +2 -2
- package/dist/ai/models/anthropic.d.ts +0 -13
- package/dist/ai/models/azureOpenai.d.ts +0 -8
- package/dist/ai/models/base.d.ts +0 -17
- package/dist/ai/models/budibaseai.d.ts +0 -11
- package/dist/ai/models/index.d.ts +0 -2
- package/dist/ai/models/openai.d.ts +0 -20
package/package.json
CHANGED
|
@@ -4,7 +4,7 @@
|
|
|
4
4
|
"dist"
|
|
5
5
|
],
|
|
6
6
|
"license": "UNLICENSED",
|
|
7
|
-
"version": "3.31.
|
|
7
|
+
"version": "3.31.5",
|
|
8
8
|
"description": "Budibase Pro (Backend)",
|
|
9
9
|
"main": "dist/index.js",
|
|
10
10
|
"types": "dist/index.d.ts",
|
|
@@ -69,5 +69,5 @@
|
|
|
69
69
|
}
|
|
70
70
|
}
|
|
71
71
|
},
|
|
72
|
-
"gitHead": "
|
|
72
|
+
"gitHead": "8b6307f21fc1e0496119295b4417d57e13857c6a"
|
|
73
73
|
}
|
|
package/dist/ai/models/anthropic.d.ts
DELETED
|
@@ -1,13 +0,0 @@
|
|
|
1
|
-
import AnthropicClient from "@anthropic-ai/sdk";
|
|
2
|
-
import { LLMConfigOptions } from "@budibase/types";
|
|
3
|
-
import { LLMFullResponse } from "../../types/ai";
|
|
4
|
-
import { LLMRequest } from "../llm";
|
|
5
|
-
import { LLM } from "./base";
|
|
6
|
-
export type AnthropicModel = "claude-3-5-sonnet-20240620" | "claude-3-sonnet-20240229" | "claude-3-opus-20240229" | "claude-3-haiku-20240307";
|
|
7
|
-
export declare class Anthropic extends LLM {
|
|
8
|
-
supportsFiles: boolean;
|
|
9
|
-
private client;
|
|
10
|
-
constructor(opts: LLMConfigOptions);
|
|
11
|
-
firstTextBlock(message: AnthropicClient.Messages.Message): string | undefined;
|
|
12
|
-
protected chatCompletion(request: LLMRequest): Promise<LLMFullResponse>;
|
|
13
|
-
}
|
|
package/dist/ai/models/azureOpenai.d.ts
DELETED
|
@@ -1,8 +0,0 @@
|
|
|
1
|
-
import { LLMConfigOptions } from "@budibase/types";
|
|
2
|
-
import { OpenAI } from "./openai";
|
|
3
|
-
import { AzureOpenAI as AzureOpenAIClient } from "openai";
|
|
4
|
-
export declare class AzureOpenAI extends OpenAI {
|
|
5
|
-
supportsFiles: boolean;
|
|
6
|
-
protected getClient(opts: LLMConfigOptions): AzureOpenAIClient;
|
|
7
|
-
protected getVerbosityForModel(): "low" | "medium" | undefined;
|
|
8
|
-
}
|
package/dist/ai/models/base.d.ts
DELETED
|
@@ -1,17 +0,0 @@
|
|
|
1
|
-
import { LLMConfigOptions } from "@budibase/types";
|
|
2
|
-
import { LLMFullResponse, LLMPromptResponse } from "../../types/ai";
|
|
3
|
-
import { LLMRequest } from "../llm";
|
|
4
|
-
export declare abstract class LLM {
|
|
5
|
-
protected _model: string;
|
|
6
|
-
protected _apiKey?: string;
|
|
7
|
-
protected _maxTokens: number;
|
|
8
|
-
constructor({ model, apiKey, maxTokens }: LLMConfigOptions);
|
|
9
|
-
get model(): string;
|
|
10
|
-
get apiKey(): string | undefined;
|
|
11
|
-
get maxTokens(): number;
|
|
12
|
-
abstract supportsFiles: boolean;
|
|
13
|
-
protected abstract chatCompletion(request: LLMRequest): Promise<LLMFullResponse>;
|
|
14
|
-
prompt(requestOrString: string | LLMRequest): Promise<LLMPromptResponse>;
|
|
15
|
-
chat(request: LLMRequest): Promise<LLMFullResponse>;
|
|
16
|
-
summarizeText(prompt: string): Promise<LLMPromptResponse>;
|
|
17
|
-
}
|
|
package/dist/ai/models/budibaseai.d.ts
DELETED
|
@@ -1,11 +0,0 @@
|
|
|
1
|
-
import { LLMFullResponse, LLMPromptResponse } from "../../types";
|
|
2
|
-
import { LLMRequest } from "../llm";
|
|
3
|
-
import { LLM } from "./base";
|
|
4
|
-
export declare class BudibaseAI extends LLM {
|
|
5
|
-
supportsFiles: boolean;
|
|
6
|
-
prompt(prompt: string | LLMRequest): Promise<LLMPromptResponse>;
|
|
7
|
-
chat(prompt: LLMRequest): Promise<LLMFullResponse>;
|
|
8
|
-
protected chatCompletion(prompt: LLMRequest): Promise<LLMFullResponse>;
|
|
9
|
-
protected chatCompletionCloud(prompt: LLMRequest): Promise<LLMFullResponse>;
|
|
10
|
-
protected chatCompletionSelfHost(prompt: LLMRequest): Promise<LLMFullResponse>;
|
|
11
|
-
}
|
|
package/dist/ai/models/openai.d.ts
DELETED
|
@@ -1,20 +0,0 @@
|
|
|
1
|
-
import { LLMConfigOptions, ResponseFormat } from "@budibase/types";
|
|
2
|
-
import { default as openai, default as OpenAIClient } from "openai";
|
|
3
|
-
import { LLMFullResponse } from "../../types/ai";
|
|
4
|
-
import { LLMRequest } from "../llm";
|
|
5
|
-
import { LLM } from "./base";
|
|
6
|
-
export type OpenAIModel = "gpt-4o-mini" | "gpt-4o" | "gpt-4" | "gpt-4.1" | "gpt-4.1-mini" | "gpt-4-turbo" | "gpt-3.5-turbo" | "gpt-5" | "gpt-5-mini" | "gpt-5-nano";
|
|
7
|
-
export declare enum GPT_5_MODELS {
|
|
8
|
-
GPT_5_MINI = "gpt-5-mini",
|
|
9
|
-
GPT_5 = "gpt-5",
|
|
10
|
-
GPT_5_NANO = "gpt-5-nano"
|
|
11
|
-
}
|
|
12
|
-
export declare function parseResponseFormat(responseFormat?: ResponseFormat): openai.ResponseFormatText | openai.ResponseFormatJSONObject | openai.ResponseFormatJSONSchema | undefined;
|
|
13
|
-
export declare class OpenAI extends LLM {
|
|
14
|
-
protected client: OpenAIClient;
|
|
15
|
-
constructor(opts: LLMConfigOptions);
|
|
16
|
-
supportsFiles: boolean;
|
|
17
|
-
protected getVerbosityForModel(): "low" | "medium" | undefined;
|
|
18
|
-
protected getClient(opts: LLMConfigOptions): OpenAIClient;
|
|
19
|
-
protected chatCompletion(request: LLMRequest): Promise<LLMFullResponse>;
|
|
20
|
-
}
|