@squidcloud/client 1.0.383 → 1.0.384
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,7 +4,7 @@ import { IntegrationType } from './integration.public-types';
 import { JobId } from './job.public-types';
 import { SecretKey } from './secret.public-types';
 /** List of available AI provider types. See AiProviderType. */
-export declare const AI_PROVIDER_TYPES: readonly ["anthropic", "flux", "gemini", "openai", "grok", "
+export declare const AI_PROVIDER_TYPES: readonly ["anthropic", "flux", "gemini", "openai", "grok", "stability", "voyage", "external"];
 /**
  * Type of the AI provider.
  * References a single AI service, regardless of the model or other AI function it provides (like
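Note: a minimal sketch of how the updated AI_PROVIDER_TYPES tuple above can back a runtime membership check. The package-root import path and the isAiProviderType helper are assumptions for illustration, not part of the published API.

    // Sketch only: assumes AI_PROVIDER_TYPES and AiProviderType are re-exported from the package root.
    import { AI_PROVIDER_TYPES, AiProviderType } from '@squidcloud/client';

    // Narrow an arbitrary string to the AiProviderType union at runtime.
    function isAiProviderType(value: string): value is AiProviderType {
      return (AI_PROVIDER_TYPES as readonly string[]).includes(value);
    }

    console.log(isAiProviderType('external')); // true as of 1.0.384
    console.log(isAiProviderType('banana'));   // false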
@@ -31,40 +31,16 @@ export declare const GEMINI_CHAT_MODEL_NAMES: readonly ["gemini-2.5-pro", "gemin
 /**
  * @category AI
  */
-export declare const GROK_CHAT_MODEL_NAMES: readonly ["grok-3", "grok-3-fast", "grok-3-mini", "grok-3-mini-fast"];
+export declare const GROK_CHAT_MODEL_NAMES: readonly ["grok-3", "grok-3-fast", "grok-3-mini", "grok-3-mini-fast", "grok-4"];
 /**
  * @category AI
  */
 export declare const ANTHROPIC_CHAT_MODEL_NAMES: readonly ["claude-3-7-sonnet-latest", "claude-opus-4-20250514", "claude-sonnet-4-20250514"];
-/**
- * AI model names supported internally by Squid.
- *
- * Currently, Squid supports only one model: 'dictionary'. In this model, the response to each prompt message
- * is taken from a predefined dictionary specified in the agent's instructions.
- *
- * The instructions follow the following format:
- *
- * ~> Prompt1
- * <~ Response1
- *
- * Both prompt and response can be written in a multiline format. Blank leading and trailing lines are ignored.
- *
- * ~>
- * Prompt2 line1
- * Prompt2 line2
- * <~
- * Response2 line 1
- * Response2 line 2
- *
- * If the prompt is missed in the dictionary the answer will be 'No entry found for this prompt.'
- * @category AI
- */
-export declare const SQUID_AI_MODEL_NAMES: readonly ["dictionary"];
 /**
  * The supported AI model names.
  * @category AI
  */
-export declare const VENDOR_AI_CHAT_MODEL_NAMES: readonly ["gpt-4o", "gpt-4o-mini", "gpt-4.1-nano", "gpt-4.1-mini", "gpt-4.1", "o1", "o1-mini", "o3", "o3-mini", "o4-mini", "claude-3-7-sonnet-latest", "claude-opus-4-20250514", "claude-sonnet-4-20250514", "gemini-2.5-pro", "gemini-2.5-flash", "grok-3", "grok-3-fast", "grok-3-mini", "grok-3-mini-fast", "
+export declare const VENDOR_AI_CHAT_MODEL_NAMES: readonly ["gpt-4o", "gpt-4o-mini", "gpt-4.1-nano", "gpt-4.1-mini", "gpt-4.1", "o1", "o1-mini", "o3", "o3-mini", "o4-mini", "claude-3-7-sonnet-latest", "claude-opus-4-20250514", "claude-sonnet-4-20250514", "gemini-2.5-pro", "gemini-2.5-flash", "grok-3", "grok-3-fast", "grok-3-mini", "grok-3-mini-fast", "grok-4"];
 /**
  * Check if the given model name is a global AI chat model name.
  */
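Note: the model-name unions in this file are derived from these readonly tuples (for example, GrokChatModelName in the following hunk is (typeof GROK_CHAT_MODEL_NAMES)[number]), so extending a tuple widens the corresponding union automatically. A minimal sketch, assuming the constant and the type are re-exported from the package root:

    import { GROK_CHAT_MODEL_NAMES, GrokChatModelName } from '@squidcloud/client';

    // "grok-4" is now a member of the union because the tuple gained a "grok-4" entry.
    const model: GrokChatModelName = 'grok-4';

    // The same list is available at runtime, e.g. for validation or UI pickers.
    console.log(GROK_CHAT_MODEL_NAMES.includes(model)); // true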
@@ -154,10 +130,6 @@ export type GeminiChatModelName = (typeof GEMINI_CHAT_MODEL_NAMES)[number];
  * @category AI
  */
 export type GrokChatModelName = (typeof GROK_CHAT_MODEL_NAMES)[number];
-/**
- * @category AI
- */
-export type SquidAiModelName = (typeof SQUID_AI_MODEL_NAMES)[number];
 /**
  * @category AI
  */
@@ -570,7 +542,7 @@ export interface AnthropicChatOptions extends BaseAiChatOptions {
  * the type is inferred from the provided overrideModel (or falls back to BaseAiAgentChatOptions).
  * @category AI
  */
-export type AiChatOptions<T extends AiChatModelName | undefined = undefined> = T extends undefined ? BaseAiChatOptions | GeminiChatOptions | OpenAiReasoningChatOptions | OpenAiChatOptions | AnthropicChatOptions : T extends GeminiChatModelName ? GeminiChatOptions : T extends OpenAiReasoningChatModelName ? OpenAiReasoningChatOptions : T extends OpenAiChatModelName ? OpenAiChatOptions : T extends AnthropicChatModelName ? AnthropicChatOptions : T extends GrokChatModelName ? GrokChatOptions :
+export type AiChatOptions<T extends AiChatModelName | undefined = undefined> = T extends undefined ? BaseAiChatOptions | GeminiChatOptions | OpenAiReasoningChatOptions | OpenAiChatOptions | AnthropicChatOptions : T extends GeminiChatModelName ? GeminiChatOptions : T extends OpenAiReasoningChatModelName ? OpenAiReasoningChatOptions : T extends OpenAiChatModelName ? OpenAiChatOptions : T extends AnthropicChatModelName ? AnthropicChatOptions : T extends GrokChatModelName ? GrokChatOptions : never;
 /**
  * @category AI
  */
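Note: the AiChatOptions<T> conditional type shown above resolves each concrete model name to its provider-specific options type, with never as the final fallback. A minimal sketch under the assumption that the types are re-exported from the package root; the chat function is hypothetical, not part of the package API:

    import { AiChatModelName, AiChatOptions } from '@squidcloud/client';

    // Hypothetical helper: the options parameter narrows to the shape that matches the model,
    // e.g. GrokChatOptions for "grok-4" and GeminiChatOptions for "gemini-2.5-pro".
    declare function chat<T extends AiChatModelName>(model: T, options: AiChatOptions<T>): Promise<string>;

    // chat('grok-4', { /* checked as GrokChatOptions */ });
    // chat('gemini-2.5-pro', { /* checked as GeminiChatOptions */ });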
@@ -1,11 +1,13 @@
 import { AgentContextRequest, AiAgent, AiAgentContext, AiAudioCreateSpeechOptions, AiChatModelName, AiChatOptions, AiContextMetadata, AiContextMetadataFilter, AiEmbeddingsModelName, AiGenerateImageOptions, AiProviderType, AiSearchOptions, AiSearchResultChunk, AllAiAgentChatOptions, ApplicationAiSettings } from '../public-types/ai-agent.public-types';
-import { AiFunctionId } from '../public-types/backend.public-types';
 import { AiAgentId, AppId, ClientRequestId } from '../public-types/communication.public-types';
 import { JobId } from '../public-types/job.public-types';
 import { SecretKey } from '../public-types/secret.public-types';
 /** Specification for an AI model, defining its operational parameters. */
 export type AiModelSpec = {
-
+    /** Maximum tokens the model can generate in a single response. */
+    maxOutputTokens: number;
+    /** Total context window size: input (aka prompt) + output (aka completion) tokens combined. */
+    contextWindowTokens: number;
 };
 /** Map of AI model details by model name. */
 export declare const AI_VENDOR_MODEL_SPECS: Record<AiChatModelName, AiModelSpec>;
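Note: a minimal sketch of how the new AiModelSpec fields can be combined, following the documented relationship that the context window covers input plus output tokens. The package-root import path and the maxInputTokens helper are assumptions for illustration:

    import { AI_VENDOR_MODEL_SPECS, AiChatModelName } from '@squidcloud/client';

    // Remaining prompt budget = total context window minus the maximum output allowance.
    function maxInputTokens(model: AiChatModelName): number {
      const spec = AI_VENDOR_MODEL_SPECS[model];
      return spec.contextWindowTokens - spec.maxOutputTokens;
    }

    console.log(maxInputTokens('grok-4'));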
@@ -137,18 +139,6 @@ export interface SetAiProviderApiKeySecretResponse {
 export declare function validateAiContextMetadata(metadata: AiContextMetadata): void;
 export declare function validateAiContextMetadataFilter(filter: AiContextMetadataFilter): void;
 export declare const METADATA_SERVICE_FUNCTION_NAME = "default:metadata";
-/**
- * A dictionary model contains either direct prompt-to-answer pairs or prompt-to-function/call-agent instructions.
- * Keys within this dictionary model are treated as case-insensitive by the Squid system.
- */
-export type AiDictionaryModel = Record<string, string | AiDictionaryFunctionCall>;
-export interface AiDictionaryFunctionCall {
-    type: 'function';
-    /** ID (name) of the AI function to call. */
-    id: AiFunctionId;
-    /** Args to pass to the AI function. */
-    args: Record<string, unknown>;
-}
 export interface UpdateApplicationAiSettingsWebhookPayload {
     appId: AppId;
     aiSettings: ApplicationAiSettings;