@squidcloud/client 1.0.325 → 1.0.326

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,7 +1,9 @@
- import { FunctionName, FunctionNameWithContext } from './bundle-data.public-types';
  import { AiAgentId, IntegrationId } from './communication.public-types';
  import { IntegrationType } from './integration.public-types';
+ import { FunctionName, FunctionNameWithContext } from './bundle-data.public-types';
  /** The supported OpenAI models */
+ export declare const OPENAI_O1_CHAT_MODEL_NAMES: readonly ["o1", "o1-mini"];
+ export declare const OPENAI_REASONING_CHAT_MODEL_NAMES: readonly ["o1", "o1-mini", "o3-mini"];
  export declare const OPENAI_CHAT_MODEL_NAMES: readonly ["gpt-4o", "gpt-4o-mini", "o1", "o1-mini", "o3-mini"];
  export declare const GEMINI_CHAT_MODEL_NAMES: readonly ["gemini-1.5-pro", "gemini-2.0-flash"];
  export declare const ANTHROPIC_CHAT_MODEL_NAMES: readonly ["claude-3-5-haiku-latest", "claude-3-5-sonnet-latest"];
@@ -23,6 +25,7 @@ export declare const OPEN_AI_CREATE_SPEECH_FORMATS: readonly ["mp3", "opus", "aa
  export type AiChatModelName = (typeof AI_CHAT_MODEL_NAMES)[number];
  export type AiEmbeddingsModelName = (typeof AI_EMBEDDINGS_MODEL_NAMES)[number];
  export type OpenAiChatModelName = (typeof OPENAI_CHAT_MODEL_NAMES)[number];
+ export type OpenAiReasoningChatModelName = (typeof OPENAI_REASONING_CHAT_MODEL_NAMES)[number];
  export type GeminiChatModelName = (typeof GEMINI_CHAT_MODEL_NAMES)[number];
  export type AnthropicChatModelName = (typeof ANTHROPIC_CHAT_MODEL_NAMES)[number];
  export type AiImageModelName = (typeof AI_IMAGE_MODEL_NAMES)[number];
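
The new OPENAI_REASONING_CHAT_MODEL_NAMES tuple together with the (typeof ...)[number] lookup gives callers a string-literal union for the reasoning models. A minimal sketch of how that can be used, assuming these declarations are re-exported from the package root; the isReasoningModel helper is illustrative and not part of the package:

import { OPENAI_REASONING_CHAT_MODEL_NAMES, OpenAiReasoningChatModelName } from '@squidcloud/client';

// OpenAiReasoningChatModelName resolves to 'o1' | 'o1-mini' | 'o3-mini'.
const model: OpenAiReasoningChatModelName = 'o3-mini';

// The readonly tuple can also serve as a runtime guard (hypothetical helper, not a package API).
function isReasoningModel(name: string): name is OpenAiReasoningChatModelName {
  return (OPENAI_REASONING_CHAT_MODEL_NAMES as readonly string[]).includes(name);
}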
@@ -135,8 +138,7 @@ export interface AiChatPromptQuotas {
  */
  maxAiCallStackSize: number;
  }
- /** The options for the AI chat method. */
- export interface AiAgentChatOptions {
+ interface BaseAiAgentChatOptions {
  /** The maximum number of tokens to use when making the request to the AI model. Default to the max tokens the model can accept. */
  maxTokens?: number;
  /** A unique chat ID, if the same chat ID is used again and history is not disabled, it will continue the conversation. */
@@ -155,16 +157,6 @@ export interface AiAgentChatOptions {
  responseFormat?: OpenAiResponseFormat;
  /** Whether to response in a "smooth typing" way, beneficial when the chat result is displayed in a UI. Default to true. */
  smoothTyping?: boolean;
- /** The temperature to use when sampling from the model. Default to 0.5. */
- temperature?: number;
- /** The top P value to use when sampling from the model. Default to 1. */
- topP?: number;
- /** The model to use for this chat. If not provided, the profile model will be used. */
- overrideModel?: AiChatModelName;
- /** File URLs (only images supported at the moment). */
- fileUrls?: Array<AiFileUrl>;
- /** Constrains effort on reasoning for reasoning models. o1 models only. */
- reasoningEffort?: 'low' | 'medium' | 'high';
  /** Global context passed to the agent and all AI functions of the agent. */
  agentContext?: Record<string, unknown>;
  /**
@@ -188,10 +180,27 @@ export interface AiAgentChatOptions {
  quotas?: AiChatPromptQuotas;
  /** Include metadata in the context */
  includeMetadata?: boolean;
- /** Enables grounding with real-time web search to enhance AI responses with up-to-date information.
- * Currently supported only for gemini-2.0-flash. */
+ /** The temperature to use when sampling from the model. Default to 0.5. */
+ temperature?: number;
+ overrideModel?: AiChatModelName;
+ }
+ export interface GeminiChatOptions extends BaseAiAgentChatOptions {
+ overrideModel?: GeminiChatModelName;
  groundingWithWebSearch?: boolean;
  }
+ export interface OpenAiReasoningChatOptions extends BaseAiAgentChatOptions {
+ overrideModel?: OpenAiReasoningChatModelName;
+ reasoningEffort?: 'low' | 'medium' | 'high';
+ }
+ export interface OpenAiChatOptions extends BaseAiAgentChatOptions {
+ overrideModel?: OpenAiChatModelName;
+ topP?: number;
+ fileUrls?: Array<AiFileUrl>;
+ }
+ export interface AnthropicChatOptions extends BaseAiAgentChatOptions {
+ overrideModel?: AnthropicChatModelName;
+ }
+ export type AiAgentChatOptions<T extends AiChatModelName | undefined = undefined> = T extends undefined ? BaseAiAgentChatOptions | GeminiChatOptions | OpenAiReasoningChatOptions | OpenAiChatOptions | AnthropicChatOptions : T extends GeminiChatModelName ? GeminiChatOptions : T extends OpenAiReasoningChatModelName ? OpenAiReasoningChatOptions : T extends OpenAiChatModelName ? OpenAiChatOptions : T extends AnthropicChatModelName ? AnthropicChatOptions : never;
  export interface AiObserveStatusOptions {
  /** A unique chat ID for the conversation. */
  chatId?: string;
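
AiAgentChatOptions is now a conditional type that resolves to the model-specific options interface for a given model name, falling back to the union of all option interfaces when no type argument is supplied. A minimal sketch of the narrowing, assuming these types are re-exported from the package root; the chatWith helper is hypothetical and only illustrates how caller code can stay generic over the model:

import { AiAgentChatOptions, AiChatModelName } from '@squidcloud/client';

// The conditional type picks the matching options interface:
type O1Options = AiAgentChatOptions<'o1'>;                    // OpenAiReasoningChatOptions (reasoningEffort allowed)
type Gpt4oOptions = AiAgentChatOptions<'gpt-4o'>;             // OpenAiChatOptions (topP, fileUrls allowed)
type GeminiOptions = AiAgentChatOptions<'gemini-2.0-flash'>;  // GeminiChatOptions (groundingWithWebSearch allowed)
type AnyOptions = AiAgentChatOptions;                         // union of all option interfaces

// Hypothetical helper showing the generic in caller code (not a package API).
function chatWith<T extends AiChatModelName>(model: T, options: AiAgentChatOptions<T>): void {
  // ... forward `model` and `options` to the Squid client ...
}

chatWith('o1', { reasoningEffort: 'high' });        // OK: narrowed to OpenAiReasoningChatOptions
chatWith('gpt-4o', { topP: 0.9, maxTokens: 2048 }); // OK: narrowed to OpenAiChatOptions
// chatWith('gpt-4o', { reasoningEffort: 'high' }); // rejected: reasoningEffort exists only on the reasoning options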
@@ -271,7 +280,7 @@ export interface AiFileContext extends AiContextBase {
  }
  /**
  * Provides context to the security rules of an AI Agent for each new user prompt.
- * Refer to the `@secureAiChatbotChat` annotation for more details.
+ * Refer to the `@secureAiAgent` annotation for more details.
  */
  export declare class AiChatContext {
  readonly prompt?: string;
@@ -319,3 +328,4 @@ export interface AiContextMetadataOrFilter {
  }
  export type AiContextMetadataFilter = AiContextMetadataFieldFilter | AiContextMetadataAndFilter | AiContextMetadataOrFilter;
  export type AiContextMetadata = Record<string, AiContextMetadataValue>;
+ export {};
@@ -7,8 +7,6 @@ export type StorageActionType = 'read' | 'write' | 'update' | 'insert' | 'delete
  export type TopicActionType = 'read' | 'write' | 'all';
  /** The different type of actions for metrics. */
  export type MetricActionType = 'write' | 'all';
- /** Types of actions that can be performed on an AI chatbot. */
- export type AiActionType = 'chat' | 'mutate' | 'all';
  export type AiFunctionParamType = 'string' | 'number' | 'boolean' | 'date';
  export interface AiFunctionParam {
  name: string;
@@ -16,6 +16,8 @@ export interface AiChatOptions {
  agentId?: AiAgentId;
  /** A unique chat ID for the conversation. */
  chatId?: string;
+ /** A unique ID for the current trace. */
+ traceId?: string;
  /** Global per-agent context passed to all AI functions. Must be a serializable JSON. */
  agentContext?: Record<string, unknown>;
  /** List of AI functions available for the agent. Overrides the stored list.*/
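
AiChatOptions gains an optional traceId for correlating a chat call with a trace. A minimal sketch of the field in use, assuming AiChatOptions is re-exported from the package root and that none of its unseen fields are required; the ID values are placeholders:

import { AiChatOptions } from '@squidcloud/client';

const options: AiChatOptions = {
  chatId: 'support-session-42', // continues an existing conversation if history is enabled
  traceId: 'trace-7f3a',        // new in 1.0.326: identifies the current trace
};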
@@ -38,7 +38,7 @@ export interface LogEvent {
  source?: string;
  service?: string;
  }
- export declare const AUDIT_LOG_EVENT_NAMES: readonly ["ai_agent_prompt"];
+ export declare const AUDIT_LOG_EVENT_NAMES: readonly ["ai_agent"];
  export type AuditLogEventName = (typeof AUDIT_LOG_EVENT_NAMES)[number];
  export interface AuditLogEvent {
  appId: AppId;
@@ -1 +1 @@
- export declare const SQUIDCLOUD_CLIENT_PACKAGE_VERSION = "1.0.325";
+ export declare const SQUIDCLOUD_CLIENT_PACKAGE_VERSION = "1.0.326";
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@squidcloud/client",
- "version": "1.0.325",
+ "version": "1.0.326",
  "description": "A typescript implementation of the Squid client",
  "main": "dist/cjs/index.js",
  "types": "dist/typescript-client/src/index.d.ts",