@superatomai/sdk-node 0.0.62 → 0.0.63

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.mts CHANGED
@@ -864,6 +864,12 @@ interface SuperatomSDKConfig {
  * Default: 0.8
  */
  conversationSimilarityThreshold?: number;
+ /**
+ * Query cache TTL (Time To Live) in minutes
+ * Cached query results expire after this duration
+ * Default: 5 minutes
+ */
+ queryCacheTTL?: number;
  }

  declare const KbNodesQueryFiltersSchema: z.ZodObject<{
@@ -2037,11 +2043,6 @@ interface BaseLLMConfig {
  * - 'balanced': Use best model for complex tasks, fast model for simple tasks (default)
  */
  modelStrategy?: ModelStrategy;
- /**
- * Similarity threshold for conversation search (semantic matching)
- * Value between 0 and 1 (e.g., 0.8 = 80% similarity required)
- * Default: 0.8
- */
  conversationSimilarityThreshold?: number;
  }
  /**
@@ -2109,6 +2110,29 @@ declare abstract class BaseLLM {
  * This checks both single Form components and Forms inside MultiComponentContainer
  */
  protected containsFormComponent(component: any): boolean;
+ /**
+ * Get the cache key for a query (the exact sql param that would be sent to execute)
+ * This ensures the cache key matches what the frontend will send
+ * Used for both caching and internal deduplication
+ */
+ private getQueryCacheKey;
+ /**
+ * Execute a query against the database for validation and caching
+ * @param query - The SQL query to execute (string or object with sql/values)
+ * @param collections - Collections object containing database execute function
+ * @returns Object with result data and cache key
+ * @throws Error if query execution fails
+ */
+ private executeQueryForValidation;
+ /**
+ * Request the LLM to fix a failed SQL query
+ * @param failedQuery - The query that failed execution
+ * @param errorMessage - The error message from the failed execution
+ * @param componentContext - Context about the component (name, type, title)
+ * @param apiKey - Optional API key
+ * @returns Fixed query string
+ */
+ private requestQueryFix;
  /**
  * Match components from text response suggestions and generate follow-up questions
  * Takes a text response with component suggestions (c1:type format) and matches with available components
@@ -2127,6 +2151,15 @@ declare abstract class BaseLLM {
  layoutDescription: string;
  actions: Action[];
  }>;
+ /**
+ * Validate component queries against the database and retry with LLM fixes if they fail
+ * @param components - Array of components with potential queries
+ * @param collections - Collections object containing database execute function
+ * @param apiKey - Optional API key for LLM calls
+ * @param logCollector - Optional log collector for logging
+ * @returns Object with validated components and a map of query results
+ */
+ private validateAndRetryComponentQueries;
  /**
  * Classify user question into category and detect external tools needed
  * Determines if question is for data analysis, requires external tools, or needs text response
@@ -2163,12 +2196,6 @@ declare abstract class BaseLLM {
  * This provides conversational text responses instead of component generation
  * Supports tool calling for query execution with automatic retry on errors (max 3 attempts)
  * After generating text response, if components are provided, matches suggested components
- * @param streamCallback - Optional callback function to receive text chunks as they stream
- * @param collections - Collection registry for executing database queries via database.execute
- * @param components - Optional list of available components for matching suggestions
- * @param externalTools - Optional array of external tools (email, calendar, etc.) that can be called
- * @param category - Question category ('data_analysis' | 'data_modification' | 'general'). For data_modification, answer component streaming is skipped. For general, component generation is skipped entirely.
- * @param userId - Optional user ID for fetching user-specific knowledge base nodes
  */
  generateTextResponse(userPrompt: string, apiKey?: string, logCollector?: any, conversationHistory?: string, streamCallback?: (chunk: string) => void, collections?: any, components?: Component[], externalTools?: any[], category?: 'data_analysis' | 'data_modification' | 'general', userId?: string): Promise<T_RESPONSE>;
  /**
@@ -2178,11 +2205,6 @@ declare abstract class BaseLLM {
  * - If match found → Adapt UI block parameters and return
  * 2. Category classification: Determine if data_analysis, requires_external_tools, or text_response
  * 3. Route appropriately based on category and response mode
- *
- * @param responseMode - 'component' for component generation (default), 'text' for text responses
- * @param streamCallback - Optional callback function to receive text chunks as they stream (only for text mode)
- * @param collections - Collection registry for executing database queries (required for text mode)
- * @param externalTools - Optional array of external tools (email, calendar, etc.) that can be called (only for text mode)
  */
  handleUserRequest(userPrompt: string, components: Component[], apiKey?: string, logCollector?: any, conversationHistory?: string, responseMode?: 'component' | 'text', streamCallback?: (chunk: string) => void, collections?: any, externalTools?: any[], userId?: string): Promise<T_RESPONSE>;
  /**
@@ -2249,6 +2271,63 @@ declare class OpenAILLM extends BaseLLM {
  }
  declare const openaiLLM: OpenAILLM;

+ /**
+ * Query Cache - Stores query results with configurable TTL
+ * Used to avoid re-executing queries that were already validated
+ */
+ declare class QueryCache {
+ private cache;
+ private ttlMs;
+ private cleanupInterval;
+ constructor();
+ /**
+ * Set the cache TTL (Time To Live)
+ * @param minutes - TTL in minutes (default: 5)
+ */
+ setTTL(minutes: number): void;
+ /**
+ * Get the current TTL in minutes
+ */
+ getTTL(): number;
+ /**
+ * Store query result in cache
+ * Key is the exact query string (or JSON for parameterized queries)
+ */
+ set(query: string, data: any): void;
+ /**
+ * Get cached result if exists and not expired
+ */
+ get(query: string): any | null;
+ /**
+ * Check if query exists in cache (not expired)
+ */
+ has(query: string): boolean;
+ /**
+ * Remove a specific query from cache
+ */
+ delete(query: string): void;
+ /**
+ * Clear all cached entries
+ */
+ clear(): void;
+ /**
+ * Get cache statistics
+ */
+ getStats(): {
+ size: number;
+ oldestEntryAge: number | null;
+ };
+ /**
+ * Start periodic cleanup of expired entries
+ */
+ private startCleanup;
+ /**
+ * Stop cleanup interval (for graceful shutdown)
+ */
+ destroy(): void;
+ }
+ declare const queryCache: QueryCache;
+
  declare const SDK_VERSION = "0.0.8";
  type MessageTypeHandler = (message: IncomingMessage) => void | Promise<void>;
  declare class SuperatomSDK {
@@ -2400,4 +2479,4 @@ declare class SuperatomSDK {
  getConversationSimilarityThreshold(): number;
  }

- export { type Action, BM25L, type BM25LOptions, type BaseLLMConfig, CONTEXT_CONFIG, type CapturedLog, CleanupService, type CollectionHandler, type CollectionOperation, type DBUIBlock, type DatabaseType, type HybridSearchOptions, type IncomingMessage, type KbNodesQueryFilters, type KbNodesRequestPayload, LLM, type LLMUsageEntry, type LogLevel, type Message, type ModelStrategy, type OutputField, type RerankedResult, SDK_VERSION, STORAGE_CONFIG, SuperatomSDK, type SuperatomSDKConfig, type TaskType, Thread, ThreadManager, type Tool$1 as Tool, type ToolOutputSchema, UIBlock, UILogCollector, type User, UserManager, type UsersData, anthropicLLM, geminiLLM, groqLLM, hybridRerank, llmUsageLogger, logger, openaiLLM, rerankChromaResults, rerankConversationResults, userPromptErrorLogger };
+ export { type Action, BM25L, type BM25LOptions, type BaseLLMConfig, CONTEXT_CONFIG, type CapturedLog, CleanupService, type CollectionHandler, type CollectionOperation, type DBUIBlock, type DatabaseType, type HybridSearchOptions, type IncomingMessage, type KbNodesQueryFilters, type KbNodesRequestPayload, LLM, type LLMUsageEntry, type LogLevel, type Message, type ModelStrategy, type OutputField, type RerankedResult, SDK_VERSION, STORAGE_CONFIG, SuperatomSDK, type SuperatomSDKConfig, type TaskType, Thread, ThreadManager, type Tool$1 as Tool, type ToolOutputSchema, UIBlock, UILogCollector, type User, UserManager, type UsersData, anthropicLLM, geminiLLM, groqLLM, hybridRerank, llmUsageLogger, logger, openaiLLM, queryCache, rerankChromaResults, rerankConversationResults, userPromptErrorLogger };
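Note: a minimal TypeScript sketch of how the new surface in this file might be used from application code. The queryCacheTTL field and the queryCache method calls follow the declarations above; the import path is simply the package name, and any other (pre-existing) SDK configuration fields are omitted and assumed.

import { queryCache, type SuperatomSDKConfig } from '@superatomai/sdk-node';

// New in 0.0.63: cached query results expire after this many minutes (default: 5).
// Only the new field is shown here; other config fields are omitted from the sketch.
const cacheConfig: Pick<SuperatomSDKConfig, 'queryCacheTTL'> = {
  queryCacheTTL: 10,
};

// The exported queryCache singleton can also be driven directly:
queryCache.setTTL(cacheConfig.queryCacheTTL ?? 5);  // TTL in minutes
queryCache.set('SELECT 1', { rows: [[1]] });        // key is the exact query string; data is any result shape
if (queryCache.has('SELECT 1')) {
  const cached = queryCache.get('SELECT 1');        // returns null once the entry has expired
  console.log(cached, queryCache.getStats());       // { size, oldestEntryAge }
}
queryCache.destroy();                               // stop the periodic cleanup timer on shutdown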
package/dist/index.d.ts CHANGED
@@ -864,6 +864,12 @@ interface SuperatomSDKConfig {
  * Default: 0.8
  */
  conversationSimilarityThreshold?: number;
+ /**
+ * Query cache TTL (Time To Live) in minutes
+ * Cached query results expire after this duration
+ * Default: 5 minutes
+ */
+ queryCacheTTL?: number;
  }

  declare const KbNodesQueryFiltersSchema: z.ZodObject<{
@@ -2037,11 +2043,6 @@ interface BaseLLMConfig {
  * - 'balanced': Use best model for complex tasks, fast model for simple tasks (default)
  */
  modelStrategy?: ModelStrategy;
- /**
- * Similarity threshold for conversation search (semantic matching)
- * Value between 0 and 1 (e.g., 0.8 = 80% similarity required)
- * Default: 0.8
- */
  conversationSimilarityThreshold?: number;
  }
  /**
@@ -2109,6 +2110,29 @@ declare abstract class BaseLLM {
  * This checks both single Form components and Forms inside MultiComponentContainer
  */
  protected containsFormComponent(component: any): boolean;
+ /**
+ * Get the cache key for a query (the exact sql param that would be sent to execute)
+ * This ensures the cache key matches what the frontend will send
+ * Used for both caching and internal deduplication
+ */
+ private getQueryCacheKey;
+ /**
+ * Execute a query against the database for validation and caching
+ * @param query - The SQL query to execute (string or object with sql/values)
+ * @param collections - Collections object containing database execute function
+ * @returns Object with result data and cache key
+ * @throws Error if query execution fails
+ */
+ private executeQueryForValidation;
+ /**
+ * Request the LLM to fix a failed SQL query
+ * @param failedQuery - The query that failed execution
+ * @param errorMessage - The error message from the failed execution
+ * @param componentContext - Context about the component (name, type, title)
+ * @param apiKey - Optional API key
+ * @returns Fixed query string
+ */
+ private requestQueryFix;
  /**
  * Match components from text response suggestions and generate follow-up questions
  * Takes a text response with component suggestions (c1:type format) and matches with available components
@@ -2127,6 +2151,15 @@ declare abstract class BaseLLM {
  layoutDescription: string;
  actions: Action[];
  }>;
+ /**
+ * Validate component queries against the database and retry with LLM fixes if they fail
+ * @param components - Array of components with potential queries
+ * @param collections - Collections object containing database execute function
+ * @param apiKey - Optional API key for LLM calls
+ * @param logCollector - Optional log collector for logging
+ * @returns Object with validated components and a map of query results
+ */
+ private validateAndRetryComponentQueries;
  /**
  * Classify user question into category and detect external tools needed
  * Determines if question is for data analysis, requires external tools, or needs text response
@@ -2163,12 +2196,6 @@ declare abstract class BaseLLM {
  * This provides conversational text responses instead of component generation
  * Supports tool calling for query execution with automatic retry on errors (max 3 attempts)
  * After generating text response, if components are provided, matches suggested components
- * @param streamCallback - Optional callback function to receive text chunks as they stream
- * @param collections - Collection registry for executing database queries via database.execute
- * @param components - Optional list of available components for matching suggestions
- * @param externalTools - Optional array of external tools (email, calendar, etc.) that can be called
- * @param category - Question category ('data_analysis' | 'data_modification' | 'general'). For data_modification, answer component streaming is skipped. For general, component generation is skipped entirely.
- * @param userId - Optional user ID for fetching user-specific knowledge base nodes
  */
  generateTextResponse(userPrompt: string, apiKey?: string, logCollector?: any, conversationHistory?: string, streamCallback?: (chunk: string) => void, collections?: any, components?: Component[], externalTools?: any[], category?: 'data_analysis' | 'data_modification' | 'general', userId?: string): Promise<T_RESPONSE>;
  /**
@@ -2178,11 +2205,6 @@ declare abstract class BaseLLM {
  * - If match found → Adapt UI block parameters and return
  * 2. Category classification: Determine if data_analysis, requires_external_tools, or text_response
  * 3. Route appropriately based on category and response mode
- *
- * @param responseMode - 'component' for component generation (default), 'text' for text responses
- * @param streamCallback - Optional callback function to receive text chunks as they stream (only for text mode)
- * @param collections - Collection registry for executing database queries (required for text mode)
- * @param externalTools - Optional array of external tools (email, calendar, etc.) that can be called (only for text mode)
  */
  handleUserRequest(userPrompt: string, components: Component[], apiKey?: string, logCollector?: any, conversationHistory?: string, responseMode?: 'component' | 'text', streamCallback?: (chunk: string) => void, collections?: any, externalTools?: any[], userId?: string): Promise<T_RESPONSE>;
  /**
@@ -2249,6 +2271,63 @@ declare class OpenAILLM extends BaseLLM {
  }
  declare const openaiLLM: OpenAILLM;

+ /**
+ * Query Cache - Stores query results with configurable TTL
+ * Used to avoid re-executing queries that were already validated
+ */
+ declare class QueryCache {
+ private cache;
+ private ttlMs;
+ private cleanupInterval;
+ constructor();
+ /**
+ * Set the cache TTL (Time To Live)
+ * @param minutes - TTL in minutes (default: 5)
+ */
+ setTTL(minutes: number): void;
+ /**
+ * Get the current TTL in minutes
+ */
+ getTTL(): number;
+ /**
+ * Store query result in cache
+ * Key is the exact query string (or JSON for parameterized queries)
+ */
+ set(query: string, data: any): void;
+ /**
+ * Get cached result if exists and not expired
+ */
+ get(query: string): any | null;
+ /**
+ * Check if query exists in cache (not expired)
+ */
+ has(query: string): boolean;
+ /**
+ * Remove a specific query from cache
+ */
+ delete(query: string): void;
+ /**
+ * Clear all cached entries
+ */
+ clear(): void;
+ /**
+ * Get cache statistics
+ */
+ getStats(): {
+ size: number;
+ oldestEntryAge: number | null;
+ };
+ /**
+ * Start periodic cleanup of expired entries
+ */
+ private startCleanup;
+ /**
+ * Stop cleanup interval (for graceful shutdown)
+ */
+ destroy(): void;
+ }
+ declare const queryCache: QueryCache;
+
  declare const SDK_VERSION = "0.0.8";
  type MessageTypeHandler = (message: IncomingMessage) => void | Promise<void>;
  declare class SuperatomSDK {
@@ -2400,4 +2479,4 @@ declare class SuperatomSDK {
  getConversationSimilarityThreshold(): number;
  }

- export { type Action, BM25L, type BM25LOptions, type BaseLLMConfig, CONTEXT_CONFIG, type CapturedLog, CleanupService, type CollectionHandler, type CollectionOperation, type DBUIBlock, type DatabaseType, type HybridSearchOptions, type IncomingMessage, type KbNodesQueryFilters, type KbNodesRequestPayload, LLM, type LLMUsageEntry, type LogLevel, type Message, type ModelStrategy, type OutputField, type RerankedResult, SDK_VERSION, STORAGE_CONFIG, SuperatomSDK, type SuperatomSDKConfig, type TaskType, Thread, ThreadManager, type Tool$1 as Tool, type ToolOutputSchema, UIBlock, UILogCollector, type User, UserManager, type UsersData, anthropicLLM, geminiLLM, groqLLM, hybridRerank, llmUsageLogger, logger, openaiLLM, rerankChromaResults, rerankConversationResults, userPromptErrorLogger };
+ export { type Action, BM25L, type BM25LOptions, type BaseLLMConfig, CONTEXT_CONFIG, type CapturedLog, CleanupService, type CollectionHandler, type CollectionOperation, type DBUIBlock, type DatabaseType, type HybridSearchOptions, type IncomingMessage, type KbNodesQueryFilters, type KbNodesRequestPayload, LLM, type LLMUsageEntry, type LogLevel, type Message, type ModelStrategy, type OutputField, type RerankedResult, SDK_VERSION, STORAGE_CONFIG, SuperatomSDK, type SuperatomSDKConfig, type TaskType, Thread, ThreadManager, type Tool$1 as Tool, type ToolOutputSchema, UIBlock, UILogCollector, type User, UserManager, type UsersData, anthropicLLM, geminiLLM, groqLLM, hybridRerank, llmUsageLogger, logger, openaiLLM, queryCache, rerankChromaResults, rerankConversationResults, userPromptErrorLogger };
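Note: the new private BaseLLM members (getQueryCacheKey, executeQueryForValidation, requestQueryFix, validateAndRetryComponentQueries) together describe a validate / cache / LLM-retry loop for component queries. The sketch below is a hypothetical, standalone illustration of that control flow, not the SDK's actual implementation; the execute and requestFix callbacks are placeholders, and the three-attempt limit is an assumption borrowed from the "max 3 attempts" wording elsewhere in these declarations.

// Hypothetical sketch of the validate-and-retry flow suggested by the new
// private BaseLLM members; not the SDK's actual implementation.

type ExecuteFn = (sql: string) => Promise<unknown>;
type FixFn = (failedQuery: string, errorMessage: string) => Promise<string>;

const resultCache = new Map<string, unknown>(); // keyed by the exact query string

async function validateQueryWithRetry(
  sql: string,
  execute: ExecuteFn,
  requestFix: FixFn,
  maxAttempts = 3, // assumed limit
): Promise<{ query: string; data: unknown }> {
  let query = sql;
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    // Reuse a previously validated result if this exact query was already executed.
    const cached = resultCache.get(query);
    if (cached !== undefined) return { query, data: cached };
    try {
      const data = await execute(query);      // run the query against the database
      resultCache.set(query, data);           // cache the result under the exact query string
      return { query, data };
    } catch (err) {
      if (attempt === maxAttempts) throw err; // give up after the last attempt
      // Ask the LLM to repair the failing SQL, then retry with the fixed query.
      query = await requestFix(query, (err as Error).message);
    }
  }
  throw new Error('unreachable');
}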