@superatomai/sdk-node 0.0.12 → 0.0.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +15 -1
- package/dist/index.d.ts +15 -1
- package/dist/index.js +1475 -1511
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +1475 -1511
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.d.mts
CHANGED
@@ -1,4 +1,5 @@
 import { z } from 'zod';
+import Anthropic from '@anthropic-ai/sdk';
 
 /**
  * Log levels in hierarchical order
@@ -840,8 +841,9 @@ declare class ReportManager {
     getReportCount(): number;
 }
 
+type SystemPrompt = string | Anthropic.Messages.TextBlockParam[];
 interface LLMMessages {
-    sys:
+    sys: SystemPrompt;
     user: string;
 }
 interface LLMOptions {
@@ -865,6 +867,18 @@ declare class LLM {
     static text(messages: LLMMessages, options?: LLMOptions): Promise<string>;
     static stream<T = string>(messages: LLMMessages, options?: LLMOptions, json?: boolean): Promise<T extends string ? string : any>;
     static streamWithTools(messages: LLMMessages, tools: Tool[], toolHandler: (toolName: string, toolInput: any) => Promise<any>, options?: LLMOptions, maxIterations?: number): Promise<string>;
+    /**
+     * Normalize system prompt to Anthropic format
+     * Converts string to array format if needed
+     * @param sys - System prompt (string or array of blocks)
+     * @returns Normalized system prompt for Anthropic API
+     */
+    private static _normalizeSystemPrompt;
+    /**
+     * Log cache usage metrics from Anthropic API response
+     * Shows cache hits, costs, and savings
+     */
+    private static _logCacheUsage;
     /**
      * Parse model string to extract provider and model name
      * @param modelString - Format: "provider/model-name" or just "model-name"
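In practice, the widened sys field means callers can keep passing a plain string or switch to Anthropic-style text blocks, which is the shape needed for features such as prompt caching via cache_control. A minimal usage sketch, assuming LLM is exported from the package entry point; the cache_control block follows the @anthropic-ai/sdk TextBlockParam type and is illustrative, not taken from this package:

import Anthropic from '@anthropic-ai/sdk';
import { LLM } from '@superatomai/sdk-node'; // assumed export; check the package entry point

async function demo() {
  // Plain-string system prompt: same call style as 0.0.12
  const a = await LLM.text({ sys: 'You are a helpful assistant.', user: 'Summarize this repo.' });

  // Block-array system prompt: TextBlockParam[] as allowed by the new SystemPrompt type,
  // e.g. marking a long, reusable instruction block as cacheable
  const sysBlocks: Anthropic.Messages.TextBlockParam[] = [
    {
      type: 'text',
      text: 'Very long, reusable instructions...',
      cache_control: { type: 'ephemeral' },
    },
  ];
  const b = await LLM.text({ sys: sysBlocks, user: 'Summarize this repo.' });

  return { a, b };
}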
package/dist/index.d.ts
CHANGED
@@ -1,4 +1,5 @@
 import { z } from 'zod';
+import Anthropic from '@anthropic-ai/sdk';
 
 /**
  * Log levels in hierarchical order
@@ -840,8 +841,9 @@ declare class ReportManager {
     getReportCount(): number;
 }
 
+type SystemPrompt = string | Anthropic.Messages.TextBlockParam[];
 interface LLMMessages {
-    sys:
+    sys: SystemPrompt;
     user: string;
 }
 interface LLMOptions {
@@ -865,6 +867,18 @@ declare class LLM {
     static text(messages: LLMMessages, options?: LLMOptions): Promise<string>;
     static stream<T = string>(messages: LLMMessages, options?: LLMOptions, json?: boolean): Promise<T extends string ? string : any>;
     static streamWithTools(messages: LLMMessages, tools: Tool[], toolHandler: (toolName: string, toolInput: any) => Promise<any>, options?: LLMOptions, maxIterations?: number): Promise<string>;
+    /**
+     * Normalize system prompt to Anthropic format
+     * Converts string to array format if needed
+     * @param sys - System prompt (string or array of blocks)
+     * @returns Normalized system prompt for Anthropic API
+     */
+    private static _normalizeSystemPrompt;
+    /**
+     * Log cache usage metrics from Anthropic API response
+     * Shows cache hits, costs, and savings
+     */
+    private static _logCacheUsage;
     /**
      * Parse model string to extract provider and model name
      * @param modelString - Format: "provider/model-name" or just "model-name"
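The .d.ts only declares the two new private helpers; their bodies live in dist/index.js and are not shown in this diff. As an illustrative sketch rather than the package's actual implementation: per the JSDoc above, normalization would wrap a bare string in a single text block, and cache logging would read the cache token counters that the Anthropic Messages API reports on usage (turning tokens into costs and savings would additionally need per-model pricing, which is omitted here):

import Anthropic from '@anthropic-ai/sdk';

type SystemPrompt = string | Anthropic.Messages.TextBlockParam[];

// Sketch of "_normalizeSystemPrompt": pass block arrays through, wrap strings in one text block
function normalizeSystemPrompt(sys: SystemPrompt): Anthropic.Messages.TextBlockParam[] {
  return typeof sys === 'string' ? [{ type: 'text', text: sys }] : sys;
}

// Sketch of "_logCacheUsage": the Anthropic response exposes cache token counts on usage
function logCacheUsage(message: Anthropic.Messages.Message): void {
  const { input_tokens, output_tokens, cache_creation_input_tokens, cache_read_input_tokens } = message.usage;
  console.log(
    `input=${input_tokens} output=${output_tokens} ` +
      `cache_written=${cache_creation_input_tokens ?? 0} cache_read=${cache_read_input_tokens ?? 0}`
  );
}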