@ax-llm/ax 14.0.26 → 14.0.28
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.cjs +31 -21
- package/index.cjs.map +1 -1
- package/index.d.cts +83 -2
- package/index.d.ts +83 -2
- package/index.global.js +13 -3
- package/index.global.js.map +1 -1
- package/index.js +27 -17
- package/index.js.map +1 -1
- package/package.json +1 -1
package/index.d.cts
CHANGED
@@ -3381,8 +3381,20 @@ type AxAICohereEmbedResponse = {
     embeddings: number[][];
 };
 
+/**
+ * Creates the default configuration for Cohere AI service
+ * @returns A deep clone of the default Cohere configuration with CommandRPlus model and EmbedEnglishV30 embed model
+ */
 declare const axAICohereDefaultConfig: () => AxAICohereConfig;
+/**
+ * Creates a creative configuration for Cohere AI service with more flexible parameters
+ * @returns A deep clone of the creative Cohere configuration with CommandR model and EmbedEnglishV30 embed model
+ */
 declare const axAICohereCreativeConfig: () => AxAICohereConfig;
+/**
+ * Configuration arguments for initializing the Cohere AI service
+ * @template TModelKey - The type of model keys supported
+ */
 interface AxAICohereArgs<TModelKey> {
     name: 'cohere';
     apiKey: string;
@@ -3390,7 +3402,15 @@ interface AxAICohereArgs<TModelKey> {
     options?: Readonly<AxAIServiceOptions>;
     models?: AxAIInputModelList<AxAICohereModel, AxAICohereEmbedModel, TModelKey>;
 }
+/**
+ * Main Cohere AI service class that extends the base AI implementation
+ * @template TModelKey - The type of model keys supported
+ */
 declare class AxAICohere<TModelKey> extends AxBaseAI<AxAICohereModel, AxAICohereEmbedModel, AxAICohereChatRequest, AxAICohereEmbedRequest, AxAICohereChatResponse, AxAICohereChatResponseDelta, AxAICohereEmbedResponse, TModelKey> {
+    /**
+     * Creates a new instance of AxAICohere
+     * @param args - Configuration arguments including API key, config, options, and models
+     */
     constructor({ apiKey, config, options, models, }: Readonly<Omit<AxAICohereArgs<TModelKey>, 'name'>>);
 }
 
@@ -3405,11 +3425,41 @@ declare enum AxAIDeepSeekModel {
     DeepSeekReasoner = "deepseek-reasoner"
 }
 
+/**
+ * Configuration type for DeepSeek AI models
+ */
 type DeepSeekConfig = AxAIOpenAIConfig<AxAIDeepSeekModel, undefined>;
+/**
+ * Creates the default configuration for DeepSeek AI with the chat model
+ * @returns Default DeepSeek configuration with chat model settings
+ */
 declare const axAIDeepSeekDefaultConfig: () => DeepSeekConfig;
+/**
+ * Creates a configuration optimized for code generation tasks using DeepSeek Coder
+ * @returns DeepSeek configuration with creative settings for coding tasks
+ */
 declare const axAIDeepSeekCodeConfig: () => DeepSeekConfig;
+/**
+ * Arguments type for initializing DeepSeek AI instances
+ * @template TModelKey - The model key type for type safety
+ */
 type AxAIDeepSeekArgs<TModelKey> = AxAIOpenAIArgs<'deepseek', AxAIDeepSeekModel, undefined, TModelKey>;
+/**
+ * DeepSeek AI client implementation extending OpenAI base functionality
+ * Provides access to DeepSeek's language models through OpenAI-compatible API
+ * @template TModelKey - The model key type for type safety
+ */
 declare class AxAIDeepSeek<TModelKey> extends AxAIOpenAIBase<AxAIDeepSeekModel, undefined, TModelKey> {
+    /**
+     * Creates a new DeepSeek AI client instance
+     * @param args - Configuration arguments for the DeepSeek client
+     * @param args.apiKey - DeepSeek API key for authentication
+     * @param args.config - Optional configuration overrides
+     * @param args.options - Optional client options
+     * @param args.models - Optional model definitions
+     * @param args.modelInfo - Optional additional model information
+     * @throws {Error} When API key is not provided or empty
+     */
     constructor({ apiKey, config, options, models, modelInfo, }: Readonly<Omit<AxAIDeepSeekArgs<TModelKey>, 'name'>>);
 }
 
@@ -3425,7 +3475,10 @@ declare enum AxAIGoogleGeminiModel {
     Gemini15Flash = "gemini-1.5-flash",
     Gemini15Flash002 = "gemini-1.5-flash-002",
     Gemini15Flash8B = "gemini-1.5-flash-8b",
-    Gemini15Pro = "gemini-1.5-pro"
+    Gemini15Pro = "gemini-1.5-pro",
+    GeminiFlashLatest = "gemini-flash-latest",
+    GeminiFlashLiteLatest = "gemini-flash-lite-latest",
+    GeminiProLatest = "gemini-pro-latest"
 }
 declare enum AxAIGoogleGeminiEmbedModel {
     GeminiEmbedding = "gemini-embedding-exp",
@@ -3968,18 +4021,44 @@ declare class AxMultiServiceRouter<TServices extends readonly (AxAIService | AxA
     }): void;
 }
 
+/**
+ * Configuration type for Ollama AI service
+ */
 type AxAIOllamaAIConfig = AxAIOpenAIConfig<string, string>;
+/**
+ * Creates default configuration for Ollama AI service
+ * @returns Default configuration object with nous-hermes2 model and all-minilm embed model
+ */
 declare const axAIOllamaDefaultConfig: () => AxAIOllamaAIConfig;
+/**
+ * Creates default creative configuration for Ollama AI service
+ * @returns Creative configuration object with nous-hermes2 model and all-minilm embed model
+ */
 declare const axAIOllamaDefaultCreativeConfig: () => AxAIOllamaAIConfig;
+/**
+ * Arguments type for initializing Ollama AI service
+ * @template TModelKey - Type for model key
+ */
 type AxAIOllamaArgs<TModelKey> = AxAIOpenAIArgs<'ollama', string, string, TModelKey> & {
     model?: string;
     embedModel?: string;
     url?: string;
 };
 /**
- *
+ * Ollama AI service implementation that extends OpenAI base functionality
+ * Provides access to locally hosted Ollama models with OpenAI-compatible API
+ * @template TModelKey - Type for model key
  */
 declare class AxAIOllama<TModelKey> extends AxAIOpenAIBase<string, string, TModelKey> {
+    /**
+     * Creates a new Ollama AI service instance
+     * @param args - Configuration arguments for the Ollama service
+     * @param args.apiKey - API key for authentication (defaults to 'not-set')
+     * @param args.url - Base URL for the Ollama API (defaults to 'http://localhost:11434/v1')
+     * @param args.config - Additional configuration options
+     * @param args.options - Service options
+     * @param args.models - Available models configuration
+     */
     constructor({ apiKey, url, config, options, models, }: Readonly<Omit<AxAIOllamaArgs<TModelKey>, 'name'>>);
 }
 
@@ -5676,6 +5755,7 @@ declare class AxGEPA extends AxBaseOptimizer {
     private updateSamplerShuffled;
     private nextMinibatchIndices;
     private rand;
+    private generateOptimizationReport;
     private mergeInstructions;
 }
 
@@ -5834,6 +5914,7 @@ declare class AxMiPRO extends AxBaseOptimizer {
      * parameter optimization rather than full MiPRO functionality.
      */
     private compilePython;
+    private generateOptimizationReport;
     /**
      * Simplified evaluation method for Python optimization
      */
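
For orientation, a minimal TypeScript sketch of how the constructors and the new Gemini enum entries documented above could be exercised. The class and enum names come from the declarations in this diff; the assumption that they are exported from '@ax-llm/ax', and the placeholder API key and URL values, are illustrative only.

// Sketch only: assumes AxAIDeepSeek, AxAIOllama, and AxAIGoogleGeminiModel
// are exported from the package index, as the declarations above suggest.
import { AxAIDeepSeek, AxAIGoogleGeminiModel, AxAIOllama } from '@ax-llm/ax';

// DeepSeek client: apiKey is required; per the new @throws note, an empty
// key raises an Error. config/options/models/modelInfo remain optional.
const deepseek = new AxAIDeepSeek({
  apiKey: 'YOUR_DEEPSEEK_API_KEY', // placeholder value
});

// Ollama client: the new doc comments state apiKey defaults to 'not-set'
// and url defaults to 'http://localhost:11434/v1' for a local server.
const ollama = new AxAIOllama({
  apiKey: 'not-set',
  url: 'http://localhost:11434/v1',
});

// The Gemini enum gains rolling "latest" aliases in this diff.
const geminiModel = AxAIGoogleGeminiModel.GeminiFlashLatest; // "gemini-flash-latest"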
package/index.d.ts
CHANGED
Identical changes to those shown above for package/index.d.cts.