@ax-llm/ax 11.0.47 → 11.0.49
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.cjs +254 -164
- package/index.cjs.map +1 -1
- package/index.d.cts +22 -15
- package/index.d.ts +22 -15
- package/index.js +260 -166
- package/index.js.map +1 -1
- package/package.json +1 -1
package/index.d.cts
CHANGED

@@ -1,5 +1,5 @@
 import { ReadableStream as ReadableStream$1 } from 'node:stream/web';
-import { Span, Tracer } from '@opentelemetry/api';
+import { Span, Context, Tracer } from '@opentelemetry/api';
 import { ReadableStream } from 'stream/web';
 
 interface RetryConfig {
@@ -332,7 +332,7 @@ type AxRateLimiterFunction = <T = unknown>(reqFunc: () => Promise<T | ReadableSt
 }>) => Promise<T | ReadableStream$1<T>>;
 type AxAIPromptConfig = {
     stream?: boolean;
-    thinkingTokenBudget?: 'minimal' | 'low' | 'medium' | 'high';
+    thinkingTokenBudget?: 'minimal' | 'low' | 'medium' | 'high' | 'highest';
 };
 type AxAIServiceOptions = {
     debug?: boolean;
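The hunk above widens AxAIPromptConfig.thinkingTokenBudget with a new 'highest' tier. A minimal type-level sketch, assuming AxAIPromptConfig is exported from the package entry point (only fields visible in this diff are set):

import type { AxAIPromptConfig } from '@ax-llm/ax';

// 'highest' is the value newly allowed in 11.0.49; the other tiers are unchanged.
const promptConfig: AxAIPromptConfig = {
    stream: false,
    thinkingTokenBudget: 'highest',
};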
@@ -351,6 +351,7 @@ type AxAIServiceActionOptions<TModel = unknown, TEmbedModel = unknown> = {
     debug?: boolean;
     debugHideSystemPrompt?: boolean;
     hideThought?: boolean;
+    traceContext?: Context;
 };
 interface AxAIService<TModel = unknown, TEmbedModel = unknown> {
     getId(): string;
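This hunk adds an optional traceContext to AxAIServiceActionOptions, typed with the Context now imported from '@opentelemetry/api', presumably so callers can parent provider spans under their own trace. A sketch of threading the active OpenTelemetry context through, assuming AxAIServiceActionOptions is exported:

import { context } from '@opentelemetry/api';
import type { AxAIServiceActionOptions } from '@ax-llm/ax';

// Capture the caller's active OpenTelemetry context and pass it via the new
// traceContext option introduced in this release.
const actionOptions: AxAIServiceActionOptions = {
    traceContext: context.active(),
};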
@@ -798,8 +799,9 @@ declare const axAIOpenAIDefaultConfig: () => AxAIOpenAIConfig<AxAIOpenAIModel, A
 declare const axAIOpenAIBestConfig: () => AxAIOpenAIConfig<AxAIOpenAIModel, AxAIOpenAIEmbedModel>;
 declare const axAIOpenAICreativeConfig: () => AxAIOpenAIConfig<AxAIOpenAIModel, AxAIOpenAIEmbedModel>;
 declare const axAIOpenAIFastConfig: () => AxAIOpenAIConfig<AxAIOpenAIModel, AxAIOpenAIEmbedModel>;
-interface AxAIOpenAIArgs<TName = 'openai', TModel = AxAIOpenAIModel, TEmbedModel = AxAIOpenAIEmbedModel, TChatReq extends AxAIOpenAIChatRequest<TModel> = AxAIOpenAIChatRequest<TModel>> extends Omit<AxAIOpenAIBaseArgs<TModel, TEmbedModel, TChatReq>, 'config' | 'modelInfo'> {
+interface AxAIOpenAIArgs<TName = 'openai', TModel = AxAIOpenAIModel, TEmbedModel = AxAIOpenAIEmbedModel, TChatReq extends AxAIOpenAIChatRequest<TModel> = AxAIOpenAIChatRequest<TModel>> extends Omit<AxAIOpenAIBaseArgs<TModel, TEmbedModel, TChatReq>, 'config' | 'supportFor' | 'modelInfo'> {
     name: TName;
+    modelInfo?: AxModelInfo[];
     config?: Partial<AxAIOpenAIBaseArgs<TModel, TEmbedModel, TChatReq>['config']>;
 }
 type ChatReqUpdater<TModel, TChatReq extends AxAIOpenAIChatRequest<TModel>> = (req: Readonly<TChatReq>) => TChatReq;
@@ -813,13 +815,13 @@ interface AxAIOpenAIBaseArgs<TModel, TEmbedModel, TChatReq extends AxAIOpenAICha
     modelInfo: Readonly<AxModelInfo[]>;
     models?: AxAIInputModelList<TModel, TEmbedModel>;
     chatReqUpdater?: ChatReqUpdater<TModel, TChatReq>;
-    supportFor
+    supportFor: AxAIFeatures | ((model: TModel) => AxAIFeatures);
 }
 declare class AxAIOpenAIBase<TModel, TEmbedModel, TChatReq extends AxAIOpenAIChatRequest<TModel> = AxAIOpenAIChatRequest<TModel>> extends AxBaseAI<TModel, TEmbedModel, AxAIOpenAIChatRequest<TModel>, AxAIOpenAIEmbedRequest<TEmbedModel>, AxAIOpenAIChatResponse, AxAIOpenAIChatResponseDelta, AxAIOpenAIEmbedResponse> {
     constructor({ apiKey, config, options, apiURL, modelInfo, models, chatReqUpdater, supportFor, }: Readonly<Omit<AxAIOpenAIBaseArgs<TModel, TEmbedModel, TChatReq>, 'name'>>);
 }
 declare class AxAIOpenAI extends AxAIOpenAIBase<AxAIOpenAIModel, AxAIOpenAIEmbedModel> {
-    constructor({ apiKey, config, options, models, }: Readonly<Omit<AxAIOpenAIArgs, 'name'
+    constructor({ apiKey, config, options, models, modelInfo, }: Readonly<Omit<AxAIOpenAIArgs, 'name'>>);
 }
 
 declare const axAIAzureOpenAIDefaultConfig: () => AxAIOpenAIConfig<AxAIOpenAIModel, AxAIOpenAIEmbedModel>;
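The two hunks above rework the OpenAI argument types: AxAIOpenAIArgs now also omits supportFor from AxAIOpenAIBaseArgs and gains an optional modelInfo override, supportFor is spelled out as AxAIFeatures or a per-model function, and the AxAIOpenAI constructor accepts the extra modelInfo field. A construction sketch, assuming AxAIOpenAI and AxModelInfo are exported; the key is a placeholder and the AxModelInfo entries are left opaque because their fields are not shown in this diff:

import { AxAIOpenAI, type AxModelInfo } from '@ax-llm/ax';

// Hypothetical per-model metadata passed through the new optional modelInfo argument.
declare const customModelInfo: AxModelInfo[];

const ai = new AxAIOpenAI({
    apiKey: 'sk-placeholder',       // placeholder key
    modelInfo: customModelInfo,     // newly accepted by the constructor in 11.0.49
});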
@@ -833,7 +835,7 @@ type AxAIAzureOpenAIArgs = AxAIOpenAIArgs<'azure-openai', AxAIOpenAIModel, AxAIO
     version?: string;
 };
 declare class AxAIAzureOpenAI extends AxAIOpenAIBase<AxAIOpenAIModel, AxAIOpenAIEmbedModel> {
-    constructor({ apiKey, resourceName, deploymentName, version, config, options, models, }: Readonly<Omit<AxAIAzureOpenAIArgs, 'name'
+    constructor({ apiKey, resourceName, deploymentName, version, config, options, models, modelInfo, }: Readonly<Omit<AxAIAzureOpenAIArgs, 'name'>>);
 }
 
 /**
@@ -953,7 +955,8 @@ declare class AxAICohere extends AxBaseAI<AxAICohereModel, AxAICohereEmbedModel,
  */
 declare enum AxAIDeepSeekModel {
     DeepSeekChat = "deepseek-chat",
-    DeepSeekCoder = "deepseek-coder"
+    DeepSeekCoder = "deepseek-coder",
+    DeepSeekReasoner = "deepseek-reasoner"
 }
 
 type DeepSeekConfig = AxAIOpenAIConfig<AxAIDeepSeekModel, undefined>;
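The DeepSeek enum gains a DeepSeekReasoner member, and the Azure OpenAI constructor picks up the same optional modelInfo parameter as the other providers. A minimal sketch of selecting the new model, assuming AxAIDeepSeek and AxAIDeepSeekModel are exported and that the provider config carries a model field as with the other OpenAI-compatible configs:

import { AxAIDeepSeek, AxAIDeepSeekModel } from '@ax-llm/ax';

const ai = new AxAIDeepSeek({
    apiKey: 'placeholder-key',
    // "deepseek-reasoner" is the enum member added in 11.0.49.
    config: { model: AxAIDeepSeekModel.DeepSeekReasoner },
});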
@@ -961,7 +964,7 @@ declare const axAIDeepSeekDefaultConfig: () => DeepSeekConfig;
 declare const axAIDeepSeekCodeConfig: () => DeepSeekConfig;
 type AxAIDeepSeekArgs = AxAIOpenAIArgs<'deepseek', AxAIDeepSeekModel, undefined>;
 declare class AxAIDeepSeek extends AxAIOpenAIBase<AxAIDeepSeekModel, undefined> {
-    constructor({ apiKey, config, options, models, }: Readonly<Omit<AxAIDeepSeekArgs, 'name'>>);
+    constructor({ apiKey, config, options, models, modelInfo, }: Readonly<Omit<AxAIDeepSeekArgs, 'name'>>);
 }
 
 declare enum AxAIGoogleGeminiModel
@@ -969,7 +972,6 @@ declare enum AxAIGoogleGeminiModel
     Gemini25Flash = "gemini-2.5-flash-preview-04-17",
     Gemini20Flash = "gemini-2.0-flash",
     Gemini20FlashLite = "gemini-2.0-flash-lite-preview-02-05",
-    Gemini20FlashThinking = "gemini-2.0-flash-thinking-exp-01-21",
     Gemini1Pro = "gemini-1.0-pro",
     Gemini15Flash = "gemini-1.5-flash",
     Gemini15Flash002 = "gemini-1.5-flash-002",
@@ -1055,6 +1057,7 @@ type AxAIGoogleGeminiTool = {
     function_declarations?: AxAIGoogleGeminiToolFunctionDeclaration[];
     code_execution?: object;
     google_search_retrieval?: AxAIGoogleGeminiToolGoogleSearchRetrieval;
+    google_search?: object;
     url_context?: object;
 };
 type AxAIGoogleGeminiToolConfig = {
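In the Gemini hunks, the Gemini20FlashThinking model id is removed from AxAIGoogleGeminiModel and AxAIGoogleGeminiTool gains a google_search slot alongside google_search_retrieval. A sketch of declaring the new tool, assuming AxAIGoogleGeminiTool is exported; like code_execution and url_context it is typed as a bare object:

import type { AxAIGoogleGeminiTool } from '@ax-llm/ax';

// Enable the google_search tool added in 11.0.49; the empty object mirrors how
// code_execution and url_context are declared in the same type.
const tools: AxAIGoogleGeminiTool[] = [
    { google_search: {} },
];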
@@ -1201,12 +1204,13 @@ interface AxAIGoogleGeminiArgs {
     config?: Readonly<Partial<AxAIGoogleGeminiConfig>>;
     options?: Readonly<AxAIServiceOptions & AxAIGoogleGeminiOptionsTools>;
     models?: AxAIInputModelList<AxAIGoogleGeminiModel, AxAIGoogleGeminiEmbedModel>;
+    modelInfo?: AxModelInfo[];
 }
 /**
  * AxAIGoogleGemini: AI Service
  */
 declare class AxAIGoogleGemini extends AxBaseAI<AxAIGoogleGeminiModel, AxAIGoogleGeminiEmbedModel, AxAIGoogleGeminiChatRequest, AxAIGoogleGeminiBatchEmbedRequest | AxAIGoogleVertexBatchEmbedRequest, AxAIGoogleGeminiChatResponse, AxAIGoogleGeminiChatResponseDelta, AxAIGoogleGeminiBatchEmbedResponse | AxAIGoogleVertexBatchEmbedResponse> {
-    constructor({ apiKey, projectId, region, endpointId, config, options, models, }: Readonly<Omit<AxAIGoogleGeminiArgs, 'name'>>);
+    constructor({ apiKey, projectId, region, endpointId, config, options, models, modelInfo, }: Readonly<Omit<AxAIGoogleGeminiArgs, 'name'>>);
 }
 
 declare enum AxAIGroqModel
@@ -1220,9 +1224,10 @@ type AxAIGroqArgs = AxAIOpenAIArgs<'groq', AxAIGroqModel, undefined> & {
     options?: Readonly<AxAIServiceOptions> & {
         tokensPerMinute?: number;
     };
+    modelInfo?: AxModelInfo[];
 };
 declare class AxAIGroq extends AxAIOpenAIBase<AxAIGroqModel, undefined> {
-    constructor({ apiKey, config, options, models, }: Readonly<Omit<AxAIGroqArgs, 'name'>>);
+    constructor({ apiKey, config, options, models, modelInfo, }: Readonly<Omit<AxAIGroqArgs, 'name'>>);
     setOptions: (options: Readonly<AxAIServiceOptions>) => void;
     private newRateLimiter;
 }
@@ -1295,9 +1300,10 @@ type AxAIMistralArgs = AxAIOpenAIArgs<'mistral', AxAIMistralModel, AxAIMistralEm
     options?: Readonly<AxAIServiceOptions> & {
         tokensPerMinute?: number;
     };
+    modelInfo?: AxModelInfo[];
 };
 declare class AxAIMistral extends AxAIOpenAIBase<AxAIMistralModel, AxAIMistralEmbedModels> {
-    constructor({ apiKey, config, options, models, }: Readonly<Omit<AxAIMistralArgs, 'name'>>);
+    constructor({ apiKey, config, options, models, modelInfo, }: Readonly<Omit<AxAIMistralArgs, 'name'>>);
 }
 
 type AxAIOllamaAIConfig = AxAIOpenAIConfig<string, string>;
@@ -1405,7 +1411,7 @@ type TogetherAIConfig = AxAIOpenAIConfig<string, unknown>;
 declare const axAITogetherDefaultConfig: () => TogetherAIConfig;
 type AxAITogetherArgs = AxAIOpenAIArgs<'together', string, unknown>;
 declare class AxAITogether extends AxAIOpenAIBase<string, unknown> {
-    constructor({ apiKey, config, options, models, }: Readonly<Omit<AxAITogetherArgs, 'name'>>);
+    constructor({ apiKey, config, options, models, modelInfo, }: Readonly<Omit<AxAITogetherArgs, 'name'>>);
 }
 
 type AxAIArgs = AxAIOpenAIArgs | AxAIAzureOpenAIArgs | AxAITogetherArgs | AxAIAnthropicArgs | AxAIGroqArgs | AxAIGoogleGeminiArgs | AxAICohereArgs | AxAIHuggingFaceArgs | AxAIMistralArgs | AxAIDeepSeekArgs | AxAIOllamaArgs | AxAIRekaArgs;
@@ -1447,9 +1453,10 @@ type AxAIGrokArgs = AxAIOpenAIArgs<'grok', AxAIGrokModel, AxAIGrokEmbedModels> &
     options?: Readonly<AxAIServiceOptions> & {
         tokensPerMinute?: number;
     };
+    modelInfo?: AxModelInfo[];
 };
 declare class AxAIGrok extends AxAIOpenAIBase<AxAIGrokModel, AxAIGrokEmbedModels> {
-    constructor({ apiKey, config, options, models, }: Readonly<Omit<AxAIGrokArgs, 'name'>>);
+    constructor({ apiKey, config, options, models, modelInfo, }: Readonly<Omit<AxAIGrokArgs, 'name'>>);
 }
 
 interface AxAIMemory {
@@ -1691,7 +1698,7 @@ type AxProgramForwardOptions = {
     fastFail?: boolean;
     debug?: boolean;
     debugHideSystemPrompt?: boolean;
-    thinkingTokenBudget?: 'minimal' | 'low' | 'medium' | 'high';
+    thinkingTokenBudget?: 'minimal' | 'low' | 'medium' | 'high' | 'highest';
     traceLabel?: string;
 };
 type AxProgramStreamingForwardOptions = Omit<AxProgramForwardOptions, 'stream'>;
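The remaining hunks add the same optional modelInfo to the Gemini, Groq, Mistral, Together, and Grok argument types and constructors, and the final hunk extends thinkingTokenBudget with 'highest' in AxProgramForwardOptions as well. A type-level sketch of the forward options, assuming AxProgramForwardOptions is exported; only fields visible in this diff are set and the label value is hypothetical:

import type { AxProgramForwardOptions } from '@ax-llm/ax';

// Options for a program forward call, using the new 'highest' tier.
const forwardOptions: AxProgramForwardOptions = {
    thinkingTokenBudget: 'highest',
    traceLabel: 'example-request',   // hypothetical label
};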
package/index.d.ts
CHANGED

The hunks for this file are identical to the package/index.d.cts changes shown above; both declaration files carry the same +22 -15 edits.