@ax-llm/ax 11.0.47 → 11.0.49
This diff shows the contents of publicly available package versions as published to a supported public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
- package/index.cjs +254 -164
- package/index.cjs.map +1 -1
- package/index.d.cts +22 -15
- package/index.d.ts +22 -15
- package/index.js +260 -166
- package/index.js.map +1 -1
- package/package.json +1 -1
package/index.cjs (CHANGED)

@@ -365,14 +365,14 @@ var defaultRetryConfig = {
 var defaultTimeoutMs = 3e4;
 var textDecoderStream = import_web3.TextDecoderStream ?? TextDecoderStreamPolyfill;
 var AxAIServiceError = class extends Error {
-  constructor(message, url, requestBody,
+  constructor(message, url, requestBody, context3 = {}) {
     super(message);
     this.url = url;
     this.requestBody = requestBody;
     this.name = this.constructor.name;
     this.timestamp = (/* @__PURE__ */ new Date()).toISOString();
     this.errorId = crypto.randomUUID();
-    this.context =
+    this.context = context3;
     this.stack = this.toString();
   }
   timestamp;
@@ -394,11 +394,11 @@ var AxAIServiceError = class extends Error {
   }
 };
 var AxAIServiceStatusError = class extends AxAIServiceError {
-  constructor(status, statusText, url, requestBody,
+  constructor(status, statusText, url, requestBody, context3) {
     super(`HTTP ${status} - ${statusText}`, url, requestBody, {
       httpStatus: status,
       httpStatusText: statusText,
-      ...
+      ...context3
     });
     this.status = status;
     this.statusText = statusText;
@@ -406,11 +406,11 @@ var AxAIServiceStatusError = class extends AxAIServiceError {
   }
 };
 var AxAIServiceNetworkError = class extends AxAIServiceError {
-  constructor(originalError, url, requestBody,
+  constructor(originalError, url, requestBody, context3) {
     super(`Network Error: ${originalError.message}`, url, requestBody, {
       originalErrorName: originalError.name,
       originalErrorStack: originalError.stack,
-      ...
+      ...context3
     });
     this.originalError = originalError;
     this.name = this.constructor.name;
@@ -418,33 +418,33 @@ var AxAIServiceNetworkError = class extends AxAIServiceError {
   }
 };
 var AxAIServiceResponseError = class extends AxAIServiceError {
-  constructor(message, url, requestBody,
-    super(message, url, requestBody,
+  constructor(message, url, requestBody, context3) {
+    super(message, url, requestBody, context3);
     this.name = this.constructor.name;
   }
 };
 var AxAIServiceStreamTerminatedError = class extends AxAIServiceError {
-  constructor(url, requestBody, lastChunk,
+  constructor(url, requestBody, lastChunk, context3) {
     super("Stream terminated unexpectedly by remote host", url, requestBody, {
       lastChunk,
-      ...
+      ...context3
     });
     this.lastChunk = lastChunk;
     this.name = this.constructor.name;
   }
 };
 var AxAIServiceTimeoutError = class extends AxAIServiceError {
-  constructor(url, timeoutMs, requestBody,
+  constructor(url, timeoutMs, requestBody, context3) {
     super(`Request timeout after ${timeoutMs}ms`, url, requestBody, {
       timeoutMs,
-      ...
+      ...context3
    });
     this.name = this.constructor.name;
   }
 };
 var AxAIServiceAuthenticationError = class extends AxAIServiceError {
-  constructor(url, requestBody,
-    super("Authentication failed", url, requestBody,
+  constructor(url, requestBody, context3) {
+    super("Authentication failed", url, requestBody, context3);
    this.name = this.constructor.name;
   }
 };
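The hunks above thread an optional context object (bundled as `context3`) through every service error constructor, so HTTP status, timeout, and stream metadata all end up on `error.context` alongside the existing `errorId` and `timestamp` fields. A minimal sketch of reading that context when a call fails, assuming these error classes are exported from `@ax-llm/ax` under the same names:

```ts
import { AxAIServiceError, AxAIServiceStatusError } from "@ax-llm/ax";

// Sketch: `ai` stands for any Ax AI service instance (e.g. AxAIOpenAI).
async function safeChat(ai: { chat: (req: unknown) => Promise<unknown> }) {
  try {
    return await ai.chat({ chatPrompt: [{ role: "user", content: "hello" }] });
  } catch (err) {
    if (err instanceof AxAIServiceStatusError) {
      // HTTP failures carry the status plus the merged context bag.
      console.error(err.errorId, err.status, err.statusText, err.context);
    } else if (err instanceof AxAIServiceError) {
      // Every service error exposes url, requestBody, timestamp and context.
      console.error(err.errorId, err.url, err.timestamp, err.context);
    }
    throw err;
  }
}
```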
@@ -490,14 +490,12 @@ var apiCall = async (api, json) => {
     );
   }
 }
-
-  api.
-
-
-
-
-  });
-}
+  api.span?.setAttributes({
+    "http.request.method": api.put ? "PUT" : "POST",
+    "url.full": apiUrl.href,
+    "request.id": requestId,
+    "request.startTime": metrics.startTime
+  });
   let attempt = 0;
   while (true) {
     const controller = new AbortController();
@@ -524,16 +522,14 @@ var apiCall = async (api, json) => {
       const delay = calculateRetryDelay(attempt, retryConfig);
       attempt++;
       updateRetryMetrics(metrics);
-
-
-
-
-
-
-
-
-      });
-    }
+      api.span?.addEvent("retry", {
+        attempt,
+        delay,
+        status: res.status,
+        "metrics.startTime": metrics.startTime,
+        "metrics.retryCount": metrics.retryCount,
+        "metrics.lastRetryTime": metrics.lastRetryTime
+      });
       await new Promise((resolve) => setTimeout(resolve, delay));
       continue;
     }
@@ -559,12 +555,10 @@ var apiCall = async (api, json) => {
       );
     }
   }
-
-
-
-
-      });
-    }
+    api.span?.setAttributes({
+      "response.time": Date.now() - metrics.startTime,
+      "response.retries": metrics.retryCount
+    });
     return resJson;
   }
   if (!res.body) {
@@ -584,15 +578,11 @@ var apiCall = async (api, json) => {
         metrics.streamChunks = chunkCount;
         metrics.lastChunkTime = Date.now();
         controller2.enqueue(chunk);
-
-
-
-
-
-          "stream.duration": Date.now() - metrics.startTime,
-          "response.retries": metrics.retryCount
-        });
-      }
+        api.span?.addEvent("stream.chunk", {
+          "stream.chunks": chunkCount,
+          "stream.duration": Date.now() - metrics.startTime,
+          "response.retries": metrics.retryCount
+        });
       }
     });
     let closed = false;
@@ -677,16 +667,14 @@ var apiCall = async (api, json) => {
       const delay = calculateRetryDelay(attempt, retryConfig);
       attempt++;
       updateRetryMetrics(metrics);
-
-
-
-
-
-
-
-
-      });
-    }
+      api.span?.addEvent("retry", {
+        attempt,
+        delay,
+        error: error.message,
+        "metrics.startTime": metrics.startTime,
+        "metrics.retryCount": metrics.retryCount,
+        "metrics.lastRetryTime": metrics.lastRetryTime
+      });
       await new Promise((resolve) => setTimeout(resolve, delay));
       continue;
     }
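These hunks rework the HTTP-layer instrumentation: request metadata goes on the span via `span?.setAttributes(...)`, while retries and stream chunks are recorded as span events via `span?.addEvent(...)`. A self-contained sketch of the same OpenTelemetry pattern, using only `@opentelemetry/api` (the URL, attribute names, and retry policy below are illustrative):

```ts
import { trace } from "@opentelemetry/api";

const tracer = trace.getTracer("example");

// Mirrors the retry instrumentation pattern used in apiCall above.
async function fetchWithRetryEvents(url: string, maxRetries = 3): Promise<Response> {
  return tracer.startActiveSpan("http.request", async (span) => {
    span.setAttributes({ "http.request.method": "POST", "url.full": url });
    for (let attempt = 0; ; attempt++) {
      const res = await fetch(url, { method: "POST" });
      if (res.ok || attempt >= maxRetries) {
        span.setAttributes({ "response.retries": attempt });
        span.end();
        return res;
      }
      // Each retry is recorded as a span event, like the "retry" events above.
      span.addEvent("retry", { attempt, status: res.status });
      await new Promise((r) => setTimeout(r, 2 ** attempt * 100));
    }
  });
}
```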
@@ -1085,7 +1073,7 @@ var AxBaseAI = class {
       modelConfig.stream = false;
     }
     if (this.tracer) {
-      return await this.tracer
+      return await this.tracer.startActiveSpan(
         "AI Chat Request",
         {
           kind: import_api2.SpanKind.SERVER,
@@ -1103,6 +1091,7 @@ var AxBaseAI = class {
             [axSpanAttributes.LLM_REQUEST_LLM_IS_STREAMING]: modelConfig.stream
           }
         },
+        options?.traceContext ?? import_api2.context.active(),
         async (span) => {
           return await this._chat2(model, modelConfig, req, options, span);
         }
@@ -1284,6 +1273,7 @@ var AxBaseAI = class {
             [axSpanAttributes.LLM_REQUEST_MODEL]: embedModel
           }
         },
+        options?.traceContext ?? import_api2.context.active(),
         async (span) => {
           try {
             return await this._embed2(embedModel, req, options, span);
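The "AI Chat Request" and embed spans are now started with the four-argument `startActiveSpan(name, options, context, fn)` overload of the OpenTelemetry JS API, and the context comes from a new `options.traceContext`, falling back to `context.active()`. A sketch of parenting a chat call under your own span, assuming `ai.chat` accepts `traceContext` in its second argument as this diff suggests:

```ts
import { context, trace } from "@opentelemetry/api";

const tracer = trace.getTracer("my-app");

// Hypothetical wrapper: run a chat call as a child of our own workflow span.
async function chatWithParentSpan(ai: any, chatPrompt: unknown[]) {
  return tracer.startActiveSpan("my-workflow", async (parent) => {
    // Build a Context carrying `parent`, exactly what the new option expects.
    const traceContext = trace.setSpan(context.active(), parent);
    try {
      return await ai.chat({ chatPrompt }, { traceContext });
    } finally {
      parent.end();
    }
  });
}
```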
@@ -1985,6 +1975,22 @@ function mapFinishReason(stopReason) {
|
|
|
1985
1975
|
}
|
|
1986
1976
|
}
|
|
1987
1977
|
|
|
1978
|
+
// dsp/modelinfo.ts
|
|
1979
|
+
function getModelInfo({
|
|
1980
|
+
model,
|
|
1981
|
+
modelInfo,
|
|
1982
|
+
models
|
|
1983
|
+
}) {
|
|
1984
|
+
const modelEntry = models?.find((v) => v.key === model);
|
|
1985
|
+
const mappedModel = modelEntry && "model" in modelEntry ? modelEntry.model : model;
|
|
1986
|
+
const exactMatch = modelInfo.find((v) => v.name === model);
|
|
1987
|
+
if (exactMatch) return exactMatch;
|
|
1988
|
+
const normalizedName = mappedModel.replace(/^(anthropic\.|openai\.)/, "").replace(/-latest$/, "").replace(/-\d{8}$/, "").replace(/-v\d+:\d+$/, "").replace(/@\d{8}$/, "").replace(/-\d{2,}(-[a-zA-Z0-9-]+)?$/, "").replace(/-v\d+@\d{8}$/, "").replace(/-v\d+$/, "");
|
|
1989
|
+
const normalizedMatch = modelInfo.find((v) => v.name === normalizedName);
|
|
1990
|
+
if (normalizedMatch) return normalizedMatch;
|
|
1991
|
+
return null;
|
|
1992
|
+
}
|
|
1993
|
+
|
|
1988
1994
|
// ai/openai/types.ts
|
|
1989
1995
|
var AxAIOpenAIModel = /* @__PURE__ */ ((AxAIOpenAIModel2) => {
|
|
1990
1996
|
AxAIOpenAIModel2["O1"] = "o1";
|
|
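The new internal `getModelInfo` helper (dsp/modelinfo.ts) first looks for an exact `modelInfo` entry and only then falls back to a normalized name: provider prefixes, `-latest`, date stamps, and version suffixes are stripped. A small illustration of that normalization in isolation — the regex chain is copied from the hunk above, the model ids are made up:

```ts
// Normalization is only consulted when there is no exact modelInfo match.
const normalize = (model: string) =>
  model
    .replace(/^(anthropic\.|openai\.)/, "")
    .replace(/-latest$/, "")
    .replace(/-\d{8}$/, "")
    .replace(/-v\d+:\d+$/, "")
    .replace(/@\d{8}$/, "")
    .replace(/-\d{2,}(-[a-zA-Z0-9-]+)?$/, "")
    .replace(/-v\d+@\d{8}$/, "")
    .replace(/-v\d+$/, "");

normalize("gpt-4o-2024-08-06");                         // "gpt-4o"
normalize("anthropic.claude-3-5-sonnet-20241022-v2:0"); // "claude-3-5-sonnet"
```

This is what lets a dated or region-prefixed model id pick up the capability flags (`hasThinkingBudget`, `hasShowThoughts`) and pricing of its base entry.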
@@ -2234,6 +2240,10 @@ var AxAIOpenAIImpl = class {
           break;
         case "high":
           reqValue.reasoning_effort = "high";
+          break;
+        case "highest":
+          reqValue.reasoning_effort = "high";
+          break;
       }
     }
     if (this.chatReqUpdater) {
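A new "highest" level is accepted for the thinking budget on the OpenAI path, but since the OpenAI API only defines `reasoning_effort` up to "high", both "high" and "highest" serialize to the same request value. A sketch of passing the option, assuming the per-call shape implied by the `thinkingTokenBudget` plumbing later in this diff:

```ts
// Sketch: "highest" is a valid thinkingTokenBudget, but for o-series OpenAI
// models it is sent as reasoning_effort: "high" (there is no higher tier).
const res = await ai.chat(
  { chatPrompt: [{ role: "user", content: "Prove it step by step." }] },
  { thinkingTokenBudget: "highest" }
);
```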
@@ -2450,15 +2460,7 @@ var AxAIOpenAIBase = class extends AxBaseAI {
         embedModel: config.embedModel
       },
       options,
-      supportFor
-        const modelInf = modelInfo.find((m) => m.name === model);
-        return {
-          functions: true,
-          streaming: true,
-          hasThinkingBudget: modelInf?.hasThinkingBudget ?? false,
-          hasShowThoughts: modelInf?.hasShowThoughts ?? false
-        };
-      }),
+      supportFor,
       models
     });
   }
@@ -2468,18 +2470,24 @@ var AxAIOpenAI = class extends AxAIOpenAIBase {
     apiKey,
     config,
     options,
-    models
+    models,
+    modelInfo
   }) {
     if (!apiKey || apiKey === "") {
       throw new Error("OpenAI API key not set");
     }
-
-
+    modelInfo = [...axModelInfoOpenAI, ...modelInfo ?? []];
+    const supportFor = (model) => {
+      const mi = getModelInfo({
+        model,
+        modelInfo,
+        models
+      });
       return {
         functions: true,
         streaming: true,
-        hasThinkingBudget:
-        hasShowThoughts:
+        hasThinkingBudget: mi?.hasThinkingBudget ?? false,
+        hasShowThoughts: mi?.hasShowThoughts ?? false
       };
     };
     super({
@@ -2489,9 +2497,9 @@ var AxAIOpenAI = class extends AxAIOpenAIBase {
       ...config
     },
     options,
-      modelInfo
+      modelInfo,
       models,
-      supportFor
+      supportFor
     });
     super.setName("OpenAI");
   }
@@ -2510,7 +2518,8 @@ var AxAIAzureOpenAI = class extends AxAIOpenAIBase {
     version = "api-version=2024-02-15-preview",
     config,
     options,
-    models
+    models,
+    modelInfo
  }) {
     if (!apiKey || apiKey === "") {
       throw new Error("Azure OpenAPI API key not set");
@@ -2525,21 +2534,27 @@ var AxAIAzureOpenAI = class extends AxAIOpenAIBase {
       ...axAIAzureOpenAIDefaultConfig(),
       ...config
     };
+    modelInfo = [...axModelInfoOpenAI, ...modelInfo ?? []];
+    const supportFor = (model) => {
+      const mi = getModelInfo({
+        model,
+        modelInfo,
+        models
+      });
+      return {
+        functions: true,
+        streaming: true,
+        hasThinkingBudget: mi?.hasThinkingBudget ?? false,
+        hasShowThoughts: mi?.hasShowThoughts ?? false
+      };
+    };
     super({
       apiKey,
       config: _config,
       options,
       models,
-      modelInfo
-      supportFor
-        const modelInf = axModelInfoOpenAI.find((m) => m.name === model);
-        return {
-          functions: true,
-          streaming: true,
-          hasThinkingBudget: modelInf?.hasThinkingBudget ?? false,
-          hasShowThoughts: modelInf?.hasShowThoughts ?? false
-        };
-      }
+      modelInfo,
+      supportFor
     });
     const host = resourceName.includes("://") ? resourceName : `https://${resourceName}.openai.azure.com/`;
     super.setName("Azure OpenAI");
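Both the OpenAI and Azure OpenAI constructors now accept an optional `modelInfo` array, merge it after the built-in table (`axModelInfoOpenAI`), and derive `supportFor` from `getModelInfo`, so custom or newer models can declare their own pricing and capability flags. A sketch of the idea — the field names follow the entries visible in this diff, the exact exported types are an assumption:

```ts
import { AxAIOpenAI } from "@ax-llm/ax";

const ai = new AxAIOpenAI({
  apiKey: process.env.OPENAI_APIKEY!,
  // Appended after axModelInfoOpenAI; later entries can describe models the
  // built-in table does not know about.
  modelInfo: [
    {
      name: "my-fine-tuned-gpt-4o", // hypothetical model id
      currency: "USD",
      promptTokenCostPer1M: 5,
      completionTokenCostPer1M: 20,
      hasThinkingBudget: false,
      hasShowThoughts: false,
    },
  ],
});
```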
@@ -2889,6 +2904,7 @@ function createToolCall(functionCalls) {
 var AxAIDeepSeekModel = /* @__PURE__ */ ((AxAIDeepSeekModel2) => {
   AxAIDeepSeekModel2["DeepSeekChat"] = "deepseek-chat";
   AxAIDeepSeekModel2["DeepSeekCoder"] = "deepseek-coder";
+  AxAIDeepSeekModel2["DeepSeekReasoner"] = "deepseek-reasoner";
   return AxAIDeepSeekModel2;
 })(AxAIDeepSeekModel || {});

@@ -2897,14 +2913,14 @@ var axModelInfoDeepSeek = [
   {
     name: "deepseek-chat" /* DeepSeekChat */,
     currency: "USD",
-    promptTokenCostPer1M: 0.
-    completionTokenCostPer1M:
+    promptTokenCostPer1M: 0.27,
+    completionTokenCostPer1M: 1.1
   },
   {
-    name: "deepseek-
+    name: "deepseek-reasoner" /* DeepSeekReasoner */,
     currency: "USD",
-    promptTokenCostPer1M: 0.
-    completionTokenCostPer1M:
+    promptTokenCostPer1M: 0.55,
+    completionTokenCostPer1M: 2.19
   }
 ];

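With per-million-token prices in `modelInfo`, a rough request cost is `promptTokens / 1e6 * promptTokenCostPer1M + completionTokens / 1e6 * completionTokenCostPer1M`. A small sketch using the `deepseek-reasoner` entry added in this release (the token counts are made-up inputs):

```ts
// Estimate cost from the modelInfo pricing fields shown above.
function estimateCostUSD(
  promptTokens: number,
  completionTokens: number,
  info: { promptTokenCostPer1M: number; completionTokenCostPer1M: number }
): number {
  return (
    (promptTokens / 1e6) * info.promptTokenCostPer1M +
    (completionTokens / 1e6) * info.completionTokenCostPer1M
  );
}

// deepseek-reasoner: $0.55 per 1M prompt tokens, $2.19 per 1M completion tokens.
estimateCostUSD(12_000, 3_000, { promptTokenCostPer1M: 0.55, completionTokenCostPer1M: 2.19 });
// = 0.012 * 0.55 + 0.003 * 2.19 = 0.0066 + 0.00657 ≈ $0.0132
```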
@@ -2922,7 +2938,8 @@ var AxAIDeepSeek = class extends AxAIOpenAIBase {
     apiKey,
     config,
     options,
-    models
+    models,
+    modelInfo
   }) {
     if (!apiKey || apiKey === "") {
       throw new Error("DeepSeek API key not set");
@@ -2931,12 +2948,19 @@ var AxAIDeepSeek = class extends AxAIOpenAIBase {
       ...axAIDeepSeekDefaultConfig(),
       ...config
     };
+    modelInfo = [...axModelInfoDeepSeek, ...modelInfo ?? []];
     super({
       apiKey,
       config: _config,
       options,
       apiURL: "https://api.deepseek.com",
-      modelInfo
+      modelInfo,
+      supportFor: {
+        functions: true,
+        streaming: true,
+        hasThinkingBudget: false,
+        hasShowThoughts: false
+      },
       models
     });
     super.setName("DeepSeek");
@@ -2949,7 +2973,6 @@ var AxAIGoogleGeminiModel = /* @__PURE__ */ ((AxAIGoogleGeminiModel2) => {
   AxAIGoogleGeminiModel2["Gemini25Flash"] = "gemini-2.5-flash-preview-04-17";
   AxAIGoogleGeminiModel2["Gemini20Flash"] = "gemini-2.0-flash";
   AxAIGoogleGeminiModel2["Gemini20FlashLite"] = "gemini-2.0-flash-lite-preview-02-05";
-  AxAIGoogleGeminiModel2["Gemini20FlashThinking"] = "gemini-2.0-flash-thinking-exp-01-21";
   AxAIGoogleGeminiModel2["Gemini1Pro"] = "gemini-1.0-pro";
   AxAIGoogleGeminiModel2["Gemini15Flash"] = "gemini-1.5-flash";
   AxAIGoogleGeminiModel2["Gemini15Flash002"] = "gemini-1.5-flash-002";
@@ -3025,13 +3048,6 @@ var axModelInfoGoogleGemini = [
     promptTokenCostPer1M: 0,
     completionTokenCostPer1M: 0
   },
-  {
-    name: "gemini-2.0-flash-thinking-exp-01-21" /* Gemini20FlashThinking */,
-    currency: "usd",
-    characterIsToken: false,
-    promptTokenCostPer1M: 0,
-    completionTokenCostPer1M: 0
-  },
   {
     name: "gemini-1.5-flash" /* Gemini15Flash */,
     currency: "usd",
@@ -3234,6 +3250,9 @@ var AxAIGoogleGeminiImpl = class {
         }
       });
     }
+    if (this.options?.googleSearch) {
+      tools.push({ google_search: {} });
+    }
     if (this.options?.urlContext) {
       tools.push({ url_context: {} });
     }
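The Gemini implementation gains a `googleSearch` option that, like the existing `urlContext`, injects a grounding tool (`{ google_search: {} }`) into every chat request. A sketch of enabling it — the option name comes straight from the hunk above, the constructor shape is an assumption:

```ts
import { AxAIGoogleGemini } from "@ax-llm/ax";

// Sketch: turn on Google Search grounding for all Gemini chat requests.
const gemini = new AxAIGoogleGemini({
  apiKey: process.env.GOOGLE_APIKEY!,
  options: { googleSearch: true },
});
```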
@@ -3272,16 +3291,19 @@ var AxAIGoogleGeminiImpl = class {
     if (config.thinkingTokenBudget) {
       switch (config.thinkingTokenBudget) {
         case "minimal":
-          thinkingConfig.thinkingBudget =
+          thinkingConfig.thinkingBudget = 200;
           break;
         case "low":
-          thinkingConfig.thinkingBudget =
+          thinkingConfig.thinkingBudget = 800;
           break;
         case "medium":
-          thinkingConfig.thinkingBudget =
+          thinkingConfig.thinkingBudget = 5e3;
           break;
         case "high":
-          thinkingConfig.thinkingBudget =
+          thinkingConfig.thinkingBudget = 1e4;
+          break;
+        case "highest":
+          thinkingConfig.thinkingBudget = 24500;
           break;
       }
     }
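For Gemini, the named budget levels now resolve to concrete `thinkingBudget` token counts, and a new "highest" level is added at the top. A compact restatement of the mapping above, useful when choosing a level:

```ts
// Values are the thinking-token budgets sent to Gemini for each named level;
// "highest" is the level added in this release.
const geminiThinkingBudget: Record<string, number> = {
  minimal: 200,
  low: 800,
  medium: 5_000,
  high: 10_000,
  highest: 24_500,
};
```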
@@ -3435,7 +3457,8 @@ var AxAIGoogleGemini = class extends AxBaseAI {
     endpointId,
     config,
     options,
-    models
+    models,
+    modelInfo
   }) {
     const isVertex = projectId !== void 0 && region !== void 0;
     let apiURL;
@@ -3474,26 +3497,32 @@ var AxAIGoogleGemini = class extends AxBaseAI {
       apiKey,
       options
     );
+    modelInfo = [...axModelInfoGoogleGemini, ...modelInfo ?? []];
+    const supportFor = (model) => {
+      const mi = getModelInfo({
+        model,
+        modelInfo,
+        models
+      });
+      return {
+        functions: true,
+        streaming: true,
+        hasThinkingBudget: mi?.hasThinkingBudget ?? false,
+        hasShowThoughts: mi?.hasShowThoughts ?? false,
+        functionCot: false
+      };
+    };
     super(aiImpl, {
       name: "GoogleGeminiAI",
       apiURL,
       headers,
-      modelInfo
+      modelInfo,
       defaults: {
         model: _config.model,
         embedModel: _config.embedModel
       },
       options,
-      supportFor
-        const modelInf = axModelInfoGoogleGemini.find((m) => m.name === model);
-        return {
-          functions: true,
-          streaming: true,
-          hasThinkingBudget: modelInf?.hasThinkingBudget ?? false,
-          hasShowThoughts: modelInf?.hasShowThoughts ?? false,
-          functionCot: false
-        };
-      },
+      supportFor,
       models
     });
   }
@@ -3596,7 +3625,8 @@ var AxAIGroq = class extends AxAIOpenAIBase {
     apiKey,
     config,
     options,
-    models
+    models,
+    modelInfo
   }) {
     if (!apiKey || apiKey === "") {
       throw new Error("Groq API key not set");
@@ -3609,13 +3639,21 @@ var AxAIGroq = class extends AxAIOpenAIBase {
       ...options,
       streamingUsage: false
     };
+    modelInfo = [...axModelInfoGroq, ...modelInfo ?? []];
+    const supportFor = {
+      functions: true,
+      streaming: true,
+      hasThinkingBudget: false,
+      hasShowThoughts: false
+    };
     super({
       apiKey,
       config: _config,
       options: _options,
-      modelInfo
+      modelInfo,
       apiURL: "https://api.groq.com/openai/v1",
-      models
+      models,
+      supportFor
     });
     super.setName("Groq");
     this.setOptions(_options);
@@ -3853,7 +3891,8 @@ var AxAIMistral = class extends AxAIOpenAIBase {
     apiKey,
     config,
     options,
-    models
+    models,
+    modelInfo
   }) {
     if (!apiKey || apiKey === "") {
       throw new Error("Mistral API key not set");
@@ -3862,13 +3901,21 @@ var AxAIMistral = class extends AxAIOpenAIBase {
       ...axAIMistralDefaultConfig(),
       ...config
     };
+    modelInfo = [...axModelInfoMistral, ...modelInfo ?? []];
+    const supportFor = {
+      functions: true,
+      streaming: true,
+      hasThinkingBudget: false,
+      hasShowThoughts: false
+    };
     super({
       apiKey,
       config: _config,
       options,
       apiURL: "https://api.mistral.ai/v1",
-      modelInfo
-      models
+      modelInfo,
+      models,
+      supportFor
     });
     super.setName("Mistral");
   }
@@ -3903,7 +3950,13 @@ var AxAIOllama = class extends AxAIOpenAIBase {
       config: _config,
       apiURL: url,
       models,
-      modelInfo: []
+      modelInfo: [],
+      supportFor: {
+        functions: true,
+        streaming: true,
+        hasThinkingBudget: false,
+        hasShowThoughts: false
+      }
     });
     super.setName("Ollama");
   }
@@ -4154,7 +4207,8 @@ var AxAITogether = class extends AxAIOpenAIBase {
     apiKey,
     config,
     options,
-    models
+    models,
+    modelInfo
   }) {
     if (!apiKey || apiKey === "") {
       throw new Error("Together API key not set");
@@ -4163,13 +4217,21 @@ var AxAITogether = class extends AxAIOpenAIBase {
       ...axAITogetherDefaultConfig(),
       ...config
     };
+    modelInfo = [...axModelInfoTogether, ...modelInfo ?? []];
+    const supportFor = {
+      functions: true,
+      streaming: true,
+      hasThinkingBudget: false,
+      hasShowThoughts: false
+    };
     super({
       apiKey,
       config: _config,
       options,
       apiURL: "https://api.together.xyz/v1",
-      modelInfo
-      models
+      modelInfo,
+      models,
+      supportFor
     });
     super.setName("Together");
   }
@@ -4315,7 +4377,8 @@ var AxAIGrok = class extends AxAIOpenAIBase {
     apiKey,
     config,
     options,
-    models
+    models,
+    modelInfo
   }) {
     if (!apiKey || apiKey === "") {
       throw new Error("Grok API key not set");
@@ -4324,22 +4387,28 @@ var AxAIGrok = class extends AxAIOpenAIBase {
       ...axAIGrokDefaultConfig(),
       ...config
     };
+    modelInfo = [...axModelInfoGrok, ...modelInfo ?? []];
+    const supportFor = (model) => {
+      const mi = getModelInfo({
+        model,
+        modelInfo,
+        models
+      });
+      return {
+        functions: true,
+        streaming: true,
+        hasThinkingBudget: mi?.hasThinkingBudget ?? false,
+        hasShowThoughts: mi?.hasShowThoughts ?? false
+      };
+    };
     super({
       apiKey,
       config: _config,
       options,
       apiURL: "https://api.x.ai/v1",
-      modelInfo
+      modelInfo,
       models,
-      supportFor
-        const modelInf = axModelInfoGrok.find((m) => m.name === model);
-        return {
-          functions: true,
-          streaming: true,
-          hasThinkingBudget: modelInf?.hasThinkingBudget ?? false,
-          hasShowThoughts: modelInf?.hasShowThoughts ?? false
-        };
-      }
+      supportFor
     });
     super.setName("Grok");
   }
@@ -4682,9 +4751,9 @@ var SignatureParser = class {
       };
     } catch (error) {
       const errorMessage = error instanceof Error ? error.message : "Unknown error";
-      const
+      const context3 = this.getErrorContext();
       throw new Error(`${errorMessage}
-${
+${context3}`);
     }
   }
   getErrorContext() {
@@ -6783,7 +6852,9 @@ var AxGen = class extends AxProgramWithSignature {
   async forwardSendRequest({
     ai,
     mem,
-    options
+    options,
+    traceContext,
+    firstStep
   }) {
     const {
       sessionId,
@@ -6801,7 +6872,10 @@ var AxGen = class extends AxProgramWithSignature {
       throw new Error("No chat prompt found");
     }
     const functions = _functions?.map((f) => "toFunction" in f ? f.toFunction() : f)?.flat();
-
+    let functionCall = _functionCall ?? this.options?.functionCall;
+    if (!firstStep && (functionCall === "required" || typeof functionCall === "function")) {
+      functionCall = void 0;
+    }
     const res = await ai.chat(
       {
         chatPrompt,
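This is the behavioral change behind the `firstStep` plumbing: a `functionCall` of "required" (or a custom function selector) is only enforced on the first step of the multi-step loop; on later steps it is reset to `undefined` so the model can stop calling tools and produce a final answer. A sketch of the setup this affects — the constructor shape and field names here are illustrative, not confirmed by this diff:

```ts
// Hypothetical tool; the AxFunction shape (name/description/parameters/func) is assumed.
const searchFunction = {
  name: "search",
  description: "Look up release notes",
  parameters: { type: "object", properties: { query: { type: "string" } } },
  func: async ({ query }: { query: string }) => `results for ${query}`,
};

const gen = new AxGen("question:string -> answer:string", {
  functions: [searchFunction],
  functionCall: "required", // forced tool call only while firstStep === true
});

const out = await gen.forward(ai, { question: "What changed in 11.0.49?" });
```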
@@ -6816,7 +6890,8 @@ var AxGen = class extends AxProgramWithSignature {
         rateLimiter,
         stream,
         debug: false,
-        thinkingTokenBudget
+        thinkingTokenBudget,
+        traceContext
       }
     );
     return res;
@@ -6825,7 +6900,9 @@ var AxGen = class extends AxProgramWithSignature {
     ai,
     mem,
     options,
-
+    firstStep,
+    span,
+    traceContext
   }) {
     const { sessionId, traceId, functions: _functions } = options ?? {};
     const fastFail = options?.fastFail ?? this.options?.fastFail;
@@ -6834,7 +6911,9 @@ var AxGen = class extends AxProgramWithSignature {
     const res = await this.forwardSendRequest({
       ai,
       mem,
-      options
+      options,
+      traceContext,
+      firstStep
     });
     if (res instanceof import_web5.ReadableStream) {
       yield* this.processStreamingResponse({
@@ -7072,7 +7151,7 @@ Content: ${result.content}`
     }
     return { ...this.values };
   }
-  async *_forward2(ai, values, options, span) {
+  async *_forward2(ai, values, options, span, traceContext) {
     const stopFunction = (options?.stopFunction ?? this.options?.stopFunction)?.toLowerCase();
     const maxRetries = options.maxRetries ?? this.options?.maxRetries ?? 10;
     const maxSteps = options.maxSteps ?? this.options?.maxSteps ?? 10;
@@ -7094,9 +7173,17 @@ Content: ${result.content}`
     });
     mem.add(prompt, options?.sessionId);
     multiStepLoop: for (let n = 0; n < maxSteps; n++) {
+      const firstStep = n === 0;
       for (let errCount = 0; errCount < maxRetries; errCount++) {
         try {
-          const generator = this.forwardCore({
+          const generator = this.forwardCore({
+            options,
+            ai,
+            mem,
+            firstStep,
+            span,
+            traceContext
+          });
           for await (const delta of generator) {
             if (delta !== void 0) {
               yield { version: errCount, delta };
@@ -7205,6 +7292,8 @@ Content: ${result.content}`
       kind: import_api22.SpanKind.SERVER,
       attributes
     });
+    const currentContext = import_api22.context.active();
+    const traceContext = import_api22.trace.setSpan(currentContext, span);
     try {
       if (!this.excludeContentFromTrace) {
         span.addEvent("input", { content: JSON.stringify(values, null, 2) });
@@ -7216,7 +7305,8 @@ Content: ${result.content}`
         ...options,
         functions
       },
-      span
+      span,
+      traceContext
     );
     if (!this.excludeContentFromTrace) {
      span.addEvent("output", {
@@ -8101,7 +8191,7 @@ var AxDBBase = class {
     if (!this.tracer) {
       return await this._upsert(req, update);
     }
-    return await this.tracer
+    return await this.tracer.startActiveSpan(
       "DB Upsert Request",
       {
         kind: import_api23.SpanKind.SERVER,
@@ -8135,7 +8225,7 @@ var AxDBBase = class {
     if (!this.tracer) {
       return await this._batchUpsert(req, update);
     }
-    return await this.tracer
+    return await this.tracer.startActiveSpan(
       "DB Batch Upsert Request",
       {
         kind: import_api23.SpanKind.SERVER,
@@ -8163,7 +8253,7 @@ var AxDBBase = class {
     if (!this.tracer) {
       return await this._query(req);
     }
-    return await this.tracer
+    return await this.tracer.startActiveSpan(
       "DB Query Request",
       {
         kind: import_api23.SpanKind.SERVER,
@@ -9140,24 +9230,24 @@ var AxJSInterpreter = class {
     this.permissions = permissions ?? [];
   }
   codeInterpreterJavascript(code) {
-    const
+    const context3 = { console };
     if (this.permissions.includes("node:fs" /* FS */)) {
-
+      context3.fs = _fs;
     }
     if (this.permissions.includes("net" /* NET */)) {
-
-
+      context3.http = _http;
+      context3.https = _https;
     }
     if (this.permissions.includes("os" /* OS */)) {
-
+      context3.os = _os;
     }
     if (this.permissions.includes("crypto" /* CRYPTO */)) {
-
+      context3.crypto = _crypto;
     }
     if (this.permissions.includes("process" /* PROCESS */)) {
-
+      context3.process = _process;
    }
-    return (0, import_node_vm.runInNewContext)(`(function() { ${code} })()`,
+    return (0, import_node_vm.runInNewContext)(`(function() { ${code} })()`, context3);
   }
   toFunction() {
     return {
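The interpreter builds its sandbox object (`context3`) from the permission list and evaluates the generated code with `runInNewContext` from `node:vm`, so only whitelisted modules are visible to the evaluated snippet. A standalone sketch of the same permission-gated idea — the permission strings mirror the diff, the rest is illustrative, and note that `node:vm` provides isolation, not a hard security boundary:

```ts
import { runInNewContext } from "node:vm";
import * as fs from "node:fs";
import * as os from "node:os";

// Only whitelisted globals are exposed; everything else is absent from the context.
function runSandboxed(code: string, permissions: string[]): unknown {
  const sandbox: Record<string, unknown> = { console };
  if (permissions.includes("node:fs")) sandbox.fs = fs;
  if (permissions.includes("os")) sandbox.os = os;
  return runInNewContext(`(function() { ${code} })()`, sandbox);
}

runSandboxed("return os.platform()", ["os"]);        // ok: os is in the sandbox
runSandboxed("return fs.readdirSync('.')", ["os"]);  // throws: fs is not defined
```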
@@ -9940,9 +10030,9 @@ var colorLog6 = new ColorLog();
 var AxSimpleClassifierClass = class {
   name;
   context;
-  constructor(name,
+  constructor(name, context3) {
     this.name = name;
-    this.context =
+    this.context = context3;
   }
   getName() {
     return this.name;
@@ -11946,20 +12036,20 @@ var AxRAG = class extends AxChainOfThought {
     this.register(this.genQuery);
   }
   async forward(ai, { question }, options) {
-    let
+    let context3 = [];
     for (let i = 0; i < this.maxHops; i++) {
       const { query } = await this.genQuery.forward(
         ai,
         {
-          context,
+          context: context3,
           question
         },
         options
       );
       const val = await this.queryFn(query);
-
+      context3 = AxStringUtil.dedup([...context3, val]);
     }
-    return super.forward(ai, { context, question }, options);
+    return super.forward(ai, { context: context3, question }, options);
   }
 };
 // Annotate the CommonJS export names for ESM import in node: