@ax-llm/ax 11.0.47 → 11.0.49
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.cjs +254 -164
- package/index.cjs.map +1 -1
- package/index.d.cts +22 -15
- package/index.d.ts +22 -15
- package/index.js +260 -166
- package/index.js.map +1 -1
- package/package.json +1 -1
package/index.js
CHANGED
@@ -1,5 +1,5 @@
 // ai/base.ts
-import { SpanKind } from "@opentelemetry/api";
+import { context, SpanKind } from "@opentelemetry/api";

 // trace/trace.ts
 var axSpanAttributes = {
@@ -205,14 +205,14 @@ var defaultRetryConfig = {
 var defaultTimeoutMs = 3e4;
 var textDecoderStream = TextDecoderStreamNative ?? TextDecoderStreamPolyfill;
 var AxAIServiceError = class extends Error {
-constructor(message, url, requestBody,
+constructor(message, url, requestBody, context3 = {}) {
 super(message);
 this.url = url;
 this.requestBody = requestBody;
 this.name = this.constructor.name;
 this.timestamp = (/* @__PURE__ */ new Date()).toISOString();
 this.errorId = crypto.randomUUID();
-this.context =
+this.context = context3;
 this.stack = this.toString();
 }
 timestamp;
@@ -234,11 +234,11 @@ var AxAIServiceError = class extends Error {
 }
 };
 var AxAIServiceStatusError = class extends AxAIServiceError {
-constructor(status, statusText, url, requestBody,
+constructor(status, statusText, url, requestBody, context3) {
 super(`HTTP ${status} - ${statusText}`, url, requestBody, {
 httpStatus: status,
 httpStatusText: statusText,
-...
+...context3
 });
 this.status = status;
 this.statusText = statusText;
@@ -246,11 +246,11 @@ var AxAIServiceStatusError = class extends AxAIServiceError {
 }
 };
 var AxAIServiceNetworkError = class extends AxAIServiceError {
-constructor(originalError, url, requestBody,
+constructor(originalError, url, requestBody, context3) {
 super(`Network Error: ${originalError.message}`, url, requestBody, {
 originalErrorName: originalError.name,
 originalErrorStack: originalError.stack,
-...
+...context3
 });
 this.originalError = originalError;
 this.name = this.constructor.name;
@@ -258,33 +258,33 @@ var AxAIServiceNetworkError = class extends AxAIServiceError {
 }
 };
 var AxAIServiceResponseError = class extends AxAIServiceError {
-constructor(message, url, requestBody,
-super(message, url, requestBody,
+constructor(message, url, requestBody, context3) {
+super(message, url, requestBody, context3);
 this.name = this.constructor.name;
 }
 };
 var AxAIServiceStreamTerminatedError = class extends AxAIServiceError {
-constructor(url, requestBody, lastChunk,
+constructor(url, requestBody, lastChunk, context3) {
 super("Stream terminated unexpectedly by remote host", url, requestBody, {
 lastChunk,
-...
+...context3
 });
 this.lastChunk = lastChunk;
 this.name = this.constructor.name;
 }
 };
 var AxAIServiceTimeoutError = class extends AxAIServiceError {
-constructor(url, timeoutMs, requestBody,
+constructor(url, timeoutMs, requestBody, context3) {
 super(`Request timeout after ${timeoutMs}ms`, url, requestBody, {
 timeoutMs,
-...
+...context3
 });
 this.name = this.constructor.name;
 }
 };
 var AxAIServiceAuthenticationError = class extends AxAIServiceError {
-constructor(url, requestBody,
-super("Authentication failed", url, requestBody,
+constructor(url, requestBody, context3) {
+super("Authentication failed", url, requestBody, context3);
 this.name = this.constructor.name;
 }
 };
@@ -330,14 +330,12 @@ var apiCall = async (api, json) => {
 );
 }
 }
-
-api.
-
-
-
-
-});
-}
+api.span?.setAttributes({
+"http.request.method": api.put ? "PUT" : "POST",
+"url.full": apiUrl.href,
+"request.id": requestId,
+"request.startTime": metrics.startTime
+});
 let attempt = 0;
 while (true) {
 const controller = new AbortController();
@@ -364,16 +364,14 @@ var apiCall = async (api, json) => {
 const delay = calculateRetryDelay(attempt, retryConfig);
 attempt++;
 updateRetryMetrics(metrics);
-
-
-
-
-
-
-
-
-});
-}
+api.span?.addEvent("retry", {
+attempt,
+delay,
+status: res.status,
+"metrics.startTime": metrics.startTime,
+"metrics.retryCount": metrics.retryCount,
+"metrics.lastRetryTime": metrics.lastRetryTime
+});
 await new Promise((resolve) => setTimeout(resolve, delay));
 continue;
 }
@@ -399,12 +399,10 @@ var apiCall = async (api, json) => {
 );
 }
 }
-
-
-
-
-});
-}
+api.span?.setAttributes({
+"response.time": Date.now() - metrics.startTime,
+"response.retries": metrics.retryCount
+});
 return resJson;
 }
 if (!res.body) {
@@ -424,15 +424,11 @@ var apiCall = async (api, json) => {
 metrics.streamChunks = chunkCount;
 metrics.lastChunkTime = Date.now();
 controller2.enqueue(chunk);
-
-
-
-
-
-"stream.duration": Date.now() - metrics.startTime,
-"response.retries": metrics.retryCount
-});
-}
+api.span?.addEvent("stream.chunk", {
+"stream.chunks": chunkCount,
+"stream.duration": Date.now() - metrics.startTime,
+"response.retries": metrics.retryCount
+});
 }
 });
 let closed = false;
@@ -517,16 +517,14 @@ var apiCall = async (api, json) => {
 const delay = calculateRetryDelay(attempt, retryConfig);
 attempt++;
 updateRetryMetrics(metrics);
-
-
-
-
-
-
-
-
-});
-}
+api.span?.addEvent("retry", {
+attempt,
+delay,
+error: error.message,
+"metrics.startTime": metrics.startTime,
+"metrics.retryCount": metrics.retryCount,
+"metrics.lastRetryTime": metrics.lastRetryTime
+});
 await new Promise((resolve) => setTimeout(resolve, delay));
 continue;
 }
@@ -927,7 +915,7 @@ var AxBaseAI = class {
 modelConfig.stream = false;
 }
 if (this.tracer) {
-return await this.tracer
+return await this.tracer.startActiveSpan(
 "AI Chat Request",
 {
 kind: SpanKind.SERVER,
@@ -945,6 +933,7 @@ var AxBaseAI = class {
 [axSpanAttributes.LLM_REQUEST_LLM_IS_STREAMING]: modelConfig.stream
 }
 },
+options?.traceContext ?? context.active(),
 async (span) => {
 return await this._chat2(model, modelConfig, req, options, span);
 }
@@ -1126,6 +1115,7 @@ var AxBaseAI = class {
 [axSpanAttributes.LLM_REQUEST_MODEL]: embedModel
 }
 },
+options?.traceContext ?? context.active(),
 async (span) => {
 try {
 return await this._embed2(embedModel, req, options, span);
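Note on the tracing change above: `startActiveSpan` now receives an explicit parent context, `options?.traceContext ?? context.active()`, so the "AI Chat Request" and "AI Embed Request" spans can be parented under a span the caller already has open rather than always starting from the ambient context. A minimal TypeScript sketch of passing one in; the `traceContext` option name and the `ai.chat(request, options)` call shape come from this diff, while the tracer name, the message shape, and the pre-built `ai` client are illustrative assumptions:

import { context, trace } from "@opentelemetry/api";

const tracer = trace.getTracer("my-app"); // hypothetical tracer name

async function handleUserRequest(ai: any, question: string) {
  // `ai` is assumed to be an already-constructed Ax client (e.g. AxAIOpenAI) with tracing enabled.
  return await tracer.startActiveSpan("handle-user-request", async (parentSpan) => {
    try {
      // Attach the parent span to the active context and hand it to the chat call,
      // so the library's "AI Chat Request" span becomes a child of it.
      const traceContext = trace.setSpan(context.active(), parentSpan);
      return await ai.chat(
        { chatPrompt: [{ role: "user", content: question }] },
        { traceContext }
      );
    } finally {
      parentSpan.end();
    }
  });
}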
@@ -1827,6 +1817,22 @@ function mapFinishReason(stopReason) {
 }
 }

+// dsp/modelinfo.ts
+function getModelInfo({
+model,
+modelInfo,
+models
+}) {
+const modelEntry = models?.find((v) => v.key === model);
+const mappedModel = modelEntry && "model" in modelEntry ? modelEntry.model : model;
+const exactMatch = modelInfo.find((v) => v.name === model);
+if (exactMatch) return exactMatch;
+const normalizedName = mappedModel.replace(/^(anthropic\.|openai\.)/, "").replace(/-latest$/, "").replace(/-\d{8}$/, "").replace(/-v\d+:\d+$/, "").replace(/@\d{8}$/, "").replace(/-\d{2,}(-[a-zA-Z0-9-]+)?$/, "").replace(/-v\d+@\d{8}$/, "").replace(/-v\d+$/, "");
+const normalizedMatch = modelInfo.find((v) => v.name === normalizedName);
+if (normalizedMatch) return normalizedMatch;
+return null;
+}
+
 // ai/openai/types.ts
 var AxAIOpenAIModel = /* @__PURE__ */ ((AxAIOpenAIModel2) => {
 AxAIOpenAIModel2["O1"] = "o1";
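The new getModelInfo helper above resolves a model key through the optional `models` map, tries an exact `modelInfo` match first, and then retries after normalizing the name (provider prefixes, `-latest`, date stamps, and version suffixes are stripped). A rough TypeScript sketch of that normalization step in isolation, using the same replace chain; the input strings are hypothetical examples, not values taken from the package:

// Same chain of replaces as the bundled getModelInfo shown above.
const normalizeModelName = (name: string): string =>
  name
    .replace(/^(anthropic\.|openai\.)/, "")
    .replace(/-latest$/, "")
    .replace(/-\d{8}$/, "")
    .replace(/-v\d+:\d+$/, "")
    .replace(/@\d{8}$/, "")
    .replace(/-\d{2,}(-[a-zA-Z0-9-]+)?$/, "")
    .replace(/-v\d+@\d{8}$/, "")
    .replace(/-v\d+$/, "");

// Hypothetical inputs, to show the effect of the normalization:
console.log(normalizeModelName("openai.gpt-4o-2024-08-06")); // "gpt-4o"
console.log(normalizeModelName("claude-3-5-sonnet-latest")); // "claude-3-5-sonnet"

In practice this means a `modelInfo` entry only needs the base model name; a dated or provider-prefixed key falls back to it when no exact match exists.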
@@ -2076,6 +2082,10 @@ var AxAIOpenAIImpl = class {
 break;
 case "high":
 reqValue.reasoning_effort = "high";
+break;
+case "highest":
+reqValue.reasoning_effort = "high";
+break;
 }
 }
 if (this.chatReqUpdater) {
@@ -2292,15 +2302,7 @@ var AxAIOpenAIBase = class extends AxBaseAI {
 embedModel: config.embedModel
 },
 options,
-supportFor
-const modelInf = modelInfo.find((m) => m.name === model);
-return {
-functions: true,
-streaming: true,
-hasThinkingBudget: modelInf?.hasThinkingBudget ?? false,
-hasShowThoughts: modelInf?.hasShowThoughts ?? false
-};
-}),
+supportFor,
 models
 });
 }
@@ -2310,18 +2312,24 @@ var AxAIOpenAI = class extends AxAIOpenAIBase {
 apiKey,
 config,
 options,
-models
+models,
+modelInfo
 }) {
 if (!apiKey || apiKey === "") {
 throw new Error("OpenAI API key not set");
 }
-
-
+modelInfo = [...axModelInfoOpenAI, ...modelInfo ?? []];
+const supportFor = (model) => {
+const mi = getModelInfo({
+model,
+modelInfo,
+models
+});
 return {
 functions: true,
 streaming: true,
-hasThinkingBudget:
-hasShowThoughts:
+hasThinkingBudget: mi?.hasThinkingBudget ?? false,
+hasShowThoughts: mi?.hasShowThoughts ?? false
 };
 };
 super({
@@ -2331,9 +2339,9 @@ var AxAIOpenAI = class extends AxAIOpenAIBase {
 ...config
 },
 options,
-modelInfo
+modelInfo,
 models,
-supportFor
+supportFor
 });
 super.setName("OpenAI");
 }
@@ -2352,7 +2360,8 @@ var AxAIAzureOpenAI = class extends AxAIOpenAIBase {
 version = "api-version=2024-02-15-preview",
 config,
 options,
-models
+models,
+modelInfo
 }) {
 if (!apiKey || apiKey === "") {
 throw new Error("Azure OpenAPI API key not set");
@@ -2367,21 +2376,27 @@ var AxAIAzureOpenAI = class extends AxAIOpenAIBase {
 ...axAIAzureOpenAIDefaultConfig(),
 ...config
 };
+modelInfo = [...axModelInfoOpenAI, ...modelInfo ?? []];
+const supportFor = (model) => {
+const mi = getModelInfo({
+model,
+modelInfo,
+models
+});
+return {
+functions: true,
+streaming: true,
+hasThinkingBudget: mi?.hasThinkingBudget ?? false,
+hasShowThoughts: mi?.hasShowThoughts ?? false
+};
+};
 super({
 apiKey,
 config: _config,
 options,
 models,
-modelInfo
-supportFor
-const modelInf = axModelInfoOpenAI.find((m) => m.name === model);
-return {
-functions: true,
-streaming: true,
-hasThinkingBudget: modelInf?.hasThinkingBudget ?? false,
-hasShowThoughts: modelInf?.hasShowThoughts ?? false
-};
-}
+modelInfo,
+supportFor
 });
 const host = resourceName.includes("://") ? resourceName : `https://${resourceName}.openai.azure.com/`;
 super.setName("Azure OpenAI");
@@ -2731,6 +2746,7 @@ function createToolCall(functionCalls) {
 var AxAIDeepSeekModel = /* @__PURE__ */ ((AxAIDeepSeekModel2) => {
 AxAIDeepSeekModel2["DeepSeekChat"] = "deepseek-chat";
 AxAIDeepSeekModel2["DeepSeekCoder"] = "deepseek-coder";
+AxAIDeepSeekModel2["DeepSeekReasoner"] = "deepseek-reasoner";
 return AxAIDeepSeekModel2;
 })(AxAIDeepSeekModel || {});

@@ -2739,14 +2755,14 @@ var axModelInfoDeepSeek = [
 {
 name: "deepseek-chat" /* DeepSeekChat */,
 currency: "USD",
-promptTokenCostPer1M: 0.
-completionTokenCostPer1M:
+promptTokenCostPer1M: 0.27,
+completionTokenCostPer1M: 1.1
 },
 {
-name: "deepseek-
+name: "deepseek-reasoner" /* DeepSeekReasoner */,
 currency: "USD",
-promptTokenCostPer1M: 0.
-completionTokenCostPer1M:
+promptTokenCostPer1M: 0.55,
+completionTokenCostPer1M: 2.19
 }
 ];

@@ -2764,7 +2780,8 @@ var AxAIDeepSeek = class extends AxAIOpenAIBase {
 apiKey,
 config,
 options,
-models
+models,
+modelInfo
 }) {
 if (!apiKey || apiKey === "") {
 throw new Error("DeepSeek API key not set");
@@ -2773,12 +2790,19 @@ var AxAIDeepSeek = class extends AxAIOpenAIBase {
 ...axAIDeepSeekDefaultConfig(),
 ...config
 };
+modelInfo = [...axModelInfoDeepSeek, ...modelInfo ?? []];
 super({
 apiKey,
 config: _config,
 options,
 apiURL: "https://api.deepseek.com",
-modelInfo
+modelInfo,
+supportFor: {
+functions: true,
+streaming: true,
+hasThinkingBudget: false,
+hasShowThoughts: false
+},
 models
 });
 super.setName("DeepSeek");
@@ -2791,7 +2815,6 @@ var AxAIGoogleGeminiModel = /* @__PURE__ */ ((AxAIGoogleGeminiModel2) => {
 AxAIGoogleGeminiModel2["Gemini25Flash"] = "gemini-2.5-flash-preview-04-17";
 AxAIGoogleGeminiModel2["Gemini20Flash"] = "gemini-2.0-flash";
 AxAIGoogleGeminiModel2["Gemini20FlashLite"] = "gemini-2.0-flash-lite-preview-02-05";
-AxAIGoogleGeminiModel2["Gemini20FlashThinking"] = "gemini-2.0-flash-thinking-exp-01-21";
 AxAIGoogleGeminiModel2["Gemini1Pro"] = "gemini-1.0-pro";
 AxAIGoogleGeminiModel2["Gemini15Flash"] = "gemini-1.5-flash";
 AxAIGoogleGeminiModel2["Gemini15Flash002"] = "gemini-1.5-flash-002";
@@ -2867,13 +2890,6 @@ var axModelInfoGoogleGemini = [
 promptTokenCostPer1M: 0,
 completionTokenCostPer1M: 0
 },
-{
-name: "gemini-2.0-flash-thinking-exp-01-21" /* Gemini20FlashThinking */,
-currency: "usd",
-characterIsToken: false,
-promptTokenCostPer1M: 0,
-completionTokenCostPer1M: 0
-},
 {
 name: "gemini-1.5-flash" /* Gemini15Flash */,
 currency: "usd",
@@ -3076,6 +3092,9 @@ var AxAIGoogleGeminiImpl = class {
 }
 });
 }
+if (this.options?.googleSearch) {
+tools.push({ google_search: {} });
+}
 if (this.options?.urlContext) {
 tools.push({ url_context: {} });
 }
@@ -3114,16 +3133,19 @@ var AxAIGoogleGeminiImpl = class {
 if (config.thinkingTokenBudget) {
 switch (config.thinkingTokenBudget) {
 case "minimal":
-thinkingConfig.thinkingBudget =
+thinkingConfig.thinkingBudget = 200;
 break;
 case "low":
-thinkingConfig.thinkingBudget =
+thinkingConfig.thinkingBudget = 800;
 break;
 case "medium":
-thinkingConfig.thinkingBudget =
+thinkingConfig.thinkingBudget = 5e3;
 break;
 case "high":
-thinkingConfig.thinkingBudget =
+thinkingConfig.thinkingBudget = 1e4;
+break;
+case "highest":
+thinkingConfig.thinkingBudget = 24500;
 break;
 }
 }
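With the change above, the named Gemini thinking budgets now map to concrete token counts: minimal = 200, low = 800, medium = 5e3 (5,000), high = 1e4 (10,000), and the newly added highest = 24,500. A short TypeScript sketch of opting into the new level; it assumes a `thinkingTokenBudget` value supplied at call time is merged into `config.thinkingTokenBudget` the same way the AxGen changes later in this diff forward it, and `gemini` stands for an already-constructed AxAIGoogleGemini client:

async function askWithMaxThinking(gemini: any, question: string) {
  return await gemini.chat(
    { chatPrompt: [{ role: "user", content: question }] },
    // "highest" resolves to a 24500-token thinking budget in 11.0.49.
    { thinkingTokenBudget: "highest" }
  );
}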
@@ -3277,7 +3299,8 @@ var AxAIGoogleGemini = class extends AxBaseAI {
 endpointId,
 config,
 options,
-models
+models,
+modelInfo
 }) {
 const isVertex = projectId !== void 0 && region !== void 0;
 let apiURL;
@@ -3316,26 +3339,32 @@ var AxAIGoogleGemini = class extends AxBaseAI {
 apiKey,
 options
 );
+modelInfo = [...axModelInfoGoogleGemini, ...modelInfo ?? []];
+const supportFor = (model) => {
+const mi = getModelInfo({
+model,
+modelInfo,
+models
+});
+return {
+functions: true,
+streaming: true,
+hasThinkingBudget: mi?.hasThinkingBudget ?? false,
+hasShowThoughts: mi?.hasShowThoughts ?? false,
+functionCot: false
+};
+};
 super(aiImpl, {
 name: "GoogleGeminiAI",
 apiURL,
 headers,
-modelInfo
+modelInfo,
 defaults: {
 model: _config.model,
 embedModel: _config.embedModel
 },
 options,
-supportFor
-const modelInf = axModelInfoGoogleGemini.find((m) => m.name === model);
-return {
-functions: true,
-streaming: true,
-hasThinkingBudget: modelInf?.hasThinkingBudget ?? false,
-hasShowThoughts: modelInf?.hasShowThoughts ?? false,
-functionCot: false
-};
-},
+supportFor,
 models
 });
 }
@@ -3438,7 +3467,8 @@ var AxAIGroq = class extends AxAIOpenAIBase {
 apiKey,
 config,
 options,
-models
+models,
+modelInfo
 }) {
 if (!apiKey || apiKey === "") {
 throw new Error("Groq API key not set");
@@ -3451,13 +3481,21 @@ var AxAIGroq = class extends AxAIOpenAIBase {
 ...options,
 streamingUsage: false
 };
+modelInfo = [...axModelInfoGroq, ...modelInfo ?? []];
+const supportFor = {
+functions: true,
+streaming: true,
+hasThinkingBudget: false,
+hasShowThoughts: false
+};
 super({
 apiKey,
 config: _config,
 options: _options,
-modelInfo
+modelInfo,
 apiURL: "https://api.groq.com/openai/v1",
-models
+models,
+supportFor
 });
 super.setName("Groq");
 this.setOptions(_options);
@@ -3695,7 +3733,8 @@ var AxAIMistral = class extends AxAIOpenAIBase {
 apiKey,
 config,
 options,
-models
+models,
+modelInfo
 }) {
 if (!apiKey || apiKey === "") {
 throw new Error("Mistral API key not set");
@@ -3704,13 +3743,21 @@ var AxAIMistral = class extends AxAIOpenAIBase {
 ...axAIMistralDefaultConfig(),
 ...config
 };
+modelInfo = [...axModelInfoMistral, ...modelInfo ?? []];
+const supportFor = {
+functions: true,
+streaming: true,
+hasThinkingBudget: false,
+hasShowThoughts: false
+};
 super({
 apiKey,
 config: _config,
 options,
 apiURL: "https://api.mistral.ai/v1",
-modelInfo
-models
+modelInfo,
+models,
+supportFor
 });
 super.setName("Mistral");
 }
@@ -3745,7 +3792,13 @@ var AxAIOllama = class extends AxAIOpenAIBase {
 config: _config,
 apiURL: url,
 models,
-modelInfo: []
+modelInfo: [],
+supportFor: {
+functions: true,
+streaming: true,
+hasThinkingBudget: false,
+hasShowThoughts: false
+}
 });
 super.setName("Ollama");
 }
@@ -3996,7 +4049,8 @@ var AxAITogether = class extends AxAIOpenAIBase {
 apiKey,
 config,
 options,
-models
+models,
+modelInfo
 }) {
 if (!apiKey || apiKey === "") {
 throw new Error("Together API key not set");
@@ -4005,13 +4059,21 @@ var AxAITogether = class extends AxAIOpenAIBase {
 ...axAITogetherDefaultConfig(),
 ...config
 };
+modelInfo = [...axModelInfoTogether, ...modelInfo ?? []];
+const supportFor = {
+functions: true,
+streaming: true,
+hasThinkingBudget: false,
+hasShowThoughts: false
+};
 super({
 apiKey,
 config: _config,
 options,
 apiURL: "https://api.together.xyz/v1",
-modelInfo
-models
+modelInfo,
+models,
+supportFor
 });
 super.setName("Together");
 }
@@ -4157,7 +4219,8 @@ var AxAIGrok = class extends AxAIOpenAIBase {
 apiKey,
 config,
 options,
-models
+models,
+modelInfo
 }) {
 if (!apiKey || apiKey === "") {
 throw new Error("Grok API key not set");
@@ -4166,22 +4229,28 @@ var AxAIGrok = class extends AxAIOpenAIBase {
 ...axAIGrokDefaultConfig(),
 ...config
 };
+modelInfo = [...axModelInfoGrok, ...modelInfo ?? []];
+const supportFor = (model) => {
+const mi = getModelInfo({
+model,
+modelInfo,
+models
+});
+return {
+functions: true,
+streaming: true,
+hasThinkingBudget: mi?.hasThinkingBudget ?? false,
+hasShowThoughts: mi?.hasShowThoughts ?? false
+};
+};
 super({
 apiKey,
 config: _config,
 options,
 apiURL: "https://api.x.ai/v1",
-modelInfo
+modelInfo,
 models,
-supportFor
-const modelInf = axModelInfoGrok.find((m) => m.name === model);
-return {
-functions: true,
-streaming: true,
-hasThinkingBudget: modelInf?.hasThinkingBudget ?? false,
-hasShowThoughts: modelInf?.hasShowThoughts ?? false
-};
-}
+supportFor
 });
 super.setName("Grok");
 }
@@ -4189,7 +4258,11 @@ var AxAIGrok = class extends AxAIOpenAIBase {

 // dsp/generate.ts
 import { ReadableStream as ReadableStream3 } from "node:stream/web";
-import {
+import {
+context as context2,
+SpanKind as SpanKind2,
+trace
+} from "@opentelemetry/api";

 // ai/util.ts
 function mergeFunctionCalls(functionCalls, functionCallDeltas) {
@@ -4524,9 +4597,9 @@ var SignatureParser = class {
 };
 } catch (error) {
 const errorMessage = error instanceof Error ? error.message : "Unknown error";
-const
+const context3 = this.getErrorContext();
 throw new Error(`${errorMessage}
-${
+${context3}`);
 }
 }
 getErrorContext() {
@@ -6625,7 +6698,9 @@ var AxGen = class extends AxProgramWithSignature {
 async forwardSendRequest({
 ai,
 mem,
-options
+options,
+traceContext,
+firstStep
 }) {
 const {
 sessionId,
@@ -6643,7 +6718,10 @@ var AxGen = class extends AxProgramWithSignature {
 throw new Error("No chat prompt found");
 }
 const functions = _functions?.map((f) => "toFunction" in f ? f.toFunction() : f)?.flat();
-
+let functionCall = _functionCall ?? this.options?.functionCall;
+if (!firstStep && (functionCall === "required" || typeof functionCall === "function")) {
+functionCall = void 0;
+}
 const res = await ai.chat(
 {
 chatPrompt,
@@ -6658,7 +6736,8 @@ var AxGen = class extends AxProgramWithSignature {
 rateLimiter,
 stream,
 debug: false,
-thinkingTokenBudget
+thinkingTokenBudget,
+traceContext
 }
 );
 return res;
@@ -6667,7 +6746,9 @@ var AxGen = class extends AxProgramWithSignature {
 ai,
 mem,
 options,
-
+firstStep,
+span,
+traceContext
 }) {
 const { sessionId, traceId, functions: _functions } = options ?? {};
 const fastFail = options?.fastFail ?? this.options?.fastFail;
@@ -6676,7 +6757,9 @@ var AxGen = class extends AxProgramWithSignature {
 const res = await this.forwardSendRequest({
 ai,
 mem,
-options
+options,
+traceContext,
+firstStep
 });
 if (res instanceof ReadableStream3) {
 yield* this.processStreamingResponse({
@@ -6914,7 +6997,7 @@ Content: ${result.content}`
 }
 return { ...this.values };
 }
-async *_forward2(ai, values, options, span) {
+async *_forward2(ai, values, options, span, traceContext) {
 const stopFunction = (options?.stopFunction ?? this.options?.stopFunction)?.toLowerCase();
 const maxRetries = options.maxRetries ?? this.options?.maxRetries ?? 10;
 const maxSteps = options.maxSteps ?? this.options?.maxSteps ?? 10;
@@ -6936,9 +7019,17 @@ Content: ${result.content}`
 });
 mem.add(prompt, options?.sessionId);
 multiStepLoop: for (let n = 0; n < maxSteps; n++) {
+const firstStep = n === 0;
 for (let errCount = 0; errCount < maxRetries; errCount++) {
 try {
-const generator = this.forwardCore({
+const generator = this.forwardCore({
+options,
+ai,
+mem,
+firstStep,
+span,
+traceContext
+});
 for await (const delta of generator) {
 if (delta !== void 0) {
 yield { version: errCount, delta };
@@ -7047,6 +7138,8 @@ Content: ${result.content}`
 kind: SpanKind2.SERVER,
 attributes
 });
+const currentContext = context2.active();
+const traceContext = trace.setSpan(currentContext, span);
 try {
 if (!this.excludeContentFromTrace) {
 span.addEvent("input", { content: JSON.stringify(values, null, 2) });
@@ -7058,7 +7151,8 @@ Content: ${result.content}`
 ...options,
 functions
 },
-span
+span,
+traceContext
 );
 if (!this.excludeContentFromTrace) {
 span.addEvent("output", {
@@ -7943,7 +8037,7 @@ var AxDBBase = class {
 if (!this.tracer) {
 return await this._upsert(req, update);
 }
-return await this.tracer
+return await this.tracer.startActiveSpan(
 "DB Upsert Request",
 {
 kind: SpanKind3.SERVER,
@@ -7977,7 +8071,7 @@ var AxDBBase = class {
 if (!this.tracer) {
 return await this._batchUpsert(req, update);
 }
-return await this.tracer
+return await this.tracer.startActiveSpan(
 "DB Batch Upsert Request",
 {
 kind: SpanKind3.SERVER,
@@ -8005,7 +8099,7 @@ var AxDBBase = class {
 if (!this.tracer) {
 return await this._query(req);
 }
-return await this.tracer
+return await this.tracer.startActiveSpan(
 "DB Query Request",
 {
 kind: SpanKind3.SERVER,
@@ -8982,24 +9076,24 @@ var AxJSInterpreter = class {
 this.permissions = permissions ?? [];
 }
 codeInterpreterJavascript(code) {
-const
+const context3 = { console };
 if (this.permissions.includes("node:fs" /* FS */)) {
-
+context3.fs = _fs;
 }
 if (this.permissions.includes("net" /* NET */)) {
-
-
+context3.http = _http;
+context3.https = _https;
 }
 if (this.permissions.includes("os" /* OS */)) {
-
+context3.os = _os;
 }
 if (this.permissions.includes("crypto" /* CRYPTO */)) {
-
+context3.crypto = _crypto;
 }
 if (this.permissions.includes("process" /* PROCESS */)) {
-
+context3.process = _process;
 }
-return runInNewContext(`(function() { ${code} })()`,
+return runInNewContext(`(function() { ${code} })()`, context3);
 }
 toFunction() {
 return {
@@ -9782,9 +9876,9 @@ var colorLog6 = new ColorLog();
 var AxSimpleClassifierClass = class {
 name;
 context;
-constructor(name,
+constructor(name, context3) {
 this.name = name;
-this.context =
+this.context = context3;
 }
 getName() {
 return this.name;
@@ -11788,20 +11882,20 @@ var AxRAG = class extends AxChainOfThought {
 this.register(this.genQuery);
 }
 async forward(ai, { question }, options) {
-let
+let context3 = [];
 for (let i = 0; i < this.maxHops; i++) {
 const { query } = await this.genQuery.forward(
 ai,
 {
-context,
+context: context3,
 question
 },
 options
 );
 const val = await this.queryFn(query);
-
+context3 = AxStringUtil.dedup([...context3, val]);
 }
-return super.forward(ai, { context, question }, options);
+return super.forward(ai, { context: context3, question }, options);
 }
 };
 export {