@ax-llm/ax 12.0.16 → 12.0.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.cjs +970 -223
- package/index.cjs.map +1 -1
- package/index.d.cts +289 -14
- package/index.d.ts +289 -14
- package/index.js +966 -223
- package/index.js.map +1 -1
- package/package.json +1 -1
package/index.js
CHANGED
|
@@ -1,71 +1,3 @@
|
|
|
1
|
-
// ai/base.ts
|
|
2
|
-
import crypto2 from "crypto";
|
|
3
|
-
import { context, SpanKind } from "@opentelemetry/api";
|
|
4
|
-
|
|
5
|
-
// trace/trace.ts
|
|
6
|
-
var axSpanAttributes = {
|
|
7
|
-
// LLM
|
|
8
|
-
LLM_SYSTEM: "gen_ai.system",
|
|
9
|
-
LLM_OPERATION_NAME: "gen_ai.operation.name",
|
|
10
|
-
LLM_REQUEST_MODEL: "gen_ai.request.model",
|
|
11
|
-
LLM_REQUEST_MAX_TOKENS: "gen_ai.request.max_tokens",
|
|
12
|
-
LLM_REQUEST_TEMPERATURE: "gen_ai.request.temperature",
|
|
13
|
-
LLM_REQUEST_TOP_K: "gen_ai.request.top_k",
|
|
14
|
-
LLM_REQUEST_FREQUENCY_PENALTY: "gen_ai.request.frequency_penalty",
|
|
15
|
-
LLM_REQUEST_PRESENCE_PENALTY: "gen_ai.request.presence_penalty",
|
|
16
|
-
LLM_REQUEST_STOP_SEQUENCES: "gen_ai.request.stop_sequences",
|
|
17
|
-
LLM_REQUEST_LLM_IS_STREAMING: "gen_ai.request.llm_is_streaming",
|
|
18
|
-
LLM_REQUEST_TOP_P: "gen_ai.request.top_p",
|
|
19
|
-
LLM_USAGE_INPUT_TOKENS: "gen_ai.usage.input_tokens",
|
|
20
|
-
LLM_USAGE_OUTPUT_TOKENS: "gen_ai.usage.output_tokens",
|
|
21
|
-
LLM_USAGE_TOTAL_TOKENS: "gen_ai.usage.total_tokens",
|
|
22
|
-
LLM_USAGE_THOUGHTS_TOKENS: "gen_ai.usage.thoughts_tokens",
|
|
23
|
-
// Vector DB
|
|
24
|
-
DB_SYSTEM: "db.system",
|
|
25
|
-
DB_TABLE: "db.table",
|
|
26
|
-
DB_NAMESPACE: "db.namespace",
|
|
27
|
-
DB_ID: "db.id",
|
|
28
|
-
DB_QUERY_TEXT: "db.query.text",
|
|
29
|
-
DB_VECTOR: "db.vector",
|
|
30
|
-
DB_OPERATION_NAME: "db.operation.name",
|
|
31
|
-
DB_VECTOR_QUERY_TOP_K: "db.vector.query.top_k",
|
|
32
|
-
DB_QUERY_EMBEDDINGS: "db.query.embeddings",
|
|
33
|
-
DB_QUERY_RESULT: "db.query.result",
|
|
34
|
-
// Query Embeddings
|
|
35
|
-
DB_QUERY_EMBEDDINGS_VECTOR: "db.query.embeddings.vector",
|
|
36
|
-
// Query Result (canonical format)
|
|
37
|
-
DB_QUERY_RESULT_ID: "db.query.result.id",
|
|
38
|
-
DB_QUERY_RESULT_SCORE: "db.query.result.score",
|
|
39
|
-
DB_QUERY_RESULT_DISTANCE: "db.query.result.distance",
|
|
40
|
-
DB_QUERY_RESULT_METADATA: "db.query.result.metadata",
|
|
41
|
-
DB_QUERY_RESULT_VECTOR: "db.query.result.vector",
|
|
42
|
-
DB_QUERY_RESULT_DOCUMENT: "db.query.result.document"
|
|
43
|
-
};
|
|
44
|
-
var axSpanEvents = {
|
|
45
|
-
GEN_AI_USER_MESSAGE: "gen_ai.user.message",
|
|
46
|
-
GEN_AI_SYSTEM_MESSAGE: "gen_ai.system.message",
|
|
47
|
-
GEN_AI_ASSISTANT_MESSAGE: "gen_ai.assistant.message",
|
|
48
|
-
GEN_AI_TOOL_MESSAGE: "gen_ai.tool.message",
|
|
49
|
-
// For tool messages in request & response tool calls
|
|
50
|
-
GEN_AI_CHOICE: "gen_ai.choice",
|
|
51
|
-
GEN_AI_USAGE: "gen_ai.usage"
|
|
52
|
-
};
|
|
53
|
-
var AxLLMRequestTypeValues = /* @__PURE__ */ ((AxLLMRequestTypeValues2) => {
|
|
54
|
-
AxLLMRequestTypeValues2["COMPLETION"] = "completion";
|
|
55
|
-
AxLLMRequestTypeValues2["CHAT"] = "chat";
|
|
56
|
-
AxLLMRequestTypeValues2["RERANK"] = "rerank";
|
|
57
|
-
AxLLMRequestTypeValues2["UNKNOWN"] = "unknown";
|
|
58
|
-
return AxLLMRequestTypeValues2;
|
|
59
|
-
})(AxLLMRequestTypeValues || {});
|
|
60
|
-
var AxSpanKindValues = /* @__PURE__ */ ((AxSpanKindValues2) => {
|
|
61
|
-
AxSpanKindValues2["WORKFLOW"] = "workflow";
|
|
62
|
-
AxSpanKindValues2["TASK"] = "task";
|
|
63
|
-
AxSpanKindValues2["AGENT"] = "agent";
|
|
64
|
-
AxSpanKindValues2["TOOL"] = "tool";
|
|
65
|
-
AxSpanKindValues2["UNKNOWN"] = "unknown";
|
|
66
|
-
return AxSpanKindValues2;
|
|
67
|
-
})(AxSpanKindValues || {});
|
|
68
|
-
|
|
69
1
|
// util/apicall.ts
|
|
70
2
|
import crypto from "crypto";
|
|
71
3
|
import {
|
|
@@ -320,6 +252,33 @@ var AxAIServiceAuthenticationError = class extends AxAIServiceError {
|
|
|
320
252
|
this.name = this.constructor.name;
|
|
321
253
|
}
|
|
322
254
|
};
|
|
255
|
+
var AxAIRefusalError = class extends Error {
|
|
256
|
+
constructor(refusalMessage, model, requestId) {
|
|
257
|
+
super(`Model refused to fulfill request: ${refusalMessage}`);
|
|
258
|
+
this.refusalMessage = refusalMessage;
|
|
259
|
+
this.model = model;
|
|
260
|
+
this.requestId = requestId;
|
|
261
|
+
this.name = "AxAIRefusalError";
|
|
262
|
+
this.timestamp = (/* @__PURE__ */ new Date()).toISOString();
|
|
263
|
+
this.errorId = crypto.randomUUID();
|
|
264
|
+
}
|
|
265
|
+
timestamp;
|
|
266
|
+
errorId;
|
|
267
|
+
toString() {
|
|
268
|
+
return [
|
|
269
|
+
`${this.name}: ${this.message}`,
|
|
270
|
+
`Refusal: ${this.refusalMessage}`,
|
|
271
|
+
this.model ? `Model: ${this.model}` : "",
|
|
272
|
+
this.requestId ? `Request ID: ${this.requestId}` : "",
|
|
273
|
+
`Timestamp: ${this.timestamp}`,
|
|
274
|
+
`Error ID: ${this.errorId}`
|
|
275
|
+
].filter(Boolean).join("\n");
|
|
276
|
+
}
|
|
277
|
+
// For Node.js, override the custom inspect method so console.log shows our custom string.
|
|
278
|
+
[Symbol.for("nodejs.util.inspect.custom")](_depth, _options) {
|
|
279
|
+
return this.toString();
|
|
280
|
+
}
|
|
281
|
+
};
|
|
323
282
|
async function safeReadResponseBody(response) {
|
|
324
283
|
try {
|
|
325
284
|
if (response.headers.get("content-type")?.includes("application/json")) {
|
|
@@ -623,6 +582,74 @@ var apiCall = async (api, json) => {
|
|
|
623
582
|
}
|
|
624
583
|
};
|
|
625
584
|
|
|
585
|
+
// ai/base.ts
|
|
586
|
+
import crypto2 from "crypto";
|
|
587
|
+
import { context, SpanKind } from "@opentelemetry/api";
|
|
588
|
+
|
|
589
|
+
// trace/trace.ts
|
|
590
|
+
var axSpanAttributes = {
|
|
591
|
+
// LLM
|
|
592
|
+
LLM_SYSTEM: "gen_ai.system",
|
|
593
|
+
LLM_OPERATION_NAME: "gen_ai.operation.name",
|
|
594
|
+
LLM_REQUEST_MODEL: "gen_ai.request.model",
|
|
595
|
+
LLM_REQUEST_MAX_TOKENS: "gen_ai.request.max_tokens",
|
|
596
|
+
LLM_REQUEST_TEMPERATURE: "gen_ai.request.temperature",
|
|
597
|
+
LLM_REQUEST_TOP_K: "gen_ai.request.top_k",
|
|
598
|
+
LLM_REQUEST_FREQUENCY_PENALTY: "gen_ai.request.frequency_penalty",
|
|
599
|
+
LLM_REQUEST_PRESENCE_PENALTY: "gen_ai.request.presence_penalty",
|
|
600
|
+
LLM_REQUEST_STOP_SEQUENCES: "gen_ai.request.stop_sequences",
|
|
601
|
+
LLM_REQUEST_LLM_IS_STREAMING: "gen_ai.request.llm_is_streaming",
|
|
602
|
+
LLM_REQUEST_TOP_P: "gen_ai.request.top_p",
|
|
603
|
+
LLM_USAGE_INPUT_TOKENS: "gen_ai.usage.input_tokens",
|
|
604
|
+
LLM_USAGE_OUTPUT_TOKENS: "gen_ai.usage.output_tokens",
|
|
605
|
+
LLM_USAGE_TOTAL_TOKENS: "gen_ai.usage.total_tokens",
|
|
606
|
+
LLM_USAGE_THOUGHTS_TOKENS: "gen_ai.usage.thoughts_tokens",
|
|
607
|
+
// Vector DB
|
|
608
|
+
DB_SYSTEM: "db.system",
|
|
609
|
+
DB_TABLE: "db.table",
|
|
610
|
+
DB_NAMESPACE: "db.namespace",
|
|
611
|
+
DB_ID: "db.id",
|
|
612
|
+
DB_QUERY_TEXT: "db.query.text",
|
|
613
|
+
DB_VECTOR: "db.vector",
|
|
614
|
+
DB_OPERATION_NAME: "db.operation.name",
|
|
615
|
+
DB_VECTOR_QUERY_TOP_K: "db.vector.query.top_k",
|
|
616
|
+
DB_QUERY_EMBEDDINGS: "db.query.embeddings",
|
|
617
|
+
DB_QUERY_RESULT: "db.query.result",
|
|
618
|
+
// Query Embeddings
|
|
619
|
+
DB_QUERY_EMBEDDINGS_VECTOR: "db.query.embeddings.vector",
|
|
620
|
+
// Query Result (canonical format)
|
|
621
|
+
DB_QUERY_RESULT_ID: "db.query.result.id",
|
|
622
|
+
DB_QUERY_RESULT_SCORE: "db.query.result.score",
|
|
623
|
+
DB_QUERY_RESULT_DISTANCE: "db.query.result.distance",
|
|
624
|
+
DB_QUERY_RESULT_METADATA: "db.query.result.metadata",
|
|
625
|
+
DB_QUERY_RESULT_VECTOR: "db.query.result.vector",
|
|
626
|
+
DB_QUERY_RESULT_DOCUMENT: "db.query.result.document"
|
|
627
|
+
};
|
|
628
|
+
var axSpanEvents = {
|
|
629
|
+
GEN_AI_USER_MESSAGE: "gen_ai.user.message",
|
|
630
|
+
GEN_AI_SYSTEM_MESSAGE: "gen_ai.system.message",
|
|
631
|
+
GEN_AI_ASSISTANT_MESSAGE: "gen_ai.assistant.message",
|
|
632
|
+
GEN_AI_TOOL_MESSAGE: "gen_ai.tool.message",
|
|
633
|
+
// For tool messages in request & response tool calls
|
|
634
|
+
GEN_AI_CHOICE: "gen_ai.choice",
|
|
635
|
+
GEN_AI_USAGE: "gen_ai.usage"
|
|
636
|
+
};
|
|
637
|
+
var AxLLMRequestTypeValues = /* @__PURE__ */ ((AxLLMRequestTypeValues2) => {
|
|
638
|
+
AxLLMRequestTypeValues2["COMPLETION"] = "completion";
|
|
639
|
+
AxLLMRequestTypeValues2["CHAT"] = "chat";
|
|
640
|
+
AxLLMRequestTypeValues2["RERANK"] = "rerank";
|
|
641
|
+
AxLLMRequestTypeValues2["UNKNOWN"] = "unknown";
|
|
642
|
+
return AxLLMRequestTypeValues2;
|
|
643
|
+
})(AxLLMRequestTypeValues || {});
|
|
644
|
+
var AxSpanKindValues = /* @__PURE__ */ ((AxSpanKindValues2) => {
|
|
645
|
+
AxSpanKindValues2["WORKFLOW"] = "workflow";
|
|
646
|
+
AxSpanKindValues2["TASK"] = "task";
|
|
647
|
+
AxSpanKindValues2["AGENT"] = "agent";
|
|
648
|
+
AxSpanKindValues2["TOOL"] = "tool";
|
|
649
|
+
AxSpanKindValues2["UNKNOWN"] = "unknown";
|
|
650
|
+
return AxSpanKindValues2;
|
|
651
|
+
})(AxSpanKindValues || {});
|
|
652
|
+
|
|
626
653
|
// util/transform.ts
|
|
627
654
|
import {
|
|
628
655
|
TransformStream as TransformStream4
|
|
@@ -1227,6 +1254,14 @@ var AxBaseAI = class {
|
|
|
1227
1254
|
if (options?.showThoughts && !this.getFeatures(model).hasShowThoughts) {
|
|
1228
1255
|
throw new Error(`Model ${model} does not support showThoughts.`);
|
|
1229
1256
|
}
|
|
1257
|
+
const modelInfo = this.modelInfo.find(
|
|
1258
|
+
(info) => info.name === model
|
|
1259
|
+
);
|
|
1260
|
+
if (modelInfo?.isExpensive && options?.useExpensiveModel !== "yes") {
|
|
1261
|
+
throw new Error(
|
|
1262
|
+
`Model ${model} is marked as expensive and requires explicit confirmation. Set useExpensiveModel: "yes" to proceed.`
|
|
1263
|
+
);
|
|
1264
|
+
}
|
|
1230
1265
|
modelConfig.stream = (options?.stream !== void 0 ? options.stream : modelConfig.stream) ?? true;
|
|
1231
1266
|
const canStream = this.getFeatures(model).streaming;
|
|
1232
1267
|
if (!canStream) {
|
|
@@ -1988,7 +2023,13 @@ var AxAIAnthropicImpl = class {
|
|
|
1988
2023
|
};
|
|
1989
2024
|
createChatResp = (resp) => {
|
|
1990
2025
|
if (resp.type === "error") {
|
|
1991
|
-
throw new
|
|
2026
|
+
throw new AxAIRefusalError(
|
|
2027
|
+
resp.error.message,
|
|
2028
|
+
void 0,
|
|
2029
|
+
// model not specified in error response
|
|
2030
|
+
void 0
|
|
2031
|
+
// requestId not specified in error response
|
|
2032
|
+
);
|
|
1992
2033
|
}
|
|
1993
2034
|
const finishReason = mapFinishReason(resp.stop_reason);
|
|
1994
2035
|
const showThoughts = this.currentPromptConfig?.thinkingTokenBudget !== "none" && this.currentPromptConfig?.showThoughts !== false;
|
|
@@ -2044,7 +2085,13 @@ var AxAIAnthropicImpl = class {
|
|
|
2044
2085
|
}
|
|
2045
2086
|
if (resp.type === "error") {
|
|
2046
2087
|
const { error } = resp;
|
|
2047
|
-
throw new
|
|
2088
|
+
throw new AxAIRefusalError(
|
|
2089
|
+
error.message,
|
|
2090
|
+
void 0,
|
|
2091
|
+
// model not specified in error event
|
|
2092
|
+
void 0
|
|
2093
|
+
// requestId not specified in error event
|
|
2094
|
+
);
|
|
2048
2095
|
}
|
|
2049
2096
|
const index = 0;
|
|
2050
2097
|
if (resp.type === "message_start") {
|
|
@@ -2349,8 +2396,6 @@ function mapFinishReason(stopReason) {
|
|
|
2349
2396
|
|
|
2350
2397
|
// ai/openai/chat_types.ts
|
|
2351
2398
|
var AxAIOpenAIModel = /* @__PURE__ */ ((AxAIOpenAIModel2) => {
|
|
2352
|
-
AxAIOpenAIModel2["O1"] = "o1";
|
|
2353
|
-
AxAIOpenAIModel2["O1Mini"] = "o1-mini";
|
|
2354
2399
|
AxAIOpenAIModel2["GPT4"] = "gpt-4";
|
|
2355
2400
|
AxAIOpenAIModel2["GPT41"] = "gpt-4.1";
|
|
2356
2401
|
AxAIOpenAIModel2["GPT41Mini"] = "gpt-4.1-mini";
|
|
@@ -2363,6 +2408,11 @@ var AxAIOpenAIModel = /* @__PURE__ */ ((AxAIOpenAIModel2) => {
|
|
|
2363
2408
|
AxAIOpenAIModel2["GPT35TextDavinci002"] = "text-davinci-002";
|
|
2364
2409
|
AxAIOpenAIModel2["GPT3TextBabbage002"] = "text-babbage-002";
|
|
2365
2410
|
AxAIOpenAIModel2["GPT3TextAda001"] = "text-ada-001";
|
|
2411
|
+
AxAIOpenAIModel2["O1"] = "o1";
|
|
2412
|
+
AxAIOpenAIModel2["O1Mini"] = "o1-mini";
|
|
2413
|
+
AxAIOpenAIModel2["O3"] = "o3";
|
|
2414
|
+
AxAIOpenAIModel2["O3Mini"] = "o3-mini";
|
|
2415
|
+
AxAIOpenAIModel2["O4Mini"] = "o4-mini";
|
|
2366
2416
|
return AxAIOpenAIModel2;
|
|
2367
2417
|
})(AxAIOpenAIModel || {});
|
|
2368
2418
|
var AxAIOpenAIEmbedModel = /* @__PURE__ */ ((AxAIOpenAIEmbedModel2) => {
|
|
@@ -2374,8 +2424,6 @@ var AxAIOpenAIEmbedModel = /* @__PURE__ */ ((AxAIOpenAIEmbedModel2) => {
|
|
|
2374
2424
|
|
|
2375
2425
|
// ai/openai/responses_types.ts
|
|
2376
2426
|
var AxAIOpenAIResponsesModel = /* @__PURE__ */ ((AxAIOpenAIResponsesModel2) => {
|
|
2377
|
-
AxAIOpenAIResponsesModel2["O1"] = "o1";
|
|
2378
|
-
AxAIOpenAIResponsesModel2["O1Mini"] = "o1-mini";
|
|
2379
2427
|
AxAIOpenAIResponsesModel2["GPT4"] = "gpt-4";
|
|
2380
2428
|
AxAIOpenAIResponsesModel2["GPT41"] = "gpt-4.1";
|
|
2381
2429
|
AxAIOpenAIResponsesModel2["GPT41Mini"] = "gpt-4.1-mini";
|
|
@@ -2388,6 +2436,10 @@ var AxAIOpenAIResponsesModel = /* @__PURE__ */ ((AxAIOpenAIResponsesModel2) => {
|
|
|
2388
2436
|
AxAIOpenAIResponsesModel2["GPT35TextDavinci002"] = "text-davinci-002";
|
|
2389
2437
|
AxAIOpenAIResponsesModel2["GPT3TextBabbage002"] = "text-babbage-002";
|
|
2390
2438
|
AxAIOpenAIResponsesModel2["GPT3TextAda001"] = "text-ada-001";
|
|
2439
|
+
AxAIOpenAIResponsesModel2["O1Pro"] = "o1-pro";
|
|
2440
|
+
AxAIOpenAIResponsesModel2["O1"] = "o1";
|
|
2441
|
+
AxAIOpenAIResponsesModel2["O1Mini"] = "o1-mini";
|
|
2442
|
+
AxAIOpenAIResponsesModel2["O3Pro"] = "o3-pro";
|
|
2391
2443
|
AxAIOpenAIResponsesModel2["O3"] = "o3";
|
|
2392
2444
|
AxAIOpenAIResponsesModel2["O3Mini"] = "o3-mini";
|
|
2393
2445
|
AxAIOpenAIResponsesModel2["O4Mini"] = "o4-mini";
|
|
@@ -2396,20 +2448,7 @@ var AxAIOpenAIResponsesModel = /* @__PURE__ */ ((AxAIOpenAIResponsesModel2) => {
|
|
|
2396
2448
|
|
|
2397
2449
|
// ai/openai/info.ts
|
|
2398
2450
|
var axModelInfoOpenAI = [
|
|
2399
|
-
|
|
2400
|
-
name: "o1" /* O1 */,
|
|
2401
|
-
currency: "usd",
|
|
2402
|
-
promptTokenCostPer1M: 15,
|
|
2403
|
-
completionTokenCostPer1M: 60,
|
|
2404
|
-
hasThinkingBudget: true
|
|
2405
|
-
},
|
|
2406
|
-
{
|
|
2407
|
-
name: "o1-mini" /* O1Mini */,
|
|
2408
|
-
currency: "usd",
|
|
2409
|
-
promptTokenCostPer1M: 1.1,
|
|
2410
|
-
completionTokenCostPer1M: 14.4,
|
|
2411
|
-
hasThinkingBudget: true
|
|
2412
|
-
},
|
|
2451
|
+
// Not Reasoning models
|
|
2413
2452
|
{
|
|
2414
2453
|
name: "gpt-4" /* GPT4 */,
|
|
2415
2454
|
currency: "usd",
|
|
@@ -2458,30 +2497,36 @@ var axModelInfoOpenAI = [
|
|
|
2458
2497
|
promptTokenCostPer1M: 0.5,
|
|
2459
2498
|
completionTokenCostPer1M: 1.5
|
|
2460
2499
|
},
|
|
2461
|
-
//
|
|
2500
|
+
// Reasoning models
|
|
2501
|
+
{
|
|
2502
|
+
name: "o1" /* O1 */,
|
|
2503
|
+
currency: "usd",
|
|
2504
|
+
promptTokenCostPer1M: 15,
|
|
2505
|
+
completionTokenCostPer1M: 60
|
|
2506
|
+
},
|
|
2507
|
+
{
|
|
2508
|
+
name: "o1-mini" /* O1Mini */,
|
|
2509
|
+
currency: "usd",
|
|
2510
|
+
promptTokenCostPer1M: 1.1,
|
|
2511
|
+
completionTokenCostPer1M: 14.4
|
|
2512
|
+
},
|
|
2462
2513
|
{
|
|
2463
2514
|
name: "o3" /* O3 */,
|
|
2464
2515
|
currency: "usd",
|
|
2465
2516
|
promptTokenCostPer1M: 15,
|
|
2466
|
-
completionTokenCostPer1M: 60
|
|
2467
|
-
hasThinkingBudget: true,
|
|
2468
|
-
hasShowThoughts: true
|
|
2517
|
+
completionTokenCostPer1M: 60
|
|
2469
2518
|
},
|
|
2470
2519
|
{
|
|
2471
2520
|
name: "o3-mini" /* O3Mini */,
|
|
2472
2521
|
currency: "usd",
|
|
2473
2522
|
promptTokenCostPer1M: 1.1,
|
|
2474
|
-
completionTokenCostPer1M: 4.4
|
|
2475
|
-
hasThinkingBudget: true,
|
|
2476
|
-
hasShowThoughts: true
|
|
2523
|
+
completionTokenCostPer1M: 4.4
|
|
2477
2524
|
},
|
|
2478
2525
|
{
|
|
2479
2526
|
name: "o4-mini" /* O4Mini */,
|
|
2480
2527
|
currency: "usd",
|
|
2481
2528
|
promptTokenCostPer1M: 1.1,
|
|
2482
|
-
completionTokenCostPer1M: 4.4
|
|
2483
|
-
hasThinkingBudget: true,
|
|
2484
|
-
hasShowThoughts: true
|
|
2529
|
+
completionTokenCostPer1M: 4.4
|
|
2485
2530
|
},
|
|
2486
2531
|
// Embedding models
|
|
2487
2532
|
{
|
|
@@ -2503,8 +2548,123 @@ var axModelInfoOpenAI = [
|
|
|
2503
2548
|
completionTokenCostPer1M: 0.13
|
|
2504
2549
|
}
|
|
2505
2550
|
];
|
|
2551
|
+
var axModelInfoOpenAIResponses = [
|
|
2552
|
+
// Not Reasoning models
|
|
2553
|
+
{
|
|
2554
|
+
name: "gpt-4" /* GPT4 */,
|
|
2555
|
+
currency: "usd",
|
|
2556
|
+
promptTokenCostPer1M: 30,
|
|
2557
|
+
completionTokenCostPer1M: 60
|
|
2558
|
+
},
|
|
2559
|
+
{
|
|
2560
|
+
name: "gpt-4.1" /* GPT41 */,
|
|
2561
|
+
currency: "usd",
|
|
2562
|
+
promptTokenCostPer1M: 2,
|
|
2563
|
+
completionTokenCostPer1M: 8
|
|
2564
|
+
},
|
|
2565
|
+
{
|
|
2566
|
+
name: "gpt-4.1-mini" /* GPT41Mini */,
|
|
2567
|
+
currency: "usd",
|
|
2568
|
+
promptTokenCostPer1M: 0.4,
|
|
2569
|
+
completionTokenCostPer1M: 1.6
|
|
2570
|
+
},
|
|
2571
|
+
{
|
|
2572
|
+
name: "gpt-4o" /* GPT4O */,
|
|
2573
|
+
currency: "usd",
|
|
2574
|
+
promptTokenCostPer1M: 5,
|
|
2575
|
+
completionTokenCostPer1M: 15
|
|
2576
|
+
},
|
|
2577
|
+
{
|
|
2578
|
+
name: "gpt-4o-mini" /* GPT4OMini */,
|
|
2579
|
+
currency: "usd",
|
|
2580
|
+
promptTokenCostPer1M: 0.15,
|
|
2581
|
+
completionTokenCostPer1M: 0.6
|
|
2582
|
+
},
|
|
2583
|
+
{
|
|
2584
|
+
name: "chatgpt-4o-latest" /* GPT4ChatGPT4O */,
|
|
2585
|
+
currency: "usd",
|
|
2586
|
+
promptTokenCostPer1M: 5,
|
|
2587
|
+
completionTokenCostPer1M: 15
|
|
2588
|
+
},
|
|
2589
|
+
{
|
|
2590
|
+
name: "gpt-4-turbo" /* GPT4Turbo */,
|
|
2591
|
+
currency: "usd",
|
|
2592
|
+
promptTokenCostPer1M: 10,
|
|
2593
|
+
completionTokenCostPer1M: 30
|
|
2594
|
+
},
|
|
2595
|
+
{
|
|
2596
|
+
name: "gpt-3.5-turbo" /* GPT35Turbo */,
|
|
2597
|
+
currency: "usd",
|
|
2598
|
+
promptTokenCostPer1M: 0.5,
|
|
2599
|
+
completionTokenCostPer1M: 1.5
|
|
2600
|
+
},
|
|
2601
|
+
// Reasoning models
|
|
2602
|
+
{
|
|
2603
|
+
name: "o1-pro" /* O1Pro */,
|
|
2604
|
+
currency: "usd",
|
|
2605
|
+
promptTokenCostPer1M: 150,
|
|
2606
|
+
completionTokenCostPer1M: 600,
|
|
2607
|
+
hasThinkingBudget: true,
|
|
2608
|
+
hasShowThoughts: true,
|
|
2609
|
+
isExpensive: true
|
|
2610
|
+
},
|
|
2611
|
+
{
|
|
2612
|
+
name: "o1" /* O1 */,
|
|
2613
|
+
currency: "usd",
|
|
2614
|
+
promptTokenCostPer1M: 15,
|
|
2615
|
+
completionTokenCostPer1M: 60,
|
|
2616
|
+
hasThinkingBudget: true,
|
|
2617
|
+
hasShowThoughts: true
|
|
2618
|
+
},
|
|
2619
|
+
{
|
|
2620
|
+
name: "o3-pro" /* O3Pro */,
|
|
2621
|
+
currency: "usd",
|
|
2622
|
+
promptTokenCostPer1M: 20,
|
|
2623
|
+
completionTokenCostPer1M: 80,
|
|
2624
|
+
hasThinkingBudget: true,
|
|
2625
|
+
hasShowThoughts: true,
|
|
2626
|
+
isExpensive: true
|
|
2627
|
+
},
|
|
2628
|
+
{
|
|
2629
|
+
name: "o3" /* O3 */,
|
|
2630
|
+
currency: "usd",
|
|
2631
|
+
promptTokenCostPer1M: 15,
|
|
2632
|
+
completionTokenCostPer1M: 60,
|
|
2633
|
+
hasThinkingBudget: true,
|
|
2634
|
+
hasShowThoughts: true
|
|
2635
|
+
},
|
|
2636
|
+
{
|
|
2637
|
+
name: "o3-mini" /* O3Mini */,
|
|
2638
|
+
currency: "usd",
|
|
2639
|
+
promptTokenCostPer1M: 1.1,
|
|
2640
|
+
completionTokenCostPer1M: 4.4,
|
|
2641
|
+
hasThinkingBudget: true,
|
|
2642
|
+
hasShowThoughts: true
|
|
2643
|
+
},
|
|
2644
|
+
{
|
|
2645
|
+
name: "o4-mini" /* O4Mini */,
|
|
2646
|
+
currency: "usd",
|
|
2647
|
+
promptTokenCostPer1M: 1.1,
|
|
2648
|
+
completionTokenCostPer1M: 4.4,
|
|
2649
|
+
hasThinkingBudget: true,
|
|
2650
|
+
hasShowThoughts: true
|
|
2651
|
+
}
|
|
2652
|
+
];
|
|
2506
2653
|
|
|
2507
2654
|
// ai/openai/api.ts
|
|
2655
|
+
var isOpenAIThinkingModel = (model) => {
|
|
2656
|
+
const thinkingModels = [
|
|
2657
|
+
"o1" /* O1 */,
|
|
2658
|
+
"o1-mini" /* O1Mini */,
|
|
2659
|
+
"o3" /* O3 */,
|
|
2660
|
+
"o3-mini" /* O3Mini */,
|
|
2661
|
+
"o4-mini" /* O4Mini */,
|
|
2662
|
+
// Pro models (string values since they're not in the regular chat enum)
|
|
2663
|
+
"o1-pro",
|
|
2664
|
+
"o3-pro"
|
|
2665
|
+
];
|
|
2666
|
+
return thinkingModels.includes(model) || thinkingModels.includes(model);
|
|
2667
|
+
};
|
|
2508
2668
|
var axAIOpenAIDefaultConfig = () => structuredClone({
|
|
2509
2669
|
model: "gpt-4.1" /* GPT41 */,
|
|
2510
2670
|
embedModel: "text-embedding-3-small" /* TextEmbedding3Small */,
|
|
@@ -2568,20 +2728,24 @@ var AxAIOpenAIImpl = class {
|
|
|
2568
2728
|
const frequencyPenalty = req.modelConfig?.frequencyPenalty ?? this.config.frequencyPenalty;
|
|
2569
2729
|
const stream = req.modelConfig?.stream ?? this.config.stream;
|
|
2570
2730
|
const store = this.config.store;
|
|
2731
|
+
const isThinkingModel = isOpenAIThinkingModel(model);
|
|
2571
2732
|
let reqValue = {
|
|
2572
2733
|
model,
|
|
2573
2734
|
messages,
|
|
2574
2735
|
response_format: this.config?.responseFormat ? { type: this.config.responseFormat } : void 0,
|
|
2575
2736
|
tools,
|
|
2576
2737
|
tool_choice: toolsChoice,
|
|
2577
|
-
|
|
2578
|
-
|
|
2579
|
-
|
|
2580
|
-
|
|
2738
|
+
// For thinking models, don't set these parameters as they're not supported
|
|
2739
|
+
...isThinkingModel ? {} : {
|
|
2740
|
+
max_completion_tokens: req.modelConfig?.maxTokens ?? this.config.maxTokens,
|
|
2741
|
+
temperature: req.modelConfig?.temperature ?? this.config.temperature,
|
|
2742
|
+
top_p: req.modelConfig?.topP ?? this.config.topP ?? 1,
|
|
2743
|
+
n: req.modelConfig?.n ?? this.config.n,
|
|
2744
|
+
presence_penalty: req.modelConfig?.presencePenalty ?? this.config.presencePenalty,
|
|
2745
|
+
...frequencyPenalty ? { frequency_penalty: frequencyPenalty } : {}
|
|
2746
|
+
},
|
|
2581
2747
|
stop: req.modelConfig?.stopSequences ?? this.config.stop,
|
|
2582
|
-
presence_penalty: req.modelConfig?.presencePenalty ?? this.config.presencePenalty,
|
|
2583
2748
|
logit_bias: this.config.logitBias,
|
|
2584
|
-
...frequencyPenalty ? { frequency_penalty: frequencyPenalty } : {},
|
|
2585
2749
|
...stream && this.streamingUsage ? { stream: true, stream_options: { include_usage: true } } : {},
|
|
2586
2750
|
...store ? { store } : {},
|
|
2587
2751
|
...this.config.serviceTier ? { service_tier: this.config.serviceTier } : {},
|
|
@@ -2672,6 +2836,9 @@ var AxAIOpenAIImpl = class {
|
|
|
2672
2836
|
totalTokens: usage.total_tokens
|
|
2673
2837
|
} : void 0;
|
|
2674
2838
|
const results = choices.map((choice) => {
|
|
2839
|
+
if (choice.message.refusal) {
|
|
2840
|
+
throw new AxAIRefusalError(choice.message.refusal, resp.model, resp.id);
|
|
2841
|
+
}
|
|
2675
2842
|
const finishReason = mapFinishReason2(choice.finish_reason);
|
|
2676
2843
|
const functionCalls = choice.message.tool_calls?.map(
|
|
2677
2844
|
({ id: id2, function: { arguments: params, name } }) => ({
|
|
@@ -2683,8 +2850,9 @@ var AxAIOpenAIImpl = class {
|
|
|
2683
2850
|
return {
|
|
2684
2851
|
index: choice.index,
|
|
2685
2852
|
id: `${choice.index}`,
|
|
2686
|
-
content: choice.message.content,
|
|
2853
|
+
content: choice.message.content ?? void 0,
|
|
2687
2854
|
thought: choice.message.reasoning_content,
|
|
2855
|
+
annotations: choice.message.annotations,
|
|
2688
2856
|
functionCalls,
|
|
2689
2857
|
finishReason
|
|
2690
2858
|
};
|
|
@@ -2711,11 +2879,16 @@ var AxAIOpenAIImpl = class {
|
|
|
2711
2879
|
delta: {
|
|
2712
2880
|
content,
|
|
2713
2881
|
role,
|
|
2882
|
+
refusal,
|
|
2714
2883
|
tool_calls: toolCalls,
|
|
2715
|
-
reasoning_content: thought
|
|
2884
|
+
reasoning_content: thought,
|
|
2885
|
+
annotations
|
|
2716
2886
|
},
|
|
2717
2887
|
finish_reason: oaiFinishReason
|
|
2718
2888
|
}) => {
|
|
2889
|
+
if (refusal) {
|
|
2890
|
+
throw new AxAIRefusalError(refusal, void 0, id);
|
|
2891
|
+
}
|
|
2719
2892
|
const finishReason = mapFinishReason2(oaiFinishReason);
|
|
2720
2893
|
const functionCalls = toolCalls?.map(({ id: _id, index: index2, function: { name, arguments: params } }) => {
|
|
2721
2894
|
if (typeof _id === "string" && typeof index2 === "number" && !sstate.indexIdMap[index2]) {
|
|
@@ -2733,9 +2906,10 @@ var AxAIOpenAIImpl = class {
|
|
|
2733
2906
|
}).filter((v) => v !== null);
|
|
2734
2907
|
return {
|
|
2735
2908
|
index,
|
|
2736
|
-
content,
|
|
2909
|
+
content: content ?? void 0,
|
|
2737
2910
|
role,
|
|
2738
2911
|
thought,
|
|
2912
|
+
annotations,
|
|
2739
2913
|
functionCalls,
|
|
2740
2914
|
finishReason,
|
|
2741
2915
|
id
|
|
@@ -3832,11 +4006,29 @@ var AxAIGoogleGeminiImpl = class {
|
|
|
3832
4006
|
result.finishReason = "stop";
|
|
3833
4007
|
break;
|
|
3834
4008
|
case "SAFETY":
|
|
3835
|
-
throw new
|
|
4009
|
+
throw new AxAIRefusalError(
|
|
4010
|
+
"Content was blocked due to safety settings",
|
|
4011
|
+
void 0,
|
|
4012
|
+
// model not available in candidate
|
|
4013
|
+
void 0
|
|
4014
|
+
// requestId not available
|
|
4015
|
+
);
|
|
3836
4016
|
case "RECITATION":
|
|
3837
|
-
throw new
|
|
4017
|
+
throw new AxAIRefusalError(
|
|
4018
|
+
"Content was blocked due to recitation policy",
|
|
4019
|
+
void 0,
|
|
4020
|
+
// model not available in candidate
|
|
4021
|
+
void 0
|
|
4022
|
+
// requestId not available
|
|
4023
|
+
);
|
|
3838
4024
|
case "MALFORMED_FUNCTION_CALL":
|
|
3839
|
-
throw new
|
|
4025
|
+
throw new AxAIRefusalError(
|
|
4026
|
+
"Function call was malformed and blocked",
|
|
4027
|
+
void 0,
|
|
4028
|
+
// model not available in candidate
|
|
4029
|
+
void 0
|
|
4030
|
+
// requestId not available
|
|
4031
|
+
);
|
|
3840
4032
|
}
|
|
3841
4033
|
if (!candidate.content || !candidate.content.parts) {
|
|
3842
4034
|
return result;
|
|
@@ -4445,6 +4637,18 @@ var AxAIOllama = class extends AxAIOpenAIBase {
|
|
|
4445
4637
|
};
|
|
4446
4638
|
|
|
4447
4639
|
// ai/openai/responses_api.ts
|
|
4640
|
+
var isOpenAIResponsesThinkingModel = (model) => {
|
|
4641
|
+
const thinkingModels = [
|
|
4642
|
+
"o1" /* O1 */,
|
|
4643
|
+
"o1-mini" /* O1Mini */,
|
|
4644
|
+
"o1-pro" /* O1Pro */,
|
|
4645
|
+
"o3" /* O3 */,
|
|
4646
|
+
"o3-mini" /* O3Mini */,
|
|
4647
|
+
"o3-pro" /* O3Pro */,
|
|
4648
|
+
"o4-mini" /* O4Mini */
|
|
4649
|
+
];
|
|
4650
|
+
return thinkingModels.includes(model);
|
|
4651
|
+
};
|
|
4448
4652
|
var AxAIOpenAIResponsesImpl = class {
|
|
4449
4653
|
constructor(config, streamingUsage, responsesReqUpdater) {
|
|
4450
4654
|
this.config = config;
|
|
@@ -4600,10 +4804,37 @@ var AxAIOpenAIResponsesImpl = class {
|
|
|
4600
4804
|
parameters: v.parameters ?? {}
|
|
4601
4805
|
})
|
|
4602
4806
|
);
|
|
4603
|
-
const includeFields =
|
|
4604
|
-
|
|
4605
|
-
|
|
4606
|
-
|
|
4807
|
+
const includeFields = (
|
|
4808
|
+
// | 'computer_call_output.output.image_url'
|
|
4809
|
+
// | 'reasoning.encrypted_content'
|
|
4810
|
+
// | 'code_interpreter_call.outputs'
|
|
4811
|
+
[]
|
|
4812
|
+
);
|
|
4813
|
+
const isThinkingModel = isOpenAIResponsesThinkingModel(model);
|
|
4814
|
+
let reasoningSummary = this.config.reasoningSummary;
|
|
4815
|
+
if (!config?.showThoughts) {
|
|
4816
|
+
reasoningSummary = void 0;
|
|
4817
|
+
} else if (!reasoningSummary) {
|
|
4818
|
+
reasoningSummary = "auto";
|
|
4819
|
+
}
|
|
4820
|
+
let reasoningEffort = this.config.reasoningEffort;
|
|
4821
|
+
if (config?.thinkingTokenBudget) {
|
|
4822
|
+
switch (config.thinkingTokenBudget) {
|
|
4823
|
+
case "none":
|
|
4824
|
+
reasoningEffort = void 0;
|
|
4825
|
+
break;
|
|
4826
|
+
case "minimal":
|
|
4827
|
+
reasoningEffort = "low";
|
|
4828
|
+
break;
|
|
4829
|
+
case "low":
|
|
4830
|
+
reasoningEffort = "medium";
|
|
4831
|
+
break;
|
|
4832
|
+
case "medium":
|
|
4833
|
+
case "high":
|
|
4834
|
+
case "highest":
|
|
4835
|
+
reasoningEffort = "high";
|
|
4836
|
+
break;
|
|
4837
|
+
}
|
|
4607
4838
|
}
|
|
4608
4839
|
let mutableReq = {
|
|
4609
4840
|
model,
|
|
@@ -4612,9 +4843,15 @@ var AxAIOpenAIResponsesImpl = class {
|
|
|
4612
4843
|
instructions: finalInstructions,
|
|
4613
4844
|
tools: tools?.length ? tools : void 0,
|
|
4614
4845
|
tool_choice: req.functionCall === "none" || req.functionCall === "auto" || req.functionCall === "required" ? req.functionCall : typeof req.functionCall === "object" && req.functionCall.function ? { type: "function", name: req.functionCall.function.name } : void 0,
|
|
4615
|
-
|
|
4616
|
-
|
|
4617
|
-
|
|
4846
|
+
// For thinking models, don't set these parameters as they're not supported
|
|
4847
|
+
...isThinkingModel ? {
|
|
4848
|
+
max_output_tokens: req.modelConfig?.maxTokens ?? this.config.maxTokens ?? void 0
|
|
4849
|
+
} : {
|
|
4850
|
+
temperature: req.modelConfig?.temperature ?? this.config.temperature ?? void 0,
|
|
4851
|
+
top_p: req.modelConfig?.topP ?? this.config.topP ?? void 0,
|
|
4852
|
+
presence_penalty: req.modelConfig?.presencePenalty ?? this.config.presencePenalty ?? void 0,
|
|
4853
|
+
frequency_penalty: req.modelConfig?.frequencyPenalty ?? this.config.frequencyPenalty ?? void 0
|
|
4854
|
+
},
|
|
4618
4855
|
stream: req.modelConfig?.stream ?? this.config.stream ?? false,
|
|
4619
4856
|
// Sourced from modelConfig or global config
|
|
4620
4857
|
// Optional fields from AxAIOpenAIResponsesRequest that need to be in Mutable for initialization
|
|
@@ -4623,7 +4860,12 @@ var AxAIOpenAIResponsesImpl = class {
|
|
|
4623
4860
|
metadata: void 0,
|
|
4624
4861
|
parallel_tool_calls: this.config.parallelToolCalls,
|
|
4625
4862
|
previous_response_id: void 0,
|
|
4626
|
-
|
|
4863
|
+
...reasoningEffort ? {
|
|
4864
|
+
reasoning: {
|
|
4865
|
+
effort: reasoningEffort,
|
|
4866
|
+
summary: reasoningSummary
|
|
4867
|
+
}
|
|
4868
|
+
} : {},
|
|
4627
4869
|
service_tier: this.config.serviceTier,
|
|
4628
4870
|
store: this.config.store,
|
|
4629
4871
|
text: void 0,
|
|
@@ -4714,7 +4956,7 @@ var AxAIOpenAIResponsesImpl = class {
|
|
|
4714
4956
|
switch (item.type) {
|
|
4715
4957
|
case "message":
|
|
4716
4958
|
currentResult.id = item.id;
|
|
4717
|
-
currentResult.content = contentToText(item.content);
|
|
4959
|
+
currentResult.content = contentToText(item.content, id);
|
|
4718
4960
|
currentResult.finishReason = item.status === "completed" ? "stop" : "content_filter";
|
|
4719
4961
|
break;
|
|
4720
4962
|
case "reasoning":
|
|
@@ -4887,7 +5129,10 @@ var AxAIOpenAIResponsesImpl = class {
|
|
|
4887
5129
|
switch (event.item.type) {
|
|
4888
5130
|
case "message":
|
|
4889
5131
|
baseResult.id = event.item.id;
|
|
4890
|
-
baseResult.content = contentToText(
|
|
5132
|
+
baseResult.content = contentToText(
|
|
5133
|
+
event.item.content,
|
|
5134
|
+
event.item.id
|
|
5135
|
+
);
|
|
4891
5136
|
break;
|
|
4892
5137
|
case "function_call":
|
|
4893
5138
|
baseResult.id = event.item.id;
|
|
@@ -5040,24 +5285,11 @@ var AxAIOpenAIResponsesImpl = class {
|
|
|
5040
5285
|
];
|
|
5041
5286
|
}
|
|
5042
5287
|
break;
|
|
5043
|
-
case "reasoning":
|
|
5044
|
-
{
|
|
5045
|
-
const reasoningItem = event.item;
|
|
5046
|
-
baseResult.id = event.item.id;
|
|
5047
|
-
if (reasoningItem.encrypted_content) {
|
|
5048
|
-
baseResult.thought = reasoningItem.encrypted_content;
|
|
5049
|
-
} else if (reasoningItem.summary) {
|
|
5050
|
-
baseResult.thought = reasoningItem.summary.map(
|
|
5051
|
-
(s2) => typeof s2 === "object" ? JSON.stringify(s2) : s2
|
|
5052
|
-
).join("\n");
|
|
5053
|
-
}
|
|
5054
|
-
}
|
|
5055
|
-
break;
|
|
5056
5288
|
}
|
|
5057
5289
|
break;
|
|
5058
5290
|
case "response.content_part.added":
|
|
5059
5291
|
baseResult.id = event.item_id;
|
|
5060
|
-
baseResult.content = contentToText([event.part]);
|
|
5292
|
+
baseResult.content = contentToText([event.part], event.item_id);
|
|
5061
5293
|
break;
|
|
5062
5294
|
case "response.output_text.delta":
|
|
5063
5295
|
baseResult.id = event.item_id;
|
|
@@ -5078,16 +5310,21 @@ var AxAIOpenAIResponsesImpl = class {
|
|
|
5078
5310
|
}
|
|
5079
5311
|
];
|
|
5080
5312
|
break;
|
|
5081
|
-
case
|
|
5082
|
-
|
|
5313
|
+
// case 'response.function_call_arguments.done':
|
|
5314
|
+
// // Function call arguments done - don't return function calls here
|
|
5315
|
+
// // The mergeFunctionCalls will handle combining name and arguments
|
|
5316
|
+
// baseResult.id = event.item_id
|
|
5317
|
+
// baseResult.finishReason = 'function_call'
|
|
5318
|
+
// break
|
|
5083
5319
|
case "response.reasoning_summary_text.delta":
|
|
5084
5320
|
baseResult.id = event.item_id;
|
|
5085
5321
|
baseResult.thought = event.delta;
|
|
5086
5322
|
break;
|
|
5087
|
-
case
|
|
5088
|
-
|
|
5089
|
-
|
|
5090
|
-
|
|
5323
|
+
// case 'response.reasoning_summary_text.done':
|
|
5324
|
+
// // Reasoning summary done
|
|
5325
|
+
// baseResult.id = event.item_id
|
|
5326
|
+
// baseResult.thought = event.text
|
|
5327
|
+
// break
|
|
5091
5328
|
// File search tool events
|
|
5092
5329
|
case "response.file_search_call.in_progress":
|
|
5093
5330
|
case "response.file_search_call.searching":
|
|
@@ -5181,10 +5418,6 @@ var AxAIOpenAIResponsesImpl = class {
|
|
|
5181
5418
|
baseResult.id = event.item.id;
|
|
5182
5419
|
baseResult.finishReason = "function_call";
|
|
5183
5420
|
break;
|
|
5184
|
-
case "reasoning":
|
|
5185
|
-
baseResult.id = event.item.id;
|
|
5186
|
-
baseResult.finishReason = "stop";
|
|
5187
|
-
break;
|
|
5188
5421
|
}
|
|
5189
5422
|
break;
|
|
5190
5423
|
case "response.completed":
|
|
@@ -5242,11 +5475,13 @@ var AxAIOpenAIResponsesImpl = class {
|
|
|
5242
5475
|
return [apiConfig, reqValue];
|
|
5243
5476
|
}
|
|
5244
5477
|
};
|
|
5245
|
-
var contentToText = (content) => {
|
|
5246
|
-
|
|
5247
|
-
|
|
5248
|
-
|
|
5249
|
-
|
|
5478
|
+
var contentToText = (content, responseId) => {
|
|
5479
|
+
const refusalContent = content.filter((c) => c.type === "refusal");
|
|
5480
|
+
if (refusalContent.length > 0) {
|
|
5481
|
+
const refusalMessage = refusalContent.map((c) => c.refusal).join("\n");
|
|
5482
|
+
throw new AxAIRefusalError(refusalMessage, void 0, responseId);
|
|
5483
|
+
}
|
|
5484
|
+
return content.filter((c) => c.type === "output_text").map((c) => c.text).join("\n");
|
|
5250
5485
|
};
|
|
5251
5486
|
|
|
5252
5487
|
// ai/openai/responses_api_base.ts
|
|
@@ -5310,7 +5545,7 @@ var AxAIOpenAIResponses = class extends AxAIOpenAIResponsesBase {
|
|
|
5310
5545
|
if (!apiKey || apiKey === "") {
|
|
5311
5546
|
throw new Error("OpenAI API key not set");
|
|
5312
5547
|
}
|
|
5313
|
-
modelInfo = [...
|
|
5548
|
+
modelInfo = [...axModelInfoOpenAIResponses, ...modelInfo ?? []];
|
|
5314
5549
|
const supportFor = (model) => {
|
|
5315
5550
|
const mi = getModelInfo({
|
|
5316
5551
|
model,
|
|
@@ -5834,213 +6069,279 @@ import {
|
|
|
5834
6069
|
|
|
5835
6070
|
// ai/validate.ts
|
|
5836
6071
|
function axValidateChatRequestMessage(item) {
|
|
6072
|
+
const value = (v) => JSON.stringify(v, null, 2);
|
|
5837
6073
|
if (!item) {
|
|
5838
|
-
throw new Error(
|
|
6074
|
+
throw new Error(
|
|
6075
|
+
`Chat request message item cannot be null or undefined, received: ${value(item)}`
|
|
6076
|
+
);
|
|
5839
6077
|
}
|
|
5840
|
-
|
|
5841
|
-
|
|
6078
|
+
const role = item?.role;
|
|
6079
|
+
if (!role) {
|
|
6080
|
+
throw new Error(
|
|
6081
|
+
`Chat request message must have a role, received: ${value(role)}`
|
|
6082
|
+
);
|
|
5842
6083
|
}
|
|
5843
|
-
switch (
|
|
5844
|
-
case "system":
|
|
5845
|
-
|
|
6084
|
+
switch (role) {
|
|
6085
|
+
case "system": {
|
|
6086
|
+
const systemItem = item;
|
|
6087
|
+
if (!systemItem.content || systemItem.content.trim() === "") {
|
|
5846
6088
|
throw new Error(
|
|
5847
|
-
|
|
6089
|
+
`System message content cannot be empty or whitespace-only, received: ${value(systemItem.content)}`
|
|
5848
6090
|
);
|
|
5849
6091
|
}
|
|
5850
6092
|
break;
|
|
5851
|
-
|
|
5852
|
-
|
|
5853
|
-
|
|
6093
|
+
}
|
|
6094
|
+
case "user": {
|
|
6095
|
+
const userItem = item;
|
|
6096
|
+
if (!userItem.content) {
|
|
6097
|
+
throw new Error(
|
|
6098
|
+
`User message content cannot be undefined, received: ${value(userItem.content)}`
|
|
6099
|
+
);
|
|
5854
6100
|
}
|
|
5855
|
-
if (typeof
|
|
5856
|
-
if (
|
|
6101
|
+
if (typeof userItem.content === "string") {
|
|
6102
|
+
if (userItem.content.trim() === "") {
|
|
5857
6103
|
throw new Error(
|
|
5858
|
-
|
|
6104
|
+
`User message content cannot be empty or whitespace-only, received: ${value(userItem.content)}`
|
|
5859
6105
|
);
|
|
5860
6106
|
}
|
|
5861
|
-
} else if (Array.isArray(
|
|
5862
|
-
if (
|
|
5863
|
-
throw new Error(
|
|
6107
|
+
} else if (Array.isArray(userItem.content)) {
|
|
6108
|
+
if (userItem.content.length === 0) {
|
|
6109
|
+
throw new Error(
|
|
6110
|
+
`User message content array cannot be empty, received: ${value(userItem.content)}`
|
|
6111
|
+
);
|
|
5864
6112
|
}
|
|
5865
|
-
for (let index = 0; index <
|
|
5866
|
-
const contentItem =
|
|
6113
|
+
for (let index = 0; index < userItem.content.length; index++) {
|
|
6114
|
+
const contentItem = userItem.content[index];
|
|
5867
6115
|
if (!contentItem || typeof contentItem !== "object") {
|
|
5868
6116
|
throw new Error(
|
|
5869
|
-
`User message content item at index ${index} must be an object`
|
|
6117
|
+
`User message content item at index ${index} must be an object, received: ${value(contentItem)}`
|
|
5870
6118
|
);
|
|
5871
6119
|
}
|
|
5872
|
-
|
|
6120
|
+
const contentType = contentItem?.type;
|
|
6121
|
+
if (!contentType) {
|
|
5873
6122
|
throw new Error(
|
|
5874
|
-
`User message content item at index ${index} must have a type`
|
|
6123
|
+
`User message content item at index ${index} must have a type, received: ${value(contentType)}`
|
|
5875
6124
|
);
|
|
5876
6125
|
}
|
|
5877
|
-
switch (
|
|
5878
|
-
case "text":
|
|
5879
|
-
|
|
6126
|
+
switch (contentType) {
|
|
6127
|
+
case "text": {
|
|
6128
|
+
const textItem = contentItem;
|
|
6129
|
+
if (!textItem.text || textItem.text.trim() === "") {
|
|
5880
6130
|
throw new Error(
|
|
5881
|
-
`User message text content at index ${index} cannot be empty or whitespace-only`
|
|
6131
|
+
`User message text content at index ${index} cannot be empty or whitespace-only, received: ${value(textItem.text)}`
|
|
5882
6132
|
);
|
|
5883
6133
|
}
|
|
5884
6134
|
break;
|
|
5885
|
-
|
|
5886
|
-
|
|
6135
|
+
}
|
|
6136
|
+
case "image": {
|
|
6137
|
+
const imageItem = contentItem;
|
|
6138
|
+
if (!imageItem.image || imageItem.image.trim() === "") {
|
|
5887
6139
|
throw new Error(
|
|
5888
|
-
`User message image content at index ${index} cannot be empty`
|
|
6140
|
+
`User message image content at index ${index} cannot be empty, received: ${value(imageItem.image)}`
|
|
5889
6141
|
);
|
|
5890
6142
|
}
|
|
5891
|
-
if (!
|
|
6143
|
+
if (!imageItem.mimeType || imageItem.mimeType.trim() === "") {
|
|
5892
6144
|
throw new Error(
|
|
5893
|
-
`User message image content at index ${index} must have a mimeType`
|
|
6145
|
+
`User message image content at index ${index} must have a mimeType, received: ${value(imageItem.mimeType)}`
|
|
5894
6146
|
);
|
|
5895
6147
|
}
|
|
5896
6148
|
break;
|
|
5897
|
-
|
|
5898
|
-
|
|
6149
|
+
}
|
|
6150
|
+
case "audio": {
|
|
6151
|
+
const audioItem = contentItem;
|
|
6152
|
+
if (!audioItem.data || audioItem.data.trim() === "") {
|
|
5899
6153
|
throw new Error(
|
|
5900
|
-
`User message audio content at index ${index} cannot be empty`
|
|
6154
|
+
`User message audio content at index ${index} cannot be empty, received: ${value(audioItem.data)}`
|
|
5901
6155
|
);
|
|
5902
6156
|
}
|
|
5903
6157
|
break;
|
|
6158
|
+
}
|
|
5904
6159
|
default:
|
|
5905
6160
|
throw new Error(
|
|
5906
|
-
`User message content item at index ${index} has unsupported type: ${
|
|
6161
|
+
`User message content item at index ${index} has unsupported type: ${value(contentType)}`
|
|
5907
6162
|
);
|
|
5908
6163
|
}
|
|
5909
6164
|
}
|
|
5910
6165
|
} else {
|
|
5911
6166
|
throw new Error(
|
|
5912
|
-
|
|
6167
|
+
`User message content must be a string or array of content objects, received: ${value(userItem.content)}`
|
|
5913
6168
|
);
|
|
5914
6169
|
}
|
|
5915
6170
|
break;
|
|
5916
|
-
|
|
5917
|
-
|
|
6171
|
+
}
|
|
6172
|
+
case "assistant": {
|
|
6173
|
+
const assistantItem = item;
|
|
6174
|
+
if (!assistantItem.content && !assistantItem.functionCalls) {
|
|
5918
6175
|
throw new Error(
|
|
5919
|
-
|
|
6176
|
+
`Assistant message must have either content or function calls, received content: ${value(assistantItem.content)}, functionCalls: ${value(assistantItem.functionCalls)}`
|
|
5920
6177
|
);
|
|
5921
6178
|
}
|
|
5922
|
-
if (
|
|
5923
|
-
throw new Error(
|
|
6179
|
+
if (assistantItem.content && typeof assistantItem.content !== "string") {
|
|
6180
|
+
throw new Error(
|
|
6181
|
+
`Assistant message content must be a string, received: ${value(assistantItem.content)}`
|
|
6182
|
+
);
|
|
5924
6183
|
}
|
|
5925
|
-
if (
|
|
5926
|
-
throw new Error(
|
|
6184
|
+
if (assistantItem.functionCalls && !Array.isArray(assistantItem.functionCalls)) {
|
|
6185
|
+
throw new Error(
|
|
6186
|
+
`Assistant message function calls must be an array, received: ${value(assistantItem.functionCalls)}`
|
|
6187
|
+
);
|
|
5927
6188
|
}
|
|
5928
6189
|
break;
|
|
5929
|
-
|
|
5930
|
-
|
|
5931
|
-
|
|
6190
|
+
}
|
|
6191
|
+
case "function": {
|
|
6192
|
+
const functionItem = item;
|
|
6193
|
+
if (!functionItem.functionId || functionItem.functionId.trim() === "") {
|
|
6194
|
+
throw new Error(
|
|
6195
|
+
`Function message must have a non-empty functionId, received: ${value(functionItem.functionId)}`
|
|
6196
|
+
);
|
|
5932
6197
|
}
|
|
5933
|
-
if (
|
|
5934
|
-
throw new Error(
|
|
6198
|
+
if (functionItem.result === void 0 || functionItem.result === null) {
|
|
6199
|
+
throw new Error(
|
|
6200
|
+
`Function message must have a result, received: ${value(functionItem.result)}`
|
|
6201
|
+
);
|
|
5935
6202
|
}
|
|
5936
|
-
if (typeof
|
|
5937
|
-
throw new Error(
|
|
6203
|
+
if (typeof functionItem.result !== "string") {
|
|
6204
|
+
throw new Error(
|
|
6205
|
+
`Function message result must be a string, received: ${value(functionItem.result)}`
|
|
6206
|
+
);
|
|
5938
6207
|
}
|
|
5939
6208
|
break;
|
|
6209
|
+
}
|
|
5940
6210
|
default:
|
|
5941
|
-
throw new Error(
|
|
5942
|
-
`Unsupported message role: ${item.role}`
|
|
5943
|
-
);
|
|
6211
|
+
throw new Error(`Unsupported message role: ${value(role)}`);
|
|
5944
6212
|
}
|
|
5945
6213
|
}
|
|
5946
6214
|
function axValidateChatResponseResult(results) {
|
|
6215
|
+
const value = (v) => JSON.stringify(v, null, 2);
|
|
5947
6216
|
const resultsArray = Array.isArray(results) ? results : [results];
|
|
5948
6217
|
if (resultsArray.length === 0) {
|
|
5949
|
-
throw new Error(
|
|
6218
|
+
throw new Error(
|
|
6219
|
+
`Chat response results cannot be empty, received: ${value(resultsArray)}`
|
|
6220
|
+
);
|
|
5950
6221
|
}
|
|
5951
6222
|
for (let arrayIndex = 0; arrayIndex < resultsArray.length; arrayIndex++) {
|
|
5952
6223
|
const result = resultsArray[arrayIndex];
|
|
5953
6224
|
if (!result) {
|
|
5954
6225
|
throw new Error(
|
|
5955
|
-
`Chat response result at index ${arrayIndex} cannot be null or undefined`
|
|
6226
|
+
`Chat response result at index ${arrayIndex} cannot be null or undefined, received: ${value(result)}`
|
|
5956
6227
|
);
|
|
5957
6228
|
}
|
|
5958
6229
|
if (typeof result.index !== "number") {
|
|
5959
6230
|
throw new Error(
|
|
5960
|
-
`Chat response result at index ${arrayIndex} must have a numeric index`
|
|
6231
|
+
`Chat response result at index ${arrayIndex} must have a numeric index, received: ${value(result.index)}`
|
|
5961
6232
|
);
|
|
5962
6233
|
}
|
|
5963
6234
|
if (result.index < 0) {
|
|
5964
6235
|
throw new Error(
|
|
5965
|
-
`Chat response result at index ${arrayIndex} must have a non-negative index`
|
|
6236
|
+
`Chat response result at index ${arrayIndex} must have a non-negative index, received: ${value(result.index)}`
|
|
5966
6237
|
);
|
|
5967
6238
|
}
|
|
5968
6239
|
if (!result.content && !result.thought && !result.functionCalls && !result.finishReason) {
|
|
5969
6240
|
throw new Error(
|
|
5970
|
-
`Chat response result at index ${arrayIndex} must have at least one of: content, thought, functionCalls, or finishReason`
|
|
6241
|
+
`Chat response result at index ${arrayIndex} must have at least one of: content, thought, functionCalls, or finishReason, received: ${value({ content: result.content, thought: result.thought, functionCalls: result.functionCalls, finishReason: result.finishReason })}`
|
|
5971
6242
|
);
|
|
5972
6243
|
}
|
|
5973
6244
|
if (result.content !== void 0 && typeof result.content !== "string") {
|
|
5974
6245
|
throw new Error(
|
|
5975
|
-
`Chat response result content at index ${arrayIndex} must be a string`
|
|
6246
|
+
`Chat response result content at index ${arrayIndex} must be a string, received: ${value(result.content)}`
|
|
5976
6247
|
);
|
|
5977
6248
|
}
|
|
5978
6249
|
if (result.thought !== void 0 && typeof result.thought !== "string") {
|
|
5979
6250
|
throw new Error(
|
|
5980
|
-
`Chat response result thought at index ${arrayIndex} must be a string`
|
|
6251
|
+
`Chat response result thought at index ${arrayIndex} must be a string, received: ${value(result.thought)}`
|
|
5981
6252
|
);
|
|
5982
6253
|
}
|
|
5983
6254
|
if (result.name !== void 0) {
|
|
5984
6255
|
if (typeof result.name !== "string") {
|
|
5985
6256
|
throw new Error(
|
|
5986
|
-
`Chat response result name at index ${arrayIndex} must be a string`
|
|
6257
|
+
`Chat response result name at index ${arrayIndex} must be a string, received: ${value(result.name)}`
|
|
5987
6258
|
);
|
|
5988
6259
|
}
|
|
5989
6260
|
if (result.name.trim() === "") {
|
|
5990
6261
|
throw new Error(
|
|
5991
|
-
`Chat response result name at index ${arrayIndex} cannot be empty or whitespace-only`
|
|
6262
|
+
`Chat response result name at index ${arrayIndex} cannot be empty or whitespace-only, received: ${value(result.name)}`
|
|
6263
|
+
);
|
|
6264
|
+
}
|
|
6265
|
+
}
|
|
6266
|
+
if (result.annotations !== void 0) {
|
|
6267
|
+
if (!Array.isArray(result.annotations)) {
|
|
6268
|
+
throw new Error(
|
|
6269
|
+
`Chat response result annotations at index ${arrayIndex} must be an array, received: ${value(result.annotations)}`
|
|
5992
6270
|
);
|
|
5993
6271
|
}
|
|
6272
|
+
for (let i = 0; i < result.annotations.length; i++) {
|
|
6273
|
+
const annotation = result.annotations[i];
|
|
6274
|
+
if (!annotation || typeof annotation !== "object") {
|
|
6275
|
+
throw new Error(
|
|
6276
|
+
`Chat response result annotation at index ${arrayIndex}[${i}] must be an object, received: ${value(annotation)}`
|
|
6277
|
+
);
|
|
6278
|
+
}
|
|
6279
|
+
if (annotation.type !== "url_citation") {
|
|
6280
|
+
throw new Error(
|
|
6281
|
+
`Chat response result annotation at index ${arrayIndex}[${i}] must have type 'url_citation', received: ${value(annotation.type)}`
|
|
6282
|
+
);
|
|
6283
|
+
}
|
|
6284
|
+
if (!annotation.url_citation || typeof annotation.url_citation !== "object") {
|
|
6285
|
+
throw new Error(
|
|
6286
|
+
`Chat response result annotation at index ${arrayIndex}[${i}] must have a valid url_citation object, received: ${value(annotation.url_citation)}`
|
|
6287
|
+
);
|
|
6288
|
+
}
|
|
6289
|
+
if (typeof annotation.url_citation.url !== "string") {
|
|
6290
|
+
throw new Error(
|
|
6291
|
+
`Chat response result annotation at index ${arrayIndex}[${i}] url_citation.url must be a string, received: ${value(annotation.url_citation.url)}`
|
|
6292
|
+
);
|
|
6293
|
+
}
|
|
6294
|
+
}
|
|
5994
6295
|
}
|
|
5995
6296
|
if (result.id !== void 0) {
|
|
5996
6297
|
if (typeof result.id !== "string") {
|
|
5997
6298
|
throw new Error(
|
|
5998
|
-
`Chat response result id at index ${arrayIndex} must be a string`
|
|
6299
|
+
`Chat response result id at index ${arrayIndex} must be a string, received: ${value(result.id)}`
|
|
5999
6300
|
);
|
|
6000
6301
|
}
|
|
6001
6302
|
if (result.id.trim() === "") {
|
|
6002
6303
|
throw new Error(
|
|
6003
|
-
`Chat response result id at index ${arrayIndex} cannot be empty or whitespace-only`
|
|
6304
|
+
`Chat response result id at index ${arrayIndex} cannot be empty or whitespace-only, received: ${value(result.id)}`
|
|
6004
6305
|
);
|
|
6005
6306
|
}
|
|
6006
6307
|
}
|
|
6007
6308
|
if (result.functionCalls !== void 0) {
|
|
6008
6309
|
if (!Array.isArray(result.functionCalls)) {
|
|
6009
6310
|
throw new Error(
|
|
6010
|
-
`Chat response result functionCalls at index ${arrayIndex} must be an array`
|
|
6311
|
+
`Chat response result functionCalls at index ${arrayIndex} must be an array, received: ${value(result.functionCalls)}`
|
|
6011
6312
|
);
|
|
6012
6313
|
}
|
|
6013
6314
|
for (let callIndex = 0; callIndex < result.functionCalls.length; callIndex++) {
|
|
6014
6315
|
const functionCall = result.functionCalls[callIndex];
|
|
6015
6316
|
if (!functionCall) {
|
|
6016
6317
|
throw new Error(
|
|
6017
|
-
`Function call at index ${callIndex} in result ${arrayIndex} cannot be null or undefined`
|
|
6318
|
+
`Function call at index ${callIndex} in result ${arrayIndex} cannot be null or undefined, received: ${value(functionCall)}`
|
|
6018
6319
|
);
|
|
6019
6320
|
}
|
|
6020
6321
|
if (!functionCall.id || typeof functionCall.id !== "string" || functionCall.id.trim() === "") {
|
|
6021
6322
|
throw new Error(
|
|
6022
|
-
`Function call at index ${callIndex} in result ${arrayIndex} must have a non-empty string id`
|
|
6323
|
+
`Function call at index ${callIndex} in result ${arrayIndex} must have a non-empty string id, received: ${value(functionCall.id)}`
|
|
6023
6324
|
);
|
|
6024
6325
|
}
|
|
6025
6326
|
if (functionCall.type !== "function") {
|
|
6026
6327
|
throw new Error(
|
|
6027
|
-
`Function call at index ${callIndex} in result ${arrayIndex} must have type 'function'`
|
|
6328
|
+
`Function call at index ${callIndex} in result ${arrayIndex} must have type 'function', received: ${value(functionCall.type)}`
|
|
6028
6329
|
);
|
|
6029
6330
|
}
|
|
6030
6331
|
if (!functionCall.function) {
|
|
6031
6332
|
throw new Error(
|
|
6032
|
-
`Function call at index ${callIndex} in result ${arrayIndex} must have a function object`
|
|
6333
|
+
`Function call at index ${callIndex} in result ${arrayIndex} must have a function object, received: ${value(functionCall.function)}`
|
|
6033
6334
|
);
|
|
6034
6335
|
}
|
|
6035
6336
|
if (!functionCall.function.name || typeof functionCall.function.name !== "string" || functionCall.function.name.trim() === "") {
|
|
6036
6337
|
throw new Error(
|
|
6037
|
-
`Function call at index ${callIndex} in result ${arrayIndex} must have a non-empty function name`
|
|
6338
|
+
`Function call at index ${callIndex} in result ${arrayIndex} must have a non-empty function name, received: ${value(functionCall.function.name)}`
|
|
6038
6339
|
);
|
|
6039
6340
|
}
|
|
6040
6341
|
if (functionCall.function.params !== void 0) {
|
|
6041
6342
|
if (typeof functionCall.function.params !== "string" && typeof functionCall.function.params !== "object") {
|
|
6042
6343
|
throw new Error(
|
|
6043
|
-
`Function call params at index ${callIndex} in result ${arrayIndex} must be a string or object`
|
|
6344
|
+
`Function call params at index ${callIndex} in result ${arrayIndex} must be a string or object, received: ${value(functionCall.function.params)}`
|
|
6044
6345
|
);
|
|
6045
6346
|
}
|
|
6046
6347
|
}
|
|
@@ -6056,7 +6357,7 @@ function axValidateChatResponseResult(results) {
|
|
|
6056
6357
|
];
|
|
6057
6358
|
if (!validFinishReasons.includes(result.finishReason)) {
|
|
6058
6359
|
throw new Error(
|
|
6059
|
-
`Chat response result finishReason at index ${arrayIndex} must be one of: ${validFinishReasons.join(", ")}`
|
|
6360
|
+
`Chat response result finishReason at index ${arrayIndex} must be one of: ${validFinishReasons.join(", ")}, received: ${value(result.finishReason)}`
|
|
6060
6361
|
);
|
|
6061
6362
|
}
|
|
6062
6363
|
}
|
|
@@ -8828,7 +9129,7 @@ async function* processStreamingResponse({
|
|
|
8828
9129
|
usage.push(v.modelUsage);
|
|
8829
9130
|
}
|
|
8830
9131
|
for (const result of v.results) {
|
|
8831
|
-
if (result.content === "" && (!result.functionCalls || result.functionCalls.length === 0)) {
|
|
9132
|
+
if (result.content === "" && (!result.thought || result.thought === "") && (!result.functionCalls || result.functionCalls.length === 0)) {
|
|
8832
9133
|
continue;
|
|
8833
9134
|
}
|
|
8834
9135
|
const state = states.find((s2) => s2.index === result.index);
|
|
@@ -9540,7 +9841,7 @@ var toFieldType = (type) => {
|
|
|
9540
9841
|
case "number":
|
|
9541
9842
|
return "number";
|
|
9542
9843
|
case "boolean":
|
|
9543
|
-
return "boolean";
|
|
9844
|
+
return "boolean (true or false)";
|
|
9544
9845
|
case "date":
|
|
9545
9846
|
return 'date ("YYYY-MM-DD" format)';
|
|
9546
9847
|
case "datetime":
|
|
@@ -12698,6 +12999,444 @@ var AxDockerSession = class {
|
|
|
12698
12999
|
}
|
|
12699
13000
|
};
|
|
12700
13001
|
|
|
13002
|
+
// flow/flow.ts
|
|
13003
|
+
var AxFlow = class extends AxProgramWithSignature {
|
|
13004
|
+
nodes = /* @__PURE__ */ new Map();
|
|
13005
|
+
flowDefinition = [];
|
|
13006
|
+
nodeGenerators = /* @__PURE__ */ new Map();
|
|
13007
|
+
loopStack = [];
|
|
13008
|
+
stepLabels = /* @__PURE__ */ new Map();
|
|
13009
|
+
branchContext = null;
|
|
13010
|
+
constructor(signature = "userInput:string -> flowOutput:string") {
|
|
13011
|
+
super(signature);
|
|
13012
|
+
}
|
|
13013
|
+
/**
|
|
13014
|
+
* Declares a reusable computational node and its input/output signature.
|
|
13015
|
+
* Returns a new AxFlow type that tracks this node in the TNodes registry.
|
|
13016
|
+
*
|
|
13017
|
+
* @param name - The name of the node
|
|
13018
|
+
* @param signature - Signature string in the same format as AxSignature
|
|
13019
|
+
* @param options - Optional program forward options (same as AxGen)
|
|
13020
|
+
* @returns New AxFlow instance with updated TNodes type
|
|
13021
|
+
*
|
|
13022
|
+
* @example
|
|
13023
|
+
* ```typescript
|
|
13024
|
+
* flow.node('summarizer', 'text:string -> summary:string')
|
|
13025
|
+
* flow.node('analyzer', 'text:string -> analysis:string, confidence:number', { debug: true })
|
|
13026
|
+
* ```
|
|
13027
|
+
*/
|
|
13028
|
+
node(name, signature, options) {
|
|
13029
|
+
if (!signature) {
|
|
13030
|
+
throw new Error(
|
|
13031
|
+
`Invalid signature for node '${name}': signature cannot be empty`
|
|
13032
|
+
);
|
|
13033
|
+
}
|
|
13034
|
+
this.nodes.set(name, {
|
|
13035
|
+
inputs: {},
|
|
13036
|
+
outputs: {}
|
|
13037
|
+
});
|
|
13038
|
+
this.nodeGenerators.set(name, new AxGen(signature, options));
|
|
13039
|
+
return this;
|
|
13040
|
+
}
|
|
13041
|
+
/**
|
|
13042
|
+
* Applies a synchronous transformation to the state object.
|
|
13043
|
+
* Returns a new AxFlow type with the evolved state.
|
|
13044
|
+
*
|
|
13045
|
+
* @param transform - Function that takes the current state and returns a new state
|
|
13046
|
+
* @returns New AxFlow instance with updated TState type
|
|
13047
|
+
*
|
|
13048
|
+
* @example
|
|
13049
|
+
* ```typescript
|
|
13050
|
+
* flow.map(state => ({ ...state, processedText: state.text.toLowerCase() }))
|
|
13051
|
+
* ```
|
|
13052
|
+
*/
|
|
13053
|
+
map(transform) {
|
|
13054
|
+
const step = (state) => {
|
|
13055
|
+
return transform(state);
|
|
13056
|
+
};
|
|
13057
|
+
if (this.branchContext?.currentBranchValue !== void 0) {
|
|
13058
|
+
const currentBranch = this.branchContext.branches.get(
|
|
13059
|
+
this.branchContext.currentBranchValue
|
|
13060
|
+
) || [];
|
|
13061
|
+
currentBranch.push(step);
|
|
13062
|
+
this.branchContext.branches.set(
|
|
13063
|
+
this.branchContext.currentBranchValue,
|
|
13064
|
+
currentBranch
|
|
13065
|
+
);
|
|
13066
|
+
} else {
|
|
13067
|
+
this.flowDefinition.push(step);
|
|
13068
|
+
}
|
|
13069
|
+
return this;
|
|
13070
|
+
}
|
|
13071
|
+
/**
|
|
13072
|
+
* Labels a step for later reference (useful for feedback loops).
|
|
13073
|
+
*
|
|
13074
|
+
* @param label - The label to assign to the current step position
|
|
13075
|
+
* @returns this (for chaining, no type change)
|
|
13076
|
+
*
|
|
13077
|
+
* @example
|
|
13078
|
+
* ```typescript
|
|
13079
|
+
* flow.label('retry-point')
|
|
13080
|
+
* .execute('queryGen', ...)
|
|
13081
|
+
* ```
|
|
13082
|
+
*/
|
|
13083
|
+
label(label) {
|
|
13084
|
+
if (this.branchContext?.currentBranchValue !== void 0) {
|
|
13085
|
+
throw new Error("Cannot create labels inside branch blocks");
|
|
13086
|
+
}
|
|
13087
|
+
this.stepLabels.set(label, this.flowDefinition.length);
|
|
13088
|
+
return this;
|
|
13089
|
+
}
|
|
13090
|
+
/**
|
|
13091
|
+
* Executes a previously defined node with full type safety.
|
|
13092
|
+
* The node name must exist in TNodes, and the mapping function is typed based on the node's signature.
|
|
13093
|
+
*
|
|
13094
|
+
* @param nodeName - The name of the node to execute (must exist in TNodes)
|
|
13095
|
+
* @param mapping - Typed function that takes the current state and returns the input for the node
|
|
13096
|
+
* @param dynamicContext - Optional object to override the AI service or options for this specific step
|
|
13097
|
+
* @returns New AxFlow instance with TState augmented with the node's result
|
|
13098
|
+
*
|
|
13099
|
+
* @example
|
|
13100
|
+
* ```typescript
|
|
13101
|
+
* flow.execute('summarizer', state => ({ text: state.originalText }), { ai: cheapAI })
|
|
13102
|
+
* ```
|
|
13103
|
+
*/
|
|
13104
|
+
execute(nodeName, mapping, dynamicContext) {
|
|
13105
|
+
if (!this.nodes.has(nodeName)) {
|
|
13106
|
+
throw new Error(
|
|
13107
|
+
`Node '${nodeName}' not found. Make sure to define it with .node() first.`
|
|
13108
|
+
);
|
|
13109
|
+
}
|
|
13110
|
+
const nodeGenerator = this.nodeGenerators.get(nodeName);
|
|
13111
|
+
if (!nodeGenerator) {
|
|
13112
|
+
throw new Error(`Node generator for '${nodeName}' not found.`);
|
|
13113
|
+
}
|
|
13114
|
+
const step = async (state, context3) => {
|
|
13115
|
+
const ai = dynamicContext?.ai ?? context3.mainAi;
|
|
13116
|
+
const options = dynamicContext?.options ?? context3.mainOptions;
|
|
13117
|
+
const nodeInputs = mapping(state);
|
|
13118
|
+
const result = await nodeGenerator.forward(ai, nodeInputs, options);
|
|
13119
|
+
return {
|
|
13120
|
+
...state,
|
|
13121
|
+
[`${nodeName}Result`]: result
|
|
13122
|
+
};
|
|
13123
|
+
};
|
|
13124
|
+
if (this.branchContext?.currentBranchValue !== void 0) {
|
|
13125
|
+
const currentBranch = this.branchContext.branches.get(
|
|
13126
|
+
this.branchContext.currentBranchValue
|
|
13127
|
+
) || [];
|
|
13128
|
+
currentBranch.push(step);
|
|
13129
|
+
this.branchContext.branches.set(
|
|
13130
|
+
this.branchContext.currentBranchValue,
|
|
13131
|
+
currentBranch
|
|
13132
|
+
);
|
|
13133
|
+
} else {
|
|
13134
|
+
this.flowDefinition.push(step);
|
|
13135
|
+
}
|
|
13136
|
+
return this;
|
|
13137
|
+
}
|
|
13138
|
+
/**
|
|
13139
|
+
* Starts a conditional branch based on a predicate function.
|
|
13140
|
+
*
|
|
13141
|
+
* @param predicate - Function that takes state and returns a value to branch on
|
|
13142
|
+
* @returns this (for chaining)
|
|
13143
|
+
*
|
|
13144
|
+
* @example
|
|
13145
|
+
* ```typescript
|
|
13146
|
+
* flow.branch(state => state.qualityResult.needsMoreInfo)
|
|
13147
|
+
* .when(true)
|
|
13148
|
+
* .execute('queryGen', ...)
|
|
13149
|
+
* .when(false)
|
|
13150
|
+
* .execute('answer', ...)
|
|
13151
|
+
* .merge()
|
|
13152
|
+
* ```
|
|
13153
|
+
*/
|
|
13154
|
+
branch(predicate) {
|
|
13155
|
+
if (this.branchContext) {
|
|
13156
|
+
throw new Error("Nested branches are not supported");
|
|
13157
|
+
}
|
|
13158
|
+
this.branchContext = {
|
|
13159
|
+
predicate: (state) => predicate(state),
|
|
13160
|
+
branches: /* @__PURE__ */ new Map(),
|
|
13161
|
+
currentBranchValue: void 0
|
|
13162
|
+
};
|
|
13163
|
+
return this;
|
|
13164
|
+
}
|
|
13165
|
+
/**
|
|
13166
|
+
* Defines a branch case for the current branch context.
|
|
13167
|
+
*
|
|
13168
|
+
* @param value - The value to match against the branch predicate result
|
|
13169
|
+
* @returns this (for chaining)
|
|
13170
|
+
*/
|
|
13171
|
+
when(value) {
|
|
13172
|
+
if (!this.branchContext) {
|
|
13173
|
+
throw new Error("when() called without matching branch()");
|
|
13174
|
+
}
|
|
13175
|
+
this.branchContext.currentBranchValue = value;
|
|
13176
|
+
this.branchContext.branches.set(value, []);
|
|
13177
|
+
return this;
|
|
13178
|
+
}
|
|
13179
|
+
/**
|
|
13180
|
+
* Ends the current branch and merges all branch paths back into the main flow.
|
|
13181
|
+
*
|
|
13182
|
+
* @returns this (for chaining)
|
|
13183
|
+
*/
|
|
13184
|
+
merge() {
|
|
13185
|
+
if (!this.branchContext) {
|
|
13186
|
+
throw new Error("merge() called without matching branch()");
|
|
13187
|
+
}
|
|
13188
|
+
const branchContext = this.branchContext;
|
|
13189
|
+
this.branchContext = null;
|
|
13190
|
+
this.flowDefinition.push(async (state, context3) => {
|
|
13191
|
+
const branchValue = branchContext.predicate(state);
|
|
13192
|
+
const branchSteps = branchContext.branches.get(branchValue);
|
|
13193
|
+
if (!branchSteps) {
|
|
13194
|
+
return state;
|
|
13195
|
+
}
|
|
13196
|
+
let currentState = state;
|
|
13197
|
+
for (const step of branchSteps) {
|
|
13198
|
+
currentState = await step(currentState, context3);
|
|
13199
|
+
}
|
|
13200
|
+
return currentState;
|
|
13201
|
+
});
|
|
13202
|
+
return this;
|
|
13203
|
+
}
|
|
13204
|
+
/**
|
|
13205
|
+
* Executes multiple operations in parallel and merges their results.
|
|
13206
|
+
* Both typed and legacy untyped branches are supported.
|
|
13207
|
+
*
|
|
13208
|
+
* @param branches - Array of functions that define parallel operations
|
|
13209
|
+
* @returns Object with merge method for combining results
|
|
13210
|
+
*
|
|
13211
|
+
* @example
|
|
13212
|
+
* ```typescript
|
|
13213
|
+
* flow.parallel([
|
|
13214
|
+
* subFlow => subFlow.execute('retrieve1', state => ({ query: state.query1 })),
|
|
13215
|
+
* subFlow => subFlow.execute('retrieve2', state => ({ query: state.query2 })),
|
|
13216
|
+
* subFlow => subFlow.execute('retrieve3', state => ({ query: state.query3 }))
|
|
13217
|
+
* ]).merge('documents', (docs1, docs2, docs3) => [...docs1, ...docs2, ...docs3])
|
|
13218
|
+
* ```
|
|
13219
|
+
*/
|
|
13220
|
+
parallel(branches) {
|
|
13221
|
+
const parallelStep = async (state, context3) => {
|
|
13222
|
+
const promises = branches.map(async (branchFn) => {
|
|
13223
|
+
const subContext = new AxFlowSubContextImpl(this.nodeGenerators);
|
|
13224
|
+
const populatedSubContext = branchFn(
|
|
13225
|
+
subContext
|
|
13226
|
+
);
|
|
13227
|
+
return await populatedSubContext.executeSteps(state, context3);
|
|
13228
|
+
});
|
|
13229
|
+
const results = await Promise.all(promises);
|
|
13230
|
+
return {
|
|
13231
|
+
...state,
|
|
13232
|
+
_parallelResults: results
|
|
13233
|
+
};
|
|
13234
|
+
};
|
|
13235
|
+
this.flowDefinition.push(parallelStep);
|
|
13236
|
+
return {
|
|
13237
|
+
merge: (resultKey, mergeFunction) => {
|
|
13238
|
+
this.flowDefinition.push((state) => {
|
|
13239
|
+
const results = state._parallelResults;
|
|
13240
|
+
if (!Array.isArray(results)) {
|
|
13241
|
+
throw new Error("No parallel results found for merge");
|
|
13242
|
+
}
|
|
13243
|
+
const mergedValue = mergeFunction(...results);
|
|
13244
|
+
const newState = { ...state };
|
|
13245
|
+
delete newState._parallelResults;
|
|
13246
|
+
newState[resultKey] = mergedValue;
|
|
13247
|
+
return newState;
|
|
13248
|
+
});
|
|
13249
|
+
return this;
|
|
13250
|
+
}
|
|
13251
|
+
};
|
|
13252
|
+
}
|
|
13253
|
+
/**
|
|
13254
|
+
* Creates a feedback loop that jumps back to a labeled step if a condition is met.
|
|
13255
|
+
*
|
|
13256
|
+
* @param condition - Function that returns true to trigger the feedback loop
|
|
13257
|
+
* @param targetLabel - The label to jump back to
|
|
13258
|
+
* @param maxIterations - Maximum number of iterations to prevent infinite loops (default: 10)
|
|
13259
|
+
* @returns this (for chaining)
|
|
13260
|
+
*
|
|
13261
|
+
* @example
|
|
13262
|
+
* ```typescript
|
|
13263
|
+
* flow.label('retry-point')
|
|
13264
|
+
* .execute('answer', ...)
|
|
13265
|
+
* .execute('qualityCheck', ...)
|
|
13266
|
+
* .feedback(state => state.qualityCheckResult.confidence < 0.7, 'retry-point')
|
|
13267
|
+
* ```
|
|
13268
|
+
*/
|
|
13269
|
+
feedback(condition, targetLabel, maxIterations = 10) {
|
|
13270
|
+
if (!this.stepLabels.has(targetLabel)) {
|
|
13271
|
+
throw new Error(
|
|
13272
|
+
`Label '${targetLabel}' not found. Make sure to define it with .label() before the feedback point.`
|
|
13273
|
+
);
|
|
13274
|
+
}
|
|
13275
|
+
const targetIndex = this.stepLabels.get(targetLabel);
|
|
13276
|
+
const feedbackStepIndex = this.flowDefinition.length;
|
|
13277
|
+
this.flowDefinition.push(async (state, context3) => {
|
|
13278
|
+
let currentState = state;
|
|
13279
|
+
let iterations = 1;
|
|
13280
|
+
const iterationKey = `_feedback_${targetLabel}_iterations`;
|
|
13281
|
+
if (typeof currentState[iterationKey] !== "number") {
|
|
13282
|
+
currentState = { ...currentState, [iterationKey]: 1 };
|
|
13283
|
+
}
|
|
13284
|
+
while (condition(currentState) && iterations < maxIterations) {
|
|
13285
|
+
iterations++;
|
|
13286
|
+
currentState = { ...currentState, [iterationKey]: iterations };
|
|
13287
|
+
for (let i = targetIndex; i < feedbackStepIndex; i++) {
|
|
13288
|
+
const step = this.flowDefinition[i];
|
|
13289
|
+
if (step) {
|
|
13290
|
+
currentState = await step(currentState, context3);
|
|
13291
|
+
}
|
|
13292
|
+
}
|
|
13293
|
+
}
|
|
13294
|
+
return currentState;
|
|
13295
|
+
});
|
|
13296
|
+
return this;
|
|
13297
|
+
}
|
|
13298
|
+
/**
|
|
13299
|
+
* Marks the beginning of a loop block.
|
|
13300
|
+
*
|
|
13301
|
+
* @param condition - Function that takes the current state and returns a boolean
|
|
13302
|
+
* @returns this (for chaining)
|
|
13303
|
+
*
|
|
13304
|
+
* @example
|
|
13305
|
+
* ```typescript
|
|
13306
|
+
* flow.while(state => state.iterations < 3)
|
|
13307
|
+
* .map(state => ({ ...state, iterations: (state.iterations || 0) + 1 }))
|
|
13308
|
+
* .endWhile()
|
|
13309
|
+
* ```
|
|
13310
|
+
*/
|
|
13311
|
+
while(condition) {
|
|
13312
|
+
const loopStartIndex = this.flowDefinition.length;
|
|
13313
|
+
this.loopStack.push(loopStartIndex);
|
|
13314
|
+
const placeholderStep = Object.assign(
|
|
13315
|
+
(state) => state,
|
|
13316
|
+
{
|
|
13317
|
+
_condition: condition,
|
|
13318
|
+
_isLoopStart: true
|
|
13319
|
+
}
|
|
13320
|
+
);
|
|
13321
|
+
this.flowDefinition.push(placeholderStep);
|
|
13322
|
+
return this;
|
|
13323
|
+
}
|
|
13324
|
+
/**
|
|
13325
|
+
* Marks the end of a loop block.
|
|
13326
|
+
*
|
|
13327
|
+
* @returns this (for chaining)
|
|
13328
|
+
*/
|
|
13329
|
+
endWhile() {
|
|
13330
|
+
if (this.loopStack.length === 0) {
|
|
13331
|
+
throw new Error("endWhile() called without matching while()");
|
|
13332
|
+
}
|
|
13333
|
+
const loopStartIndex = this.loopStack.pop();
|
|
13334
|
+
const placeholderStep = this.flowDefinition[loopStartIndex];
|
|
13335
|
+
if (!placeholderStep || !("_isLoopStart" in placeholderStep)) {
|
|
13336
|
+
throw new Error("Loop start step not found or invalid");
|
|
13337
|
+
}
|
|
13338
|
+
const condition = placeholderStep._condition;
|
|
13339
|
+
const loopBodySteps = this.flowDefinition.splice(loopStartIndex + 1);
|
|
13340
|
+
this.flowDefinition[loopStartIndex] = async (state, context3) => {
|
|
13341
|
+
let currentState = state;
|
|
13342
|
+
while (condition(currentState)) {
|
|
13343
|
+
for (const step of loopBodySteps) {
|
|
13344
|
+
currentState = await step(currentState, context3);
|
|
13345
|
+
}
|
|
13346
|
+
}
|
|
13347
|
+
return currentState;
|
|
13348
|
+
};
|
|
13349
|
+
return this;
|
|
13350
|
+
}
|
|
13351
|
+
/**
|
|
13352
|
+
* Executes the flow with the given AI service and input values.
|
|
13353
|
+
*
|
|
13354
|
+
* @param ai - The AI service to use as the default for all steps
|
|
13355
|
+
* @param values - The input values for the flow
|
|
13356
|
+
* @param options - Optional forward options to use as defaults
|
|
13357
|
+
* @returns Promise that resolves to the final output
|
|
13358
|
+
*/
|
|
13359
|
+
async forward(ai, values, options) {
|
|
13360
|
+
let state = { ...values };
|
|
13361
|
+
const context3 = {
|
|
13362
|
+
mainAi: ai,
|
|
13363
|
+
mainOptions: options
|
|
13364
|
+
};
|
|
13365
|
+
for (const step of this.flowDefinition) {
|
|
13366
|
+
state = await step(state, context3);
|
|
13367
|
+
}
|
|
13368
|
+
return state;
|
|
13369
|
+
}
|
|
13370
|
+
};
|
|
13371
|
+
// Sub-context used by parallel branches: queues node executions and state
// transforms, then runs them sequentially against a branch-local state copy.
var AxFlowSubContextImpl = class {
  constructor(nodeGenerators) {
    // Map of node name -> generator with an async forward(ai, inputs, options).
    this.nodeGenerators = nodeGenerators;
  }
  // Queued steps, each (state, context) -> state (possibly async).
  steps = [];
  /**
   * Queues execution of a named node. The node's inputs are derived from the
   * current state via `mapping`; its result lands at `<nodeName>Result`.
   */
  execute(nodeName, mapping, dynamicContext) {
    const generator = this.nodeGenerators.get(nodeName);
    if (!generator) {
      throw new Error(`Node generator for '${nodeName}' not found.`);
    }
    this.steps.push(async (state, context3) => {
      // Per-step overrides win over the flow-level defaults.
      const ai = dynamicContext?.ai ?? context3.mainAi;
      const options = dynamicContext?.options ?? context3.mainOptions;
      const result = await generator.forward(ai, mapping(state), options);
      return {
        ...state,
        [`${nodeName}Result`]: result
      };
    });
    return this;
  }
  /** Queues a pure state transform. */
  map(transform) {
    this.steps.push((state) => transform(state));
    return this;
  }
  /** Runs all queued steps in order, threading state through each. */
  async executeSteps(initialState, context3) {
    let state = initialState;
    for (const step of this.steps) {
      state = await step(state, context3);
    }
    return state;
  }
};
|
|
13405
|
+
// Typed variant of the parallel-branch sub-context: builds a queue of steps
// (node executions and transforms) and replays them over a branch state.
var AxFlowTypedSubContextImpl = class {
  constructor(nodeGenerators) {
    // Lookup of node name -> generator exposing async forward(ai, inputs, options).
    this.nodeGenerators = nodeGenerators;
  }
  // Ordered queue of (state, context) -> state steps.
  steps = [];
  /**
   * Queues a named node execution; the mapped inputs are fed to the node and
   * its output is stored on the state under `<nodeName>Result`.
   */
  execute(nodeName, mapping, dynamicContext) {
    const gen = this.nodeGenerators.get(nodeName);
    if (!gen) {
      throw new Error(`Node generator for '${nodeName}' not found.`);
    }
    const resultKey = `${nodeName}Result`;
    this.steps.push(async (state, context3) => {
      // Step-local AI/options take precedence over the flow defaults.
      const ai = dynamicContext?.ai ?? context3.mainAi;
      const options = dynamicContext?.options ?? context3.mainOptions;
      const nodeResult = await gen.forward(ai, mapping(state), options);
      const next = { ...state };
      next[resultKey] = nodeResult;
      return next;
    });
    return this;
  }
  /** Queues a pure state transform. */
  map(transform) {
    this.steps.push((state) => transform(state));
    return this;
  }
  /** Runs the queued steps strictly in order as a promise chain. */
  async executeSteps(initialState, context3) {
    return this.steps.reduce(
      (chain, step) => chain.then((state) => step(state, context3)),
      Promise.resolve(initialState)
    );
  }
};
|
|
13439
|
+
|
|
12701
13440
|
// dsp/loader.ts
|
|
12702
13441
|
var AxHFDataLoader = class {
|
|
12703
13442
|
rows = [];
|
|
@@ -16649,6 +17388,7 @@ export {
|
|
|
16649
17388
|
AxAIOpenAIResponsesBase,
|
|
16650
17389
|
AxAIOpenAIResponsesImpl,
|
|
16651
17390
|
AxAIOpenAIResponsesModel,
|
|
17391
|
+
AxAIRefusalError,
|
|
16652
17392
|
AxAIReka,
|
|
16653
17393
|
AxAIRekaModel,
|
|
16654
17394
|
AxAIServiceAbortedError,
|
|
@@ -16680,6 +17420,8 @@ export {
|
|
|
16680
17420
|
AxDockerSession,
|
|
16681
17421
|
AxEmbeddingAdapter,
|
|
16682
17422
|
AxEvalUtil,
|
|
17423
|
+
AxFlow,
|
|
17424
|
+
AxFlowTypedSubContextImpl,
|
|
16683
17425
|
AxFunctionError,
|
|
16684
17426
|
AxFunctionProcessor,
|
|
16685
17427
|
AxGen,
|
|
@@ -16757,6 +17499,7 @@ export {
|
|
|
16757
17499
|
axModelInfoHuggingFace,
|
|
16758
17500
|
axModelInfoMistral,
|
|
16759
17501
|
axModelInfoOpenAI,
|
|
17502
|
+
axModelInfoOpenAIResponses,
|
|
16760
17503
|
axModelInfoReka,
|
|
16761
17504
|
axModelInfoTogether,
|
|
16762
17505
|
axSpanAttributes,
|