@mastra/core 1.0.0-beta.3 → 1.0.0-beta.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +44 -0
- package/dist/agent/index.cjs +9 -9
- package/dist/agent/index.js +2 -2
- package/dist/agent/message-list/index.cjs +3 -3
- package/dist/agent/message-list/index.d.ts +5 -0
- package/dist/agent/message-list/index.d.ts.map +1 -1
- package/dist/agent/message-list/index.js +1 -1
- package/dist/{chunk-OWX2PUFH.cjs → chunk-2NVBZKZI.cjs} +161 -129
- package/dist/chunk-2NVBZKZI.cjs.map +1 -0
- package/dist/{chunk-OQF4H5Y2.js → chunk-2OTDXX73.js} +4 -4
- package/dist/{chunk-OQF4H5Y2.js.map → chunk-2OTDXX73.js.map} +1 -1
- package/dist/{chunk-4DWZ4Z6H.js → chunk-3RW5EMSB.js} +3 -4
- package/dist/chunk-3RW5EMSB.js.map +1 -0
- package/dist/{chunk-N4SJ4YX7.cjs → chunk-3W5RQCCY.cjs} +40 -24
- package/dist/chunk-3W5RQCCY.cjs.map +1 -0
- package/dist/{chunk-XRIVPHXV.cjs → chunk-4RXG622P.cjs} +5 -5
- package/dist/{chunk-XRIVPHXV.cjs.map → chunk-4RXG622P.cjs.map} +1 -1
- package/dist/{chunk-VZGBVYXA.cjs → chunk-5WXEYDFI.cjs} +22 -23
- package/dist/chunk-5WXEYDFI.cjs.map +1 -0
- package/dist/{chunk-EZVRSZMK.cjs → chunk-CYVNOIXS.cjs} +11 -11
- package/dist/{chunk-EZVRSZMK.cjs.map → chunk-CYVNOIXS.cjs.map} +1 -1
- package/dist/{chunk-MCUX2D5Q.js → chunk-FVNT7VTO.js} +29 -13
- package/dist/chunk-FVNT7VTO.js.map +1 -0
- package/dist/{chunk-T3WZCEC4.js → chunk-IHJDOC3A.js} +34 -587
- package/dist/chunk-IHJDOC3A.js.map +1 -0
- package/dist/{chunk-QUKUN6NR.cjs → chunk-ISMGVGUM.cjs} +105 -5
- package/dist/chunk-ISMGVGUM.cjs.map +1 -0
- package/dist/{chunk-4RSHBKDJ.cjs → chunk-IWB65P37.cjs} +5 -5
- package/dist/{chunk-4RSHBKDJ.cjs.map → chunk-IWB65P37.cjs.map} +1 -1
- package/dist/{chunk-5CWWU22H.js → chunk-IWQDBVJK.js} +3 -3
- package/dist/{chunk-5CWWU22H.js.map → chunk-IWQDBVJK.js.map} +1 -1
- package/dist/{chunk-KOSW5PP5.js → chunk-MDKPL2R2.js} +464 -124
- package/dist/chunk-MDKPL2R2.js.map +1 -0
- package/dist/{chunk-UIZSWUKP.js → chunk-NZAXAFI3.js} +104 -6
- package/dist/chunk-NZAXAFI3.js.map +1 -0
- package/dist/{chunk-GRGPQ32U.js → chunk-RXDJL5QT.js} +3 -3
- package/dist/{chunk-GRGPQ32U.js.map → chunk-RXDJL5QT.js.map} +1 -1
- package/dist/{chunk-YQ7NLZZ3.cjs → chunk-S6OEQHEI.cjs} +61 -614
- package/dist/chunk-S6OEQHEI.cjs.map +1 -0
- package/dist/{chunk-G3OOCXAI.js → chunk-U7VECK2G.js} +4 -4
- package/dist/{chunk-G3OOCXAI.js.map → chunk-U7VECK2G.js.map} +1 -1
- package/dist/{chunk-G36A2JRR.cjs → chunk-VSM3NLUX.cjs} +296 -199
- package/dist/chunk-VSM3NLUX.cjs.map +1 -0
- package/dist/{chunk-3VOUB4ZU.cjs → chunk-VZC4BWWH.cjs} +8 -8
- package/dist/{chunk-3VOUB4ZU.cjs.map → chunk-VZC4BWWH.cjs.map} +1 -1
- package/dist/{chunk-BAMR7HKO.js → chunk-W3DD3XP5.js} +296 -199
- package/dist/chunk-W3DD3XP5.js.map +1 -0
- package/dist/{chunk-JTXVR2RA.cjs → chunk-WQSGX6XA.cjs} +5 -5
- package/dist/{chunk-JTXVR2RA.cjs.map → chunk-WQSGX6XA.cjs.map} +1 -1
- package/dist/{chunk-VU6DVS7J.js → chunk-WTYNK7Q4.js} +4 -4
- package/dist/{chunk-VU6DVS7J.js.map → chunk-WTYNK7Q4.js.map} +1 -1
- package/dist/{chunk-CKGIPST2.js → chunk-XXBWX7DT.js} +138 -106
- package/dist/chunk-XXBWX7DT.js.map +1 -0
- package/dist/{chunk-ZPMFINU2.cjs → chunk-ZCVTH3CH.cjs} +464 -130
- package/dist/chunk-ZCVTH3CH.cjs.map +1 -0
- package/dist/evals/index.cjs +4 -4
- package/dist/evals/index.js +1 -1
- package/dist/evals/scoreTraces/index.cjs +3 -3
- package/dist/evals/scoreTraces/index.js +1 -1
- package/dist/index.cjs +2 -2
- package/dist/index.js +1 -1
- package/dist/llm/index.cjs +14 -14
- package/dist/llm/index.js +5 -5
- package/dist/llm/model/aisdk/v5/model.d.ts.map +1 -1
- package/dist/loop/index.cjs +2 -2
- package/dist/loop/index.js +1 -1
- package/dist/loop/test-utils/generateText.d.ts.map +1 -1
- package/dist/loop/test-utils/resultObject.d.ts.map +1 -1
- package/dist/loop/test-utils/toUIMessageStream.d.ts.map +1 -1
- package/dist/loop/test-utils/utils.d.ts.map +1 -1
- package/dist/loop/workflows/agentic-execution/llm-execution-step.d.ts.map +1 -1
- package/dist/loop/workflows/agentic-execution/llm-mapping-step.d.ts.map +1 -1
- package/dist/mastra/index.cjs +2 -2
- package/dist/mastra/index.js +1 -1
- package/dist/memory/index.cjs +6 -6
- package/dist/memory/index.js +3 -3
- package/dist/models-dev-6PRLJKVZ.js +3 -0
- package/dist/{models-dev-GCVENVWA.js.map → models-dev-6PRLJKVZ.js.map} +1 -1
- package/dist/models-dev-WHMI5G6Y.cjs +12 -0
- package/dist/{models-dev-TIBJR6IG.cjs.map → models-dev-WHMI5G6Y.cjs.map} +1 -1
- package/dist/netlify-46I3SNNV.cjs +12 -0
- package/dist/{netlify-NTSNNT6F.cjs.map → netlify-46I3SNNV.cjs.map} +1 -1
- package/dist/netlify-EBQ6YUC6.js +3 -0
- package/dist/{netlify-O5NJW7CF.js.map → netlify-EBQ6YUC6.js.map} +1 -1
- package/dist/processors/index.cjs +11 -11
- package/dist/processors/index.js +1 -1
- package/dist/provider-registry-HDG6UMUC.js +3 -0
- package/dist/{provider-registry-74GMFZKT.js.map → provider-registry-HDG6UMUC.js.map} +1 -1
- package/dist/provider-registry-RP2W4B24.cjs +40 -0
- package/dist/{provider-registry-BZP3DIIV.cjs.map → provider-registry-RP2W4B24.cjs.map} +1 -1
- package/dist/relevance/index.cjs +2 -2
- package/dist/relevance/index.js +1 -1
- package/dist/server/auth.d.ts +11 -0
- package/dist/server/auth.d.ts.map +1 -1
- package/dist/server/index.cjs +10 -0
- package/dist/server/index.cjs.map +1 -1
- package/dist/server/index.js +10 -0
- package/dist/server/index.js.map +1 -1
- package/dist/storage/index.cjs +29 -29
- package/dist/storage/index.js +1 -1
- package/dist/stream/aisdk/v5/compat/prepare-tools.d.ts.map +1 -1
- package/dist/stream/index.cjs +11 -11
- package/dist/stream/index.js +2 -2
- package/dist/test-utils/llm-mock.cjs +66 -29
- package/dist/test-utils/llm-mock.cjs.map +1 -1
- package/dist/test-utils/llm-mock.d.ts +4 -2
- package/dist/test-utils/llm-mock.d.ts.map +1 -1
- package/dist/test-utils/llm-mock.js +66 -29
- package/dist/test-utils/llm-mock.js.map +1 -1
- package/dist/voice/aisdk/index.d.ts +3 -0
- package/dist/voice/aisdk/index.d.ts.map +1 -0
- package/dist/voice/aisdk/speech.d.ts +23 -0
- package/dist/voice/aisdk/speech.d.ts.map +1 -0
- package/dist/voice/aisdk/transcription.d.ts +22 -0
- package/dist/voice/aisdk/transcription.d.ts.map +1 -0
- package/dist/voice/composite-voice.d.ts +4 -3
- package/dist/voice/composite-voice.d.ts.map +1 -1
- package/dist/voice/index.cjs +12 -4
- package/dist/voice/index.d.ts +1 -0
- package/dist/voice/index.d.ts.map +1 -1
- package/dist/voice/index.js +1 -1
- package/dist/workflows/evented/index.cjs +10 -10
- package/dist/workflows/evented/index.js +1 -1
- package/dist/workflows/index.cjs +19 -19
- package/dist/workflows/index.js +1 -1
- package/package.json +10 -10
- package/dist/chunk-4DWZ4Z6H.js.map +0 -1
- package/dist/chunk-BAMR7HKO.js.map +0 -1
- package/dist/chunk-CKGIPST2.js.map +0 -1
- package/dist/chunk-G36A2JRR.cjs.map +0 -1
- package/dist/chunk-KOSW5PP5.js.map +0 -1
- package/dist/chunk-MCUX2D5Q.js.map +0 -1
- package/dist/chunk-N4SJ4YX7.cjs.map +0 -1
- package/dist/chunk-OWX2PUFH.cjs.map +0 -1
- package/dist/chunk-QUKUN6NR.cjs.map +0 -1
- package/dist/chunk-T3WZCEC4.js.map +0 -1
- package/dist/chunk-UIZSWUKP.js.map +0 -1
- package/dist/chunk-VZGBVYXA.cjs.map +0 -1
- package/dist/chunk-YQ7NLZZ3.cjs.map +0 -1
- package/dist/chunk-ZPMFINU2.cjs.map +0 -1
- package/dist/models-dev-GCVENVWA.js +0 -3
- package/dist/models-dev-TIBJR6IG.cjs +0 -12
- package/dist/netlify-NTSNNT6F.cjs +0 -12
- package/dist/netlify-O5NJW7CF.js +0 -3
- package/dist/provider-registry-74GMFZKT.js +0 -3
- package/dist/provider-registry-BZP3DIIV.cjs +0 -40
package/dist/{chunk-ZPMFINU2.cjs → chunk-ZCVTH3CH.cjs}
@@ -520,24 +520,74 @@ function getRuntimeEnvironmentUserAgent(globalThisAny = globalThis) {
 }
 return "runtime/unknown";
 }
-function
-
-
-
+function normalizeHeaders(headers) {
+if (headers == null) {
+return {};
+}
+const normalized = {};
+if (headers instanceof Headers) {
+headers.forEach((value, key) => {
+normalized[key.toLowerCase()] = value;
+});
+} else {
+if (!Array.isArray(headers)) {
+headers = Object.entries(headers);
+}
+for (const [key, value] of headers) {
+if (value != null) {
+normalized[key.toLowerCase()] = value;
+}
+}
+}
+return normalized;
 }
 function withUserAgentSuffix(headers, ...userAgentSuffixParts) {
-const
-headers != null ? headers : {}
-);
-const normalizedHeaders = new Headers(cleanedHeaders);
+const normalizedHeaders = new Headers(normalizeHeaders(headers));
 const currentUserAgentHeader = normalizedHeaders.get("user-agent") || "";
 normalizedHeaders.set(
 "user-agent",
 [currentUserAgentHeader, ...userAgentSuffixParts].filter(Boolean).join(" ")
 );
-return Object.fromEntries(normalizedHeaders);
+return Object.fromEntries(normalizedHeaders.entries());
+}
+var VERSION = "3.0.17" ;
+var DEFAULT_SCHEMA_PREFIX = "JSON schema:";
+var DEFAULT_SCHEMA_SUFFIX = "You MUST answer with a JSON object that matches the JSON schema above.";
+var DEFAULT_GENERIC_SUFFIX = "You MUST answer with JSON.";
+function injectJsonInstruction({
+prompt,
+schema,
+schemaPrefix = schema != null ? DEFAULT_SCHEMA_PREFIX : void 0,
+schemaSuffix = schema != null ? DEFAULT_SCHEMA_SUFFIX : DEFAULT_GENERIC_SUFFIX
+}) {
+return [
+prompt != null && prompt.length > 0 ? prompt : void 0,
+prompt != null && prompt.length > 0 ? "" : void 0,
+// add a newline if prompt is not null
+schemaPrefix,
+schema != null ? JSON.stringify(schema) : void 0,
+schemaSuffix
+].filter((line) => line != null).join("\n");
+}
+function injectJsonInstructionIntoMessages({
+messages,
+schema,
+schemaPrefix,
+schemaSuffix
+}) {
+var _a15, _b;
+const systemMessage = ((_a15 = messages[0]) == null ? void 0 : _a15.role) === "system" ? { ...messages[0] } : { role: "system", content: "" };
+systemMessage.content = injectJsonInstruction({
+prompt: systemMessage.content,
+schema,
+schemaPrefix,
+schemaSuffix
+});
+return [
+systemMessage,
+...((_b = messages[0]) == null ? void 0 : _b.role) === "system" ? messages.slice(1) : messages
+];
 }
-var VERSION = "3.0.12" ;
 function loadApiKey({
 apiKey,
 environmentVariableName,
@@ -633,7 +683,11 @@ function filter(obj) {
 }
 function secureJsonParse(text) {
 const { stackTraceLimit } = Error;
-
+try {
+Error.stackTraceLimit = 0;
+} catch (e) {
+return _parse(text);
+}
 try {
 return _parse(text);
 } finally {
@@ -3558,7 +3612,7 @@ var OpenAICompatibleImageModel = class {
 var openaiCompatibleImageResponseSchema = z4.z.object({
 data: z4.z.array(z4.z.object({ b64_json: z4.z.string() }))
 });
-var VERSION2 = "1.0.
+var VERSION2 = "1.0.27" ;
 function createOpenAICompatible(options) {
 const baseURL = withoutTrailingSlash(options.baseURL);
 const providerName = options.name;
@@ -3611,7 +3665,7 @@ var MastraModelGateway = class {
 return this.id;
 }
 };
-var VERSION3 = "2.0.
+var VERSION3 = "2.0.45" ;
 var anthropicErrorDataSchema = lazySchema(
 () => zodSchema(
 z4.z.object({
@@ -3813,7 +3867,18 @@ var anthropicMessagesResponseSchema = lazySchema(
 output_tokens: z4.z.number(),
 cache_creation_input_tokens: z4.z.number().nullish(),
 cache_read_input_tokens: z4.z.number().nullish()
-})
+}),
+container: z4.z.object({
+expires_at: z4.z.string(),
+id: z4.z.string(),
+skills: z4.z.array(
+z4.z.object({
+type: z4.z.union([z4.z.literal("anthropic"), z4.z.literal("custom")]),
+skill_id: z4.z.string(),
+version: z4.z.string()
+})
+).nullish()
+}).nullish()
 })
 )
 );
@@ -4043,7 +4108,21 @@ var anthropicMessagesChunkSchema = lazySchema(
 type: z4.z.literal("message_delta"),
 delta: z4.z.object({
 stop_reason: z4.z.string().nullish(),
-stop_sequence: z4.z.string().nullish()
+stop_sequence: z4.z.string().nullish(),
+container: z4.z.object({
+expires_at: z4.z.string(),
+id: z4.z.string(),
+skills: z4.z.array(
+z4.z.object({
+type: z4.z.union([
+z4.z.literal("anthropic"),
+z4.z.literal("custom")
+]),
+skill_id: z4.z.string(),
+version: z4.z.string()
+})
+).nullish()
+}).nullish()
 }),
 usage: z4.z.looseObject({
 output_tokens: z4.z.number(),
@@ -5286,6 +5365,21 @@ var AnthropicMessagesLanguageModel = class {
 setting: "seed"
 });
 }
+if (temperature != null && temperature > 1) {
+warnings.push({
+type: "unsupported-setting",
+setting: "temperature",
+details: `${temperature} exceeds anthropic maximum of 1.0. clamped to 1.0`
+});
+temperature = 1;
+} else if (temperature != null && temperature < 0) {
+warnings.push({
+type: "unsupported-setting",
+setting: "temperature",
+details: `${temperature} is below anthropic minimum of 0. clamped to 0`
+});
+temperature = 0;
+}
 if ((responseFormat == null ? void 0 : responseFormat.type) === "json") {
 if (responseFormat.schema == null) {
 warnings.push({
@@ -5321,7 +5415,7 @@ var AnthropicMessagesLanguageModel = class {
 });
 const isThinking = ((_b = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _b.type) === "enabled";
 const thinkingBudget = (_c = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _c.budgetTokens;
-const maxOutputTokensForModel = getMaxOutputTokensForModel(this.modelId);
+const { maxOutputTokens: maxOutputTokensForModel, knownModel } = getMaxOutputTokensForModel(this.modelId);
 const maxTokens = maxOutputTokens != null ? maxOutputTokens : maxOutputTokensForModel;
 const baseArgs = {
 // model id:
@@ -5383,7 +5477,7 @@ var AnthropicMessagesLanguageModel = class {
 }
 baseArgs.max_tokens = maxTokens + thinkingBudget;
 }
-if (baseArgs.max_tokens > maxOutputTokensForModel) {
+if (knownModel && baseArgs.max_tokens > maxOutputTokensForModel) {
 if (maxOutputTokens != null) {
 warnings.push({
 type: "unsupported-setting",
@@ -5478,7 +5572,7 @@ var AnthropicMessagesLanguageModel = class {
 });
 }
 async doGenerate(options) {
-var _a15, _b, _c, _d, _e, _f;
+var _a15, _b, _c, _d, _e, _f, _g, _h;
 const { args, warnings, betas, usesJsonResponseTool } = await this.getArgs(options);
 const citationDocuments = this.extractCitationDocuments(options.prompt);
 const {
@@ -5729,7 +5823,16 @@ var AnthropicMessagesLanguageModel = class {
 anthropic: {
 usage: response.usage,
 cacheCreationInputTokens: (_e = response.usage.cache_creation_input_tokens) != null ? _e : null,
-stopSequence: (_f = response.stop_sequence) != null ? _f : null
+stopSequence: (_f = response.stop_sequence) != null ? _f : null,
+container: response.container ? {
+expiresAt: response.container.expires_at,
+id: response.container.id,
+skills: (_h = (_g = response.container.skills) == null ? void 0 : _g.map((skill) => ({
+type: skill.type,
+skillId: skill.skill_id,
+version: skill.version
+}))) != null ? _h : null
+} : null
 }
 }
 };
@@ -5759,6 +5862,7 @@ var AnthropicMessagesLanguageModel = class {
 let rawUsage = void 0;
 let cacheCreationInputTokens = null;
 let stopSequence = null;
+let container = null;
 let blockType = void 0;
 const generateId3 = this.generateId;
 return {
@@ -5768,7 +5872,7 @@ var AnthropicMessagesLanguageModel = class {
 controller.enqueue({ type: "stream-start", warnings });
 },
 transform(chunk, controller) {
-var _a15, _b, _c, _d, _e, _f, _g, _h;
+var _a15, _b, _c, _d, _e, _f, _g, _h, _i, _j;
 if (options.includeRawChunks) {
 controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
 }
@@ -5881,7 +5985,8 @@ var AnthropicMessagesLanguageModel = class {
 data: part.content.content.source.data
 }
 }
-}
+},
+providerExecuted: true
 });
 } else if (part.content.type === "web_fetch_tool_result_error") {
 controller.enqueue({
@@ -6149,6 +6254,15 @@ var AnthropicMessagesLanguageModel = class {
 isJsonResponseFromTool: usesJsonResponseTool
 });
 stopSequence = (_h = value.delta.stop_sequence) != null ? _h : null;
+container = value.delta.container != null ? {
+expiresAt: value.delta.container.expires_at,
+id: value.delta.container.id,
+skills: (_j = (_i = value.delta.container.skills) == null ? void 0 : _i.map((skill) => ({
+type: skill.type,
+skillId: skill.skill_id,
+version: skill.version
+}))) != null ? _j : null
+} : null;
 rawUsage = {
 ...rawUsage,
 ...value.usage
@@ -6164,7 +6278,8 @@ var AnthropicMessagesLanguageModel = class {
 anthropic: {
 usage: rawUsage != null ? rawUsage : null,
 cacheCreationInputTokens,
-stopSequence
+stopSequence,
+container
 }
 }
 });
@@ -6189,13 +6304,15 @@ var AnthropicMessagesLanguageModel = class {
 };
 function getMaxOutputTokensForModel(modelId) {
 if (modelId.includes("claude-sonnet-4-") || modelId.includes("claude-3-7-sonnet") || modelId.includes("claude-haiku-4-5")) {
-return 64e3;
+return { maxOutputTokens: 64e3, knownModel: true };
 } else if (modelId.includes("claude-opus-4-")) {
-return 32e3;
+return { maxOutputTokens: 32e3, knownModel: true };
 } else if (modelId.includes("claude-3-5-haiku")) {
-return 8192;
+return { maxOutputTokens: 8192, knownModel: true };
+} else if (modelId.includes("claude-3-haiku")) {
+return { maxOutputTokens: 4096, knownModel: true };
 } else {
-return 4096;
+return { maxOutputTokens: 4096, knownModel: false };
 }
 }
 var bash_20241022InputSchema = lazySchema(
@@ -6531,8 +6648,14 @@ var anthropicTools = {
 webSearch_20250305
 };
 function createAnthropic(options = {}) {
-var _a15;
-const baseURL = (_a15 = withoutTrailingSlash(
+var _a15, _b;
+const baseURL = (_a15 = withoutTrailingSlash(
+loadOptionalSetting({
+settingValue: options.baseURL,
+environmentVariableName: "ANTHROPIC_BASE_URL"
+})
+)) != null ? _a15 : "https://api.anthropic.com/v1";
+const providerName = (_b = options.name) != null ? _b : "anthropic.messages";
 const getHeaders = () => withUserAgentSuffix(
 {
 "anthropic-version": "2023-06-01",
@@ -6548,7 +6671,7 @@ function createAnthropic(options = {}) {
 const createChatModel = (modelId) => {
 var _a22;
 return new AnthropicMessagesLanguageModel(modelId, {
-provider:
+provider: providerName,
 baseURL,
 headers: getHeaders,
 fetch: options.fetch,
@@ -6579,7 +6702,7 @@ function createAnthropic(options = {}) {
 return provider;
 }
 createAnthropic();
-var VERSION4 = "2.0.
+var VERSION4 = "2.0.39" ;
 var googleErrorDataSchema = lazySchema(
 () => zodSchema(
 z4.z.object({
@@ -6879,19 +7002,20 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
 contents.push({
 role: "model",
 parts: content.map((part) => {
-var _a22, _b, _c
+var _a22, _b, _c;
+const thoughtSignature = ((_b = (_a22 = part.providerOptions) == null ? void 0 : _a22.google) == null ? void 0 : _b.thoughtSignature) != null ? String((_c = part.providerOptions.google) == null ? void 0 : _c.thoughtSignature) : void 0;
 switch (part.type) {
 case "text": {
 return part.text.length === 0 ? void 0 : {
 text: part.text,
-thoughtSignature
+thoughtSignature
 };
 }
 case "reasoning": {
 return part.text.length === 0 ? void 0 : {
 text: part.text,
 thought: true,
-thoughtSignature
+thoughtSignature
 };
 }
 case "file": {
@@ -6918,7 +7042,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
 name: part.toolName,
 args: part.input
 },
-thoughtSignature
+thoughtSignature
 };
 }
 }
@@ -7001,7 +7125,9 @@ var googleGenerativeAIProviderOptions = lazySchema(
 responseModalities: z4.z.array(z4.z.enum(["TEXT", "IMAGE"])).optional(),
 thinkingConfig: z4.z.object({
 thinkingBudget: z4.z.number().optional(),
-includeThoughts: z4.z.boolean().optional()
+includeThoughts: z4.z.boolean().optional(),
+// https://ai.google.dev/gemini-api/docs/gemini-3?thinking=high#thinking_level
+thinkingLevel: z4.z.enum(["low", "medium", "high"]).optional()
 }).optional(),
 /**
 * Optional.
@@ -7102,8 +7228,14 @@ function prepareTools3({
 var _a15;
 tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
 const toolWarnings = [];
-const
+const isLatest = [
+"gemini-flash-latest",
+"gemini-flash-lite-latest",
+"gemini-pro-latest"
+].some((id) => id === modelId);
+const isGemini2orNewer = modelId.includes("gemini-2") || modelId.includes("gemini-3") || isLatest;
 const supportsDynamicRetrieval = modelId.includes("gemini-1.5-flash") && !modelId.includes("-8b");
+const supportsFileSearch = modelId.includes("gemini-2.5");
 if (tools == null) {
 return { tools: void 0, toolConfig: void 0, toolWarnings };
 }
@@ -7112,10 +7244,11 @@ function prepareTools3({
 (tool2) => tool2.type === "provider-defined"
 );
 if (hasFunctionTools && hasProviderDefinedTools) {
+const functionTools = tools.filter((tool2) => tool2.type === "function");
 toolWarnings.push({
 type: "unsupported-tool",
 tool: tools.find((tool2) => tool2.type === "function"),
-details:
+details: `Cannot mix function tools with provider-defined tools in the same request. Falling back to provider-defined tools only. The following function tools will be ignored: ${functionTools.map((t) => t.name).join(", ")}. Please use either function tools or provider-defined tools, but not both.`
 });
 }
 if (hasProviderDefinedTools) {
@@ -7126,7 +7259,7 @@ function prepareTools3({
 providerDefinedTools.forEach((tool2) => {
 switch (tool2.id) {
 case "google.google_search":
-if (
+if (isGemini2orNewer) {
 googleTools2.push({ googleSearch: {} });
 } else if (supportsDynamicRetrieval) {
 googleTools2.push({
@@ -7142,7 +7275,7 @@ function prepareTools3({
 }
 break;
 case "google.url_context":
-if (
+if (isGemini2orNewer) {
 googleTools2.push({ urlContext: {} });
 } else {
 toolWarnings.push({
@@ -7153,7 +7286,7 @@ function prepareTools3({
 }
 break;
 case "google.code_execution":
-if (
+if (isGemini2orNewer) {
 googleTools2.push({ codeExecution: {} });
 } else {
 toolWarnings.push({
@@ -7163,6 +7296,37 @@ function prepareTools3({
 });
 }
 break;
+case "google.file_search":
+if (supportsFileSearch) {
+googleTools2.push({ fileSearch: { ...tool2.args } });
+} else {
+toolWarnings.push({
+type: "unsupported-tool",
+tool: tool2,
+details: "The file search tool is only supported with Gemini 2.5 models."
+});
+}
+break;
+case "google.vertex_rag_store":
+if (isGemini2orNewer) {
+googleTools2.push({
+retrieval: {
+vertex_rag_store: {
+rag_resources: {
+rag_corpus: tool2.args.ragCorpus
+},
+similarity_top_k: tool2.args.topK
+}
+}
+});
+} else {
+toolWarnings.push({
+type: "unsupported-tool",
+tool: tool2,
+details: "The RAG store tool is not supported with other Gemini models than Gemini 2."
+});
+}
+break;
 default:
 toolWarnings.push({ type: "unsupported-tool", tool: tool2 });
 break;
@@ -7290,17 +7454,19 @@ var GoogleGenerativeAILanguageModel = class {
 toolChoice,
 providerOptions
 }) {
-var _a15
+var _a15;
 const warnings = [];
 const googleOptions = await parseProviderOptions({
 provider: "google",
 providerOptions,
 schema: googleGenerativeAIProviderOptions
 });
-if ((
+if ((tools == null ? void 0 : tools.some(
+(tool2) => tool2.type === "provider-defined" && tool2.id === "google.vertex_rag_store"
+)) && !this.config.provider.startsWith("google.vertex.")) {
 warnings.push({
 type: "other",
-message: `The '
+message: `The 'vertex_rag_store' tool is only supported with the Google Vertex provider and might not be supported or could behave unexpectedly with the current Google provider (${this.config.provider}).`
 });
 }
 const isGemmaModel = this.modelId.toLowerCase().startsWith("gemma-");
@@ -7334,7 +7500,7 @@ var GoogleGenerativeAILanguageModel = class {
 responseSchema: (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && // Google GenAI does not support all OpenAPI Schema features,
 // so this is needed as an escape hatch:
 // TODO convert into provider option
-((
+((_a15 = googleOptions == null ? void 0 : googleOptions.structuredOutputs) != null ? _a15 : true) ? convertJSONSchemaToOpenAPISchema(responseFormat.schema) : void 0,
 ...(googleOptions == null ? void 0 : googleOptions.audioTimestamp) && {
 audioTimestamp: googleOptions.audioTimestamp
 },
@@ -7746,16 +7912,64 @@ function extractSources({
 groundingMetadata,
 generateId: generateId3
 }) {
-var _a15;
-
-
-
-
-
-
-
-
-
+var _a15, _b, _c;
+if (!(groundingMetadata == null ? void 0 : groundingMetadata.groundingChunks)) {
+return void 0;
+}
+const sources = [];
+for (const chunk of groundingMetadata.groundingChunks) {
+if (chunk.web != null) {
+sources.push({
+type: "source",
+sourceType: "url",
+id: generateId3(),
+url: chunk.web.uri,
+title: (_a15 = chunk.web.title) != null ? _a15 : void 0
+});
+} else if (chunk.retrievedContext != null) {
+const uri = chunk.retrievedContext.uri;
+if (uri.startsWith("http://") || uri.startsWith("https://")) {
+sources.push({
+type: "source",
+sourceType: "url",
+id: generateId3(),
+url: uri,
+title: (_b = chunk.retrievedContext.title) != null ? _b : void 0
+});
+} else {
+const title = (_c = chunk.retrievedContext.title) != null ? _c : "Unknown Document";
+let mediaType = "application/octet-stream";
+let filename = void 0;
+if (uri.endsWith(".pdf")) {
+mediaType = "application/pdf";
+filename = uri.split("/").pop();
+} else if (uri.endsWith(".txt")) {
+mediaType = "text/plain";
+filename = uri.split("/").pop();
+} else if (uri.endsWith(".docx")) {
+mediaType = "application/vnd.openxmlformats-officedocument.wordprocessingml.document";
+filename = uri.split("/").pop();
+} else if (uri.endsWith(".doc")) {
+mediaType = "application/msword";
+filename = uri.split("/").pop();
+} else if (uri.match(/\.(md|markdown)$/)) {
+mediaType = "text/markdown";
+filename = uri.split("/").pop();
+} else {
+filename = uri.split("/").pop();
+}
+sources.push({
+type: "source",
+sourceType: "document",
+id: generateId3(),
+mediaType,
+title,
+filename
+});
+}
+}
+}
+return sources.length > 0 ? sources : void 0;
 }
 var getGroundingMetadataSchema = () => z4.z.object({
 webSearchQueries: z4.z.array(z4.z.string()).nullish(),
@@ -7763,8 +7977,12 @@ var getGroundingMetadataSchema = () => z4.z.object({
 searchEntryPoint: z4.z.object({ renderedContent: z4.z.string() }).nullish(),
 groundingChunks: z4.z.array(
 z4.z.object({
-web: z4.z.object({ uri: z4.z.string(), title: z4.z.string() }).nullish(),
-retrievedContext: z4.z.object({
+web: z4.z.object({ uri: z4.z.string(), title: z4.z.string().nullish() }).nullish(),
+retrievedContext: z4.z.object({
+uri: z4.z.string(),
+title: z4.z.string().nullish(),
+text: z4.z.string().nullish()
+}).nullish()
 })
 ).nullish(),
 groundingSupports: z4.z.array(
@@ -7834,7 +8052,9 @@ var usageSchema2 = z4.z.object({
 thoughtsTokenCount: z4.z.number().nullish(),
 promptTokenCount: z4.z.number().nullish(),
 candidatesTokenCount: z4.z.number().nullish(),
-totalTokenCount: z4.z.number().nullish()
+totalTokenCount: z4.z.number().nullish(),
+// https://cloud.google.com/vertex-ai/generative-ai/docs/reference/rest/v1/GenerateContentResponse#TrafficType
+trafficType: z4.z.string().nullish()
 });
 var getUrlContextMetadataSchema = () => z4.z.object({
 urlMetadata: z4.z.array(
@@ -7896,6 +8116,30 @@ var codeExecution = createProviderDefinedToolFactoryWithOutputSchema({
 output: z4.z.string().describe("The output from the code execution.")
 })
 });
+var fileSearchArgsBaseSchema = z4.z.object({
+/** The names of the file_search_stores to retrieve from.
+* Example: `fileSearchStores/my-file-search-store-123`
+*/
+fileSearchStoreNames: z4.z.array(z4.z.string()).describe(
+"The names of the file_search_stores to retrieve from. Example: `fileSearchStores/my-file-search-store-123`"
+),
+/** The number of file search retrieval chunks to retrieve. */
+topK: z4.z.number().int().positive().describe("The number of file search retrieval chunks to retrieve.").optional(),
+/** Metadata filter to apply to the file search retrieval documents.
+* See https://google.aip.dev/160 for the syntax of the filter expression.
+*/
+metadataFilter: z4.z.string().describe(
+"Metadata filter to apply to the file search retrieval documents. See https://google.aip.dev/160 for the syntax of the filter expression."
+).optional()
+}).passthrough();
+var fileSearchArgsSchema = lazySchema(
+() => zodSchema(fileSearchArgsBaseSchema)
+);
+var fileSearch = createProviderDefinedToolFactory({
+id: "google.file_search",
+name: "file_search",
+inputSchema: fileSearchArgsSchema
+});
 var googleSearch = createProviderDefinedToolFactory({
 id: "google.google_search",
 name: "google_search",
@@ -7913,6 +8157,14 @@ var urlContext = createProviderDefinedToolFactory({
 name: "url_context",
 inputSchema: lazySchema(() => zodSchema(z4.z.object({})))
 });
+var vertexRagStore = createProviderDefinedToolFactory({
+id: "google.vertex_rag_store",
+name: "vertex_rag_store",
+inputSchema: z4.z.object({
+ragCorpus: z4.z.string(),
+topK: z4.z.number().optional()
+})
+});
 var googleTools = {
 /**
 * Creates a Google search tool that gives Google direct access to real-time web content.
@@ -7924,6 +8176,17 @@ var googleTools = {
 * Must have name "url_context".
 */
 urlContext,
+/**
+* Enables Retrieval Augmented Generation (RAG) via the Gemini File Search tool.
+* Must have name "file_search".
+*
+* @param fileSearchStoreNames - Fully-qualified File Search store resource names.
+* @param metadataFilter - Optional filter expression to restrict the files that can be retrieved.
+* @param topK - Optional result limit for the number of chunks returned from File Search.
+*
+* @see https://ai.google.dev/gemini-api/docs/file-search
+*/
+fileSearch,
 /**
 * A tool that enables the model to generate and run Python code.
 * Must have name "code_execution".
@@ -7934,7 +8197,12 @@ var googleTools = {
 * @see https://ai.google.dev/gemini-api/docs/code-execution (Google AI)
 * @see https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/code-execution-api (Vertex AI)
 */
-codeExecution
+codeExecution,
+/**
+* Creates a Vertex RAG Store tool that enables the model to perform RAG searches against a Vertex RAG Store.
+* Must have name "vertex_rag_store".
+*/
+vertexRagStore
 };
 var GoogleGenerativeAIImageModel = class {
 constructor(modelId, settings, config) {
@@ -8043,8 +8311,9 @@ var googleImageProviderOptionsSchema = lazySchema(
 )
 );
 function createGoogleGenerativeAI(options = {}) {
-var _a15;
+var _a15, _b;
 const baseURL = (_a15 = withoutTrailingSlash(options.baseURL)) != null ? _a15 : "https://generativelanguage.googleapis.com/v1beta";
+const providerName = (_b = options.name) != null ? _b : "google.generative-ai";
 const getHeaders = () => withUserAgentSuffix(
 {
 "x-goog-api-key": loadApiKey({
@@ -8059,7 +8328,7 @@ function createGoogleGenerativeAI(options = {}) {
 const createChatModel = (modelId) => {
 var _a22;
 return new GoogleGenerativeAILanguageModel(modelId, {
-provider:
+provider: providerName,
 baseURL,
 headers: getHeaders,
 generateId: (_a22 = options.generateId) != null ? _a22 : generateId,
@@ -8079,13 +8348,13 @@ function createGoogleGenerativeAI(options = {}) {
 });
 };
 const createEmbeddingModel = (modelId) => new GoogleGenerativeAIEmbeddingModel(modelId, {
-provider:
+provider: providerName,
 baseURL,
 headers: getHeaders,
 fetch: options.fetch
 });
 const createImageModel = (modelId, settings = {}) => new GoogleGenerativeAIImageModel(modelId, settings, {
-provider:
+provider: providerName,
 baseURL,
 headers: getHeaders,
 fetch: options.fetch
@@ -8307,7 +8576,7 @@ function getResponseMetadata3({
 return {
 id: id != null ? id : void 0,
 modelId: model != null ? model : void 0,
-timestamp: created
+timestamp: created ? new Date(created * 1e3) : void 0
 };
 }
 function mapOpenAIFinishReason(finishReason) {
@@ -8491,7 +8760,7 @@ var openaiChatLanguageModelOptions = lazyValidator(
 /**
 * Reasoning effort for reasoning models. Defaults to `medium`.
 */
-reasoningEffort: z4.z.enum(["minimal", "low", "medium", "high"]).optional(),
+reasoningEffort: z4.z.enum(["none", "minimal", "low", "medium", "high"]).optional(),
 /**
 * Maximum number of completion tokens to generate. Useful for reasoning models.
 */
@@ -8541,6 +8810,15 @@ var openaiChatLanguageModelOptions = lazyValidator(
 * Useful for improving cache hit rates and working around automatic caching issues.
 */
 promptCacheKey: z4.z.string().optional(),
+/**
+* The retention policy for the prompt cache.
+* - 'in_memory': Default. Standard prompt caching behavior.
+* - '24h': Extended prompt caching that keeps cached prefixes active for up to 24 hours.
+* Currently only available for 5.1 series models.
+*
+* @default 'in_memory'
+*/
+promptCacheRetention: z4.z.enum(["in_memory", "24h"]).optional(),
 /**
 * A stable identifier used to help detect users of your application
 * that may be violating OpenAI's usage policies. The IDs should be a
@@ -8702,6 +8980,7 @@ var OpenAIChatLanguageModel = class {
 reasoning_effort: openaiOptions.reasoningEffort,
 service_tier: openaiOptions.serviceTier,
 prompt_cache_key: openaiOptions.promptCacheKey,
+prompt_cache_retention: openaiOptions.promptCacheRetention,
 safety_identifier: openaiOptions.safetyIdentifier,
 // messages:
 messages
@@ -8917,7 +9196,7 @@ var OpenAIChatLanguageModel = class {
 outputTokens: void 0,
 totalTokens: void 0
 };
-let
+let metadataExtracted = false;
 let isActiveText = false;
 const providerMetadata = { openai: {} };
 return {
@@ -8942,12 +9221,15 @@ var OpenAIChatLanguageModel = class {
 controller.enqueue({ type: "error", error: value.error });
 return;
 }
-if (
-
-
-
-
-
+if (!metadataExtracted) {
+const metadata = getResponseMetadata3(value);
+if (Object.values(metadata).some(Boolean)) {
+metadataExtracted = true;
+controller.enqueue({
+type: "response-metadata",
+...getResponseMetadata3(value)
+});
+}
 }
 if (value.usage != null) {
 usage.inputTokens = (_a15 = value.usage.prompt_tokens) != null ? _a15 : void 0;
@@ -9119,18 +9401,6 @@ function getSystemMessageMode(modelId) {
 return (_b = (_a15 = reasoningModels[modelId]) == null ? void 0 : _a15.systemMessageMode) != null ? _b : "developer";
 }
 var reasoningModels = {
-"o1-mini": {
-systemMessageMode: "remove"
-},
-"o1-mini-2024-09-12": {
-systemMessageMode: "remove"
-},
-"o1-preview": {
-systemMessageMode: "remove"
-},
-"o1-preview-2024-09-12": {
-systemMessageMode: "remove"
-},
 o3: {
 systemMessageMode: "developer"
 },
@@ -9666,7 +9936,7 @@ var openaiImageResponseSchema = lazyValidator(
 data: z4.z.array(
 z4.z.object({
 b64_json: z4.z.string(),
-revised_prompt: z4.z.string().
+revised_prompt: z4.z.string().nullish()
 })
 )
 })
@@ -9811,7 +10081,7 @@ var compoundFilterSchema = z4.z.object({
 z4.z.union([comparisonFilterSchema, z4.z.lazy(() => compoundFilterSchema)])
 )
 });
-var
+var fileSearchArgsSchema2 = lazySchema(
 () => zodSchema(
 z4.z.object({
 vectorStoreIds: z4.z.array(z4.z.string()),
@@ -9840,7 +10110,7 @@ var fileSearchOutputSchema = lazySchema(
 })
 )
 );
-var
+var fileSearch2 = createProviderDefinedToolFactoryWithOutputSchema({
 id: "openai.file_search",
 name: "file_search",
 inputSchema: z4.z.object({}),
@@ -9934,7 +10204,13 @@ var webSearchOutputSchema = lazySchema(
 url: z4.z.string(),
 pattern: z4.z.string()
 })
-])
+]),
+sources: z4.z.array(
+z4.z.discriminatedUnion("type", [
+z4.z.object({ type: z4.z.literal("url"), url: z4.z.string() }),
+z4.z.object({ type: z4.z.literal("api"), name: z4.z.string() })
+])
+).optional()
 })
 )
 );
@@ -10012,7 +10288,7 @@ var openaiTools = {
 * @param ranking - The ranking options to use for the file search.
 * @param filters - The filters to use for the file search.
 */
-fileSearch,
+fileSearch: fileSearch2,
 /**
 * The image generation tool allows you to generate images using a text prompt,
 * and optionally image inputs. It leverages the GPT Image model,
@@ -10492,7 +10768,13 @@ var openaiResponsesChunkSchema = lazyValidator(
 action: z4.z.discriminatedUnion("type", [
 z4.z.object({
 type: z4.z.literal("search"),
-query: z4.z.string().nullish()
+query: z4.z.string().nullish(),
+sources: z4.z.array(
+z4.z.discriminatedUnion("type", [
+z4.z.object({ type: z4.z.literal("url"), url: z4.z.string() }),
+z4.z.object({ type: z4.z.literal("api"), name: z4.z.string() })
+])
+).nullish()
 }),
 z4.z.object({
 type: z4.z.literal("open_page"),
@@ -10600,10 +10882,13 @@ var openaiResponsesChunkSchema = lazyValidator(
 }),
 z4.z.object({
 type: z4.z.literal("error"),
-
-
-
-
+sequence_number: z4.z.number(),
+error: z4.z.object({
+type: z4.z.string(),
+code: z4.z.string(),
+message: z4.z.string(),
+param: z4.z.string().nullish()
+})
 }),
 z4.z.object({ type: z4.z.string() }).loose().transform((value) => ({
 type: "unknown_chunk",
@@ -10616,13 +10901,15 @@ var openaiResponsesChunkSchema = lazyValidator(
 var openaiResponsesResponseSchema = lazyValidator(
 () => zodSchema(
 z4.z.object({
-id: z4.z.string(),
-created_at: z4.z.number(),
+id: z4.z.string().optional(),
+created_at: z4.z.number().optional(),
 error: z4.z.object({
-
-
+message: z4.z.string(),
+type: z4.z.string(),
+param: z4.z.string().nullish(),
+code: z4.z.string()
 }).nullish(),
-model: z4.z.string(),
+model: z4.z.string().optional(),
 output: z4.z.array(
 z4.z.discriminatedUnion("type", [
 z4.z.object({
@@ -10664,7 +10951,18 @@ var openaiResponsesResponseSchema = lazyValidator(
 quote: z4.z.string().nullish()
 }),
 z4.z.object({
-type: z4.z.literal("container_file_citation")
+type: z4.z.literal("container_file_citation"),
+container_id: z4.z.string(),
+file_id: z4.z.string(),
+filename: z4.z.string().nullish(),
+start_index: z4.z.number().nullish(),
+end_index: z4.z.number().nullish(),
+index: z4.z.number().nullish()
+}),
+z4.z.object({
+type: z4.z.literal("file_path"),
+file_id: z4.z.string(),
+index: z4.z.number().nullish()
 })
 ])
 )
@@ -10678,7 +10976,13 @@ var openaiResponsesResponseSchema = lazyValidator(
 action: z4.z.discriminatedUnion("type", [
 z4.z.object({
 type: z4.z.literal("search"),
-query: z4.z.string().nullish()
+query: z4.z.string().nullish(),
+sources: z4.z.array(
+z4.z.discriminatedUnion("type", [
+z4.z.object({ type: z4.z.literal("url"), url: z4.z.string() }),
+z4.z.object({ type: z4.z.literal("api"), name: z4.z.string() })
+])
+).nullish()
 }),
 z4.z.object({
 type: z4.z.literal("open_page"),
@@ -10697,7 +11001,10 @@ var openaiResponsesResponseSchema = lazyValidator(
 queries: z4.z.array(z4.z.string()),
 results: z4.z.array(
 z4.z.object({
-attributes: z4.z.record(
+attributes: z4.z.record(
+z4.z.string(),
+z4.z.union([z4.z.string(), z4.z.number(), z4.z.boolean()])
+),
 file_id: z4.z.string(),
 filename: z4.z.string(),
 score: z4.z.number(),
@@ -10759,7 +11066,7 @@ var openaiResponsesResponseSchema = lazyValidator(
 )
 })
 ])
-),
+).optional(),
 service_tier: z4.z.string().nullish(),
 incomplete_details: z4.z.object({ reason: z4.z.string() }).nullish(),
 usage: z4.z.object({
@@ -10767,7 +11074,7 @@ var openaiResponsesResponseSchema = lazyValidator(
 input_tokens_details: z4.z.object({ cached_tokens: z4.z.number().nullish() }).nullish(),
 output_tokens: z4.z.number(),
 output_tokens_details: z4.z.object({ reasoning_tokens: z4.z.number().nullish() }).nullish()
-})
+}).optional()
 })
 )
 );
@@ -10775,6 +11082,7 @@ var TOP_LOGPROBS_MAX = 20;
 var openaiResponsesProviderOptionsSchema = lazyValidator(
 () => zodSchema(
 z4.z.object({
+conversation: z4.z.string().nullish(),
 include: z4.z.array(
 z4.z.enum([
 "reasoning.encrypted_content",
@@ -10807,6 +11115,15 @@ var openaiResponsesProviderOptionsSchema = lazyValidator(
 parallelToolCalls: z4.z.boolean().nullish(),
 previousResponseId: z4.z.string().nullish(),
 promptCacheKey: z4.z.string().nullish(),
+/**
+* The retention policy for the prompt cache.
+* - 'in_memory': Default. Standard prompt caching behavior.
+* - '24h': Extended prompt caching that keeps cached prefixes active for up to 24 hours.
+* Currently only available for 5.1 series models.
+*
+* @default 'in_memory'
+*/
+promptCacheRetention: z4.z.enum(["in_memory", "24h"]).nullish(),
 reasoningEffort: z4.z.string().nullish(),
 reasoningSummary: z4.z.string().nullish(),
 safetyIdentifier: z4.z.string().nullish(),
@@ -10846,7 +11163,7 @@ async function prepareResponsesTools({
 case "openai.file_search": {
 const args = await validateTypes({
 value: tool2.args,
-schema:
+schema: fileSearchArgsSchema2
 });
 openaiTools2.push({
 type: "file_search",
@@ -11012,6 +11329,13 @@ var OpenAIResponsesLanguageModel = class {
 providerOptions,
 schema: openaiResponsesProviderOptionsSchema
 });
+if ((openaiOptions == null ? void 0 : openaiOptions.conversation) && (openaiOptions == null ? void 0 : openaiOptions.previousResponseId)) {
+warnings.push({
+type: "unsupported-setting",
+setting: "conversation",
+details: "conversation and previousResponseId cannot be used together"
+});
+}
 const { input, warnings: inputWarnings } = await convertToOpenAIResponsesInput({
 prompt,
 systemMessageMode: modelConfig.systemMessageMode,
@@ -11074,6 +11398,7 @@ var OpenAIResponsesLanguageModel = class {
 }
 },
 // provider options:
+conversation: openaiOptions == null ? void 0 : openaiOptions.conversation,
 max_tool_calls: openaiOptions == null ? void 0 : openaiOptions.maxToolCalls,
 metadata: openaiOptions == null ? void 0 : openaiOptions.metadata,
 parallel_tool_calls: openaiOptions == null ? void 0 : openaiOptions.parallelToolCalls,
@@ -11084,6 +11409,7 @@ var OpenAIResponsesLanguageModel = class {
 service_tier: openaiOptions == null ? void 0 : openaiOptions.serviceTier,
 include,
 prompt_cache_key: openaiOptions == null ? void 0 : openaiOptions.promptCacheKey,
+prompt_cache_retention: openaiOptions == null ? void 0 : openaiOptions.promptCacheRetention,
 safety_identifier: openaiOptions == null ? void 0 : openaiOptions.safetyIdentifier,
 top_logprobs: topLogprobs,
 truncation: openaiOptions == null ? void 0 : openaiOptions.truncation,
@@ -11293,7 +11619,14 @@ var OpenAIResponsesLanguageModel = class {
 id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : generateId(),
 mediaType: "text/plain",
 title: (_k = (_j = annotation.quote) != null ? _j : annotation.filename) != null ? _k : "Document",
-filename: (_l = annotation.filename) != null ? _l : annotation.file_id
+filename: (_l = annotation.filename) != null ? _l : annotation.file_id,
+...annotation.file_id ? {
+providerMetadata: {
+openai: {
+fileId: annotation.file_id
+}
+}
+} : {}
 });
 }
 }
@@ -11403,7 +11736,9 @@ var OpenAIResponsesLanguageModel = class {
 }
 }
 const providerMetadata = {
-openai: {
+openai: {
+...response.id != null ? { responseId: response.id } : {}
+}
 };
 if (logprobs.length > 0) {
 providerMetadata.openai.logprobs = logprobs;
@@ -11411,6 +11746,7 @@ var OpenAIResponsesLanguageModel = class {
 if (typeof response.service_tier === "string") {
 providerMetadata.openai.serviceTier = response.service_tier;
 }
+const usage = response.usage;
 return {
 content,
 finishReason: mapOpenAIResponseFinishReason({
@@ -11418,11 +11754,11 @@ var OpenAIResponsesLanguageModel = class {
 hasFunctionCall
 }),
 usage: {
-inputTokens:
-outputTokens:
-totalTokens:
-reasoningTokens: (_q = (_p =
-cachedInputTokens: (_s = (_r =
+inputTokens: usage.input_tokens,
+outputTokens: usage.output_tokens,
+totalTokens: usage.input_tokens + usage.output_tokens,
+reasoningTokens: (_q = (_p = usage.output_tokens_details) == null ? void 0 : _p.reasoning_tokens) != null ? _q : void 0,
+cachedInputTokens: (_s = (_r = usage.input_tokens_details) == null ? void 0 : _r.cached_tokens) != null ? _s : void 0
 },
 request: { body },
 response: {
@@ -11871,7 +12207,14 @@ var OpenAIResponsesLanguageModel = class {
 id: (_s = (_r = (_q = self.config).generateId) == null ? void 0 : _r.call(_q)) != null ? _s : generateId(),
 mediaType: "text/plain",
 title: (_u = (_t = value.annotation.quote) != null ? _t : value.annotation.filename) != null ? _u : "Document",
-filename: (_v = value.annotation.filename) != null ? _v : value.annotation.file_id
+filename: (_v = value.annotation.filename) != null ? _v : value.annotation.file_id,
+...value.annotation.file_id ? {
+providerMetadata: {
+openai: {
+fileId: value.annotation.file_id
+}
+}
+} : {}
 });
 }
 } else if (isErrorChunk(value)) {
@@ -11949,13 +12292,6 @@ function getResponsesModelConfig(modelId) {
 };
 }
 if (modelId.startsWith("o") || modelId.startsWith("gpt-5") || modelId.startsWith("codex-") || modelId.startsWith("computer-use")) {
-if (modelId.startsWith("o1-mini") || modelId.startsWith("o1-preview")) {
-return {
-...defaults,
-isReasoningModel: true,
-systemMessageMode: "remove"
-};
-}
 return {
 ...defaults,
 isReasoningModel: true,
@@ -11971,7 +12307,11 @@ function mapWebSearchOutput(action) {
 var _a15;
 switch (action.type) {
 case "search":
-return {
+return {
+action: { type: "search", query: (_a15 = action.query) != null ? _a15 : void 0 },
+// include sources when provided by the Responses API (behind include flag)
+...action.sources != null && { sources: action.sources }
+};
 case "open_page":
 return { action: { type: "openPage", url: action.url } };
 case "find":
@@ -12310,7 +12650,7 @@ var OpenAITranscriptionModel = class {
 };
 }
 };
-var VERSION5 = "2.0.
+var VERSION5 = "2.0.69" ;
 function createOpenAI(options = {}) {
 var _a15, _b;
 const baseURL = (_a15 = withoutTrailingSlash(
@@ -12407,17 +12747,10 @@ function createOpenAI(options = {}) {
 }
 createOpenAI();
 
-exports.APICallError = APICallError;
-exports.EmptyResponseBodyError = EmptyResponseBodyError;
-exports.EventSourceParserStream = EventSourceParserStream;
-exports.InvalidArgumentError = InvalidArgumentError;
-exports.JSONParseError = JSONParseError;
-exports.LoadAPIKeyError = LoadAPIKeyError;
 exports.MastraModelGateway = MastraModelGateway;
 exports.NoSuchModelError = NoSuchModelError;
 exports.OpenAICompatibleImageModel = OpenAICompatibleImageModel;
 exports.TooManyEmbeddingValuesForCallError = TooManyEmbeddingValuesForCallError;
-exports.TypeValidationError = TypeValidationError;
 exports.UnsupportedFunctionalityError = UnsupportedFunctionalityError;
 exports.combineHeaders = combineHeaders;
 exports.convertToBase64 = convertToBase64;
@@ -12429,10 +12762,11 @@ exports.createJsonResponseHandler = createJsonResponseHandler;
 exports.createOpenAI = createOpenAI;
 exports.createOpenAICompatible = createOpenAICompatible;
 exports.generateId = generateId;
+exports.injectJsonInstructionIntoMessages = injectJsonInstructionIntoMessages;
 exports.loadApiKey = loadApiKey;
 exports.parseProviderOptions = parseProviderOptions;
 exports.postJsonToApi = postJsonToApi;
 exports.withUserAgentSuffix = withUserAgentSuffix;
 exports.withoutTrailingSlash = withoutTrailingSlash;
-//# sourceMappingURL=chunk-
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-ZCVTH3CH.cjs.map
+//# sourceMappingURL=chunk-ZCVTH3CH.cjs.map