@mastra/core 1.0.0-beta.3 → 1.0.0-beta.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +369 -0
- package/dist/agent/agent.d.ts +2 -2
- package/dist/agent/agent.d.ts.map +1 -1
- package/dist/agent/agent.types.d.ts +2 -0
- package/dist/agent/agent.types.d.ts.map +1 -1
- package/dist/agent/index.cjs +9 -9
- package/dist/agent/index.js +2 -2
- package/dist/agent/message-list/index.cjs +3 -3
- package/dist/agent/message-list/index.d.ts +5 -0
- package/dist/agent/message-list/index.d.ts.map +1 -1
- package/dist/agent/message-list/index.js +1 -1
- package/dist/agent/message-list/prompt/attachments-to-parts.d.ts.map +1 -1
- package/dist/agent/types.d.ts +2 -3
- package/dist/agent/types.d.ts.map +1 -1
- package/dist/agent/utils.d.ts.map +1 -1
- package/dist/agent/workflows/prepare-stream/index.d.ts.map +1 -1
- package/dist/agent/workflows/prepare-stream/map-results-step.d.ts.map +1 -1
- package/dist/agent/workflows/prepare-stream/stream-step.d.ts +8 -1
- package/dist/agent/workflows/prepare-stream/stream-step.d.ts.map +1 -1
- package/dist/{chunk-4DWZ4Z6H.js → chunk-3B2OPLGG.js} +13 -7
- package/dist/chunk-3B2OPLGG.js.map +1 -0
- package/dist/{chunk-GRGPQ32U.js → chunk-3ZQ7LX73.js} +13 -13
- package/dist/chunk-3ZQ7LX73.js.map +1 -0
- package/dist/{chunk-VZGBVYXA.cjs → chunk-556MJ7CL.cjs} +33 -27
- package/dist/chunk-556MJ7CL.cjs.map +1 -0
- package/dist/{chunk-HGNRQ3OG.js → chunk-5O52O25J.js} +15 -8
- package/dist/chunk-5O52O25J.js.map +1 -0
- package/dist/{chunk-VU6DVS7J.js → chunk-5SA2EZ33.js} +421 -29
- package/dist/chunk-5SA2EZ33.js.map +1 -0
- package/dist/chunk-6XCINXZ7.cjs +194 -0
- package/dist/chunk-6XCINXZ7.cjs.map +1 -0
- package/dist/{chunk-KOSW5PP5.js → chunk-7ZADRRDW.js} +466 -125
- package/dist/chunk-7ZADRRDW.js.map +1 -0
- package/dist/{chunk-JXESKY4A.js → chunk-B5J5HYDN.js} +7 -5
- package/dist/chunk-B5J5HYDN.js.map +1 -0
- package/dist/chunk-BWYU7D33.js +192 -0
- package/dist/chunk-BWYU7D33.js.map +1 -0
- package/dist/{chunk-MCUX2D5Q.js → chunk-D7CJ4HIQ.js} +263 -24
- package/dist/chunk-D7CJ4HIQ.js.map +1 -0
- package/dist/{chunk-G36A2JRR.cjs → chunk-DQIZ5FFX.cjs} +457 -326
- package/dist/chunk-DQIZ5FFX.cjs.map +1 -0
- package/dist/{chunk-ZPMFINU2.cjs → chunk-HNHZGFZY.cjs} +466 -131
- package/dist/chunk-HNHZGFZY.cjs.map +1 -0
- package/dist/{chunk-3VOUB4ZU.cjs → chunk-IITXXVYI.cjs} +17 -17
- package/dist/chunk-IITXXVYI.cjs.map +1 -0
- package/dist/{chunk-QUKUN6NR.cjs → chunk-ISMGVGUM.cjs} +105 -5
- package/dist/chunk-ISMGVGUM.cjs.map +1 -0
- package/dist/{chunk-OQF4H5Y2.js → chunk-KJIQGPQR.js} +4 -4
- package/dist/{chunk-OQF4H5Y2.js.map → chunk-KJIQGPQR.js.map} +1 -1
- package/dist/{chunk-OWX2PUFH.cjs → chunk-KP42JLXE.cjs} +506 -236
- package/dist/chunk-KP42JLXE.cjs.map +1 -0
- package/dist/{chunk-4RSHBKDJ.cjs → chunk-KWWD3U7G.cjs} +5 -5
- package/dist/chunk-KWWD3U7G.cjs.map +1 -0
- package/dist/{chunk-N4SJ4YX7.cjs → chunk-NHP6ZIDG.cjs} +271 -31
- package/dist/chunk-NHP6ZIDG.cjs.map +1 -0
- package/dist/{chunk-UIZSWUKP.js → chunk-NZAXAFI3.js} +104 -6
- package/dist/chunk-NZAXAFI3.js.map +1 -0
- package/dist/{chunk-O6NA3Z43.cjs → chunk-OUUPUAGA.cjs} +10 -8
- package/dist/chunk-OUUPUAGA.cjs.map +1 -0
- package/dist/{chunk-D6EDHNGV.js → chunk-PC6EKOWK.js} +64 -11
- package/dist/chunk-PC6EKOWK.js.map +1 -0
- package/dist/{chunk-YQ7NLZZ3.cjs → chunk-QGWNF2QJ.cjs} +74 -618
- package/dist/chunk-QGWNF2QJ.cjs.map +1 -0
- package/dist/{chunk-HBJPYQRN.cjs → chunk-RROQ46B6.cjs} +69 -16
- package/dist/chunk-RROQ46B6.cjs.map +1 -0
- package/dist/{chunk-G3OOCXAI.js → chunk-T2CJRA6E.js} +4 -4
- package/dist/chunk-T2CJRA6E.js.map +1 -0
- package/dist/{chunk-T3WZCEC4.js → chunk-T2UNO766.js} +47 -591
- package/dist/chunk-T2UNO766.js.map +1 -0
- package/dist/{chunk-EZVRSZMK.cjs → chunk-U3VE2EVB.cjs} +11 -11
- package/dist/{chunk-EZVRSZMK.cjs.map → chunk-U3VE2EVB.cjs.map} +1 -1
- package/dist/{chunk-4IKJAKCD.cjs → chunk-V537VSV4.cjs} +74 -16
- package/dist/chunk-V537VSV4.cjs.map +1 -0
- package/dist/{chunk-XRIVPHXV.cjs → chunk-VYJXTHII.cjs} +422 -30
- package/dist/chunk-VYJXTHII.cjs.map +1 -0
- package/dist/{chunk-LRSB62Z6.cjs → chunk-X7F4CSGR.cjs} +15 -8
- package/dist/chunk-X7F4CSGR.cjs.map +1 -0
- package/dist/{chunk-CKGIPST2.js → chunk-XBO6W7LZ.js} +462 -193
- package/dist/chunk-XBO6W7LZ.js.map +1 -0
- package/dist/{chunk-KEURQGCQ.js → chunk-XIDKHXNR.js} +74 -17
- package/dist/chunk-XIDKHXNR.js.map +1 -0
- package/dist/{chunk-JTXVR2RA.cjs → chunk-XJQX54QP.cjs} +5 -5
- package/dist/{chunk-JTXVR2RA.cjs.map → chunk-XJQX54QP.cjs.map} +1 -1
- package/dist/{chunk-BAMR7HKO.js → chunk-YDFX3JR2.js} +457 -326
- package/dist/chunk-YDFX3JR2.js.map +1 -0
- package/dist/{chunk-5CWWU22H.js → chunk-ZTTMSCLU.js} +3 -3
- package/dist/{chunk-5CWWU22H.js.map → chunk-ZTTMSCLU.js.map} +1 -1
- package/dist/evals/base.d.ts +1 -1
- package/dist/evals/base.d.ts.map +1 -1
- package/dist/evals/base.test-utils.d.ts +25 -25
- package/dist/evals/index.cjs +4 -4
- package/dist/evals/index.js +1 -1
- package/dist/evals/scoreTraces/index.cjs +3 -3
- package/dist/evals/scoreTraces/index.js +1 -1
- package/dist/index.cjs +2 -2
- package/dist/index.js +1 -1
- package/dist/integration/index.cjs +2 -2
- package/dist/integration/index.js +1 -1
- package/dist/llm/index.cjs +18 -14
- package/dist/llm/index.d.ts +2 -1
- package/dist/llm/index.d.ts.map +1 -1
- package/dist/llm/index.js +5 -5
- package/dist/llm/model/aisdk/v5/model.d.ts.map +1 -1
- package/dist/llm/model/gateway-resolver.d.ts.map +1 -1
- package/dist/llm/model/gateways/azure.d.ts +36 -0
- package/dist/llm/model/gateways/azure.d.ts.map +1 -0
- package/dist/llm/model/gateways/base.d.ts +3 -6
- package/dist/llm/model/gateways/base.d.ts.map +1 -1
- package/dist/llm/model/gateways/index.d.ts +4 -1
- package/dist/llm/model/gateways/index.d.ts.map +1 -1
- package/dist/llm/model/gateways/models-dev.d.ts +2 -2
- package/dist/llm/model/gateways/models-dev.d.ts.map +1 -1
- package/dist/llm/model/gateways/netlify.d.ts +2 -2
- package/dist/llm/model/gateways/netlify.d.ts.map +1 -1
- package/dist/llm/model/model.d.ts.map +1 -1
- package/dist/llm/model/model.loop.d.ts +1 -1
- package/dist/llm/model/model.loop.d.ts.map +1 -1
- package/dist/llm/model/provider-registry.d.ts.map +1 -1
- package/dist/llm/model/provider-types.generated.d.ts +192 -11
- package/dist/llm/model/registry-generator.d.ts +12 -0
- package/dist/llm/model/registry-generator.d.ts.map +1 -1
- package/dist/llm/model/router.d.ts.map +1 -1
- package/dist/loop/index.cjs +2 -2
- package/dist/loop/index.js +1 -1
- package/dist/loop/loop.d.ts.map +1 -1
- package/dist/loop/network/index.d.ts.map +1 -1
- package/dist/loop/test-utils/generateText.d.ts.map +1 -1
- package/dist/loop/test-utils/resultObject.d.ts.map +1 -1
- package/dist/loop/test-utils/streamObject.d.ts.map +1 -1
- package/dist/loop/test-utils/toUIMessageStream.d.ts.map +1 -1
- package/dist/loop/test-utils/utils.d.ts.map +1 -1
- package/dist/loop/types.d.ts +8 -0
- package/dist/loop/types.d.ts.map +1 -1
- package/dist/loop/workflows/agentic-execution/index.d.ts.map +1 -1
- package/dist/loop/workflows/agentic-execution/llm-execution-step.d.ts.map +1 -1
- package/dist/loop/workflows/agentic-execution/llm-mapping-step.d.ts.map +1 -1
- package/dist/loop/workflows/agentic-execution/tool-call-step.d.ts +1 -1
- package/dist/loop/workflows/agentic-execution/tool-call-step.d.ts.map +1 -1
- package/dist/mastra/index.cjs +2 -2
- package/dist/mastra/index.d.ts +9 -9
- package/dist/mastra/index.d.ts.map +1 -1
- package/dist/mastra/index.js +1 -1
- package/dist/mcp/index.cjs.map +1 -1
- package/dist/mcp/index.d.ts +4 -4
- package/dist/mcp/index.d.ts.map +1 -1
- package/dist/mcp/index.js.map +1 -1
- package/dist/mcp/types.d.ts +2 -2
- package/dist/mcp/types.d.ts.map +1 -1
- package/dist/memory/index.cjs +89 -34
- package/dist/memory/index.cjs.map +1 -1
- package/dist/memory/index.js +85 -30
- package/dist/memory/index.js.map +1 -1
- package/dist/memory/mock.d.ts +9 -13
- package/dist/memory/mock.d.ts.map +1 -1
- package/dist/models-dev-F6MTIYTO.js +3 -0
- package/dist/{models-dev-GCVENVWA.js.map → models-dev-F6MTIYTO.js.map} +1 -1
- package/dist/models-dev-XIVR5EJV.cjs +12 -0
- package/dist/{models-dev-TIBJR6IG.cjs.map → models-dev-XIVR5EJV.cjs.map} +1 -1
- package/dist/netlify-MXBOGAJR.cjs +12 -0
- package/dist/{netlify-NTSNNT6F.cjs.map → netlify-MXBOGAJR.cjs.map} +1 -1
- package/dist/netlify-RX3JXCFQ.js +3 -0
- package/dist/{netlify-O5NJW7CF.js.map → netlify-RX3JXCFQ.js.map} +1 -1
- package/dist/processors/index.cjs +11 -11
- package/dist/processors/index.d.ts +2 -2
- package/dist/processors/index.d.ts.map +1 -1
- package/dist/processors/index.js +1 -1
- package/dist/processors/processors/batch-parts.d.ts +1 -1
- package/dist/processors/processors/batch-parts.d.ts.map +1 -1
- package/dist/processors/processors/language-detector.d.ts +1 -1
- package/dist/processors/processors/language-detector.d.ts.map +1 -1
- package/dist/processors/processors/moderation.d.ts +1 -1
- package/dist/processors/processors/moderation.d.ts.map +1 -1
- package/dist/processors/processors/pii-detector.d.ts +1 -1
- package/dist/processors/processors/pii-detector.d.ts.map +1 -1
- package/dist/processors/processors/prompt-injection-detector.d.ts +1 -1
- package/dist/processors/processors/prompt-injection-detector.d.ts.map +1 -1
- package/dist/processors/processors/structured-output.d.ts +1 -1
- package/dist/processors/processors/structured-output.d.ts.map +1 -1
- package/dist/processors/processors/system-prompt-scrubber.d.ts +1 -1
- package/dist/processors/processors/system-prompt-scrubber.d.ts.map +1 -1
- package/dist/processors/processors/token-limiter.d.ts +1 -1
- package/dist/processors/processors/token-limiter.d.ts.map +1 -1
- package/dist/processors/processors/unicode-normalizer.d.ts +1 -1
- package/dist/processors/processors/unicode-normalizer.d.ts.map +1 -1
- package/dist/provider-registry-3LUCE7FT.js +3 -0
- package/dist/{provider-registry-74GMFZKT.js.map → provider-registry-3LUCE7FT.js.map} +1 -1
- package/dist/provider-registry-NBRXBOQT.cjs +40 -0
- package/dist/{provider-registry-BZP3DIIV.cjs.map → provider-registry-NBRXBOQT.cjs.map} +1 -1
- package/dist/provider-registry.json +400 -22
- package/dist/{registry-generator-JPCV47SC.cjs → registry-generator-DEPPRYYJ.cjs} +21 -6
- package/dist/registry-generator-DEPPRYYJ.cjs.map +1 -0
- package/dist/{registry-generator-XD4FPZTU.js → registry-generator-FLW6NV42.js} +21 -7
- package/dist/registry-generator-FLW6NV42.js.map +1 -0
- package/dist/relevance/index.cjs +2 -2
- package/dist/relevance/index.js +1 -1
- package/dist/server/auth.d.ts +11 -0
- package/dist/server/auth.d.ts.map +1 -1
- package/dist/server/composite-auth.d.ts +9 -0
- package/dist/server/composite-auth.d.ts.map +1 -0
- package/dist/server/index.cjs +41 -0
- package/dist/server/index.cjs.map +1 -1
- package/dist/server/index.d.ts +1 -0
- package/dist/server/index.d.ts.map +1 -1
- package/dist/server/index.js +41 -1
- package/dist/server/index.js.map +1 -1
- package/dist/storage/index.cjs +29 -29
- package/dist/storage/index.js +1 -1
- package/dist/stream/aisdk/v5/compat/prepare-tools.d.ts.map +1 -1
- package/dist/stream/aisdk/v5/output.d.ts.map +1 -1
- package/dist/stream/base/output.d.ts.map +1 -1
- package/dist/stream/index.cjs +11 -11
- package/dist/stream/index.js +2 -2
- package/dist/stream/types.d.ts +7 -2
- package/dist/stream/types.d.ts.map +1 -1
- package/dist/test-utils/llm-mock.cjs +68 -31
- package/dist/test-utils/llm-mock.cjs.map +1 -1
- package/dist/test-utils/llm-mock.d.ts +4 -2
- package/dist/test-utils/llm-mock.d.ts.map +1 -1
- package/dist/test-utils/llm-mock.js +67 -30
- package/dist/test-utils/llm-mock.js.map +1 -1
- package/dist/tools/index.cjs +6 -6
- package/dist/tools/index.js +2 -2
- package/dist/tools/is-vercel-tool.cjs +2 -2
- package/dist/tools/is-vercel-tool.js +1 -1
- package/dist/tools/stream.d.ts +1 -0
- package/dist/tools/stream.d.ts.map +1 -1
- package/dist/tools/tool-builder/builder.d.ts +2 -0
- package/dist/tools/tool-builder/builder.d.ts.map +1 -1
- package/dist/tools/tool.d.ts +6 -6
- package/dist/tools/tool.d.ts.map +1 -1
- package/dist/tools/types.d.ts +6 -2
- package/dist/tools/types.d.ts.map +1 -1
- package/dist/tools/validation.d.ts +13 -1
- package/dist/tools/validation.d.ts.map +1 -1
- package/dist/utils.cjs +22 -22
- package/dist/utils.d.ts +1 -1
- package/dist/utils.d.ts.map +1 -1
- package/dist/utils.js +1 -1
- package/dist/vector/filter/index.cjs +7 -189
- package/dist/vector/filter/index.cjs.map +1 -1
- package/dist/vector/filter/index.js +1 -190
- package/dist/vector/filter/index.js.map +1 -1
- package/dist/vector/index.cjs +5 -0
- package/dist/vector/index.cjs.map +1 -1
- package/dist/vector/index.d.ts +1 -0
- package/dist/vector/index.d.ts.map +1 -1
- package/dist/vector/index.js +1 -0
- package/dist/vector/index.js.map +1 -1
- package/dist/vector/types.d.ts +86 -3
- package/dist/vector/types.d.ts.map +1 -1
- package/dist/vector/vector.d.ts +39 -2
- package/dist/vector/vector.d.ts.map +1 -1
- package/dist/voice/aisdk/index.d.ts +3 -0
- package/dist/voice/aisdk/index.d.ts.map +1 -0
- package/dist/voice/aisdk/speech.d.ts +23 -0
- package/dist/voice/aisdk/speech.d.ts.map +1 -0
- package/dist/voice/aisdk/transcription.d.ts +22 -0
- package/dist/voice/aisdk/transcription.d.ts.map +1 -0
- package/dist/voice/composite-voice.d.ts +4 -3
- package/dist/voice/composite-voice.d.ts.map +1 -1
- package/dist/voice/index.cjs +12 -4
- package/dist/voice/index.d.ts +1 -0
- package/dist/voice/index.d.ts.map +1 -1
- package/dist/voice/index.js +1 -1
- package/dist/workflows/default.d.ts.map +1 -1
- package/dist/workflows/evented/index.cjs +10 -10
- package/dist/workflows/evented/index.js +1 -1
- package/dist/workflows/evented/step-executor.d.ts.map +1 -1
- package/dist/workflows/index.cjs +23 -19
- package/dist/workflows/index.js +1 -1
- package/dist/workflows/types.d.ts +2 -2
- package/dist/workflows/types.d.ts.map +1 -1
- package/dist/workflows/utils.d.ts +7 -0
- package/dist/workflows/utils.d.ts.map +1 -1
- package/dist/workflows/workflow.d.ts +4 -2
- package/dist/workflows/workflow.d.ts.map +1 -1
- package/package.json +17 -16
- package/src/llm/model/provider-types.generated.d.ts +192 -11
- package/dist/chunk-3VOUB4ZU.cjs.map +0 -1
- package/dist/chunk-4DWZ4Z6H.js.map +0 -1
- package/dist/chunk-4IKJAKCD.cjs.map +0 -1
- package/dist/chunk-4RSHBKDJ.cjs.map +0 -1
- package/dist/chunk-BAMR7HKO.js.map +0 -1
- package/dist/chunk-CKGIPST2.js.map +0 -1
- package/dist/chunk-D6EDHNGV.js.map +0 -1
- package/dist/chunk-G36A2JRR.cjs.map +0 -1
- package/dist/chunk-G3OOCXAI.js.map +0 -1
- package/dist/chunk-GRGPQ32U.js.map +0 -1
- package/dist/chunk-HBJPYQRN.cjs.map +0 -1
- package/dist/chunk-HGNRQ3OG.js.map +0 -1
- package/dist/chunk-JXESKY4A.js.map +0 -1
- package/dist/chunk-KEURQGCQ.js.map +0 -1
- package/dist/chunk-KOSW5PP5.js.map +0 -1
- package/dist/chunk-LRSB62Z6.cjs.map +0 -1
- package/dist/chunk-MCUX2D5Q.js.map +0 -1
- package/dist/chunk-N4SJ4YX7.cjs.map +0 -1
- package/dist/chunk-O6NA3Z43.cjs.map +0 -1
- package/dist/chunk-OWX2PUFH.cjs.map +0 -1
- package/dist/chunk-QUKUN6NR.cjs.map +0 -1
- package/dist/chunk-T3WZCEC4.js.map +0 -1
- package/dist/chunk-UIZSWUKP.js.map +0 -1
- package/dist/chunk-VU6DVS7J.js.map +0 -1
- package/dist/chunk-VZGBVYXA.cjs.map +0 -1
- package/dist/chunk-XRIVPHXV.cjs.map +0 -1
- package/dist/chunk-YQ7NLZZ3.cjs.map +0 -1
- package/dist/chunk-ZPMFINU2.cjs.map +0 -1
- package/dist/models-dev-GCVENVWA.js +0 -3
- package/dist/models-dev-TIBJR6IG.cjs +0 -12
- package/dist/netlify-NTSNNT6F.cjs +0 -12
- package/dist/netlify-O5NJW7CF.js +0 -3
- package/dist/provider-registry-74GMFZKT.js +0 -3
- package/dist/provider-registry-BZP3DIIV.cjs +0 -40
- package/dist/registry-generator-JPCV47SC.cjs.map +0 -1
- package/dist/registry-generator-XD4FPZTU.js.map +0 -1
@@ -499,24 +499,74 @@ function getRuntimeEnvironmentUserAgent(globalThisAny = globalThis) {
   }
   return "runtime/unknown";
 }
-function
-
-
-
+function normalizeHeaders(headers) {
+  if (headers == null) {
+    return {};
+  }
+  const normalized = {};
+  if (headers instanceof Headers) {
+    headers.forEach((value, key) => {
+      normalized[key.toLowerCase()] = value;
+    });
+  } else {
+    if (!Array.isArray(headers)) {
+      headers = Object.entries(headers);
+    }
+    for (const [key, value] of headers) {
+      if (value != null) {
+        normalized[key.toLowerCase()] = value;
+      }
+    }
+  }
+  return normalized;
 }
 function withUserAgentSuffix(headers, ...userAgentSuffixParts) {
-  const
-    headers != null ? headers : {}
-  );
-  const normalizedHeaders = new Headers(cleanedHeaders);
+  const normalizedHeaders = new Headers(normalizeHeaders(headers));
   const currentUserAgentHeader = normalizedHeaders.get("user-agent") || "";
   normalizedHeaders.set(
     "user-agent",
     [currentUserAgentHeader, ...userAgentSuffixParts].filter(Boolean).join(" ")
   );
-  return Object.fromEntries(normalizedHeaders);
+  return Object.fromEntries(normalizedHeaders.entries());
+}
+var VERSION = "3.0.17" ;
+var DEFAULT_SCHEMA_PREFIX = "JSON schema:";
+var DEFAULT_SCHEMA_SUFFIX = "You MUST answer with a JSON object that matches the JSON schema above.";
+var DEFAULT_GENERIC_SUFFIX = "You MUST answer with JSON.";
+function injectJsonInstruction({
+  prompt,
+  schema,
+  schemaPrefix = schema != null ? DEFAULT_SCHEMA_PREFIX : void 0,
+  schemaSuffix = schema != null ? DEFAULT_SCHEMA_SUFFIX : DEFAULT_GENERIC_SUFFIX
+}) {
+  return [
+    prompt != null && prompt.length > 0 ? prompt : void 0,
+    prompt != null && prompt.length > 0 ? "" : void 0,
+    // add a newline if prompt is not null
+    schemaPrefix,
+    schema != null ? JSON.stringify(schema) : void 0,
+    schemaSuffix
+  ].filter((line) => line != null).join("\n");
+}
+function injectJsonInstructionIntoMessages({
+  messages,
+  schema,
+  schemaPrefix,
+  schemaSuffix
+}) {
+  var _a15, _b;
+  const systemMessage = ((_a15 = messages[0]) == null ? void 0 : _a15.role) === "system" ? { ...messages[0] } : { role: "system", content: "" };
+  systemMessage.content = injectJsonInstruction({
+    prompt: systemMessage.content,
+    schema,
+    schemaPrefix,
+    schemaSuffix
+  });
+  return [
+    systemMessage,
+    ...((_b = messages[0]) == null ? void 0 : _b.role) === "system" ? messages.slice(1) : messages
+  ];
 }
-var VERSION = "3.0.12" ;
 function loadApiKey({
   apiKey,
   environmentVariableName,
@@ -612,7 +662,11 @@ function filter(obj) {
 }
 function secureJsonParse(text) {
   const { stackTraceLimit } = Error;
-
+  try {
+    Error.stackTraceLimit = 0;
+  } catch (e) {
+    return _parse(text);
+  }
   try {
     return _parse(text);
   } finally {
@@ -3537,7 +3591,7 @@ var OpenAICompatibleImageModel = class {
 var openaiCompatibleImageResponseSchema = z.object({
   data: z.array(z.object({ b64_json: z.string() }))
 });
-var VERSION2 = "1.0.
+var VERSION2 = "1.0.27" ;
 function createOpenAICompatible(options) {
   const baseURL = withoutTrailingSlash(options.baseURL);
   const providerName = options.name;
@@ -3590,7 +3644,7 @@ var MastraModelGateway = class {
     return this.id;
   }
 };
-var VERSION3 = "2.0.
+var VERSION3 = "2.0.45" ;
 var anthropicErrorDataSchema = lazySchema(
   () => zodSchema(
     z.object({
@@ -3792,7 +3846,18 @@ var anthropicMessagesResponseSchema = lazySchema(
       output_tokens: z.number(),
       cache_creation_input_tokens: z.number().nullish(),
       cache_read_input_tokens: z.number().nullish()
-    })
+    }),
+    container: z.object({
+      expires_at: z.string(),
+      id: z.string(),
+      skills: z.array(
+        z.object({
+          type: z.union([z.literal("anthropic"), z.literal("custom")]),
+          skill_id: z.string(),
+          version: z.string()
+        })
+      ).nullish()
+    }).nullish()
   })
   )
 );
@@ -4022,7 +4087,21 @@ var anthropicMessagesChunkSchema = lazySchema(
   type: z.literal("message_delta"),
   delta: z.object({
     stop_reason: z.string().nullish(),
-    stop_sequence: z.string().nullish()
+    stop_sequence: z.string().nullish(),
+    container: z.object({
+      expires_at: z.string(),
+      id: z.string(),
+      skills: z.array(
+        z.object({
+          type: z.union([
+            z.literal("anthropic"),
+            z.literal("custom")
+          ]),
+          skill_id: z.string(),
+          version: z.string()
+        })
+      ).nullish()
+    }).nullish()
   }),
   usage: z.looseObject({
     output_tokens: z.number(),
@@ -5265,6 +5344,21 @@ var AnthropicMessagesLanguageModel = class {
         setting: "seed"
       });
     }
+    if (temperature != null && temperature > 1) {
+      warnings.push({
+        type: "unsupported-setting",
+        setting: "temperature",
+        details: `${temperature} exceeds anthropic maximum of 1.0. clamped to 1.0`
+      });
+      temperature = 1;
+    } else if (temperature != null && temperature < 0) {
+      warnings.push({
+        type: "unsupported-setting",
+        setting: "temperature",
+        details: `${temperature} is below anthropic minimum of 0. clamped to 0`
+      });
+      temperature = 0;
+    }
     if ((responseFormat == null ? void 0 : responseFormat.type) === "json") {
       if (responseFormat.schema == null) {
         warnings.push({
@@ -5300,7 +5394,7 @@ var AnthropicMessagesLanguageModel = class {
     });
     const isThinking = ((_b = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _b.type) === "enabled";
     const thinkingBudget = (_c = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _c.budgetTokens;
-    const maxOutputTokensForModel = getMaxOutputTokensForModel(this.modelId);
+    const { maxOutputTokens: maxOutputTokensForModel, knownModel } = getMaxOutputTokensForModel(this.modelId);
     const maxTokens = maxOutputTokens != null ? maxOutputTokens : maxOutputTokensForModel;
     const baseArgs = {
       // model id:
@@ -5362,7 +5456,7 @@ var AnthropicMessagesLanguageModel = class {
       }
       baseArgs.max_tokens = maxTokens + thinkingBudget;
     }
-    if (baseArgs.max_tokens > maxOutputTokensForModel) {
+    if (knownModel && baseArgs.max_tokens > maxOutputTokensForModel) {
       if (maxOutputTokens != null) {
         warnings.push({
           type: "unsupported-setting",
@@ -5457,7 +5551,7 @@ var AnthropicMessagesLanguageModel = class {
     });
   }
   async doGenerate(options) {
-    var _a15, _b, _c, _d, _e, _f;
+    var _a15, _b, _c, _d, _e, _f, _g, _h;
     const { args, warnings, betas, usesJsonResponseTool } = await this.getArgs(options);
     const citationDocuments = this.extractCitationDocuments(options.prompt);
     const {
@@ -5708,7 +5802,16 @@ var AnthropicMessagesLanguageModel = class {
       anthropic: {
         usage: response.usage,
         cacheCreationInputTokens: (_e = response.usage.cache_creation_input_tokens) != null ? _e : null,
-        stopSequence: (_f = response.stop_sequence) != null ? _f : null
+        stopSequence: (_f = response.stop_sequence) != null ? _f : null,
+        container: response.container ? {
+          expiresAt: response.container.expires_at,
+          id: response.container.id,
+          skills: (_h = (_g = response.container.skills) == null ? void 0 : _g.map((skill) => ({
+            type: skill.type,
+            skillId: skill.skill_id,
+            version: skill.version
+          }))) != null ? _h : null
+        } : null
       }
     }
   };
@@ -5738,6 +5841,7 @@ var AnthropicMessagesLanguageModel = class {
     let rawUsage = void 0;
     let cacheCreationInputTokens = null;
     let stopSequence = null;
+    let container = null;
     let blockType = void 0;
     const generateId3 = this.generateId;
     return {
@@ -5747,7 +5851,7 @@ var AnthropicMessagesLanguageModel = class {
         controller.enqueue({ type: "stream-start", warnings });
       },
       transform(chunk, controller) {
-        var _a15, _b, _c, _d, _e, _f, _g, _h;
+        var _a15, _b, _c, _d, _e, _f, _g, _h, _i, _j;
         if (options.includeRawChunks) {
           controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
         }
@@ -5860,7 +5964,8 @@ var AnthropicMessagesLanguageModel = class {
               data: part.content.content.source.data
             }
           }
-        }
+        },
+        providerExecuted: true
       });
     } else if (part.content.type === "web_fetch_tool_result_error") {
       controller.enqueue({
@@ -6128,6 +6233,15 @@ var AnthropicMessagesLanguageModel = class {
           isJsonResponseFromTool: usesJsonResponseTool
         });
         stopSequence = (_h = value.delta.stop_sequence) != null ? _h : null;
+        container = value.delta.container != null ? {
+          expiresAt: value.delta.container.expires_at,
+          id: value.delta.container.id,
+          skills: (_j = (_i = value.delta.container.skills) == null ? void 0 : _i.map((skill) => ({
+            type: skill.type,
+            skillId: skill.skill_id,
+            version: skill.version
+          }))) != null ? _j : null
+        } : null;
         rawUsage = {
           ...rawUsage,
           ...value.usage
@@ -6143,7 +6257,8 @@ var AnthropicMessagesLanguageModel = class {
           anthropic: {
             usage: rawUsage != null ? rawUsage : null,
             cacheCreationInputTokens,
-            stopSequence
+            stopSequence,
+            container
           }
         }
       });
@@ -6168,13 +6283,15 @@ var AnthropicMessagesLanguageModel = class {
 };
 function getMaxOutputTokensForModel(modelId) {
   if (modelId.includes("claude-sonnet-4-") || modelId.includes("claude-3-7-sonnet") || modelId.includes("claude-haiku-4-5")) {
-    return 64e3;
+    return { maxOutputTokens: 64e3, knownModel: true };
   } else if (modelId.includes("claude-opus-4-")) {
-    return 32e3;
+    return { maxOutputTokens: 32e3, knownModel: true };
   } else if (modelId.includes("claude-3-5-haiku")) {
-    return 8192;
+    return { maxOutputTokens: 8192, knownModel: true };
+  } else if (modelId.includes("claude-3-haiku")) {
+    return { maxOutputTokens: 4096, knownModel: true };
   } else {
-    return 4096;
+    return { maxOutputTokens: 4096, knownModel: false };
   }
 }
 var bash_20241022InputSchema = lazySchema(
@@ -6510,8 +6627,14 @@ var anthropicTools = {
   webSearch_20250305
 };
 function createAnthropic(options = {}) {
-  var _a15;
-  const baseURL = (_a15 = withoutTrailingSlash(
+  var _a15, _b;
+  const baseURL = (_a15 = withoutTrailingSlash(
+    loadOptionalSetting({
+      settingValue: options.baseURL,
+      environmentVariableName: "ANTHROPIC_BASE_URL"
+    })
+  )) != null ? _a15 : "https://api.anthropic.com/v1";
+  const providerName = (_b = options.name) != null ? _b : "anthropic.messages";
   const getHeaders = () => withUserAgentSuffix(
     {
       "anthropic-version": "2023-06-01",
@@ -6527,7 +6650,7 @@ function createAnthropic(options = {}) {
   const createChatModel = (modelId) => {
     var _a22;
     return new AnthropicMessagesLanguageModel(modelId, {
-      provider:
+      provider: providerName,
       baseURL,
       headers: getHeaders,
       fetch: options.fetch,
@@ -6558,7 +6681,7 @@ function createAnthropic(options = {}) {
   return provider;
 }
 createAnthropic();
-var VERSION4 = "2.0.
+var VERSION4 = "2.0.40" ;
 var googleErrorDataSchema = lazySchema(
   () => zodSchema(
     z.object({
@@ -6858,19 +6981,20 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
     contents.push({
       role: "model",
       parts: content.map((part) => {
-        var _a22, _b, _c
+        var _a22, _b, _c;
+        const thoughtSignature = ((_b = (_a22 = part.providerOptions) == null ? void 0 : _a22.google) == null ? void 0 : _b.thoughtSignature) != null ? String((_c = part.providerOptions.google) == null ? void 0 : _c.thoughtSignature) : void 0;
         switch (part.type) {
           case "text": {
             return part.text.length === 0 ? void 0 : {
               text: part.text,
-              thoughtSignature
+              thoughtSignature
             };
           }
           case "reasoning": {
             return part.text.length === 0 ? void 0 : {
               text: part.text,
               thought: true,
-              thoughtSignature
+              thoughtSignature
             };
           }
           case "file": {
@@ -6897,7 +7021,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
                 name: part.toolName,
                 args: part.input
               },
-              thoughtSignature
+              thoughtSignature
             };
           }
         }
@@ -6980,7 +7104,9 @@ var googleGenerativeAIProviderOptions = lazySchema(
       responseModalities: z.array(z.enum(["TEXT", "IMAGE"])).optional(),
       thinkingConfig: z.object({
         thinkingBudget: z.number().optional(),
-        includeThoughts: z.boolean().optional()
+        includeThoughts: z.boolean().optional(),
+        // https://ai.google.dev/gemini-api/docs/gemini-3?thinking=high#thinking_level
+        thinkingLevel: z.enum(["low", "medium", "high"]).optional()
       }).optional(),
       /**
        * Optional.
@@ -7068,7 +7194,8 @@ var googleGenerativeAIProviderOptions = lazySchema(
         "9:16",
         "16:9",
         "21:9"
-      ]).optional()
+      ]).optional(),
+      imageSize: z.enum(["1K", "2K", "4K"]).optional()
     }).optional()
   })
 )
@@ -7081,8 +7208,14 @@ function prepareTools3({
   var _a15;
   tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
   const toolWarnings = [];
-  const
+  const isLatest = [
+    "gemini-flash-latest",
+    "gemini-flash-lite-latest",
+    "gemini-pro-latest"
+  ].some((id) => id === modelId);
+  const isGemini2orNewer = modelId.includes("gemini-2") || modelId.includes("gemini-3") || isLatest;
   const supportsDynamicRetrieval = modelId.includes("gemini-1.5-flash") && !modelId.includes("-8b");
+  const supportsFileSearch = modelId.includes("gemini-2.5");
   if (tools == null) {
     return { tools: void 0, toolConfig: void 0, toolWarnings };
   }
@@ -7091,10 +7224,11 @@ function prepareTools3({
     (tool2) => tool2.type === "provider-defined"
   );
   if (hasFunctionTools && hasProviderDefinedTools) {
+    const functionTools = tools.filter((tool2) => tool2.type === "function");
     toolWarnings.push({
       type: "unsupported-tool",
       tool: tools.find((tool2) => tool2.type === "function"),
-      details:
+      details: `Cannot mix function tools with provider-defined tools in the same request. Falling back to provider-defined tools only. The following function tools will be ignored: ${functionTools.map((t) => t.name).join(", ")}. Please use either function tools or provider-defined tools, but not both.`
     });
   }
   if (hasProviderDefinedTools) {
@@ -7105,7 +7239,7 @@ function prepareTools3({
     providerDefinedTools.forEach((tool2) => {
       switch (tool2.id) {
         case "google.google_search":
-          if (
+          if (isGemini2orNewer) {
            googleTools2.push({ googleSearch: {} });
          } else if (supportsDynamicRetrieval) {
            googleTools2.push({
@@ -7121,7 +7255,7 @@ function prepareTools3({
          }
          break;
        case "google.url_context":
-          if (
+          if (isGemini2orNewer) {
            googleTools2.push({ urlContext: {} });
          } else {
            toolWarnings.push({
@@ -7132,7 +7266,7 @@ function prepareTools3({
          }
          break;
        case "google.code_execution":
-          if (
+          if (isGemini2orNewer) {
            googleTools2.push({ codeExecution: {} });
          } else {
            toolWarnings.push({
@@ -7142,6 +7276,37 @@ function prepareTools3({
            });
          }
          break;
+        case "google.file_search":
+          if (supportsFileSearch) {
+            googleTools2.push({ fileSearch: { ...tool2.args } });
+          } else {
+            toolWarnings.push({
+              type: "unsupported-tool",
+              tool: tool2,
+              details: "The file search tool is only supported with Gemini 2.5 models."
+            });
+          }
+          break;
+        case "google.vertex_rag_store":
+          if (isGemini2orNewer) {
+            googleTools2.push({
+              retrieval: {
+                vertex_rag_store: {
+                  rag_resources: {
+                    rag_corpus: tool2.args.ragCorpus
+                  },
+                  similarity_top_k: tool2.args.topK
+                }
+              }
+            });
+          } else {
+            toolWarnings.push({
+              type: "unsupported-tool",
+              tool: tool2,
+              details: "The RAG store tool is not supported with other Gemini models than Gemini 2."
+            });
+          }
+          break;
        default:
          toolWarnings.push({ type: "unsupported-tool", tool: tool2 });
          break;
@@ -7269,17 +7434,19 @@ var GoogleGenerativeAILanguageModel = class {
     toolChoice,
     providerOptions
   }) {
-    var _a15
+    var _a15;
     const warnings = [];
     const googleOptions = await parseProviderOptions({
       provider: "google",
       providerOptions,
       schema: googleGenerativeAIProviderOptions
     });
-    if ((
+    if ((tools == null ? void 0 : tools.some(
+      (tool2) => tool2.type === "provider-defined" && tool2.id === "google.vertex_rag_store"
+    )) && !this.config.provider.startsWith("google.vertex.")) {
       warnings.push({
         type: "other",
-        message: `The '
+        message: `The 'vertex_rag_store' tool is only supported with the Google Vertex provider and might not be supported or could behave unexpectedly with the current Google provider (${this.config.provider}).`
       });
     }
     const isGemmaModel = this.modelId.toLowerCase().startsWith("gemma-");
@@ -7313,7 +7480,7 @@ var GoogleGenerativeAILanguageModel = class {
         responseSchema: (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && // Google GenAI does not support all OpenAPI Schema features,
         // so this is needed as an escape hatch:
         // TODO convert into provider option
-        ((
+        ((_a15 = googleOptions == null ? void 0 : googleOptions.structuredOutputs) != null ? _a15 : true) ? convertJSONSchemaToOpenAPISchema(responseFormat.schema) : void 0,
         ...(googleOptions == null ? void 0 : googleOptions.audioTimestamp) && {
           audioTimestamp: googleOptions.audioTimestamp
         },
@@ -7725,16 +7892,64 @@ function extractSources({
   groundingMetadata,
   generateId: generateId3
 }) {
-  var _a15;
-
-
-
-
-
-
-
-
-
+  var _a15, _b, _c;
+  if (!(groundingMetadata == null ? void 0 : groundingMetadata.groundingChunks)) {
+    return void 0;
+  }
+  const sources = [];
+  for (const chunk of groundingMetadata.groundingChunks) {
+    if (chunk.web != null) {
+      sources.push({
+        type: "source",
+        sourceType: "url",
+        id: generateId3(),
+        url: chunk.web.uri,
+        title: (_a15 = chunk.web.title) != null ? _a15 : void 0
+      });
+    } else if (chunk.retrievedContext != null) {
+      const uri = chunk.retrievedContext.uri;
+      if (uri.startsWith("http://") || uri.startsWith("https://")) {
+        sources.push({
+          type: "source",
+          sourceType: "url",
+          id: generateId3(),
+          url: uri,
+          title: (_b = chunk.retrievedContext.title) != null ? _b : void 0
+        });
+      } else {
+        const title = (_c = chunk.retrievedContext.title) != null ? _c : "Unknown Document";
+        let mediaType = "application/octet-stream";
+        let filename = void 0;
+        if (uri.endsWith(".pdf")) {
+          mediaType = "application/pdf";
+          filename = uri.split("/").pop();
+        } else if (uri.endsWith(".txt")) {
+          mediaType = "text/plain";
+          filename = uri.split("/").pop();
+        } else if (uri.endsWith(".docx")) {
+          mediaType = "application/vnd.openxmlformats-officedocument.wordprocessingml.document";
+          filename = uri.split("/").pop();
+        } else if (uri.endsWith(".doc")) {
+          mediaType = "application/msword";
+          filename = uri.split("/").pop();
+        } else if (uri.match(/\.(md|markdown)$/)) {
+          mediaType = "text/markdown";
+          filename = uri.split("/").pop();
+        } else {
+          filename = uri.split("/").pop();
+        }
+        sources.push({
+          type: "source",
+          sourceType: "document",
+          id: generateId3(),
+          mediaType,
+          title,
+          filename
+        });
+      }
+    }
+  }
+  return sources.length > 0 ? sources : void 0;
 }
 var getGroundingMetadataSchema = () => z.object({
   webSearchQueries: z.array(z.string()).nullish(),
@@ -7742,8 +7957,12 @@ var getGroundingMetadataSchema = () => z.object({
   searchEntryPoint: z.object({ renderedContent: z.string() }).nullish(),
   groundingChunks: z.array(
     z.object({
-      web: z.object({ uri: z.string(), title: z.string() }).nullish(),
-      retrievedContext: z.object({
+      web: z.object({ uri: z.string(), title: z.string().nullish() }).nullish(),
+      retrievedContext: z.object({
+        uri: z.string(),
+        title: z.string().nullish(),
+        text: z.string().nullish()
+      }).nullish()
     })
   ).nullish(),
   groundingSupports: z.array(
@@ -7813,7 +8032,9 @@ var usageSchema2 = z.object({
   thoughtsTokenCount: z.number().nullish(),
   promptTokenCount: z.number().nullish(),
   candidatesTokenCount: z.number().nullish(),
-  totalTokenCount: z.number().nullish()
+  totalTokenCount: z.number().nullish(),
+  // https://cloud.google.com/vertex-ai/generative-ai/docs/reference/rest/v1/GenerateContentResponse#TrafficType
+  trafficType: z.string().nullish()
 });
 var getUrlContextMetadataSchema = () => z.object({
   urlMetadata: z.array(
@@ -7875,6 +8096,30 @@ var codeExecution = createProviderDefinedToolFactoryWithOutputSchema({
     output: z.string().describe("The output from the code execution.")
   })
 });
+var fileSearchArgsBaseSchema = z.object({
+  /** The names of the file_search_stores to retrieve from.
+   * Example: `fileSearchStores/my-file-search-store-123`
+   */
+  fileSearchStoreNames: z.array(z.string()).describe(
+    "The names of the file_search_stores to retrieve from. Example: `fileSearchStores/my-file-search-store-123`"
+  ),
+  /** The number of file search retrieval chunks to retrieve. */
+  topK: z.number().int().positive().describe("The number of file search retrieval chunks to retrieve.").optional(),
+  /** Metadata filter to apply to the file search retrieval documents.
+   * See https://google.aip.dev/160 for the syntax of the filter expression.
+   */
+  metadataFilter: z.string().describe(
+    "Metadata filter to apply to the file search retrieval documents. See https://google.aip.dev/160 for the syntax of the filter expression."
+  ).optional()
+}).passthrough();
+var fileSearchArgsSchema = lazySchema(
+  () => zodSchema(fileSearchArgsBaseSchema)
+);
+var fileSearch = createProviderDefinedToolFactory({
+  id: "google.file_search",
+  name: "file_search",
+  inputSchema: fileSearchArgsSchema
+});
 var googleSearch = createProviderDefinedToolFactory({
   id: "google.google_search",
   name: "google_search",
@@ -7892,6 +8137,14 @@ var urlContext = createProviderDefinedToolFactory({
   name: "url_context",
   inputSchema: lazySchema(() => zodSchema(z.object({})))
 });
+var vertexRagStore = createProviderDefinedToolFactory({
+  id: "google.vertex_rag_store",
+  name: "vertex_rag_store",
+  inputSchema: z.object({
+    ragCorpus: z.string(),
+    topK: z.number().optional()
+  })
+});
 var googleTools = {
   /**
    * Creates a Google search tool that gives Google direct access to real-time web content.
@@ -7903,6 +8156,17 @@ var googleTools = {
    * Must have name "url_context".
    */
   urlContext,
+  /**
+   * Enables Retrieval Augmented Generation (RAG) via the Gemini File Search tool.
+   * Must have name "file_search".
+   *
+   * @param fileSearchStoreNames - Fully-qualified File Search store resource names.
+   * @param metadataFilter - Optional filter expression to restrict the files that can be retrieved.
+   * @param topK - Optional result limit for the number of chunks returned from File Search.
+   *
+   * @see https://ai.google.dev/gemini-api/docs/file-search
+   */
+  fileSearch,
   /**
    * A tool that enables the model to generate and run Python code.
    * Must have name "code_execution".
@@ -7913,7 +8177,12 @@ var googleTools = {
    * @see https://ai.google.dev/gemini-api/docs/code-execution (Google AI)
    * @see https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/code-execution-api (Vertex AI)
    */
-  codeExecution
+  codeExecution,
+  /**
+   * Creates a Vertex RAG Store tool that enables the model to perform RAG searches against a Vertex RAG Store.
+   * Must have name "vertex_rag_store".
+   */
+  vertexRagStore
 };
 var GoogleGenerativeAIImageModel = class {
   constructor(modelId, settings, config) {
@@ -8022,8 +8291,9 @@ var googleImageProviderOptionsSchema = lazySchema(
   )
 );
 function createGoogleGenerativeAI(options = {}) {
-  var _a15;
+  var _a15, _b;
   const baseURL = (_a15 = withoutTrailingSlash(options.baseURL)) != null ? _a15 : "https://generativelanguage.googleapis.com/v1beta";
+  const providerName = (_b = options.name) != null ? _b : "google.generative-ai";
   const getHeaders = () => withUserAgentSuffix(
     {
       "x-goog-api-key": loadApiKey({
@@ -8038,7 +8308,7 @@ function createGoogleGenerativeAI(options = {}) {
   const createChatModel = (modelId) => {
     var _a22;
     return new GoogleGenerativeAILanguageModel(modelId, {
-      provider:
+      provider: providerName,
       baseURL,
       headers: getHeaders,
       generateId: (_a22 = options.generateId) != null ? _a22 : generateId,
@@ -8058,13 +8328,13 @@ function createGoogleGenerativeAI(options = {}) {
     });
   };
   const createEmbeddingModel = (modelId) => new GoogleGenerativeAIEmbeddingModel(modelId, {
-    provider:
+    provider: providerName,
     baseURL,
     headers: getHeaders,
     fetch: options.fetch
   });
   const createImageModel = (modelId, settings = {}) => new GoogleGenerativeAIImageModel(modelId, settings, {
-    provider:
+    provider: providerName,
     baseURL,
     headers: getHeaders,
     fetch: options.fetch
@@ -8286,7 +8556,7 @@ function getResponseMetadata3({
   return {
     id: id != null ? id : void 0,
     modelId: model != null ? model : void 0,
-    timestamp: created
+    timestamp: created ? new Date(created * 1e3) : void 0
   };
 }
 function mapOpenAIFinishReason(finishReason) {
@@ -8470,7 +8740,7 @@ var openaiChatLanguageModelOptions = lazyValidator(
       /**
        * Reasoning effort for reasoning models. Defaults to `medium`.
        */
-      reasoningEffort: z.enum(["minimal", "low", "medium", "high"]).optional(),
+      reasoningEffort: z.enum(["none", "minimal", "low", "medium", "high"]).optional(),
       /**
        * Maximum number of completion tokens to generate. Useful for reasoning models.
        */
@@ -8520,6 +8790,15 @@ var openaiChatLanguageModelOptions = lazyValidator(
        * Useful for improving cache hit rates and working around automatic caching issues.
        */
       promptCacheKey: z.string().optional(),
+      /**
+       * The retention policy for the prompt cache.
+       * - 'in_memory': Default. Standard prompt caching behavior.
+       * - '24h': Extended prompt caching that keeps cached prefixes active for up to 24 hours.
+       * Currently only available for 5.1 series models.
+       *
+       * @default 'in_memory'
+       */
+      promptCacheRetention: z.enum(["in_memory", "24h"]).optional(),
       /**
        * A stable identifier used to help detect users of your application
        * that may be violating OpenAI's usage policies. The IDs should be a
@@ -8681,6 +8960,7 @@ var OpenAIChatLanguageModel = class {
       reasoning_effort: openaiOptions.reasoningEffort,
       service_tier: openaiOptions.serviceTier,
       prompt_cache_key: openaiOptions.promptCacheKey,
+      prompt_cache_retention: openaiOptions.promptCacheRetention,
       safety_identifier: openaiOptions.safetyIdentifier,
       // messages:
       messages
@@ -8896,7 +9176,7 @@ var OpenAIChatLanguageModel = class {
       outputTokens: void 0,
       totalTokens: void 0
     };
-    let
+    let metadataExtracted = false;
     let isActiveText = false;
     const providerMetadata = { openai: {} };
     return {
@@ -8921,12 +9201,15 @@ var OpenAIChatLanguageModel = class {
           controller.enqueue({ type: "error", error: value.error });
           return;
         }
-        if (
-
-
-
-
-
+        if (!metadataExtracted) {
+          const metadata = getResponseMetadata3(value);
+          if (Object.values(metadata).some(Boolean)) {
+            metadataExtracted = true;
+            controller.enqueue({
+              type: "response-metadata",
+              ...getResponseMetadata3(value)
+            });
+          }
        }
        if (value.usage != null) {
          usage.inputTokens = (_a15 = value.usage.prompt_tokens) != null ? _a15 : void 0;
@@ -9098,18 +9381,6 @@ function getSystemMessageMode(modelId) {
|
|
|
9098
9381
|
return (_b = (_a15 = reasoningModels[modelId]) == null ? void 0 : _a15.systemMessageMode) != null ? _b : "developer";
|
|
9099
9382
|
}
|
|
9100
9383
|
var reasoningModels = {
|
|
9101
|
-
"o1-mini": {
|
|
9102
|
-
systemMessageMode: "remove"
|
|
9103
|
-
},
|
|
9104
|
-
"o1-mini-2024-09-12": {
|
|
9105
|
-
systemMessageMode: "remove"
|
|
9106
|
-
},
|
|
9107
|
-
"o1-preview": {
|
|
9108
|
-
systemMessageMode: "remove"
|
|
9109
|
-
},
|
|
9110
|
-
"o1-preview-2024-09-12": {
|
|
9111
|
-
systemMessageMode: "remove"
|
|
9112
|
-
},
|
|
9113
9384
|
o3: {
|
|
9114
9385
|
systemMessageMode: "developer"
|
|
9115
9386
|
},
|
|
@@ -9645,7 +9916,7 @@ var openaiImageResponseSchema = lazyValidator(
|
|
|
9645
9916
|
data: z.array(
|
|
9646
9917
|
z.object({
|
|
9647
9918
|
b64_json: z.string(),
|
|
9648
|
-
revised_prompt: z.string().
|
|
9919
|
+
revised_prompt: z.string().nullish()
|
|
9649
9920
|
})
|
|
9650
9921
|
)
|
|
9651
9922
|
})
|
|
@@ -9790,7 +10061,7 @@ var compoundFilterSchema = z.object({
|
|
|
9790
10061
|
z.union([comparisonFilterSchema, z.lazy(() => compoundFilterSchema)])
|
|
9791
10062
|
)
|
|
9792
10063
|
});
|
|
9793
|
-
var
|
|
10064
|
+
var fileSearchArgsSchema2 = lazySchema(
|
|
9794
10065
|
() => zodSchema(
|
|
9795
10066
|
z.object({
|
|
9796
10067
|
vectorStoreIds: z.array(z.string()),
|
|
@@ -9819,7 +10090,7 @@ var fileSearchOutputSchema = lazySchema(
|
|
|
9819
10090
|
})
|
|
9820
10091
|
)
|
|
9821
10092
|
);
|
|
9822
|
-
var
|
|
10093
|
+
var fileSearch2 = createProviderDefinedToolFactoryWithOutputSchema({
|
|
9823
10094
|
id: "openai.file_search",
|
|
9824
10095
|
name: "file_search",
|
|
9825
10096
|
inputSchema: z.object({}),
|
|
@@ -9913,7 +10184,13 @@ var webSearchOutputSchema = lazySchema(
|
|
|
9913
10184
|
url: z.string(),
|
|
9914
10185
|
pattern: z.string()
|
|
9915
10186
|
})
|
|
9916
|
-
])
|
|
10187
|
+
]),
|
|
10188
|
+
sources: z.array(
|
|
10189
|
+
z.discriminatedUnion("type", [
|
|
10190
|
+
z.object({ type: z.literal("url"), url: z.string() }),
|
|
10191
|
+
z.object({ type: z.literal("api"), name: z.string() })
|
|
10192
|
+
])
|
|
10193
|
+
).optional()
|
|
9917
10194
|
})
|
|
9918
10195
|
)
|
|
9919
10196
|
);
|
|
@@ -9991,7 +10268,7 @@ var openaiTools = {
|
|
|
9991
10268
|
* @param ranking - The ranking options to use for the file search.
|
|
9992
10269
|
* @param filters - The filters to use for the file search.
|
|
9993
10270
|
*/
|
|
9994
|
-
fileSearch,
|
|
10271
|
+
fileSearch: fileSearch2,
|
|
9995
10272
|
/**
|
|
9996
10273
|
* The image generation tool allows you to generate images using a text prompt,
|
|
9997
10274
|
* and optionally image inputs. It leverages the GPT Image model,
|
|
@@ -10471,7 +10748,13 @@ var openaiResponsesChunkSchema = lazyValidator(
|
|
|
10471
10748
|
action: z.discriminatedUnion("type", [
|
|
10472
10749
|
z.object({
|
|
10473
10750
|
type: z.literal("search"),
|
|
10474
|
-
query: z.string().nullish()
|
|
10751
|
+
query: z.string().nullish(),
|
|
10752
|
+
sources: z.array(
|
|
10753
|
+
z.discriminatedUnion("type", [
|
|
10754
|
+
z.object({ type: z.literal("url"), url: z.string() }),
|
|
10755
|
+
z.object({ type: z.literal("api"), name: z.string() })
|
|
10756
|
+
])
|
|
10757
|
+
).nullish()
|
|
10475
10758
|
}),
|
|
10476
10759
|
z.object({
|
|
10477
10760
|
type: z.literal("open_page"),
|
|
@@ -10579,10 +10862,13 @@ var openaiResponsesChunkSchema = lazyValidator(
|
|
|
10579
10862
|
}),
|
|
10580
10863
|
z.object({
|
|
10581
10864
|
type: z.literal("error"),
|
|
10582
|
-
|
|
10583
|
-
|
|
10584
|
-
|
|
10585
|
-
|
|
10865
|
+
sequence_number: z.number(),
|
|
10866
|
+
error: z.object({
|
|
10867
|
+
type: z.string(),
|
|
10868
|
+
code: z.string(),
|
|
10869
|
+
message: z.string(),
|
|
10870
|
+
param: z.string().nullish()
|
|
10871
|
+
})
|
|
10586
10872
|
}),
|
|
10587
10873
|
z.object({ type: z.string() }).loose().transform((value) => ({
|
|
10588
10874
|
type: "unknown_chunk",
|
|
@@ -10595,13 +10881,15 @@ var openaiResponsesChunkSchema = lazyValidator(
|
|
|
10595
10881
|
var openaiResponsesResponseSchema = lazyValidator(
|
|
10596
10882
|
() => zodSchema(
|
|
10597
10883
|
z.object({
|
|
10598
|
-
id: z.string(),
|
|
10599
|
-
created_at: z.number(),
|
|
10884
|
+
id: z.string().optional(),
|
|
10885
|
+
created_at: z.number().optional(),
|
|
10600
10886
|
error: z.object({
|
|
10601
|
-
|
|
10602
|
-
|
|
10887
|
+
message: z.string(),
|
|
10888
|
+
type: z.string(),
|
|
10889
|
+
param: z.string().nullish(),
|
|
10890
|
+
code: z.string()
|
|
10603
10891
|
}).nullish(),
|
|
10604
|
-
model: z.string(),
|
|
10892
|
+
model: z.string().optional(),
|
|
10605
10893
|
output: z.array(
|
|
10606
10894
|
z.discriminatedUnion("type", [
|
|
10607
10895
|
z.object({
|
|
@@ -10643,7 +10931,18 @@ var openaiResponsesResponseSchema = lazyValidator(
               quote: z.string().nullish()
             }),
             z.object({
-              type: z.literal("container_file_citation")
+              type: z.literal("container_file_citation"),
+              container_id: z.string(),
+              file_id: z.string(),
+              filename: z.string().nullish(),
+              start_index: z.number().nullish(),
+              end_index: z.number().nullish(),
+              index: z.number().nullish()
+            }),
+            z.object({
+              type: z.literal("file_path"),
+              file_id: z.string(),
+              index: z.number().nullish()
             })
           ])
         )
@@ -10657,7 +10956,13 @@ var openaiResponsesResponseSchema = lazyValidator(
         action: z.discriminatedUnion("type", [
           z.object({
             type: z.literal("search"),
-            query: z.string().nullish()
+            query: z.string().nullish(),
+            sources: z.array(
+              z.discriminatedUnion("type", [
+                z.object({ type: z.literal("url"), url: z.string() }),
+                z.object({ type: z.literal("api"), name: z.string() })
+              ])
+            ).nullish()
           }),
           z.object({
             type: z.literal("open_page"),
@@ -10676,7 +10981,10 @@ var openaiResponsesResponseSchema = lazyValidator(
           queries: z.array(z.string()),
           results: z.array(
             z.object({
-              attributes: z.record(
+              attributes: z.record(
+                z.string(),
+                z.union([z.string(), z.number(), z.boolean()])
+              ),
               file_id: z.string(),
               filename: z.string(),
               score: z.number(),
@@ -10738,7 +11046,7 @@ var openaiResponsesResponseSchema = lazyValidator(
           )
         })
       ])
-      ),
+      ).optional(),
       service_tier: z.string().nullish(),
       incomplete_details: z.object({ reason: z.string() }).nullish(),
       usage: z.object({
@@ -10746,7 +11054,7 @@ var openaiResponsesResponseSchema = lazyValidator(
         input_tokens_details: z.object({ cached_tokens: z.number().nullish() }).nullish(),
         output_tokens: z.number(),
         output_tokens_details: z.object({ reasoning_tokens: z.number().nullish() }).nullish()
-      })
+      }).optional()
     })
   )
 );
@@ -10754,6 +11062,7 @@ var TOP_LOGPROBS_MAX = 20;
 var openaiResponsesProviderOptionsSchema = lazyValidator(
   () => zodSchema(
     z.object({
+      conversation: z.string().nullish(),
       include: z.array(
         z.enum([
           "reasoning.encrypted_content",
@@ -10786,6 +11095,15 @@ var openaiResponsesProviderOptionsSchema = lazyValidator(
       parallelToolCalls: z.boolean().nullish(),
       previousResponseId: z.string().nullish(),
       promptCacheKey: z.string().nullish(),
+      /**
+       * The retention policy for the prompt cache.
+       * - 'in_memory': Default. Standard prompt caching behavior.
+       * - '24h': Extended prompt caching that keeps cached prefixes active for up to 24 hours.
+       * Currently only available for 5.1 series models.
+       *
+       * @default 'in_memory'
+       */
+      promptCacheRetention: z.enum(["in_memory", "24h"]).nullish(),
       reasoningEffort: z.string().nullish(),
       reasoningSummary: z.string().nullish(),
       safetyIdentifier: z.string().nullish(),
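
The new `promptCacheRetention` option documented above is forwarded to the Responses API as `prompt_cache_retention` (see the hunk at -11063 below). A minimal usage sketch, assuming the option is passed through the AI SDK's `providerOptions` convention; the model id and prompt are placeholders, and `@ai-sdk/openai` stands in for the copy bundled in this chunk:

    import { openai } from "@ai-sdk/openai";
    import { generateText } from "ai";

    // Sketch only: the option names come from the schema above; everything else is illustrative.
    const { text } = await generateText({
      model: openai.responses("gpt-5.1"),
      prompt: "Summarize the design doc.",
      providerOptions: {
        openai: {
          promptCacheKey: "design-doc-summaries",
          // "24h" keeps cached prefixes active for up to 24 hours; default is "in_memory".
          promptCacheRetention: "24h",
        },
      },
    });
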
@@ -10825,7 +11143,7 @@ async function prepareResponsesTools({
     case "openai.file_search": {
       const args = await validateTypes({
         value: tool2.args,
-        schema:
+        schema: fileSearchArgsSchema2
       });
       openaiTools2.push({
         type: "file_search",
@@ -10991,6 +11309,13 @@ var OpenAIResponsesLanguageModel = class {
       providerOptions,
       schema: openaiResponsesProviderOptionsSchema
     });
+    if ((openaiOptions == null ? void 0 : openaiOptions.conversation) && (openaiOptions == null ? void 0 : openaiOptions.previousResponseId)) {
+      warnings.push({
+        type: "unsupported-setting",
+        setting: "conversation",
+        details: "conversation and previousResponseId cannot be used together"
+      });
+    }
     const { input, warnings: inputWarnings } = await convertToOpenAIResponsesInput({
       prompt,
       systemMessageMode: modelConfig.systemMessageMode,
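
The guard added above is easier to read without the transpiled `== null ? void 0 :` form: when both `conversation` and `previousResponseId` are set, the model pushes an `unsupported-setting` warning rather than throwing. A de-minified sketch of the same check; the wrapper function name and types are hypothetical:

    type CallWarning = { type: "unsupported-setting"; setting: string; details: string };

    // Hypothetical helper mirroring the added guard: warn, do not throw.
    function warnOnConflictingConversationOptions(
      openaiOptions: { conversation?: string | null; previousResponseId?: string | null } | undefined,
      warnings: CallWarning[],
    ): void {
      if (openaiOptions?.conversation && openaiOptions?.previousResponseId) {
        warnings.push({
          type: "unsupported-setting",
          setting: "conversation",
          details: "conversation and previousResponseId cannot be used together",
        });
      }
    }
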
@@ -11053,6 +11378,7 @@ var OpenAIResponsesLanguageModel = class {
         }
       },
       // provider options:
+      conversation: openaiOptions == null ? void 0 : openaiOptions.conversation,
       max_tool_calls: openaiOptions == null ? void 0 : openaiOptions.maxToolCalls,
       metadata: openaiOptions == null ? void 0 : openaiOptions.metadata,
       parallel_tool_calls: openaiOptions == null ? void 0 : openaiOptions.parallelToolCalls,
@@ -11063,6 +11389,7 @@ var OpenAIResponsesLanguageModel = class {
       service_tier: openaiOptions == null ? void 0 : openaiOptions.serviceTier,
       include,
       prompt_cache_key: openaiOptions == null ? void 0 : openaiOptions.promptCacheKey,
+      prompt_cache_retention: openaiOptions == null ? void 0 : openaiOptions.promptCacheRetention,
       safety_identifier: openaiOptions == null ? void 0 : openaiOptions.safetyIdentifier,
       top_logprobs: topLogprobs,
       truncation: openaiOptions == null ? void 0 : openaiOptions.truncation,
@@ -11272,7 +11599,14 @@ var OpenAIResponsesLanguageModel = class {
             id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : generateId(),
             mediaType: "text/plain",
             title: (_k = (_j = annotation.quote) != null ? _j : annotation.filename) != null ? _k : "Document",
-            filename: (_l = annotation.filename) != null ? _l : annotation.file_id
+            filename: (_l = annotation.filename) != null ? _l : annotation.file_id,
+            ...annotation.file_id ? {
+              providerMetadata: {
+                openai: {
+                  fileId: annotation.file_id
+                }
+              }
+            } : {}
           });
         }
       }
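
The conditional spread added above only attaches `providerMetadata.openai.fileId` when the file-citation annotation actually carries a `file_id`. The same pattern written with optional chaining, as a hypothetical helper with simplified types:

    type DocumentSource = {
      id: string;
      mediaType: string;
      title: string;
      filename?: string;
      providerMetadata?: { openai: { fileId: string } };
    };

    // Hypothetical helper showing the conditional-spread pattern from the hunk above.
    function toDocumentSource(
      annotation: { quote?: string | null; filename?: string | null; file_id?: string | null },
      id: string,
    ): DocumentSource {
      return {
        id,
        mediaType: "text/plain",
        title: annotation.quote ?? annotation.filename ?? "Document",
        filename: annotation.filename ?? annotation.file_id ?? undefined,
        // Only present when a file id exists on the annotation.
        ...(annotation.file_id
          ? { providerMetadata: { openai: { fileId: annotation.file_id } } }
          : {}),
      };
    }
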
@@ -11382,7 +11716,9 @@ var OpenAIResponsesLanguageModel = class {
       }
     }
     const providerMetadata = {
-      openai: {
+      openai: {
+        ...response.id != null ? { responseId: response.id } : {}
+      }
     };
     if (logprobs.length > 0) {
       providerMetadata.openai.logprobs = logprobs;
@@ -11390,6 +11726,7 @@ var OpenAIResponsesLanguageModel = class {
     if (typeof response.service_tier === "string") {
       providerMetadata.openai.serviceTier = response.service_tier;
     }
+    const usage = response.usage;
     return {
       content,
       finishReason: mapOpenAIResponseFinishReason({
@@ -11397,11 +11734,11 @@ var OpenAIResponsesLanguageModel = class {
         hasFunctionCall
       }),
       usage: {
-        inputTokens:
-        outputTokens:
-        totalTokens:
-        reasoningTokens: (_q = (_p =
-        cachedInputTokens: (_s = (_r =
+        inputTokens: usage.input_tokens,
+        outputTokens: usage.output_tokens,
+        totalTokens: usage.input_tokens + usage.output_tokens,
+        reasoningTokens: (_q = (_p = usage.output_tokens_details) == null ? void 0 : _p.reasoning_tokens) != null ? _q : void 0,
+        cachedInputTokens: (_s = (_r = usage.input_tokens_details) == null ? void 0 : _r.cached_tokens) != null ? _s : void 0
       },
       request: { body },
       response: {
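
The rewritten usage block reads token counts from the `usage` value captured two hunks above (`const usage = response.usage`); the `_p`/`_q`/`_r`/`_s` temporaries are just transpiled optional chaining with an `undefined` fallback. A de-minified sketch of the same mapping, assuming the usage shape from the response schema; the function name is made up:

    type ResponsesUsage = {
      input_tokens: number;
      input_tokens_details?: { cached_tokens?: number | null } | null;
      output_tokens: number;
      output_tokens_details?: { reasoning_tokens?: number | null } | null;
    };

    // De-minified equivalent of the usage mapping in the hunk above.
    function mapUsage(usage: ResponsesUsage) {
      return {
        inputTokens: usage.input_tokens,
        outputTokens: usage.output_tokens,
        totalTokens: usage.input_tokens + usage.output_tokens,
        reasoningTokens: usage.output_tokens_details?.reasoning_tokens ?? undefined,
        cachedInputTokens: usage.input_tokens_details?.cached_tokens ?? undefined,
      };
    }
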
@@ -11850,7 +12187,14 @@ var OpenAIResponsesLanguageModel = class {
                 id: (_s = (_r = (_q = self.config).generateId) == null ? void 0 : _r.call(_q)) != null ? _s : generateId(),
                 mediaType: "text/plain",
                 title: (_u = (_t = value.annotation.quote) != null ? _t : value.annotation.filename) != null ? _u : "Document",
-                filename: (_v = value.annotation.filename) != null ? _v : value.annotation.file_id
+                filename: (_v = value.annotation.filename) != null ? _v : value.annotation.file_id,
+                ...value.annotation.file_id ? {
+                  providerMetadata: {
+                    openai: {
+                      fileId: value.annotation.file_id
+                    }
+                  }
+                } : {}
               });
             }
           } else if (isErrorChunk(value)) {
@@ -11928,13 +12272,6 @@ function getResponsesModelConfig(modelId) {
     };
   }
   if (modelId.startsWith("o") || modelId.startsWith("gpt-5") || modelId.startsWith("codex-") || modelId.startsWith("computer-use")) {
-    if (modelId.startsWith("o1-mini") || modelId.startsWith("o1-preview")) {
-      return {
-        ...defaults,
-        isReasoningModel: true,
-        systemMessageMode: "remove"
-      };
-    }
     return {
       ...defaults,
       isReasoningModel: true,
@@ -11950,7 +12287,11 @@ function mapWebSearchOutput(action) {
   var _a15;
   switch (action.type) {
     case "search":
-      return {
+      return {
+        action: { type: "search", query: (_a15 = action.query) != null ? _a15 : void 0 },
+        // include sources when provided by the Responses API (behind include flag)
+        ...action.sources != null && { sources: action.sources }
+      };
     case "open_page":
       return { action: { type: "openPage", url: action.url } };
     case "find":
@@ -12289,7 +12630,7 @@ var OpenAITranscriptionModel = class {
     };
   }
 };
-var VERSION5 = "2.0.
+var VERSION5 = "2.0.69" ;
 function createOpenAI(options = {}) {
   var _a15, _b;
   const baseURL = (_a15 = withoutTrailingSlash(
@@ -12386,6 +12727,6 @@ function createOpenAI(options = {}) {
 }
 createOpenAI();

-export {
-//# sourceMappingURL=chunk-
-//# sourceMappingURL=chunk-
+export { MastraModelGateway, NoSuchModelError, OpenAICompatibleImageModel, TooManyEmbeddingValuesForCallError, UnsupportedFunctionalityError, combineHeaders, convertToBase64, createAnthropic, createEventSourceResponseHandler, createGoogleGenerativeAI, createJsonErrorResponseHandler, createJsonResponseHandler, createOpenAI, createOpenAICompatible, generateId, injectJsonInstructionIntoMessages, loadApiKey, parseProviderOptions, postJsonToApi, withUserAgentSuffix, withoutTrailingSlash };
+//# sourceMappingURL=chunk-7ZADRRDW.js.map
+//# sourceMappingURL=chunk-7ZADRRDW.js.map