promptfoo 0.121.2 → 0.121.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (315)
  1. package/README.md +2 -0
  2. package/dist/src/{accounts-CiBLOnA7.js → accounts-B2XmGjty.js} +5 -5
  3. package/dist/src/{accounts-gtkH-5KX.cjs → accounts-BPyfpSeU.cjs} +5 -5
  4. package/dist/src/{accounts-Bm2D8Db9.js → accounts-CFLK3mnD.js} +6 -6
  5. package/dist/src/{accounts-B0pgC1oV.js → accounts-Xatc0RYb.js} +5 -5
  6. package/dist/src/{agentic-utils-DS1g3GLF.js → agentic-utils-36epdqwB.js} +3 -3
  7. package/dist/src/{cometapi-CUQq3H_a.js → agentic-utils-D8yXo5Lm.js} +4 -61
  8. package/dist/src/{cometapi-C4xSqeID.cjs → agentic-utils-DAVsChuB.cjs} +24 -62
  9. package/dist/src/agentic-utils-DIYAAYE7.js +153 -0
  10. package/dist/src/{agents-CBr9A01V.js → agents-BBVJCIYr.js} +226 -13
  11. package/dist/src/{agents-Di9DKPzn.cjs → agents-BBWxKSM0.cjs} +7 -7
  12. package/dist/src/{agents-DgF2zDag.js → agents-Bqgfdokm.js} +228 -13
  13. package/dist/src/{agents-DbRtpYxR.cjs → agents-CAYbM7qD.cjs} +226 -13
  14. package/dist/src/{agents-9qiOy0ho.js → agents-CLQ-P15P.js} +7 -7
  15. package/dist/src/{agents-cLXA8a_8.js → agents-CgBniSlI.js} +8 -8
  16. package/dist/src/{agents-D__IdAlg.js → agents-DSSTV4bv.js} +226 -15
  17. package/dist/src/{agents-CmvBq8LV.js → agents-wg3ohknq.js} +7 -7
  18. package/dist/src/{aimlapi-BvlNH0gr.cjs → aimlapi-Bv8Fmc-b.cjs} +14 -14
  19. package/dist/src/{aimlapi-DHJU_kcV.js → aimlapi-BwGC1TtS.js} +13 -13
  20. package/dist/src/{aimlapi-CnkC2HqE.js → aimlapi-DaC3qZ-o.js} +14 -14
  21. package/dist/src/{aimlapi-B4rcnZgv.js → aimlapi-MgSLdvy7.js} +13 -13
  22. package/dist/src/app/assets/index-B6l9CVVb.js +439 -0
  23. package/dist/src/app/assets/index-DyZ0Ep37.css +1 -0
  24. package/dist/src/app/assets/sync-CStkzc6u.js +4 -0
  25. package/dist/src/app/assets/vendor-markdown-Bz7N-ca6.js +29 -0
  26. package/dist/src/app/index.html +3 -3
  27. package/dist/src/{audio-Bkv46et0.js → audio-Bn44pQxv.js} +4 -4
  28. package/dist/src/{audio-ClI_AFre.js → audio-DDA5WHdx.js} +4 -4
  29. package/dist/src/{audio-CGMyULza.cjs → audio-DVFjQ67_.cjs} +4 -4
  30. package/dist/src/{audio-Dz3z7s3J.js → audio-DjU9GswO.js} +5 -5
  31. package/dist/src/{base-CGrhspbK.cjs → base-BboXIF_0.cjs} +3 -3
  32. package/dist/src/{base-Dy1V8--Z.js → base-CKjwebIH.js} +3 -3
  33. package/dist/src/{base-DLKtKMFh.js → base-CqzQ4K8j.js} +3 -3
  34. package/dist/src/{base-CpjcHe4e.js → base-Cz2ZC_iA.js} +3 -3
  35. package/dist/src/{blobs-CMHN0Qcz.js → blobs-B1JriOyi.js} +3 -3
  36. package/dist/src/{blobs-BDbfYdrJ.js → blobs-BUWmKWzo.js} +3 -3
  37. package/dist/src/{blobs-D23XLin-.cjs → blobs-C6j0bvFz.cjs} +3 -3
  38. package/dist/src/{blobs-CBO20krR.js → blobs-DXTl6J3H.js} +3 -3
  39. package/dist/src/{cache-Dh5WtQps.cjs → cache-C5yFZ4gC.cjs} +3 -3
  40. package/dist/src/{cache-C4Nxf52C.js → cache-CaT5tPgo.js} +3 -3
  41. package/dist/src/cache-CyCanoMu.js +6 -0
  42. package/dist/src/{cache-BVeDlD87.js → cache-DSqR6ezl.js} +3 -3
  43. package/dist/src/cache-Df_QFDNu.cjs +5 -0
  44. package/dist/src/{cache-i1P6crbO.js → cache-HP0NP4k3.js} +3 -3
  45. package/dist/src/{chat-CzkrVDfz.js → chat-B-52XYI1.js} +12 -12
  46. package/dist/src/{chat-DJIw17u0.js → chat-B0iaWhoh.js} +14 -14
  47. package/dist/src/{chat-qmatte1u.js → chat-BE0qTA8e.js} +13 -13
  48. package/dist/src/{chat-BiKyneZl.js → chat-BEwdgGEg.js} +14 -14
  49. package/dist/src/{chat-C1Qst7jL.cjs → chat-BtIKkLKx.cjs} +13 -13
  50. package/dist/src/{chat-CgF-J-Jj.cjs → chat-CM8qWR3_.cjs} +15 -15
  51. package/dist/src/{chat-C2jrdPMx.js → chat-DK1U-eZ-.js} +12 -12
  52. package/dist/src/{chat-DqxYYtWA.js → chat-pxmiVpWe.js} +14 -14
  53. package/dist/src/{chatkit-65VXf5SR.js → chatkit-BYGQlHlV.js} +4 -4
  54. package/dist/src/{chatkit-DKyPi1Gs.cjs → chatkit-Cx174XI3.cjs} +4 -4
  55. package/dist/src/{chatkit-BxFvW8KY.js → chatkit-_8eJqKcD.js} +4 -4
  56. package/dist/src/{chatkit-Be-Q-a9F.js → chatkit-a2D6mY6s.js} +4 -4
  57. package/dist/src/{claude-agent-sdk-D9Z5Pr9X.cjs → claude-agent-sdk-8ddRp1L2.cjs} +35 -17
  58. package/dist/src/{claude-agent-sdk-DfCoW0E6.js → claude-agent-sdk-Bq5EArsX.js} +33 -15
  59. package/dist/src/{claude-agent-sdk-Apiy0iaz.js → claude-agent-sdk-CMjh4LFH.js} +33 -15
  60. package/dist/src/{claude-agent-sdk-D2bJee9S.js → claude-agent-sdk-HgbFioFw.js} +33 -15
  61. package/dist/src/cloud-DE3t1-ZI.js +4 -0
  62. package/dist/src/{cloud-C0dlstV_.js → cloud-z8KZpUoa.js} +3 -3
  63. package/dist/src/{cloudflare-ai-g7PB6VHR.js → cloudflare-ai-BGyXlpXJ.js} +13 -13
  64. package/dist/src/{cloudflare-ai-8TDxHR0x.js → cloudflare-ai-Bbp26N0L.js} +13 -13
  65. package/dist/src/{cloudflare-ai-CknbZ5LJ.cjs → cloudflare-ai-C62x6MQG.cjs} +14 -14
  66. package/dist/src/{cloudflare-ai-BxAGvfju.js → cloudflare-ai-DdKP9TKT.js} +14 -14
  67. package/dist/src/{cloudflare-gateway-CP9QEWYS.js → cloudflare-gateway-BwAaUgeW.js} +14 -14
  68. package/dist/src/{cloudflare-gateway-B9HWA5wf.js → cloudflare-gateway-D-e9i1Sn.js} +15 -15
  69. package/dist/src/{cloudflare-gateway-CKDb4dJ8.js → cloudflare-gateway-DXhtXDRb.js} +15 -163
  70. package/dist/src/{cloudflare-gateway-BSnDmHYo.cjs → cloudflare-gateway-Dx36ftqF.cjs} +15 -15
  71. package/dist/src/{codex-sdk-DUwKWezN.js → codex-sdk-BQEw16R_.js} +180 -11
  72. package/dist/src/{codex-sdk-C6UMlxwV.js → codex-sdk-C_07GuVS.js} +180 -11
  73. package/dist/src/{codex-sdk-GGAw0qbD.js → codex-sdk-DE5G18dx.js} +180 -11
  74. package/dist/src/{codex-sdk-fAO0c3yA.cjs → codex-sdk-ZLKfDjqP.cjs} +181 -12
  75. package/dist/src/cometapi-BDyV-NNm.js +62 -0
  76. package/dist/src/cometapi-C3hOlM7-.cjs +62 -0
  77. package/dist/src/{cometapi-BL9yvj_f.js → cometapi-hhL4TAh3.js} +14 -14
  78. package/dist/src/{cometapi-DFNiKmSz.js → cometapi-sp7sJpBD.js} +15 -15
  79. package/dist/src/{completion-5MzrpJxT.js → completion-BCimtq-h.js} +6 -6
  80. package/dist/src/{completion-qRoZAYRB.js → completion-DCjv7RZ3.js} +6 -6
  81. package/dist/src/{completion-CM6oK8PS.cjs → completion-DlXUhj5c.cjs} +6 -6
  82. package/dist/src/{completion-DZ083F31.js → completion-DoYy49ti.js} +6 -6
  83. package/dist/src/{createHash-CfZSc0b4.cjs → createHash-BYwImsYv.cjs} +2 -2
  84. package/dist/src/{docker-DcF2pRrj.cjs → docker-Cqj2-QVi.cjs} +14 -14
  85. package/dist/src/{docker-Bb5dcxr8.js → docker-CxCkwMzc.js} +13 -13
  86. package/dist/src/{docker-BvfL2BrW.js → docker-DpguQj-w.js} +14 -14
  87. package/dist/src/{docker-ExVyLp0S.js → docker-FeBni2dw.js} +13 -13
  88. package/dist/src/{esm-C03C-mv3.js → esm-7UIl0pPM.js} +2 -2
  89. package/dist/src/{esm-Cd1AjG1D.js → esm-CKWP3u_P.js} +3 -3
  90. package/dist/src/{esm-CnNt7sI4.cjs → esm-CipptfDu.cjs} +2 -2
  91. package/dist/src/{esm-CaIwzWR5.js → esm-SUNIX1x3.js} +3 -3
  92. package/dist/src/eval-7aEqoMs3.js +15 -0
  93. package/dist/src/{eval-Dg2nG4v2.js → eval-BTqTn7lb.js} +10 -10
  94. package/dist/src/{evalResult-BDMqrapS.js → evalResult-BkIhRdTe.js} +7 -7
  95. package/dist/src/evalResult-CYNHkk5A.js +12 -0
  96. package/dist/src/evalResult-CuvJeNiM.js +10 -0
  97. package/dist/src/{evalResult-BBRNtX4I.js → evalResult-DUDShQrm.js} +7 -7
  98. package/dist/src/{evalResult-fuaI8HkH.cjs → evalResult-DpARzUCb.cjs} +7 -7
  99. package/dist/src/evalResult-tGdilrWt.cjs +10 -0
  100. package/dist/src/evaluator-BBUqRhz1.js +36 -0
  101. package/dist/src/{evaluator-BhoWwp5b.js → evaluator-BcvOGaam.js} +823 -73
  102. package/dist/src/{extractor-D25qpmGX.js → extractor-C8XwivI9.js} +6 -6
  103. package/dist/src/{extractor-DReVID0K.js → extractor-CAZ2G3Kh.js} +6 -6
  104. package/dist/src/{extractor-pYLLi3wS.cjs → extractor-DG3sSfXE.cjs} +6 -6
  105. package/dist/src/{extractor-C0EVHewb.js → extractor-D_wd8jxt.js} +6 -6
  106. package/dist/src/{fetch-HaqdX7U1.js → fetch-BiYv2BZc.js} +3 -3
  107. package/dist/src/{fetch-BPkYtG8K.cjs → fetch-BnR9wSnm.cjs} +3 -3
  108. package/dist/src/{fetch-Cwxnd8zz.js → fetch-CVAtKnI3.js} +3 -3
  109. package/dist/src/{fetch-Dxpd4_sr.js → fetch-DoVRJZhJ.js} +4 -4
  110. package/dist/src/fetch-UWU706qb.js +5 -0
  111. package/dist/src/{genaiTracer-DN4dQywX.cjs → genaiTracer-BfxrvSUb.cjs} +2 -2
  112. package/dist/src/{graders-DU49_J8Y.cjs → graders-BElhu9ZY.cjs} +126 -55
  113. package/dist/src/{graders-DP7KFFo-.js → graders-BXAJ0sbS.js} +120 -55
  114. package/dist/src/graders-BxfEguVY.js +32 -0
  115. package/dist/src/graders-CzVMbEnv.js +34 -0
  116. package/dist/src/{graders-BTeBGqjJ.js → graders-DG7mhg-b.js} +120 -55
  117. package/dist/src/graders-DjCXfj0l.cjs +32 -0
  118. package/dist/src/{graders-Bj_Odv7c.js → graders-RjHF8VfG.js} +120 -55
  119. package/dist/src/graders-kHzIWOKu.js +32 -0
  120. package/dist/src/{image-BLmROtN3.cjs → image--F58eEIn.cjs} +6 -6
  121. package/dist/src/{image-B0h9VEMc.js → image-6WQXK8m8.js} +4 -4
  122. package/dist/src/{image-Dpxa1Jt6.js → image-B8b6f36E.js} +6 -6
  123. package/dist/src/{image-CHfWvljl.js → image-CoxZp9PZ.js} +6 -6
  124. package/dist/src/{image-B02ogr_b.js → image-DO0RYnjH.js} +5 -5
  125. package/dist/src/{image-DS-o-0ph.js → image-PoF6DN3x.js} +6 -6
  126. package/dist/src/{image-C1madmKh.cjs → image-fza3zuKs.cjs} +4 -4
  127. package/dist/src/{image-Bb4vWQLM.js → image-xNbw5ph2.js} +4 -4
  128. package/dist/src/index.cjs +853 -104
  129. package/dist/src/index.d.cts +573 -60
  130. package/dist/src/index.d.ts +573 -60
  131. package/dist/src/index.js +850 -102
  132. package/dist/src/{interactiveCheck-BgLZUIt3.js → interactiveCheck-BnMYOjMu.js} +2 -2
  133. package/dist/src/{knowledgeBase-B3OoKIej.js → knowledgeBase-Bi7CmDbx.js} +7 -7
  134. package/dist/src/{knowledgeBase-CYTLHOt1.js → knowledgeBase-Ce3ofVan.js} +8 -8
  135. package/dist/src/{knowledgeBase-D33Ty2l6.js → knowledgeBase-DFRXPZl_.js} +7 -7
  136. package/dist/src/{knowledgeBase-DOO_BM9b.cjs → knowledgeBase-DqrLX8fy.cjs} +7 -7
  137. package/dist/src/{litellm-AaeZcZQF.js → litellm-Bo2gQXpo.js} +14 -14
  138. package/dist/src/{litellm-NbjknEh6.js → litellm-CKiAxnoM.js} +13 -13
  139. package/dist/src/{litellm-I_hbp_dc.cjs → litellm-CnHI69aj.cjs} +14 -14
  140. package/dist/src/{litellm-TrljxD9G.js → litellm-Tc294Jhj.js} +13 -13
  141. package/dist/src/{logger-KkObSCzq.js → logger-BcJBzSSA.js} +10 -14
  142. package/dist/src/{logger-DLcq4dWf.js → logger-BnkjG2jt.js} +10 -14
  143. package/dist/src/{logger-Cp1GPUjj.cjs → logger-D5iKBpu_.cjs} +27 -13
  144. package/dist/src/{logger-CT3IKMKA.js → logger-DO8_zM18.js} +10 -14
  145. package/dist/src/{luma-ray-BS2_tY8L.js → luma-ray-0ehMPt5N.js} +10 -10
  146. package/dist/src/{luma-ray-DDsjcgZZ.js → luma-ray-C9q8rdQe.js} +9 -9
  147. package/dist/src/{luma-ray-f6I2fft-.js → luma-ray-DP0QA9qn.js} +9 -9
  148. package/dist/src/{luma-ray-Due0n7di.cjs → luma-ray-m9Ku2meV.cjs} +9 -9
  149. package/dist/src/main.js +69 -71
  150. package/dist/src/{messages-D0lx5qK7.js → messages-DJNo37Ko.js} +14 -9
  151. package/dist/src/{messages-BS17jdMx.js → messages-Dy9QecMs.js} +14 -9
  152. package/dist/src/{messages-Bs1kC7P4.cjs → messages-HJsyEh4o.cjs} +15 -10
  153. package/dist/src/{messages-ZJk778GH.js → messages-biC_ex-p.js} +14 -9
  154. package/dist/src/{modelslab-DRb74SP4.js → modelslab-B5J-ZM5c.js} +9 -9
  155. package/dist/src/{modelslab-Bx9IrZfS.js → modelslab-BI458moT.js} +10 -10
  156. package/dist/src/{modelslab-Bmni6skY.js → modelslab-BTOT8FUO.js} +9 -9
  157. package/dist/src/{modelslab-CoUX6Jc_.cjs → modelslab-IQbNg-r7.cjs} +9 -9
  158. package/dist/src/{nova-reel-bgjxilYW.js → nova-reel-BZ9y-Y5s.js} +9 -9
  159. package/dist/src/{nova-reel-C_QM18Xn.cjs → nova-reel-CE5etkv9.cjs} +9 -9
  160. package/dist/src/{nova-reel-D_W1tjMH.js → nova-reel-DEeQlnOJ.js} +10 -10
  161. package/dist/src/{nova-reel-BfPq-0Yk.js → nova-reel-Xw1SXLpg.js} +9 -9
  162. package/dist/src/{nova-sonic-De1HW5fD.js → nova-sonic-DWswpN1E.js} +7 -7
  163. package/dist/src/{nova-sonic-CFb5GYhg.js → nova-sonic-DXTLpi-r.js} +6 -6
  164. package/dist/src/{nova-sonic-zfcljeRp.cjs → nova-sonic-N0yCm0vb.cjs} +6 -6
  165. package/dist/src/{nova-sonic-DIGQNR07.js → nova-sonic-Ogqf-csn.js} +6 -6
  166. package/dist/src/{openai-DhbB7eWK.js → openai-BMcwgD5C.js} +2 -2
  167. package/dist/src/{openai-j-sE2O7r.js → openai-BcB5KlTk.js} +2 -2
  168. package/dist/src/{openai-Cuif0GEt.cjs → openai-CoxGAQwn.cjs} +2 -2
  169. package/dist/src/{openai-DElQ-fPX.js → openai-D6wITiVn.js} +2 -2
  170. package/dist/src/{openclaw-tiVYRtr-.js → openclaw-0Sv7AK3O.js} +13 -13
  171. package/dist/src/{openclaw-CSugPYAr.cjs → openclaw-CXxbKgDH.cjs} +14 -14
  172. package/dist/src/{openclaw-DuvJKEW5.js → openclaw-D1FSCps-.js} +13 -13
  173. package/dist/src/{openclaw-DiSz3I5L.js → openclaw-D2ENvu7a.js} +14 -14
  174. package/dist/src/{opencode-sdk-0j6rTWNb.js → opencode-sdk-C71Z0ehR.js} +13 -13
  175. package/dist/src/{opencode-sdk-B3CWY9h_.js → opencode-sdk-CHCs7dEb.js} +12 -12
  176. package/dist/src/{opencode-sdk-C2y6UkP2.js → opencode-sdk-DDxj4QqH.js} +12 -12
  177. package/dist/src/{opencode-sdk-BL764Jdi.cjs → opencode-sdk-WWJhnbKr.cjs} +16 -16
  178. package/dist/src/{otlpReceiver-C99PPb48.js → otlpReceiver-C9KlUtxh.js} +6 -6
  179. package/dist/src/{otlpReceiver-CdNBdbsk.js → otlpReceiver-CZL48YfC.js} +6 -6
  180. package/dist/src/{otlpReceiver-D89fR-rC.js → otlpReceiver-CavGAA6k.js} +6 -6
  181. package/dist/src/{otlpReceiver-CGq6LspY.cjs → otlpReceiver-DHKqJlsz.cjs} +6 -6
  182. package/dist/src/{providerRegistry-B0RUOLI_.js → providerRegistry-B9lh-_tx.js} +2 -2
  183. package/dist/src/{providerRegistry-Civky8Ar.cjs → providerRegistry-BTDgfV5h.cjs} +2 -2
  184. package/dist/src/{providerRegistry-CD8MEar9.js → providerRegistry-BkzVH5Ba.js} +2 -2
  185. package/dist/src/{providerRegistry-DM8rZYol.js → providerRegistry-CUWki5mQ.js} +2 -2
  186. package/dist/src/providers-BSLEaIQG.js +32 -0
  187. package/dist/src/{providers-CgKOSgTR.cjs → providers-CScd1wN6.cjs} +733 -464
  188. package/dist/src/{providers-BlqUifFg.js → providers-Ch6Mr0gn.js} +795 -526
  189. package/dist/src/{providers-Dk_6ocUX.js → providers-Cn73d5sr.js} +795 -526
  190. package/dist/src/providers-D-FnDg8k.cjs +31 -0
  191. package/dist/src/providers-DEYiFVAo.js +30 -0
  192. package/dist/src/{providers-D8lF1sqW.js → providers-DvddrgxL.js} +795 -526
  193. package/dist/src/providers-sS2WI8YD.js +30 -0
  194. package/dist/src/{pythonUtils-D6fwaDSg.js → pythonUtils-Bzwbgpbg.js} +3 -3
  195. package/dist/src/{pythonUtils-D5nxkQ0P.js → pythonUtils-Cpo0Ez1p.js} +3 -3
  196. package/dist/src/{pythonUtils-CTU3Y3lw.cjs → pythonUtils-dAVigVK-.cjs} +3 -3
  197. package/dist/src/{pythonUtils-C3py6GC1.js → pythonUtils-wIqk7zAf.js} +3 -3
  198. package/dist/src/{quiverai-CIaELU_m.js → quiverai-BeofbLVc.js} +4 -4
  199. package/dist/src/{quiverai-uH-dcTIr.js → quiverai-CCQn73lq.js} +5 -5
  200. package/dist/src/{quiverai-PdShCPox.cjs → quiverai-CcUhPIBg.cjs} +4 -4
  201. package/dist/src/{quiverai-BbOUOn2L.js → quiverai-DVSEqJiq.js} +4 -4
  202. package/dist/src/{render-Drod8m7K.js → render-BHl6QVq9.js} +3 -3
  203. package/dist/src/{responses-WNGNYe3K.js → responses-BKP_WYis.js} +14 -10
  204. package/dist/src/{responses-DIR9Ud3j.js → responses-CQb1Tj69.js} +14 -10
  205. package/dist/src/{responses-CB2jwoAr.js → responses-CgNyTPsY.js} +14 -10
  206. package/dist/src/{responses-D8SBTL64.cjs → responses-mo0KQDbu.cjs} +14 -10
  207. package/dist/src/rubyUtils-B1HXG4ej.cjs +4 -0
  208. package/dist/src/{rubyUtils-DhCAlxZr.cjs → rubyUtils-CGeUtCfW.cjs} +3 -3
  209. package/dist/src/{rubyUtils-Boc4HZzX.js → rubyUtils-CiVfln3g.js} +3 -3
  210. package/dist/src/{rubyUtils-BcuGX77l.js → rubyUtils-DECSbsfY.js} +3 -3
  211. package/dist/src/{rubyUtils-BUVePouc.js → rubyUtils-PgU-gHmx.js} +3 -3
  212. package/dist/src/rubyUtils-Rt6pKA96.js +5 -0
  213. package/dist/src/{sagemaker-CNBxx5CJ.js → sagemaker-CVv8W7so.js} +17 -17
  214. package/dist/src/{sagemaker-CemTFp2h.js → sagemaker-CqeASYE5.js} +17 -17
  215. package/dist/src/{sagemaker-YSyBXQQh.js → sagemaker-MUbD5V3v.js} +18 -18
  216. package/dist/src/{sagemaker-Cl28mZU2.cjs → sagemaker-jiw1wQa-.cjs} +17 -17
  217. package/dist/src/{scanner-BsBlNXNn.js → scanner-DVDeUz1r.js} +10 -10
  218. package/dist/src/server/index.js +854 -106
  219. package/dist/src/server-B0Xh1Gx-.js +7 -0
  220. package/dist/src/{server-C_7Ax-hA.cjs → server-BtoCXeXI.cjs} +4 -4
  221. package/dist/src/{server-VWgWb00X.js → server-CP9qKM40.js} +4 -4
  222. package/dist/src/{server-CuxBbeSY.js → server-Cns05F1j.js} +5 -5
  223. package/dist/src/server-DJTKu9IR.cjs +5 -0
  224. package/dist/src/{server-CqzrVGpF.js → server-DZ9MtCn0.js} +6 -6
  225. package/dist/src/{signal-4U3mfRvL.js → signal-C3ZTsUgi.js} +3 -3
  226. package/dist/src/{slack-DOdy_kyv.js → slack-2sdpGzbt.js} +2 -2
  227. package/dist/src/{slack-BmVAVGaK.cjs → slack-94iG3T0s.cjs} +2 -2
  228. package/dist/src/{slack-DCUPTzS2.js → slack-BR0HtO3K.js} +2 -2
  229. package/dist/src/{slack-DXMKtA-f.js → slack-DCEV-vWP.js} +2 -2
  230. package/dist/src/store-C5u6MgC8.js +6 -0
  231. package/dist/src/{store-DLlFCC4h.cjs → store-CLyU7AtI.cjs} +17 -5
  232. package/dist/src/store-CNHk-De4.cjs +5 -0
  233. package/dist/src/{store-DXilxTl-.js → store-Cj258DgL.js} +17 -5
  234. package/dist/src/{store-Dim__MDd.js → store-P8OKm19S.js} +17 -5
  235. package/dist/src/{store-CXGFv4aR.js → store-VB0GP46K.js} +17 -5
  236. package/dist/src/{tables-DLJPUdUE.js → tables-BEIFz2tM.js} +3 -3
  237. package/dist/src/{tables-DPi7wKeM.cjs → tables-BdZQEpRz.cjs} +3 -3
  238. package/dist/src/{tables-gftXzE9I.js → tables-DmzvLbeZ.js} +3 -3
  239. package/dist/src/{tables-6YKwjN9-.js → tables-kC7R5kiK.js} +3 -3
  240. package/dist/src/{telemetry-CMrFgtPB.js → telemetry-BnH5VJAU.js} +4 -4
  241. package/dist/src/{telemetry-Dthj_BbD.js → telemetry-BugWqKiu.js} +4 -4
  242. package/dist/src/{telemetry-Cps3mIU-.js → telemetry-DPXLd7UE.js} +4 -4
  243. package/dist/src/telemetry-Yig0Tino.js +7 -0
  244. package/dist/src/telemetry-p8Pwqm1i.cjs +5 -0
  245. package/dist/src/{telemetry-DaX14Chu.cjs → telemetry-re627Lre.cjs} +4 -4
  246. package/dist/src/{transcription-NLVG9MT1.cjs → transcription-BvtsrzRG.cjs} +13 -13
  247. package/dist/src/{transcription-BNYURcXg.js → transcription-CaMivnjG.js} +13 -13
  248. package/dist/src/{transcription-s6A-bNrZ.js → transcription-DOMMTu01.js} +14 -14
  249. package/dist/src/{transcription-B_OdaHp7.js → transcription-Hb3VnC4M.js} +13 -13
  250. package/dist/src/{transform-DuHvhZpj.cjs → transform-0BwoBsvO.cjs} +19 -5
  251. package/dist/src/{transform-uAytVuyX.js → transform-B2-jIv68.js} +8 -6
  252. package/dist/src/{transform-DECvGmzp.js → transform-BqPkNPYm.js} +4 -4
  253. package/dist/src/{transform-aa6tmVpZ.js → transform-BzK09Q_9.js} +4 -4
  254. package/dist/src/transform-ChNIpHz7.js +6 -0
  255. package/dist/src/{transform-D5HsjduX.js → transform-DrleutM3.js} +8 -6
  256. package/dist/src/{transform-vNucnNr0.js → transform-DyDAwEpE.js} +8 -6
  257. package/dist/src/transform-PtQ6rAE3.cjs +5 -0
  258. package/dist/src/{transform-CzK1Q0zl.cjs → transform-ZrG2dvlo.cjs} +4 -4
  259. package/dist/src/{transform-DilY9wbS.js → transform-ljLYHEPh.js} +4 -4
  260. package/dist/src/{transformersAvailability-CEVM2GNQ.js → transformersAvailability-BGkzavwb.js} +1 -1
  261. package/dist/src/{transformersAvailability-CwayUSlh.cjs → transformersAvailability-DKoRtQLy.cjs} +1 -1
  262. package/dist/src/{types-Cbd8uOMq.js → types-CIhFeUC4.js} +7 -1
  263. package/dist/src/{types-CzW2QFyi.js → types-Cd3ygw8W.js} +7 -1
  264. package/dist/src/{types-C_7nyzr1.cjs → types-D8cGDZbL.cjs} +8 -2
  265. package/dist/src/{types-DmyIJ-sR.js → types-q8GXGF65.js} +7 -1
  266. package/dist/src/{util-DGNOS1db.cjs → util--9u9UVCt.cjs} +3 -3
  267. package/dist/src/{util-ZzmqNPlg.js → util-BLvy9qfE.js} +7 -7
  268. package/dist/src/{util-C1CeHl-P.js → util-Bm3E9jpK.js} +7 -7
  269. package/dist/src/{util-BV4XUC0n.js → util-BtoGs5Cb.js} +18 -4
  270. package/dist/src/{util-BzMcevZc.cjs → util-CFj4YKIn.cjs} +18 -4
  271. package/dist/src/{util-BRYkYPTd.js → util-CMMkIxfU.js} +7 -7
  272. package/dist/src/{util-Dnmk2mBQ.js → util-CgDCK4KI.js} +18 -4
  273. package/dist/src/{util-B9vlHIIh.cjs → util-CuLo2pMR.cjs} +7 -7
  274. package/dist/src/{util-CMy69ZgQ.js → util-DM2rTn_6.js} +18 -4
  275. package/dist/src/{util-B3xGByQh.js → util-DMFeUvLz.js} +3 -3
  276. package/dist/src/{util-BHGHw5G1.js → util-DbVG-yZU.js} +3 -3
  277. package/dist/src/{util-Bv6uGDfH.js → util-vNmDL5DT.js} +3 -3
  278. package/dist/src/{utils-XiOAgly5.js → utils-CFxO9KGo.js} +2 -2
  279. package/dist/src/{utils-f2-Moju7.js → utils-DEuL4VNB.js} +2 -2
  280. package/dist/src/{utils-Cz9qXqII.cjs → utils-DKw8mrgr.cjs} +3 -3
  281. package/dist/src/{utils-dLokC-eR.js → utils-DOjD4dTC.js} +2 -2
  282. package/dist/tsconfig.tsbuildinfo +1 -1
  283. package/package.json +32 -32
  284. package/dist/src/app/assets/index-4LKxG2CG.js +0 -439
  285. package/dist/src/app/assets/index-C3zcsZFQ.css +0 -1
  286. package/dist/src/app/assets/sync-9qqYcY-B.js +0 -4
  287. package/dist/src/app/assets/vendor-markdown-0tekx3KX.js +0 -29
  288. package/dist/src/app/tsconfig.app.tsbuildinfo +0 -1
  289. package/dist/src/cache-CeUpFm3M.cjs +0 -5
  290. package/dist/src/cache-n-RCJ-hL.js +0 -6
  291. package/dist/src/cloud-BBh91EUK.js +0 -4
  292. package/dist/src/eval-B3r2CVXr.js +0 -15
  293. package/dist/src/evalResult-5xwYnECe.js +0 -12
  294. package/dist/src/evalResult-71lY93Kj.cjs +0 -10
  295. package/dist/src/evalResult-Dx5P5cIv.js +0 -10
  296. package/dist/src/evaluator-Jx6bRZV6.js +0 -36
  297. package/dist/src/fetch-BxNb_Lp3.js +0 -5
  298. package/dist/src/graders-B_pgMLS2.js +0 -34
  299. package/dist/src/graders-DErokPDO.cjs +0 -32
  300. package/dist/src/graders-DR_uNe54.js +0 -32
  301. package/dist/src/graders-w3176Wz-.js +0 -32
  302. package/dist/src/providers-B7V0njNs.js +0 -32
  303. package/dist/src/providers-BEwbhv0X.js +0 -30
  304. package/dist/src/providers-CH3C7zf7.js +0 -30
  305. package/dist/src/providers-zyB6k_38.cjs +0 -31
  306. package/dist/src/rubyUtils-BUHu6PhO.js +0 -5
  307. package/dist/src/rubyUtils-CP42kMvq.cjs +0 -4
  308. package/dist/src/server-DA4Cyrrq.js +0 -7
  309. package/dist/src/server-Dulb-4-K.cjs +0 -5
  310. package/dist/src/store-CXS-Q_91.js +0 -6
  311. package/dist/src/store-eYkaKMwq.cjs +0 -5
  312. package/dist/src/telemetry-BpMfhthR.cjs +0 -5
  313. package/dist/src/telemetry-Dw38hanS.js +0 -7
  314. package/dist/src/transform-DTGDnAzW.js +0 -6
  315. package/dist/src/transform-m3qNw4KP.cjs +0 -5
@@ -1,39 +1,39 @@
1
1
  #!/usr/bin/env node
2
- import { C as getEnvString, O as state, S as getEnvInt, b as getEnvBool, d as extractFirstJsonObject, g as safeJsonStringify, l as sanitizeObject, m as isValidJson, o as logger, u as sanitizeUrl, v as getConfigDirectoryPath, x as getEnvFloat } from "./logger-KkObSCzq.js";
3
- import { C as CLOUD_PROVIDER_PREFIX, N as VERSION, S as transformTools, _ as openaiToolChoiceToBedrock, b as toTitleCase, d as LONG_RUNNING_MODEL_TIMEOUT_MS, f as REQUEST_TIMEOUT_MS, h as isOpenAIToolChoice, i as fetchWithTimeout, l as sleep, m as isOpenAIToolArray, n as fetchWithProxy, p as calculateCost, r as fetchWithRetries, v as openaiToolsToBedrock, x as transformToolChoice, y as parseChatPrompt } from "./fetch-Dxpd4_sr.js";
2
+ import { C as getEnvBool, E as getEnvString, T as getEnvInt, _ as isValidJson, f as sanitizeObject, j as state, m as extractFirstJsonObject, p as sanitizeUrl, s as logger, w as getEnvFloat, x as getConfigDirectoryPath, y as safeJsonStringify } from "./logger-BcJBzSSA.js";
3
+ import { C as CLOUD_PROVIDER_PREFIX, N as VERSION, S as transformTools, _ as openaiToolChoiceToBedrock, b as toTitleCase, d as LONG_RUNNING_MODEL_TIMEOUT_MS, f as REQUEST_TIMEOUT_MS$1, h as isOpenAIToolChoice, i as fetchWithTimeout, l as sleep, m as isOpenAIToolArray, n as fetchWithProxy, p as calculateCost, r as fetchWithRetries, v as openaiToolsToBedrock, x as transformToolChoice, y as parseChatPrompt } from "./fetch-DoVRJZhJ.js";
4
4
  import { t as invariant } from "./invariant-BtWWVVhl.js";
5
- import { c as isLoggedIntoCloud, o as getUserEmail } from "./accounts-Bm2D8Db9.js";
6
- import { a as cloudConfig } from "./cloud-C0dlstV_.js";
7
- import { r as telemetry } from "./telemetry-CMrFgtPB.js";
8
- import { A as DATASET_PLUGINS, E as isUuid, I as isCustomStrategy, M as MULTI_TURN_STRATEGIES, O as AGENTIC_STRATEGIES, V as pluginDescriptions, dt as STRATEGY_EXEMPT_PLUGINS, m as isProviderOptions, p as isApiProvider, v as ProviderOptionsSchema } from "./types-Cbd8uOMq.js";
9
- import { f as neverGenerateRemote, l as getRemoteGenerationUrl, m as shouldGenerateRemote, p as neverGenerateRemoteForRegularEvals, r as checkServerFeatureSupport, u as getRemoteGenerationUrlForUnaligned } from "./server-CqzrVGpF.js";
10
- import { a as fetchWithCache, o as getCache, s as isCacheEnabled } from "./cache-BVeDlD87.js";
5
+ import { c as isLoggedIntoCloud, o as getUserEmail } from "./accounts-CFLK3mnD.js";
6
+ import { a as cloudConfig } from "./cloud-z8KZpUoa.js";
7
+ import { r as telemetry } from "./telemetry-BnH5VJAU.js";
8
+ import { A as DATASET_PLUGINS, E as isUuid, I as isCustomStrategy, M as MULTI_TURN_STRATEGIES, O as AGENTIC_STRATEGIES, V as pluginDescriptions, dt as STRATEGY_EXEMPT_PLUGINS, m as isProviderOptions, p as isApiProvider, v as ProviderOptionsSchema } from "./types-CIhFeUC4.js";
9
+ import { f as neverGenerateRemote, l as getRemoteGenerationUrl, m as shouldGenerateRemote, p as neverGenerateRemoteForRegularEvals, r as checkServerFeatureSupport, u as getRemoteGenerationUrlForUnaligned } from "./server-DZ9MtCn0.js";
10
+ import { a as fetchWithCache, o as getCache, s as isCacheEnabled } from "./cache-DSqR6ezl.js";
11
11
  import { r as isTransientConnectionError } from "./errors-P6ll7XSJ.js";
12
12
  import { a as isVideoFile, i as isJavascriptFile, n as isAudioFile, r as isImageFile } from "./fileExtensions-Ds-foDzt.js";
13
- import { C as extractVariablesFromTemplates, E as parseFileUrl, _ as parsePathOrGlob, b as renderEnvOnlyInObject, d as getResolvedRelativePath, f as maybeLoadConfigFromExternalFile, g as maybeLoadToolsFromExternalFile, h as maybeLoadResponseFormatFromExternalFile, m as maybeLoadFromExternalFileWithVars, p as maybeLoadFromExternalFile, w as getNunjucksEngine, x as renderVarsInObject } from "./util-C1CeHl-P.js";
14
- import { a as safeJoin, i as resolvePackageEntryPoint, n as getWrapperDir, o as safeResolve, r as importModule } from "./esm-C03C-mv3.js";
15
- import { i as validatePythonPath, n as getEnvInt$1, r as runPython, t as getConfiguredPythonPath } from "./pythonUtils-C3py6GC1.js";
16
- import { n as transform, r as getProcessShim, t as TransformInputType } from "./transform-DilY9wbS.js";
13
+ import { C as extractVariablesFromTemplates, E as parseFileUrl, T as loadFunction, _ as parsePathOrGlob, b as renderEnvOnlyInObject, d as getResolvedRelativePath, f as maybeLoadConfigFromExternalFile, g as maybeLoadToolsFromExternalFile, h as maybeLoadResponseFormatFromExternalFile, m as maybeLoadFromExternalFileWithVars, p as maybeLoadFromExternalFile, w as getNunjucksEngine, x as renderVarsInObject } from "./util-Bm3E9jpK.js";
14
+ import { a as safeJoin, i as resolvePackageEntryPoint, n as getWrapperDir, o as safeResolve, r as importModule } from "./esm-7UIl0pPM.js";
15
+ import { i as validatePythonPath, n as getEnvInt$1, r as runPython, t as getConfiguredPythonPath } from "./pythonUtils-wIqk7zAf.js";
16
+ import { n as transform, r as getProcessShim, t as TransformInputType } from "./transform-ljLYHEPh.js";
17
17
  import { n as sha256 } from "./createHash-Da8fMwqB.js";
18
18
  import { n as withGenAISpan } from "./genaiTracer-C1rxGO8Q.js";
19
- import { i as normalizeFinishReason, n as MCPClient, r as FINISH_REASON_MAP, t as OpenAiChatCompletionProvider } from "./chat-BiKyneZl.js";
19
+ import { i as normalizeFinishReason, n as MCPClient, r as FINISH_REASON_MAP, t as OpenAiChatCompletionProvider } from "./chat-BEwdgGEg.js";
20
20
  import { a as createEmptyTokenUsage, n as accumulateResponseTokenUsage, r as accumulateTokenUsage } from "./tokenUsageUtils-DflFMjS0.js";
21
- import { S as toDataUri, _ as getGoogleClient, a as calculateGoogleCost, b as resolveProjectId, c as geminiFormatAndSystemInstructions, d as mergeParts, f as normalizeSafetySettings, g as GoogleAuthManager, h as CHAT_MODELS, i as transformMCPToolsToOpenAi, l as getCandidate, o as createAuthCacheDiscriminator, p as normalizeTools, r as transformMCPToolsToGoogle, s as formatCandidateContents, u as getGoogleAccessToken, y as loadCredentials } from "./transform-D5HsjduX.js";
22
- import { n as AnthropicGenericProvider, t as AnthropicMessagesProvider } from "./messages-D0lx5qK7.js";
23
- import { a as parseMessages, i as outputFromMessage, n as calculateAnthropicCost, r as getTokenUsage$2, t as ANTHROPIC_MODELS } from "./util-B3xGByQh.js";
24
- import { n as ResponsesProcessor, r as FunctionCallbackHandler, t as OpenAiResponsesProvider } from "./responses-WNGNYe3K.js";
25
- import { t as OpenAiGenericProvider } from "./openai-DhbB7eWK.js";
26
- import { a as calculateOpenAICost, c as getTokenUsage$3, o as failApiCall, r as OPENAI_REALTIME_MODELS, s as formatOpenAiError } from "./util-CMy69ZgQ.js";
27
- import { n as OpenAiEmbeddingProvider, t as OpenAiCompletionProvider } from "./completion-DZ083F31.js";
28
- import { i as storeBlob } from "./blobs-CMHN0Qcz.js";
29
- import { a as evalResultsTable, g as getDb } from "./tables-gftXzE9I.js";
30
- import { n as isBlobStorageEnabled, r as shouldAttemptRemoteBlobUpload, t as extractAndStoreBinaryData } from "./extractor-DReVID0K.js";
21
+ import { A as TOKEN_REFRESH_BUFFER_MS, S as resolveProjectId, _ as determineGoogleVertexMode, a as calculateGoogleCost, c as geminiFormatAndSystemInstructions, d as mergeParts, f as normalizeSafetySettings, g as GoogleAuthManager, h as CHAT_MODELS, i as transformMCPToolsToOpenAi, l as getCandidate, o as createAuthCacheDiscriminator, p as normalizeTools, r as transformMCPToolsToGoogle, s as formatCandidateContents, u as getGoogleAccessToken, v as getGoogleApiKey, w as toDataUri, x as loadCredentials, y as getGoogleClient } from "./transform-DrleutM3.js";
22
+ import { n as AnthropicGenericProvider, t as AnthropicMessagesProvider } from "./messages-DJNo37Ko.js";
23
+ import { a as parseMessages, i as outputFromMessage, n as calculateAnthropicCost, r as getTokenUsage$2, t as ANTHROPIC_MODELS } from "./util-DMFeUvLz.js";
24
+ import { n as ResponsesProcessor, r as FunctionCallbackHandler, t as OpenAiResponsesProvider } from "./responses-BKP_WYis.js";
25
+ import { t as OpenAiGenericProvider } from "./openai-BMcwgD5C.js";
26
+ import { a as calculateOpenAICost, c as getTokenUsage$3, o as failApiCall, r as OPENAI_REALTIME_MODELS, s as formatOpenAiError } from "./util-DM2rTn_6.js";
27
+ import { n as OpenAiEmbeddingProvider, t as OpenAiCompletionProvider } from "./completion-DoYy49ti.js";
28
+ import { i as storeBlob } from "./blobs-B1JriOyi.js";
29
+ import { a as evalResultsTable, g as getDb } from "./tables-DmzvLbeZ.js";
30
+ import { n as isBlobStorageEnabled, r as shouldAttemptRemoteBlobUpload, t as extractAndStoreBinaryData } from "./extractor-CAZ2G3Kh.js";
31
31
  import { n as escapeRegExp, t as ellipsize } from "./text-Db-Wt2u2.js";
32
- import { n as getTraceStore } from "./store-Dim__MDd.js";
33
- import { a as novaParseMessages, i as novaOutputFromMessage, t as AwsBedrockGenericProvider } from "./base-Dy1V8--Z.js";
34
- import { i as formatOutput, n as buildStructuredImageOutputs, r as callOpenAiImageApi, t as OpenAiImageProvider } from "./image-DS-o-0ph.js";
35
- import { t as providerRegistry } from "./providerRegistry-B0RUOLI_.js";
36
- import { n as runRuby } from "./rubyUtils-Boc4HZzX.js";
32
+ import { n as getTraceStore } from "./store-P8OKm19S.js";
33
+ import { a as novaParseMessages, i as novaOutputFromMessage, t as AwsBedrockGenericProvider } from "./base-CKjwebIH.js";
34
+ import { i as formatOutput, n as buildStructuredImageOutputs, r as callOpenAiImageApi, t as OpenAiImageProvider } from "./image-PoF6DN3x.js";
35
+ import { t as providerRegistry } from "./providerRegistry-B9lh-_tx.js";
36
+ import { n as runRuby } from "./rubyUtils-CiVfln3g.js";
37
37
  import { Agent } from "undici";
38
38
  import { z } from "zod";
39
39
  import input from "@inquirer/input";
@@ -172,9 +172,9 @@ function normalizeEvalConfig(config) {
172
172
  const tests = Array.isArray(config.tests) ? config.tests : Array.isArray(config.testCases) ? config.testCases : [];
173
173
  const commandLineOptions = {
174
174
  ...isRecord(config.commandLineOptions) ? config.commandLineOptions : {},
175
- ...config.maxConcurrency != null ? { maxConcurrency: config.maxConcurrency } : {},
176
- ...config.delay != null ? { delay: config.delay } : {},
177
- ...config.verbose != null ? { verbose: config.verbose } : {}
175
+ ...config.maxConcurrency == null ? {} : { maxConcurrency: config.maxConcurrency },
176
+ ...config.delay == null ? {} : { delay: config.delay },
177
+ ...config.verbose == null ? {} : { verbose: config.verbose }
178
178
  };
179
179
  const normalizedConfig = {
180
180
  ...config,
@@ -772,6 +772,62 @@ const AZURE_MODELS = [
772
772
  output: 20 / 1e6
773
773
  }
774
774
  },
775
+ {
776
+ id: "gpt-5.4",
777
+ cost: {
778
+ input: 2.5 / 1e6,
779
+ output: 10 / 1e6
780
+ }
781
+ },
782
+ {
783
+ id: "gpt-5.4-2026-03-05",
784
+ cost: {
785
+ input: 2.5 / 1e6,
786
+ output: 10 / 1e6
787
+ }
788
+ },
789
+ {
790
+ id: "gpt-5.4-pro",
791
+ cost: {
792
+ input: 5 / 1e6,
793
+ output: 20 / 1e6
794
+ }
795
+ },
796
+ {
797
+ id: "gpt-5.4-pro-2026-03-05",
798
+ cost: {
799
+ input: 5 / 1e6,
800
+ output: 20 / 1e6
801
+ }
802
+ },
803
+ {
804
+ id: "gpt-5.4-mini",
805
+ cost: {
806
+ input: .4 / 1e6,
807
+ output: 1.6 / 1e6
808
+ }
809
+ },
810
+ {
811
+ id: "gpt-5.4-mini-2026-03-17",
812
+ cost: {
813
+ input: .4 / 1e6,
814
+ output: 1.6 / 1e6
815
+ }
816
+ },
817
+ {
818
+ id: "gpt-5.4-nano",
819
+ cost: {
820
+ input: .1 / 1e6,
821
+ output: .4 / 1e6
822
+ }
823
+ },
824
+ {
825
+ id: "gpt-5.4-nano-2026-03-17",
826
+ cost: {
827
+ input: .1 / 1e6,
828
+ output: .4 / 1e6
829
+ }
830
+ },
775
831
  {
776
832
  id: "gpt-5-mini",
777
833
  cost: {
@@ -2198,7 +2254,7 @@ var AzureChatCompletionProvider = class extends AzureGenericProvider {
2198
2254
  ...this.config.headers
2199
2255
  },
2200
2256
  body: JSON.stringify(body)
2201
- }, REQUEST_TIMEOUT_MS, "json", context?.bustCache ?? context?.debug);
2257
+ }, REQUEST_TIMEOUT_MS$1, "json", context?.bustCache ?? context?.debug);
2202
2258
  cached = isCached;
2203
2259
  latencyMs = fetchLatencyMs;
2204
2260
  if (typeof responseData === "string") try {
@@ -2298,7 +2354,7 @@ var AzureEmbeddingProvider = class extends AzureGenericProvider {
2298
2354
  ...this.config.headers
2299
2355
  },
2300
2356
  body: JSON.stringify(body)
2301
- }, REQUEST_TIMEOUT_MS));
2357
+ }, REQUEST_TIMEOUT_MS$1));
2302
2358
  } catch (err) {
2303
2359
  return {
2304
2360
  error: `API call error: ${String(err)}`,
@@ -2459,7 +2515,7 @@ var AzureModerationProvider = class AzureModerationProvider extends AzureGeneric
2459
2515
  ...this.configWithHeaders.passthrough || {}
2460
2516
  };
2461
2517
  const controller = new AbortController();
2462
- const timeoutId = setTimeout(() => controller.abort(), REQUEST_TIMEOUT_MS);
2518
+ const timeoutId = setTimeout(() => controller.abort(), REQUEST_TIMEOUT_MS$1);
2463
2519
  const response = await fetchWithProxy(url, {
2464
2520
  method: "POST",
2465
2521
  headers,
@@ -2706,7 +2762,7 @@ var GoogleGenericProvider = class {
2706
2762
  * Get the request timeout in milliseconds.
2707
2763
  */
2708
2764
  getTimeout() {
2709
- return this.config.timeoutMs || REQUEST_TIMEOUT_MS;
2765
+ return this.config.timeoutMs || REQUEST_TIMEOUT_MS$1;
2710
2766
  }
2711
2767
  };
2712
2768
  //#endregion
@@ -2848,7 +2904,7 @@ var AIStudioChatProvider = class extends GoogleGenericProvider {
2848
2904
  headers,
2849
2905
  body: JSON.stringify(body),
2850
2906
  ...authDiscriminator && { _authHash: authDiscriminator }
2851
- }, REQUEST_TIMEOUT_MS, "json", context?.bustCache ?? context?.debug));
2907
+ }, REQUEST_TIMEOUT_MS$1, "json", context?.bustCache ?? context?.debug));
2852
2908
  } catch (err) {
2853
2909
  return { error: `API call error: ${String(err)}` };
2854
2910
  }
@@ -2875,7 +2931,7 @@ var AIStudioChatProvider = class extends GoogleGenericProvider {
2875
2931
  rejectedPrediction: 0
2876
2932
  } }
2877
2933
  };
2878
- const completionForCost = data.usageMetadata?.candidatesTokenCount != null ? data.usageMetadata.candidatesTokenCount + (data.usageMetadata?.thoughtsTokenCount ?? 0) : void 0;
2934
+ const completionForCost = data.usageMetadata?.candidatesTokenCount == null ? void 0 : data.usageMetadata.candidatesTokenCount + (data.usageMetadata?.thoughtsTokenCount ?? 0);
2879
2935
  return {
2880
2936
  output,
2881
2937
  tokenUsage,
@@ -2930,7 +2986,7 @@ var AIStudioChatProvider = class extends GoogleGenericProvider {
2930
2986
  headers,
2931
2987
  body: JSON.stringify(body),
2932
2988
  ...authDiscriminator && { _authHash: authDiscriminator }
2933
- }, REQUEST_TIMEOUT_MS, "json", false));
2989
+ }, REQUEST_TIMEOUT_MS$1, "json", false));
2934
2990
  } catch (err) {
2935
2991
  return { error: `API call error: ${String(err)}` };
2936
2992
  }
@@ -2972,7 +3028,7 @@ var AIStudioChatProvider = class extends GoogleGenericProvider {
2972
3028
  rejectedPrediction: 0
2973
3029
  } }
2974
3030
  };
2975
- const completionForCost = data.usageMetadata?.candidatesTokenCount != null ? data.usageMetadata.candidatesTokenCount + (data.usageMetadata?.thoughtsTokenCount ?? 0) : void 0;
3031
+ const completionForCost = data.usageMetadata?.candidatesTokenCount == null ? void 0 : data.usageMetadata.candidatesTokenCount + (data.usageMetadata?.thoughtsTokenCount ?? 0);
2976
3032
  const cost = cached ? void 0 : calculateGoogleCost(this.modelName, config, data.usageMetadata?.promptTokenCount, completionForCost);
2977
3033
  return {
2978
3034
  output,
@@ -3163,7 +3219,7 @@ var VertexChatProvider = class extends GoogleGenericProvider {
3163
3219
  method: "POST",
3164
3220
  headers: { "Content-Type": "application/json; charset=utf-8" },
3165
3221
  data: body,
3166
- timeout: REQUEST_TIMEOUT_MS
3222
+ timeout: REQUEST_TIMEOUT_MS$1
3167
3223
  })).data;
3168
3224
  } catch (err) {
3169
3225
  const error = err;
@@ -3277,7 +3333,7 @@ var VertexChatProvider = class extends GoogleGenericProvider {
3277
3333
  method: "POST",
3278
3334
  headers: await this.getAuthHeaders(),
3279
3335
  body: JSON.stringify(body),
3280
- signal: AbortSignal.timeout(REQUEST_TIMEOUT_MS)
3336
+ signal: AbortSignal.timeout(REQUEST_TIMEOUT_MS$1)
3281
3337
  });
3282
3338
  if (!res.ok) {
3283
3339
  const errorData = await res.json().catch(() => null);
@@ -3293,7 +3349,7 @@ var VertexChatProvider = class extends GoogleGenericProvider {
3293
3349
  url,
3294
3350
  method: "POST",
3295
3351
  data: body,
3296
- timeout: REQUEST_TIMEOUT_MS
3352
+ timeout: REQUEST_TIMEOUT_MS$1
3297
3353
  })).data;
3298
3354
  }
3299
3355
  } catch (err) {
@@ -3396,7 +3452,7 @@ var VertexChatProvider = class extends GoogleGenericProvider {
3396
3452
  rejectedPrediction: 0
3397
3453
  } }
3398
3454
  };
3399
- const completionForCost = completionTokenCount != null ? completionTokenCount + (thoughtsTokenCount ?? 0) : void 0;
3455
+ const completionForCost = completionTokenCount == null ? void 0 : completionTokenCount + (thoughtsTokenCount ?? 0);
3400
3456
  const cost = calculateGoogleCost(this.modelName, config, promptTokenCount, completionForCost, true);
3401
3457
  response = {
3402
3458
  cached: false,
@@ -3483,7 +3539,7 @@ var VertexChatProvider = class extends GoogleGenericProvider {
3483
3539
  method: "POST",
3484
3540
  headers: { "Content-Type": "application/json" },
3485
3541
  data: body,
3486
- timeout: REQUEST_TIMEOUT_MS
3542
+ timeout: REQUEST_TIMEOUT_MS$1
3487
3543
  })).data;
3488
3544
  } catch (err) {
3489
3545
  return { error: `API call error: ${JSON.stringify(err)}` };
@@ -3552,7 +3608,7 @@ var VertexChatProvider = class extends GoogleGenericProvider {
3552
3608
  method: "POST",
3553
3609
  headers: { "Content-Type": "application/json; charset=utf-8" },
3554
3610
  data: body,
3555
- timeout: REQUEST_TIMEOUT_MS
3611
+ timeout: REQUEST_TIMEOUT_MS$1
3556
3612
  })).data;
3557
3613
  logger.debug(`Llama API response: ${JSON.stringify(data)}`);
3558
3614
  } catch (err) {
@@ -3879,12 +3935,14 @@ var MistralChatCompletionProvider = class MistralChatCompletionProvider {
3879
3935
  };
3880
3936
  return withGenAISpan(spanContext, () => this.callApiInternal(prompt, context, config), resultExtractor);
3881
3937
  }
3882
- async callApiInternal(prompt, _context, config = {}) {
3938
+ async callApiInternal(prompt, context, config = {}) {
3883
3939
  if (!this.getApiKey()) throw new Error("Mistral API key is not set. Set the MISTRAL_API_KEY environment variable or add `apiKey` or `apiKeyEnvar` to the provider config.");
3884
3940
  const messages = parseChatPrompt(prompt, [{
3885
3941
  role: "user",
3886
3942
  content: prompt
3887
3943
  }]);
3944
+ const loadedTools = config.tools ? await maybeLoadToolsFromExternalFile(config.tools, context?.vars) : void 0;
3945
+ const hasTools = Array.isArray(loadedTools) ? loadedTools.length > 0 : loadedTools !== void 0;
3888
3946
  const params = {
3889
3947
  model: this.modelName,
3890
3948
  messages,
@@ -3893,6 +3951,9 @@ var MistralChatCompletionProvider = class MistralChatCompletionProvider {
3893
3951
  max_tokens: config?.max_tokens || 1024,
3894
3952
  safe_prompt: config?.safe_prompt || false,
3895
3953
  random_seed: config?.random_seed || null,
3954
+ ...hasTools ? { tools: loadedTools } : {},
3955
+ ...config?.tool_choice ? { tool_choice: config.tool_choice } : {},
3956
+ ..."parallel_tool_calls" in config ? { parallel_tool_calls: Boolean(config.parallel_tool_calls) } : {},
3896
3957
  ...config?.response_format ? { response_format: config.response_format } : {}
3897
3958
  };
3898
3959
  const cacheKey = `mistral:${JSON.stringify(params)}`;
@@ -3927,15 +3988,20 @@ var MistralChatCompletionProvider = class MistralChatCompletionProvider {
3927
3988
  Authorization: `Bearer ${this.getApiKey()}`
3928
3989
  },
3929
3990
  body: JSON.stringify(params)
3930
- }, REQUEST_TIMEOUT_MS));
3991
+ }, REQUEST_TIMEOUT_MS$1));
3931
3992
  } catch (err) {
3932
3993
  return { error: `API call error: ${String(err)}` };
3933
3994
  }
3934
3995
  logger.debug("Mistral API response", { data });
3935
3996
  if (data.error) return { error: `API call error: ${data.error}` };
3936
- if (!data.choices || !data.choices[0] || !data.choices[0].message.content) return { error: `Malformed response data: ${JSON.stringify(data)}` };
3997
+ if (!data.choices || !data.choices[0] || !data.choices[0].message) return { error: `Malformed response data: ${JSON.stringify(data)}` };
3998
+ const message = data.choices[0].message;
3999
+ let output;
4000
+ if (message.content && message.tool_calls?.length) output = message;
4001
+ else if (message.tool_calls?.length) output = message.tool_calls;
4002
+ else output = message.content;
3937
4003
  const result = {
3938
- output: data.choices[0].message.content,
4004
+ output,
3939
4005
  tokenUsage: getTokenUsage$1(data, cached),
3940
4006
  cached,
3941
4007
  cost: calculateMistralCost(this.modelName, config, data.usage?.prompt_tokens, data.usage?.completion_tokens)
@@ -4008,7 +4074,7 @@ var MistralEmbeddingProvider = class {
4008
4074
  Authorization: `Bearer ${this.getApiKey()}`
4009
4075
  },
4010
4076
  body: JSON.stringify(body)
4011
- }, REQUEST_TIMEOUT_MS));
4077
+ }, REQUEST_TIMEOUT_MS$1));
4012
4078
  } catch (err) {
4013
4079
  logger.error(`API call error: ${err}`);
4014
4080
  throw err;
@@ -4144,7 +4210,7 @@ var OpenAiModerationProvider = class OpenAiModerationProvider extends OpenAiGene
4144
4210
  method: "POST",
4145
4211
  headers,
4146
4212
  body: requestBody
4147
- }, REQUEST_TIMEOUT_MS, "json", false, this.config.maxRetries);
4213
+ }, REQUEST_TIMEOUT_MS$1, "json", false, this.config.maxRetries);
4148
4214
  if (status < 200 || status >= 300) return handleApiError$1(`${status} ${statusText}`, typeof data === "string" ? data : JSON.stringify(data));
4149
4215
  logger.debug(`\tOpenAI moderation API response: ${JSON.stringify(data)}`);
4150
4216
  const response = parseOpenAIModerationResponse(data);
@@ -4257,7 +4323,7 @@ var PromptfooChatCompletionProvider = class {
4257
4323
  headers: { "Content-Type": "application/json" },
4258
4324
  body: JSON.stringify(body),
4259
4325
  ...callApiOptions?.abortSignal && { signal: callApiOptions.abortSignal }
4260
- }, REQUEST_TIMEOUT_MS);
4326
+ }, REQUEST_TIMEOUT_MS$1);
4261
4327
  const data = await response.json();
4262
4328
  if (!data.result) {
4263
4329
  logger.debug(`Error from promptfoo completion provider. Status: ${response.status} ${response.statusText} ${JSON.stringify(data)} `);
@@ -4316,7 +4382,7 @@ var PromptfooSimulatedUserProvider = class {
4316
4382
  headers: { "Content-Type": "application/json" },
4317
4383
  body: JSON.stringify(body),
4318
4384
  ...callApiOptions?.abortSignal && { signal: callApiOptions.abortSignal }
4319
- }, REQUEST_TIMEOUT_MS);
4385
+ }, REQUEST_TIMEOUT_MS$1);
4320
4386
  if (!response.ok) throw new Error(`API call failed with status ${response.status}: ${await response.text()}`);
4321
4387
  const data = await response.json();
4322
4388
  return {
@@ -5383,7 +5449,7 @@ async function loadRedteamProvider({ provider, jsonOnly = false, preferSmallMode
5383
5449
  ret = redteamProvider;
5384
5450
  } else if (typeof redteamProvider === "string" || isProviderOptions(redteamProvider)) {
5385
5451
  logger.debug(`Loading ${purpose} provider`, { provider: redteamProvider });
5386
- ret = (await (await import("./providers-B7V0njNs.js")).loadApiProviders([redteamProvider]))[0];
5452
+ ret = (await (await import("./providers-BSLEaIQG.js")).loadApiProviders([redteamProvider]))[0];
5387
5453
  } else {
5388
5454
  const defaultModel = preferSmallModel ? ATTACKER_MODEL_SMALL : ATTACKER_MODEL;
5389
5455
  logger.debug(`Using default ${purpose} provider: ${defaultModel}`);
@@ -5670,7 +5736,7 @@ async function externalizeResponseForRedteamHistory(response, context) {
5670
5736
  */
5671
5737
  async function tryUnblocking({ messages, lastResponse, goal, purpose }) {
5672
5738
  try {
5673
- const { checkServerFeatureSupport } = await import("./server-DA4Cyrrq.js");
5739
+ const { checkServerFeatureSupport } = await import("./server-B0Xh1Gx-.js");
5674
5740
  const supportsUnblocking = await checkServerFeatureSupport("blocking-question-analysis", "2025-06-16T14:49:11-07:00");
5675
5741
  if (!getEnvBool("PROMPTFOO_ENABLE_UNBLOCKING")) {
5676
5742
  logger.debug("[Unblocking] Disabled by default (set PROMPTFOO_ENABLE_UNBLOCKING=true to enable)");
@@ -6639,7 +6705,7 @@ async function extractGoalFromPrompt(prompt, purpose, pluginId, policy) {
6639
6705
  method: "POST",
6640
6706
  headers: { "Content-Type": "application/json" },
6641
6707
  body: JSON.stringify(requestBody)
6642
- }, REQUEST_TIMEOUT_MS);
6708
+ }, REQUEST_TIMEOUT_MS$1);
6643
6709
  logger.debug(`Goal extraction response - Status: ${status} ${statusText || ""}, Data: ${JSON.stringify(data)}`);
6644
6710
  if (status !== 200) {
6645
6711
  logger.warn(`Failed to extract goal from prompt: HTTP ${status} ${statusText || ""}, Response Data: ${JSON.stringify(data)}`);
@@ -7138,7 +7204,7 @@ async function generateCitations(testCases, injectVar, config) {
7138
7204
  method: "POST",
7139
7205
  headers: { "Content-Type": "application/json" },
7140
7206
  body: JSON.stringify(payload)
7141
- }, REQUEST_TIMEOUT_MS);
7207
+ }, REQUEST_TIMEOUT_MS$1);
7142
7208
  logger.debug(`Got remote citation generation result for case ${Number(index) + 1}: ${JSON.stringify(data)}`);
7143
7209
  if (data.error) {
7144
7210
  logger.error(`[Citation] Error in citation generation: ${data.error}`);
@@ -7275,7 +7341,7 @@ async function generateGcgPrompts(testCases, injectVar, config) {
7275
7341
  method: "POST",
7276
7342
  headers: { "Content-Type": "application/json" },
7277
7343
  body: JSON.stringify(payload)
7278
- }, REQUEST_TIMEOUT_MS);
7344
+ }, REQUEST_TIMEOUT_MS$1);
7279
7345
  logger.debug(`Got GCG generation result for case ${Number(index) + 1}: ${JSON.stringify(data)}`);
7280
7346
  if (data.error) {
7281
7347
  logger.error(`[GCG] Error in GCG generation: ${data.error}`);
@@ -8200,7 +8266,7 @@ async function generateLikertPrompts(testCases, injectVar, config) {
8200
8266
  method: "POST",
8201
8267
  headers: { "Content-Type": "application/json" },
8202
8268
  body: JSON.stringify(payload)
8203
- }, REQUEST_TIMEOUT_MS);
8269
+ }, REQUEST_TIMEOUT_MS$1);
8204
8270
  logger.debug(`Got Likert jailbreak generation result for case ${Number(index) + 1}: ${JSON.stringify(data)}`);
8205
8271
  if (data.error || !data.modifiedPrompts) {
8206
8272
  logger.error(`[jailbreak:likert] Error in Likert generation: ${data.error}}`);
@@ -8285,7 +8351,7 @@ async function generateMathPrompt(testCases, injectVar, config) {
8285
8351
  method: "POST",
8286
8352
  headers: { "Content-Type": "application/json" },
8287
8353
  body: JSON.stringify(payload)
8288
- }, REQUEST_TIMEOUT_MS);
8354
+ }, REQUEST_TIMEOUT_MS$1);
8289
8355
  logger.debug(`Got remote MathPrompt generation result for batch ${Number(index) + 1}: ${JSON.stringify(data)}`);
8290
8356
  allResults = allResults.concat(data.result);
8291
8357
  processedBatches++;
@@ -9175,7 +9241,7 @@ async function textToAudio(text, language = "en", options) {
9175
9241
  method: "POST",
9176
9242
  headers: { "Content-Type": "application/json" },
9177
9243
  body: JSON.stringify(payload)
9178
- }, REQUEST_TIMEOUT_MS);
9244
+ }, REQUEST_TIMEOUT_MS$1);
9179
9245
  if (data.error || !data.audioBase64) throw new Error(`Error in remote audio generation: ${data.error || "No audio data returned"}`);
9180
9246
  logger.debug(`Received audio base64 from remote API (${data.audioBase64.length} chars)`);
9181
9247
  const base64Audio = data.audioBase64;
@@ -9591,13 +9657,21 @@ async function generateCompositePrompts(testCases, injectVar, config) {
9591
9657
  email: getUserEmail(),
9592
9658
  ...config.n && { n: config.n },
9593
9659
  ...config.modelFamily && { modelFamily: config.modelFamily },
9594
- ...inputs && { inputs }
9660
+ ...inputs && { inputs },
9661
+ ...config.techniques && { techniques: config.techniques },
9662
+ ...config.evasions && { evasions: config.evasions },
9663
+ ...config.alwaysIncludeTechniques && { alwaysIncludeTechniques: config.alwaysIncludeTechniques },
9664
+ ...config.compositionOrder && { compositionOrder: config.compositionOrder },
9665
+ ...config.combinationMode && { combinationMode: config.combinationMode },
9666
+ ...config.includeEvasionGuidance != null && { includeEvasionGuidance: config.includeEvasionGuidance },
9667
+ ...config.evasionGuidance && { evasionGuidance: config.evasionGuidance },
9668
+ ...config.targetContext && { targetContext: config.targetContext }
9595
9669
  };
9596
9670
  const { data } = await fetchWithCache(getRemoteGenerationUrl(), {
9597
9671
  method: "POST",
9598
9672
  headers: { "Content-Type": "application/json" },
9599
9673
  body: JSON.stringify(payload)
9600
- }, REQUEST_TIMEOUT_MS);
9674
+ }, REQUEST_TIMEOUT_MS$1);
9601
9675
  logger.debug(`Got composite jailbreak generation result for case ${Number(index) + 1}: ${JSON.stringify(data)}`);
9602
9676
  if (data.error || !data.modifiedPrompts) {
9603
9677
  logger.error(`[jailbreak:composite] Error in composite generation: ${data.error}}`);
@@ -10722,7 +10796,7 @@ var CrescendoProvider = class {
10722
10796
  });
10723
10797
  let assertToUse = test?.assert?.find((a) => a.type && a.type.includes(test.metadata?.pluginId));
10724
10798
  if (!assertToUse) assertToUse = test?.assert?.find((a) => a.type);
10725
- const { getGraderById } = await import("./graders-B_pgMLS2.js");
10799
+ const { getGraderById } = await import("./graders-CzVMbEnv.js");
10726
10800
  let graderPassed;
10727
10801
  const additionalRubric = getGoalRubric(this.userGoal);
10728
10802
  while (roundNum < this.maxTurns) try {
@@ -10828,18 +10902,7 @@ var CrescendoProvider = class {
10828
10902
  if (grader) {
10829
10903
  const gradingTraceSummary = tracingOptions.includeInGrading ? response.traceSummary ?? (response.traceContext ? formatTraceSummary(response.traceContext) : void 0) : void 0;
10830
10904
  let gradingContext;
10831
- if (lastResponse.metadata?.wasExfiltrated !== void 0) {
10832
- logger.debug("[Crescendo] Using exfil data from provider response metadata");
10833
- gradingContext = {
10834
- ...tracingOptions.includeInGrading ? {
10835
- traceContext: response.traceContext,
10836
- traceSummary: gradingTraceSummary
10837
- } : {},
10838
- wasExfiltrated: Boolean(lastResponse.metadata.wasExfiltrated),
10839
- exfilCount: Number(lastResponse.metadata.exfilCount) || 0,
10840
- exfilRecords: []
10841
- };
10842
- } else {
10905
+ if (lastResponse.metadata?.wasExfiltrated === void 0) {
10843
10906
  const webPageUuid = test.metadata?.webPageUuid;
10844
10907
  if (webPageUuid) {
10845
10908
  const evalId = context?.evaluationId ?? test.metadata?.evaluationId;
@@ -10858,6 +10921,17 @@ var CrescendoProvider = class {
10858
10921
  exfilRecords: exfilData.exfilRecords
10859
10922
  };
10860
10923
  }
10924
+ } else {
10925
+ logger.debug("[Crescendo] Using exfil data from provider response metadata");
10926
+ gradingContext = {
10927
+ ...tracingOptions.includeInGrading ? {
10928
+ traceContext: response.traceContext,
10929
+ traceSummary: gradingTraceSummary
10930
+ } : {},
10931
+ wasExfiltrated: Boolean(lastResponse.metadata.wasExfiltrated),
10932
+ exfilCount: Number(lastResponse.metadata.exfilCount) || 0,
10933
+ exfilRecords: []
10934
+ };
10861
10935
  }
10862
10936
  if (!gradingContext && tracingOptions.includeInGrading) gradingContext = {
10863
10937
  traceContext: response.traceContext,
@@ -11413,7 +11487,7 @@ var CustomProvider = class {
11413
11487
  let lastTransformResult;
11414
11488
  let assertToUse = test?.assert?.find((a) => a.type && a.type.includes(test.metadata?.pluginId));
11415
11489
  if (!assertToUse) assertToUse = test?.assert?.find((a) => a.type);
11416
- const { getGraderById } = await import("./graders-B_pgMLS2.js");
11490
+ const { getGraderById } = await import("./graders-CzVMbEnv.js");
11417
11491
  let graderPassed;
11418
11492
  let storedGraderResult;
11419
11493
  const additionalRubric = getGoalRubric(this.userGoal);
@@ -11911,7 +11985,7 @@ var GoatProvider = class {
11911
11985
  let assertToUse;
11912
11986
  let graderPassed;
11913
11987
  let storedGraderResult;
11914
- const { getGraderById } = await import("./graders-B_pgMLS2.js");
11988
+ const { getGraderById } = await import("./graders-CzVMbEnv.js");
11915
11989
  let test;
11916
11990
  if (context?.test) {
11917
11991
  test = context?.test;
@@ -12194,18 +12268,7 @@ var GoatProvider = class {
12194
12268
  const grader = assertToUse ? getGraderById(assertToUse.type) : void 0;
12195
12269
  if (test && grader && finalOutput) {
12196
12270
  let gradingContext;
12197
- if (finalResponse.metadata?.wasExfiltrated !== void 0) {
12198
- logger.debug("[GOAT] Using exfil data from provider response metadata");
12199
- gradingContext = {
12200
- ...tracingOptions.includeInGrading ? {
12201
- traceContext: targetResponse.traceContext,
12202
- traceSummary: gradingTraceSummary
12203
- } : {},
12204
- wasExfiltrated: Boolean(finalResponse.metadata.wasExfiltrated),
12205
- exfilCount: Number(finalResponse.metadata.exfilCount) || 0,
12206
- exfilRecords: []
12207
- };
12208
- } else {
12271
+ if (finalResponse.metadata?.wasExfiltrated === void 0) {
12209
12272
  const webPageUuid = test.metadata?.webPageUuid;
12210
12273
  if (webPageUuid) {
12211
12274
  const evalId = context?.evaluationId ?? test.metadata?.evaluationId;
@@ -12224,6 +12287,17 @@ var GoatProvider = class {
12224
12287
  exfilRecords: exfilData.exfilRecords
12225
12288
  };
12226
12289
  }
12290
+ } else {
12291
+ logger.debug("[GOAT] Using exfil data from provider response metadata");
12292
+ gradingContext = {
12293
+ ...tracingOptions.includeInGrading ? {
12294
+ traceContext: targetResponse.traceContext,
12295
+ traceSummary: gradingTraceSummary
12296
+ } : {},
12297
+ wasExfiltrated: Boolean(finalResponse.metadata.wasExfiltrated),
12298
+ exfilCount: Number(finalResponse.metadata.exfilCount) || 0,
12299
+ exfilRecords: []
12300
+ };
12227
12301
  }
12228
12302
  if (!gradingContext && tracingOptions.includeInGrading) gradingContext = {
12229
12303
  traceContext: targetResponse.traceContext,
@@ -12384,7 +12458,7 @@ var HydraProvider = class {
12384
12458
  let lastTransformResult;
12385
12459
  let lastTransformDisplayVars;
12386
12460
  let lastFinalAttackPrompt;
12387
- const { getGraderById } = await import("./graders-B_pgMLS2.js");
12461
+ const { getGraderById } = await import("./graders-CzVMbEnv.js");
12388
12462
  let assertToUse = test?.assert?.find((a) => a.type && a.type.includes(test.metadata?.pluginId));
12389
12463
  if (!assertToUse) assertToUse = test?.assert?.find((a) => a.type);
12390
12464
  let previousTraceSummary;
@@ -13238,7 +13312,7 @@ async function runRedteamConversation$2({ context, filters, injectVar, numIterat
13238
13312
  if (sessionId) sessionIds.push(sessionId);
13239
13313
  let assertToUse = test?.assert?.find((a) => a.type && a.type.includes(test.metadata?.pluginId));
13240
13314
  if (!assertToUse) assertToUse = test?.assert?.find((a) => a.type);
13241
- const { getGraderById } = await import("./graders-B_pgMLS2.js");
13315
+ const { getGraderById } = await import("./graders-CzVMbEnv.js");
13242
13316
  if (test && assertToUse) {
13243
13317
  const grader = getGraderById(assertToUse.type);
13244
13318
  if (grader) {
@@ -14065,7 +14139,7 @@ async function runMetaAgentRedteam({ context, filters, injectVar, numIterations,
14065
14139
  previousTraceSummary = attackTraceSummary;
14066
14140
  let assertToUse = test?.assert?.find((a) => a.type && a.type.includes(test.metadata?.pluginId));
14067
14141
  if (!assertToUse) assertToUse = test?.assert?.find((a) => a.type);
14068
- const { getGraderById } = await import("./graders-B_pgMLS2.js");
14142
+ const { getGraderById } = await import("./graders-CzVMbEnv.js");
14069
14143
  if (test && assertToUse) {
14070
14144
  const grader = getGraderById(assertToUse.type);
14071
14145
  if (grader) {
@@ -14649,7 +14723,7 @@ async function runRedteamConversation({ context, filters, injectVar, options, pr
14649
14723
  noImprovementCount++;
14650
14724
  if (noImprovementCount % 5 === 0) logger.debug(`[Depth ${depth}, Attempt ${attempts}] No improvement for ${noImprovementCount} consecutive iterations. Max score: ${maxScore}`);
14651
14725
  }
14652
- const { getGraderById } = await import("./graders-B_pgMLS2.js");
14726
+ const { getGraderById } = await import("./graders-CzVMbEnv.js");
14653
14727
  let graderPassed;
14654
14728
  let assertToUse = test?.assert?.find((a) => a.type && a.type.includes(test.metadata?.pluginId));
14655
14729
  if (!assertToUse) assertToUse = test?.assert?.find((a) => a.type);
@@ -15323,7 +15397,7 @@ var AI21ChatCompletionProvider = class AI21ChatCompletionProvider {
15323
15397
  messages,
15324
15398
  temperature: config?.temperature ?? .1,
15325
15399
  top_p: config?.top_p || 1,
15326
- max_tokens: config?.max_tokens || 1024,
15400
+ max_tokens: config?.max_tokens ?? 1024,
15327
15401
  n: 1,
15328
15402
  stop: [],
15329
15403
  response_format: config.response_format || { type: "text" }
@@ -15338,7 +15412,7 @@ var AI21ChatCompletionProvider = class AI21ChatCompletionProvider {
15338
15412
  Authorization: `Bearer ${this.getApiKey()}`
15339
15413
  },
15340
15414
  body: JSON.stringify(body)
15341
- }, REQUEST_TIMEOUT_MS));
15415
+ }, REQUEST_TIMEOUT_MS$1));
15342
15416
  } catch (err) {
15343
15417
  return { error: `API call error: ${String(err)}` };
15344
15418
  }
@@ -15504,7 +15578,7 @@ var AnthropicCompletionProvider = class extends AnthropicGenericProvider {
15504
15578
  const params = {
15505
15579
  model: this.modelName,
15506
15580
  prompt: `${Anthropic.HUMAN_PROMPT} ${prompt} ${Anthropic.AI_PROMPT}`,
15507
- max_tokens_to_sample: this.config?.max_tokens_to_sample || getEnvInt("ANTHROPIC_MAX_TOKENS", 1024),
15581
+ max_tokens_to_sample: this.config?.max_tokens_to_sample ?? getEnvInt("ANTHROPIC_MAX_TOKENS", 1024),
15508
15582
  temperature: this.config.temperature ?? getEnvFloat("ANTHROPIC_TEMPERATURE", 0),
15509
15583
  stop_sequences: stop
15510
15584
  };
@@ -15678,7 +15752,7 @@ var AzureAssistantProvider = class extends AzureGenericProvider {
15678
15752
  * Helper method to make HTTP requests using fetchWithCache
15679
15753
  */
15680
15754
  async makeRequest(url, options) {
15681
- const timeoutMs = this.assistantConfig.timeoutMs || REQUEST_TIMEOUT_MS;
15755
+ const timeoutMs = this.assistantConfig.timeoutMs || REQUEST_TIMEOUT_MS$1;
15682
15756
  const retries = this.assistantConfig.retryOptions?.maxRetries || 4;
15683
15757
  const shouldBustCache = url.includes("/runs/") && options.method === "GET" || url.includes("/threads") && options.method === "POST" && !url.includes("/messages") && !url.includes("submit_tool_outputs");
15684
15758
  try {
@@ -15976,7 +16050,7 @@ var AzureCompletionProvider = class extends AzureGenericProvider {
15976
16050
  ...this.config.headers
15977
16051
  },
15978
16052
  body: JSON.stringify(body)
15979
- }, REQUEST_TIMEOUT_MS, "json", context?.bustCache ?? context?.debug));
16053
+ }, REQUEST_TIMEOUT_MS$1, "json", context?.bustCache ?? context?.debug));
15980
16054
  } catch (err) {
15981
16055
  return { error: `API call error: ${String(err)}` };
15982
16056
  }
@@ -16043,61 +16117,65 @@ var AzureCompletionProvider = class extends AzureGenericProvider {
16043
16117
  var AzureFoundryAgentProvider = class extends AzureGenericProvider {
16044
16118
  assistantConfig;
16045
16119
  loadedFunctionCallbacks = {};
16120
+ processor;
16046
16121
  projectClient = null;
16047
16122
  projectUrl;
16123
+ resolvedAgent = null;
16124
+ warnedUnsupportedFields = /* @__PURE__ */ new Set();
16048
16125
  constructor(deploymentName, options = {}) {
16049
16126
  super(deploymentName, options);
16050
16127
  this.assistantConfig = options.config || {};
16051
16128
  this.projectUrl = options.config?.projectUrl || process.env.AZURE_AI_PROJECT_URL || "";
16052
16129
  if (!this.projectUrl) throw new Error("Azure AI Project URL must be provided via projectUrl option or AZURE_AI_PROJECT_URL environment variable");
16130
+ this.processor = new ResponsesProcessor({
16131
+ modelName: this.assistantConfig.modelName || deploymentName,
16132
+ providerType: "azure",
16133
+ functionCallbackHandler: new FunctionCallbackHandler(),
16134
+ costCalculator: (_modelName, usage, requestConfig) => calculateAzureCost(requestConfig?.model || this.assistantConfig.modelName || this.deploymentName, usage) ?? 0
16135
+ });
16053
16136
  if (this.assistantConfig.functionToolCallbacks) this.preloadFunctionCallbacks();
16054
16137
  }
16055
- /**
16056
- * Initialize the Azure AI Project client
16057
- */
16058
16138
  async initializeClient() {
16059
16139
  if (this.projectClient) return this.projectClient;
16060
16140
  try {
16061
16141
  const { AIProjectClient } = await import("@azure/ai-projects");
16062
16142
  const { DefaultAzureCredential } = await import("@azure/identity");
16063
- this.projectClient = new AIProjectClient(this.projectUrl, new DefaultAzureCredential());
16143
+ const projectClient = new AIProjectClient(this.projectUrl, new DefaultAzureCredential());
16144
+ this.projectClient = projectClient;
16064
16145
  logger.debug("Azure AI Project client initialized successfully");
16065
- return this.projectClient;
16146
+ return projectClient;
16066
16147
  } catch (error) {
16067
- logger.error(`Failed to initialize Azure AI Project client: ${error instanceof Error ? error.message : String(error)}`);
16068
- throw new Error(`Failed to initialize Azure AI Project client: ${error instanceof Error ? error.message : String(error)}`);
16148
+ const errorMessage = error instanceof Error ? error.message : String(error);
16149
+ logger.error(`Failed to initialize Azure AI Project client: ${errorMessage}`);
16150
+ throw new Error(`Failed to initialize Azure AI Project client: ${errorMessage}`);
16069
16151
  }
16070
16152
  }
16071
- /**
16072
- * Preloads all function callbacks to ensure they're ready when needed
16073
- */
16153
+ async resolveAgent(client) {
16154
+ if (this.resolvedAgent) return this.resolvedAgent;
16155
+ try {
16156
+ const agent = await client.agents.get(this.deploymentName);
16157
+ this.resolvedAgent = agent;
16158
+ return agent;
16159
+ } catch (error) {
16160
+ logger.debug(`[AzureFoundryAgentProvider] Direct agent lookup failed for '${this.deploymentName}', falling back to list lookup`, { error: error instanceof Error ? error.message : String(error) });
16161
+ }
16162
+ for await (const agent of client.agents.list()) if (agent.id === this.deploymentName || agent.name === this.deploymentName) {
16163
+ this.resolvedAgent = agent;
16164
+ return agent;
16165
+ }
16166
+ throw new Error(`Azure Foundry agent '${this.deploymentName}' was not found by name or legacy ID in project '${this.projectUrl}'. The Azure AI Projects v2 SDK resolves agents by name. Update the provider to use azure:foundry-agent:<agent-name>, or keep using the legacy ID format and ensure the agent still exists in this project.`);
16167
+ }
16074
16168
  async preloadFunctionCallbacks() {
16075
16169
  if (!this.assistantConfig.functionToolCallbacks) return;
16076
16170
  const callbacks = this.assistantConfig.functionToolCallbacks;
16077
16171
  for (const [name, callback] of Object.entries(callbacks)) try {
16078
- if (typeof callback === "string") {
16079
- const callbackStr = callback;
16080
- if (callbackStr.startsWith("file://")) {
16081
- const fn = await this.loadExternalFunction(callbackStr);
16082
- this.loadedFunctionCallbacks[name] = fn;
16083
- logger.debug(`Successfully preloaded function callback '${name}' from file`);
16084
- } else {
16085
- this.loadedFunctionCallbacks[name] = new Function("return " + callbackStr)();
16086
- logger.debug(`Successfully preloaded inline function callback '${name}'`);
16087
- }
16088
- } else if (typeof callback === "function") {
16089
- this.loadedFunctionCallbacks[name] = callback;
16090
- logger.debug(`Successfully stored function callback '${name}'`);
16091
- }
16172
+ if (typeof callback === "string") if (callback.startsWith("file://")) this.loadedFunctionCallbacks[name] = await this.loadExternalFunction(callback);
16173
+ else this.loadedFunctionCallbacks[name] = new Function("return " + callback)();
16174
+ else if (typeof callback === "function") this.loadedFunctionCallbacks[name] = callback;
16092
16175
  } catch (error) {
16093
16176
  logger.error(`Failed to preload function callback '${name}': ${error}`);
16094
16177
  }
16095
16178
  }
16096
- /**
16097
- * Loads a function from an external file
16098
- * @param fileRef The file reference in the format 'file://path/to/file:functionName'
16099
- * @returns The loaded function
16100
- */
16101
16179
  async loadExternalFunction(fileRef) {
16102
16180
  let filePath = fileRef.slice(7);
16103
16181
  let functionName;
@@ -16106,11 +16184,9 @@ var AzureFoundryAgentProvider = class extends AzureGenericProvider {
16106
16184
  if (splits[0] && isJavascriptFile(splits[0])) [filePath, functionName] = splits;
16107
16185
  }
16108
16186
  try {
16109
- const resolvedPath = path.resolve(state.basePath || "", filePath);
16110
- logger.debug(`Loading function from ${resolvedPath}${functionName ? `:${functionName}` : ""}`);
16111
- const requiredModule = await importModule(resolvedPath, functionName);
16187
+ const requiredModule = await importModule(path.resolve(state.basePath || "", filePath), functionName);
16112
16188
  if (typeof requiredModule === "function") return requiredModule;
16113
- else if (requiredModule && typeof requiredModule === "object" && functionName && functionName in requiredModule) {
16189
+ if (requiredModule && typeof requiredModule === "object" && functionName && functionName in requiredModule) {
16114
16190
  const fn = requiredModule[functionName];
16115
16191
  if (typeof fn === "function") return fn;
16116
16192
  }
@@ -16119,139 +16195,197 @@ var AzureFoundryAgentProvider = class extends AzureGenericProvider {
16119
16195
  throw new Error(`Error loading function from ${filePath}: ${error.message || String(error)}`);
16120
16196
  }
16121
16197
  }
16122
- /**
16123
- * Executes a function callback with proper error handling
16124
- */
16125
- async executeFunctionCallback(functionName, args, context) {
16198
+ async executeFunctionCallback(functionName, args, context, callbacks) {
16126
16199
  try {
16127
16200
  let callback = this.loadedFunctionCallbacks[functionName];
16201
+ const effectiveCallbacks = callbacks || this.assistantConfig.functionToolCallbacks;
16128
16202
  if (!callback) {
16129
- const callbackRef = this.assistantConfig.functionToolCallbacks?.[functionName];
16130
- if (callbackRef && typeof callbackRef === "string") {
16131
- const callbackStr = callbackRef;
16132
- if (callbackStr.startsWith("file://")) callback = await this.loadExternalFunction(callbackStr);
16133
- else callback = new Function("return " + callbackStr)();
16134
- this.loadedFunctionCallbacks[functionName] = callback;
16135
- } else if (typeof callbackRef === "function") {
16136
- callback = callbackRef;
16137
- this.loadedFunctionCallbacks[functionName] = callback;
16138
- }
16203
+ const callbackRef = effectiveCallbacks?.[functionName];
16204
+ if (callbackRef && typeof callbackRef === "string") if (callbackRef.startsWith("file://")) callback = await this.loadExternalFunction(callbackRef);
16205
+ else callback = new Function("return " + callbackRef)();
16206
+ else if (typeof callbackRef === "function") callback = callbackRef;
16207
+ if (callback) this.loadedFunctionCallbacks[functionName] = callback;
16139
16208
  }
16140
16209
  if (!callback) throw new Error(`No callback found for function '${functionName}'`);
16141
- logger.debug(`Executing function '${functionName}' with args: ${args}${context ? ` and context: ${JSON.stringify(context)}` : ""}`);
16142
16210
  const result = await callback(args, context);
16143
16211
  if (result === void 0 || result === null) return "";
16144
- else if (typeof result === "object") try {
16145
- return JSON.stringify(result);
16146
- } catch (error) {
16147
- logger.warn(`Error stringifying result from function '${functionName}': ${error}`);
16148
- return String(result);
16149
- }
16150
- else return String(result);
16212
+ if (typeof result === "object") return JSON.stringify(result);
16213
+ return String(result);
16151
16214
  } catch (error) {
16152
16215
  logger.error(`Error executing function '${functionName}': ${error.message || String(error)}`);
16153
16216
  return JSON.stringify({ error: `Error in ${functionName}: ${error.message || String(error)}` });
16154
16217
  }
16155
16218
  }
16219
+ parsePromptInput(prompt) {
16220
+ try {
16221
+ const parsedJson = JSON.parse(prompt);
16222
+ if (Array.isArray(parsedJson)) return parsedJson;
16223
+ } catch {}
16224
+ return [{
16225
+ type: "message",
16226
+ role: "user",
16227
+ content: prompt
16228
+ }];
16229
+ }
16230
+ warnForUnsupportedConfig(config) {
16231
+ const unsupportedFields = [
16232
+ config.frequency_penalty === void 0 ? null : "frequency_penalty",
16233
+ config.presence_penalty === void 0 ? null : "presence_penalty",
16234
+ config.retryOptions ? "retryOptions" : null,
16235
+ config.seed === void 0 ? null : "seed",
16236
+ config.stop?.length ? "stop" : null,
16237
+ config.timeoutMs === void 0 ? null : "timeoutMs",
16238
+ config.tool_resources ? "tool_resources" : null
16239
+ ].filter(Boolean);
16240
+ if (unsupportedFields.length === 0) return;
16241
+ const warningKey = unsupportedFields.sort().join(",");
16242
+ if (this.warnedUnsupportedFields.has(warningKey)) return;
16243
+ this.warnedUnsupportedFields.add(warningKey);
16244
+ logger.warn(`[AzureFoundryAgentProvider] The Azure AI Projects v2 agent runtime ignores these per-request settings: ${unsupportedFields.join(", ")}. Configure them on the agent itself, or pass supported Responses API fields instead.`);
16245
+ }
16246
+ async buildResponsesBody(prompt, context) {
16247
+ const config = {
16248
+ ...this.assistantConfig,
16249
+ ...context?.prompt?.config
16250
+ };
16251
+ this.warnForUnsupportedConfig(config);
16252
+ const responseFormat = maybeLoadResponseFormatFromExternalFile(config.response_format, context?.vars);
16253
+ const loadedTools = config.tools ? await maybeLoadToolsFromExternalFile(config.tools, context?.vars) : void 0;
16254
+ const reasoningEffort = config.reasoning_effort ? renderVarsInObject(config.reasoning_effort, context?.vars) : void 0;
16255
+ const maxOutputTokens = config.max_output_tokens ?? config.max_completion_tokens ?? config.max_tokens;
16256
+ let text;
16257
+ if (responseFormat?.type === "json_object") text = { format: { type: "json_object" } };
16258
+ else if (responseFormat?.type === "json_schema") {
16259
+ const schema = responseFormat.schema || responseFormat.json_schema?.schema;
16260
+ text = { format: {
16261
+ type: "json_schema",
16262
+ name: responseFormat.json_schema?.name || responseFormat.name || "response_schema",
16263
+ schema,
16264
+ strict: responseFormat.json_schema?.strict ?? responseFormat.strict ?? true
16265
+ } };
16266
+ }
16267
+ if (config.verbosity) text = {
16268
+ ...text || {},
16269
+ verbosity: config.verbosity
16270
+ };
16271
+ return {
16272
+ body: {
16273
+ input: this.parsePromptInput(prompt),
16274
+ ...config.instructions ? { instructions: config.instructions } : {},
16275
+ ...config.metadata ? { metadata: config.metadata } : {},
16276
+ ...config.modelName ? { model: config.modelName } : {},
16277
+ ...maxOutputTokens === void 0 ? {} : { max_output_tokens: maxOutputTokens },
16278
+ ...reasoningEffort ? { reasoning: { effort: reasoningEffort } } : {},
16279
+ ...config.temperature === void 0 ? {} : { temperature: config.temperature },
16280
+ ...config.top_p === void 0 ? {} : { top_p: config.top_p },
16281
+ ...config.tool_choice ? { tool_choice: config.tool_choice } : {},
16282
+ ...loadedTools ? { tools: loadedTools } : {},
16283
+ ...text ? { text } : {},
16284
+ ...config.passthrough || {}
16285
+ },
16286
+ effectiveConfig: {
16287
+ ...config,
16288
+ response_format: responseFormat,
16289
+ tools: loadedTools
16290
+ }
16291
+ };
16292
+ }
16293
+ getFunctionCalls(response) {
16294
+ return (response.output || []).filter((item) => {
16295
+ return item?.type === "function_call" && typeof item.id === "string" && typeof item.call_id === "string" && typeof item.name === "string" && typeof item.arguments === "string";
16296
+ });
16297
+ }
16298
+ getCallableFunctionCalls(response, callbacks) {
16299
+ const functionCalls = this.getFunctionCalls(response);
16300
+ if (functionCalls.length === 0 || !callbacks || Object.keys(callbacks).length === 0) return [];
16301
+ const missingCallbacks = functionCalls.filter((call) => !(call.name in callbacks));
16302
+ if (missingCallbacks.length > 0) {
16303
+ logger.debug(`[AzureFoundryAgentProvider] Returning unresolved function calls because callbacks are missing for: ${missingCallbacks.map((call) => call.name).join(", ")}`);
16304
+ return [];
16305
+ }
16306
+ return functionCalls;
16307
+ }
16308
+ async buildFunctionCallOutputs(functionCalls, response, agent, callbacks) {
16309
+ const callbackContext = {
16310
+ threadId: response.conversation?.id || response.id,
16311
+ runId: response.id,
16312
+ assistantId: agent.id,
16313
+ provider: "azure-foundry"
16314
+ };
16315
+ return Promise.all(functionCalls.map(async (call) => ({
16316
+ type: "function_call_output",
16317
+ call_id: call.call_id,
16318
+ output: await this.executeFunctionCallback(call.name, call.arguments, callbackContext, callbacks)
16319
+ })));
16320
+ }
16321
+ getAgentReference(agent) {
16322
+ return { body: { agent: {
16323
+ name: agent.name,
16324
+ type: "agent_reference"
16325
+ } } };
16326
+ }
16327
+ async processResponse(response, effectiveConfig) {
16328
+ const result = await this.processor.processResponseOutput(response, effectiveConfig, false);
16329
+ if (!result.error) return result;
16330
+ if (response.output_text) {
16331
+ logger.debug(`[AzureFoundryAgentProvider] ResponsesProcessor returned an error, falling back to output_text`, { processorError: result.error });
16332
+ return {
16333
+ ...result,
16334
+ error: void 0,
16335
+ output: response.output_text,
16336
+ raw: response
16337
+ };
16338
+ }
16339
+ return result;
16340
+ }
16156
16341
  async callApi(prompt, context, _callApiOptions) {
16157
- const cacheKey = `azure_foundry_agent:${this.deploymentName}:${JSON.stringify({
16158
- frequency_penalty: this.assistantConfig.frequency_penalty,
16159
- instructions: this.assistantConfig.instructions,
16160
- max_completion_tokens: this.assistantConfig.max_completion_tokens,
16161
- max_tokens: this.assistantConfig.max_tokens,
16162
- model: this.assistantConfig.modelName,
16163
- presence_penalty: this.assistantConfig.presence_penalty,
16164
- prompt,
16165
- response_format: this.assistantConfig.response_format,
16166
- seed: this.assistantConfig.seed,
16167
- stop: this.assistantConfig.stop,
16168
- temperature: this.assistantConfig.temperature,
16169
- tool_choice: this.assistantConfig.tool_choice,
16170
- tool_resources: this.assistantConfig.tool_resources,
16171
- tools: JSON.stringify(await maybeLoadToolsFromExternalFile(this.assistantConfig.tools, context?.vars)),
16172
- top_p: this.assistantConfig.top_p
16173
- })}`;
16342
+ const { body, effectiveConfig } = await this.buildResponsesBody(prompt, context);
16343
+ const cacheKey = `azure_foundry_agent:${this.deploymentName}:${JSON.stringify(body)}`;
16174
16344
  if (isCacheEnabled()) try {
16175
16345
  const cachedResult = await (await getCache()).get(cacheKey);
16176
16346
  if (cachedResult) {
16177
- logger.debug(`Cache hit for agent prompt: ${prompt.substring(0, 50)}...`);
16347
+ logger.debug(`Cache hit for Foundry agent prompt: ${prompt.substring(0, 50)}...`);
16178
16348
  return {
16179
16349
  ...cachedResult,
16180
16350
  cached: true
16181
16351
  };
16182
16352
  }
16183
- } catch (err) {
16184
- logger.warn(`Error checking cache: ${err}`);
16353
+ } catch (error) {
16354
+ logger.warn(`Error checking cache for Azure Foundry agent response: ${error}`);
16185
16355
  }
16186
16356
  try {
16187
16357
  const client = await this.initializeClient();
16188
- if (!client) throw new Error("Failed to initialize Azure AI Project client");
16189
- const agent = await client.agents.getAgent(this.deploymentName);
16190
- logger.debug(`Retrieved agent: ${agent.name}`);
16191
- const thread = await client.agents.threads.create();
16192
- logger.debug(`Created thread: ${thread.id}`);
16193
- const message = await client.agents.messages.create(thread.id, "user", prompt);
16194
- logger.debug(`Created message: ${message.id}`);
16195
- const runOptions = {};
16196
- if (this.assistantConfig.temperature !== void 0) runOptions.temperature = this.assistantConfig.temperature;
16197
- if (this.assistantConfig.top_p !== void 0) runOptions.top_p = this.assistantConfig.top_p;
16198
- if (this.assistantConfig.frequency_penalty !== void 0) runOptions.frequency_penalty = this.assistantConfig.frequency_penalty;
16199
- if (this.assistantConfig.presence_penalty !== void 0) runOptions.presence_penalty = this.assistantConfig.presence_penalty;
16200
- if (this.assistantConfig.max_completion_tokens !== void 0) runOptions.max_completion_tokens = this.assistantConfig.max_completion_tokens;
16201
- if (this.assistantConfig.max_tokens !== void 0) runOptions.max_tokens = this.assistantConfig.max_tokens;
16202
- if (this.assistantConfig.response_format) runOptions.response_format = this.assistantConfig.response_format;
16203
- if (this.assistantConfig.stop) runOptions.stop = this.assistantConfig.stop;
16204
- if (this.assistantConfig.seed !== void 0) runOptions.seed = this.assistantConfig.seed;
16205
- if (this.assistantConfig.tool_resources) runOptions.tool_resources = this.assistantConfig.tool_resources;
16206
- if (this.assistantConfig.tool_choice) runOptions.tool_choice = this.assistantConfig.tool_choice;
16207
- if (this.assistantConfig.tools) {
16208
- const loadedTools = await maybeLoadToolsFromExternalFile(this.assistantConfig.tools, context?.vars);
16209
- if (loadedTools !== void 0) runOptions.tools = loadedTools;
16210
- }
16211
- if (this.assistantConfig.modelName) runOptions.model = this.assistantConfig.modelName;
16212
- if (this.assistantConfig.instructions) runOptions.instructions = this.assistantConfig.instructions;
16213
- const run = await client.agents.runs.create(thread.id, agent.id, runOptions);
16214
- logger.debug(`Created run: ${run.id}`);
16215
- let result;
16216
- if (this.assistantConfig.functionToolCallbacks && Object.keys(this.assistantConfig.functionToolCallbacks).length > 0) result = await this.pollRunWithToolCallHandling(client, thread.id, run);
16217
- else {
16218
- const completedRun = await this.pollRun(client, thread.id, run.id);
16219
- if (completedRun.status === "completed") result = await this.processCompletedRun(client, thread.id, completedRun);
16220
- else if (completedRun.lastError) {
16221
- const errorCode = completedRun.lastError.code || "";
16222
- const errorMessage = completedRun.lastError.message || "";
16223
- if (errorCode === "content_filter" || this.isContentFilterError(errorMessage)) {
16224
- const lowerErrorMessage = errorMessage.toLowerCase();
16225
- const isInputFiltered = lowerErrorMessage.includes("prompt") || lowerErrorMessage.includes("input");
16226
- const isOutputFiltered = lowerErrorMessage.includes("output") || lowerErrorMessage.includes("response");
16227
- result = {
16228
- output: "The generated content was filtered due to triggering Azure OpenAI Service's content filtering system.",
16229
- guardrails: {
16230
- flagged: true,
16231
- flaggedInput: isInputFiltered,
16232
- flaggedOutput: !isInputFiltered && (isOutputFiltered || !isOutputFiltered)
16233
- }
16234
- };
16235
- } else result = { error: `Thread run failed: ${errorCode} - ${errorMessage}` };
16236
- } else result = { error: `Thread run failed with status: ${completedRun.status}` };
16237
- }
16358
+ const agent = await this.resolveAgent(client);
16359
+ const openAIClient = client.getOpenAIClient();
16360
+ const responseOptions = this.getAgentReference(agent);
16361
+ const maxLoopTimeMs = this.assistantConfig.maxPollTimeMs || 3e5;
16362
+ const startTime = Date.now();
16363
+ let response = await openAIClient.responses.create(body, responseOptions);
16364
+ while (Date.now() - startTime <= maxLoopTimeMs) {
16365
+ const functionCalls = this.getCallableFunctionCalls(response, effectiveConfig.functionToolCallbacks);
16366
+ if (functionCalls.length === 0) break;
16367
+ const outputs = await this.buildFunctionCallOutputs(functionCalls, response, agent, effectiveConfig.functionToolCallbacks);
16368
+ logger.debug(`[AzureFoundryAgentProvider] Submitting ${outputs.length} function_call_output item(s)`);
16369
+ response = await openAIClient.responses.create({
16370
+ input: outputs,
16371
+ previous_response_id: response.id
16372
+ }, responseOptions);
16373
+ }
16374
+ if (Date.now() - startTime > maxLoopTimeMs) return { error: `Azure Foundry agent tool-calling loop timed out after ${maxLoopTimeMs}ms.` };
16375
+ const result = await this.processResponse(response, effectiveConfig);
16238
16376
  if (isCacheEnabled() && !result.error) try {
16239
16377
  await (await getCache()).set(cacheKey, result);
16240
- logger.debug(`Cached agent response for prompt: ${prompt.substring(0, 50)}...`);
16241
- } catch (err) {
16242
- logger.warn(`Error caching result: ${err}`);
16378
+ } catch (error) {
16379
+ logger.warn(`Error caching Azure Foundry agent response: ${error}`);
16243
16380
  }
16244
16381
  return result;
16245
- } catch (err) {
16246
- logger.error(`Error in Azure Foundry Agent API call: ${err}`);
16247
- return this.formatError(err);
16382
+ } catch (error) {
16383
+ logger.error(`Error in Azure Foundry Agent API call: ${error}`);
16384
+ return this.formatError(error);
16248
16385
  }
16249
16386
  }
16250
- /**
16251
- * Format error responses consistently
16252
- */
16253
- formatError(err) {
16254
- const errorMessage = err.message || String(err);
16387
+ formatError(error) {
16388
+ const errorMessage = error instanceof Error ? error.message : String(error);
16255
16389
  if (this.isContentFilterError(errorMessage)) {
16256
16390
  const lowerErrorMessage = errorMessage.toLowerCase();
16257
16391
  const isInputFiltered = lowerErrorMessage.includes("prompt") || lowerErrorMessage.includes("input");
@@ -16265,14 +16399,10 @@ var AzureFoundryAgentProvider = class extends AzureGenericProvider {
16265
16399
  }
16266
16400
  };
16267
16401
  }
16268
- if (errorMessage.includes("Can't add messages to thread") && errorMessage.includes("while a run")) return { error: `Error in Azure Foundry Agent API call: ${errorMessage}` };
16269
16402
  if (this.isRateLimitError(errorMessage)) return { error: `Rate limit exceeded: ${errorMessage}` };
16270
16403
  if (this.isServiceError(errorMessage)) return { error: `Service error: ${errorMessage}` };
16271
16404
  return { error: `Error in Azure Foundry Agent API call: ${errorMessage}` };
16272
16405
  }
16273
- /**
16274
- * Helper methods to check for specific error types
16275
- */
16276
16406
  isContentFilterError(errorMessage) {
16277
16407
  const lowerErrorMessage = errorMessage.toLowerCase();
16278
16408
  return lowerErrorMessage.includes("content_filter") || lowerErrorMessage.includes("content filter") || lowerErrorMessage.includes("filtered due to") || lowerErrorMessage.includes("content filtering") || lowerErrorMessage.includes("inappropriate content") || lowerErrorMessage.includes("safety guidelines") || lowerErrorMessage.includes("guardrail");
@@ -16283,160 +16413,6 @@ var AzureFoundryAgentProvider = class extends AzureGenericProvider {
16283
16413
  isServiceError(errorMessage) {
16284
16414
  return errorMessage.includes("Service unavailable") || errorMessage.includes("Bad gateway") || errorMessage.includes("Gateway timeout") || errorMessage.includes("Server is busy") || errorMessage.includes("Sorry, something went wrong");
16285
16415
  }
16286
- isServerError(errorMessage) {
16287
- return errorMessage.includes("500") || errorMessage.includes("502") || errorMessage.includes("503") || errorMessage.includes("504");
16288
- }
16289
- isRetryableError(code, message) {
16290
- if (code === "rate_limit_exceeded") return true;
16291
- if (!message) return false;
16292
- return this.isRateLimitError(message) || this.isServiceError(message) || this.isServerError(message);
16293
- }
16294
- /**
16295
- * Poll a run until it completes or fails
16296
- */
16297
- async pollRun(client, threadId, runId, pollIntervalMs = 1e3) {
16298
- const maxPollTime = this.assistantConfig.maxPollTimeMs || 3e5;
16299
- const startTime = Date.now();
16300
- let run = await client.agents.runs.get(threadId, runId);
16301
- while (["queued", "in_progress"].includes(run.status)) {
16302
- if (Date.now() - startTime > maxPollTime) throw new Error(`Run polling timed out after ${maxPollTime}ms. Last status: ${run.status}`);
16303
- await sleep(pollIntervalMs);
16304
- run = await client.agents.runs.get(threadId, runId);
16305
- if (Date.now() - startTime > 3e4) pollIntervalMs = Math.min(pollIntervalMs * 1.5, 5e3);
16306
- }
16307
- return run;
16308
- }
16309
- /**
16310
- * Handle tool calls during run polling
16311
- */
16312
- async pollRunWithToolCallHandling(client, threadId, initialRun) {
16313
- const maxPollTime = this.assistantConfig.maxPollTimeMs || 3e5;
16314
- const startTime = Date.now();
16315
- let pollIntervalMs = 1e3;
16316
- let run = initialRun;
16317
- while (true) {
16318
- if (Date.now() - startTime > maxPollTime) return { error: `Run polling timed out after ${maxPollTime}ms. The operation may still be in progress.` };
16319
- try {
16320
- run = await client.agents.runs.get(threadId, run.id);
16321
- logger.debug(`Run status: ${run.status}`);
16322
- if (run.status === "requires_action") if (run.requiredAction?.type === "submit_tool_outputs" && run.requiredAction.submitToolOutputs?.toolCalls) {
16323
- const toolCalls = run.requiredAction.submitToolOutputs.toolCalls;
16324
- const functionCallsWithCallbacks = toolCalls.filter((toolCall) => {
16325
- return toolCall.type === "function" && toolCall.function && toolCall.function.name in (this.assistantConfig.functionToolCallbacks ?? {});
16326
- });
16327
- if (functionCallsWithCallbacks.length === 0) {
16328
- logger.debug(`No matching callbacks found for tool calls. Available functions: ${Object.keys(this.assistantConfig.functionToolCallbacks || {}).join(", ")}. Tool calls: ${JSON.stringify(toolCalls)}`);
16329
- const emptyOutputs = toolCalls.map((toolCall) => ({
16330
- toolCallId: toolCall.id,
16331
- output: JSON.stringify({ message: `No callback registered for function ${toolCall.type === "function" ? toolCall.function?.name : toolCall.type}` })
16332
- }));
16333
- try {
16334
- await client.agents.runs.submitToolOutputs(threadId, run.id, emptyOutputs);
16335
- await sleep(pollIntervalMs);
16336
- continue;
16337
- } catch (error) {
16338
- logger.error(`Error submitting empty tool outputs: ${error.message}`);
16339
- return { error: `Error submitting empty tool outputs: ${error.message}` };
16340
- }
16341
- }
16342
- const callbackContext = {
16343
- threadId,
16344
- runId: run.id,
16345
- assistantId: this.deploymentName,
16346
- provider: "azure-foundry"
16347
- };
16348
- const toolOutputs = await Promise.all(functionCallsWithCallbacks.map(async (toolCall) => {
16349
- const functionName = toolCall.function.name;
16350
- const functionArgs = toolCall.function.arguments;
16351
- try {
16352
- logger.debug(`Calling function ${functionName} with args: ${functionArgs}`);
16353
- const outputResult = await this.executeFunctionCallback(functionName, functionArgs, callbackContext);
16354
- logger.debug(`Function ${functionName} result: ${outputResult}`);
16355
- return {
16356
- toolCallId: toolCall.id,
16357
- output: outputResult
16358
- };
16359
- } catch (error) {
16360
- logger.error(`Error calling function ${functionName}: ${error}`);
16361
- return {
16362
- toolCallId: toolCall.id,
16363
- output: JSON.stringify({ error: String(error) })
16364
- };
16365
- }
16366
- }));
16367
- if (toolOutputs.length === 0) {
16368
- logger.error("No valid tool outputs to submit");
16369
- break;
16370
- }
16371
- logger.debug(`Submitting tool outputs: ${JSON.stringify(toolOutputs)}`);
16372
- try {
16373
- await client.agents.runs.submitToolOutputs(threadId, run.id, toolOutputs);
16374
- } catch (error) {
16375
- logger.error(`Error submitting tool outputs: ${error.message}`);
16376
- return { error: `Error submitting tool outputs: ${error.message}` };
16377
- }
16378
- } else {
16379
- logger.error(`Unknown required action type: ${run.requiredAction?.type}`);
16380
- break;
16381
- }
16382
- else if ([
16383
- "completed",
16384
- "failed",
16385
- "cancelled",
16386
- "expired"
16387
- ].includes(run.status)) {
16388
- if (run.status !== "completed") {
16389
- if (run.lastError) {
16390
- const errorCode = run.lastError.code || "";
16391
- const errorMessage = run.lastError.message || "";
16392
- if (errorCode === "content_filter" || this.isContentFilterError(errorMessage)) {
16393
- const lowerErrorMessage = errorMessage.toLowerCase();
16394
- const isInputFiltered = lowerErrorMessage.includes("prompt") || lowerErrorMessage.includes("input");
16395
- const isOutputFiltered = lowerErrorMessage.includes("output") || lowerErrorMessage.includes("response");
16396
- return {
16397
- output: "The generated content was filtered due to triggering Azure OpenAI Service's content filtering system.",
16398
- guardrails: {
16399
- flagged: true,
16400
- flaggedInput: isInputFiltered,
16401
- flaggedOutput: !isInputFiltered && (isOutputFiltered || !isOutputFiltered)
16402
- }
16403
- };
16404
- }
16405
- return { error: `Thread run failed: ${errorCode} - ${errorMessage}` };
16406
- }
16407
- return { error: `Thread run failed with status: ${run.status}` };
16408
- }
16409
- break;
16410
- }
16411
- await sleep(pollIntervalMs);
16412
- if (Date.now() - startTime > 3e4) pollIntervalMs = Math.min(pollIntervalMs * 1.5, 5e3);
16413
- } catch (error) {
16414
- logger.error(`Error polling run status: ${error}`);
16415
- const errorMessage = error.message || String(error);
16416
- if (this.isRetryableError("", errorMessage)) return { error: `Error polling run status: ${errorMessage}` };
16417
- return { error: `Error polling run status: ${errorMessage}` };
16418
- }
16419
- }
16420
- return await this.processCompletedRun(client, threadId, run);
16421
- }
16422
- /**
16423
- * Process a completed run to extract messages
16424
- */
16425
- async processCompletedRun(client, threadId, _run) {
16426
- try {
16427
- const messages = [];
16428
- for await (const message of client.agents.messages.list(threadId, { order: "asc" })) messages.push(message);
16429
- const outputBlocks = [];
16430
- messages.forEach((message) => {
16431
- const contentBlocks = message.content.map((content) => content.type === "text" && content.text ? content.text.value : `<${content.type} output>`).join("\n");
16432
- outputBlocks.push(`[${toTitleCase(message.role)}] ${contentBlocks}`);
16433
- });
16434
- return { output: outputBlocks.join("\n\n").trim() };
16435
- } catch (err) {
16436
- logger.error(`Error processing run results: ${err}`);
16437
- return { error: `Error processing run results: ${err.message || String(err)}` };
16438
- }
16439
- }
16440
16416
  };
16441
16417
  //#endregion
16442
16418
  //#region src/providers/azure/responses.ts
@@ -16506,9 +16482,9 @@ var AzureResponsesProvider = class extends AzureGenericProvider {
16506
16482
  const body = {
16507
16483
  model: this.deploymentName,
16508
16484
  input,
16509
- ...maxOutputTokens !== void 0 ? { max_output_tokens: maxOutputTokens } : {},
16485
+ ...maxOutputTokens === void 0 ? {} : { max_output_tokens: maxOutputTokens },
16510
16486
  ...reasoningEffort ? { reasoning: { effort: reasoningEffort } } : {},
16511
- ...temperature !== void 0 ? { temperature } : {},
16487
+ ...temperature === void 0 ? {} : { temperature },
16512
16488
  ...instructions ? { instructions } : {},
16513
16489
  ...config.top_p !== void 0 || getEnvString("OPENAI_TOP_P") ? { top_p: config.top_p ?? getEnvFloat("OPENAI_TOP_P", 1) } : {},
16514
16490
  ...config.tools ? { tools: await maybeLoadToolsFromExternalFile(config.tools, context?.vars) } : {},
@@ -16539,7 +16515,7 @@ var AzureResponsesProvider = class extends AzureGenericProvider {
16539
16515
  }
16540
16516
  const body = await this.getAzureResponsesBody(prompt, context, callApiOptions);
16541
16517
  const isDeepResearchModel = this.deploymentName.includes("deep-research");
16542
- let timeout = REQUEST_TIMEOUT_MS;
16518
+ let timeout = REQUEST_TIMEOUT_MS$1;
16543
16519
  if (isDeepResearchModel) {
16544
16520
  const evalTimeout = getEnvInt("PROMPTFOO_EVAL_TIMEOUT_MS", 0);
16545
16521
  timeout = evalTimeout > 0 ? evalTimeout : LONG_RUNNING_MODEL_TIMEOUT_MS;
@@ -17543,9 +17519,9 @@ var AwsBedrockConverseProvider = class extends AwsBedrockGenericProvider {
17543
17519
  const temperature = reasoningEnabled ? void 0 : temperatureValue;
17544
17520
  const topP = reasoningEnabled ? void 0 : topPValue;
17545
17521
  if (maxTokens !== void 0 || temperature !== void 0 || topP !== void 0 || stopSequences) return {
17546
- ...maxTokens !== void 0 ? { maxTokens } : {},
17547
- ...temperature !== void 0 ? { temperature } : {},
17548
- ...topP !== void 0 ? { topP } : {},
17522
+ ...maxTokens === void 0 ? {} : { maxTokens },
17523
+ ...temperature === void 0 ? {} : { temperature },
17524
+ ...topP === void 0 ? {} : { topP },
17549
17525
  ...stopSequences ? { stopSequences } : {}
17550
17526
  };
17551
17527
  }
@@ -17765,7 +17741,7 @@ var AwsBedrockConverseProvider = class extends AwsBedrockGenericProvider {
17765
17741
  if (hasSuccessfulCallback && results.length > 0) return {
17766
17742
  output: results.join("\n"),
17767
17743
  tokenUsage,
17768
- ...cost !== void 0 ? { cost } : {},
17744
+ ...cost === void 0 ? {} : { cost },
17769
17745
  ...Object.keys(metadata).length > 0 ? { metadata } : {},
17770
17746
  ...guardrails ? { guardrails } : {},
17771
17747
  ...malformedError ? { error: malformedError } : {}
@@ -17775,7 +17751,7 @@ var AwsBedrockConverseProvider = class extends AwsBedrockGenericProvider {
17775
17751
  return {
17776
17752
  output: extractTextFromContentBlocks(content, showThinking),
17777
17753
  tokenUsage,
17778
- ...cost !== void 0 ? { cost } : {},
17754
+ ...cost === void 0 ? {} : { cost },
17779
17755
  ...Object.keys(metadata).length > 0 ? { metadata } : {},
17780
17756
  ...guardrails ? { guardrails } : {},
17781
17757
  ...malformedError ? { error: malformedError } : {}
@@ -17889,7 +17865,7 @@ var AwsBedrockConverseProvider = class extends AwsBedrockGenericProvider {
17889
17865
  return {
17890
17866
  output: finalOutput,
17891
17867
  tokenUsage,
17892
- ...cost !== void 0 ? { cost } : {},
17868
+ ...cost === void 0 ? {} : { cost },
17893
17869
  ...Object.keys(metadata).length > 0 ? { metadata } : {},
17894
17870
  ...malformedError ? { error: malformedError } : {}
17895
17871
  };
@@ -19553,7 +19529,7 @@ var CohereChatCompletionProvider = class CohereChatCompletionProvider {
19553
19529
  "X-Client-Name": getEnvString("COHERE_CLIENT_NAME") || "promptfoo"
19554
19530
  },
19555
19531
  body: JSON.stringify(body)
19556
- }, REQUEST_TIMEOUT_MS));
19532
+ }, REQUEST_TIMEOUT_MS$1));
19557
19533
  if (data.message) return { error: data.message };
19558
19534
  const tokenUsage = {
19559
19535
  cached: cached ? data.token_count?.total_tokens || 0 : 0,
@@ -19615,7 +19591,7 @@ var CohereEmbeddingProvider = class {
19615
19591
  "X-Client-Name": getEnvString("COHERE_CLIENT_NAME") || "promptfoo"
19616
19592
  },
19617
19593
  body: JSON.stringify(body)
19618
- }, REQUEST_TIMEOUT_MS));
19594
+ }, REQUEST_TIMEOUT_MS$1));
19619
19595
  } catch (err) {
19620
19596
  logger.error(`API call error: ${err}`);
19621
19597
  throw err;
@@ -20524,7 +20500,7 @@ var ElevenLabsAgentsProvider = class {
20524
20500
  promptLength: prompt.length
20525
20501
  });
20526
20502
  const simulationRequest = buildSimulationRequest(parseConversation(prompt, context), this.config.simulatedUser, this.config.evaluationCriteria, this.config.toolMockConfig);
20527
- simulationRequest.new_turns_limit = this.config.maxTurns || 10;
20503
+ simulationRequest.new_turns_limit = this.config.maxTurns ?? 10;
20528
20504
  logger.debug("[ElevenLabs Agents] Request payload", {
20529
20505
  endpoint: `/convai/agents/${agentId}/simulate-conversation`,
20530
20506
  payload: simulationRequest
@@ -20651,7 +20627,7 @@ var ElevenLabsAgentsProvider = class {
20651
20627
  simulatedUser: config?.simulatedUser,
20652
20628
  evaluationCriteria: config?.evaluationCriteria,
20653
20629
  toolMockConfig: config?.toolMockConfig,
20654
- maxTurns: config?.maxTurns || 10,
20630
+ maxTurns: config?.maxTurns ?? 10,
20655
20631
  label: options.label || options.id
20656
20632
  };
20657
20633
  }
@@ -22522,7 +22498,7 @@ var GeminiImageProvider = class {
22522
22498
  headers,
22523
22499
  body: JSON.stringify(body),
22524
22500
  ...authDiscriminator && { _authHash: authDiscriminator }
22525
- }, REQUEST_TIMEOUT_MS, "json", false);
22501
+ }, REQUEST_TIMEOUT_MS$1, "json", false);
22526
22502
  const latencyMs = Date.now() - startTime;
22527
22503
  return this.processResponse(data, cached, latencyMs);
22528
22504
  } catch (err) {
@@ -22550,7 +22526,7 @@ var GeminiImageProvider = class {
22550
22526
  ...this.config.headers || {}
22551
22527
  },
22552
22528
  data: body,
22553
- timeout: REQUEST_TIMEOUT_MS
22529
+ timeout: REQUEST_TIMEOUT_MS$1
22554
22530
  });
22555
22531
  const latencyMs = Date.now() - startTime;
22556
22532
  return this.processResponse(response.data, false, latencyMs);
@@ -22711,7 +22687,7 @@ var GoogleImageProvider = class {
22711
22687
  ...this.config.headers || {}
22712
22688
  },
22713
22689
  data: body,
22714
- timeout: REQUEST_TIMEOUT_MS
22690
+ timeout: REQUEST_TIMEOUT_MS$1
22715
22691
  }), "Vertex AI API call");
22716
22692
  const latencyMs = Date.now() - startTime;
22717
22693
  return this.processResponse(response.data, false, latencyMs);
@@ -22748,7 +22724,7 @@ var GoogleImageProvider = class {
22748
22724
  headers,
22749
22725
  body: JSON.stringify(body),
22750
22726
  ...authDiscriminator && { _authHash: authDiscriminator }
22751
- }, REQUEST_TIMEOUT_MS, "json"), "Google AI Studio API call");
22727
+ }, REQUEST_TIMEOUT_MS$1, "json"), "Google AI Studio API call");
22752
22728
  return this.processResponse(response.data, response.cached, response.latencyMs);
22753
22729
  } catch (err) {
22754
22730
  return { error: `API call error: ${String(err)}` };
@@ -23296,6 +23272,8 @@ const DEFAULT_RESOLUTION$1 = "720p";
23296
23272
  const DEFAULT_DURATION$1 = 8;
23297
23273
  const DEFAULT_POLL_INTERVAL_MS$1 = 1e4;
23298
23274
  const DEFAULT_MAX_POLL_TIME_MS$1 = 6e5;
23275
+ const REQUEST_TIMEOUT_MS = 3e5;
23276
+ const AI_STUDIO_BASE_URL = "https://generativelanguage.googleapis.com/v1beta";
23299
23277
  function validateAspectRatio$1(ratio) {
23300
23278
  if (!["16:9", "9:16"].includes(ratio)) return {
23301
23279
  valid: false,
@@ -23347,6 +23325,13 @@ var GoogleVideoProvider = class {
23347
23325
  async getProjectId() {
23348
23326
  return await resolveProjectId(this.config, this.env);
23349
23327
  }
23328
+ isVertexMode(config = this.config) {
23329
+ return determineGoogleVertexMode(config, this.env);
23330
+ }
23331
+ getApiKey(config = this.config) {
23332
+ const { apiKey } = getGoogleApiKey(config, this.env, this.isVertexMode(config));
23333
+ return apiKey;
23334
+ }
23350
23335
  async getClientWithCredentials() {
23351
23336
  const { client } = await getGoogleClient({ credentials: loadCredentials(this.config.credentials) });
23352
23337
  return client;
@@ -23355,6 +23340,17 @@ var GoogleVideoProvider = class {
23355
23340
  const location = this.getLocation();
23356
23341
  return `https://${location}-aiplatform.googleapis.com/v1/projects/${await this.getProjectId()}/locations/${location}/publishers/google/models/${this.modelName}:${action}`;
23357
23342
  }
23343
+ getAiStudioEndpoint(pathSuffix) {
23344
+ return `${AI_STUDIO_BASE_URL}/${pathSuffix}`;
23345
+ }
23346
+ async getAiStudioHeaders(config) {
23347
+ const apiKey = this.getApiKey(config);
23348
+ if (!apiKey) throw new Error("Google API key is not set. Set GOOGLE_API_KEY or GEMINI_API_KEY, or add `apiKey` to the provider config.");
23349
+ return {
23350
+ "Content-Type": "application/json",
23351
+ "x-goog-api-key": apiKey
23352
+ };
23353
+ }
23358
23354
  /**
23359
23355
  * Load image data from file:// path or return as-is if base64
23360
23356
  */
@@ -23367,10 +23363,24 @@ var GoogleVideoProvider = class {
23367
23363
  return { data: imagePath };
23368
23364
  }
23369
23365
  /**
23366
+ * Load video data from file:// path or return as-is if base64
23367
+ */
23368
+ loadVideoData(videoPath) {
23369
+ if (videoPath.startsWith("file://")) {
23370
+ const filePath = videoPath.slice(7);
23371
+ if (!fs.existsSync(filePath)) return { error: `Video file not found: ${filePath}` };
23372
+ return { data: fs.readFileSync(filePath).toString("base64") };
23373
+ }
23374
+ return { data: videoPath };
23375
+ }
23376
+ /**
23370
23377
  * Create a new video generation job
23371
23378
  */
23372
23379
  async createVideoJob(prompt, config) {
23373
- const url = await this.getVertexEndpoint("predictLongRunning");
23380
+ if (this.isVertexMode(config)) return this.createVertexVideoJob(prompt, config);
23381
+ return this.createAiStudioVideoJob(prompt, config);
23382
+ }
23383
+ buildVertexRequestBody(prompt, config) {
23374
23384
  const instance = { prompt };
23375
23385
  if (config.aspectRatio) instance.aspectRatio = config.aspectRatio;
23376
23386
  if (config.resolution) instance.resolution = config.resolution;
@@ -23414,7 +23424,69 @@ var GoogleVideoProvider = class {
23414
23424
  }
23415
23425
  const extendVideoId = config.extendVideoId || config.sourceVideo;
23416
23426
  if (extendVideoId) instance.video = { operationName: extendVideoId };
23427
+ return { body: { instances: [instance] } };
23428
+ }
23429
+ buildAiStudioRequestBody(prompt, config) {
23430
+ const instance = { prompt };
23431
+ const parameters = {};
23432
+ if (config.aspectRatio) parameters.aspectRatio = config.aspectRatio;
23433
+ if (config.resolution) parameters.resolution = config.resolution;
23434
+ if (config.durationSeconds) parameters.durationSeconds = config.durationSeconds;
23435
+ if (config.negativePrompt) parameters.negativePrompt = config.negativePrompt;
23436
+ if (config.personGeneration) parameters.personGeneration = config.personGeneration;
23437
+ if (config.seed !== void 0) parameters.seed = config.seed;
23438
+ if (config.image) {
23439
+ const { data: imageData, error } = this.loadImageData(config.image);
23440
+ if (error) return { error };
23441
+ instance.image = { inlineData: {
23442
+ mimeType: "image/png",
23443
+ data: imageData
23444
+ } };
23445
+ }
23446
+ const lastFrame = config.lastFrame || config.lastImage;
23447
+ if (lastFrame) {
23448
+ const { data: lastFrameData, error } = this.loadImageData(lastFrame);
23449
+ if (error) return { error };
23450
+ instance.lastFrame = { inlineData: {
23451
+ mimeType: "image/png",
23452
+ data: lastFrameData
23453
+ } };
23454
+ }
23455
+ if (config.referenceImages && config.referenceImages.length > 0) {
23456
+ const refs = [];
23457
+ for (const ref of config.referenceImages.slice(0, 3)) {
23458
+ const imagePath = typeof ref === "string" ? ref : ref.image;
23459
+ const referenceType = typeof ref === "string" ? "asset" : ref.referenceType || "asset";
23460
+ const { data: imageData, error } = this.loadImageData(imagePath);
23461
+ if (error) return { error };
23462
+ refs.push({
23463
+ image: { inlineData: {
23464
+ mimeType: "image/png",
23465
+ data: imageData
23466
+ } },
23467
+ referenceType
23468
+ });
23469
+ }
23470
+ instance.referenceImages = refs;
23471
+ }
23472
+ const sourceVideo = config.extendVideoId || config.sourceVideo;
23473
+ if (sourceVideo) {
23474
+ if (sourceVideo.includes("/operations/")) return { error: "Google AI Studio Veo does not accept operation IDs for video extension. Use `vertex:video:*` with `extendVideoId`, or provide base64/file:// video data via `sourceVideo`." };
23475
+ const { data: videoData, error } = this.loadVideoData(sourceVideo);
23476
+ if (error) return { error };
23477
+ instance.video = { inlineData: {
23478
+ mimeType: "video/mp4",
23479
+ data: videoData
23480
+ } };
23481
+ }
23417
23482
  const body = { instances: [instance] };
23483
+ if (Object.keys(parameters).length > 0) body.parameters = parameters;
23484
+ return { body };
23485
+ }
23486
+ async createVertexVideoJob(prompt, config) {
23487
+ const url = await this.getVertexEndpoint("predictLongRunning");
23488
+ const { body, error: bodyError } = this.buildVertexRequestBody(prompt, config);
23489
+ if (bodyError || !body) return { error: bodyError || "Failed to build Vertex Veo request" };
23418
23490
  try {
23419
23491
  const client = await this.getClientWithCredentials();
23420
23492
  logger.debug("[Google Video] Creating video job", {
@@ -23432,10 +23504,37 @@ var GoogleVideoProvider = class {
23432
23504
  return { error: `Failed to create video job: ${error.response?.data?.error?.message || error.message || String(err)}` };
23433
23505
  }
23434
23506
  }
23507
+ async createAiStudioVideoJob(prompt, config) {
23508
+ const { body, error: bodyError } = this.buildAiStudioRequestBody(prompt, config);
23509
+ if (bodyError || !body) return { error: bodyError || "Failed to build Google AI Studio Veo request" };
23510
+ try {
23511
+ const headers = await this.getAiStudioHeaders(config);
23512
+ const url = this.getAiStudioEndpoint(`models/${this.modelName}:predictLongRunning`);
23513
+ logger.debug("[Google Video] Creating video job", {
23514
+ url,
23515
+ model: this.modelName,
23516
+ transport: "google-ai-studio"
23517
+ });
23518
+ const response = await fetchWithTimeout(url, {
23519
+ method: "POST",
23520
+ headers,
23521
+ body: JSON.stringify(body)
23522
+ }, REQUEST_TIMEOUT_MS);
23523
+ const data = await response.json();
23524
+ if (!response.ok) return { error: `Failed to create video job: ${data.error?.message || response.statusText}` };
23525
+ return { operation: data };
23526
+ } catch (err) {
23527
+ return { error: `Failed to create video job: ${err.message || String(err)}` };
23528
+ }
23529
+ }
23435
23530
  /**
23436
23531
  * Poll for video job completion using fetchPredictOperation endpoint
23437
23532
  */
23438
- async pollOperationStatus(operationName, pollIntervalMs, maxPollTimeMs) {
23533
+ async pollOperationStatus(operationName, pollIntervalMs, maxPollTimeMs, config) {
23534
+ if (this.isVertexMode(config)) return this.pollVertexOperationStatus(operationName, pollIntervalMs, maxPollTimeMs);
23535
+ return this.pollAiStudioOperationStatus(operationName, pollIntervalMs, maxPollTimeMs, config);
23536
+ }
23537
+ async pollVertexOperationStatus(operationName, pollIntervalMs, maxPollTimeMs) {
23439
23538
  const startTime = Date.now();
23440
23539
  const location = this.getLocation();
23441
23540
  const url = `https://${location}-aiplatform.googleapis.com/v1/projects/${await this.getProjectId()}/locations/${location}/publishers/google/models/${this.modelName}:fetchPredictOperation`;
@@ -23460,10 +23559,37 @@ var GoogleVideoProvider = class {
23460
23559
  }
23461
23560
  return { error: `Video generation timed out after ${maxPollTimeMs / 1e3} seconds` };
23462
23561
  }
23562
+ async pollAiStudioOperationStatus(operationName, pollIntervalMs, maxPollTimeMs, config) {
23563
+ const startTime = Date.now();
23564
+ const url = this.getAiStudioEndpoint(operationName);
23565
+ const headers = await this.getAiStudioHeaders(config);
23566
+ logger.debug(`[Google Video] Polling operation via Google AI Studio: ${url}`);
23567
+ while (Date.now() - startTime < maxPollTimeMs) try {
23568
+ const response = await fetchWithTimeout(url, {
23569
+ method: "GET",
23570
+ headers
23571
+ }, REQUEST_TIMEOUT_MS);
23572
+ const operation = await response.json();
23573
+ if (!response.ok) return { error: `Polling error: ${operation.error?.message || response.statusText}` };
23574
+ logger.debug(`[Google Video] Operation status: done=${operation.done}, progress=${operation.metadata?.progress}%`);
23575
+ if (operation.done) {
23576
+ if (operation.error) return { error: `Video generation failed: ${operation.error.message}` };
23577
+ return { operation };
23578
+ }
23579
+ await sleep(pollIntervalMs);
23580
+ } catch (err) {
23581
+ return { error: `Polling error: ${err.message || String(err)}` };
23582
+ }
23583
+ return { error: `Video generation timed out after ${maxPollTimeMs / 1e3} seconds` };
23584
+ }
23463
23585
  /**
23464
23586
  * Download video from URI and store to blob storage
23465
23587
  */
23466
- async downloadVideoToBlob(videoUri) {
23588
+ async downloadVideoToBlob(videoUri, config) {
23589
+ if (this.isVertexMode(config)) return this.downloadVertexVideoToBlob(videoUri);
23590
+ return this.downloadAiStudioVideoToBlob(videoUri, config);
23591
+ }
23592
+ async downloadVertexVideoToBlob(videoUri) {
23467
23593
  try {
23468
23594
  const response = await (await this.getClientWithCredentials()).request({
23469
23595
  url: videoUri,
@@ -23480,6 +23606,23 @@ var GoogleVideoProvider = class {
23480
23606
  return { error: `Download error: ${err.message || String(err)}` };
23481
23607
  }
23482
23608
  }
23609
+ async downloadAiStudioVideoToBlob(videoUri, config) {
23610
+ try {
23611
+ const response = await fetchWithTimeout(videoUri, {
23612
+ method: "GET",
23613
+ headers: await this.getAiStudioHeaders(config)
23614
+ }, REQUEST_TIMEOUT_MS);
23615
+ if (!response.ok) return { error: `Download error: ${response.statusText}` };
23616
+ const { ref } = await storeBlob(Buffer.from(await response.arrayBuffer()), "video/mp4", {
23617
+ kind: "video",
23618
+ location: "response.video"
23619
+ });
23620
+ logger.debug(`[Google Video] Stored video to blob storage: ${ref.uri}`);
23621
+ return { blobRef: ref };
23622
+ } catch (err) {
23623
+ return { error: `Download error: ${err.message || String(err)}` };
23624
+ }
23625
+ }
23483
23626
  /**
23484
23627
  * Store base64 encoded video to blob storage
23485
23628
  */
@@ -23497,20 +23640,40 @@ var GoogleVideoProvider = class {
23497
23640
  }
23498
23641
  async callApi(prompt, context) {
23499
23642
  if (!prompt || prompt.trim() === "") return { error: "Prompt is required for video generation" };
23500
- let projectId = this.config.projectId || getEnvString("GOOGLE_CLOUD_PROJECT") || getEnvString("GOOGLE_PROJECT_ID") || this.env?.GOOGLE_CLOUD_PROJECT || this.env?.GOOGLE_PROJECT_ID;
23501
- if (!projectId) try {
23502
- projectId = await resolveProjectId(this.config, this.env);
23503
- } catch {
23504
- return { error: "Google Veo video generation requires Vertex AI. Set GOOGLE_CLOUD_PROJECT environment variable or add `projectId` to the provider config, then run \"gcloud auth application-default login\"." };
23505
- }
23506
- const config = {
23643
+ let effectiveConfig = {
23507
23644
  ...this.config,
23508
23645
  ...context?.prompt?.config
23509
23646
  };
23510
- const model = config.model || this.modelName;
23511
- const aspectRatio = config.aspectRatio || DEFAULT_ASPECT_RATIO$1;
23512
- const resolution = config.resolution || DEFAULT_RESOLUTION$1;
23513
- const durationSeconds = config.durationSeconds || config.duration || DEFAULT_DURATION$1;
23647
+ let isVertexMode = this.isVertexMode(effectiveConfig);
23648
+ if (isVertexMode) {
23649
+ let projectId = effectiveConfig.projectId || getEnvString("GOOGLE_CLOUD_PROJECT") || getEnvString("GOOGLE_PROJECT_ID") || this.env?.GOOGLE_CLOUD_PROJECT || this.env?.GOOGLE_PROJECT_ID;
23650
+ if (!projectId) try {
23651
+ projectId = await resolveProjectId(effectiveConfig, this.env);
23652
+ } catch {
23653
+ return { error: "Google Veo video generation via Vertex AI requires a project ID. Set GOOGLE_CLOUD_PROJECT or add `projectId` to the provider config, then run \"gcloud auth application-default login\"." };
23654
+ }
23655
+ effectiveConfig = {
23656
+ ...effectiveConfig,
23657
+ vertexai: true,
23658
+ ...projectId ? { projectId } : {}
23659
+ };
23660
+ } else if (!this.getApiKey(effectiveConfig)) try {
23661
+ const adcProjectId = await resolveProjectId(effectiveConfig, this.env);
23662
+ if (adcProjectId) {
23663
+ isVertexMode = true;
23664
+ effectiveConfig = {
23665
+ ...effectiveConfig,
23666
+ vertexai: true,
23667
+ projectId: adcProjectId
23668
+ };
23669
+ } else return { error: "Google Veo video generation via Google AI Studio requires an API key. Set GOOGLE_API_KEY or GEMINI_API_KEY, or add `apiKey` to the provider config." };
23670
+ } catch {
23671
+ return { error: "Google Veo video generation via Google AI Studio requires an API key. Set GOOGLE_API_KEY or GEMINI_API_KEY, or add `apiKey` to the provider config." };
23672
+ }
23673
+ const model = effectiveConfig.model || this.modelName;
23674
+ const aspectRatio = effectiveConfig.aspectRatio || DEFAULT_ASPECT_RATIO$1;
23675
+ const resolution = effectiveConfig.resolution || DEFAULT_RESOLUTION$1;
23676
+ const durationSeconds = effectiveConfig.durationSeconds || effectiveConfig.duration || DEFAULT_DURATION$1;
23514
23677
  const ratioValidation = validateAspectRatio$1(aspectRatio);
23515
23678
  if (!ratioValidation.valid) return { error: ratioValidation.message };
23516
23679
  const durationValidation = validateDuration$1(model, durationSeconds);
@@ -23520,7 +23683,7 @@ var GoogleVideoProvider = class {
23520
23683
  const startTime = Date.now();
23521
23684
  logger.info(`[Google Video] Creating video job for model ${model}...`);
23522
23685
  const { operation: createdOp, error: createError } = await this.createVideoJob(prompt, {
23523
- ...config,
23686
+ ...effectiveConfig,
23524
23687
  aspectRatio,
23525
23688
  resolution,
23526
23689
  durationSeconds
@@ -23528,9 +23691,9 @@ var GoogleVideoProvider = class {
23528
23691
  if (createError || !createdOp) return { error: createError || "Failed to create video job" };
23529
23692
  const operationName = createdOp.name;
23530
23693
  logger.info(`[Google Video] Video job created: ${operationName}`);
23531
- const pollIntervalMs = config.pollIntervalMs || DEFAULT_POLL_INTERVAL_MS$1;
23532
- const maxPollTimeMs = config.maxPollTimeMs || DEFAULT_MAX_POLL_TIME_MS$1;
23533
- const { operation: completedOp, error: pollError } = await this.pollOperationStatus(operationName, pollIntervalMs, maxPollTimeMs);
23694
+ const pollIntervalMs = effectiveConfig.pollIntervalMs || DEFAULT_POLL_INTERVAL_MS$1;
23695
+ const maxPollTimeMs = effectiveConfig.maxPollTimeMs || DEFAULT_MAX_POLL_TIME_MS$1;
23696
+ const { operation: completedOp, error: pollError } = await this.pollOperationStatus(operationName, pollIntervalMs, maxPollTimeMs, effectiveConfig);
23534
23697
  if (pollError || !completedOp) return { error: pollError || "Polling failed" };
23535
23698
  let blobRef;
23536
23699
  const base64Video = completedOp.response?.videos?.[0]?.bytesBase64Encoded;
@@ -23545,7 +23708,7 @@ var GoogleVideoProvider = class {
23545
23708
  logger.debug(`[Google Video] Response: ${JSON.stringify(completedOp.response)}`);
23546
23709
  return { error: "No video data in response" };
23547
23710
  }
23548
- const { blobRef: ref, error: downloadError } = await this.downloadVideoToBlob(videoUri);
23711
+ const { blobRef: ref, error: downloadError } = await this.downloadVideoToBlob(videoUri, effectiveConfig);
23549
23712
  if (downloadError) return { error: downloadError };
23550
23713
  blobRef = ref;
23551
23714
  }
@@ -24235,13 +24398,22 @@ const ApiKeyAuthSchema = z.object({
24235
24398
  placement: z.enum(["header", "query"]),
24236
24399
  keyName: z.string()
24237
24400
  });
24401
+ const FileAuthSchema = z.object({
24402
+ type: z.literal("file"),
24403
+ path: z.string().min(1)
24404
+ });
24238
24405
  const AuthSchema = z.union([
24239
24406
  OAuthClientCredentialsSchema,
24240
24407
  OAuthPasswordSchema,
24241
24408
  BasicAuthSchema,
24242
24409
  BearerAuthSchema,
24243
- ApiKeyAuthSchema
24410
+ ApiKeyAuthSchema,
24411
+ FileAuthSchema
24244
24412
  ]);
24413
+ const FileAuthResultSchema = z.object({
24414
+ token: z.string().min(1),
24415
+ expiration: z.number().finite().nullable().optional()
24416
+ });
24245
24417
  /**
24246
24418
  * Configuration for a separate session endpoint that must be called before the main API.
24247
24419
  * The session endpoint returns a session ID that is then used in the main request.
@@ -24330,6 +24502,12 @@ async function loadTransformModule(transform) {
24330
24502
  }
24331
24503
  return transform;
24332
24504
  }
24505
+ function hasOwnProperty(obj, key) {
24506
+ return Object.prototype.hasOwnProperty.call(obj, key);
24507
+ }
24508
+ function parseFileAuthReference(filePath) {
24509
+ return filePath.startsWith("file://") ? parseFileUrl(filePath) : { filePath };
24510
+ }
24333
24511
  async function createSessionParser(parser) {
24334
24512
  if (!parser) return () => "";
24335
24513
  if (typeof parser === "function") return (response) => parser(response);
@@ -24690,20 +24868,11 @@ var HttpProvider = class {
24690
24868
  password: this.config.auth.password ? nunjucks.renderString(this.config.auth.password, vars) : void 0
24691
24869
  } : baseConfig;
24692
24870
  const now = Date.now();
24693
- if (this.lastToken && this.lastTokenExpiresAt && now + 6e4 < this.lastTokenExpiresAt) {
24871
+ if (this.hasValidCachedToken(now)) {
24694
24872
  logger.debug("[HTTP Provider Auth]: Using cached OAuth token");
24695
24873
  return;
24696
24874
  }
24697
- if (this.tokenRefreshPromise != null) {
24698
- logger.debug("[HTTP Provider Auth]: Token refresh already in progress, waiting...");
24699
- try {
24700
- await this.tokenRefreshPromise;
24701
- if (this.lastToken && this.lastTokenExpiresAt && Date.now() + 6e4 < this.lastTokenExpiresAt) return;
24702
- logger.debug("[HTTP Provider Auth]: Token expired while waiting, refreshing again...");
24703
- } catch {
24704
- logger.debug("[HTTP Provider Auth]: Previous token refresh failed, retrying...");
24705
- }
24706
- }
24875
+ if (this.tokenRefreshPromise != null && await this.waitForInFlightTokenRefresh()) return;
24707
24876
  logger.debug("[HTTP Provider Auth]: Refreshing OAuth token");
24708
24877
  const refreshPromise = this.performTokenRefresh(oauthConfig, now);
24709
24878
  this.tokenRefreshPromise = refreshPromise;
@@ -24732,7 +24901,7 @@ var HttpProvider = class {
24732
24901
  body: tokenRequestBody.toString()
24733
24902
  };
24734
24903
  if (httpsAgent) fetchOptions.dispatcher = httpsAgent;
24735
- const response = await fetchWithCache(oauthConfig.tokenUrl, fetchOptions, REQUEST_TIMEOUT_MS, "text", true, 0);
24904
+ const response = await fetchWithCache(oauthConfig.tokenUrl, fetchOptions, REQUEST_TIMEOUT_MS$1, "text", true, 0);
24736
24905
  if (response.status < 200 || response.status >= 300) throw new Error(`OAuth token request failed with status ${response.status} ${response.statusText}: ${response.data}`);
24737
24906
  const tokenData = JSON.parse(response.data);
24738
24907
  if (!tokenData.access_token) throw new Error("OAuth token response missing access_token");
@@ -24745,6 +24914,70 @@ var HttpProvider = class {
24745
24914
  }
24746
24915
  invariant(this.lastToken, "OAuth token should be defined at this point");
24747
24916
  }
24917
+ hasValidCachedToken(now = Date.now()) {
24918
+ if (!this.lastToken) return false;
24919
+ if (this.lastTokenExpiresAt == null) return this.config.auth?.type === "file";
24920
+ return now + TOKEN_REFRESH_BUFFER_MS < this.lastTokenExpiresAt;
24921
+ }
24922
+ async waitForInFlightTokenRefresh() {
24923
+ if (this.tokenRefreshPromise == null) return false;
24924
+ logger.debug("[HTTP Provider Auth]: Token refresh already in progress, waiting...");
24925
+ try {
24926
+ await this.tokenRefreshPromise;
24927
+ if (this.hasValidCachedToken()) return true;
24928
+ logger.debug("[HTTP Provider Auth]: Token expired while waiting, refreshing again...");
24929
+ } catch {
24930
+ logger.debug("[HTTP Provider Auth]: Previous token refresh failed, retrying...");
24931
+ }
24932
+ return false;
24933
+ }
24934
+ async refreshFileTokenIfNeeded(prompt, vars, context) {
24935
+ if (!this.config.auth || this.config.auth.type !== "file") {
24936
+ logger.debug("[HTTP Provider Auth]: No file auth configured");
24937
+ return;
24938
+ }
24939
+ if (this.hasValidCachedToken()) {
24940
+ logger.debug("[HTTP Provider Auth]: Using cached file auth token");
24941
+ return;
24942
+ }
24943
+ if (this.tokenRefreshPromise != null && await this.waitForInFlightTokenRefresh()) return;
24944
+ logger.debug("[HTTP Provider Auth]: Refreshing file auth token");
24945
+ const refreshPromise = this.performFileTokenRefresh(prompt, vars, context);
24946
+ this.tokenRefreshPromise = refreshPromise;
24947
+ try {
24948
+ await refreshPromise;
24949
+ } finally {
24950
+ if (this.tokenRefreshPromise === refreshPromise) this.tokenRefreshPromise = void 0;
24951
+ }
24952
+ }
24953
+ async performFileTokenRefresh(prompt, vars, context) {
24954
+ invariant(this.config.auth?.type === "file", "File auth should be configured");
24955
+ const { filePath, functionName } = parseFileAuthReference(this.config.auth.path);
24956
+ const defaultFunctionName = filePath.endsWith(".py") ? "get_auth" : "default";
24957
+ const authContext = {
24958
+ ...context ?? {},
24959
+ prompt: context?.prompt ?? {
24960
+ raw: prompt,
24961
+ label: prompt
24962
+ },
24963
+ vars
24964
+ };
24965
+ try {
24966
+ const authFn = await loadFunction({
24967
+ filePath,
24968
+ functionName,
24969
+ defaultFunctionName
24970
+ });
24971
+ const result = FileAuthResultSchema.parse(await authFn(authContext));
24972
+ this.lastToken = result.token;
24973
+ this.lastTokenExpiresAt = result.expiration ?? void 0;
24974
+ logger.debug("[HTTP Provider Auth]: Successfully refreshed file auth token");
24975
+ } catch (err) {
24976
+ logger.error(`[HTTP Provider Auth]: Failed to refresh file auth token: ${String(err)}`);
24977
+ throw new Error(`Failed to refresh file auth token: ${String(err)}`);
24978
+ }
24979
+ invariant(this.lastToken, "File auth token should be defined at this point");
24980
+ }
24748
24981
  async refreshSignatureIfNeeded(vars) {
24749
24982
  if (!this.config.signatureAuth) {
24750
24983
  logger.debug("[HTTP Provider Auth]: No signature auth configured");
@@ -24825,7 +25058,7 @@ var HttpProvider = class {
24825
25058
  };
24826
25059
  if (body) fetchOptions.body = body;
24827
25060
  if (httpsAgent) fetchOptions.dispatcher = httpsAgent;
24828
- const response = await fetchWithCache(url, fetchOptions, REQUEST_TIMEOUT_MS, "text", true, this.config.maxRetries);
25061
+ const response = await fetchWithCache(url, fetchOptions, REQUEST_TIMEOUT_MS$1, "text", true, this.config.maxRetries);
24829
25062
  if (response.status < 200 || response.status >= 300) throw new Error(`Session endpoint request failed with status ${response.status} ${response.statusText}: ${response.data}`);
24830
25063
  const rawText = response.data;
24831
25064
  let parsedData;
@@ -24926,12 +25159,21 @@ var HttpProvider = class {
24926
25159
  ...context?.vars || {},
24927
25160
  prompt,
24928
25161
  ...context?.evaluationId ? { evaluationId: context.evaluationId } : {},
24929
- ...transformedTools !== void 0 ? { tools: serializeForTemplate(transformedTools) } : {},
24930
- ...transformedToolChoice !== void 0 ? { tool_choice: serializeForTemplate(transformedToolChoice) } : {}
25162
+ ...transformedTools === void 0 ? {} : { tools: serializeForTemplate(transformedTools) },
25163
+ ...transformedToolChoice === void 0 ? {} : { tool_choice: serializeForTemplate(transformedToolChoice) }
24931
25164
  };
24932
25165
  if (this.config.auth?.type === "oauth") {
24933
25166
  await this.refreshOAuthTokenIfNeeded(vars);
24934
25167
  invariant(this.lastToken, "OAuth token should be defined at this point");
25168
+ if (hasOwnProperty(vars, "token")) logger.warn("[HTTP Provider Auth]: `token` is already defined in vars and will be overwritten");
25169
+ vars.token = this.lastToken;
25170
+ } else if (this.config.auth?.type === "file") {
25171
+ await this.refreshFileTokenIfNeeded(prompt, vars, context);
25172
+ invariant(this.lastToken, "File auth token should be defined at this point");
25173
+ if (hasOwnProperty(vars, "token")) logger.warn("[HTTP Provider Auth]: `token` is already defined in vars and will be overwritten");
25174
+ if (hasOwnProperty(vars, "expiration")) logger.warn("[HTTP Provider Auth]: `expiration` is already defined in vars and will be overwritten");
25175
+ vars.token = this.lastToken;
25176
+ vars.expiration = this.lastTokenExpiresAt;
24935
25177
  }
24936
25178
  if (this.config.signatureAuth) {
24937
25179
  await this.refreshSignatureIfNeeded(vars);
@@ -25001,7 +25243,7 @@ var HttpProvider = class {
25001
25243
  }
25002
25244
  let data, cached = false, status, statusText, responseHeaders, latencyMs;
25003
25245
  try {
25004
- ({data, cached, status, statusText, headers: responseHeaders, latencyMs} = await fetchWithCache(url, fetchOptions, REQUEST_TIMEOUT_MS, "text", context?.bustCache ?? context?.debug, this.config.maxRetries));
25246
+ ({data, cached, status, statusText, headers: responseHeaders, latencyMs} = await fetchWithCache(url, fetchOptions, REQUEST_TIMEOUT_MS$1, "text", context?.bustCache ?? context?.debug, this.config.maxRetries));
25005
25247
  } catch (err) {
25006
25248
  throw err;
25007
25249
  }
@@ -25119,7 +25361,7 @@ var HttpProvider = class {
25119
25361
  }
25120
25362
  let data, cached = false, status, statusText, responseHeaders, latencyMs;
25121
25363
  try {
25122
- ({data, cached, status, statusText, headers: responseHeaders, latencyMs} = await fetchWithCache(url, fetchOptions, REQUEST_TIMEOUT_MS, "text", context?.bustCache ?? context?.debug, this.config.maxRetries));
25364
+ ({data, cached, status, statusText, headers: responseHeaders, latencyMs} = await fetchWithCache(url, fetchOptions, REQUEST_TIMEOUT_MS$1, "text", context?.bustCache ?? context?.debug, this.config.maxRetries));
25123
25365
  } catch (err) {
25124
25366
  throw err;
25125
25367
  }
@@ -25366,7 +25608,7 @@ var HuggingfaceTextGenerationProvider = class {
25366
25608
  ...this.getApiKey() ? { Authorization: `Bearer ${this.getApiKey()}` } : {}
25367
25609
  },
25368
25610
  body: JSON.stringify(params)
25369
- }, REQUEST_TIMEOUT_MS);
25611
+ }, REQUEST_TIMEOUT_MS$1);
25370
25612
  logger.debug("Huggingface Inference API response", { data: response.data });
25371
25613
  if (response.data.error) return { error: `API call error: ${response.data.error}` };
25372
25614
  if (!response.data[0] && !response.data.generated_text) return { error: `Malformed response data: ${response.data}` };
@@ -25408,7 +25650,7 @@ var HuggingfaceTextClassificationProvider = class {
25408
25650
  ...this.getApiKey() ? { Authorization: `Bearer ${this.getApiKey()}` } : {}
25409
25651
  },
25410
25652
  body: JSON.stringify(params)
25411
- }, REQUEST_TIMEOUT_MS);
25653
+ }, REQUEST_TIMEOUT_MS$1);
25412
25654
  if (response.data.error) return { error: `API call error: ${response.data.error}` };
25413
25655
  if (!response.data[0] || !Array.isArray(response.data[0])) return { error: `Malformed response data: ${response.data}` };
25414
25656
  const scores = {};
@@ -25471,7 +25713,7 @@ var HuggingfaceFeatureExtractionProvider = class {
25471
25713
  ...this.getApiKey() ? { Authorization: `Bearer ${this.getApiKey()}` } : {}
25472
25714
  },
25473
25715
  body: JSON.stringify(params)
25474
- }, REQUEST_TIMEOUT_MS);
25716
+ }, REQUEST_TIMEOUT_MS$1);
25475
25717
  if (typeof response.data === "object" && "error" in response.data) return { error: `API call error: ${response.data.error}` };
25476
25718
  if (!Array.isArray(response.data)) return { error: `Malformed response data: ${response.data}` };
25477
25719
  return { embedding: response.data };
@@ -25521,7 +25763,7 @@ var HuggingfaceSentenceSimilarityProvider = class {
25521
25763
  ...this.getApiKey() ? { Authorization: `Bearer ${this.getApiKey()}` } : {}
25522
25764
  },
25523
25765
  body: JSON.stringify(params)
25524
- }, REQUEST_TIMEOUT_MS);
25766
+ }, REQUEST_TIMEOUT_MS$1);
25525
25767
  if (typeof response.data === "object" && "error" in response.data) return { error: `API call error: ${response.data.error}` };
25526
25768
  if (!Array.isArray(response.data)) return { error: `Malformed response data: ${response.data}` };
25527
25769
  return { similarity: response.data[0] };
@@ -25563,7 +25805,7 @@ var HuggingfaceTokenExtractionProvider = class {
25563
25805
  ...this.getApiKey() ? { Authorization: `Bearer ${this.getApiKey()}` } : {}
25564
25806
  },
25565
25807
  body: JSON.stringify(params)
25566
- }, REQUEST_TIMEOUT_MS);
25808
+ }, REQUEST_TIMEOUT_MS$1);
25567
25809
  if (typeof response.data === "object" && "error" in response.data) return { error: `API call error: ${response.data.error}` };
25568
25810
  if (!Array.isArray(response.data)) return { error: `Malformed response data: ${response.data}` };
25569
25811
  const classification = {};
@@ -25642,7 +25884,7 @@ var LlamaProvider = class {
25642
25884
  method: "POST",
25643
25885
  headers: { "Content-Type": "application/json" },
25644
25886
  body: JSON.stringify(body)
25645
- }, REQUEST_TIMEOUT_MS));
25887
+ }, REQUEST_TIMEOUT_MS$1));
25646
25888
  } catch (err) {
25647
25889
  return { error: `API call error: ${String(err)}` };
25648
25890
  }
@@ -25768,13 +26010,20 @@ function createLlamaApiProvider(providerPath, options = {}) {
25768
26010
  }
25769
26011
  //#endregion
25770
26012
  //#region src/providers/localai.ts
26013
+ function parseEnvFloat(value) {
26014
+ if (value === void 0) return;
26015
+ const parsed = Number.parseFloat(value);
26016
+ return Number.isNaN(parsed) ? void 0 : parsed;
26017
+ }
25771
26018
  var LocalAiGenericProvider = class {
25772
26019
  modelName;
25773
26020
  apiBaseUrl;
25774
26021
  config;
26022
+ env;
25775
26023
  constructor(modelName, options = {}) {
25776
26024
  const { id, config, env } = options;
25777
26025
  this.modelName = modelName;
26026
+ this.env = env;
25778
26027
  this.apiBaseUrl = config?.apiBaseUrl || env?.LOCALAI_BASE_URL || getEnvString("LOCALAI_BASE_URL") || "http://localhost:8080/v1";
25779
26028
  this.config = config || {};
25780
26029
  this.id = id ? () => id : this.id;
@@ -25798,7 +26047,7 @@ var LocalAiChatProvider = class extends LocalAiGenericProvider {
25798
26047
  const body = {
25799
26048
  model: this.modelName,
25800
26049
  messages,
25801
- temperature: this.config.temperature || getEnvFloat("LOCALAI_TEMPERATURE") || .7
26050
+ temperature: this.config.temperature ?? parseEnvFloat(this.env?.LOCALAI_TEMPERATURE) ?? getEnvFloat("LOCALAI_TEMPERATURE") ?? .7
25802
26051
  };
25803
26052
  let data;
25804
26053
  try {
@@ -25806,7 +26055,7 @@ var LocalAiChatProvider = class extends LocalAiGenericProvider {
25806
26055
  method: "POST",
25807
26056
  headers: { "Content-Type": "application/json" },
25808
26057
  body: JSON.stringify(body)
25809
- }, REQUEST_TIMEOUT_MS));
26058
+ }, REQUEST_TIMEOUT_MS$1));
25810
26059
  } catch (err) {
25811
26060
  return { error: `API call error: ${String(err)}` };
25812
26061
  }
@@ -25829,7 +26078,7 @@ var LocalAiEmbeddingProvider = class extends LocalAiGenericProvider {
25829
26078
  method: "POST",
25830
26079
  headers: { "Content-Type": "application/json" },
25831
26080
  body: JSON.stringify(body)
25832
- }, REQUEST_TIMEOUT_MS));
26081
+ }, REQUEST_TIMEOUT_MS$1));
25833
26082
  } catch (err) {
25834
26083
  return { error: `API call error: ${String(err)}` };
25835
26084
  }
@@ -25847,7 +26096,7 @@ var LocalAiCompletionProvider = class extends LocalAiGenericProvider {
25847
26096
  const body = {
25848
26097
  model: this.modelName,
25849
26098
  prompt,
25850
- temperature: this.config.temperature || getEnvFloat("LOCALAI_TEMPERATURE") || .7
26099
+ temperature: this.config.temperature ?? parseEnvFloat(this.env?.LOCALAI_TEMPERATURE) ?? getEnvFloat("LOCALAI_TEMPERATURE") ?? .7
25851
26100
  };
25852
26101
  let data;
25853
26102
  try {
@@ -25855,7 +26104,7 @@ var LocalAiCompletionProvider = class extends LocalAiGenericProvider {
25855
26104
  method: "POST",
25856
26105
  headers: { "Content-Type": "application/json" },
25857
26106
  body: JSON.stringify(body)
25858
- }, REQUEST_TIMEOUT_MS));
26107
+ }, REQUEST_TIMEOUT_MS$1));
25859
26108
  } catch (err) {
25860
26109
  return { error: `API call error: ${String(err)}` };
25861
26110
  }
@@ -26097,7 +26346,7 @@ var NscaleImageProvider = class NscaleImageProvider extends OpenAiImageProvider
26097
26346
  let data, status, statusText;
26098
26347
  let cached = false;
26099
26348
  try {
26100
- ({data, cached, status, statusText} = await callOpenAiImageApi(`${this.getApiUrl()}${endpoint}`, body, headers, REQUEST_TIMEOUT_MS));
26349
+ ({data, cached, status, statusText} = await callOpenAiImageApi(`${this.getApiUrl()}${endpoint}`, body, headers, REQUEST_TIMEOUT_MS$1));
26101
26350
  if (status < 200 || status >= 300) return { error: `API error: ${status} ${statusText}\n${typeof data === "string" ? data : JSON.stringify(data)}` };
26102
26351
  } catch (err) {
26103
26352
  logger.error(`API call error: ${String(err)}`);
@@ -26265,7 +26514,7 @@ var OllamaCompletionProvider = class {
26265
26514
  if (OllamaCompletionOptionKeys.has(optionName) && optionName !== "think" && optionName !== "tools" && optionName !== "passthrough") options[optionName] = this.config[optionName];
26266
26515
  return options;
26267
26516
  }, {}),
26268
- ...this.config.think !== void 0 ? { think: this.config.think } : {},
26517
+ ...this.config.think === void 0 ? {} : { think: this.config.think },
26269
26518
  ...this.config.passthrough || {}
26270
26519
  };
26271
26520
  if (this.config.think !== void 0) params.think = this.config.think;
@@ -26279,7 +26528,7 @@ var OllamaCompletionProvider = class {
26279
26528
  ...getEnvString("OLLAMA_API_KEY") ? { Authorization: `Bearer ${getEnvString("OLLAMA_API_KEY")}` } : {}
26280
26529
  },
26281
26530
  body: JSON.stringify(params)
26282
- }, REQUEST_TIMEOUT_MS, "text");
26531
+ }, REQUEST_TIMEOUT_MS$1, "text");
26283
26532
  } catch (err) {
26284
26533
  return { error: `API call error: ${String(err)}. Output:\n${response?.data}` };
26285
26534
  }
@@ -26364,7 +26613,7 @@ var OllamaChatProvider = class {
26364
26613
  if (OllamaCompletionOptionKeys.has(optionName) && optionName !== "tools") options[optionName] = this.config[optionName];
26365
26614
  return options;
26366
26615
  }, {}),
26367
- ...this.config.think !== void 0 ? { think: this.config.think } : {},
26616
+ ...this.config.think === void 0 ? {} : { think: this.config.think },
26368
26617
  ...this.config.passthrough || {}
26369
26618
  };
26370
26619
  if (this.config.tools) {
@@ -26381,7 +26630,7 @@ var OllamaChatProvider = class {
26381
26630
  ...getEnvString("OLLAMA_API_KEY") ? { Authorization: `Bearer ${getEnvString("OLLAMA_API_KEY")}` } : {}
26382
26631
  },
26383
26632
  body: JSON.stringify(params)
26384
- }, REQUEST_TIMEOUT_MS, "text", context?.bustCache ?? context?.debug);
26633
+ }, REQUEST_TIMEOUT_MS$1, "text", context?.bustCache ?? context?.debug);
26385
26634
  } catch (err) {
26386
26635
  return { error: `API call error: ${String(err)}. Output:\n${response?.data}` };
26387
26636
  }
@@ -26449,7 +26698,7 @@ var OllamaEmbeddingProvider = class extends OllamaCompletionProvider {
26449
26698
  ...getEnvString("OLLAMA_API_KEY") ? { Authorization: `Bearer ${getEnvString("OLLAMA_API_KEY")}` } : {}
26450
26699
  },
26451
26700
  body: JSON.stringify(params)
26452
- }, REQUEST_TIMEOUT_MS, "json");
26701
+ }, REQUEST_TIMEOUT_MS$1, "json");
26453
26702
  } catch (err) {
26454
26703
  return { error: `API call error: ${String(err)}` };
26455
26704
  }
@@ -26573,7 +26822,7 @@ var OpenAiAssistantProvider = class extends OpenAiGenericProvider {
26573
26822
  organization: this.getOrganization(),
26574
26823
  baseURL: this.getApiUrl(),
26575
26824
  maxRetries: 3,
26576
- timeout: REQUEST_TIMEOUT_MS,
26825
+ timeout: REQUEST_TIMEOUT_MS$1,
26577
26826
  defaultHeaders: this.assistantConfig.headers
26578
26827
  });
26579
26828
  const messages = parseChatPrompt(prompt, [{
@@ -27706,7 +27955,12 @@ const SORA_COSTS = {
27706
27955
  /**
27707
27956
  * Valid video sizes (aspect ratios) for OpenAI Sora
27708
27957
  */
27709
- const VALID_VIDEO_SIZES = ["1280x720", "720x1280"];
27958
+ const VALID_VIDEO_SIZES = [
27959
+ "1280x720",
27960
+ "720x1280",
27961
+ "1792x1024",
27962
+ "1024x1792"
27963
+ ];
27710
27964
  /**
27711
27965
  * Valid video durations in seconds for OpenAI Sora
27712
27966
  */
@@ -27874,7 +28128,7 @@ var OpenAiVideoProvider = class extends OpenAiGenericProvider {
27874
28128
  * Download video content and store in media storage
27875
28129
  */
27876
28130
  async downloadVideoContent(soraVideoId, variant, cacheKey, evalId) {
27877
- const url = `${this.getApiUrl()}/videos/${soraVideoId}/content${variant !== "video" ? `?variant=${variant}` : ""}`;
28131
+ const url = `${this.getApiUrl()}/videos/${soraVideoId}/content${variant === "video" ? "" : `?variant=${variant}`}`;
27878
28132
  const headers = this.getAuthHeaders();
27879
28133
  try {
27880
28134
  const response = await fetchWithProxy(url, {
@@ -28104,7 +28358,7 @@ var OpenRouterProvider = class extends OpenAiChatCompletionProvider {
28104
28358
  ...config.headers
28105
28359
  },
28106
28360
  body: JSON.stringify(body)
28107
- }, REQUEST_TIMEOUT_MS, "json", context?.bustCache ?? context?.debug));
28361
+ }, REQUEST_TIMEOUT_MS$1, "json", context?.bustCache ?? context?.debug));
28108
28362
  if (status < 200 || status >= 300) return { error: `API error: ${status} ${statusText}\n${typeof data === "string" ? data : JSON.stringify(data)}` };
28109
28363
  } catch (err) {
28110
28364
  logger.error(`API call error: ${String(err)}`);
@@ -28371,7 +28625,7 @@ var PythonWorker = class {
28371
28625
  maxCrashes = 3;
28372
28626
  pendingRequest = null;
28373
28627
  requestTimeout = null;
28374
- constructor(scriptPath, functionName, pythonPath, timeout = REQUEST_TIMEOUT_MS, onReady) {
28628
+ constructor(scriptPath, functionName, pythonPath, timeout = REQUEST_TIMEOUT_MS$1, onReady) {
28375
28629
  this.scriptPath = scriptPath;
28376
28630
  this.functionName = functionName;
28377
28631
  this.pythonPath = pythonPath;
@@ -28912,7 +29166,7 @@ var ReplicateProvider = class {
28912
29166
  providerId: this.id(),
28913
29167
  temperature: this.config.temperature,
28914
29168
  topP: this.config.top_p,
28915
- maxTokens: this.config.max_tokens || this.config.max_length || this.config.max_new_tokens,
29169
+ maxTokens: this.config.max_tokens ?? this.config.max_length ?? this.config.max_new_tokens,
28916
29170
  testIndex: context?.test?.vars?.__testIdx,
28917
29171
  promptLabel: context?.prompt?.label,
28918
29172
  traceparent: context?.traceparent
@@ -28956,14 +29210,14 @@ var ReplicateProvider = class {
28956
29210
  let response;
28957
29211
  try {
28958
29212
  const inputOptions = {
28959
- max_length: this.config.max_length || getEnvInt("REPLICATE_MAX_LENGTH"),
28960
- max_new_tokens: this.config.max_new_tokens || getEnvInt("REPLICATE_MAX_NEW_TOKENS"),
28961
- temperature: this.config.temperature || getEnvFloat("REPLICATE_TEMPERATURE"),
28962
- top_p: this.config.top_p || getEnvFloat("REPLICATE_TOP_P"),
28963
- top_k: this.config.top_k || getEnvInt("REPLICATE_TOP_K"),
28964
- repetition_penalty: this.config.repetition_penalty || getEnvFloat("REPLICATE_REPETITION_PENALTY"),
28965
- stop_sequences: this.config.stop_sequences || getEnvString("REPLICATE_STOP_SEQUENCES"),
28966
- seed: this.config.seed || getEnvInt("REPLICATE_SEED"),
29213
+ max_length: this.config.max_length ?? getEnvInt("REPLICATE_MAX_LENGTH"),
29214
+ max_new_tokens: this.config.max_new_tokens ?? getEnvInt("REPLICATE_MAX_NEW_TOKENS"),
29215
+ temperature: this.config.temperature ?? getEnvFloat("REPLICATE_TEMPERATURE"),
29216
+ top_p: this.config.top_p ?? getEnvFloat("REPLICATE_TOP_P"),
29217
+ top_k: this.config.top_k ?? getEnvInt("REPLICATE_TOP_K"),
29218
+ repetition_penalty: this.config.repetition_penalty ?? getEnvFloat("REPLICATE_REPETITION_PENALTY"),
29219
+ stop_sequences: this.config.stop_sequences ?? getEnvString("REPLICATE_STOP_SEQUENCES"),
29220
+ seed: this.config.seed ?? getEnvInt("REPLICATE_SEED"),
28967
29221
  system_prompt: systemPrompt,
28968
29222
  prompt: userPrompt
28969
29223
  };
@@ -28982,7 +29236,7 @@ var ReplicateProvider = class {
28982
29236
  Prefer: "wait=60"
28983
29237
  },
28984
29238
  body: JSON.stringify(data)
28985
- }, REQUEST_TIMEOUT_MS, "json")).data;
29239
+ }, REQUEST_TIMEOUT_MS$1, "json")).data;
28986
29240
  if (response.status === "starting" || response.status === "processing") response = await this.pollForCompletion(response.id);
28987
29241
  if (response.status === "failed") throw new Error(response.error || "Prediction failed");
28988
29242
  response = response.output;
@@ -29018,7 +29272,7 @@ var ReplicateProvider = class {
29018
29272
  const prediction = (await fetchWithCache(`https://api.replicate.com/v1/predictions/${predictionId}`, {
29019
29273
  method: "GET",
29020
29274
  headers: { Authorization: `Bearer ${this.apiKey}` }
29021
- }, REQUEST_TIMEOUT_MS, "json", false)).data;
29275
+ }, REQUEST_TIMEOUT_MS$1, "json", false)).data;
29022
29276
  if (prediction.status === "succeeded" || prediction.status === "failed" || prediction.status === "canceled") return prediction;
29023
29277
  await new Promise((resolve) => setTimeout(resolve, pollInterval));
29024
29278
  }
@@ -29110,7 +29364,7 @@ var ReplicateImageProvider = class extends ReplicateProvider {
29110
29364
  Prefer: "wait=60"
29111
29365
  },
29112
29366
  body: JSON.stringify(data)
29113
- }, REQUEST_TIMEOUT_MS, "json")).data;
29367
+ }, REQUEST_TIMEOUT_MS$1, "json")).data;
29114
29368
  logger.debug(`Initial prediction status: ${prediction.status}, ID: ${prediction.id}`);
29115
29369
  if (prediction.status === "starting" || prediction.status === "processing") prediction = await this.pollForCompletion(prediction.id);
29116
29370
  logger.debug(`Final prediction status: ${prediction.status}, output: ${JSON.stringify(prediction.output)}`);
@@ -29451,7 +29705,7 @@ var SnowflakeCortexProvider = class extends OpenAiChatCompletionProvider {
29451
29705
  ...config.headers
29452
29706
  },
29453
29707
  body: JSON.stringify(body)
29454
- }, REQUEST_TIMEOUT_MS, "json", context?.bustCache ?? context?.debug));
29708
+ }, REQUEST_TIMEOUT_MS$1, "json", context?.bustCache ?? context?.debug));
29455
29709
  if (status < 200 || status >= 300) return { error: `API error: ${status} ${statusText}\n${typeof data === "string" ? data : JSON.stringify(data)}` };
29456
29710
  } catch (err) {
29457
29711
  logger.error(`[Snowflake Cortex] API call error: ${String(err)}`);
@@ -29866,7 +30120,7 @@ function createTrueFoundryProvider(providerPath, options = {}) {
29866
30120
  }
29867
30121
  //#endregion
29868
30122
  //#region src/providers/vercel.ts
29869
- const DEFAULT_TIMEOUT_MS = REQUEST_TIMEOUT_MS;
30123
+ const DEFAULT_TIMEOUT_MS = REQUEST_TIMEOUT_MS$1;
29870
30124
  /**
29871
30125
  * Resolves the API key from config, environment variables, or defaults.
29872
30126
  */
@@ -30267,7 +30521,7 @@ var VoyageEmbeddingProvider = class {
30267
30521
  ...this.config.headers
30268
30522
  },
30269
30523
  body: JSON.stringify(body)
30270
- }, REQUEST_TIMEOUT_MS));
30524
+ }, REQUEST_TIMEOUT_MS$1));
30271
30525
  } catch (err) {
30272
30526
  logger.error(`API call error: ${err}`);
30273
30527
  throw err;
@@ -30377,7 +30631,7 @@ function generateConfigHash(config) {
30377
30631
  }
30378
30632
  async function fetchModelSpecs() {
30379
30633
  try {
30380
- const { data, cached: _cached, latencyMs: _latencyMs } = await fetchWithCache("https://us-south.ml.cloud.ibm.com/ml/v1/foundation_model_specs?version=2023-09-30", { headers: { "Content-Type": "application/json" } }, REQUEST_TIMEOUT_MS);
30634
+ const { data, cached: _cached, latencyMs: _latencyMs } = await fetchWithCache("https://us-south.ml.cloud.ibm.com/ml/v1/foundation_model_specs?version=2023-09-30", { headers: { "Content-Type": "application/json" } }, REQUEST_TIMEOUT_MS$1);
30381
30635
  return (typeof data === "string" ? JSON.parse(data) : data)?.resources || [];
30382
30636
  } catch (error) {
30383
30637
  logger.error(`Failed to fetch model specs: ${error}`);
@@ -30676,7 +30930,7 @@ var WebhookProvider = class {
30676
30930
  method: "POST",
30677
30931
  headers: { "Content-Type": "application/json" },
30678
30932
  body: JSON.stringify(params)
30679
- }, REQUEST_TIMEOUT_MS, "json"));
30933
+ }, REQUEST_TIMEOUT_MS$1, "json"));
30680
30934
  } catch (err) {
30681
30935
  return { error: `Webhook call error: ${String(err)}` };
30682
30936
  }
@@ -30754,7 +31008,7 @@ var WebSocketProvider = class {
30754
31008
  constructor(url, options) {
30755
31009
  this.config = options.config;
30756
31010
  this.url = this.config.url || url;
30757
- this.timeoutMs = this.config.timeoutMs || REQUEST_TIMEOUT_MS;
31011
+ this.timeoutMs = this.config.timeoutMs || REQUEST_TIMEOUT_MS$1;
30758
31012
  this.transformResponse = createTransformResponse(this.config.transformResponse || this.config.responseParser);
30759
31013
  this.streamResponse = this.config.streamResponse ? createStreamResponse(this.config.streamResponse) : void 0;
30760
31014
  invariant(this.config.messageTemplate, `Expected WebSocket provider ${this.url} to have a config containing {messageTemplate}, but got ${safeJsonStringify(this.config)}`);
@@ -30771,7 +31025,7 @@ var WebSocketProvider = class {
30771
31025
  prompt
30772
31026
  };
30773
31027
  const message = nunjucks.renderString(this.config.messageTemplate, vars);
30774
- const streamResponse = this.streamResponse != null ? await this.streamResponse : void 0;
31028
+ const streamResponse = this.streamResponse == null ? void 0 : await this.streamResponse;
30775
31029
  logger.debug(`Sending WebSocket message to ${this.url}: ${message}`);
30776
31030
  let accumulator = { error: "unknown error occurred" };
30777
31031
  return new Promise((resolve, reject) => {
@@ -31209,7 +31463,7 @@ var XAIImageProvider = class extends OpenAiImageProvider {
31209
31463
  let cached = false;
31210
31464
  let latencyMs;
31211
31465
  try {
31212
- ({data, cached, status, statusText, latencyMs} = await callOpenAiImageApi(`${this.getApiUrl()}${endpoint}`, body, headers, REQUEST_TIMEOUT_MS));
31466
+ ({data, cached, status, statusText, latencyMs} = await callOpenAiImageApi(`${this.getApiUrl()}${endpoint}`, body, headers, REQUEST_TIMEOUT_MS$1));
31213
31467
  if (status < 200 || status >= 300) return { error: `API error: ${status} ${statusText}\n${typeof data === "string" ? data : JSON.stringify(data)}` };
31214
31468
  } catch (err) {
31215
31469
  logger.error(`API call error: ${String(err)}`);
@@ -31333,10 +31587,10 @@ var XAIResponsesProvider = class {
31333
31587
  const body = {
31334
31588
  model: this.modelName,
31335
31589
  input,
31336
- ...maxOutputTokens !== void 0 ? { max_output_tokens: maxOutputTokens } : {},
31337
- ...temperature !== void 0 ? { temperature } : {},
31590
+ ...maxOutputTokens === void 0 ? {} : { max_output_tokens: maxOutputTokens },
31591
+ ...temperature === void 0 ? {} : { temperature },
31338
31592
  ...config.instructions ? { instructions: config.instructions } : {},
31339
- ...config.top_p !== void 0 ? { top_p: config.top_p } : {},
31593
+ ...config.top_p === void 0 ? {} : { top_p: config.top_p },
31340
31594
  ...loadedTools && loadedTools.length > 0 ? { tools: loadedTools } : {},
31341
31595
  ...config.tool_choice ? { tool_choice: config.tool_choice } : {},
31342
31596
  ...config.previous_response_id ? { previous_response_id: config.previous_response_id } : {},
@@ -31382,7 +31636,7 @@ var XAIResponsesProvider = class {
31382
31636
  ...config.headers
31383
31637
  },
31384
31638
  body: JSON.stringify(body)
31385
- }, REQUEST_TIMEOUT_MS, "json", context?.bustCache ?? context?.debug, this.config.maxRetries);
31639
+ }, REQUEST_TIMEOUT_MS$1, "json", context?.bustCache ?? context?.debug, this.config.maxRetries);
31386
31640
  data = response.data;
31387
31641
  cached = response.cached;
31388
31642
  status = response.status;
@@ -32159,7 +32413,7 @@ const providerMap = [
32159
32413
  {
32160
32414
  test: (providerPath) => providerPath.startsWith("opencode:") || providerPath === "opencode",
32161
32415
  create: async (providerPath, providerOptions, context) => {
32162
- const { OpenCodeSDKProvider } = await import("./opencode-sdk-0j6rTWNb.js");
32416
+ const { OpenCodeSDKProvider } = await import("./opencode-sdk-C71Z0ehR.js");
32163
32417
  return new OpenCodeSDKProvider({
32164
32418
  ...providerOptions,
32165
32419
  id: providerPath,
@@ -32171,18 +32425,15 @@ const providerMap = [
32171
32425
  {
32172
32426
  test: (providerPath) => providerPath.startsWith("openclaw:") || providerPath === "openclaw",
32173
32427
  create: async (providerPath, providerOptions, context) => {
32174
- const { createOpenClawProvider } = await import("./openclaw-DiSz3I5L.js");
32428
+ const { createOpenClawProvider } = await import("./openclaw-D2ENvu7a.js");
32175
32429
  return createOpenClawProvider(providerPath, providerOptions, context.env);
32176
32430
  }
32177
32431
  },
32178
32432
  {
32179
32433
  test: (providerPath) => providerPath.startsWith("anthropic:claude-agent-sdk") || providerPath.startsWith("anthropic:claude-code"),
32180
- create: async (_providerPath, providerOptions, context) => {
32181
- const { ClaudeCodeSDKProvider } = await import("./claude-agent-sdk-DfCoW0E6.js");
32182
- return new ClaudeCodeSDKProvider({
32183
- ...providerOptions,
32184
- env: context.env
32185
- });
32434
+ create: async (_providerPath, providerOptions, _context) => {
32435
+ const { ClaudeCodeSDKProvider } = await import("./claude-agent-sdk-Bq5EArsX.js");
32436
+ return new ClaudeCodeSDKProvider({ ...providerOptions });
32186
32437
  }
32187
32438
  },
32188
32439
  {
@@ -32235,25 +32486,25 @@ const providerMap = [
32235
32486
  const modelName = splits.slice(2).join(":");
32236
32487
  if (modelType === "converse") return new AwsBedrockConverseProvider(modelName, providerOptions);
32237
32488
  if (modelType === "nova-sonic" || modelType.includes("amazon.nova-sonic")) {
32238
- const { NovaSonicProvider } = await import("./nova-sonic-De1HW5fD.js");
32489
+ const { NovaSonicProvider } = await import("./nova-sonic-DWswpN1E.js");
32239
32490
  return new NovaSonicProvider("amazon.nova-sonic-v1:0", providerOptions);
32240
32491
  }
32241
32492
  if (modelType.includes("luma.ray") || modelName.includes("luma.ray")) {
32242
- const { LumaRayVideoProvider } = await import("./luma-ray-BS2_tY8L.js");
32493
+ const { LumaRayVideoProvider } = await import("./luma-ray-0ehMPt5N.js");
32243
32494
  return new LumaRayVideoProvider(modelName.includes("luma.ray") ? modelName : splits.slice(1).join(":") || "luma.ray-v2:0", providerOptions);
32244
32495
  }
32245
32496
  if (modelType.includes("amazon.nova-reel") || modelType === "video" && (modelName.includes("amazon.nova-reel") || modelName === "")) {
32246
- const { NovaReelVideoProvider } = await import("./nova-reel-D_W1tjMH.js");
32497
+ const { NovaReelVideoProvider } = await import("./nova-reel-DEeQlnOJ.js");
32247
32498
  return new NovaReelVideoProvider(modelName || "amazon.nova-reel-v1:1", providerOptions);
32248
32499
  }
32249
32500
  if (modelType === "agents") {
32250
- const { AwsBedrockAgentsProvider } = await import("./agents-cLXA8a_8.js");
32501
+ const { AwsBedrockAgentsProvider } = await import("./agents-CgBniSlI.js");
32251
32502
  return new AwsBedrockAgentsProvider(modelName, providerOptions);
32252
32503
  }
32253
32504
  if (modelType === "completion") return new AwsBedrockCompletionProvider(modelName, providerOptions);
32254
32505
  if (modelType === "embeddings" || modelType === "embedding") return new AwsBedrockEmbeddingProvider(modelName, providerOptions);
32255
32506
  if (modelType === "kb" || modelType === "knowledge-base") {
32256
- const { AwsBedrockKnowledgeBaseProvider } = await import("./knowledgeBase-CYTLHOt1.js");
32507
+ const { AwsBedrockKnowledgeBaseProvider } = await import("./knowledgeBase-Ce3ofVan.js");
32257
32508
  return new AwsBedrockKnowledgeBaseProvider(modelName, providerOptions);
32258
32509
  }
32259
32510
  return new AwsBedrockCompletionProvider(splits.slice(1).join(":"), providerOptions);
@@ -32263,7 +32514,7 @@ const providerMap = [
32263
32514
  test: (providerPath) => providerPath.startsWith("bedrock-agent:"),
32264
32515
  create: async (providerPath, providerOptions, _context) => {
32265
32516
  const agentId = providerPath.substring(14);
32266
- const { AwsBedrockAgentsProvider } = await import("./agents-cLXA8a_8.js");
32517
+ const { AwsBedrockAgentsProvider } = await import("./agents-CgBniSlI.js");
32267
32518
  return new AwsBedrockAgentsProvider(agentId, providerOptions);
32268
32519
  }
32269
32520
  },
@@ -32273,7 +32524,7 @@ const providerMap = [
32273
32524
  const splits = providerPath.split(":");
32274
32525
  const modelType = splits[1];
32275
32526
  const endpointName = splits.slice(2).join(":");
32276
- const { SageMakerCompletionProvider, SageMakerEmbeddingProvider } = await import("./sagemaker-YSyBXQQh.js");
32527
+ const { SageMakerCompletionProvider, SageMakerEmbeddingProvider } = await import("./sagemaker-MUbD5V3v.js");
32277
32528
  if (modelType === "embedding" || modelType === "embeddings") return new SageMakerEmbeddingProvider(endpointName || modelType, providerOptions);
32278
32529
  if (splits.length === 2) return new SageMakerCompletionProvider(modelType, providerOptions);
32279
32530
  if (endpointName.includes("jumpstart") || modelType === "jumpstart") return new SageMakerCompletionProvider(endpointName, {
@@ -32314,7 +32565,7 @@ const providerMap = [
32314
32565
  {
32315
32566
  test: (providerPath) => providerPath.startsWith("cloudflare-ai:"),
32316
32567
  create: async (providerPath, providerOptions, context) => {
32317
- const { createCloudflareAiProvider } = await import("./cloudflare-ai-BxAGvfju.js");
32568
+ const { createCloudflareAiProvider } = await import("./cloudflare-ai-DdKP9TKT.js");
32318
32569
  return createCloudflareAiProvider(providerPath, {
32319
32570
  ...providerOptions,
32320
32571
  env: context.env
@@ -32324,7 +32575,7 @@ const providerMap = [
32324
32575
  {
32325
32576
  test: (providerPath) => providerPath.startsWith("cloudflare-gateway:"),
32326
32577
  create: async (providerPath, providerOptions, context) => {
32327
- const { createCloudflareGatewayProvider } = await import("./cloudflare-gateway-B9HWA5wf.js");
32578
+ const { createCloudflareGatewayProvider } = await import("./cloudflare-gateway-D-e9i1Sn.js");
32328
32579
  return createCloudflareGatewayProvider(providerPath, {
32329
32580
  ...providerOptions,
32330
32581
  env: context.env
@@ -32476,27 +32727,27 @@ const providerMap = [
32476
32727
  create: async (providerPath, providerOptions, context) => {
32477
32728
  const modelType = providerPath.split(":")[1];
32478
32729
  if (modelType === "image") {
32479
- const { createHyperbolicImageProvider } = await import("./image-B02ogr_b.js");
32730
+ const { createHyperbolicImageProvider } = await import("./image-DO0RYnjH.js");
32480
32731
  return createHyperbolicImageProvider(providerPath, {
32481
32732
  ...providerOptions,
32482
32733
  env: context.env
32483
32734
  });
32484
32735
  }
32485
32736
  if (modelType === "audio") {
32486
- const { createHyperbolicAudioProvider } = await import("./audio-Dz3z7s3J.js");
32737
+ const { createHyperbolicAudioProvider } = await import("./audio-DjU9GswO.js");
32487
32738
  return createHyperbolicAudioProvider(providerPath, {
32488
32739
  ...providerOptions,
32489
32740
  env: context.env
32490
32741
  });
32491
32742
  }
32492
- const { createHyperbolicProvider } = await import("./chat-qmatte1u.js");
32743
+ const { createHyperbolicProvider } = await import("./chat-BE0qTA8e.js");
32493
32744
  return createHyperbolicProvider(providerPath, providerOptions);
32494
32745
  }
32495
32746
  },
32496
32747
  {
32497
32748
  test: (providerPath) => providerPath.startsWith("litellm:"),
32498
32749
  create: async (providerPath, providerOptions, context) => {
32499
- const { createLiteLLMProvider } = await import("./litellm-AaeZcZQF.js");
32750
+ const { createLiteLLMProvider } = await import("./litellm-Bo2gQXpo.js");
32500
32751
  return createLiteLLMProvider(providerPath, {
32501
32752
  config: providerOptions,
32502
32753
  env: context.env
@@ -32553,9 +32804,16 @@ const providerMap = [
32553
32804
  const modelName = splits.slice(2).join(":");
32554
32805
  const configuredModel = getConfiguredOpenAiModel(providerOptions);
32555
32806
  if (modelType === "codex-sdk" || modelType === "codex") {
32556
- const { OpenAICodexSDKProvider } = await import("./codex-sdk-GGAw0qbD.js");
32807
+ const { OpenAICodexSDKProvider } = await import("./codex-sdk-DE5G18dx.js");
32808
+ const codexModel = modelName || configuredModel;
32809
+ const codexProviderId = providerOptions.id ?? providerPath;
32557
32810
  return new OpenAICodexSDKProvider({
32558
32811
  ...providerOptions,
32812
+ id: codexProviderId,
32813
+ config: codexModel ? {
32814
+ ...providerOptions.config,
32815
+ model: codexModel
32816
+ } : providerOptions.config,
32559
32817
  env: context.env
32560
32818
  });
32561
32819
  }
@@ -32566,7 +32824,7 @@ const providerMap = [
32566
32824
  if (modelType === "realtime") return new OpenAiRealtimeProvider(modelName || configuredModel || "gpt-4o-realtime-preview-2024-12-17", providerOptions);
32567
32825
  if (modelType === "responses") return new OpenAiResponsesProvider(modelName || configuredModel || "gpt-4.1-2025-04-14", providerOptions);
32568
32826
  if (modelType === "transcription") {
32569
- const { OpenAiTranscriptionProvider } = await import("./transcription-s6A-bNrZ.js");
32827
+ const { OpenAiTranscriptionProvider } = await import("./transcription-DOMMTu01.js");
32570
32828
  return new OpenAiTranscriptionProvider(modelName || configuredModel || "gpt-4o-transcribe-diarize", providerOptions);
32571
32829
  }
32572
32830
  if (OpenAiChatCompletionProvider.OPENAI_CHAT_MODEL_NAMES.includes(modelType)) return new OpenAiChatCompletionProvider(modelType, providerOptions);
@@ -32574,11 +32832,11 @@ const providerMap = [
32574
32832
  if (OpenAiRealtimeProvider.OPENAI_REALTIME_MODEL_NAMES.includes(modelType)) return new OpenAiRealtimeProvider(modelType, providerOptions);
32575
32833
  if (OpenAiResponsesProvider.OPENAI_RESPONSES_MODEL_NAMES.includes(modelType)) return new OpenAiResponsesProvider(modelType, providerOptions);
32576
32834
  if (modelType === "agents") {
32577
- const { OpenAiAgentsProvider } = await import("./agents-D__IdAlg.js");
32835
+ const { OpenAiAgentsProvider } = await import("./agents-Bqgfdokm.js");
32578
32836
  return new OpenAiAgentsProvider(modelName || "default-agent", providerOptions);
32579
32837
  }
32580
32838
  if (modelType === "chatkit") {
32581
- const { OpenAiChatKitProvider } = await import("./chatkit-BxFvW8KY.js");
32839
+ const { OpenAiChatKitProvider } = await import("./chatkit-_8eJqKcD.js");
32582
32840
  return new OpenAiChatKitProvider(modelName || "", providerOptions);
32583
32841
  }
32584
32842
  if (modelType === "assistant") return new OpenAiAssistantProvider(modelName, providerOptions);
@@ -32621,7 +32879,7 @@ const providerMap = [
32621
32879
  {
32622
32880
  test: (providerPath) => providerPath.startsWith("quiverai:"),
32623
32881
  create: async (providerPath, providerOptions, context) => {
32624
- const { createQuiverAiProvider } = await import("./quiverai-uH-dcTIr.js");
32882
+ const { createQuiverAiProvider } = await import("./quiverai-CCQn73lq.js");
32625
32883
  return createQuiverAiProvider(providerPath, providerOptions, context.env);
32626
32884
  }
32627
32885
  },
@@ -32639,7 +32897,7 @@ const providerMap = [
32639
32897
  {
32640
32898
  test: (providerPath) => providerPath.startsWith("modelslab:"),
32641
32899
  create: async (providerPath, providerOptions, context) => {
32642
- const { ModelsLabImageProvider } = await import("./modelslab-Bx9IrZfS.js");
32900
+ const { ModelsLabImageProvider } = await import("./modelslab-BI458moT.js");
32643
32901
  const splits = providerPath.split(":");
32644
32902
  const modelType = splits[1];
32645
32903
  const modelName = splits.slice(2).join(":");
@@ -32683,7 +32941,7 @@ const providerMap = [
32683
32941
  {
32684
32942
  test: (providerPath) => providerPath.startsWith("aimlapi:"),
32685
32943
  create: async (providerPath, providerOptions, context) => {
32686
- const { createAimlApiProvider } = await import("./aimlapi-CnkC2HqE.js");
32944
+ const { createAimlApiProvider } = await import("./aimlapi-DaC3qZ-o.js");
32687
32945
  return createAimlApiProvider(providerPath, {
32688
32946
  ...providerOptions,
32689
32947
  env: context.env
@@ -32693,7 +32951,7 @@ const providerMap = [
32693
32951
  {
32694
32952
  test: (providerPath) => providerPath.startsWith("cometapi:"),
32695
32953
  create: async (providerPath, providerOptions, context) => {
32696
- const { createCometApiProvider } = await import("./cometapi-DFNiKmSz.js");
32954
+ const { createCometApiProvider } = await import("./cometapi-sp7sJpBD.js");
32697
32955
  return createCometApiProvider(providerPath, {
32698
32956
  ...providerOptions,
32699
32957
  env: context.env
@@ -32703,7 +32961,7 @@ const providerMap = [
32703
32961
  {
32704
32962
  test: (providerPath) => providerPath.startsWith("docker:"),
32705
32963
  create: async (providerPath, providerOptions, context) => {
32706
- const { createDockerProvider } = await import("./docker-BvfL2BrW.js");
32964
+ const { createDockerProvider } = await import("./docker-DpguQj-w.js");
32707
32965
  return createDockerProvider(providerPath, {
32708
32966
  ...providerOptions,
32709
32967
  env: context.env
@@ -32724,6 +32982,14 @@ const providerMap = [
32724
32982
  create: async (providerPath, providerOptions, _context) => {
32725
32983
  const splits = providerPath.split(":");
32726
32984
  const firstPart = splits[1];
32985
+ if (firstPart === "video") return new GoogleVideoProvider(splits.slice(2).join(":"), {
32986
+ ...providerOptions,
32987
+ id: providerPath,
32988
+ config: {
32989
+ ...providerOptions.config,
32990
+ vertexai: true
32991
+ }
32992
+ });
32727
32993
  if (firstPart === "chat") return new VertexChatProvider(splits.slice(2).join(":"), providerOptions);
32728
32994
  if (firstPart === "embedding" || firstPart === "embeddings") return new VertexEmbeddingProvider(splits.slice(2).join(":"), providerOptions);
32729
32995
  return new VertexChatProvider(splits.slice(1).join(":"), providerOptions);
@@ -32790,7 +33056,10 @@ const providerMap = [
32790
33056
  const modelName = splits.slice(2).join(":");
32791
33057
  if (serviceType === "live") return new GoogleLiveProvider(modelName, providerOptions);
32792
33058
  else if (serviceType === "image") return new GoogleImageProvider(modelName, providerOptions);
32793
- else if (serviceType === "video") return new GoogleVideoProvider(modelName, providerOptions);
33059
+ else if (serviceType === "video") return new GoogleVideoProvider(modelName, {
33060
+ ...providerOptions,
33061
+ id: providerPath
33062
+ });
32794
33063
  }
32795
33064
  const modelName = splits[1];
32796
33065
  if (modelName.includes("-image")) return new GeminiImageProvider(modelName, providerOptions);
@@ -32989,7 +33258,7 @@ Example: transformers:feature-extraction:Xenova/all-MiniLM-L6-v2`);
32989
33258
  test: (providerPath) => providerPath === "slack" || providerPath.startsWith("slack:"),
32990
33259
  create: async (providerPath, providerOptions, _context) => {
32991
33260
  try {
32992
- const { SlackProvider } = await import("./slack-DCUPTzS2.js");
33261
+ const { SlackProvider } = await import("./slack-BR0HtO3K.js");
32993
33262
  if (providerPath === "slack") return new SlackProvider(providerOptions);
32994
33263
  const splits = providerPath.split(":");
32995
33264
  if (splits.length < 2) throw new Error("Invalid Slack provider path. Use slack:<channel_id> or slack:channel:<channel_id>");
@@ -33285,4 +33554,4 @@ function getProviderIds(providerPaths) {
33285
33554
  //#endregion
33286
33555
  export { AzureEmbeddingProvider as $, renderPrompt as A, OpenAiModerationProvider as B, extractVariablesFromJson as C, isEmptyResponse as D, isBasicRefusal as E, TokenUsageTracker as F, VertexChatProvider as G, MistralEmbeddingProvider as H, createRateLimitRegistry as I, DefaultGradingProvider$1 as J, AIStudioChatProvider as K, createProviderRateLimitOptions as L, isPackagePath as M, loadFromPackage as N, removePrefix as O, redteamProviderManager as P, AzureModerationProvider as Q, PromptfooHarmfulCompletionProvider as R, extractPromptFromTags as S, getShortPluginId as T, DefaultEmbeddingProvider as U, MistralChatCompletionProvider as V, DefaultGradingProvider as W, DefaultSuggestionsProvider as X, DefaultLlmRubricProvider as Y, DefaultSynthesizeProvider as Z, pluginMatchesStrategyTargets as _, resolveTeamId as _t, resolveProviderConfigs as a, getCloudDatabaseId as at, extractGoalFromPrompt as b, createTransformRequest as c, getEvalConfigFromCloud as ct, Strategies as d, getPoliciesFromCloud as dt, AzureChatCompletionProvider as et, loadStrategy as f, getProviderFromCloud as ft, retrieveMedia as g, resolveTeamFromIdentifier as gt, mediaExists as h, makeRequest as ht, resolveProvider as i, checkCloudPermissions as it, runExtensionHook as j, collectFileMetadata as k, createTransformResponse$1 as l, getOrgContext as lt, getMediaStorage as m, isCloudProvider as mt, loadApiProvider as n, parseScriptParts as nt, MCPProvider as o, getConfigFromCloud as ot, validateStrategies as p, getUserTeams as pt, DefaultGradingJsonProvider as q, loadApiProviders as r, canCreateTargets as rt, HttpProvider as s, getDefaultTeam as st, getProviderIds as t, getFileHashes as tt, GoogleLiveProvider as u, getPluginSeverityOverridesFromCloud as ut, checkExfilTracking as v, getSessionId as w, extractInputVarsFromPrompt as x, extractAllPromptsFromTags as y, REDTEAM_MEMORY_POISONING_PLUGIN_ID as z };
33287
33556
 
33288
- //# sourceMappingURL=providers-BlqUifFg.js.map
33557
+ //# sourceMappingURL=providers-Ch6Mr0gn.js.map