@use-lattice/litmus 0.121.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (199) hide show
  1. package/LICENSE +19 -0
  2. package/dist/src/accounts-Bt1oJb1Z.cjs +219 -0
  3. package/dist/src/accounts-DjOU8Rm3.js +178 -0
  4. package/dist/src/agentic-utils-D03IiXQc.js +153 -0
  5. package/dist/src/agentic-utils-Dh7xaMQM.cjs +180 -0
  6. package/dist/src/agents-C6BIMlZa.js +231 -0
  7. package/dist/src/agents-DvIpNX1L.cjs +666 -0
  8. package/dist/src/agents-ZP0RP9vV.cjs +231 -0
  9. package/dist/src/agents-maJXdjbR.js +665 -0
  10. package/dist/src/aimlapi-BTbQjG2E.cjs +30 -0
  11. package/dist/src/aimlapi-CwMxqfXP.js +30 -0
  12. package/dist/src/audio-BBUdvsde.cjs +97 -0
  13. package/dist/src/audio-D5DPZ7I-.js +97 -0
  14. package/dist/src/base-BEysXrkq.cjs +222 -0
  15. package/dist/src/base-C451JQfq.js +193 -0
  16. package/dist/src/blobs-BY8MDmpo.js +230 -0
  17. package/dist/src/blobs-BgcNn97m.cjs +256 -0
  18. package/dist/src/cache-BBE_lsTA.cjs +4 -0
  19. package/dist/src/cache-BkrqU5Ba.js +237 -0
  20. package/dist/src/cache-DsCxFlsZ.cjs +297 -0
  21. package/dist/src/chat-CPJWDP6a.cjs +289 -0
  22. package/dist/src/chat-CXX3xzkk.cjs +811 -0
  23. package/dist/src/chat-CcDgZFJ4.js +787 -0
  24. package/dist/src/chat-Dz5ZeGO2.js +289 -0
  25. package/dist/src/chatkit-Dw0mKkML.cjs +1158 -0
  26. package/dist/src/chatkit-swAIVuea.js +1157 -0
  27. package/dist/src/chunk-DEq-mXcV.js +15 -0
  28. package/dist/src/claude-agent-sdk-BXZJtOg6.js +379 -0
  29. package/dist/src/claude-agent-sdk-CkfyjDoG.cjs +383 -0
  30. package/dist/src/cloudflare-ai-BzpJcqUH.js +161 -0
  31. package/dist/src/cloudflare-ai-Cmy_R1y2.cjs +161 -0
  32. package/dist/src/cloudflare-gateway-B9tVQKok.cjs +272 -0
  33. package/dist/src/cloudflare-gateway-DrD3ew3H.js +272 -0
  34. package/dist/src/codex-sdk-Dezj9Nwm.js +1056 -0
  35. package/dist/src/codex-sdk-Dl9D4k5B.cjs +1060 -0
  36. package/dist/src/cometapi-C-9YvCHC.js +54 -0
  37. package/dist/src/cometapi-DHgDKoO2.cjs +54 -0
  38. package/dist/src/completion-B8Ctyxpr.js +120 -0
  39. package/dist/src/completion-Cxrt08sj.cjs +131 -0
  40. package/dist/src/createHash-BwgE13yv.cjs +27 -0
  41. package/dist/src/createHash-DmPQkvBh.js +15 -0
  42. package/dist/src/docker-BiqcTwLv.js +80 -0
  43. package/dist/src/docker-C7tEJnP-.cjs +80 -0
  44. package/dist/src/esm-C62Zofr1.cjs +409 -0
  45. package/dist/src/esm-DMVc93eh.js +379 -0
  46. package/dist/src/evalResult-C3NJPQOo.cjs +301 -0
  47. package/dist/src/evalResult-C7JJAPBb.js +295 -0
  48. package/dist/src/evalResult-DoVTZZWI.cjs +2 -0
  49. package/dist/src/extractor-DnMD3fwt.cjs +391 -0
  50. package/dist/src/extractor-DtlL28vL.js +374 -0
  51. package/dist/src/fetch-BTxakTSg.cjs +1133 -0
  52. package/dist/src/fetch-DQckpUFz.js +928 -0
  53. package/dist/src/fileExtensions-DnqA1y9x.js +85 -0
  54. package/dist/src/fileExtensions-bYh77CN8.cjs +114 -0
  55. package/dist/src/genaiTracer-CyZrmaK0.cjs +268 -0
  56. package/dist/src/genaiTracer-D3fD9dNV.js +256 -0
  57. package/dist/src/graders-BNscxFrU.js +13644 -0
  58. package/dist/src/graders-D2oE9Msq.js +2 -0
  59. package/dist/src/graders-c0Ez_w-9.cjs +2 -0
  60. package/dist/src/graders-d0F2M3e9.cjs +14056 -0
  61. package/dist/src/image-0ZhE0VlR.cjs +280 -0
  62. package/dist/src/image-CWE1pdNv.js +257 -0
  63. package/dist/src/image-D9ZK6hwL.js +163 -0
  64. package/dist/src/image-DKZgZITg.cjs +163 -0
  65. package/dist/src/index.cjs +11366 -0
  66. package/dist/src/index.d.cts +19640 -0
  67. package/dist/src/index.d.ts +19641 -0
  68. package/dist/src/index.js +11306 -0
  69. package/dist/src/invariant-Ddh24eXh.js +25 -0
  70. package/dist/src/invariant-kfQ8Bu82.cjs +30 -0
  71. package/dist/src/knowledgeBase-BgPyGFUd.cjs +122 -0
  72. package/dist/src/knowledgeBase-DyHilYaP.js +122 -0
  73. package/dist/src/litellm-CyMeneHS.js +135 -0
  74. package/dist/src/litellm-DWDF73yF.cjs +135 -0
  75. package/dist/src/logger-C40ZGil9.js +717 -0
  76. package/dist/src/logger-DyfK9PBt.cjs +917 -0
  77. package/dist/src/luma-ray-BAU9X_ep.cjs +315 -0
  78. package/dist/src/luma-ray-nwVseBbv.js +313 -0
  79. package/dist/src/messages-B5ADWTTv.js +245 -0
  80. package/dist/src/messages-BCnZfqrS.cjs +257 -0
  81. package/dist/src/meteor-DLZZ3osF.cjs +134 -0
  82. package/dist/src/meteor-DUiCJRC-.js +134 -0
  83. package/dist/src/modelslab-00cveB8L.cjs +163 -0
  84. package/dist/src/modelslab-D9sCU_L7.js +163 -0
  85. package/dist/src/nova-reel-CTapvqYH.js +276 -0
  86. package/dist/src/nova-reel-DlWuuroF.cjs +278 -0
  87. package/dist/src/nova-sonic-5UPWfeMv.cjs +363 -0
  88. package/dist/src/nova-sonic-BhSwQNym.js +363 -0
  89. package/dist/src/openai-BWrJK9d8.cjs +52 -0
  90. package/dist/src/openai-DumO8WQn.js +47 -0
  91. package/dist/src/openclaw-B8brrjC_.cjs +577 -0
  92. package/dist/src/openclaw-Bkayww9q.js +571 -0
  93. package/dist/src/opencode-sdk-7xjoDNiM.cjs +562 -0
  94. package/dist/src/opencode-sdk-SGwAPxht.js +558 -0
  95. package/dist/src/otlpReceiver-CoAHfAN9.cjs +15 -0
  96. package/dist/src/otlpReceiver-oO3EQwI9.js +14 -0
  97. package/dist/src/providerRegistry-4yjhaEM8.js +45 -0
  98. package/dist/src/providerRegistry-DhV4rJIc.cjs +50 -0
  99. package/dist/src/providers-B5RJVG-7.cjs +33609 -0
  100. package/dist/src/providers-BdmZCLzV.js +33262 -0
  101. package/dist/src/providers-CxtRxn8e.js +2 -0
  102. package/dist/src/providers-DnQLNbx1.cjs +3 -0
  103. package/dist/src/pythonUtils-BD0druiM.cjs +275 -0
  104. package/dist/src/pythonUtils-IBhn5YGR.js +249 -0
  105. package/dist/src/quiverai-BDOwZBsM.cjs +213 -0
  106. package/dist/src/quiverai-D3JTF5lD.js +213 -0
  107. package/dist/src/responses-B2LCDCXZ.js +667 -0
  108. package/dist/src/responses-BvNm4Xv9.cjs +685 -0
  109. package/dist/src/rubyUtils-B0NwnfpY.cjs +245 -0
  110. package/dist/src/rubyUtils-BroxzZ7c.cjs +2 -0
  111. package/dist/src/rubyUtils-hqVw5UvJ.js +222 -0
  112. package/dist/src/sagemaker-Cno2V-Sx.js +689 -0
  113. package/dist/src/sagemaker-fV_KUgs5.cjs +691 -0
  114. package/dist/src/server-BOuAXb06.cjs +238 -0
  115. package/dist/src/server-CtI-EWzm.cjs +2 -0
  116. package/dist/src/server-Cy3DZymt.js +189 -0
  117. package/dist/src/slack-CP8xBePa.js +135 -0
  118. package/dist/src/slack-DSQ1yXVb.cjs +135 -0
  119. package/dist/src/store-BwDDaBjb.cjs +246 -0
  120. package/dist/src/store-DcbLC593.cjs +2 -0
  121. package/dist/src/store-IGpqMIkv.js +240 -0
  122. package/dist/src/tables-3Q2cL7So.cjs +373 -0
  123. package/dist/src/tables-Bi2fjr4W.js +288 -0
  124. package/dist/src/telemetry-Bg2WqF79.js +161 -0
  125. package/dist/src/telemetry-D0x6u5kX.cjs +166 -0
  126. package/dist/src/telemetry-DXNimrI0.cjs +2 -0
  127. package/dist/src/text-B_UCRPp2.js +22 -0
  128. package/dist/src/text-CW1cyrwj.cjs +33 -0
  129. package/dist/src/tokenUsageUtils-NYT-WKS6.js +138 -0
  130. package/dist/src/tokenUsageUtils-bVa1ga6f.cjs +173 -0
  131. package/dist/src/transcription-Cl_W16Pr.js +122 -0
  132. package/dist/src/transcription-yt1EecY8.cjs +124 -0
  133. package/dist/src/transform-BCtGrl_W.cjs +228 -0
  134. package/dist/src/transform-Bv6gG2MJ.cjs +1688 -0
  135. package/dist/src/transform-CY1wbpRy.js +1507 -0
  136. package/dist/src/transform-DU8rUL9P.cjs +2 -0
  137. package/dist/src/transform-yWaShiKr.js +216 -0
  138. package/dist/src/transformersAvailability-BGkzavwb.js +35 -0
  139. package/dist/src/transformersAvailability-DKoRtQLy.cjs +35 -0
  140. package/dist/src/types-5aqHpBwE.cjs +3769 -0
  141. package/dist/src/types-Bn6D9c4U.js +3300 -0
  142. package/dist/src/util-BkKlTkI2.js +293 -0
  143. package/dist/src/util-CTh0bfOm.cjs +1119 -0
  144. package/dist/src/util-D17oBwo7.cjs +328 -0
  145. package/dist/src/util-DsS_-v4p.js +613 -0
  146. package/dist/src/util-DuntT1Ga.js +951 -0
  147. package/dist/src/util-aWjdCYMI.cjs +667 -0
  148. package/dist/src/utils-CisQwpjA.js +94 -0
  149. package/dist/src/utils-yWamDvmz.cjs +123 -0
  150. package/dist/tsconfig.tsbuildinfo +1 -0
  151. package/drizzle/0000_lush_hellion.sql +36 -0
  152. package/drizzle/0001_wide_calypso.sql +3 -0
  153. package/drizzle/0002_tidy_juggernaut.sql +1 -0
  154. package/drizzle/0003_lively_naoko.sql +8 -0
  155. package/drizzle/0004_minor_peter_quill.sql +19 -0
  156. package/drizzle/0005_silky_millenium_guard.sql +2 -0
  157. package/drizzle/0006_harsh_caretaker.sql +42 -0
  158. package/drizzle/0007_cloudy_wong.sql +1 -0
  159. package/drizzle/0008_broad_boomer.sql +2 -0
  160. package/drizzle/0009_strong_marten_broadcloak.sql +19 -0
  161. package/drizzle/0010_needy_bishop.sql +11 -0
  162. package/drizzle/0011_moaning_millenium_guard.sql +1 -0
  163. package/drizzle/0012_late_marten_broadcloak.sql +2 -0
  164. package/drizzle/0013_previous_dormammu.sql +9 -0
  165. package/drizzle/0014_lazy_captain_universe.sql +2 -0
  166. package/drizzle/0015_zippy_wallop.sql +29 -0
  167. package/drizzle/0016_jazzy_zemo.sql +2 -0
  168. package/drizzle/0017_reflective_praxagora.sql +4 -0
  169. package/drizzle/0018_fat_vanisher.sql +22 -0
  170. package/drizzle/0019_new_clint_barton.sql +8 -0
  171. package/drizzle/0020_skinny_maverick.sql +1 -0
  172. package/drizzle/0021_mysterious_madelyne_pryor.sql +13 -0
  173. package/drizzle/0022_sleepy_ultimo.sql +25 -0
  174. package/drizzle/0023_wooden_mandrill.sql +2 -0
  175. package/drizzle/AGENTS.md +68 -0
  176. package/drizzle/CLAUDE.md +1 -0
  177. package/drizzle/meta/0000_snapshot.json +221 -0
  178. package/drizzle/meta/0001_snapshot.json +214 -0
  179. package/drizzle/meta/0002_snapshot.json +221 -0
  180. package/drizzle/meta/0005_snapshot.json +369 -0
  181. package/drizzle/meta/0006_snapshot.json +638 -0
  182. package/drizzle/meta/0007_snapshot.json +640 -0
  183. package/drizzle/meta/0008_snapshot.json +649 -0
  184. package/drizzle/meta/0009_snapshot.json +554 -0
  185. package/drizzle/meta/0010_snapshot.json +619 -0
  186. package/drizzle/meta/0011_snapshot.json +627 -0
  187. package/drizzle/meta/0012_snapshot.json +639 -0
  188. package/drizzle/meta/0013_snapshot.json +717 -0
  189. package/drizzle/meta/0014_snapshot.json +717 -0
  190. package/drizzle/meta/0015_snapshot.json +897 -0
  191. package/drizzle/meta/0016_snapshot.json +1031 -0
  192. package/drizzle/meta/0018_snapshot.json +1210 -0
  193. package/drizzle/meta/0019_snapshot.json +1165 -0
  194. package/drizzle/meta/0020_snapshot.json +1232 -0
  195. package/drizzle/meta/0021_snapshot.json +1311 -0
  196. package/drizzle/meta/0022_snapshot.json +1481 -0
  197. package/drizzle/meta/0023_snapshot.json +1496 -0
  198. package/drizzle/meta/_journal.json +174 -0
  199. package/package.json +240 -0
@@ -0,0 +1,667 @@
1
+ import { T as state, b as getEnvInt, r as logger, x as getEnvString, y as getEnvFloat } from "./logger-C40ZGil9.js";
2
+ import { r as importModule } from "./esm-DMVc93eh.js";
3
+ import { i as isJavascriptFile } from "./fileExtensions-DnqA1y9x.js";
4
+ import { d as maybeLoadToolsFromExternalFile, h as renderVarsInObject, u as maybeLoadResponseFormatFromExternalFile } from "./util-DuntT1Ga.js";
5
+ import { h as REQUEST_TIMEOUT_MS, m as LONG_RUNNING_MODEL_TIMEOUT_MS } from "./fetch-DQckpUFz.js";
6
+ import { r as fetchWithCache } from "./cache-BkrqU5Ba.js";
7
+ import { t as OpenAiGenericProvider } from "./openai-DumO8WQn.js";
8
+ import { a as calculateOpenAICost, c as getTokenUsage$1, s as formatOpenAiError } from "./util-DsS_-v4p.js";
9
+ import path from "path";
10
//#region src/providers/functionCallbackUtils.ts
/**
 * Handles function callback execution for AI providers.
 * Provides a unified way to execute function callbacks across different provider formats.
 *
 * Resolution order for each call: (1) MCP tools registered on the attached
 * client, (2) user-configured callbacks, (3) pass-through of the original call.
 */
var FunctionCallbackHandler = class {
	// Cache of resolved callback functions, keyed by function name, so
	// file loads / Function() compilation happen at most once per name.
	loadedCallbacks = {};
	// Lazily-built Set of MCP tool names; null means "not yet fetched from
	// the client". Reset to null whenever the client changes (setMcpClient).
	mcpToolNames = null;
	constructor(mcpClient) {
		// mcpClient is optional; when absent, only configured callbacks run.
		this.mcpClient = mcpClient;
	}
	/**
	 * Processes a function call by executing its callback or returning the original call
	 * @param call The function call to process (can be various formats)
	 * @param callbacks Configuration mapping function names to callbacks
	 * @param context Optional context to pass to the callback
	 * @returns The result of processing: `{ output, isError }`
	 */
	async processCall(call, callbacks, context) {
		const functionInfo = this.extractFunctionInfo(call);
		// MCP tools take precedence over configured callbacks of the same name.
		if (this.mcpClient && functionInfo) {
			if (this.mcpToolNames === null) {
				const mcpTools = this.mcpClient.getAllTools();
				this.mcpToolNames = new Set(mcpTools.map((tool) => tool.name));
			}
			if (this.mcpToolNames.has(functionInfo.name)) return await this.executeMcpTool(functionInfo.name, functionInfo.arguments);
		}
		// No recognizable function or no matching callback: echo the call back
		// unchanged (stringified if it is not already a string).
		if (!functionInfo || !callbacks || !callbacks[functionInfo.name]) return {
			output: typeof call === "string" ? call : JSON.stringify(call),
			isError: false
		};
		try {
			return {
				output: await this.executeCallback(functionInfo.name, functionInfo.arguments || "{}", callbacks, context),
				isError: false
			};
		} catch (error) {
			// Callback failure is non-fatal: log at debug level and fall back
			// to the original call text, flagged as an error.
			logger.debug(`Function callback failed for ${functionInfo.name}: ${error}`);
			return {
				output: typeof call === "string" ? call : JSON.stringify(call),
				isError: true
			};
		}
	}
	/**
	 * Processes multiple function calls
	 * @param calls Array of calls or a single call
	 * @param callbacks Configuration mapping function names to callbacks
	 * @param context Optional context to pass to callbacks
	 * @param options Processing options (currently unused)
	 * @returns Processed output in appropriate format: a single value when a
	 *   single call was given, a joined string when all outputs are strings,
	 *   otherwise an array; returns `calls` unchanged when nothing was handled.
	 */
	async processCalls(calls, callbacks, context, _options) {
		if (!calls) return calls;
		const isArray = Array.isArray(calls);
		const callsArray = isArray ? calls : [calls];
		const results = await Promise.all(callsArray.map((call) => this.processCall(call, callbacks, context)));
		// "Something was actually transformed" heuristic: any non-error result
		// whose output differs from the JSON form of its input means at least
		// one callback ran, so return the processed outputs.
		if (results.some((r, index) => !r.isError && r.output !== JSON.stringify(callsArray[index]))) {
			const outputs = results.map((r) => r.output);
			if (!isArray && outputs.length === 1) return outputs[0];
			return outputs.every((o) => typeof o === "string") ? outputs.join("\n") : outputs;
		}
		if (!isArray && results.length === 1) return results[0].output;
		// Nothing was transformed: hand back the caller's original value.
		return calls;
	}
	/**
	 * Extracts function name and arguments from various call formats.
	 * Supports both the flat `{ name, arguments }` shape and the OpenAI
	 * tool-call shape `{ type: "function", function: { name, arguments } }`.
	 * @returns `{ name, arguments }` or null when the shape is not recognized
	 */
	extractFunctionInfo(call) {
		if (!call || typeof call !== "object") return null;
		if (call.name && typeof call.name === "string") return {
			name: call.name,
			arguments: call.arguments
		};
		if (call.type === "function" && call.function?.name) return {
			name: call.function.name,
			arguments: call.function.arguments
		};
		return null;
	}
	/**
	 * Executes a function callback, resolving and caching it on first use.
	 * String configs starting with "file://" are loaded from disk; other
	 * strings are compiled as JavaScript source.
	 * @throws when the callback config is neither a string nor a function
	 */
	async executeCallback(functionName, args, callbacks, context) {
		let callback = this.loadedCallbacks[functionName];
		if (!callback) {
			const callbackConfig = callbacks[functionName];
			if (typeof callbackConfig === "string") if (callbackConfig.startsWith("file://")) callback = await this.loadExternalFunction(callbackConfig);
			// SECURITY NOTE(review): this compiles and runs arbitrary code from
			// configuration via new Function(). Acceptable only because callback
			// configs are user-authored, not external input — confirm that
			// assumption holds for all callers before reusing this class.
			else callback = new Function("return " + callbackConfig)();
			else if (typeof callbackConfig === "function") callback = callbackConfig;
			else throw new Error(`Invalid callback configuration for ${functionName}`);
			this.loadedCallbacks[functionName] = callback;
		}
		const result = await callback(args, context);
		// Normalize non-string results to JSON so callers always get a string.
		return typeof result === "string" ? result : JSON.stringify(result);
	}
	/**
	 * Loads a function from an external file.
	 * Accepts "file://path" or "file://path:exportName"; the ":name" suffix is
	 * only honored when the prefix is a JavaScript file (avoids misreading
	 * e.g. Windows drive letters or other colon-containing paths).
	 * @throws wrapping any load/resolution failure with the original fileRef
	 */
	async loadExternalFunction(fileRef) {
		let filePath = fileRef.slice(7);
		let functionName;
		if (filePath.includes(":")) {
			const splits = filePath.split(":");
			if (splits[0] && isJavascriptFile(splits[0])) [filePath, functionName] = splits;
		}
		try {
			// Paths are resolved relative to the globally configured base path.
			const resolvedPath = path.resolve(state.basePath || "", filePath);
			logger.debug(`Loading function from ${resolvedPath}${functionName ? `:${functionName}` : ""}`);
			const mod = await importModule(resolvedPath);
			// Prefer the named export, then the default export, then the module
			// itself (covers CJS modules that export a bare function).
			const func = functionName && mod[functionName] ? mod[functionName] : mod.default || mod;
			if (typeof func !== "function") throw new Error(`Expected ${resolvedPath}${functionName ? `:${functionName}` : ""} to export a function, got ${typeof func}`);
			return func;
		} catch (error) {
			throw new Error(`Failed to load function from ${fileRef}: ${error}`);
		}
	}
	/**
	 * Executes an MCP tool via the attached client.
	 * Never throws: all failures are returned as `{ output, isError: true }`.
	 */
	async executeMcpTool(toolName, args) {
		try {
			if (!this.mcpClient) throw new Error("MCP client not available");
			// Arguments may arrive as a JSON string, an object, or be absent.
			const parsedArgs = args == null || args === "" ? {} : typeof args === "string" ? JSON.parse(args) : args;
			const result = await this.mcpClient.callTool(toolName, parsedArgs);
			if (result?.error) return {
				output: `MCP Tool Error (${toolName}): ${result.error}`,
				isError: true
			};
			// Flattens MCP content (string | array of parts | other) to a string.
			// Array parts may be strings or objects carrying text/json/data.
			const normalizeContent = (content) => {
				if (content == null) return "";
				if (typeof content === "string") return content;
				if (Array.isArray(content)) return content.map((part) => {
					if (typeof part === "string") return part;
					if (part && typeof part === "object") {
						if ("text" in part && part.text != null) return String(part.text);
						if ("json" in part) return JSON.stringify(part.json);
						if ("data" in part) return JSON.stringify(part.data);
						return JSON.stringify(part);
					}
					return String(part);
				}).join("\n");
				return JSON.stringify(content);
			};
			return {
				output: `MCP Tool Result (${toolName}): ${normalizeContent(result?.content)}`,
				isError: false
			};
		} catch (error) {
			const errorMessage = error instanceof Error ? error.message : String(error);
			logger.debug(`MCP tool execution failed for ${toolName}: ${errorMessage}`);
			return {
				output: `MCP Tool Error (${toolName}): ${errorMessage}`,
				isError: true
			};
		}
	}
	/**
	 * Sets the MCP client, preserving any loaded callbacks.
	 * Invalidates the cached tool-name Set so it is refetched on next use.
	 */
	setMcpClient(client) {
		this.mcpClient = client;
		this.mcpToolNames = null;
	}
	/**
	 * Clears the cached callbacks (e.g. after callback config changes).
	 */
	clearCache() {
		this.loadedCallbacks = {};
	}
};
181
+ //#endregion
182
+ //#region src/providers/responses/processor.ts
183
/**
 * Extract user-facing metadata from response data.
 * Only includes fields that are useful for users viewing eval results:
 * the response id, the resolved model name, and any output annotations.
 * Fields that are absent, empty, or of the wrong type are omitted entirely.
 *
 * @param {object} data - Raw API response body.
 * @param {object} processedOutput - Result of output processing (may carry `annotations`).
 * @returns {object} Metadata object with only the populated fields set.
 */
function extractMetadata(data, processedOutput) {
	const meta = {};
	const { id, model } = data;
	if (typeof id === "string" && id !== "") {
		meta.responseId = id;
	}
	if (typeof model === "string" && model !== "") {
		meta.model = model;
	}
	const { annotations } = processedOutput;
	if (Array.isArray(annotations) && annotations.length > 0) {
		meta.annotations = annotations;
	}
	return meta;
}
194
/**
 * Extract token usage from response data, handling both OpenAI Chat Completions
 * format (prompt_tokens, completion_tokens) and Azure Responses format
 * (input_tokens, output_tokens).
 *
 * @param {object} data - Raw API response body; usage is read from `data.usage`.
 * @param {boolean} cached - Whether this response came from cache; cached
 *   responses report all tokens under `cached`/`total` only.
 * @returns {object} Token usage summary, or `{}` when no usage is present.
 */
function getTokenUsage(data, cached) {
	const usage = data.usage;
	if (!usage) {
		return {};
	}
	if (cached) {
		// For cache hits, total_tokens wins when present; otherwise sum the
		// Responses-format fields, treating missing counts as zero.
		const totalTokens = usage.total_tokens || (usage.input_tokens || 0) + (usage.output_tokens || 0);
		return {
			cached: totalTokens,
			total: totalTokens,
			numRequests: 1
		};
	}
	// Prefer Chat Completions field names, falling back to Responses names.
	const promptTokens = usage.prompt_tokens || usage.input_tokens || 0;
	const completionTokens = usage.completion_tokens || usage.output_tokens || 0;
	const summary = {
		total: usage.total_tokens || promptTokens + completionTokens,
		prompt: promptTokens,
		completion: completionTokens,
		numRequests: 1
	};
	const details = usage.completion_tokens_details;
	if (details) {
		// Breakdown is only attached when the API provided it.
		summary.completionDetails = {
			reasoning: details.reasoning_tokens,
			acceptedPrediction: details.accepted_prediction_tokens,
			rejectedPrediction: details.rejected_prediction_tokens
		};
	}
	return summary;
}
223
/**
 * Shared response processor for OpenAI and Azure Responses APIs.
 * Handles all response types with identical logic to ensure feature parity.
 *
 * config is expected to carry: providerType, modelName, a costCalculator
 * function, and a functionCallbackHandler — TODO confirm against the
 * constructor call sites (OpenAiResponsesProvider passes exactly these).
 */
var ResponsesProcessor = class {
	constructor(config) {
		this.config = config;
	}
	/**
	 * Top-level entry point: turns a raw Responses API body into a provider
	 * result `{ output, tokenUsage, cached, cost, raw, metadata, ... }` or
	 * `{ error }`. Never throws — parse failures become `{ error }` results.
	 */
	async processResponseOutput(data, requestConfig, cached) {
		logger.debug(`Processing ${this.config.providerType} responses output`, {
			responseId: data.id,
			model: data.model
		});
		if (data.error) return { error: formatOpenAiError(data) };
		try {
			const context = {
				config: requestConfig,
				cached,
				data
			};
			const processedOutput = await this.processOutput(data.output, context);
			// Refusals short-circuit: the refusal text becomes the output and
			// the result is flagged so graders can treat it specially.
			if (processedOutput.isRefusal) return {
				output: processedOutput.refusal,
				tokenUsage: getTokenUsage(data, cached),
				isRefusal: true,
				cached,
				cost: this.config.costCalculator(this.config.modelName, data.usage, requestConfig),
				raw: data,
				metadata: extractMetadata(data, processedOutput)
			};
			let finalOutput = processedOutput.result;
			// When a JSON schema was requested, best-effort parse the string
			// output; on failure keep the raw string and log the error.
			if (requestConfig.response_format?.type === "json_schema" && typeof finalOutput === "string") try {
				finalOutput = JSON.parse(finalOutput);
			} catch (error) {
				logger.error(`Failed to parse JSON output: ${error}`);
			}
			const result = {
				output: finalOutput,
				tokenUsage: getTokenUsage(data, cached),
				cached,
				cost: this.config.costCalculator(this.config.modelName, data.usage, requestConfig),
				raw: data,
				metadata: extractMetadata(data, processedOutput)
			};
			// Annotations are surfaced by copying them onto a shallow clone of
			// the raw payload (the original `data` object is left untouched).
			if (processedOutput.annotations && processedOutput.annotations.length > 0) result.raw = {
				...data,
				annotations: processedOutput.annotations
			};
			return result;
		} catch (err) {
			return { error: `Error parsing response: ${String(err)}\nResponse: ${JSON.stringify(data)}` };
		}
	}
	/**
	 * Walks the response's `output` array, dispatching each item and
	 * accumulating text, refusal state, and annotations.
	 * @throws when `output` is missing, not an array, or empty
	 */
	async processOutput(output, context) {
		if (this.config.modelName.includes("deep-research")) logger.debug(`Deep research response structure: ${JSON.stringify(context.data, null, 2)}`);
		if (!output || !Array.isArray(output) || output.length === 0) throw new Error("Invalid response format: Missing output array");
		let result = "";
		let refusal = "";
		let isRefusal = false;
		const annotations = [];
		for (const item of output) {
			if (!item || typeof item !== "object") {
				logger.warn(`Skipping invalid output item: ${JSON.stringify(item)}`);
				continue;
			}
			const processed = await this.processOutputItem(item, context);
			if (processed.isRefusal) {
				refusal = processed.content || "";
				isRefusal = true;
			// NOTE: the inner if/else below is a dangling-else — the final
			// `else` binds to `if (result)`, joining contents with newlines.
			} else if (processed.content) if (result) result += "\n" + processed.content;
			else result = processed.content;
			if (processed.annotations) annotations.push(...processed.annotations);
		}
		return {
			result,
			refusal,
			isRefusal,
			annotations: annotations.length > 0 ? annotations : void 0
		};
	}
	/**
	 * Dispatches a single output item by its `type` to the matching handler.
	 * Unknown types are logged at debug level and yield an empty result.
	 */
	async processOutputItem(item, context) {
		switch (item.type) {
			case "function_call": return await this.processFunctionCall(item, context);
			case "message": return await this.processMessage(item, context);
			case "tool_result": return this.processToolResult(item);
			case "reasoning": return this.processReasoning(item);
			case "web_search_call": return this.processWebSearch(item);
			case "code_interpreter_call": return this.processCodeInterpreter(item);
			case "mcp_list_tools": return this.processMcpListTools(item);
			case "mcp_call": return this.processMcpCall(item);
			case "mcp_approval_request": return this.processMcpApprovalRequest(item);
			default:
				logger.debug(`Unknown output item type: ${item.type}`);
				return {};
		}
	}
	/**
	 * Handles a top-level function_call item. Completed calls with empty
	 * arguments get a diagnostic JSON payload instead of being executed;
	 * everything else is routed through the function callback handler.
	 */
	async processFunctionCall(item, context) {
		let functionResult;
		if (item.arguments === "{}" && item.status === "completed") functionResult = JSON.stringify({
			type: "function_call",
			name: item.name,
			status: "no_arguments_provided",
			note: "Function called but no arguments were extracted. Consider using the correct Responses API tool format."
		});
		else functionResult = await this.config.functionCallbackHandler.processCalls(item, context.config.functionToolCallbacks);
		return { content: functionResult };
	}
	/**
	 * Handles a message item. Only assistant messages are processed; text
	 * parts are concatenated, tool/function parts are executed (replacing the
	 * accumulated content), and refusal parts win over any text.
	 * NOTE: the trailing `else if (item.refusal)` pairs with
	 * `if (item.content)` — it covers messages that have no content array.
	 */
	async processMessage(item, context) {
		if (item.role !== "assistant") return {};
		let content = "";
		let isRefusal = false;
		let refusal = "";
		const annotations = [];
		if (item.content) for (const contentItem of item.content) {
			if (!contentItem || typeof contentItem !== "object") {
				logger.warn(`Skipping invalid content item: ${JSON.stringify(contentItem)}`);
				continue;
			}
			if (contentItem.type === "output_text") {
				content += contentItem.text;
				if (Array.isArray(contentItem.annotations) && contentItem.annotations.length > 0) annotations.push(...contentItem.annotations);
			} else if (contentItem.type === "tool_use" || contentItem.type === "function_call") content = await this.config.functionCallbackHandler.processCalls(contentItem, context.config.functionToolCallbacks);
			else if (contentItem.type === "refusal") {
				refusal = contentItem.refusal;
				isRefusal = true;
			}
		}
		else if (item.refusal) {
			refusal = item.refusal;
			isRefusal = true;
		}
		return {
			content: isRefusal ? refusal : content,
			isRefusal,
			annotations: annotations.length > 0 ? annotations : void 0
		};
	}
	// Tool results are surfaced verbatim as JSON text.
	processToolResult(item) {
		return Promise.resolve({ content: JSON.stringify(item) });
	}
	// Joins reasoning summary parts into a single "Reasoning: ..." line;
	// items without a summary contribute nothing.
	processReasoning(item) {
		if (!item.summary || !item.summary.length) return Promise.resolve({});
		const reasoningText = `Reasoning: ${item.summary.map((s) => s.text).join("\n")}`;
		return Promise.resolve({ content: reasoningText });
	}
	/**
	 * Renders a web_search_call item into a human-readable description of the
	 * action taken (search / open_page / find_in_page / other), appending any
	 * failure error. The if/else-if chain below hangs off `if (action)`.
	 */
	processWebSearch(item) {
		let content = "";
		const action = item.action;
		if (action) if (action.type === "search") content = `Web Search: "${action.query}"`;
		else if (action.type === "open_page") content = `Opening page: ${action.url}`;
		else if (action.type === "find_in_page") content = `Finding in page: "${action.query}"`;
		else content = `Web action: ${action.type}`;
		else content = `Web Search Call (status: ${item.status || "unknown"})`;
		if (item.status === "failed" && item.error) content += ` (Error: ${item.error})`;
		return Promise.resolve({ content });
	}
	// Renders executed code (or a placeholder) plus any failure error.
	processCodeInterpreter(item) {
		let content = `Code Interpreter: ${item.code || "Running code..."}`;
		if (item.status === "failed" && item.error) content += ` (Error: ${item.error})`;
		return Promise.resolve({ content });
	}
	// Renders the tool list advertised by an MCP server as pretty-printed JSON.
	processMcpListTools(item) {
		const content = `MCP Tools from ${item.server_label}: ${JSON.stringify(item.tools, null, 2)}`;
		return Promise.resolve({ content });
	}
	// Renders a server-side MCP call result or error.
	processMcpCall(item) {
		let content;
		if (item.error) content = `MCP Tool Error (${item.name}): ${item.error}`;
		else content = `MCP Tool Result (${item.name}): ${item.output}`;
		return Promise.resolve({ content });
	}
	// Renders a pending MCP approval request so the user can see what was asked.
	processMcpApprovalRequest(item) {
		const content = `MCP Approval Required for ${item.server_label}.${item.name}: ${item.arguments}`;
		return Promise.resolve({ content });
	}
};
399
+ //#endregion
400
+ //#region src/providers/openai/responses.ts
401
+ var OpenAiResponsesProvider = class extends OpenAiGenericProvider {
402
+ functionCallbackHandler = new FunctionCallbackHandler();
403
+ processor;
404
+ static OPENAI_RESPONSES_MODEL_NAMES = [
405
+ "gpt-4o",
406
+ "gpt-4o-2024-08-06",
407
+ "gpt-4o-2024-11-20",
408
+ "gpt-4o-2024-05-13",
409
+ "gpt-4o-2024-07-18",
410
+ "gpt-4o-mini",
411
+ "gpt-4o-mini-2024-07-18",
412
+ "gpt-4.1",
413
+ "gpt-4.1-2025-04-14",
414
+ "gpt-4.1-mini",
415
+ "gpt-4.1-mini-2025-04-14",
416
+ "gpt-4.1-nano",
417
+ "gpt-4.1-nano-2025-04-14",
418
+ "gpt-5",
419
+ "gpt-5-2025-08-07",
420
+ "gpt-5-chat",
421
+ "gpt-5-chat-latest",
422
+ "gpt-5-nano",
423
+ "gpt-5-nano-2025-08-07",
424
+ "gpt-5-mini",
425
+ "gpt-5-mini-2025-08-07",
426
+ "gpt-5-pro",
427
+ "gpt-5-pro-2025-10-06",
428
+ "gpt-5.1",
429
+ "gpt-5.1-2025-11-13",
430
+ "gpt-5.1-mini",
431
+ "gpt-5.1-nano",
432
+ "gpt-5.1-codex",
433
+ "gpt-5.1-codex-max",
434
+ "gpt-5.1-chat-latest",
435
+ "gpt-5.2",
436
+ "gpt-5.2-2025-12-11",
437
+ "gpt-5.2-chat-latest",
438
+ "gpt-5.2-codex",
439
+ "gpt-5.2-pro",
440
+ "gpt-5.2-pro-2025-12-11",
441
+ "gpt-5.3-chat-latest",
442
+ "gpt-5.3-codex",
443
+ "gpt-5.3-codex-spark",
444
+ "gpt-5.4",
445
+ "gpt-5.4-2026-03-05",
446
+ "gpt-5.4-mini",
447
+ "gpt-5.4-mini-2026-03-17",
448
+ "gpt-5.4-nano",
449
+ "gpt-5.4-nano-2026-03-17",
450
+ "gpt-5.4-pro",
451
+ "gpt-5.4-pro-2026-03-05",
452
+ "gpt-audio",
453
+ "gpt-audio-2025-08-28",
454
+ "gpt-audio-mini",
455
+ "gpt-audio-mini-2025-10-06",
456
+ "computer-use-preview",
457
+ "computer-use-preview-2025-03-11",
458
+ "o1",
459
+ "o1-2024-12-17",
460
+ "o1-preview",
461
+ "o1-preview-2024-09-12",
462
+ "o1-mini",
463
+ "o1-mini-2024-09-12",
464
+ "o1-pro",
465
+ "o1-pro-2025-03-19",
466
+ "o3-pro",
467
+ "o3-pro-2025-06-10",
468
+ "o3",
469
+ "o3-2025-04-16",
470
+ "o4-mini",
471
+ "o4-mini-2025-04-16",
472
+ "o3-mini",
473
+ "o3-mini-2025-01-31",
474
+ "codex-mini-latest",
475
+ "gpt-5-codex",
476
+ "o3-deep-research",
477
+ "o3-deep-research-2025-06-26",
478
+ "o4-mini-deep-research",
479
+ "o4-mini-deep-research-2025-06-26"
480
+ ];
481
+ config;
482
+ constructor(modelName, options = {}) {
483
+ super(modelName, options);
484
+ this.config = options.config || {};
485
+ this.processor = new ResponsesProcessor({
486
+ modelName: this.modelName,
487
+ providerType: "openai",
488
+ functionCallbackHandler: this.functionCallbackHandler,
489
+ costCalculator: (modelName, usage, config) => calculateOpenAICost(modelName, config, usage?.input_tokens, usage?.output_tokens, 0, 0) ?? 0
490
+ });
491
+ }
492
+ isGPT5Model() {
493
+ return this.modelName.startsWith("gpt-5") || this.modelName.includes("/gpt-5");
494
+ }
495
+ isReasoningModel() {
496
+ return this.modelName.startsWith("o1") || this.modelName.startsWith("o3") || this.modelName.startsWith("o4") || this.modelName.includes("/o1") || this.modelName.includes("/o3") || this.modelName.includes("/o4") || this.modelName === "codex-mini-latest" || this.isGPT5Model();
497
+ }
498
+ supportsTemperature() {
499
+ return !this.isReasoningModel();
500
+ }
501
+ /**
+  * Build the JSON request body for the OpenAI Responses API.
+  * @param prompt raw prompt string; if it parses as a JSON array it is sent as
+  *   structured `input`, otherwise the raw string is sent as-is.
+  * @param context optional eval context supplying per-prompt config overrides and vars.
+  * @param _callApiOptions unused here.
+  * @returns `{ body, config }`: the request body plus the effective config
+  *   (tools loaded from file, response_format resolved) for response processing.
+  */
+ async getOpenAiBody(prompt, context, _callApiOptions) {
502
+ // Per-prompt config overrides provider-level config.
+ const config = {
503
+ ...this.config,
504
+ ...context?.prompt?.config
505
+ };
506
+ let input;
507
+ // Only JSON *arrays* become structured input; JSON objects/scalars and
+ // unparseable strings fall back to the raw prompt string.
+ try {
508
+ const parsedJson = JSON.parse(prompt);
509
+ if (Array.isArray(parsedJson)) input = parsedJson;
510
+ else input = prompt;
511
+ } catch {
512
+ input = prompt;
513
+ }
514
+ const isReasoningModel = this.isReasoningModel();
515
+ // With omitDefaults set, env vars are only consulted when explicitly present
+ // (no 1024-token / temperature-0 fallback); otherwise defaults apply.
+ const maxOutputTokensDefault = config.omitDefaults ? getEnvString("OPENAI_MAX_TOKENS") === void 0 ? void 0 : getEnvInt("OPENAI_MAX_TOKENS") : getEnvInt("OPENAI_MAX_TOKENS", 1024);
516
+ const reasoningMaxOutputTokensDefault = getEnvInt("OPENAI_MAX_COMPLETION_TOKENS") ?? getEnvInt("OPENAI_MAX_TOKENS");
517
+ const maxOutputTokens = config.max_output_tokens ?? (isReasoningModel ? reasoningMaxOutputTokensDefault : maxOutputTokensDefault);
518
+ const temperatureDefault = config.omitDefaults ? getEnvString("OPENAI_TEMPERATURE") === void 0 ? void 0 : getEnvFloat("OPENAI_TEMPERATURE") : getEnvFloat("OPENAI_TEMPERATURE", 0);
519
+ // Reasoning models get no temperature at all (supportsTemperature() is false).
+ const temperature = this.supportsTemperature() ? config.temperature ?? temperatureDefault : void 0;
520
+ // reasoning_effort may contain template vars; only meaningful for reasoning models.
+ const reasoningEffort = isReasoningModel ? renderVarsInObject(config.reasoning_effort, context?.vars) : void 0;
521
+ const instructions = config.instructions;
522
+ const responseFormat = maybeLoadResponseFormatFromExternalFile(config.response_format, context?.vars);
523
+ // Map chat-style response_format onto the Responses API `text.format` shape;
+ // anything unrecognized (or absent) becomes plain text output.
+ let textFormat;
524
+ if (responseFormat) if (responseFormat.type === "json_object") textFormat = { format: { type: "json_object" } };
525
+ else if (responseFormat.type === "json_schema") {
526
+ // Accept both flattened (`schema`) and nested (`json_schema.schema`) layouts.
+ const schema = responseFormat.schema || responseFormat.json_schema?.schema;
527
+ textFormat = { format: {
528
+ type: "json_schema",
529
+ name: responseFormat.json_schema?.name || responseFormat.name || "response_schema",
530
+ schema,
531
+ strict: true
532
+ } };
533
+ } else textFormat = { format: { type: "text" } };
534
+ else textFormat = { format: { type: "text" } };
535
+ // GPT-5 models additionally accept a text verbosity setting.
+ if (this.isGPT5Model() && config.verbosity) textFormat = {
536
+ ...textFormat,
537
+ verbosity: config.verbosity
538
+ };
539
+ const loadedTools = config.tools ? await maybeLoadToolsFromExternalFile(config.tools, context?.vars) : void 0;
540
+ // Assemble the body; conditional spreads keep unset options out of the JSON
+ // entirely. Note `in`-checks for parallel_tool_calls/store so explicit `false`
+ // is still sent, while plain truthy checks drop falsy values.
+ const body = {
541
+ model: this.modelName,
542
+ input,
543
+ ...maxOutputTokens === void 0 ? {} : { max_output_tokens: maxOutputTokens },
544
+ ...reasoningEffort ? { reasoning: { effort: reasoningEffort } } : {},
545
+ ...temperature === void 0 ? {} : { temperature },
546
+ ...instructions ? { instructions } : {},
547
+ // top_p is suppressed when a reasoning effort (other than "none") is active.
+ ...(!reasoningEffort || reasoningEffort === "none") && (config.top_p !== void 0 || getEnvString("OPENAI_TOP_P")) ? { top_p: config.top_p ?? getEnvFloat("OPENAI_TOP_P", 1) } : {},
548
+ ...loadedTools ? { tools: loadedTools } : {},
549
+ ...config.tool_choice ? { tool_choice: config.tool_choice } : {},
550
+ ...config.max_tool_calls ? { max_tool_calls: config.max_tool_calls } : {},
551
+ ...config.previous_response_id ? { previous_response_id: config.previous_response_id } : {},
552
+ text: textFormat,
553
+ ...config.truncation ? { truncation: config.truncation } : {},
554
+ ...config.metadata ? { metadata: config.metadata } : {},
555
+ ..."parallel_tool_calls" in config ? { parallel_tool_calls: Boolean(config.parallel_tool_calls) } : {},
556
+ ...config.stream ? { stream: config.stream } : {},
557
+ ..."store" in config ? { store: Boolean(config.store) } : {},
558
+ ...config.background ? { background: config.background } : {},
559
+ ...config.webhook_url ? { webhook_url: config.webhook_url } : {},
560
+ ...config.user ? { user: config.user } : {},
561
+ // Escape hatch: passthrough entries are spread last and so win over
+ // everything assembled above.
+ ...config.passthrough || {}
562
+ };
563
+ // A full `reasoning` object in config replaces the effort-only one built above.
+ if (config.reasoning && this.isReasoningModel()) body.reasoning = config.reasoning;
564
+ return {
565
+ body,
566
+ config: {
567
+ ...config,
568
+ tools: loadedTools,
569
+ response_format: responseFormat
570
+ }
571
+ };
572
+ }
573
+ /**
+  * Execute a request against the OpenAI Responses API endpoint.
+  * Validates the API key, builds the body via getOpenAiBody, enforces
+  * deep-research tool constraints, extends the timeout for long-running
+  * models, POSTs via fetchWithCache, and maps HTTP/provider errors into
+  * `{ error }` results. Successful responses are parsed by this.processor.
+  * All results carry `metadata.http` with status/statusText/headers.
+  * @throws Error when an API key is required but missing.
+  */
+ async callApi(prompt, context, callApiOptions) {
574
+ if (this.requiresApiKey() && !this.getApiKey()) throw new Error(this.getMissingApiKeyErrorMessage());
575
+ const { body, config } = await this.getOpenAiBody(prompt, context, callApiOptions);
576
+ const isDeepResearchModel = this.modelName.includes("deep-research");
577
+ // Deep-research models have hard config prerequisites; fail fast with
+ // actionable messages instead of letting the API reject the request.
+ if (isDeepResearchModel) {
578
+ if (!config.tools?.some((tool) => tool.type === "web_search_preview")) return { error: `Deep research model ${this.modelName} requires the web_search_preview tool to be configured. Add it to your provider config:\ntools:\n - type: web_search_preview` };
579
+ const mcpTools = config.tools?.filter((tool) => tool.type === "mcp") || [];
580
+ for (const mcpTool of mcpTools) if (mcpTool.require_approval !== "never") return { error: `Deep research model ${this.modelName} requires MCP tools to have require_approval: 'never'. Update your MCP tool configuration:\ntools:\n - type: mcp\n require_approval: never` };
581
+ }
582
+ let timeout = REQUEST_TIMEOUT_MS;
583
+ // Matches gpt-5-pro ids, optionally versioned (e.g. "gpt-5.1-pro") and/or
+ // provider-prefixed ("openai/gpt-5-pro").
+ const isGpt5ProModel = /(^|\/)gpt-5(?:\.\d+)?-pro(?:-|$)/.test(this.modelName);
584
+ // Long-running models get the eval-level timeout if configured, otherwise
+ // a dedicated long-running default.
+ if (isDeepResearchModel || isGpt5ProModel) {
585
+ const evalTimeout = getEnvInt("PROMPTFOO_EVAL_TIMEOUT_MS", 0);
586
+ timeout = evalTimeout > 0 ? evalTimeout : LONG_RUNNING_MODEL_TIMEOUT_MS;
587
+ logger.debug(`Using timeout of ${timeout}ms for long-running model ${this.modelName}`);
588
+ }
589
+ let data;
590
+ let status;
591
+ let statusText;
592
+ let cached = false;
593
+ let deleteFromCache;
594
+ let responseHeaders;
595
+ try {
596
+ ({data, cached, status, statusText, deleteFromCache, headers: responseHeaders} = await fetchWithCache(`${this.getApiUrl()}/responses`, {
597
+ method: "POST",
598
+ headers: {
599
+ "Content-Type": "application/json",
600
+ ...this.getApiKey() ? { Authorization: `Bearer ${this.getApiKey()}` } : {},
601
+ ...this.getOrganization() ? { "OpenAI-Organization": this.getOrganization() } : {},
602
+ ...config.headers
603
+ },
604
+ body: JSON.stringify(body)
605
+ }, timeout, "json", context?.bustCache ?? context?.debug, this.config.maxRetries));
606
+ if (status < 200 || status >= 300) {
607
+ const errorMessage = `API error: ${status} ${statusText}\n${typeof data === "string" ? data : JSON.stringify(data)}`;
608
+ // invalid_prompt means the provider refused the prompt: surface it as a
+ // refusal *output* (not an error) so graders can score it.
+ if (typeof data === "object" && data?.error?.code === "invalid_prompt") return {
609
+ output: errorMessage,
610
+ tokenUsage: data?.usage ? getTokenUsage$1(data, cached) : void 0,
611
+ isRefusal: true,
612
+ metadata: { http: {
613
+ status,
614
+ statusText,
615
+ headers: responseHeaders ?? {}
616
+ } }
617
+ };
618
+ return {
619
+ error: errorMessage,
620
+ metadata: { http: {
621
+ status,
622
+ statusText,
623
+ headers: responseHeaders ?? {}
624
+ } }
625
+ };
626
+ }
627
+ } catch (err) {
628
+ logger.error(`API call error: ${String(err)}`);
629
+ // Evict any cached entry so a transient failure isn't replayed later.
+ await deleteFromCache?.();
630
+ return {
631
+ error: `API call error: ${String(err)}`,
632
+ metadata: { http: {
633
+ status: 0,
634
+ statusText: "Error",
635
+ headers: responseHeaders ?? {}
636
+ } }
637
+ };
638
+ }
639
+ // A 2xx response can still carry a provider-level error payload.
+ if (data.error?.message) {
640
+ await deleteFromCache?.();
641
+ return {
642
+ error: formatOpenAiError(data),
643
+ metadata: { http: {
644
+ status,
645
+ statusText,
646
+ headers: responseHeaders ?? {}
647
+ } }
648
+ };
649
+ }
650
+ // Delegate output/token/cost extraction to the ResponsesProcessor, then
+ // merge in the HTTP metadata.
+ const result = await this.processor.processResponseOutput(data, config, cached);
651
+ return {
652
+ ...result,
653
+ metadata: {
654
+ ...result.metadata,
655
+ http: {
656
+ status,
657
+ statusText,
658
+ headers: responseHeaders ?? {}
659
+ }
660
+ }
661
+ };
662
+ }
663
+ };
664
+ //#endregion
665
+ export { ResponsesProcessor as n, FunctionCallbackHandler as r, OpenAiResponsesProvider as t };
666
+
667
+ //# sourceMappingURL=responses-B2LCDCXZ.js.map