@copilotkit/aimock 1.7.0 → 1.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (134) hide show
  1. package/.claude-plugin/marketplace.json +1 -1
  2. package/.claude-plugin/plugin.json +1 -1
  3. package/README.md +3 -1
  4. package/dist/bedrock-converse.cjs +13 -8
  5. package/dist/bedrock-converse.cjs.map +1 -1
  6. package/dist/bedrock-converse.d.cts.map +1 -1
  7. package/dist/bedrock-converse.d.ts.map +1 -1
  8. package/dist/bedrock-converse.js +14 -9
  9. package/dist/bedrock-converse.js.map +1 -1
  10. package/dist/bedrock.cjs +51 -15
  11. package/dist/bedrock.cjs.map +1 -1
  12. package/dist/bedrock.d.cts.map +1 -1
  13. package/dist/bedrock.d.ts.map +1 -1
  14. package/dist/bedrock.js +52 -16
  15. package/dist/bedrock.js.map +1 -1
  16. package/dist/cohere.cjs +3 -2
  17. package/dist/cohere.cjs.map +1 -1
  18. package/dist/cohere.d.cts.map +1 -1
  19. package/dist/cohere.d.ts.map +1 -1
  20. package/dist/cohere.js +4 -3
  21. package/dist/cohere.js.map +1 -1
  22. package/dist/embeddings.cjs +3 -2
  23. package/dist/embeddings.cjs.map +1 -1
  24. package/dist/embeddings.d.cts.map +1 -1
  25. package/dist/embeddings.d.ts.map +1 -1
  26. package/dist/embeddings.js +4 -3
  27. package/dist/embeddings.js.map +1 -1
  28. package/dist/gemini.cjs +129 -35
  29. package/dist/gemini.cjs.map +1 -1
  30. package/dist/gemini.d.cts.map +1 -1
  31. package/dist/gemini.d.ts.map +1 -1
  32. package/dist/gemini.js +130 -36
  33. package/dist/gemini.js.map +1 -1
  34. package/dist/helpers.cjs +154 -3
  35. package/dist/helpers.cjs.map +1 -1
  36. package/dist/helpers.d.cts +1 -1
  37. package/dist/helpers.d.cts.map +1 -1
  38. package/dist/helpers.d.ts +1 -1
  39. package/dist/helpers.d.ts.map +1 -1
  40. package/dist/helpers.js +151 -4
  41. package/dist/helpers.js.map +1 -1
  42. package/dist/index.cjs +1 -0
  43. package/dist/index.d.cts +2 -2
  44. package/dist/index.d.ts +2 -2
  45. package/dist/index.js +2 -2
  46. package/dist/journal.cjs +26 -9
  47. package/dist/journal.cjs.map +1 -1
  48. package/dist/journal.d.cts +10 -5
  49. package/dist/journal.d.cts.map +1 -1
  50. package/dist/journal.d.ts +10 -5
  51. package/dist/journal.d.ts.map +1 -1
  52. package/dist/journal.js +26 -10
  53. package/dist/journal.js.map +1 -1
  54. package/dist/llmock.cjs +2 -2
  55. package/dist/llmock.cjs.map +1 -1
  56. package/dist/llmock.d.cts +1 -1
  57. package/dist/llmock.d.ts +1 -1
  58. package/dist/llmock.js +2 -2
  59. package/dist/llmock.js.map +1 -1
  60. package/dist/messages.cjs +192 -2
  61. package/dist/messages.cjs.map +1 -1
  62. package/dist/messages.d.cts.map +1 -1
  63. package/dist/messages.d.ts.map +1 -1
  64. package/dist/messages.js +193 -3
  65. package/dist/messages.js.map +1 -1
  66. package/dist/ollama.cjs +39 -13
  67. package/dist/ollama.cjs.map +1 -1
  68. package/dist/ollama.d.cts.map +1 -1
  69. package/dist/ollama.d.ts.map +1 -1
  70. package/dist/ollama.js +40 -14
  71. package/dist/ollama.js.map +1 -1
  72. package/dist/recorder.cjs +1 -1
  73. package/dist/recorder.cjs.map +1 -1
  74. package/dist/recorder.d.cts +1 -0
  75. package/dist/recorder.d.cts.map +1 -1
  76. package/dist/recorder.d.ts +1 -0
  77. package/dist/recorder.d.ts.map +1 -1
  78. package/dist/recorder.js +1 -1
  79. package/dist/recorder.js.map +1 -1
  80. package/dist/responses.cjs +250 -126
  81. package/dist/responses.cjs.map +1 -1
  82. package/dist/responses.d.cts.map +1 -1
  83. package/dist/responses.d.ts.map +1 -1
  84. package/dist/responses.js +251 -127
  85. package/dist/responses.js.map +1 -1
  86. package/dist/router.cjs +16 -10
  87. package/dist/router.cjs.map +1 -1
  88. package/dist/router.d.cts +1 -1
  89. package/dist/router.d.cts.map +1 -1
  90. package/dist/router.d.ts +1 -1
  91. package/dist/router.d.ts.map +1 -1
  92. package/dist/router.js +16 -10
  93. package/dist/router.js.map +1 -1
  94. package/dist/server.cjs +47 -7
  95. package/dist/server.cjs.map +1 -1
  96. package/dist/server.d.cts.map +1 -1
  97. package/dist/server.d.ts.map +1 -1
  98. package/dist/server.js +48 -8
  99. package/dist/server.js.map +1 -1
  100. package/dist/stream-collapse.cjs +48 -40
  101. package/dist/stream-collapse.cjs.map +1 -1
  102. package/dist/stream-collapse.d.cts.map +1 -1
  103. package/dist/stream-collapse.d.ts.map +1 -1
  104. package/dist/stream-collapse.js +48 -40
  105. package/dist/stream-collapse.js.map +1 -1
  106. package/dist/types.d.cts +22 -1
  107. package/dist/types.d.cts.map +1 -1
  108. package/dist/types.d.ts +22 -1
  109. package/dist/types.d.ts.map +1 -1
  110. package/dist/ws-gemini-live.cjs +4 -2
  111. package/dist/ws-gemini-live.cjs.map +1 -1
  112. package/dist/ws-gemini-live.d.cts +3 -1
  113. package/dist/ws-gemini-live.d.cts.map +1 -1
  114. package/dist/ws-gemini-live.d.ts +3 -1
  115. package/dist/ws-gemini-live.d.ts.map +1 -1
  116. package/dist/ws-gemini-live.js +4 -2
  117. package/dist/ws-gemini-live.js.map +1 -1
  118. package/dist/ws-realtime.cjs +4 -2
  119. package/dist/ws-realtime.cjs.map +1 -1
  120. package/dist/ws-realtime.d.cts +3 -1
  121. package/dist/ws-realtime.d.cts.map +1 -1
  122. package/dist/ws-realtime.d.ts +3 -1
  123. package/dist/ws-realtime.d.ts.map +1 -1
  124. package/dist/ws-realtime.js +4 -2
  125. package/dist/ws-realtime.js.map +1 -1
  126. package/dist/ws-responses.cjs +4 -2
  127. package/dist/ws-responses.cjs.map +1 -1
  128. package/dist/ws-responses.d.cts +3 -1
  129. package/dist/ws-responses.d.cts.map +1 -1
  130. package/dist/ws-responses.d.ts +3 -1
  131. package/dist/ws-responses.d.ts.map +1 -1
  132. package/dist/ws-responses.js +4 -2
  133. package/dist/ws-responses.js.map +1 -1
  134. package/package.json +5 -1
package/dist/recorder.js CHANGED
@@ -103,7 +103,7 @@ async function proxyAndRecord(req, res, request, providerKey, pathname, fixtures
103
103
  } catch {}
104
104
  fixtureResponse = buildFixtureResponse(parsedResponse, upstreamStatus, encodingFormat);
105
105
  }
106
- const fixtureMatch = buildFixtureMatch(request);
106
+ const fixtureMatch = buildFixtureMatch(defaults.requestTransform ? defaults.requestTransform(request) : request);
107
107
  const fixture = {
108
108
  match: fixtureMatch,
109
109
  response: fixtureResponse
@@ -1 +1 @@
1
- {"version":3,"file":"recorder.js","names":[],"sources":["../src/recorder.ts"],"sourcesContent":["import * as http from \"node:http\";\nimport * as https from \"node:https\";\nimport * as fs from \"node:fs\";\nimport * as path from \"node:path\";\nimport * as crypto from \"node:crypto\";\nimport type {\n ChatCompletionRequest,\n Fixture,\n FixtureResponse,\n RecordConfig,\n RecordProviderKey,\n ToolCall,\n} from \"./types.js\";\nimport { getLastMessageByRole, getTextContent } from \"./router.js\";\nimport type { Logger } from \"./logger.js\";\nimport { collapseStreamingResponse } from \"./stream-collapse.js\";\nimport { writeErrorResponse } from \"./sse-writer.js\";\nimport { resolveUpstreamUrl } from \"./url.js\";\n\n/** Headers to strip when proxying — hop-by-hop (RFC 2616 §13.5.1) + client-set. */\nconst STRIP_HEADERS = new Set([\n // Hop-by-hop (RFC 2616 §13.5.1)\n \"connection\",\n \"keep-alive\",\n \"transfer-encoding\",\n \"te\",\n \"trailer\",\n \"upgrade\",\n \"proxy-authorization\",\n \"proxy-authenticate\",\n // Set by HTTP client from the target URL / body\n \"host\",\n \"content-length\",\n // Not relevant for LLM APIs; avoid leaking or mismatched encoding\n \"cookie\",\n \"accept-encoding\",\n]);\n\n/**\n * Proxy an unmatched request to the real upstream provider, record the\n * response as a fixture on disk and in memory, then relay the response\n * back to the original client.\n *\n * Returns `true` if the request was proxied (provider configured),\n * `false` if no upstream URL is configured for the given provider key.\n */\nexport async function proxyAndRecord(\n req: http.IncomingMessage,\n res: http.ServerResponse,\n request: ChatCompletionRequest,\n providerKey: RecordProviderKey,\n pathname: string,\n fixtures: Fixture[],\n defaults: { record?: RecordConfig; logger: Logger },\n rawBody?: string,\n): Promise<boolean> {\n const record = defaults.record;\n if (!record) return false;\n\n const providers = record.providers;\n const upstreamUrl = 
providers[providerKey];\n\n if (!upstreamUrl) {\n defaults.logger.warn(`No upstream URL configured for provider \"${providerKey}\" — cannot proxy`);\n return false;\n }\n\n const fixturePath = record.fixturePath ?? \"./fixtures/recorded\";\n let target: URL;\n try {\n target = resolveUpstreamUrl(upstreamUrl, pathname);\n } catch {\n defaults.logger.error(`Invalid upstream URL for provider \"${providerKey}\": ${upstreamUrl}`);\n writeErrorResponse(\n res,\n 502,\n JSON.stringify({\n error: { message: `Invalid upstream URL: ${upstreamUrl}`, type: \"proxy_error\" },\n }),\n );\n return true;\n }\n\n defaults.logger.warn(`NO FIXTURE MATCH — proxying to ${upstreamUrl}${pathname}`);\n\n // Forward all request headers except hop-by-hop and client-set ones.\n const forwardHeaders: Record<string, string> = {};\n for (const [name, val] of Object.entries(req.headers)) {\n if (val !== undefined && !STRIP_HEADERS.has(name)) {\n forwardHeaders[name] = Array.isArray(val) ? val.join(\", \") : val;\n }\n }\n\n const requestBody = rawBody ?? JSON.stringify(request);\n\n // Make upstream request\n let upstreamStatus: number;\n let upstreamHeaders: http.IncomingHttpHeaders;\n let upstreamBody: string;\n let rawBuffer: Buffer;\n\n try {\n const result = await makeUpstreamRequest(target, forwardHeaders, requestBody);\n upstreamStatus = result.status;\n upstreamHeaders = result.headers;\n upstreamBody = result.body;\n rawBuffer = result.rawBuffer;\n } catch (err) {\n const msg = err instanceof Error ? err.message : \"Unknown proxy error\";\n defaults.logger.error(`Proxy request failed: ${msg}`);\n res.writeHead(502, { \"Content-Type\": \"application/json\" });\n res.end(\n JSON.stringify({\n error: { message: `Proxy to upstream failed: ${msg}`, type: \"proxy_error\" },\n }),\n );\n return true;\n }\n\n // Detect streaming response and collapse if necessary\n const contentType = upstreamHeaders[\"content-type\"];\n const ctString = Array.isArray(contentType) ? 
contentType.join(\", \") : (contentType ?? \"\");\n const isBinaryStream = ctString.toLowerCase().includes(\"application/vnd.amazon.eventstream\");\n const collapsed = collapseStreamingResponse(\n ctString,\n providerKey,\n isBinaryStream ? rawBuffer : upstreamBody,\n defaults.logger,\n );\n\n let fixtureResponse: FixtureResponse;\n\n if (collapsed) {\n // Streaming response — use collapsed result\n defaults.logger.warn(`Streaming response detected (${ctString}) — collapsing to fixture`);\n if (collapsed.truncated) {\n defaults.logger.warn(\"Bedrock EventStream: CRC mismatch — response may be truncated\");\n }\n if (collapsed.droppedChunks && collapsed.droppedChunks > 0) {\n defaults.logger.warn(`${collapsed.droppedChunks} chunk(s) dropped during stream collapse`);\n }\n if (collapsed.content === \"\" && (!collapsed.toolCalls || collapsed.toolCalls.length === 0)) {\n defaults.logger.warn(\"Stream collapse produced empty content — fixture may be incomplete\");\n }\n if (collapsed.toolCalls && collapsed.toolCalls.length > 0) {\n if (collapsed.content) {\n defaults.logger.warn(\n \"Collapsed response has both content and toolCalls — preferring toolCalls\",\n );\n }\n fixtureResponse = { toolCalls: collapsed.toolCalls };\n } else {\n fixtureResponse = { content: collapsed.content ?? \"\" };\n }\n } else {\n // Non-streaming — try to parse as JSON\n let parsedResponse: unknown = null;\n try {\n parsedResponse = JSON.parse(upstreamBody);\n } catch {\n // Not JSON — could be an unknown format\n defaults.logger.warn(\"Upstream response is not valid JSON — saving as error fixture\");\n }\n let encodingFormat: string | undefined;\n try {\n encodingFormat = rawBody ? 
JSON.parse(rawBody).encoding_format : undefined;\n } catch {\n /* not JSON */\n }\n fixtureResponse = buildFixtureResponse(parsedResponse, upstreamStatus, encodingFormat);\n }\n\n // Build the match criteria from the original request\n const fixtureMatch = buildFixtureMatch(request);\n\n // Build and save the fixture\n const fixture: Fixture = { match: fixtureMatch, response: fixtureResponse };\n\n // Check if the match is empty (all undefined values) — warn but still save to disk\n const matchValues = Object.values(fixtureMatch);\n const isEmptyMatch = matchValues.length === 0 || matchValues.every((v) => v === undefined);\n if (isEmptyMatch) {\n defaults.logger.warn(\n \"Recorded fixture has empty match criteria — skipping in-memory registration\",\n );\n }\n\n const timestamp = new Date().toISOString().replace(/[:.]/g, \"-\");\n const filename = `${providerKey}-${timestamp}-${crypto.randomUUID().slice(0, 8)}.json`;\n const filepath = path.join(fixturePath, filename);\n\n let writtenToDisk = false;\n try {\n // Ensure fixture directory exists\n fs.mkdirSync(fixturePath, { recursive: true });\n\n // Collect warnings for the fixture file\n const warnings: string[] = [];\n if (isEmptyMatch) {\n warnings.push(\"Empty match criteria — this fixture will not match any request\");\n }\n if (collapsed?.truncated) {\n warnings.push(\"Stream response was truncated — fixture may be incomplete\");\n }\n\n // Auth headers are forwarded to upstream but excluded from saved fixtures for security\n const fileContent: Record<string, unknown> = { fixtures: [fixture] };\n if (warnings.length > 0) {\n fileContent._warning = warnings.join(\"; \");\n }\n fs.writeFileSync(filepath, JSON.stringify(fileContent, null, 2), \"utf-8\");\n writtenToDisk = true;\n } catch (err) {\n const msg = err instanceof Error ? 
err.message : \"Unknown filesystem error\";\n defaults.logger.error(`Failed to save fixture to disk: ${msg}`);\n res.setHeader(\"X-LLMock-Record-Error\", msg);\n }\n\n if (writtenToDisk) {\n // Register in memory so subsequent identical requests match (skip if empty match)\n if (!isEmptyMatch) {\n fixtures.push(fixture);\n }\n defaults.logger.warn(`Response recorded → ${filepath}`);\n } else {\n defaults.logger.warn(`Response relayed but NOT saved to disk — see error above`);\n }\n\n // Relay upstream response to client\n const relayHeaders: Record<string, string> = {};\n if (ctString) {\n relayHeaders[\"Content-Type\"] = ctString;\n }\n res.writeHead(upstreamStatus, relayHeaders);\n res.end(isBinaryStream ? rawBuffer : upstreamBody);\n\n return true;\n}\n\n// ---------------------------------------------------------------------------\n// Internal helpers\n// ---------------------------------------------------------------------------\n\nfunction makeUpstreamRequest(\n target: URL,\n headers: Record<string, string>,\n body: string,\n): Promise<{ status: number; headers: http.IncomingHttpHeaders; body: string; rawBuffer: Buffer }> {\n return new Promise((resolve, reject) => {\n const transport = target.protocol === \"https:\" ? https : http;\n const UPSTREAM_TIMEOUT_MS = 30_000;\n const BODY_TIMEOUT_MS = 30_000;\n const req = transport.request(\n target,\n {\n method: \"POST\",\n timeout: UPSTREAM_TIMEOUT_MS,\n headers: {\n ...headers,\n \"Content-Length\": Buffer.byteLength(body).toString(),\n },\n },\n (res) => {\n res.setTimeout(BODY_TIMEOUT_MS, () => {\n req.destroy(new Error(`Upstream response timed out after ${BODY_TIMEOUT_MS / 1000}s`));\n });\n const chunks: Buffer[] = [];\n res.on(\"data\", (chunk: Buffer) => chunks.push(chunk));\n res.on(\"error\", reject);\n res.on(\"end\", () => {\n const rawBuffer = Buffer.concat(chunks);\n resolve({\n status: res.statusCode ?? 
500,\n headers: res.headers,\n body: rawBuffer.toString(),\n rawBuffer,\n });\n });\n },\n );\n req.on(\"timeout\", () => {\n req.destroy(\n new Error(\n `Upstream request timed out after ${UPSTREAM_TIMEOUT_MS / 1000}s: ${target.href}`,\n ),\n );\n });\n req.on(\"error\", reject);\n req.write(body);\n req.end();\n });\n}\n\n/**\n * Detect the response format from the parsed upstream JSON and convert\n * it into an llmock FixtureResponse.\n */\nfunction buildFixtureResponse(\n parsed: unknown,\n status: number,\n encodingFormat?: string,\n): FixtureResponse {\n if (parsed === null || parsed === undefined) {\n // Raw / unparseable response — save as error\n return {\n error: { message: \"Upstream returned non-JSON response\", type: \"proxy_error\" },\n status,\n };\n }\n\n const obj = parsed as Record<string, unknown>;\n\n // Error response\n if (obj.error) {\n const err = obj.error as Record<string, unknown>;\n return {\n error: {\n message: String(err.message ?? \"Unknown error\"),\n type: String(err.type ?? \"api_error\"),\n code: err.code ? String(err.code) : undefined,\n },\n status,\n };\n }\n\n // OpenAI embeddings: { data: [{ embedding: [...] }] }\n if (Array.isArray(obj.data) && obj.data.length > 0) {\n const first = obj.data[0] as Record<string, unknown>;\n if (Array.isArray(first.embedding)) {\n return { embedding: first.embedding as number[] };\n }\n if (typeof first.embedding === \"string\" && encodingFormat === \"base64\") {\n try {\n const buf = Buffer.from(first.embedding, \"base64\");\n const floats = new Float32Array(buf.buffer, buf.byteOffset, buf.byteLength / 4);\n return { embedding: Array.from(floats) };\n } catch {\n // Corrupted base64 or non-float32 data — fall through to error\n }\n }\n }\n\n // Direct embedding: { embedding: [...] 
}\n if (Array.isArray(obj.embedding)) {\n return { embedding: obj.embedding as number[] };\n }\n\n // OpenAI chat completion: { choices: [{ message: { content, tool_calls } }] }\n if (Array.isArray(obj.choices) && obj.choices.length > 0) {\n const choice = obj.choices[0] as Record<string, unknown>;\n const message = choice.message as Record<string, unknown> | undefined;\n if (message) {\n // Tool calls\n if (Array.isArray(message.tool_calls) && message.tool_calls.length > 0) {\n const toolCalls: ToolCall[] = (message.tool_calls as Array<Record<string, unknown>>).map(\n (tc) => {\n const fn = tc.function as Record<string, unknown>;\n return {\n name: String(fn.name),\n arguments: String(fn.arguments),\n };\n },\n );\n return { toolCalls };\n }\n // Text content\n if (typeof message.content === \"string\") {\n return { content: message.content };\n }\n }\n }\n\n // Anthropic: { content: [{ type: \"text\", text: \"...\" }] } or tool_use\n if (Array.isArray(obj.content) && obj.content.length > 0) {\n const blocks = obj.content as Array<Record<string, unknown>>;\n // Check for tool_use blocks first\n const toolUseBlocks = blocks.filter((b) => b.type === \"tool_use\");\n if (toolUseBlocks.length > 0) {\n const toolCalls: ToolCall[] = toolUseBlocks.map((b) => ({\n name: String(b.name),\n arguments: typeof b.input === \"string\" ? 
b.input : JSON.stringify(b.input),\n }));\n return { toolCalls };\n }\n // Text blocks\n const textBlock = blocks.find((b) => b.type === \"text\");\n if (textBlock && typeof textBlock.text === \"string\") {\n return { content: textBlock.text };\n }\n }\n\n // Gemini: { candidates: [{ content: { parts: [{ text: \"...\" }] } }] }\n if (Array.isArray(obj.candidates) && obj.candidates.length > 0) {\n const candidate = obj.candidates[0] as Record<string, unknown>;\n const content = candidate.content as Record<string, unknown> | undefined;\n if (content && Array.isArray(content.parts)) {\n const parts = content.parts as Array<Record<string, unknown>>;\n // Tool calls (functionCall)\n const fnCallParts = parts.filter((p) => p.functionCall);\n if (fnCallParts.length > 0) {\n const toolCalls: ToolCall[] = fnCallParts.map((p) => {\n const fc = p.functionCall as Record<string, unknown>;\n return {\n name: String(fc.name),\n arguments: typeof fc.args === \"string\" ? fc.args : JSON.stringify(fc.args),\n };\n });\n return { toolCalls };\n }\n // Text\n const textPart = parts.find((p) => typeof p.text === \"string\");\n if (textPart && typeof textPart.text === \"string\") {\n return { content: textPart.text };\n }\n }\n }\n\n // Bedrock Converse: { output: { message: { role, content: [{ text }, { toolUse }] } } }\n if (obj.output && typeof obj.output === \"object\") {\n const output = obj.output as Record<string, unknown>;\n const msg = output.message as Record<string, unknown> | undefined;\n if (msg && Array.isArray(msg.content)) {\n const blocks = msg.content as Array<Record<string, unknown>>;\n const toolUseBlocks = blocks.filter((b) => b.toolUse);\n if (toolUseBlocks.length > 0) {\n const toolCalls: ToolCall[] = toolUseBlocks.map((b) => {\n const tu = b.toolUse as Record<string, unknown>;\n return {\n name: String(tu.name ?? \"\"),\n arguments: typeof tu.input === \"string\" ? 
tu.input : JSON.stringify(tu.input),\n };\n });\n return { toolCalls };\n }\n const textBlock = blocks.find((b) => typeof b.text === \"string\");\n if (textBlock && typeof textBlock.text === \"string\") {\n return { content: textBlock.text };\n }\n }\n }\n\n // Ollama: { message: { content: \"...\", tool_calls: [...] } }\n if (obj.message && typeof obj.message === \"object\") {\n const msg = obj.message as Record<string, unknown>;\n // Tool calls (check before content — Ollama sends content: \"\" alongside tool_calls)\n if (Array.isArray(msg.tool_calls) && msg.tool_calls.length > 0) {\n const toolCalls: ToolCall[] = (msg.tool_calls as Array<Record<string, unknown>>)\n .filter((tc) => tc.function != null)\n .map((tc) => {\n const fn = tc.function as Record<string, unknown>;\n return {\n name: String(fn.name ?? \"\"),\n arguments:\n typeof fn.arguments === \"string\" ? fn.arguments : JSON.stringify(fn.arguments),\n };\n });\n return { toolCalls };\n }\n if (typeof msg.content === \"string\" && msg.content.length > 0) {\n return { content: msg.content };\n }\n // Ollama message with content array (like Cohere)\n if (Array.isArray(msg.content) && msg.content.length > 0) {\n const first = msg.content[0] as Record<string, unknown>;\n if (typeof first.text === \"string\") {\n return { content: first.text };\n }\n }\n }\n\n // Fallback: unknown format — save as error\n return {\n error: {\n message: \"Could not detect response format from upstream\",\n type: \"proxy_error\",\n },\n status,\n };\n}\n\n/**\n * Derive fixture match criteria from the original request.\n */\nfunction buildFixtureMatch(request: ChatCompletionRequest): {\n userMessage?: string;\n inputText?: string;\n} {\n // Embedding request\n if (request.embeddingInput) {\n return { inputText: request.embeddingInput };\n }\n\n // Chat request — match on the last user message\n const lastUser = getLastMessageByRole(request.messages ?? 
[], \"user\");\n if (lastUser) {\n const text = getTextContent(lastUser.content);\n if (text) {\n return { userMessage: text };\n }\n }\n\n return {};\n}\n"],"mappings":";;;;;;;;;;;;AAoBA,MAAM,gBAAgB,IAAI,IAAI;CAE5B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CAEA;CACA;CAEA;CACA;CACD,CAAC;;;;;;;;;AAUF,eAAsB,eACpB,KACA,KACA,SACA,aACA,UACA,UACA,UACA,SACkB;CAClB,MAAM,SAAS,SAAS;AACxB,KAAI,CAAC,OAAQ,QAAO;CAGpB,MAAM,cADY,OAAO,UACK;AAE9B,KAAI,CAAC,aAAa;AAChB,WAAS,OAAO,KAAK,4CAA4C,YAAY,kBAAkB;AAC/F,SAAO;;CAGT,MAAM,cAAc,OAAO,eAAe;CAC1C,IAAI;AACJ,KAAI;AACF,WAAS,mBAAmB,aAAa,SAAS;SAC5C;AACN,WAAS,OAAO,MAAM,sCAAsC,YAAY,KAAK,cAAc;AAC3F,qBACE,KACA,KACA,KAAK,UAAU,EACb,OAAO;GAAE,SAAS,yBAAyB;GAAe,MAAM;GAAe,EAChF,CAAC,CACH;AACD,SAAO;;AAGT,UAAS,OAAO,KAAK,kCAAkC,cAAc,WAAW;CAGhF,MAAM,iBAAyC,EAAE;AACjD,MAAK,MAAM,CAAC,MAAM,QAAQ,OAAO,QAAQ,IAAI,QAAQ,CACnD,KAAI,QAAQ,UAAa,CAAC,cAAc,IAAI,KAAK,CAC/C,gBAAe,QAAQ,MAAM,QAAQ,IAAI,GAAG,IAAI,KAAK,KAAK,GAAG;CAIjE,MAAM,cAAc,WAAW,KAAK,UAAU,QAAQ;CAGtD,IAAI;CACJ,IAAI;CACJ,IAAI;CACJ,IAAI;AAEJ,KAAI;EACF,MAAM,SAAS,MAAM,oBAAoB,QAAQ,gBAAgB,YAAY;AAC7E,mBAAiB,OAAO;AACxB,oBAAkB,OAAO;AACzB,iBAAe,OAAO;AACtB,cAAY,OAAO;UACZ,KAAK;EACZ,MAAM,MAAM,eAAe,QAAQ,IAAI,UAAU;AACjD,WAAS,OAAO,MAAM,yBAAyB,MAAM;AACrD,MAAI,UAAU,KAAK,EAAE,gBAAgB,oBAAoB,CAAC;AAC1D,MAAI,IACF,KAAK,UAAU,EACb,OAAO;GAAE,SAAS,6BAA6B;GAAO,MAAM;GAAe,EAC5E,CAAC,CACH;AACD,SAAO;;CAIT,MAAM,cAAc,gBAAgB;CACpC,MAAM,WAAW,MAAM,QAAQ,YAAY,GAAG,YAAY,KAAK,KAAK,GAAI,eAAe;CACvF,MAAM,iBAAiB,SAAS,aAAa,CAAC,SAAS,qCAAqC;CAC5F,MAAM,YAAY,0BAChB,UACA,aACA,iBAAiB,YAAY,cAC7B,SAAS,OACV;CAED,IAAI;AAEJ,KAAI,WAAW;AAEb,WAAS,OAAO,KAAK,gCAAgC,SAAS,2BAA2B;AACzF,MAAI,UAAU,UACZ,UAAS,OAAO,KAAK,gEAAgE;AAEvF,MAAI,UAAU,iBAAiB,UAAU,gBAAgB,EACvD,UAAS,OAAO,KAAK,GAAG,UAAU,cAAc,0CAA0C;AAE5F,MAAI,UAAU,YAAY,OAAO,CAAC,UAAU,aAAa,UAAU,UAAU,WAAW,GACtF,UAAS,OAAO,KAAK,qEAAqE;AAE5F,MAAI,UAAU,aAAa,UAAU,UAAU,SAAS,GAAG;AACzD,OAAI,UAAU,QACZ,UAAS,OAAO,KACd,2EACD;AAEH,qBAAkB,EAAE,WAAW,UAAU,WAAW;QAEpD,mBAAkB,EAAE,SAAS,UAAU,WAAW,IAAI;QAEnD;EAEL,IAAI,iBAA0B;AAC9B,MAAI;AACF,oBAAiB,K
AAK,MAAM,aAAa;UACnC;AAEN,YAAS,OAAO,KAAK,gEAAgE;;EAEvF,IAAI;AACJ,MAAI;AACF,oBAAiB,UAAU,KAAK,MAAM,QAAQ,CAAC,kBAAkB;UAC3D;AAGR,oBAAkB,qBAAqB,gBAAgB,gBAAgB,eAAe;;CAIxF,MAAM,eAAe,kBAAkB,QAAQ;CAG/C,MAAM,UAAmB;EAAE,OAAO;EAAc,UAAU;EAAiB;CAG3E,MAAM,cAAc,OAAO,OAAO,aAAa;CAC/C,MAAM,eAAe,YAAY,WAAW,KAAK,YAAY,OAAO,MAAM,MAAM,OAAU;AAC1F,KAAI,aACF,UAAS,OAAO,KACd,8EACD;CAIH,MAAM,WAAW,GAAG,YAAY,oBADd,IAAI,MAAM,EAAC,aAAa,CAAC,QAAQ,SAAS,IAAI,CACnB,GAAG,OAAO,YAAY,CAAC,MAAM,GAAG,EAAE,CAAC;CAChF,MAAM,WAAW,KAAK,KAAK,aAAa,SAAS;CAEjD,IAAI,gBAAgB;AACpB,KAAI;AAEF,KAAG,UAAU,aAAa,EAAE,WAAW,MAAM,CAAC;EAG9C,MAAM,WAAqB,EAAE;AAC7B,MAAI,aACF,UAAS,KAAK,iEAAiE;AAEjF,MAAI,WAAW,UACb,UAAS,KAAK,4DAA4D;EAI5E,MAAM,cAAuC,EAAE,UAAU,CAAC,QAAQ,EAAE;AACpE,MAAI,SAAS,SAAS,EACpB,aAAY,WAAW,SAAS,KAAK,KAAK;AAE5C,KAAG,cAAc,UAAU,KAAK,UAAU,aAAa,MAAM,EAAE,EAAE,QAAQ;AACzE,kBAAgB;UACT,KAAK;EACZ,MAAM,MAAM,eAAe,QAAQ,IAAI,UAAU;AACjD,WAAS,OAAO,MAAM,mCAAmC,MAAM;AAC/D,MAAI,UAAU,yBAAyB,IAAI;;AAG7C,KAAI,eAAe;AAEjB,MAAI,CAAC,aACH,UAAS,KAAK,QAAQ;AAExB,WAAS,OAAO,KAAK,uBAAuB,WAAW;OAEvD,UAAS,OAAO,KAAK,2DAA2D;CAIlF,MAAM,eAAuC,EAAE;AAC/C,KAAI,SACF,cAAa,kBAAkB;AAEjC,KAAI,UAAU,gBAAgB,aAAa;AAC3C,KAAI,IAAI,iBAAiB,YAAY,aAAa;AAElD,QAAO;;AAOT,SAAS,oBACP,QACA,SACA,MACiG;AACjG,QAAO,IAAI,SAAS,SAAS,WAAW;EACtC,MAAM,YAAY,OAAO,aAAa,WAAW,QAAQ;EACzD,MAAM,sBAAsB;EAC5B,MAAM,kBAAkB;EACxB,MAAM,MAAM,UAAU,QACpB,QACA;GACE,QAAQ;GACR,SAAS;GACT,SAAS;IACP,GAAG;IACH,kBAAkB,OAAO,WAAW,KAAK,CAAC,UAAU;IACrD;GACF,GACA,QAAQ;AACP,OAAI,WAAW,uBAAuB;AACpC,QAAI,wBAAQ,IAAI,MAAM,qCAAqC,kBAAkB,IAAK,GAAG,CAAC;KACtF;GACF,MAAM,SAAmB,EAAE;AAC3B,OAAI,GAAG,SAAS,UAAkB,OAAO,KAAK,MAAM,CAAC;AACrD,OAAI,GAAG,SAAS,OAAO;AACvB,OAAI,GAAG,aAAa;IAClB,MAAM,YAAY,OAAO,OAAO,OAAO;AACvC,YAAQ;KACN,QAAQ,IAAI,cAAc;KAC1B,SAAS,IAAI;KACb,MAAM,UAAU,UAAU;KAC1B;KACD,CAAC;KACF;IAEL;AACD,MAAI,GAAG,iBAAiB;AACtB,OAAI,wBACF,IAAI,MACF,oCAAoC,sBAAsB,IAAK,KAAK,OAAO,OAC5E,CACF;IACD;AACF,MAAI,GAAG,SAAS,OAAO;AACvB,MAAI,MAAM,KAAK;AACf,MAAI,KAAK;GACT;;;;;;AAOJ,SAAS,qBACP,QACA,QACA,gBACiB;AACjB,KAAI,WAAW,QAAQ,WAAW,OAEhC,QAAO;EAC
L,OAAO;GAAE,SAAS;GAAuC,MAAM;GAAe;EAC9E;EACD;CAGH,MAAM,MAAM;AAGZ,KAAI,IAAI,OAAO;EACb,MAAM,MAAM,IAAI;AAChB,SAAO;GACL,OAAO;IACL,SAAS,OAAO,IAAI,WAAW,gBAAgB;IAC/C,MAAM,OAAO,IAAI,QAAQ,YAAY;IACrC,MAAM,IAAI,OAAO,OAAO,IAAI,KAAK,GAAG;IACrC;GACD;GACD;;AAIH,KAAI,MAAM,QAAQ,IAAI,KAAK,IAAI,IAAI,KAAK,SAAS,GAAG;EAClD,MAAM,QAAQ,IAAI,KAAK;AACvB,MAAI,MAAM,QAAQ,MAAM,UAAU,CAChC,QAAO,EAAE,WAAW,MAAM,WAAuB;AAEnD,MAAI,OAAO,MAAM,cAAc,YAAY,mBAAmB,SAC5D,KAAI;GACF,MAAM,MAAM,OAAO,KAAK,MAAM,WAAW,SAAS;GAClD,MAAM,SAAS,IAAI,aAAa,IAAI,QAAQ,IAAI,YAAY,IAAI,aAAa,EAAE;AAC/E,UAAO,EAAE,WAAW,MAAM,KAAK,OAAO,EAAE;UAClC;;AAOZ,KAAI,MAAM,QAAQ,IAAI,UAAU,CAC9B,QAAO,EAAE,WAAW,IAAI,WAAuB;AAIjD,KAAI,MAAM,QAAQ,IAAI,QAAQ,IAAI,IAAI,QAAQ,SAAS,GAAG;EAExD,MAAM,UADS,IAAI,QAAQ,GACJ;AACvB,MAAI,SAAS;AAEX,OAAI,MAAM,QAAQ,QAAQ,WAAW,IAAI,QAAQ,WAAW,SAAS,EAUnE,QAAO,EAAE,WATsB,QAAQ,WAA8C,KAClF,OAAO;IACN,MAAM,KAAK,GAAG;AACd,WAAO;KACL,MAAM,OAAO,GAAG,KAAK;KACrB,WAAW,OAAO,GAAG,UAAU;KAChC;KAEJ,EACmB;AAGtB,OAAI,OAAO,QAAQ,YAAY,SAC7B,QAAO,EAAE,SAAS,QAAQ,SAAS;;;AAMzC,KAAI,MAAM,QAAQ,IAAI,QAAQ,IAAI,IAAI,QAAQ,SAAS,GAAG;EACxD,MAAM,SAAS,IAAI;EAEnB,MAAM,gBAAgB,OAAO,QAAQ,MAAM,EAAE,SAAS,WAAW;AACjE,MAAI,cAAc,SAAS,EAKzB,QAAO,EAAE,WAJqB,cAAc,KAAK,OAAO;GACtD,MAAM,OAAO,EAAE,KAAK;GACpB,WAAW,OAAO,EAAE,UAAU,WAAW,EAAE,QAAQ,KAAK,UAAU,EAAE,MAAM;GAC3E,EAAE,EACiB;EAGtB,MAAM,YAAY,OAAO,MAAM,MAAM,EAAE,SAAS,OAAO;AACvD,MAAI,aAAa,OAAO,UAAU,SAAS,SACzC,QAAO,EAAE,SAAS,UAAU,MAAM;;AAKtC,KAAI,MAAM,QAAQ,IAAI,WAAW,IAAI,IAAI,WAAW,SAAS,GAAG;EAE9D,MAAM,UADY,IAAI,WAAW,GACP;AAC1B,MAAI,WAAW,MAAM,QAAQ,QAAQ,MAAM,EAAE;GAC3C,MAAM,QAAQ,QAAQ;GAEtB,MAAM,cAAc,MAAM,QAAQ,MAAM,EAAE,aAAa;AACvD,OAAI,YAAY,SAAS,EAQvB,QAAO,EAAE,WAPqB,YAAY,KAAK,MAAM;IACnD,MAAM,KAAK,EAAE;AACb,WAAO;KACL,MAAM,OAAO,GAAG,KAAK;KACrB,WAAW,OAAO,GAAG,SAAS,WAAW,GAAG,OAAO,KAAK,UAAU,GAAG,KAAK;KAC3E;KACD,EACkB;GAGtB,MAAM,WAAW,MAAM,MAAM,MAAM,OAAO,EAAE,SAAS,SAAS;AAC9D,OAAI,YAAY,OAAO,SAAS,SAAS,SACvC,QAAO,EAAE,SAAS,SAAS,MAAM;;;AAMvC,KAAI,IAAI,UAAU,OAAO,IAAI,WAAW,UAAU;EAEhD,MAAM,MADS,IAAI,OACA;AACnB,MAAI,OAAO,MAAM,QAAQ,IAAI,QAAQ,EAAE;
GACrC,MAAM,SAAS,IAAI;GACnB,MAAM,gBAAgB,OAAO,QAAQ,MAAM,EAAE,QAAQ;AACrD,OAAI,cAAc,SAAS,EAQzB,QAAO,EAAE,WAPqB,cAAc,KAAK,MAAM;IACrD,MAAM,KAAK,EAAE;AACb,WAAO;KACL,MAAM,OAAO,GAAG,QAAQ,GAAG;KAC3B,WAAW,OAAO,GAAG,UAAU,WAAW,GAAG,QAAQ,KAAK,UAAU,GAAG,MAAM;KAC9E;KACD,EACkB;GAEtB,MAAM,YAAY,OAAO,MAAM,MAAM,OAAO,EAAE,SAAS,SAAS;AAChE,OAAI,aAAa,OAAO,UAAU,SAAS,SACzC,QAAO,EAAE,SAAS,UAAU,MAAM;;;AAMxC,KAAI,IAAI,WAAW,OAAO,IAAI,YAAY,UAAU;EAClD,MAAM,MAAM,IAAI;AAEhB,MAAI,MAAM,QAAQ,IAAI,WAAW,IAAI,IAAI,WAAW,SAAS,EAW3D,QAAO,EAAE,WAVsB,IAAI,WAChC,QAAQ,OAAO,GAAG,YAAY,KAAK,CACnC,KAAK,OAAO;GACX,MAAM,KAAK,GAAG;AACd,UAAO;IACL,MAAM,OAAO,GAAG,QAAQ,GAAG;IAC3B,WACE,OAAO,GAAG,cAAc,WAAW,GAAG,YAAY,KAAK,UAAU,GAAG,UAAU;IACjF;IACD,EACgB;AAEtB,MAAI,OAAO,IAAI,YAAY,YAAY,IAAI,QAAQ,SAAS,EAC1D,QAAO,EAAE,SAAS,IAAI,SAAS;AAGjC,MAAI,MAAM,QAAQ,IAAI,QAAQ,IAAI,IAAI,QAAQ,SAAS,GAAG;GACxD,MAAM,QAAQ,IAAI,QAAQ;AAC1B,OAAI,OAAO,MAAM,SAAS,SACxB,QAAO,EAAE,SAAS,MAAM,MAAM;;;AAMpC,QAAO;EACL,OAAO;GACL,SAAS;GACT,MAAM;GACP;EACD;EACD;;;;;AAMH,SAAS,kBAAkB,SAGzB;AAEA,KAAI,QAAQ,eACV,QAAO,EAAE,WAAW,QAAQ,gBAAgB;CAI9C,MAAM,WAAW,qBAAqB,QAAQ,YAAY,EAAE,EAAE,OAAO;AACrE,KAAI,UAAU;EACZ,MAAM,OAAO,eAAe,SAAS,QAAQ;AAC7C,MAAI,KACF,QAAO,EAAE,aAAa,MAAM;;AAIhC,QAAO,EAAE"}
1
+ {"version":3,"file":"recorder.js","names":[],"sources":["../src/recorder.ts"],"sourcesContent":["import * as http from \"node:http\";\nimport * as https from \"node:https\";\nimport * as fs from \"node:fs\";\nimport * as path from \"node:path\";\nimport * as crypto from \"node:crypto\";\nimport type {\n ChatCompletionRequest,\n Fixture,\n FixtureResponse,\n RecordConfig,\n RecordProviderKey,\n ToolCall,\n} from \"./types.js\";\nimport { getLastMessageByRole, getTextContent } from \"./router.js\";\nimport type { Logger } from \"./logger.js\";\nimport { collapseStreamingResponse } from \"./stream-collapse.js\";\nimport { writeErrorResponse } from \"./sse-writer.js\";\nimport { resolveUpstreamUrl } from \"./url.js\";\n\n/** Headers to strip when proxying — hop-by-hop (RFC 2616 §13.5.1) + client-set. */\nconst STRIP_HEADERS = new Set([\n // Hop-by-hop (RFC 2616 §13.5.1)\n \"connection\",\n \"keep-alive\",\n \"transfer-encoding\",\n \"te\",\n \"trailer\",\n \"upgrade\",\n \"proxy-authorization\",\n \"proxy-authenticate\",\n // Set by HTTP client from the target URL / body\n \"host\",\n \"content-length\",\n // Not relevant for LLM APIs; avoid leaking or mismatched encoding\n \"cookie\",\n \"accept-encoding\",\n]);\n\n/**\n * Proxy an unmatched request to the real upstream provider, record the\n * response as a fixture on disk and in memory, then relay the response\n * back to the original client.\n *\n * Returns `true` if the request was proxied (provider configured),\n * `false` if no upstream URL is configured for the given provider key.\n */\nexport async function proxyAndRecord(\n req: http.IncomingMessage,\n res: http.ServerResponse,\n request: ChatCompletionRequest,\n providerKey: RecordProviderKey,\n pathname: string,\n fixtures: Fixture[],\n defaults: {\n record?: RecordConfig;\n logger: Logger;\n requestTransform?: (req: ChatCompletionRequest) => ChatCompletionRequest;\n },\n rawBody?: string,\n): Promise<boolean> {\n const record = defaults.record;\n if 
(!record) return false;\n\n const providers = record.providers;\n const upstreamUrl = providers[providerKey];\n\n if (!upstreamUrl) {\n defaults.logger.warn(`No upstream URL configured for provider \"${providerKey}\" — cannot proxy`);\n return false;\n }\n\n const fixturePath = record.fixturePath ?? \"./fixtures/recorded\";\n let target: URL;\n try {\n target = resolveUpstreamUrl(upstreamUrl, pathname);\n } catch {\n defaults.logger.error(`Invalid upstream URL for provider \"${providerKey}\": ${upstreamUrl}`);\n writeErrorResponse(\n res,\n 502,\n JSON.stringify({\n error: { message: `Invalid upstream URL: ${upstreamUrl}`, type: \"proxy_error\" },\n }),\n );\n return true;\n }\n\n defaults.logger.warn(`NO FIXTURE MATCH — proxying to ${upstreamUrl}${pathname}`);\n\n // Forward all request headers except hop-by-hop and client-set ones.\n const forwardHeaders: Record<string, string> = {};\n for (const [name, val] of Object.entries(req.headers)) {\n if (val !== undefined && !STRIP_HEADERS.has(name)) {\n forwardHeaders[name] = Array.isArray(val) ? val.join(\", \") : val;\n }\n }\n\n const requestBody = rawBody ?? JSON.stringify(request);\n\n // Make upstream request\n let upstreamStatus: number;\n let upstreamHeaders: http.IncomingHttpHeaders;\n let upstreamBody: string;\n let rawBuffer: Buffer;\n\n try {\n const result = await makeUpstreamRequest(target, forwardHeaders, requestBody);\n upstreamStatus = result.status;\n upstreamHeaders = result.headers;\n upstreamBody = result.body;\n rawBuffer = result.rawBuffer;\n } catch (err) {\n const msg = err instanceof Error ? 
err.message : \"Unknown proxy error\";\n defaults.logger.error(`Proxy request failed: ${msg}`);\n res.writeHead(502, { \"Content-Type\": \"application/json\" });\n res.end(\n JSON.stringify({\n error: { message: `Proxy to upstream failed: ${msg}`, type: \"proxy_error\" },\n }),\n );\n return true;\n }\n\n // Detect streaming response and collapse if necessary\n const contentType = upstreamHeaders[\"content-type\"];\n const ctString = Array.isArray(contentType) ? contentType.join(\", \") : (contentType ?? \"\");\n const isBinaryStream = ctString.toLowerCase().includes(\"application/vnd.amazon.eventstream\");\n const collapsed = collapseStreamingResponse(\n ctString,\n providerKey,\n isBinaryStream ? rawBuffer : upstreamBody,\n defaults.logger,\n );\n\n let fixtureResponse: FixtureResponse;\n\n if (collapsed) {\n // Streaming response — use collapsed result\n defaults.logger.warn(`Streaming response detected (${ctString}) — collapsing to fixture`);\n if (collapsed.truncated) {\n defaults.logger.warn(\"Bedrock EventStream: CRC mismatch — response may be truncated\");\n }\n if (collapsed.droppedChunks && collapsed.droppedChunks > 0) {\n defaults.logger.warn(`${collapsed.droppedChunks} chunk(s) dropped during stream collapse`);\n }\n if (collapsed.content === \"\" && (!collapsed.toolCalls || collapsed.toolCalls.length === 0)) {\n defaults.logger.warn(\"Stream collapse produced empty content — fixture may be incomplete\");\n }\n if (collapsed.toolCalls && collapsed.toolCalls.length > 0) {\n if (collapsed.content) {\n defaults.logger.warn(\n \"Collapsed response has both content and toolCalls — preferring toolCalls\",\n );\n }\n fixtureResponse = { toolCalls: collapsed.toolCalls };\n } else {\n fixtureResponse = { content: collapsed.content ?? 
\"\" };\n }\n } else {\n // Non-streaming — try to parse as JSON\n let parsedResponse: unknown = null;\n try {\n parsedResponse = JSON.parse(upstreamBody);\n } catch {\n // Not JSON — could be an unknown format\n defaults.logger.warn(\"Upstream response is not valid JSON — saving as error fixture\");\n }\n let encodingFormat: string | undefined;\n try {\n encodingFormat = rawBody ? JSON.parse(rawBody).encoding_format : undefined;\n } catch {\n /* not JSON */\n }\n fixtureResponse = buildFixtureResponse(parsedResponse, upstreamStatus, encodingFormat);\n }\n\n // Build the match criteria from the (optionally transformed) request\n const matchRequest = defaults.requestTransform ? defaults.requestTransform(request) : request;\n const fixtureMatch = buildFixtureMatch(matchRequest);\n\n // Build and save the fixture\n const fixture: Fixture = { match: fixtureMatch, response: fixtureResponse };\n\n // Check if the match is empty (all undefined values) — warn but still save to disk\n const matchValues = Object.values(fixtureMatch);\n const isEmptyMatch = matchValues.length === 0 || matchValues.every((v) => v === undefined);\n if (isEmptyMatch) {\n defaults.logger.warn(\n \"Recorded fixture has empty match criteria — skipping in-memory registration\",\n );\n }\n\n const timestamp = new Date().toISOString().replace(/[:.]/g, \"-\");\n const filename = `${providerKey}-${timestamp}-${crypto.randomUUID().slice(0, 8)}.json`;\n const filepath = path.join(fixturePath, filename);\n\n let writtenToDisk = false;\n try {\n // Ensure fixture directory exists\n fs.mkdirSync(fixturePath, { recursive: true });\n\n // Collect warnings for the fixture file\n const warnings: string[] = [];\n if (isEmptyMatch) {\n warnings.push(\"Empty match criteria — this fixture will not match any request\");\n }\n if (collapsed?.truncated) {\n warnings.push(\"Stream response was truncated — fixture may be incomplete\");\n }\n\n // Auth headers are forwarded to upstream but excluded from saved fixtures for 
security\n const fileContent: Record<string, unknown> = { fixtures: [fixture] };\n if (warnings.length > 0) {\n fileContent._warning = warnings.join(\"; \");\n }\n fs.writeFileSync(filepath, JSON.stringify(fileContent, null, 2), \"utf-8\");\n writtenToDisk = true;\n } catch (err) {\n const msg = err instanceof Error ? err.message : \"Unknown filesystem error\";\n defaults.logger.error(`Failed to save fixture to disk: ${msg}`);\n res.setHeader(\"X-LLMock-Record-Error\", msg);\n }\n\n if (writtenToDisk) {\n // Register in memory so subsequent identical requests match (skip if empty match)\n if (!isEmptyMatch) {\n fixtures.push(fixture);\n }\n defaults.logger.warn(`Response recorded → ${filepath}`);\n } else {\n defaults.logger.warn(`Response relayed but NOT saved to disk — see error above`);\n }\n\n // Relay upstream response to client\n const relayHeaders: Record<string, string> = {};\n if (ctString) {\n relayHeaders[\"Content-Type\"] = ctString;\n }\n res.writeHead(upstreamStatus, relayHeaders);\n res.end(isBinaryStream ? rawBuffer : upstreamBody);\n\n return true;\n}\n\n// ---------------------------------------------------------------------------\n// Internal helpers\n// ---------------------------------------------------------------------------\n\nfunction makeUpstreamRequest(\n target: URL,\n headers: Record<string, string>,\n body: string,\n): Promise<{ status: number; headers: http.IncomingHttpHeaders; body: string; rawBuffer: Buffer }> {\n return new Promise((resolve, reject) => {\n const transport = target.protocol === \"https:\" ? 
https : http;\n const UPSTREAM_TIMEOUT_MS = 30_000;\n const BODY_TIMEOUT_MS = 30_000;\n const req = transport.request(\n target,\n {\n method: \"POST\",\n timeout: UPSTREAM_TIMEOUT_MS,\n headers: {\n ...headers,\n \"Content-Length\": Buffer.byteLength(body).toString(),\n },\n },\n (res) => {\n res.setTimeout(BODY_TIMEOUT_MS, () => {\n req.destroy(new Error(`Upstream response timed out after ${BODY_TIMEOUT_MS / 1000}s`));\n });\n const chunks: Buffer[] = [];\n res.on(\"data\", (chunk: Buffer) => chunks.push(chunk));\n res.on(\"error\", reject);\n res.on(\"end\", () => {\n const rawBuffer = Buffer.concat(chunks);\n resolve({\n status: res.statusCode ?? 500,\n headers: res.headers,\n body: rawBuffer.toString(),\n rawBuffer,\n });\n });\n },\n );\n req.on(\"timeout\", () => {\n req.destroy(\n new Error(\n `Upstream request timed out after ${UPSTREAM_TIMEOUT_MS / 1000}s: ${target.href}`,\n ),\n );\n });\n req.on(\"error\", reject);\n req.write(body);\n req.end();\n });\n}\n\n/**\n * Detect the response format from the parsed upstream JSON and convert\n * it into an llmock FixtureResponse.\n */\nfunction buildFixtureResponse(\n parsed: unknown,\n status: number,\n encodingFormat?: string,\n): FixtureResponse {\n if (parsed === null || parsed === undefined) {\n // Raw / unparseable response — save as error\n return {\n error: { message: \"Upstream returned non-JSON response\", type: \"proxy_error\" },\n status,\n };\n }\n\n const obj = parsed as Record<string, unknown>;\n\n // Error response\n if (obj.error) {\n const err = obj.error as Record<string, unknown>;\n return {\n error: {\n message: String(err.message ?? \"Unknown error\"),\n type: String(err.type ?? \"api_error\"),\n code: err.code ? String(err.code) : undefined,\n },\n status,\n };\n }\n\n // OpenAI embeddings: { data: [{ embedding: [...] 
}] }\n if (Array.isArray(obj.data) && obj.data.length > 0) {\n const first = obj.data[0] as Record<string, unknown>;\n if (Array.isArray(first.embedding)) {\n return { embedding: first.embedding as number[] };\n }\n if (typeof first.embedding === \"string\" && encodingFormat === \"base64\") {\n try {\n const buf = Buffer.from(first.embedding, \"base64\");\n const floats = new Float32Array(buf.buffer, buf.byteOffset, buf.byteLength / 4);\n return { embedding: Array.from(floats) };\n } catch {\n // Corrupted base64 or non-float32 data — fall through to error\n }\n }\n }\n\n // Direct embedding: { embedding: [...] }\n if (Array.isArray(obj.embedding)) {\n return { embedding: obj.embedding as number[] };\n }\n\n // OpenAI chat completion: { choices: [{ message: { content, tool_calls } }] }\n if (Array.isArray(obj.choices) && obj.choices.length > 0) {\n const choice = obj.choices[0] as Record<string, unknown>;\n const message = choice.message as Record<string, unknown> | undefined;\n if (message) {\n // Tool calls\n if (Array.isArray(message.tool_calls) && message.tool_calls.length > 0) {\n const toolCalls: ToolCall[] = (message.tool_calls as Array<Record<string, unknown>>).map(\n (tc) => {\n const fn = tc.function as Record<string, unknown>;\n return {\n name: String(fn.name),\n arguments: String(fn.arguments),\n };\n },\n );\n return { toolCalls };\n }\n // Text content\n if (typeof message.content === \"string\") {\n return { content: message.content };\n }\n }\n }\n\n // Anthropic: { content: [{ type: \"text\", text: \"...\" }] } or tool_use\n if (Array.isArray(obj.content) && obj.content.length > 0) {\n const blocks = obj.content as Array<Record<string, unknown>>;\n // Check for tool_use blocks first\n const toolUseBlocks = blocks.filter((b) => b.type === \"tool_use\");\n if (toolUseBlocks.length > 0) {\n const toolCalls: ToolCall[] = toolUseBlocks.map((b) => ({\n name: String(b.name),\n arguments: typeof b.input === \"string\" ? 
b.input : JSON.stringify(b.input),\n }));\n return { toolCalls };\n }\n // Text blocks\n const textBlock = blocks.find((b) => b.type === \"text\");\n if (textBlock && typeof textBlock.text === \"string\") {\n return { content: textBlock.text };\n }\n }\n\n // Gemini: { candidates: [{ content: { parts: [{ text: \"...\" }] } }] }\n if (Array.isArray(obj.candidates) && obj.candidates.length > 0) {\n const candidate = obj.candidates[0] as Record<string, unknown>;\n const content = candidate.content as Record<string, unknown> | undefined;\n if (content && Array.isArray(content.parts)) {\n const parts = content.parts as Array<Record<string, unknown>>;\n // Tool calls (functionCall)\n const fnCallParts = parts.filter((p) => p.functionCall);\n if (fnCallParts.length > 0) {\n const toolCalls: ToolCall[] = fnCallParts.map((p) => {\n const fc = p.functionCall as Record<string, unknown>;\n return {\n name: String(fc.name),\n arguments: typeof fc.args === \"string\" ? fc.args : JSON.stringify(fc.args),\n };\n });\n return { toolCalls };\n }\n // Text\n const textPart = parts.find((p) => typeof p.text === \"string\");\n if (textPart && typeof textPart.text === \"string\") {\n return { content: textPart.text };\n }\n }\n }\n\n // Bedrock Converse: { output: { message: { role, content: [{ text }, { toolUse }] } } }\n if (obj.output && typeof obj.output === \"object\") {\n const output = obj.output as Record<string, unknown>;\n const msg = output.message as Record<string, unknown> | undefined;\n if (msg && Array.isArray(msg.content)) {\n const blocks = msg.content as Array<Record<string, unknown>>;\n const toolUseBlocks = blocks.filter((b) => b.toolUse);\n if (toolUseBlocks.length > 0) {\n const toolCalls: ToolCall[] = toolUseBlocks.map((b) => {\n const tu = b.toolUse as Record<string, unknown>;\n return {\n name: String(tu.name ?? \"\"),\n arguments: typeof tu.input === \"string\" ? 
tu.input : JSON.stringify(tu.input),\n };\n });\n return { toolCalls };\n }\n const textBlock = blocks.find((b) => typeof b.text === \"string\");\n if (textBlock && typeof textBlock.text === \"string\") {\n return { content: textBlock.text };\n }\n }\n }\n\n // Ollama: { message: { content: \"...\", tool_calls: [...] } }\n if (obj.message && typeof obj.message === \"object\") {\n const msg = obj.message as Record<string, unknown>;\n // Tool calls (check before content — Ollama sends content: \"\" alongside tool_calls)\n if (Array.isArray(msg.tool_calls) && msg.tool_calls.length > 0) {\n const toolCalls: ToolCall[] = (msg.tool_calls as Array<Record<string, unknown>>)\n .filter((tc) => tc.function != null)\n .map((tc) => {\n const fn = tc.function as Record<string, unknown>;\n return {\n name: String(fn.name ?? \"\"),\n arguments:\n typeof fn.arguments === \"string\" ? fn.arguments : JSON.stringify(fn.arguments),\n };\n });\n return { toolCalls };\n }\n if (typeof msg.content === \"string\" && msg.content.length > 0) {\n return { content: msg.content };\n }\n // Ollama message with content array (like Cohere)\n if (Array.isArray(msg.content) && msg.content.length > 0) {\n const first = msg.content[0] as Record<string, unknown>;\n if (typeof first.text === \"string\") {\n return { content: first.text };\n }\n }\n }\n\n // Fallback: unknown format — save as error\n return {\n error: {\n message: \"Could not detect response format from upstream\",\n type: \"proxy_error\",\n },\n status,\n };\n}\n\n/**\n * Derive fixture match criteria from the original request.\n */\nfunction buildFixtureMatch(request: ChatCompletionRequest): {\n userMessage?: string;\n inputText?: string;\n} {\n // Embedding request\n if (request.embeddingInput) {\n return { inputText: request.embeddingInput };\n }\n\n // Chat request — match on the last user message\n const lastUser = getLastMessageByRole(request.messages ?? 
[], \"user\");\n if (lastUser) {\n const text = getTextContent(lastUser.content);\n if (text) {\n return { userMessage: text };\n }\n }\n\n return {};\n}\n"],"mappings":";;;;;;;;;;;;AAoBA,MAAM,gBAAgB,IAAI,IAAI;CAE5B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CAEA;CACA;CAEA;CACA;CACD,CAAC;;;;;;;;;AAUF,eAAsB,eACpB,KACA,KACA,SACA,aACA,UACA,UACA,UAKA,SACkB;CAClB,MAAM,SAAS,SAAS;AACxB,KAAI,CAAC,OAAQ,QAAO;CAGpB,MAAM,cADY,OAAO,UACK;AAE9B,KAAI,CAAC,aAAa;AAChB,WAAS,OAAO,KAAK,4CAA4C,YAAY,kBAAkB;AAC/F,SAAO;;CAGT,MAAM,cAAc,OAAO,eAAe;CAC1C,IAAI;AACJ,KAAI;AACF,WAAS,mBAAmB,aAAa,SAAS;SAC5C;AACN,WAAS,OAAO,MAAM,sCAAsC,YAAY,KAAK,cAAc;AAC3F,qBACE,KACA,KACA,KAAK,UAAU,EACb,OAAO;GAAE,SAAS,yBAAyB;GAAe,MAAM;GAAe,EAChF,CAAC,CACH;AACD,SAAO;;AAGT,UAAS,OAAO,KAAK,kCAAkC,cAAc,WAAW;CAGhF,MAAM,iBAAyC,EAAE;AACjD,MAAK,MAAM,CAAC,MAAM,QAAQ,OAAO,QAAQ,IAAI,QAAQ,CACnD,KAAI,QAAQ,UAAa,CAAC,cAAc,IAAI,KAAK,CAC/C,gBAAe,QAAQ,MAAM,QAAQ,IAAI,GAAG,IAAI,KAAK,KAAK,GAAG;CAIjE,MAAM,cAAc,WAAW,KAAK,UAAU,QAAQ;CAGtD,IAAI;CACJ,IAAI;CACJ,IAAI;CACJ,IAAI;AAEJ,KAAI;EACF,MAAM,SAAS,MAAM,oBAAoB,QAAQ,gBAAgB,YAAY;AAC7E,mBAAiB,OAAO;AACxB,oBAAkB,OAAO;AACzB,iBAAe,OAAO;AACtB,cAAY,OAAO;UACZ,KAAK;EACZ,MAAM,MAAM,eAAe,QAAQ,IAAI,UAAU;AACjD,WAAS,OAAO,MAAM,yBAAyB,MAAM;AACrD,MAAI,UAAU,KAAK,EAAE,gBAAgB,oBAAoB,CAAC;AAC1D,MAAI,IACF,KAAK,UAAU,EACb,OAAO;GAAE,SAAS,6BAA6B;GAAO,MAAM;GAAe,EAC5E,CAAC,CACH;AACD,SAAO;;CAIT,MAAM,cAAc,gBAAgB;CACpC,MAAM,WAAW,MAAM,QAAQ,YAAY,GAAG,YAAY,KAAK,KAAK,GAAI,eAAe;CACvF,MAAM,iBAAiB,SAAS,aAAa,CAAC,SAAS,qCAAqC;CAC5F,MAAM,YAAY,0BAChB,UACA,aACA,iBAAiB,YAAY,cAC7B,SAAS,OACV;CAED,IAAI;AAEJ,KAAI,WAAW;AAEb,WAAS,OAAO,KAAK,gCAAgC,SAAS,2BAA2B;AACzF,MAAI,UAAU,UACZ,UAAS,OAAO,KAAK,gEAAgE;AAEvF,MAAI,UAAU,iBAAiB,UAAU,gBAAgB,EACvD,UAAS,OAAO,KAAK,GAAG,UAAU,cAAc,0CAA0C;AAE5F,MAAI,UAAU,YAAY,OAAO,CAAC,UAAU,aAAa,UAAU,UAAU,WAAW,GACtF,UAAS,OAAO,KAAK,qEAAqE;AAE5F,MAAI,UAAU,aAAa,UAAU,UAAU,SAAS,GAAG;AACzD,OAAI,UAAU,QACZ,UAAS,OAAO,KACd,2EACD;AAEH,qBAAkB,EAAE,WAAW,UAAU,WAAW;QAEpD,mBAAkB,EAAE,SAAS,UAAU,WAAW,IAAI;QAEnD;EAEL,IAAI,iBAA0B;AAC9B,MAAI;AACF,oBAAiB,K
AAK,MAAM,aAAa;UACnC;AAEN,YAAS,OAAO,KAAK,gEAAgE;;EAEvF,IAAI;AACJ,MAAI;AACF,oBAAiB,UAAU,KAAK,MAAM,QAAQ,CAAC,kBAAkB;UAC3D;AAGR,oBAAkB,qBAAqB,gBAAgB,gBAAgB,eAAe;;CAKxF,MAAM,eAAe,kBADA,SAAS,mBAAmB,SAAS,iBAAiB,QAAQ,GAAG,QAClC;CAGpD,MAAM,UAAmB;EAAE,OAAO;EAAc,UAAU;EAAiB;CAG3E,MAAM,cAAc,OAAO,OAAO,aAAa;CAC/C,MAAM,eAAe,YAAY,WAAW,KAAK,YAAY,OAAO,MAAM,MAAM,OAAU;AAC1F,KAAI,aACF,UAAS,OAAO,KACd,8EACD;CAIH,MAAM,WAAW,GAAG,YAAY,oBADd,IAAI,MAAM,EAAC,aAAa,CAAC,QAAQ,SAAS,IAAI,CACnB,GAAG,OAAO,YAAY,CAAC,MAAM,GAAG,EAAE,CAAC;CAChF,MAAM,WAAW,KAAK,KAAK,aAAa,SAAS;CAEjD,IAAI,gBAAgB;AACpB,KAAI;AAEF,KAAG,UAAU,aAAa,EAAE,WAAW,MAAM,CAAC;EAG9C,MAAM,WAAqB,EAAE;AAC7B,MAAI,aACF,UAAS,KAAK,iEAAiE;AAEjF,MAAI,WAAW,UACb,UAAS,KAAK,4DAA4D;EAI5E,MAAM,cAAuC,EAAE,UAAU,CAAC,QAAQ,EAAE;AACpE,MAAI,SAAS,SAAS,EACpB,aAAY,WAAW,SAAS,KAAK,KAAK;AAE5C,KAAG,cAAc,UAAU,KAAK,UAAU,aAAa,MAAM,EAAE,EAAE,QAAQ;AACzE,kBAAgB;UACT,KAAK;EACZ,MAAM,MAAM,eAAe,QAAQ,IAAI,UAAU;AACjD,WAAS,OAAO,MAAM,mCAAmC,MAAM;AAC/D,MAAI,UAAU,yBAAyB,IAAI;;AAG7C,KAAI,eAAe;AAEjB,MAAI,CAAC,aACH,UAAS,KAAK,QAAQ;AAExB,WAAS,OAAO,KAAK,uBAAuB,WAAW;OAEvD,UAAS,OAAO,KAAK,2DAA2D;CAIlF,MAAM,eAAuC,EAAE;AAC/C,KAAI,SACF,cAAa,kBAAkB;AAEjC,KAAI,UAAU,gBAAgB,aAAa;AAC3C,KAAI,IAAI,iBAAiB,YAAY,aAAa;AAElD,QAAO;;AAOT,SAAS,oBACP,QACA,SACA,MACiG;AACjG,QAAO,IAAI,SAAS,SAAS,WAAW;EACtC,MAAM,YAAY,OAAO,aAAa,WAAW,QAAQ;EACzD,MAAM,sBAAsB;EAC5B,MAAM,kBAAkB;EACxB,MAAM,MAAM,UAAU,QACpB,QACA;GACE,QAAQ;GACR,SAAS;GACT,SAAS;IACP,GAAG;IACH,kBAAkB,OAAO,WAAW,KAAK,CAAC,UAAU;IACrD;GACF,GACA,QAAQ;AACP,OAAI,WAAW,uBAAuB;AACpC,QAAI,wBAAQ,IAAI,MAAM,qCAAqC,kBAAkB,IAAK,GAAG,CAAC;KACtF;GACF,MAAM,SAAmB,EAAE;AAC3B,OAAI,GAAG,SAAS,UAAkB,OAAO,KAAK,MAAM,CAAC;AACrD,OAAI,GAAG,SAAS,OAAO;AACvB,OAAI,GAAG,aAAa;IAClB,MAAM,YAAY,OAAO,OAAO,OAAO;AACvC,YAAQ;KACN,QAAQ,IAAI,cAAc;KAC1B,SAAS,IAAI;KACb,MAAM,UAAU,UAAU;KAC1B;KACD,CAAC;KACF;IAEL;AACD,MAAI,GAAG,iBAAiB;AACtB,OAAI,wBACF,IAAI,MACF,oCAAoC,sBAAsB,IAAK,KAAK,OAAO,OAC5E,CACF;IACD;AACF,MAAI,GAAG,SAAS,OAAO;AACvB,MAAI,MAAM,KAAK;AACf,MAAI,KAAK;GACT;;;;;;AAOJ,SAAS,qBACP,QACA,QACA,gBACiB;AACjB,
KAAI,WAAW,QAAQ,WAAW,OAEhC,QAAO;EACL,OAAO;GAAE,SAAS;GAAuC,MAAM;GAAe;EAC9E;EACD;CAGH,MAAM,MAAM;AAGZ,KAAI,IAAI,OAAO;EACb,MAAM,MAAM,IAAI;AAChB,SAAO;GACL,OAAO;IACL,SAAS,OAAO,IAAI,WAAW,gBAAgB;IAC/C,MAAM,OAAO,IAAI,QAAQ,YAAY;IACrC,MAAM,IAAI,OAAO,OAAO,IAAI,KAAK,GAAG;IACrC;GACD;GACD;;AAIH,KAAI,MAAM,QAAQ,IAAI,KAAK,IAAI,IAAI,KAAK,SAAS,GAAG;EAClD,MAAM,QAAQ,IAAI,KAAK;AACvB,MAAI,MAAM,QAAQ,MAAM,UAAU,CAChC,QAAO,EAAE,WAAW,MAAM,WAAuB;AAEnD,MAAI,OAAO,MAAM,cAAc,YAAY,mBAAmB,SAC5D,KAAI;GACF,MAAM,MAAM,OAAO,KAAK,MAAM,WAAW,SAAS;GAClD,MAAM,SAAS,IAAI,aAAa,IAAI,QAAQ,IAAI,YAAY,IAAI,aAAa,EAAE;AAC/E,UAAO,EAAE,WAAW,MAAM,KAAK,OAAO,EAAE;UAClC;;AAOZ,KAAI,MAAM,QAAQ,IAAI,UAAU,CAC9B,QAAO,EAAE,WAAW,IAAI,WAAuB;AAIjD,KAAI,MAAM,QAAQ,IAAI,QAAQ,IAAI,IAAI,QAAQ,SAAS,GAAG;EAExD,MAAM,UADS,IAAI,QAAQ,GACJ;AACvB,MAAI,SAAS;AAEX,OAAI,MAAM,QAAQ,QAAQ,WAAW,IAAI,QAAQ,WAAW,SAAS,EAUnE,QAAO,EAAE,WATsB,QAAQ,WAA8C,KAClF,OAAO;IACN,MAAM,KAAK,GAAG;AACd,WAAO;KACL,MAAM,OAAO,GAAG,KAAK;KACrB,WAAW,OAAO,GAAG,UAAU;KAChC;KAEJ,EACmB;AAGtB,OAAI,OAAO,QAAQ,YAAY,SAC7B,QAAO,EAAE,SAAS,QAAQ,SAAS;;;AAMzC,KAAI,MAAM,QAAQ,IAAI,QAAQ,IAAI,IAAI,QAAQ,SAAS,GAAG;EACxD,MAAM,SAAS,IAAI;EAEnB,MAAM,gBAAgB,OAAO,QAAQ,MAAM,EAAE,SAAS,WAAW;AACjE,MAAI,cAAc,SAAS,EAKzB,QAAO,EAAE,WAJqB,cAAc,KAAK,OAAO;GACtD,MAAM,OAAO,EAAE,KAAK;GACpB,WAAW,OAAO,EAAE,UAAU,WAAW,EAAE,QAAQ,KAAK,UAAU,EAAE,MAAM;GAC3E,EAAE,EACiB;EAGtB,MAAM,YAAY,OAAO,MAAM,MAAM,EAAE,SAAS,OAAO;AACvD,MAAI,aAAa,OAAO,UAAU,SAAS,SACzC,QAAO,EAAE,SAAS,UAAU,MAAM;;AAKtC,KAAI,MAAM,QAAQ,IAAI,WAAW,IAAI,IAAI,WAAW,SAAS,GAAG;EAE9D,MAAM,UADY,IAAI,WAAW,GACP;AAC1B,MAAI,WAAW,MAAM,QAAQ,QAAQ,MAAM,EAAE;GAC3C,MAAM,QAAQ,QAAQ;GAEtB,MAAM,cAAc,MAAM,QAAQ,MAAM,EAAE,aAAa;AACvD,OAAI,YAAY,SAAS,EAQvB,QAAO,EAAE,WAPqB,YAAY,KAAK,MAAM;IACnD,MAAM,KAAK,EAAE;AACb,WAAO;KACL,MAAM,OAAO,GAAG,KAAK;KACrB,WAAW,OAAO,GAAG,SAAS,WAAW,GAAG,OAAO,KAAK,UAAU,GAAG,KAAK;KAC3E;KACD,EACkB;GAGtB,MAAM,WAAW,MAAM,MAAM,MAAM,OAAO,EAAE,SAAS,SAAS;AAC9D,OAAI,YAAY,OAAO,SAAS,SAAS,SACvC,QAAO,EAAE,SAAS,SAAS,MAAM;;;AAMvC,KAAI,IAAI,UAAU,OAAO,IAAI,WAAW,UAAU;EAEhD,MAAM,MADS,IAAI,OACA;AACnB,M
AAI,OAAO,MAAM,QAAQ,IAAI,QAAQ,EAAE;GACrC,MAAM,SAAS,IAAI;GACnB,MAAM,gBAAgB,OAAO,QAAQ,MAAM,EAAE,QAAQ;AACrD,OAAI,cAAc,SAAS,EAQzB,QAAO,EAAE,WAPqB,cAAc,KAAK,MAAM;IACrD,MAAM,KAAK,EAAE;AACb,WAAO;KACL,MAAM,OAAO,GAAG,QAAQ,GAAG;KAC3B,WAAW,OAAO,GAAG,UAAU,WAAW,GAAG,QAAQ,KAAK,UAAU,GAAG,MAAM;KAC9E;KACD,EACkB;GAEtB,MAAM,YAAY,OAAO,MAAM,MAAM,OAAO,EAAE,SAAS,SAAS;AAChE,OAAI,aAAa,OAAO,UAAU,SAAS,SACzC,QAAO,EAAE,SAAS,UAAU,MAAM;;;AAMxC,KAAI,IAAI,WAAW,OAAO,IAAI,YAAY,UAAU;EAClD,MAAM,MAAM,IAAI;AAEhB,MAAI,MAAM,QAAQ,IAAI,WAAW,IAAI,IAAI,WAAW,SAAS,EAW3D,QAAO,EAAE,WAVsB,IAAI,WAChC,QAAQ,OAAO,GAAG,YAAY,KAAK,CACnC,KAAK,OAAO;GACX,MAAM,KAAK,GAAG;AACd,UAAO;IACL,MAAM,OAAO,GAAG,QAAQ,GAAG;IAC3B,WACE,OAAO,GAAG,cAAc,WAAW,GAAG,YAAY,KAAK,UAAU,GAAG,UAAU;IACjF;IACD,EACgB;AAEtB,MAAI,OAAO,IAAI,YAAY,YAAY,IAAI,QAAQ,SAAS,EAC1D,QAAO,EAAE,SAAS,IAAI,SAAS;AAGjC,MAAI,MAAM,QAAQ,IAAI,QAAQ,IAAI,IAAI,QAAQ,SAAS,GAAG;GACxD,MAAM,QAAQ,IAAI,QAAQ;AAC1B,OAAI,OAAO,MAAM,SAAS,SACxB,QAAO,EAAE,SAAS,MAAM,MAAM;;;AAMpC,QAAO;EACL,OAAO;GACL,SAAS;GACT,MAAM;GACP;EACD;EACD;;;;;AAMH,SAAS,kBAAkB,SAGzB;AAEA,KAAI,QAAQ,eACV,QAAO,EAAE,WAAW,QAAQ,gBAAgB;CAI9C,MAAM,WAAW,qBAAqB,QAAQ,YAAY,EAAE,EAAE,OAAO;AACrE,KAAI,UAAU;EACZ,MAAM,OAAO,eAAe,SAAS,QAAQ;AAC7C,MAAI,KACF,QAAO,EAAE,aAAa,MAAM;;AAIhC,QAAO,EAAE"}
@@ -76,108 +76,9 @@ function itemId() {
76
76
  return require_helpers.generateId("msg");
77
77
  }
78
78
  function buildTextStreamEvents(content, model, chunkSize, reasoning, webSearches) {
79
- const respId = responseId();
80
- const msgId = itemId();
81
- const created = Math.floor(Date.now() / 1e3);
82
- const events = [];
83
- let msgOutputIndex = 0;
84
- const prefixOutputItems = [];
85
- events.push({
86
- type: "response.created",
87
- response: {
88
- id: respId,
89
- object: "response",
90
- created_at: created,
91
- model,
92
- status: "in_progress",
93
- output: []
94
- }
95
- });
96
- events.push({
97
- type: "response.in_progress",
98
- response: {
99
- id: respId,
100
- object: "response",
101
- created_at: created,
102
- model,
103
- status: "in_progress",
104
- output: []
105
- }
106
- });
107
- if (reasoning) {
108
- const reasoningEvents = buildReasoningStreamEvents(reasoning, model, chunkSize);
109
- events.push(...reasoningEvents);
110
- const doneEvent = reasoningEvents.find((e) => e.type === "response.output_item.done" && e.item?.type === "reasoning");
111
- if (doneEvent) prefixOutputItems.push(doneEvent.item);
112
- msgOutputIndex++;
113
- }
114
- if (webSearches && webSearches.length > 0) {
115
- const searchEvents = buildWebSearchStreamEvents(webSearches, msgOutputIndex);
116
- events.push(...searchEvents);
117
- const doneEvents = searchEvents.filter((e) => e.type === "response.output_item.done" && e.item?.type === "web_search_call");
118
- for (const de of doneEvents) prefixOutputItems.push(de.item);
119
- msgOutputIndex += webSearches.length;
120
- }
121
- events.push({
122
- type: "response.output_item.added",
123
- output_index: msgOutputIndex,
124
- item: {
125
- type: "message",
126
- id: msgId,
127
- status: "in_progress",
128
- role: "assistant",
129
- content: []
130
- }
131
- });
132
- events.push({
133
- type: "response.content_part.added",
134
- output_index: msgOutputIndex,
135
- content_index: 0,
136
- part: {
137
- type: "output_text",
138
- text: ""
139
- }
140
- });
141
- for (let i = 0; i < content.length; i += chunkSize) {
142
- const slice = content.slice(i, i + chunkSize);
143
- events.push({
144
- type: "response.output_text.delta",
145
- item_id: msgId,
146
- output_index: msgOutputIndex,
147
- content_index: 0,
148
- delta: slice
149
- });
150
- }
151
- events.push({
152
- type: "response.output_text.done",
153
- output_index: msgOutputIndex,
154
- content_index: 0,
155
- text: content
156
- });
157
- events.push({
158
- type: "response.content_part.done",
159
- output_index: msgOutputIndex,
160
- content_index: 0,
161
- part: {
162
- type: "output_text",
163
- text: content
164
- }
165
- });
166
- const msgItem = {
167
- type: "message",
168
- id: msgId,
169
- status: "completed",
170
- role: "assistant",
171
- content: [{
172
- type: "output_text",
173
- text: content
174
- }]
175
- };
176
- events.push({
177
- type: "response.output_item.done",
178
- output_index: msgOutputIndex,
179
- item: msgItem
180
- });
79
+ const { respId, created, events, prefixOutputItems, nextOutputIndex } = buildResponsePreamble(model, chunkSize, reasoning, webSearches);
80
+ const { events: msgEvents, msgItem } = buildMessageOutputEvents(content, chunkSize, nextOutputIndex);
81
+ events.push(...msgEvents);
181
82
  events.push({
182
83
  type: "response.completed",
183
84
  response: {
@@ -359,7 +260,7 @@ function buildWebSearchStreamEvents(queries, startOutputIndex) {
359
260
  type: "web_search_call",
360
261
  id: searchId,
361
262
  status: "in_progress",
362
- query: queries[i]
263
+ action: { query: queries[i] }
363
264
  }
364
265
  });
365
266
  events.push({
@@ -369,15 +270,128 @@ function buildWebSearchStreamEvents(queries, startOutputIndex) {
369
270
  type: "web_search_call",
370
271
  id: searchId,
371
272
  status: "completed",
372
- query: queries[i]
273
+ action: { query: queries[i] }
373
274
  }
374
275
  });
375
276
  }
376
277
  return events;
377
278
  }
378
- function buildTextResponse(content, model, reasoning, webSearches) {
279
+ function buildResponsePreamble(model, chunkSize, reasoning, webSearches) {
379
280
  const respId = responseId();
281
+ const created = Math.floor(Date.now() / 1e3);
282
+ const events = [];
283
+ const prefixOutputItems = [];
284
+ let nextOutputIndex = 0;
285
+ events.push({
286
+ type: "response.created",
287
+ response: {
288
+ id: respId,
289
+ object: "response",
290
+ created_at: created,
291
+ model,
292
+ status: "in_progress",
293
+ output: []
294
+ }
295
+ });
296
+ events.push({
297
+ type: "response.in_progress",
298
+ response: {
299
+ id: respId,
300
+ object: "response",
301
+ created_at: created,
302
+ model,
303
+ status: "in_progress",
304
+ output: []
305
+ }
306
+ });
307
+ if (reasoning) {
308
+ const reasoningEvents = buildReasoningStreamEvents(reasoning, model, chunkSize);
309
+ events.push(...reasoningEvents);
310
+ const doneEvent = reasoningEvents.find((e) => e.type === "response.output_item.done" && e.item?.type === "reasoning");
311
+ if (doneEvent) prefixOutputItems.push(doneEvent.item);
312
+ nextOutputIndex++;
313
+ }
314
+ if (webSearches && webSearches.length > 0) {
315
+ const searchEvents = buildWebSearchStreamEvents(webSearches, nextOutputIndex);
316
+ events.push(...searchEvents);
317
+ const doneEvents = searchEvents.filter((e) => e.type === "response.output_item.done" && e.item?.type === "web_search_call");
318
+ for (const de of doneEvents) prefixOutputItems.push(de.item);
319
+ nextOutputIndex += webSearches.length;
320
+ }
321
+ return {
322
+ respId,
323
+ created,
324
+ events,
325
+ prefixOutputItems,
326
+ nextOutputIndex
327
+ };
328
+ }
329
+ function buildMessageOutputEvents(content, chunkSize, outputIndex) {
380
330
  const msgId = itemId();
331
+ const events = [];
332
+ events.push({
333
+ type: "response.output_item.added",
334
+ output_index: outputIndex,
335
+ item: {
336
+ type: "message",
337
+ id: msgId,
338
+ status: "in_progress",
339
+ role: "assistant",
340
+ content: []
341
+ }
342
+ });
343
+ events.push({
344
+ type: "response.content_part.added",
345
+ output_index: outputIndex,
346
+ content_index: 0,
347
+ part: {
348
+ type: "output_text",
349
+ text: ""
350
+ }
351
+ });
352
+ for (let i = 0; i < content.length; i += chunkSize) events.push({
353
+ type: "response.output_text.delta",
354
+ item_id: msgId,
355
+ output_index: outputIndex,
356
+ content_index: 0,
357
+ delta: content.slice(i, i + chunkSize)
358
+ });
359
+ events.push({
360
+ type: "response.output_text.done",
361
+ output_index: outputIndex,
362
+ content_index: 0,
363
+ text: content
364
+ });
365
+ events.push({
366
+ type: "response.content_part.done",
367
+ output_index: outputIndex,
368
+ content_index: 0,
369
+ part: {
370
+ type: "output_text",
371
+ text: content
372
+ }
373
+ });
374
+ const msgItem = {
375
+ type: "message",
376
+ id: msgId,
377
+ status: "completed",
378
+ role: "assistant",
379
+ content: [{
380
+ type: "output_text",
381
+ text: content
382
+ }]
383
+ };
384
+ events.push({
385
+ type: "response.output_item.done",
386
+ output_index: outputIndex,
387
+ item: msgItem
388
+ });
389
+ return {
390
+ events,
391
+ msgItem
392
+ };
393
+ }
394
+ function buildOutputPrefix(content, reasoning, webSearches) {
381
395
  const output = [];
382
396
  if (reasoning) output.push({
383
397
  type: "reasoning",
@@ -391,11 +405,11 @@ function buildTextResponse(content, model, reasoning, webSearches) {
391
405
  type: "web_search_call",
392
406
  id: require_helpers.generateId("ws"),
393
407
  status: "completed",
394
- query
408
+ action: { query }
395
409
  });
396
410
  output.push({
397
411
  type: "message",
398
- id: msgId,
412
+ id: itemId(),
399
413
  status: "completed",
400
414
  role: "assistant",
401
415
  content: [{
@@ -403,8 +417,11 @@ function buildTextResponse(content, model, reasoning, webSearches) {
403
417
  text: content
404
418
  }]
405
419
  });
420
+ return output;
421
+ }
422
+ function buildResponseEnvelope(model, output) {
406
423
  return {
407
- id: respId,
424
+ id: responseId(),
408
425
  object: "response",
409
426
  created_at: Math.floor(Date.now() / 1e3),
410
427
  model,
@@ -417,27 +434,101 @@ function buildTextResponse(content, model, reasoning, webSearches) {
417
434
  }
418
435
  };
419
436
  }
437
/**
 * Build a complete (non-streaming) Responses API body for a plain
 * assistant text reply, optionally preceded by reasoning and
 * web-search output items.
 *
 * @param {string} content - Assistant message text.
 * @param {string} model - Model name echoed back in the envelope.
 * @param {*} [reasoning] - Optional reasoning fixture forwarded to the output prefix.
 * @param {*} [webSearches] - Optional web-search fixture forwarded to the output prefix.
 * @returns {object} Full response envelope with the assembled output items.
 */
function buildTextResponse(content, model, reasoning, webSearches) {
  const output = buildOutputPrefix(content, reasoning, webSearches);
  return buildResponseEnvelope(model, output);
}
420
440
/**
 * Build a complete (non-streaming) Responses API body whose output is
 * one completed `function_call` item per requested tool call.
 *
 * @param {Array<{id?: string, name: string, arguments: string}>} toolCalls
 *   Tool calls to render; a missing `id` gets a generated call id.
 * @param {string} model - Model name echoed back in the envelope.
 * @returns {object} Full response envelope containing the call items.
 */
function buildToolCallResponse(toolCalls, model) {
  const output = [];
  for (const call of toolCalls) {
    output.push({
      type: "function_call",
      id: require_helpers.generateId("fc"),
      call_id: call.id || require_helpers.generateToolCallId(),
      name: call.name,
      arguments: call.arguments,
      status: "completed"
    });
  }
  return buildResponseEnvelope(model, output);
}
450
/**
 * Build the ordered list of Responses API SSE events for a reply that
 * contains both assistant text and one or more function (tool) calls.
 *
 * Event order: preamble events (from buildResponsePreamble), then the
 * assistant-message events (from buildMessageOutputEvents), then for
 * each tool call an `output_item.added` / argument deltas /
 * `function_call_arguments.done` / `output_item.done` sequence, and
 * finally one `response.completed` event whose `output` echoes every
 * emitted item.
 *
 * @param {string} content - Assistant message text.
 * @param {Array<{id?: string, name: string, arguments: string}>} toolCalls
 *   Tool calls to stream after the message.
 * @param {string} model - Model name echoed in the final response.
 * @param {number} chunkSize - Characters per delta event.
 * @param {*} [reasoning] - Optional reasoning fixture for the preamble.
 * @param {*} [webSearches] - Optional web-search fixture for the preamble.
 * @returns {Array<object>} Ordered stream events.
 */
function buildContentWithToolCallsStreamEvents(content, toolCalls, model, chunkSize, reasoning, webSearches) {
  const { respId, created, events, prefixOutputItems, nextOutputIndex } =
    buildResponsePreamble(model, chunkSize, reasoning, webSearches);

  // Assistant text events come first, at the output index the preamble reserved.
  const { events: msgEvents, msgItem } = buildMessageOutputEvents(content, chunkSize, nextOutputIndex);
  events.push(...msgEvents);

  // Collect each call's completed item so the final response.completed
  // envelope can list them after the prefix items and the message item.
  const completedCalls = [];
  toolCalls.forEach((call, index) => {
    const callId = call.id || require_helpers.generateToolCallId();
    const fnCallId = require_helpers.generateId("fc");
    const outputIndex = nextOutputIndex + 1 + index;
    const argsText = call.arguments;

    events.push({
      type: "response.output_item.added",
      output_index: outputIndex,
      item: {
        type: "function_call",
        id: fnCallId,
        call_id: callId,
        name: call.name,
        arguments: "",
        status: "in_progress"
      }
    });

    // Stream the JSON argument string in chunkSize-sized slices.
    for (let offset = 0; offset < argsText.length; offset += chunkSize) {
      events.push({
        type: "response.function_call_arguments.delta",
        item_id: fnCallId,
        output_index: outputIndex,
        delta: argsText.slice(offset, offset + chunkSize)
      });
    }

    events.push({
      type: "response.function_call_arguments.done",
      output_index: outputIndex,
      arguments: argsText
    });

    const completedItem = {
      type: "function_call",
      id: fnCallId,
      call_id: callId,
      name: call.name,
      arguments: argsText,
      status: "completed"
    };
    events.push({
      type: "response.output_item.done",
      output_index: outputIndex,
      item: completedItem
    });
    completedCalls.push(completedItem);
  });

  events.push({
    type: "response.completed",
    response: {
      id: respId,
      object: "response",
      created_at: created,
      model,
      status: "completed",
      output: [...prefixOutputItems, msgItem, ...completedCalls],
      usage: {
        input_tokens: 0,
        output_tokens: 0,
        total_tokens: 0
      }
    }
  });
  return events;
}
521
/**
 * Build a complete (non-streaming) Responses API body combining an
 * assistant text message (plus optional reasoning / web-search items)
 * with one completed `function_call` item per requested tool call.
 *
 * @param {string} content - Assistant message text.
 * @param {Array<{id?: string, name: string, arguments: string}>} toolCalls
 *   Tool calls appended after the message output.
 * @param {string} model - Model name echoed back in the envelope.
 * @param {*} [reasoning] - Optional reasoning fixture for the output prefix.
 * @param {*} [webSearches] - Optional web-search fixture for the output prefix.
 * @returns {object} Full response envelope with all output items.
 */
function buildContentWithToolCallsResponse(content, toolCalls, model, reasoning, webSearches) {
  const output = buildOutputPrefix(content, reasoning, webSearches);
  const callItems = toolCalls.map((call) => ({
    type: "function_call",
    id: require_helpers.generateId("fc"),
    call_id: call.id || require_helpers.generateToolCallId(),
    name: call.name,
    arguments: call.arguments,
    status: "completed"
  }));
  return buildResponseEnvelope(model, [...output, ...callItems]);
}
442
533
  async function writeResponsesSSEStream(res, events, optionsOrLatency) {
443
534
  const opts = typeof optionsOrLatency === "number" ? { latency: optionsOrLatency } : optionsOrLatency ?? {};
@@ -487,8 +578,9 @@ async function handleResponses(req, res, raw, fixtures, journal, defaults, setCo
487
578
  return;
488
579
  }
489
580
  const completionReq = responsesToCompletionRequest(responsesReq);
490
- const fixture = require_router.matchFixture(fixtures, completionReq, journal.fixtureMatchCounts);
491
- if (fixture) journal.incrementFixtureMatchCount(fixture, fixtures);
581
+ const testId = require_helpers.getTestId(req);
582
+ const fixture = require_router.matchFixture(fixtures, completionReq, journal.getFixtureMatchCountsForTest(testId), defaults.requestTransform);
583
+ if (fixture) journal.incrementFixtureMatchCount(fixture, fixtures, testId);
492
584
  if (require_chaos.applyChaos(res, fixture, defaults.chaos, req.headers, journal, {
493
585
  method: req.method ?? "POST",
494
586
  path: req.url ?? "/v1/responses",
@@ -549,6 +641,38 @@ async function handleResponses(req, res, raw, fixtures, journal, defaults, setCo
549
641
  require_sse_writer.writeErrorResponse(res, status, JSON.stringify(response));
550
642
  return;
551
643
  }
644
+ if (require_helpers.isContentWithToolCallsResponse(response)) {
645
+ const journalEntry = journal.add({
646
+ method: req.method ?? "POST",
647
+ path: req.url ?? "/v1/responses",
648
+ headers: require_helpers.flattenHeaders(req.headers),
649
+ body: completionReq,
650
+ response: {
651
+ status: 200,
652
+ fixture
653
+ }
654
+ });
655
+ if (responsesReq.stream !== true) {
656
+ const body = buildContentWithToolCallsResponse(response.content, response.toolCalls, completionReq.model, response.reasoning, response.webSearches);
657
+ res.writeHead(200, { "Content-Type": "application/json" });
658
+ res.end(JSON.stringify(body));
659
+ } else {
660
+ const events = buildContentWithToolCallsStreamEvents(response.content, response.toolCalls, completionReq.model, chunkSize, response.reasoning, response.webSearches);
661
+ const interruption = require_interruption.createInterruptionSignal(fixture);
662
+ if (!await writeResponsesSSEStream(res, events, {
663
+ latency,
664
+ streamingProfile: fixture.streamingProfile,
665
+ signal: interruption?.signal,
666
+ onChunkSent: interruption?.tick
667
+ })) {
668
+ if (!res.writableEnded) res.destroy();
669
+ journalEntry.response.interrupted = true;
670
+ journalEntry.response.interruptReason = interruption?.reason();
671
+ }
672
+ interruption?.cleanup();
673
+ }
674
+ return;
675
+ }
552
676
  if (require_helpers.isTextResponse(response)) {
553
677
  const journalEntry = journal.add({
554
678
  method: req.method ?? "POST",