ai 2.2.27 → 2.2.29

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
@@ -0,0 +1 @@
+ {"version":3,"sources":["../streams/ai-stream.ts","../shared/stream-parts.ts","../streams/stream-data.ts","../streams/aws-bedrock-stream.ts","../shared/utils.ts","../streams/openai-stream.ts","../streams/streaming-text-response.ts","../streams/huggingface-stream.ts","../streams/cohere-stream.ts","../streams/anthropic-stream.ts","../streams/langchain-stream.ts","../streams/replicate-stream.ts","../shared/read-data-stream.ts","../shared/parse-complex-response.ts","../streams/streaming-react-response.ts","../streams/assistant-response.ts"],"sourcesContent":["import {\n createParser,\n type EventSourceParser,\n type ParsedEvent,\n type ReconnectInterval,\n} from 'eventsource-parser';\nimport { OpenAIStreamCallbacks } from './openai-stream';\n\nexport interface FunctionCallPayload {\n name: string;\n arguments: Record<string, unknown>;\n}\n\n/**\n * Configuration options and helper callback methods for AIStream stream lifecycle events.\n * @interface\n */\nexport interface AIStreamCallbacksAndOptions {\n /** `onStart`: Called once when the stream is initialized. */\n onStart?: () => Promise<void> | void;\n /** `onCompletion`: Called for each tokenized message. */\n onCompletion?: (completion: string) => Promise<void> | void;\n /** `onFinal`: Called once when the stream is closed with the final completion message. */\n onFinal?: (completion: string) => Promise<void> | void;\n /** `onToken`: Called for each tokenized message. */\n onToken?: (token: string) => Promise<void> | void;\n /**\n * A flag for enabling the experimental_StreamData class and the new protocol.\n * @see https://github.com/vercel-labs/ai/pull/425\n *\n * When StreamData is rolled out, this will be removed and the new protocol will be used by default.\n */\n experimental_streamData?: boolean;\n}\n\n// new TokenData()\n// data: TokenData,\n/**\n * Custom parser for AIStream data.\n * @interface\n */\nexport interface AIStreamParser {\n (data: string): string | void;\n}\n\n/**\n * Creates a TransformStream that parses events from an EventSource stream using a custom parser.\n * @param {AIStreamParser} customParser - Function to handle event data.\n * @returns {TransformStream<Uint8Array, string>} TransformStream parsing events.\n */\nexport function createEventStreamTransformer(\n customParser?: AIStreamParser,\n): TransformStream<Uint8Array, string> {\n const textDecoder = new TextDecoder();\n let eventSourceParser: EventSourceParser;\n\n return new TransformStream({\n async start(controller): Promise<void> {\n eventSourceParser = createParser(\n (event: ParsedEvent | ReconnectInterval) => {\n if (\n ('data' in event &&\n event.type === 'event' &&\n event.data === '[DONE]') ||\n // Replicate doesn't send [DONE] but does send a 'done' event\n // @see https://replicate.com/docs/streaming\n (event as any).event === 'done'\n ) {\n controller.terminate();\n return;\n }\n\n if ('data' in event) {\n const parsedMessage = customParser\n ? 
customParser(event.data)\n : event.data;\n if (parsedMessage) controller.enqueue(parsedMessage);\n }\n },\n );\n },\n\n transform(chunk) {\n eventSourceParser.feed(textDecoder.decode(chunk));\n },\n });\n}\n\n/**\n * Creates a transform stream that encodes input messages and invokes optional callback functions.\n * The transform stream uses the provided callbacks to execute custom logic at different stages of the stream's lifecycle.\n * - `onStart`: Called once when the stream is initialized.\n * - `onToken`: Called for each tokenized message.\n * - `onCompletion`: Called every time an AIStream completion message is received. This can occur multiple times when using e.g. OpenAI functions\n * - `onFinal`: Called once when the stream is closed with the final completion message.\n *\n * This function is useful when you want to process a stream of messages and perform specific actions during the stream's lifecycle.\n *\n * @param {AIStreamCallbacksAndOptions} [callbacks] - An object containing the callback functions.\n * @return {TransformStream<string, Uint8Array>} A transform stream that encodes input messages as Uint8Array and allows the execution of custom logic through callbacks.\n *\n * @example\n * const callbacks = {\n * onStart: async () => console.log('Stream started'),\n * onToken: async (token) => console.log(`Token: ${token}`),\n * onCompletion: async (completion) => console.log(`Completion: ${completion}`)\n * onFinal: async () => data.close()\n * };\n * const transformer = createCallbacksTransformer(callbacks);\n */\nexport function createCallbacksTransformer(\n cb: AIStreamCallbacksAndOptions | OpenAIStreamCallbacks | undefined,\n): TransformStream<string, Uint8Array> {\n const textEncoder = new TextEncoder();\n let aggregatedResponse = '';\n const callbacks = cb || {};\n\n return new TransformStream({\n async start(): Promise<void> {\n if (callbacks.onStart) await callbacks.onStart();\n },\n\n async transform(message, controller): Promise<void> {\n controller.enqueue(textEncoder.encode(message));\n\n aggregatedResponse += message;\n if (callbacks.onToken) await callbacks.onToken(message);\n },\n\n async flush(): Promise<void> {\n const isOpenAICallbacks = isOfTypeOpenAIStreamCallbacks(callbacks);\n // If it's OpenAICallbacks, it has an experimental_onFunctionCall which means that the createFunctionCallTransformer\n // will handle calling onComplete.\n if (callbacks.onCompletion) {\n await callbacks.onCompletion(aggregatedResponse);\n }\n\n if (callbacks.onFinal && !isOpenAICallbacks) {\n await callbacks.onFinal(aggregatedResponse);\n }\n },\n });\n}\n\nfunction isOfTypeOpenAIStreamCallbacks(\n callbacks: AIStreamCallbacksAndOptions | OpenAIStreamCallbacks,\n): callbacks is OpenAIStreamCallbacks {\n return 'experimental_onFunctionCall' in callbacks;\n}\n/**\n * Returns a stateful function that, when invoked, trims leading whitespace\n * from the input text. The trimming only occurs on the first invocation, ensuring that\n * subsequent calls do not alter the input text. 
This is particularly useful in scenarios\n * where a text stream is being processed and only the initial whitespace should be removed.\n *\n * @return {function(string): string} A function that takes a string as input and returns a string\n * with leading whitespace removed if it is the first invocation; otherwise, it returns the input unchanged.\n *\n * @example\n * const trimStart = trimStartOfStreamHelper();\n * const output1 = trimStart(\" text\"); // \"text\"\n * const output2 = trimStart(\" text\"); // \" text\"\n *\n */\nexport function trimStartOfStreamHelper(): (text: string) => string {\n let isStreamStart = true;\n\n return (text: string): string => {\n if (isStreamStart) {\n text = text.trimStart();\n if (text) isStreamStart = false;\n }\n return text;\n };\n}\n\n/**\n * Returns a ReadableStream created from the response, parsed and handled with custom logic.\n * The stream goes through two transformation stages, first parsing the events and then\n * invoking the provided callbacks.\n *\n * For 2xx HTTP responses:\n * - The function continues with standard stream processing.\n *\n * For non-2xx HTTP responses:\n * - If the response body is defined, it asynchronously extracts and decodes the response body.\n * - It then creates a custom ReadableStream to propagate a detailed error message.\n *\n * @param {Response} response - The response.\n * @param {AIStreamParser} customParser - The custom parser function.\n * @param {AIStreamCallbacksAndOptions} callbacks - The callbacks.\n * @return {ReadableStream} The AIStream.\n * @throws Will throw an error if the response is not OK.\n */\nexport function AIStream(\n response: Response,\n customParser?: AIStreamParser,\n callbacks?: AIStreamCallbacksAndOptions,\n): ReadableStream<Uint8Array> {\n if (!response.ok) {\n if (response.body) {\n const reader = response.body.getReader();\n return new ReadableStream({\n async start(controller) {\n const { done, value } = await reader.read();\n if (!done) {\n const errorText = new TextDecoder().decode(value);\n controller.error(new Error(`Response error: ${errorText}`));\n }\n },\n });\n } else {\n return new ReadableStream({\n start(controller) {\n controller.error(new Error('Response error: No response body'));\n },\n });\n }\n }\n\n const responseBodyStream = response.body || createEmptyReadableStream();\n\n return responseBodyStream\n .pipeThrough(createEventStreamTransformer(customParser))\n .pipeThrough(createCallbacksTransformer(callbacks));\n}\n\n// outputs lines like\n// 0: chunk\n// 0: more chunk\n// 1: a fct call\n// z: added data from Data\n\n/**\n * Creates an empty ReadableStream that immediately closes upon creation.\n * This function is used as a fallback for creating a ReadableStream when the response body is null or undefined,\n * ensuring that the subsequent pipeline processing doesn't fail due to a lack of a stream.\n *\n * @returns {ReadableStream} An empty and closed ReadableStream instance.\n */\nfunction createEmptyReadableStream(): ReadableStream {\n return new ReadableStream({\n start(controller) {\n controller.close();\n },\n });\n}\n\n/**\n * Implements ReadableStream.from(asyncIterable), which isn't documented in MDN and isn't implemented in node.\n * https://github.com/whatwg/streams/commit/8d7a0bf26eb2cc23e884ddbaac7c1da4b91cf2bc\n */\nexport function readableFromAsyncIterable<T>(iterable: AsyncIterable<T>) {\n let it = iterable[Symbol.asyncIterator]();\n return new ReadableStream<T>({\n async pull(controller) {\n const { done, value } = await it.next();\n if 
(done) controller.close();\n else controller.enqueue(value);\n },\n\n async cancel(reason) {\n await it.return?.(reason);\n },\n });\n}\n","import {\n AssistantMessage,\n DataMessage,\n FunctionCall,\n JSONValue,\n} from './types';\nimport { StreamString } from './utils';\n\nexport interface StreamPart<CODE extends string, NAME extends string, TYPE> {\n code: CODE;\n name: NAME;\n parse: (value: JSONValue) => { type: NAME; value: TYPE };\n}\n\nconst textStreamPart: StreamPart<'0', 'text', string> = {\n code: '0',\n name: 'text',\n parse: (value: JSONValue) => {\n if (typeof value !== 'string') {\n throw new Error('\"text\" parts expect a string value.');\n }\n return { type: 'text', value };\n },\n};\n\nconst functionCallStreamPart: StreamPart<\n '1',\n 'function_call',\n { function_call: FunctionCall }\n> = {\n code: '1',\n name: 'function_call',\n parse: (value: JSONValue) => {\n if (\n value == null ||\n typeof value !== 'object' ||\n !('function_call' in value) ||\n typeof value.function_call !== 'object' ||\n value.function_call == null ||\n !('name' in value.function_call) ||\n !('arguments' in value.function_call) ||\n typeof value.function_call.name !== 'string' ||\n typeof value.function_call.arguments !== 'string'\n ) {\n throw new Error(\n '\"function_call\" parts expect an object with a \"function_call\" property.',\n );\n }\n\n return {\n type: 'function_call',\n value: value as unknown as { function_call: FunctionCall },\n };\n },\n};\n\nconst dataStreamPart: StreamPart<'2', 'data', Array<JSONValue>> = {\n code: '2',\n name: 'data',\n parse: (value: JSONValue) => {\n if (!Array.isArray(value)) {\n throw new Error('\"data\" parts expect an array value.');\n }\n\n return { type: 'data', value };\n },\n};\n\nconst errorStreamPart: StreamPart<'3', 'error', string> = {\n code: '3',\n name: 'error',\n parse: (value: JSONValue) => {\n if (typeof value !== 'string') {\n throw new Error('\"error\" parts expect a string value.');\n }\n return { type: 'error', value };\n },\n};\n\nconst assistantMessageStreamPart: StreamPart<\n '4',\n 'assistant_message',\n AssistantMessage\n> = {\n code: '4',\n name: 'assistant_message',\n parse: (value: JSONValue) => {\n if (\n value == null ||\n typeof value !== 'object' ||\n !('id' in value) ||\n !('role' in value) ||\n !('content' in value) ||\n typeof value.id !== 'string' ||\n typeof value.role !== 'string' ||\n value.role !== 'assistant' ||\n !Array.isArray(value.content) ||\n !value.content.every(\n item =>\n item != null &&\n typeof item === 'object' &&\n 'type' in item &&\n item.type === 'text' &&\n 'text' in item &&\n item.text != null &&\n typeof item.text === 'object' &&\n 'value' in item.text &&\n typeof item.text.value === 'string',\n )\n ) {\n throw new Error(\n '\"assistant_message\" parts expect an object with an \"id\", \"role\", and \"content\" property.',\n );\n }\n\n return {\n type: 'assistant_message',\n value: value as AssistantMessage,\n };\n },\n};\n\nconst assistantControlDataStreamPart: StreamPart<\n '5',\n 'assistant_control_data',\n {\n threadId: string;\n messageId: string;\n }\n> = {\n code: '5',\n name: 'assistant_control_data',\n parse: (value: JSONValue) => {\n if (\n value == null ||\n typeof value !== 'object' ||\n !('threadId' in value) ||\n !('messageId' in value) ||\n typeof value.threadId !== 'string' ||\n typeof value.messageId !== 'string'\n ) {\n throw new Error(\n '\"assistant_control_data\" parts expect an object with a \"threadId\" and \"messageId\" property.',\n );\n }\n\n return {\n type: 
'assistant_control_data',\n value: {\n threadId: value.threadId,\n messageId: value.messageId,\n },\n };\n },\n};\n\nconst dataMessageStreamPart: StreamPart<'6', 'data_message', DataMessage> = {\n code: '6',\n name: 'data_message',\n parse: (value: JSONValue) => {\n if (\n value == null ||\n typeof value !== 'object' ||\n !('role' in value) ||\n !('data' in value) ||\n typeof value.role !== 'string' ||\n value.role !== 'data'\n ) {\n throw new Error(\n '\"data_message\" parts expect an object with a \"role\" and \"data\" property.',\n );\n }\n\n return {\n type: 'data_message',\n value: value as DataMessage,\n };\n },\n};\n\nconst streamParts = [\n textStreamPart,\n functionCallStreamPart,\n dataStreamPart,\n errorStreamPart,\n assistantMessageStreamPart,\n assistantControlDataStreamPart,\n dataMessageStreamPart,\n] as const;\n\n// union type of all stream parts\ntype StreamParts =\n | typeof textStreamPart\n | typeof functionCallStreamPart\n | typeof dataStreamPart\n | typeof errorStreamPart\n | typeof assistantMessageStreamPart\n | typeof assistantControlDataStreamPart\n | typeof dataMessageStreamPart;\n\n/**\n * Maps the type of a stream part to its value type.\n */\ntype StreamPartValueType = {\n [P in StreamParts as P['name']]: ReturnType<P['parse']>['value'];\n};\n\nexport type StreamPartType =\n | ReturnType<typeof textStreamPart.parse>\n | ReturnType<typeof functionCallStreamPart.parse>\n | ReturnType<typeof dataStreamPart.parse>\n | ReturnType<typeof errorStreamPart.parse>\n | ReturnType<typeof assistantMessageStreamPart.parse>\n | ReturnType<typeof assistantControlDataStreamPart.parse>\n | ReturnType<typeof dataMessageStreamPart.parse>;\n\nexport const streamPartsByCode = {\n [textStreamPart.code]: textStreamPart,\n [functionCallStreamPart.code]: functionCallStreamPart,\n [dataStreamPart.code]: dataStreamPart,\n [errorStreamPart.code]: errorStreamPart,\n [assistantMessageStreamPart.code]: assistantMessageStreamPart,\n [assistantControlDataStreamPart.code]: assistantControlDataStreamPart,\n [dataMessageStreamPart.code]: dataMessageStreamPart,\n} as const;\n\n/**\n * The map of prefixes for data in the stream\n *\n * - 0: Text from the LLM response\n * - 1: (OpenAI) function_call responses\n * - 2: custom JSON added by the user using `Data`\n *\n * Example:\n * ```\n * 0:Vercel\n * 0:'s\n * 0: AI\n * 0: AI\n * 0: SDK\n * 0: is great\n * 0:!\n * 2: { \"someJson\": \"value\" }\n * 1: {\"function_call\": {\"name\": \"get_current_weather\", \"arguments\": \"{\\\\n\\\\\"location\\\\\": \\\\\"Charlottesville, Virginia\\\\\",\\\\n\\\\\"format\\\\\": \\\\\"celsius\\\\\"\\\\n}\"}}\n *```\n */\nexport const StreamStringPrefixes = {\n [textStreamPart.name]: textStreamPart.code,\n [functionCallStreamPart.name]: functionCallStreamPart.code,\n [dataStreamPart.name]: dataStreamPart.code,\n [errorStreamPart.name]: errorStreamPart.code,\n [assistantMessageStreamPart.name]: assistantMessageStreamPart.code,\n [assistantControlDataStreamPart.name]: assistantControlDataStreamPart.code,\n [dataMessageStreamPart.name]: dataMessageStreamPart.code,\n} as const;\n\nexport const validCodes = streamParts.map(part => part.code);\n\n/**\n * Parses a stream part from a string.\n *\n * @param line The string to parse.\n * @returns The parsed stream part.\n * @throws An error if the string cannot be parsed.\n */\nexport const parseStreamPart = (line: string): StreamPartType => {\n const firstSeparatorIndex = line.indexOf(':');\n\n if (firstSeparatorIndex === -1) {\n throw new Error('Failed to parse stream string. 
No separator found.');\n }\n\n const prefix = line.slice(0, firstSeparatorIndex);\n\n if (!validCodes.includes(prefix as keyof typeof streamPartsByCode)) {\n throw new Error(`Failed to parse stream string. Invalid code ${prefix}.`);\n }\n\n const code = prefix as keyof typeof streamPartsByCode;\n\n const textValue = line.slice(firstSeparatorIndex + 1);\n const jsonValue: JSONValue = JSON.parse(textValue);\n\n return streamPartsByCode[code].parse(jsonValue);\n};\n\n/**\n * Prepends a string with a prefix from the `StreamChunkPrefixes`, JSON-ifies it,\n * and appends a new line.\n *\n * It ensures type-safety for the part type and value.\n */\nexport function formatStreamPart<T extends keyof StreamPartValueType>(\n type: T,\n value: StreamPartValueType[T],\n): StreamString {\n const streamPart = streamParts.find(part => part.name === type);\n\n if (!streamPart) {\n throw new Error(`Invalid stream part type: ${type}`);\n }\n\n return `${streamPart.code}:${JSON.stringify(value)}\\n`;\n}\n","import { formatStreamPart } from '../shared/stream-parts';\nimport { JSONValue } from '../shared/types';\n\n/**\n * A stream wrapper to send custom JSON-encoded data back to the client.\n */\nexport class experimental_StreamData {\n private encoder = new TextEncoder();\n\n private controller: TransformStreamDefaultController<Uint8Array> | null =\n null;\n public stream: TransformStream<Uint8Array, Uint8Array>;\n\n // closing the stream is synchronous, but we want to return a promise\n // in case we're doing async work\n private isClosedPromise: Promise<void> | null = null;\n private isClosedPromiseResolver: undefined | (() => void) = undefined;\n private isClosed: boolean = false;\n\n // array to store appended data\n private data: JSONValue[] = [];\n constructor() {\n this.isClosedPromise = new Promise(resolve => {\n this.isClosedPromiseResolver = resolve;\n });\n\n const self = this;\n this.stream = new TransformStream({\n start: async controller => {\n self.controller = controller;\n },\n transform: async (chunk, controller) => {\n // add buffered data to the stream\n if (self.data.length > 0) {\n const encodedData = self.encoder.encode(\n formatStreamPart('data', self.data),\n );\n self.data = [];\n controller.enqueue(encodedData);\n }\n\n controller.enqueue(chunk);\n },\n async flush(controller) {\n // Show a warning during dev if the data stream is hanging after 3 seconds.\n const warningTimeout =\n process.env.NODE_ENV === 'development'\n ? setTimeout(() => {\n console.warn(\n 'The data stream is hanging. Did you forget to close it with `data.close()`?',\n );\n }, 3000)\n : null;\n\n await self.isClosedPromise;\n\n if (warningTimeout !== null) {\n clearTimeout(warningTimeout);\n }\n\n if (self.data.length) {\n const encodedData = self.encoder.encode(\n formatStreamPart('data', self.data),\n );\n controller.enqueue(encodedData);\n }\n },\n });\n }\n\n async close(): Promise<void> {\n if (this.isClosed) {\n throw new Error('Data Stream has already been closed.');\n }\n\n if (!this.controller) {\n throw new Error('Stream controller is not initialized.');\n }\n\n this.isClosedPromiseResolver?.();\n this.isClosed = true;\n }\n\n append(value: JSONValue): void {\n if (this.isClosed) {\n throw new Error('Data Stream has already been closed.');\n }\n\n this.data.push(value);\n }\n}\n\n/**\n * A TransformStream for LLMs that do not have their own transform stream handlers managing encoding (e.g. 
OpenAIStream has one for function call handling).\n * This assumes every chunk is a 'text' chunk.\n */\nexport function createStreamDataTransformer(\n experimental_streamData: boolean | undefined,\n) {\n if (!experimental_streamData) {\n return new TransformStream({\n transform: async (chunk, controller) => {\n controller.enqueue(chunk);\n },\n });\n }\n const encoder = new TextEncoder();\n const decoder = new TextDecoder();\n return new TransformStream({\n transform: async (chunk, controller) => {\n const message = decoder.decode(chunk);\n controller.enqueue(encoder.encode(formatStreamPart('text', message)));\n },\n });\n}\n","import {\n AIStreamCallbacksAndOptions,\n createCallbacksTransformer,\n readableFromAsyncIterable,\n} from './ai-stream';\nimport { createStreamDataTransformer } from './stream-data';\n\ninterface AWSBedrockResponse {\n body?: AsyncIterable<{\n chunk?: { bytes?: Uint8Array };\n }>;\n}\n\nasync function* asDeltaIterable(\n response: AWSBedrockResponse,\n extractTextDeltaFromChunk: (chunk: any) => string,\n) {\n const decoder = new TextDecoder();\n for await (const chunk of response.body ?? []) {\n const bytes = chunk.chunk?.bytes;\n\n if (bytes != null) {\n const chunkText = decoder.decode(bytes);\n const chunkJSON = JSON.parse(chunkText);\n const delta = extractTextDeltaFromChunk(chunkJSON);\n\n if (delta != null) {\n yield delta;\n }\n }\n }\n}\n\nexport function AWSBedrockAnthropicStream(\n response: AWSBedrockResponse,\n callbacks?: AIStreamCallbacksAndOptions,\n): ReadableStream {\n return AWSBedrockStream(response, callbacks, chunk => chunk.completion);\n}\n\nexport function AWSBedrockCohereStream(\n response: AWSBedrockResponse,\n callbacks?: AIStreamCallbacksAndOptions,\n): ReadableStream {\n return AWSBedrockStream(\n response,\n callbacks,\n // As of 2023-11-17, Bedrock does not support streaming for Cohere,\n // so we take the full generation:\n chunk => chunk.generations?.[0]?.text,\n );\n}\n\nexport function AWSBedrockLlama2Stream(\n response: AWSBedrockResponse,\n callbacks?: AIStreamCallbacksAndOptions,\n): ReadableStream {\n return AWSBedrockStream(response, callbacks, chunk => chunk.generation);\n}\n\nexport function AWSBedrockStream(\n response: AWSBedrockResponse,\n callbacks: AIStreamCallbacksAndOptions | undefined,\n extractTextDeltaFromChunk: (chunk: any) => string,\n) {\n return readableFromAsyncIterable(\n asDeltaIterable(response, extractTextDeltaFromChunk),\n )\n .pipeThrough(createCallbacksTransformer(callbacks))\n .pipeThrough(\n createStreamDataTransformer(callbacks?.experimental_streamData),\n );\n}\n","import { customAlphabet } from 'nanoid/non-secure';\nimport {\n StreamPartType,\n StreamStringPrefixes,\n parseStreamPart,\n} from './stream-parts';\n\n// 7-character random string\nexport const nanoid = customAlphabet(\n '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz',\n 7,\n);\n\n// simple decoder signatures:\nfunction createChunkDecoder(): (chunk: Uint8Array | undefined) => string;\nfunction createChunkDecoder(\n complex: false,\n): (chunk: Uint8Array | undefined) => string;\n// complex decoder signature:\nfunction createChunkDecoder(\n complex: true,\n): (chunk: Uint8Array | undefined) => StreamPartType[];\n// combined signature for when the client calls this function with a boolean:\nfunction createChunkDecoder(\n complex?: boolean,\n): (chunk: Uint8Array | undefined) => StreamPartType[] | string;\nfunction createChunkDecoder(complex?: boolean) {\n const decoder = new TextDecoder();\n\n if (!complex) {\n return 
function (chunk: Uint8Array | undefined): string {\n if (!chunk) return '';\n return decoder.decode(chunk, { stream: true });\n };\n }\n\n return function (chunk: Uint8Array | undefined) {\n const decoded = decoder\n .decode(chunk, { stream: true })\n .split('\\n')\n .filter(line => line !== ''); // splitting leaves an empty string at the end\n\n return decoded.map(parseStreamPart).filter(Boolean);\n };\n}\n\nexport { createChunkDecoder };\n\nexport const isStreamStringEqualToType = (\n type: keyof typeof StreamStringPrefixes,\n value: string,\n): value is StreamString =>\n value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith('\\n');\n\nexport type StreamString =\n `${(typeof StreamStringPrefixes)[keyof typeof StreamStringPrefixes]}:${string}\\n`;\n\n/**\n * A header sent to the client so it knows how to handle parsing the stream (as a deprecated text response or using the new prefixed protocol)\n */\nexport const COMPLEX_HEADER = 'X-Experimental-Stream-Data';\n","import { formatStreamPart } from '../shared/stream-parts';\nimport {\n CreateMessage,\n FunctionCall,\n JSONValue,\n Message,\n} from '../shared/types';\nimport { createChunkDecoder } from '../shared/utils';\n\nimport {\n AIStream,\n trimStartOfStreamHelper,\n type AIStreamCallbacksAndOptions,\n FunctionCallPayload,\n readableFromAsyncIterable,\n createCallbacksTransformer,\n} from './ai-stream';\nimport { createStreamDataTransformer } from './stream-data';\n\nexport type OpenAIStreamCallbacks = AIStreamCallbacksAndOptions & {\n /**\n * @example\n * ```js\n * const response = await openai.chat.completions.create({\n * model: 'gpt-3.5-turbo-0613',\n * stream: true,\n * messages,\n * functions,\n * })\n *\n * const stream = OpenAIStream(response, {\n * experimental_onFunctionCall: async (functionCallPayload, createFunctionCallMessages) => {\n * // ... 
run your custom logic here\n * const result = await myFunction(functionCallPayload)\n *\n * // Ask for another completion, or return a string to send to the client as an assistant message.\n * return await openai.chat.completions.create({\n * model: 'gpt-3.5-turbo-0613',\n * stream: true,\n * // Append the relevant \"assistant\" and \"function\" call messages\n * messages: [...messages, ...createFunctionCallMessages(result)],\n * functions,\n * })\n * }\n * })\n * ```\n */\n experimental_onFunctionCall?: (\n functionCallPayload: FunctionCallPayload,\n createFunctionCallMessages: (\n functionCallResult: JSONValue,\n ) => CreateMessage[],\n ) => Promise<\n Response | undefined | void | string | AsyncIterableOpenAIStreamReturnTypes\n >;\n};\n\n// https://github.com/openai/openai-node/blob/07b3504e1c40fd929f4aae1651b83afc19e3baf8/src/resources/chat/completions.ts#L28-L40\ninterface ChatCompletionChunk {\n id: string;\n choices: Array<ChatCompletionChunkChoice>;\n created: number;\n model: string;\n object: string;\n}\n\n// https://github.com/openai/openai-node/blob/07b3504e1c40fd929f4aae1651b83afc19e3baf8/src/resources/chat/completions.ts#L43-L49\n// Updated for https://github.com/openai/openai-node/commit/f10c757d831d90407ba47b4659d9cd34b1a35b1d\n// Updated to https://github.com/openai/openai-node/commit/84b43280089eacdf18f171723591856811beddce\ninterface ChatCompletionChunkChoice {\n delta: ChoiceDelta;\n finish_reason:\n | 'stop'\n | 'length'\n | 'tool_calls'\n | 'content_filter'\n | 'function_call'\n | null;\n index: number;\n}\n\n// https://github.com/openai/openai-node/blob/07b3504e1c40fd929f4aae1651b83afc19e3baf8/src/resources/chat/completions.ts#L123-L139\n// Updated to https://github.com/openai/openai-node/commit/84b43280089eacdf18f171723591856811beddce\ninterface ChoiceDelta {\n /**\n * The contents of the chunk message.\n */\n content?: string | null;\n\n /**\n * The name and arguments of a function that should be called, as generated by the\n * model.\n */\n function_call?: FunctionCall;\n\n /**\n * The role of the author of this message.\n */\n role?: 'system' | 'user' | 'assistant' | 'tool';\n\n tool_calls?: Array<DeltaToolCall>;\n}\n\n// From https://github.com/openai/openai-node/blob/master/src/resources/chat/completions.ts\n// Updated to https://github.com/openai/openai-node/commit/84b43280089eacdf18f171723591856811beddce\ninterface DeltaToolCall {\n index: number;\n\n /**\n * The ID of the tool call.\n */\n id?: string;\n\n function?: ToolCallFunction;\n\n /**\n * The type of the tool. Currently, only `function` is supported.\n */\n type?: 'function';\n}\n\n// From https://github.com/openai/openai-node/blob/master/src/resources/chat/completions.ts\n// Updated to https://github.com/openai/openai-node/commit/84b43280089eacdf18f171723591856811beddce\ninterface ToolCallFunction {\n /**\n * The arguments to call the function with, as generated by the model in JSON\n * format. Note that the model does not always generate valid JSON, and may\n * hallucinate parameters not defined by your function schema. Validate the\n * arguments in your code before calling your function.\n */\n arguments?: string;\n\n /**\n * The name of the function to call.\n */\n name?: string;\n}\n\n/**\n * https://github.com/openai/openai-node/blob/3ec43ee790a2eb6a0ccdd5f25faa23251b0f9b8e/src/resources/completions.ts#L28C1-L64C1\n * Completions API. 
Streamed and non-streamed responses are the same.\n */\ninterface Completion {\n /**\n * A unique identifier for the completion.\n */\n id: string;\n\n /**\n * The list of completion choices the model generated for the input prompt.\n */\n choices: Array<CompletionChoice>;\n\n /**\n * The Unix timestamp of when the completion was created.\n */\n created: number;\n\n /**\n * The model used for completion.\n */\n model: string;\n\n /**\n * The object type, which is always \"text_completion\"\n */\n object: string;\n\n /**\n * Usage statistics for the completion request.\n */\n usage?: CompletionUsage;\n}\n\ninterface CompletionChoice {\n /**\n * The reason the model stopped generating tokens. This will be `stop` if the model\n * hit a natural stop point or a provided stop sequence, or `length` if the maximum\n * number of tokens specified in the request was reached.\n */\n finish_reason: 'stop' | 'length' | 'content_filter';\n\n index: number;\n\n // edited: Removed CompletionChoice.logProbs and replaced with any\n logprobs: any | null;\n\n text: string;\n}\n\nexport interface CompletionUsage {\n /**\n * Usage statistics for the completion request.\n */\n\n /**\n * Number of tokens in the generated completion.\n */\n completion_tokens: number;\n\n /**\n * Number of tokens in the prompt.\n */\n prompt_tokens: number;\n\n /**\n * Total number of tokens used in the request (prompt + completion).\n */\n total_tokens: number;\n}\n\n/**\n * Creates a parser function for processing the OpenAI stream data.\n * The parser extracts and trims text content from the JSON data. This parser\n * can handle data for chat or completion models.\n *\n * @return {(data: string) => string | void} A parser function that takes a JSON string as input and returns the extracted text content or nothing.\n */\nfunction parseOpenAIStream(): (data: string) => string | void {\n const extract = chunkToText();\n return data => {\n return extract(JSON.parse(data) as OpenAIStreamReturnTypes);\n };\n}\n\n/**\n * Reads chunks from OpenAI's new Streamable interface, which is essentially\n * the same as the old Response body interface with an included SSE parser\n * doing the parsing for us.\n */\nasync function* streamable(stream: AsyncIterableOpenAIStreamReturnTypes) {\n const extract = chunkToText();\n for await (const chunk of stream) {\n const text = extract(chunk);\n if (text) yield text;\n }\n}\n\nfunction chunkToText(): (chunk: OpenAIStreamReturnTypes) => string | void {\n const trimStartOfStream = trimStartOfStreamHelper();\n let isFunctionStreamingIn: boolean;\n return json => {\n /*\n If the response is a function call, the first streaming chunk from OpenAI returns the name of the function like so\n\n {\n ...\n \"choices\": [{\n \"index\": 0,\n \"delta\": {\n \"role\": \"assistant\",\n \"content\": null,\n \"function_call\": {\n \"name\": \"get_current_weather\",\n \"arguments\": \"\"\n }\n },\n \"finish_reason\": null\n }]\n }\n\n Then, it begins streaming the arguments for the function call.\n The second chunk looks like:\n\n {\n ...\n \"choices\": [{\n \"index\": 0,\n \"delta\": {\n \"function_call\": {\n \"arguments\": \"{\\n\"\n }\n },\n \"finish_reason\": null\n }]\n }\n\n Third chunk:\n\n {\n ...\n \"choices\": [{\n \"index\": 0,\n \"delta\": {\n \"function_call\": {\n \"arguments\": \"\\\"location\"\n }\n },\n \"finish_reason\": null\n }]\n }\n\n ...\n\n Finally, the last chunk has a `finish_reason` of either `function_call`:\n\n {\n ...\n \"choices\": [{\n \"index\": 0,\n \"delta\": {},\n \"finish_reason\": 
\"function_call\"\n }]\n }\n\n or `stop`, when the `function_call` request parameter \n is specified with a particular function via `{\\\"name\\\": \\\"my_function\\\"}` \n\n {\n ...\n \"choices\": [{\n \"index\": 0,\n \"delta\": {},\n \"finish_reason\": \"stop\"\n }]\n }\n\n With the implementation below, the client will end up getting a\n response like the one below streamed to them whenever a function call\n response is returned:\n\n {\n \"function_call\": {\n \"name\": \"get_current_weather\",\n \"arguments\": \"{\\\"location\\\": \\\"San Francisco, CA\\\", \\\"format\\\": \\\"celsius\\\"}\n }\n }\n */\n if (\n isChatCompletionChunk(json) &&\n json.choices[0]?.delta?.function_call?.name\n ) {\n isFunctionStreamingIn = true;\n return `{\"function_call\": {\"name\": \"${json.choices[0]?.delta?.function_call.name}\", \"arguments\": \"`;\n } else if (\n isChatCompletionChunk(json) &&\n json.choices[0]?.delta?.function_call?.arguments\n ) {\n const argumentChunk: string =\n json.choices[0].delta.function_call.arguments;\n\n let escapedPartialJson = argumentChunk\n .replace(/\\\\/g, '\\\\\\\\') // Replace backslashes first to prevent double escaping\n .replace(/\\//g, '\\\\/') // Escape slashes\n .replace(/\"/g, '\\\\\"') // Escape double quotes\n .replace(/\\n/g, '\\\\n') // Escape new lines\n .replace(/\\r/g, '\\\\r') // Escape carriage returns\n .replace(/\\t/g, '\\\\t') // Escape tabs\n .replace(/\\f/g, '\\\\f'); // Escape form feeds\n\n return `${escapedPartialJson}`;\n } else if (\n isFunctionStreamingIn &&\n (json.choices[0]?.finish_reason === 'function_call' ||\n json.choices[0]?.finish_reason === 'stop')\n ) {\n isFunctionStreamingIn = false; // Reset the flag\n return '\"}}';\n }\n\n const text = trimStartOfStream(\n isChatCompletionChunk(json) && json.choices[0].delta.content\n ? json.choices[0].delta.content\n : isCompletion(json)\n ? json.choices[0].text\n : '',\n );\n return text;\n };\n}\n\nconst __internal__OpenAIFnMessagesSymbol = Symbol(\n 'internal_openai_fn_messages',\n);\n\ntype AsyncIterableOpenAIStreamReturnTypes =\n | AsyncIterable<ChatCompletionChunk>\n | AsyncIterable<Completion>;\n\ntype ExtractType<T> = T extends AsyncIterable<infer U> ? U : never;\n\ntype OpenAIStreamReturnTypes =\n ExtractType<AsyncIterableOpenAIStreamReturnTypes>;\n\nfunction isChatCompletionChunk(\n data: OpenAIStreamReturnTypes,\n): data is ChatCompletionChunk {\n return (\n 'choices' in data &&\n data.choices &&\n data.choices[0] &&\n 'delta' in data.choices[0]\n );\n}\n\nfunction isCompletion(data: OpenAIStreamReturnTypes): data is Completion {\n return (\n 'choices' in data &&\n data.choices &&\n data.choices[0] &&\n 'text' in data.choices[0]\n );\n}\n\nexport function OpenAIStream(\n res: Response | AsyncIterableOpenAIStreamReturnTypes,\n callbacks?: OpenAIStreamCallbacks,\n): ReadableStream {\n // Annotate the internal `messages` property for recursive function calls\n const cb:\n | undefined\n | (OpenAIStreamCallbacks & {\n [__internal__OpenAIFnMessagesSymbol]?: CreateMessage[];\n }) = callbacks;\n\n let stream: ReadableStream<Uint8Array>;\n if (Symbol.asyncIterator in res) {\n stream = readableFromAsyncIterable(streamable(res)).pipeThrough(\n createCallbacksTransformer(\n cb?.experimental_onFunctionCall\n ? {\n ...cb,\n onFinal: undefined,\n }\n : {\n ...cb,\n },\n ),\n );\n } else {\n stream = AIStream(\n res,\n parseOpenAIStream(),\n cb?.experimental_onFunctionCall\n ? 
{\n ...cb,\n onFinal: undefined,\n }\n : {\n ...cb,\n },\n );\n }\n\n if (cb && cb.experimental_onFunctionCall) {\n const functionCallTransformer = createFunctionCallTransformer(cb);\n return stream.pipeThrough(functionCallTransformer);\n } else {\n return stream.pipeThrough(\n createStreamDataTransformer(cb?.experimental_streamData),\n );\n }\n}\n\nfunction createFunctionCallTransformer(\n callbacks: OpenAIStreamCallbacks & {\n [__internal__OpenAIFnMessagesSymbol]?: CreateMessage[];\n },\n): TransformStream<Uint8Array, Uint8Array> {\n const textEncoder = new TextEncoder();\n let isFirstChunk = true;\n let aggregatedResponse = '';\n let aggregatedFinalCompletionResponse = '';\n let isFunctionStreamingIn = false;\n\n let functionCallMessages: CreateMessage[] =\n callbacks[__internal__OpenAIFnMessagesSymbol] || [];\n\n const isComplexMode = callbacks?.experimental_streamData;\n const decode = createChunkDecoder();\n\n return new TransformStream({\n async transform(chunk, controller): Promise<void> {\n const message = decode(chunk);\n aggregatedFinalCompletionResponse += message;\n\n const shouldHandleAsFunction =\n isFirstChunk && message.startsWith('{\"function_call\":');\n\n if (shouldHandleAsFunction) {\n isFunctionStreamingIn = true;\n aggregatedResponse += message;\n isFirstChunk = false;\n return;\n }\n\n // Stream as normal\n if (!isFunctionStreamingIn) {\n controller.enqueue(\n isComplexMode\n ? textEncoder.encode(formatStreamPart('text', message))\n : chunk,\n );\n return;\n } else {\n aggregatedResponse += message;\n }\n },\n async flush(controller): Promise<void> {\n try {\n const isEndOfFunction =\n !isFirstChunk &&\n callbacks.experimental_onFunctionCall &&\n isFunctionStreamingIn;\n\n // This callbacks.experimental_onFunctionCall check should not be necessary but TS complains\n if (isEndOfFunction && callbacks.experimental_onFunctionCall) {\n isFunctionStreamingIn = false;\n const payload = JSON.parse(aggregatedResponse);\n const argumentsPayload = JSON.parse(payload.function_call.arguments);\n\n // Append the function call message to the list\n let newFunctionCallMessages: CreateMessage[] = [\n ...functionCallMessages,\n ];\n\n const functionResponse = await callbacks.experimental_onFunctionCall(\n {\n name: payload.function_call.name,\n arguments: argumentsPayload,\n },\n result => {\n // Append the function call request and result messages to the list\n newFunctionCallMessages = [\n ...functionCallMessages,\n {\n role: 'assistant',\n content: '',\n function_call: payload.function_call,\n },\n {\n role: 'function',\n name: payload.function_call.name,\n content: JSON.stringify(result),\n },\n ];\n\n // Return it to the user\n return newFunctionCallMessages;\n },\n );\n\n if (!functionResponse) {\n // The user didn't do anything with the function call on the server and wants\n // to either do nothing or run it on the client\n // so we just return the function call as a message\n controller.enqueue(\n textEncoder.encode(\n isComplexMode\n ? formatStreamPart(\n 'function_call',\n // parse to prevent double-encoding:\n JSON.parse(aggregatedResponse),\n )\n : aggregatedResponse,\n ),\n );\n return;\n } else if (typeof functionResponse === 'string') {\n // The user returned a string, so we just return it as a message\n controller.enqueue(\n isComplexMode\n ? 
textEncoder.encode(formatStreamPart('text', functionResponse))\n : textEncoder.encode(functionResponse),\n );\n return;\n }\n\n // Recursively:\n\n // We don't want to trigger onStart or onComplete recursively\n // so we remove them from the callbacks\n // see https://github.com/vercel/ai/issues/351\n const filteredCallbacks: OpenAIStreamCallbacks = {\n ...callbacks,\n onStart: undefined,\n };\n // We only want onFinal to be called the _last_ time\n callbacks.onFinal = undefined;\n\n const openAIStream = OpenAIStream(functionResponse, {\n ...filteredCallbacks,\n [__internal__OpenAIFnMessagesSymbol]: newFunctionCallMessages,\n } as AIStreamCallbacksAndOptions);\n\n const reader = openAIStream.getReader();\n\n while (true) {\n const { done, value } = await reader.read();\n if (done) {\n break;\n }\n controller.enqueue(value);\n }\n }\n } finally {\n if (callbacks.onFinal && aggregatedFinalCompletionResponse) {\n await callbacks.onFinal(aggregatedFinalCompletionResponse);\n }\n }\n },\n });\n}\n","import type { ServerResponse } from 'node:http';\nimport { experimental_StreamData } from './stream-data';\nimport { COMPLEX_HEADER } from '../shared/utils';\n\n/**\n * A utility class for streaming text responses.\n */\nexport class StreamingTextResponse extends Response {\n constructor(\n res: ReadableStream,\n init?: ResponseInit,\n data?: experimental_StreamData,\n ) {\n let processedStream = res;\n\n if (data) {\n processedStream = res.pipeThrough(data.stream);\n }\n\n super(processedStream as any, {\n ...init,\n status: 200,\n headers: {\n 'Content-Type': 'text/plain; charset=utf-8',\n [COMPLEX_HEADER]: data ? 'true' : 'false',\n ...init?.headers,\n },\n });\n }\n}\n\n/**\n * A utility function to stream a ReadableStream to a Node.js response-like object.\n */\nexport function streamToResponse(\n res: ReadableStream,\n response: ServerResponse,\n init?: { headers?: Record<string, string>; status?: number },\n) {\n response.writeHead(init?.status || 200, {\n 'Content-Type': 'text/plain; charset=utf-8',\n ...init?.headers,\n });\n\n const reader = res.getReader();\n function read() {\n reader.read().then(({ done, value }: { done: boolean; value?: any }) => {\n if (done) {\n response.end();\n return;\n }\n response.write(value);\n read();\n });\n }\n read();\n}\n","import {\n type AIStreamCallbacksAndOptions,\n createCallbacksTransformer,\n trimStartOfStreamHelper,\n} from './ai-stream';\nimport { createStreamDataTransformer } from './stream-data';\n\nfunction createParser(res: AsyncGenerator<any>) {\n const trimStartOfStream = trimStartOfStreamHelper();\n return new ReadableStream<string>({\n async pull(controller): Promise<void> {\n const { value, done } = await res.next();\n\n if (done) {\n controller.close();\n return;\n }\n\n const text = trimStartOfStream(value.token?.text ?? 
'');\n if (!text) return;\n\n // some HF models return generated_text instead of a real ending token\n if (value.generated_text != null && value.generated_text.length > 0) {\n return;\n }\n\n // <|endoftext|> is for https://huggingface.co/OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5\n // <|end|> is for https://huggingface.co/HuggingFaceH4/starchat-beta\n // </s> is also often last token in the stream depending on the model\n if (text === '</s>' || text === '<|endoftext|>' || text === '<|end|>') {\n return;\n }\n\n controller.enqueue(text);\n },\n });\n}\n\nexport function HuggingFaceStream(\n res: AsyncGenerator<any>,\n callbacks?: AIStreamCallbacksAndOptions,\n): ReadableStream {\n return createParser(res)\n .pipeThrough(createCallbacksTransformer(callbacks))\n .pipeThrough(\n createStreamDataTransformer(callbacks?.experimental_streamData),\n );\n}\n","import {\n type AIStreamCallbacksAndOptions,\n createCallbacksTransformer,\n} from './ai-stream';\nimport { createStreamDataTransformer } from './stream-data';\n\nconst utf8Decoder = new TextDecoder('utf-8');\n\nasync function processLines(\n lines: string[],\n controller: ReadableStreamDefaultController<string>,\n) {\n for (const line of lines) {\n const { text, is_finished } = JSON.parse(line);\n\n // closing the reader is handed in readAndProcessLines\n if (!is_finished) {\n controller.enqueue(text);\n }\n }\n}\n\nasync function readAndProcessLines(\n reader: ReadableStreamDefaultReader<Uint8Array>,\n controller: ReadableStreamDefaultController<string>,\n) {\n let segment = '';\n\n while (true) {\n const { value: chunk, done } = await reader.read();\n if (done) {\n break;\n }\n\n segment += utf8Decoder.decode(chunk, { stream: true });\n\n const linesArray = segment.split(/\\r\\n|\\n|\\r/g);\n segment = linesArray.pop() || '';\n\n await processLines(linesArray, controller);\n }\n\n if (segment) {\n const linesArray = [segment];\n await processLines(linesArray, controller);\n }\n\n controller.close();\n}\n\nfunction createParser(res: Response) {\n const reader = res.body?.getReader();\n\n return new ReadableStream<string>({\n async start(controller): Promise<void> {\n if (!reader) {\n controller.close();\n return;\n }\n\n await readAndProcessLines(reader, controller);\n },\n });\n}\n\nexport function CohereStream(\n reader: Response,\n callbacks?: AIStreamCallbacksAndOptions,\n): ReadableStream {\n return createParser(reader)\n .pipeThrough(createCallbacksTransformer(callbacks))\n .pipeThrough(\n createStreamDataTransformer(callbacks?.experimental_streamData),\n );\n}\n","import {\n AIStream,\n readableFromAsyncIterable,\n type AIStreamCallbacksAndOptions,\n createCallbacksTransformer,\n} from './ai-stream';\nimport { createStreamDataTransformer } from './stream-data';\n\n// https://github.com/anthropics/anthropic-sdk-typescript/blob/0fc31f4f1ae2976afd0af3236e82d9e2c84c43c9/src/resources/completions.ts#L28-L49\ninterface CompletionChunk {\n /**\n * The resulting completion up to and excluding the stop sequences.\n */\n completion: string;\n\n /**\n * The model that performed the completion.\n */\n model: string;\n\n /**\n * The reason that we stopped sampling.\n *\n * This may be one the following values:\n *\n * - `\"stop_sequence\"`: we reached a stop sequence — either provided by you via the\n * `stop_sequences` parameter, or a stop sequence built into the model\n * - `\"max_tokens\"`: we exceeded `max_tokens_to_sample` or the model's maximum\n */\n stop_reason: string;\n}\n\ninterface StreamError {\n error: {\n type: string;\n 
message: string;\n };\n}\n\ninterface StreamPing {}\n\ntype StreamData = CompletionChunk | StreamError | StreamPing;\n\nfunction parseAnthropicStream(): (data: string) => string | void {\n let previous = '';\n\n return data => {\n const json = JSON.parse(data as string) as StreamData;\n\n // error event\n if ('error' in json) {\n throw new Error(`${json.error.type}: ${json.error.message}`);\n }\n\n // ping event\n if (!('completion' in json)) {\n return;\n }\n\n // On API versions older than 2023-06-01,\n // Anthropic's `completion` field is cumulative unlike OpenAI's\n // deltas. In order to compute the delta, we must slice out the text\n // we previously received.\n const text = json.completion;\n if (\n !previous ||\n (text.length > previous.length && text.startsWith(previous))\n ) {\n const delta = text.slice(previous.length);\n previous = text;\n\n return delta;\n }\n\n return text;\n };\n}\n\nasync function* streamable(stream: AsyncIterable<CompletionChunk>) {\n for await (const chunk of stream) {\n const text = chunk.completion;\n if (text) yield text;\n }\n}\n\n/**\n * Accepts either a fetch Response from the Anthropic `POST /v1/complete` endpoint,\n * or the return value of `await client.completions.create({ stream: true })`\n * from the `@anthropic-ai/sdk` package.\n */\nexport function AnthropicStream(\n res: Response | AsyncIterable<CompletionChunk>,\n cb?: AIStreamCallbacksAndOptions,\n): ReadableStream {\n if (Symbol.asyncIterator in res) {\n return readableFromAsyncIterable(streamable(res))\n .pipeThrough(createCallbacksTransformer(cb))\n .pipeThrough(createStreamDataTransformer(cb?.experimental_streamData));\n } else {\n return AIStream(res, parseAnthropicStream(), cb).pipeThrough(\n createStreamDataTransformer(cb?.experimental_streamData),\n );\n }\n}\n","import {\n type AIStreamCallbacksAndOptions,\n createCallbacksTransformer,\n} from './ai-stream';\nimport { createStreamDataTransformer } from './stream-data';\n\nexport function LangChainStream(callbacks?: AIStreamCallbacksAndOptions) {\n const stream = new TransformStream();\n const writer = stream.writable.getWriter();\n\n const runs = new Set();\n\n const handleError = async (e: Error, runId: string) => {\n runs.delete(runId);\n await writer.ready;\n await writer.abort(e);\n };\n\n const handleStart = async (runId: string) => {\n runs.add(runId);\n };\n\n const handleEnd = async (runId: string) => {\n runs.delete(runId);\n\n if (runs.size === 0) {\n await writer.ready;\n await writer.close();\n }\n };\n\n return {\n stream: stream.readable\n .pipeThrough(createCallbacksTransformer(callbacks))\n .pipeThrough(\n createStreamDataTransformer(callbacks?.experimental_streamData),\n ),\n writer,\n handlers: {\n handleLLMNewToken: async (token: string) => {\n await writer.ready;\n await writer.write(token);\n },\n handleLLMStart: async (_llm: any, _prompts: string[], runId: string) => {\n handleStart(runId);\n },\n handleLLMEnd: async (_output: any, runId: string) => {\n await handleEnd(runId);\n },\n handleLLMError: async (e: Error, runId: string) => {\n await handleError(e, runId);\n },\n handleChainStart: async (_chain: any, _inputs: any, runId: string) => {\n handleStart(runId);\n },\n handleChainEnd: async (_outputs: any, runId: string) => {\n await handleEnd(runId);\n },\n handleChainError: async (e: Error, runId: string) => {\n await handleError(e, runId);\n },\n handleToolStart: async (_tool: any, _input: string, runId: string) => {\n handleStart(runId);\n },\n handleToolEnd: async (_output: string, runId: string) => 
{\n await handleEnd(runId);\n },\n handleToolError: async (e: Error, runId: string) => {\n await handleError(e, runId);\n },\n },\n };\n}\n","import { AIStream, type AIStreamCallbacksAndOptions } from './ai-stream';\nimport { createStreamDataTransformer } from './stream-data';\n\n// from replicate SDK\ninterface Prediction {\n id: string;\n status: 'starting' | 'processing' | 'succeeded' | 'failed' | 'canceled';\n version: string;\n input: object;\n output?: any;\n source: 'api' | 'web';\n error?: any;\n logs?: string;\n metrics?: {\n predict_time?: number;\n };\n webhook?: string;\n webhook_events_filter?: ('start' | 'output' | 'logs' | 'completed')[];\n created_at: string;\n updated_at?: string;\n completed_at?: string;\n urls: {\n get: string;\n cancel: string;\n stream?: string;\n };\n}\n\n/**\n * Stream predictions from Replicate.\n * Only certain models are supported and you must pass `stream: true` to\n * replicate.predictions.create().\n * @see https://github.com/replicate/replicate-javascript#streaming\n *\n * @example\n * const response = await replicate.predictions.create({\n * stream: true,\n * input: {\n * prompt: messages.join('\\n')\n * },\n * version: '2c1608e18606fad2812020dc541930f2d0495ce32eee50074220b87300bc16e1'\n * })\n *\n * const stream = await ReplicateStream(response)\n * return new StreamingTextResponse(stream)\n *\n */\nexport async function ReplicateStream(\n res: Prediction,\n cb?: AIStreamCallbacksAndOptions,\n options?: {\n headers?: Record<string, string>;\n },\n): Promise<ReadableStream> {\n const url = res.urls?.stream;\n\n if (!url) {\n if (res.error) throw new Error(res.error);\n else throw new Error('Missing stream URL in Replicate response');\n }\n\n const eventStream = await fetch(url, {\n method: 'GET',\n headers: {\n Accept: 'text/event-stream',\n ...options?.headers,\n },\n });\n\n return AIStream(eventStream, undefined, cb).pipeThrough(\n createStreamDataTransformer(cb?.experimental_streamData),\n );\n}\n","import { StreamPartType, parseStreamPart } from './stream-parts';\n\nconst NEWLINE = '\\n'.charCodeAt(0);\n\n// concatenates all the chunks into a single Uint8Array\nfunction concatChunks(chunks: Uint8Array[], totalLength: number) {\n const concatenatedChunks = new Uint8Array(totalLength);\n\n let offset = 0;\n for (const chunk of chunks) {\n concatenatedChunks.set(chunk, offset);\n offset += chunk.length;\n }\n chunks.length = 0;\n\n return concatenatedChunks;\n}\n\nexport async function* readDataStream(\n reader: ReadableStreamDefaultReader<Uint8Array>,\n {\n isAborted,\n }: {\n isAborted?: () => boolean;\n } = {},\n): AsyncGenerator<StreamPartType> {\n // implementation note: this slightly more complex algorithm is required\n // to pass the tests in the edge environment.\n\n const decoder = new TextDecoder();\n const chunks: Uint8Array[] = [];\n let totalLength = 0;\n\n while (true) {\n const { value } = await reader.read();\n\n if (value) {\n chunks.push(value);\n totalLength += value.length;\n if (value[value.length - 1] !== NEWLINE) {\n // if the last character is not a newline, we have not read the whole JSON value\n continue;\n }\n }\n\n if (chunks.length === 0) {\n break; // we have reached the end of the stream\n }\n\n const concatenatedChunks = concatChunks(chunks, totalLength);\n totalLength = 0;\n\n const streamParts = decoder\n .decode(concatenatedChunks, { stream: true })\n .split('\\n')\n .filter(line => line !== '') // splitting leaves an empty string at the end\n .map(parseStreamPart);\n\n for (const streamPart of 
streamParts) {\n yield streamPart;\n }\n\n // The request has been aborted, stop reading the stream.\n if (isAborted?.()) {\n reader.cancel();\n break;\n }\n }\n}\n","import { readDataStream } from './read-data-stream';\nimport type { FunctionCall, JSONValue, Message } from './types';\nimport { nanoid } from './utils';\n\ntype PrefixMap = {\n text?: Message;\n function_call?: Message & {\n role: 'assistant';\n function_call: FunctionCall;\n };\n data: JSONValue[];\n};\n\nexport async function parseComplexResponse({\n reader,\n abortControllerRef,\n update,\n onFinish,\n generateId = nanoid,\n getCurrentDate = () => new Date(),\n}: {\n reader: ReadableStreamDefaultReader<Uint8Array>;\n abortControllerRef?: {\n current: AbortController | null;\n };\n update: (merged: Message[], data: JSONValue[] | undefined) => void;\n onFinish?: (prefixMap: PrefixMap) => void;\n generateId?: () => string;\n getCurrentDate?: () => Date;\n}) {\n const createdAt = getCurrentDate();\n const prefixMap: PrefixMap = {\n data: [],\n };\n\n // we create a map of each prefix, and for each prefixed message we push to the map\n for await (const { type, value } of readDataStream(reader, {\n isAborted: () => abortControllerRef?.current === null,\n })) {\n if (type === 'text') {\n if (prefixMap['text']) {\n prefixMap['text'] = {\n ...prefixMap['text'],\n content: (prefixMap['text'].content || '') + value,\n };\n } else {\n prefixMap['text'] = {\n id: generateId(),\n role: 'assistant',\n content: value,\n createdAt,\n };\n }\n }\n\n let functionCallMessage: Message | null = null;\n\n if (type === 'function_call') {\n prefixMap['function_call'] = {\n id: generateId(),\n role: 'assistant',\n content: '',\n function_call: value.function_call,\n name: value.function_call.name,\n createdAt,\n };\n\n functionCallMessage = prefixMap['function_call'];\n }\n\n if (type === 'data') {\n prefixMap['data'].push(...value);\n }\n\n const responseMessage = prefixMap['text'];\n\n // We add function calls and response messages to the messages[], but data is its own thing\n const merged = [functionCallMessage, responseMessage].filter(\n Boolean,\n ) as Message[];\n\n update(merged, [...prefixMap['data']]); // make a copy of the data array\n }\n\n onFinish?.(prefixMap);\n\n return {\n messages: [prefixMap.text, prefixMap.function_call].filter(\n Boolean,\n ) as Message[],\n data: prefixMap.data,\n };\n}\n","/**\n * This is a naive implementation of the streaming React response API.\n * Currently, it can carry the original raw content, data payload and a special\n * UI payload and stream them via \"rows\" (nested promises).\n * It must be used inside Server Actions so Flight can encode the React elements.\n *\n * It is naive as unlike the StreamingTextResponse, it does not send the diff\n * between the rows, but flushing the full payload on each row.\n */\n\nimport { parseComplexResponse } from '../shared/parse-complex-response';\nimport { IdGenerator, JSONValue } from '../shared/types';\nimport { createChunkDecoder, nanoid } from '../shared/utils';\nimport { experimental_StreamData } from './stream-data';\n\ntype UINode = string | JSX.Element | JSX.Element[] | null | undefined;\n\ntype Payload = {\n ui: UINode | Promise<UINode>;\n content: string;\n};\n\nexport type ReactResponseRow = Payload & {\n next: null | Promise<ReactResponseRow>;\n};\n\n/**\n * A utility class for streaming React responses.\n */\nexport class experimental_StreamingReactResponse {\n constructor(\n res: ReadableStream,\n options?: {\n ui?: (message: {\n content: 
string;\n data?: JSONValue[] | undefined;\n }) => UINode | Promise<UINode>;\n data?: experimental_StreamData;\n generateId?: IdGenerator;\n },\n ) {\n let resolveFunc: (row: ReactResponseRow) => void = () => {};\n let next = new Promise<ReactResponseRow>(resolve => {\n resolveFunc = resolve;\n });\n\n if (options?.data) {\n const processedStream: ReadableStream<Uint8Array> = res.pipeThrough(\n options.data.stream,\n );\n\n let lastPayload: Payload | undefined = undefined;\n\n // runs asynchronously (no await on purpose)\n parseComplexResponse({\n reader: processedStream.getReader(),\n update: (merged, data) => {\n const content = merged[0]?.content ?? '';\n const ui = options?.ui?.({ content, data }) || content;\n const payload: Payload = { ui, content };\n\n const resolvePrevious = resolveFunc;\n const nextRow = new Promise<ReactResponseRow>(resolve => {\n resolveFunc = resolve;\n });\n\n resolvePrevious({\n next: nextRow,\n ...payload,\n });\n\n lastPayload = payload;\n },\n generateId: options.generateId ?? nanoid,\n onFinish: () => {\n // The last payload is resolved twice. This is necessary because we immediately\n // push out a payload, but we also need to forward the finish event with a payload.\n if (lastPayload !== undefined) {\n resolveFunc({\n next: null,\n ...lastPayload,\n });\n }\n },\n });\n\n return next;\n }\n\n let content = '';\n\n const decode = createChunkDecoder();\n const reader = res.getReader();\n async function readChunk() {\n const { done, value } = await reader.read();\n if (!done) {\n content += decode(value);\n }\n\n // TODO: Handle generators. With this current implementation we can support\n // synchronous and asynchronous UIs.\n // TODO: Handle function calls.\n const ui = options?.ui?.({ content }) || content;\n\n const payload: Payload = {\n ui,\n content,\n };\n\n const resolvePrevious = resolveFunc;\n const nextRow = done\n ? null\n : new Promise<ReactResponseRow>(resolve => {\n resolveFunc = resolve;\n });\n resolvePrevious({\n next: nextRow,\n ...payload,\n });\n\n if (done) {\n return;\n }\n\n await readChunk();\n }\n readChunk();\n\n return next;\n }\n}\n","import { formatStreamPart } from '../shared/stream-parts';\nimport { AssistantMessage, DataMessage } from '../shared/types';\n\nexport function experimental_AssistantResponse(\n { threadId, messageId }: { threadId: string; messageId: string },\n process: (stream: {\n threadId: string;\n messageId: string;\n sendMessage: (message: AssistantMessage) => void;\n sendDataMessage: (message: DataMessage) => void;\n }) => Promise<void>,\n): Response {\n const stream = new ReadableStream({\n async start(controller) {\n const textEncoder = new TextEncoder();\n\n const sendMessage = (message: AssistantMessage) => {\n controller.enqueue(\n textEncoder.encode(formatStreamPart('assistant_message', message)),\n );\n };\n\n const sendDataMessage = (message: DataMessage) => {\n controller.enqueue(\n textEncoder.encode(formatStreamPart('data_message', message)),\n );\n };\n\n const sendError = (errorMessage: string) => {\n controller.enqueue(\n textEncoder.encode(formatStreamPart('error', errorMessage)),\n );\n };\n\n // send the threadId and messageId as the first message:\n controller.enqueue(\n textEncoder.encode(\n formatStreamPart('assistant_control_data', {\n threadId,\n messageId,\n }),\n ),\n );\n\n try {\n await process({\n threadId,\n messageId,\n sendMessage,\n sendDataMessage,\n });\n } catch (error) {\n sendError((error as any).message ?? 
`${error}`);\n } finally {\n controller.close();\n }\n },\n pull(controller) {},\n cancel() {},\n });\n\n return new Response(stream, {\n status: 200,\n headers: {\n 'Content-Type': 'text/plain; charset=utf-8',\n },\n });\n}\n"],"mappings":";AAAA;AAAA,EACE;AAAA,OAIK;AA6CA,SAAS,6BACd,cACqC;AACrC,QAAM,cAAc,IAAI,YAAY;AACpC,MAAI;AAEJ,SAAO,IAAI,gBAAgB;AAAA,IACzB,MAAM,MAAM,YAA2B;AACrC,0BAAoB;AAAA,QAClB,CAAC,UAA2C;AAC1C,cACG,UAAU,SACT,MAAM,SAAS,WACf,MAAM,SAAS;AAAA;AAAA,UAGhB,MAAc,UAAU,QACzB;AACA,uBAAW,UAAU;AACrB;AAAA,UACF;AAEA,cAAI,UAAU,OAAO;AACnB,kBAAM,gBAAgB,eAClB,aAAa,MAAM,IAAI,IACvB,MAAM;AACV,gBAAI;AAAe,yBAAW,QAAQ,aAAa;AAAA,UACrD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,IAEA,UAAU,OAAO;AACf,wBAAkB,KAAK,YAAY,OAAO,KAAK,CAAC;AAAA,IAClD;AAAA,EACF,CAAC;AACH;AAwBO,SAAS,2BACd,IACqC;AACrC,QAAM,cAAc,IAAI,YAAY;AACpC,MAAI,qBAAqB;AACzB,QAAM,YAAY,MAAM,CAAC;AAEzB,SAAO,IAAI,gBAAgB;AAAA,IACzB,MAAM,QAAuB;AAC3B,UAAI,UAAU;AAAS,cAAM,UAAU,QAAQ;AAAA,IACjD;AAAA,IAEA,MAAM,UAAU,SAAS,YAA2B;AAClD,iBAAW,QAAQ,YAAY,OAAO,OAAO,CAAC;AAE9C,4BAAsB;AACtB,UAAI,UAAU;AAAS,cAAM,UAAU,QAAQ,OAAO;AAAA,IACxD;AAAA,IAEA,MAAM,QAAuB;AAC3B,YAAM,oBAAoB,8BAA8B,SAAS;AAGjE,UAAI,UAAU,cAAc;AAC1B,cAAM,UAAU,aAAa,kBAAkB;AAAA,MACjD;AAEA,UAAI,UAAU,WAAW,CAAC,mBAAmB;AAC3C,cAAM,UAAU,QAAQ,kBAAkB;AAAA,MAC5C;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAEA,SAAS,8BACP,WACoC;AACpC,SAAO,iCAAiC;AAC1C;AAgBO,SAAS,0BAAoD;AAClE,MAAI,gBAAgB;AAEpB,SAAO,CAAC,SAAyB;AAC/B,QAAI,eAAe;AACjB,aAAO,KAAK,UAAU;AACtB,UAAI;AAAM,wBAAgB;AAAA,IAC5B;AACA,WAAO;AAAA,EACT;AACF;AAoBO,SAAS,SACd,UACA,cACA,WAC4B;AAC5B,MAAI,CAAC,SAAS,IAAI;AAChB,QAAI,SAAS,MAAM;AACjB,YAAM,SAAS,SAAS,KAAK,UAAU;AACvC,aAAO,IAAI,eAAe;AAAA,QACxB,MAAM,MAAM,YAAY;AACtB,gBAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,cAAI,CAAC,MAAM;AACT,kBAAM,YAAY,IAAI,YAAY,EAAE,OAAO,KAAK;AAChD,uBAAW,MAAM,IAAI,MAAM,mBAAmB,WAAW,CAAC;AAAA,UAC5D;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH,OAAO;AACL,aAAO,IAAI,eAAe;AAAA,QACxB,MAAM,YAAY;AAChB,qBAAW,MAAM,IAAI,MAAM,kCAAkC,CAAC;AAAA,QAChE;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,qBAAqB,SAAS,QAAQ,0BAA0B;AAEtE,SAAO,mBACJ,YAAY,6BAA6B,YAAY,CAAC,EACtD,YAAY,2BAA2B,SAAS,CAAC;AACtD;AAeA,SAAS,4BAA4C;AACnD,SAAO,IAAI,eAAe;AAAA,IACxB,MAAM,YAAY;AAChB,iBAAW,MAAM;AAAA,IACnB;AAAA,EACF,CAAC;AACH;AAMO,SAAS,0BAA6B,UAA4B;AACvE,MAAI,KAAK,SAAS,OAAO,aAAa,EAAE;AACxC,SAAO,IAAI,eAAkB;AAAA,IAC3B,MAAM,KAAK,YAAY;AACrB,YAAM,EAAE,MAAM,MAAM,IAAI,MAAM,GAAG,KAAK;AACtC,UAAI;AAAM,mBAAW,MAAM;AAAA;AACtB,mBAAW,QAAQ,KAAK;AAAA,IAC/B;AAAA,IAEA,MAAM,OAAO,QAAQ;AArQzB;AAsQM,cAAM,QAAG,WAAH,4BAAY;AAAA,IACpB;AAAA,EACF,CAAC;AACH;;;AC3PA,IAAM,iBAAkD;AAAA,EACtD,MAAM;AAAA,EACN,MAAM;AAAA,EACN,OAAO,CAAC,UAAqB;AAC3B,QAAI,OAAO,UAAU,UAAU;AAC7B,YAAM,IAAI,MAAM,qCAAqC;AAAA,IACvD;AACA,WAAO,EAAE,MAAM,QAAQ,MAAM;AAAA,EAC/B;AACF;AAEA,IAAM,yBAIF;AAAA,EACF,MAAM;AAAA,EACN,MAAM;AAAA,EACN,OAAO,CAAC,UAAqB;AAC3B,QACE,SAAS,QACT,OAAO,UAAU,YACjB,EAAE,mBAAmB,UACrB,OAAO,MAAM,kBAAkB,YAC/B,MAAM,iBAAiB,QACvB,EAAE,UAAU,MAAM,kBAClB,EAAE,eAAe,MAAM,kBACvB,OAAO,MAAM,cAAc,SAAS,YACpC,OAAO,MAAM,cAAc,cAAc,UACzC;AACA,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAM,iBAA4D;AAAA,EAChE,MAAM;AAAA,EACN,MAAM;AAAA,EACN,OAAO,CAAC,UAAqB;AAC3B,QAAI,CAAC,MAAM,QAAQ,KAAK,GAAG;AACzB,YAAM,IAAI,MAAM,qCAAqC;AAAA,IACvD;AAEA,WAAO,EAAE,MAAM,QAAQ,MAAM;AAAA,EAC/B;AACF;AAEA,IAAM,kBAAoD;AAAA,EACxD,MAAM;AAAA,EACN,MAAM;AAAA,EACN,OAAO,CAAC,UAAqB;AAC3B,QAAI,OAAO,UAAU,UAAU;AAC7B,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AACA,WAAO,EAAE,MAAM,SAAS,MAAM;AAAA,EAChC;AACF;AAEA,IAAM,6BAIF;AAAA,EACF,MAAM;AAAA,EACN,MAAM;AAAA,EACN,OAAO,CAAC,UAAqB;AAC3B,QACE,SAAS,QACT,OAAO,UAAU,YACjB,EAAE,QAAQ,UACV,EA
AE,UAAU,UACZ,EAAE,aAAa,UACf,OAAO,MAAM,OAAO,YACpB,OAAO,MAAM,SAAS,YACtB,MAAM,SAAS,eACf,CAAC,MAAM,QAAQ,MAAM,OAAO,KAC5B,CAAC,MAAM,QAAQ;AAAA,MACb,UACE,QAAQ,QACR,OAAO,SAAS,YAChB,UAAU,QACV,KAAK,SAAS,UACd,UAAU,QACV,KAAK,QAAQ,QACb,OAAO,KAAK,SAAS,YACrB,WAAW,KAAK,QAChB,OAAO,KAAK,KAAK,UAAU;AAAA,IAC/B,GACA;AACA,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAM,iCAOF;AAAA,EACF,MAAM;AAAA,EACN,MAAM;AAAA,EACN,OAAO,CAAC,UAAqB;AAC3B,QACE,SAAS,QACT,OAAO,UAAU,YACjB,EAAE,cAAc,UAChB,EAAE,eAAe,UACjB,OAAO,MAAM,aAAa,YAC1B,OAAO,MAAM,cAAc,UAC3B;AACA,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,MACL,MAAM;AAAA,MACN,OAAO;AAAA,QACL,UAAU,MAAM;AAAA,QAChB,WAAW,MAAM;AAAA,MACnB;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAM,wBAAsE;AAAA,EAC1E,MAAM;AAAA,EACN,MAAM;AAAA,EACN,OAAO,CAAC,UAAqB;AAC3B,QACE,SAAS,QACT,OAAO,UAAU,YACjB,EAAE,UAAU,UACZ,EAAE,UAAU,UACZ,OAAO,MAAM,SAAS,YACtB,MAAM,SAAS,QACf;AACA,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAM,cAAc;AAAA,EAClB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AA4BO,IAAM,oBAAoB;AAAA,EAC/B,CAAC,eAAe,IAAI,GAAG;AAAA,EACvB,CAAC,uBAAuB,IAAI,GAAG;AAAA,EAC/B,CAAC,eAAe,IAAI,GAAG;AAAA,EACvB,CAAC,gBAAgB,IAAI,GAAG;AAAA,EACxB,CAAC,2BAA2B,IAAI,GAAG;AAAA,EACnC,CAAC,+BAA+B,IAAI,GAAG;AAAA,EACvC,CAAC,sBAAsB,IAAI,GAAG;AAChC;AAsBO,IAAM,uBAAuB;AAAA,EAClC,CAAC,eAAe,IAAI,GAAG,eAAe;AAAA,EACtC,CAAC,uBAAuB,IAAI,GAAG,uBAAuB;AAAA,EACtD,CAAC,eAAe,IAAI,GAAG,eAAe;AAAA,EACtC,CAAC,gBAAgB,IAAI,GAAG,gBAAgB;AAAA,EACxC,CAAC,2BAA2B,IAAI,GAAG,2BAA2B;AAAA,EAC9D,CAAC,+BAA+B,IAAI,GAAG,+BAA+B;AAAA,EACtE,CAAC,sBAAsB,IAAI,GAAG,sBAAsB;AACtD;AAEO,IAAM,aAAa,YAAY,IAAI,UAAQ,KAAK,IAAI;AASpD,IAAM,kBAAkB,CAAC,SAAiC;AAC/D,QAAM,sBAAsB,KAAK,QAAQ,GAAG;AAE5C,MAAI,wBAAwB,IAAI;AAC9B,UAAM,IAAI,MAAM,oDAAoD;AAAA,EACtE;AAEA,QAAM,SAAS,KAAK,MAAM,GAAG,mBAAmB;AAEhD,MAAI,CAAC,WAAW,SAAS,MAAwC,GAAG;AAClE,UAAM,IAAI,MAAM,+CAA+C,SAAS;AAAA,EAC1E;AAEA,QAAM,OAAO;AAEb,QAAM,YAAY,KAAK,MAAM,sBAAsB,CAAC;AACpD,QAAM,YAAuB,KAAK,MAAM,SAAS;AAEjD,SAAO,kBAAkB,IAAI,EAAE,MAAM,SAAS;AAChD;AAQO,SAAS,iBACd,MACA,OACc;AACd,QAAM,aAAa,YAAY,KAAK,UAAQ,KAAK,SAAS,IAAI;AAE9D,MAAI,CAAC,YAAY;AACf,UAAM,IAAI,MAAM,6BAA6B,MAAM;AAAA,EACrD;AAEA,SAAO,GAAG,WAAW,QAAQ,KAAK,UAAU,KAAK;AAAA;AACnD;;;ACzSO,IAAM,0BAAN,MAA8B;AAAA,EAenC,cAAc;AAdd,SAAQ,UAAU,IAAI,YAAY;AAElC,SAAQ,aACN;AAKF;AAAA;AAAA,SAAQ,kBAAwC;AAChD,SAAQ,0BAAoD;AAC5D,SAAQ,WAAoB;AAG5B;AAAA,SAAQ,OAAoB,CAAC;AAE3B,SAAK,kBAAkB,IAAI,QAAQ,aAAW;AAC5C,WAAK,0BAA0B;AAAA,IACjC,CAAC;AAED,UAAM,OAAO;AACb,SAAK,SAAS,IAAI,gBAAgB;AAAA,MAChC,OAAO,OAAM,eAAc;AACzB,aAAK,aAAa;AAAA,MACpB;AAAA,MACA,WAAW,OAAO,OAAO,eAAe;AAEtC,YAAI,KAAK,KAAK,SAAS,GAAG;AACxB,gBAAM,cAAc,KAAK,QAAQ;AAAA,YAC/B,iBAAiB,QAAQ,KAAK,IAAI;AAAA,UACpC;AACA,eAAK,OAAO,CAAC;AACb,qBAAW,QAAQ,WAAW;AAAA,QAChC;AAEA,mBAAW,QAAQ,KAAK;AAAA,MAC1B;AAAA,MACA,MAAM,MAAM,YAAY;AAEtB,cAAM,iBACJ,QAAQ,IAAI,aAAa,gBACrB,WAAW,MAAM;AACf,kBAAQ;AAAA,YACN;AAAA,UACF;AAAA,QACF,GAAG,GAAI,IACP;AAEN,cAAM,KAAK;AAEX,YAAI,mBAAmB,MAAM;AAC3B,uBAAa,cAAc;AAAA,QAC7B;AAEA,YAAI,KAAK,KAAK,QAAQ;AACpB,gBAAM,cAAc,KAAK,QAAQ;AAAA,YAC/B,iBAAiB,QAAQ,KAAK,IAAI;AAAA,UACpC;AACA,qBAAW,QAAQ,WAAW;AAAA,QAChC;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,QAAuB;AAtE/B;AAuEI,QAAI,KAAK,UAAU;AACjB,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAEA,QAAI,CAAC,KAAK,YAAY;AACpB,YAAM,IAAI,MAAM,uCAAuC;AAAA,IACzD;AAEA,eAAK,4BAAL;AACA,SAAK,WAAW;AAAA,EAClB;AAAA,EAEA,OAAO,OAAwB;AAC7B,QAAI,KAAK,UAAU;AACjB,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAEA,SAAK,KAAK,KAAK,KAAK;AAAA,EACtB;AACF;AAMO,SAAS,4BACd,yBACA;AACA,MAAI,CAAC,yBAAyB;AAC5B,WAAO,IAAI,gBAAgB;AAAA
,MACzB,WAAW,OAAO,OAAO,eAAe;AACtC,mBAAW,QAAQ,KAAK;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,EACH;AACA,QAAM,UAAU,IAAI,YAAY;AAChC,QAAM,UAAU,IAAI,YAAY;AAChC,SAAO,IAAI,gBAAgB;AAAA,IACzB,WAAW,OAAO,OAAO,eAAe;AACtC,YAAM,UAAU,QAAQ,OAAO,KAAK;AACpC,iBAAW,QAAQ,QAAQ,OAAO,iBAAiB,QAAQ,OAAO,CAAC,CAAC;AAAA,IACtE;AAAA,EACF,CAAC;AACH;;;ACrGA,gBAAgB,gBACd,UACA,2BACA;AAhBF;AAiBE,QAAM,UAAU,IAAI,YAAY;AAChC,mBAAiB,UAAS,cAAS,SAAT,YAAiB,CAAC,GAAG;AAC7C,UAAM,SAAQ,WAAM,UAAN,mBAAa;AAE3B,QAAI,SAAS,MAAM;AACjB,YAAM,YAAY,QAAQ,OAAO,KAAK;AACtC,YAAM,YAAY,KAAK,MAAM,SAAS;AACtC,YAAM,QAAQ,0BAA0B,SAAS;AAEjD,UAAI,SAAS,MAAM;AACjB,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACF;AAEO,SAAS,0BACd,UACA,WACgB;AAChB,SAAO,iBAAiB,UAAU,WAAW,WAAS,MAAM,UAAU;AACxE;AAEO,SAAS,uBACd,UACA,WACgB;AAChB,SAAO;AAAA,IACL;AAAA,IACA;AAAA;AAAA;AAAA,IAGA,WAAM;AAjDV;AAiDa,+BAAM,gBAAN,mBAAoB,OAApB,mBAAwB;AAAA;AAAA,EACnC;AACF;AAEO,SAAS,uBACd,UACA,WACgB;AAChB,SAAO,iBAAiB,UAAU,WAAW,WAAS,MAAM,UAAU;AACxE;AAEO,SAAS,iBACd,UACA,WACA,2BACA;AACA,SAAO;AAAA,IACL,gBAAgB,UAAU,yBAAyB;AAAA,EACrD,EACG,YAAY,2BAA2B,SAAS,CAAC,EACjD;AAAA,IACC,4BAA4B,uCAAW,uBAAuB;AAAA,EAChE;AACJ;;;ACxEA,SAAS,sBAAsB;AAQxB,IAAM,SAAS;AAAA,EACpB;AAAA,EACA;AACF;AAeA,SAAS,mBAAmB,SAAmB;AAC7C,QAAM,UAAU,IAAI,YAAY;AAEhC,MAAI,CAAC,SAAS;AACZ,WAAO,SAAU,OAAuC;AACtD,UAAI,CAAC;AAAO,eAAO;AACnB,aAAO,QAAQ,OAAO,OAAO,EAAE,QAAQ,KAAK,CAAC;AAAA,IAC/C;AAAA,EACF;AAEA,SAAO,SAAU,OAA+B;AAC9C,UAAM,UAAU,QACb,OAAO,OAAO,EAAE,QAAQ,KAAK,CAAC,EAC9B,MAAM,IAAI,EACV,OAAO,UAAQ,SAAS,EAAE;AAE7B,WAAO,QAAQ,IAAI,eAAe,EAAE,OAAO,OAAO;AAAA,EACpD;AACF;AAIO,IAAM,4BAA4B,CACvC,MACA,UAEA,MAAM,WAAW,GAAG,qBAAqB,IAAI,IAAI,KAAK,MAAM,SAAS,IAAI;AAQpE,IAAM,iBAAiB;;;AC8J9B,SAAS,oBAAqD;AAC5D,QAAM,UAAU,YAAY;AAC5B,SAAO,UAAQ;AACb,WAAO,QAAQ,KAAK,MAAM,IAAI,CAA4B;AAAA,EAC5D;AACF;AAOA,gBAAgB,WAAW,QAA8C;AACvE,QAAM,UAAU,YAAY;AAC5B,mBAAiB,SAAS,QAAQ;AAChC,UAAM,OAAO,QAAQ,KAAK;AAC1B,QAAI;AAAM,YAAM;AAAA,EAClB;AACF;AAEA,SAAS,cAAiE;AACxE,QAAM,oBAAoB,wBAAwB;AAClD,MAAI;AACJ,SAAO,UAAQ;AAjPjB;AAwUI,QACE,sBAAsB,IAAI,OAC1B,sBAAK,QAAQ,CAAC,MAAd,mBAAiB,UAAjB,mBAAwB,kBAAxB,mBAAuC,OACvC;AACA,8BAAwB;AACxB,aAAO,gCAA+B,gBAAK,QAAQ,CAAC,MAAd,mBAAiB,UAAjB,mBAAwB,cAAc;AAAA,IAC9E,WACE,sBAAsB,IAAI,OAC1B,sBAAK,QAAQ,CAAC,MAAd,mBAAiB,UAAjB,mBAAwB,kBAAxB,mBAAuC,YACvC;AACA,YAAM,gBACJ,KAAK,QAAQ,CAAC,EAAE,MAAM,cAAc;AAEtC,UAAI,qBAAqB,cACtB,QAAQ,OAAO,MAAM,EACrB,QAAQ,OAAO,KAAK,EACpB,QAAQ,MAAM,KAAK,EACnB,QAAQ,OAAO,KAAK,EACpB,QAAQ,OAAO,KAAK,EACpB,QAAQ,OAAO,KAAK,EACpB,QAAQ,OAAO,KAAK;AAEvB,aAAO,GAAG;AAAA,IACZ,WACE,4BACC,UAAK,QAAQ,CAAC,MAAd,mBAAiB,mBAAkB,qBAClC,UAAK,QAAQ,CAAC,MAAd,mBAAiB,mBAAkB,SACrC;AACA,8BAAwB;AACxB,aAAO;AAAA,IACT;AAEA,UAAM,OAAO;AAAA,MACX,sBAAsB,IAAI,KAAK,KAAK,QAAQ,CAAC,EAAE,MAAM,UACjD,KAAK,QAAQ,CAAC,EAAE,MAAM,UACtB,aAAa,IAAI,IACjB,KAAK,QAAQ,CAAC,EAAE,OAChB;AAAA,IACN;AACA,WAAO;AAAA,EACT;AACF;AAEA,IAAM,qCAAqC;AAAA,EACzC;AACF;AAWA,SAAS,sBACP,MAC6B;AAC7B,SACE,aAAa,QACb,KAAK,WACL,KAAK,QAAQ,CAAC,KACd,WAAW,KAAK,QAAQ,CAAC;AAE7B;AAEA,SAAS,aAAa,MAAmD;AACvE,SACE,aAAa,QACb,KAAK,WACL,KAAK,QAAQ,CAAC,KACd,UAAU,KAAK,QAAQ,CAAC;AAE5B;AAEO,SAAS,aACd,KACA,WACgB;AAEhB,QAAM,KAIG;AAET,MAAI;AACJ,MAAI,OAAO,iBAAiB,KAAK;AAC/B,aAAS,0BAA0B,WAAW,GAAG,CAAC,EAAE;AAAA,MAClD;AAAA,SACE,yBAAI,+BACA;AAAA,UACE,GAAG;AAAA,UACH,SAAS;AAAA,QACX,IACA;AAAA,UACE,GAAG;AAAA,QACL;AAAA,MACN;AAAA,IACF;AAAA,EACF,OAAO;AACL,aAAS;AAAA,MACP;AAAA,MACA,kBAAkB;AAAA,OAClB,yBAAI,+BACA;AAAA,QACE,GAAG;AAAA,QACH,SAAS;AAAA,MACX,IACA;AAAA,QACE,GAAG;AAAA,MACL;AAAA,IACN;AAAA,EACF;AAEA,MAAI,MAAM,GAAG,6BAA6B;AACxC,UAAM,0BAA0B,8BAA8B,EAAE;AAChE,WAAO,OAAO,YAAY,uBAAuB;AAAA,EACnD,OAAO;AACL,WAAO,OAAO;AAAA,MACZ,4BAA4B,yBAAI,uBAAuB;AAAA,IACzD;AAAA,EACF;AACF;AAEA,SAAS,8BACP,WAGyC;AACz
C,QAAM,cAAc,IAAI,YAAY;AACpC,MAAI,eAAe;AACnB,MAAI,qBAAqB;AACzB,MAAI,oCAAoC;AACxC,MAAI,wBAAwB;AAE5B,MAAI,uBACF,UAAU,kCAAkC,KAAK,CAAC;AAEpD,QAAM,gBAAgB,uCAAW;AACjC,QAAM,SAAS,mBAAmB;AAElC,SAAO,IAAI,gBAAgB;AAAA,IACzB,MAAM,UAAU,OAAO,YAA2B;AAChD,YAAM,UAAU,OAAO,KAAK;AAC5B,2CAAqC;AAErC,YAAM,yBACJ,gBAAgB,QAAQ,WAAW,mBAAmB;AAExD,UAAI,wBAAwB;AAC1B,gCAAwB;AACxB,8BAAsB;AACtB,uBAAe;AACf;AAAA,MACF;AAGA,UAAI,CAAC,uBAAuB;AAC1B,mBAAW;AAAA,UACT,gBACI,YAAY,OAAO,iBAAiB,QAAQ,OAAO,CAAC,IACpD;AAAA,QACN;AACA;AAAA,MACF,OAAO;AACL,8BAAsB;AAAA,MACxB;AAAA,IACF;AAAA,IACA,MAAM,MAAM,YAA2B;AACrC,UAAI;AACF,cAAM,kBACJ,CAAC,gBACD,UAAU,+BACV;AAGF,YAAI,mBAAmB,UAAU,6BAA6B;AAC5D,kCAAwB;AACxB,gBAAM,UAAU,KAAK,MAAM,kBAAkB;AAC7C,gBAAM,mBAAmB,KAAK,MAAM,QAAQ,cAAc,SAAS;AAGnE,cAAI,0BAA2C;AAAA,YAC7C,GAAG;AAAA,UACL;AAEA,gBAAM,mBAAmB,MAAM,UAAU;AAAA,YACvC;AAAA,cACE,MAAM,QAAQ,cAAc;AAAA,cAC5B,WAAW;AAAA,YACb;AAAA,YACA,YAAU;AAER,wCAA0B;AAAA,gBACxB,GAAG;AAAA,gBACH;AAAA,kBACE,MAAM;AAAA,kBACN,SAAS;AAAA,kBACT,eAAe,QAAQ;AAAA,gBACzB;AAAA,gBACA;AAAA,kBACE,MAAM;AAAA,kBACN,MAAM,QAAQ,cAAc;AAAA,kBAC5B,SAAS,KAAK,UAAU,MAAM;AAAA,gBAChC;AAAA,cACF;AAGA,qBAAO;AAAA,YACT;AAAA,UACF;AAEA,cAAI,CAAC,kBAAkB;AAIrB,uBAAW;AAAA,cACT,YAAY;AAAA,gBACV,gBACI;AAAA,kBACE;AAAA;AAAA,kBAEA,KAAK,MAAM,kBAAkB;AAAA,gBAC/B,IACA;AAAA,cACN;AAAA,YACF;AACA;AAAA,UACF,WAAW,OAAO,qBAAqB,UAAU;AAE/C,uBAAW;AAAA,cACT,gBACI,YAAY,OAAO,iBAAiB,QAAQ,gBAAgB,CAAC,IAC7D,YAAY,OAAO,gBAAgB;AAAA,YACzC;AACA;AAAA,UACF;AAOA,gBAAM,oBAA2C;AAAA,YAC/C,GAAG;AAAA,YACH,SAAS;AAAA,UACX;AAEA,oBAAU,UAAU;AAEpB,gBAAM,eAAe,aAAa,kBAAkB;AAAA,YAClD,GAAG;AAAA,YACH,CAAC,kCAAkC,GAAG;AAAA,UACxC,CAAgC;AAEhC,gBAAM,SAAS,aAAa,UAAU;AAEtC,iBAAO,MAAM;AACX,kBAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,gBAAI,MAAM;AACR;AAAA,YACF;AACA,uBAAW,QAAQ,KAAK;AAAA,UAC1B;AAAA,QACF;AAAA,MACF,UAAE;AACA,YAAI,UAAU,WAAW,mCAAmC;AAC1D,gBAAM,UAAU,QAAQ,iCAAiC;AAAA,QAC3D;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;ACnlBO,IAAM,wBAAN,cAAoC,SAAS;AAAA,EAClD,YACE,KACA,MACA,MACA;AACA,QAAI,kBAAkB;AAEtB,QAAI,MAAM;AACR,wBAAkB,IAAI,YAAY,KAAK,MAAM;AAAA,IAC/C;AAEA,UAAM,iBAAwB;AAAA,MAC5B,GAAG;AAAA,MACH,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,CAAC,cAAc,GAAG,OAAO,SAAS;AAAA,QAClC,GAAG,6BAAM;AAAA,MACX;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAKO,SAAS,iBACd,KACA,UACA,MACA;AACA,WAAS,WAAU,6BAAM,WAAU,KAAK;AAAA,IACtC,gBAAgB;AAAA,IAChB,GAAG,6BAAM;AAAA,EACX,CAAC;AAED,QAAM,SAAS,IAAI,UAAU;AAC7B,WAAS,OAAO;AACd,WAAO,KAAK,EAAE,KAAK,CAAC,EAAE,MAAM,MAAM,MAAsC;AACtE,UAAI,MAAM;AACR,iBAAS,IAAI;AACb;AAAA,MACF;AACA,eAAS,MAAM,KAAK;AACpB,WAAK;AAAA,IACP,CAAC;AAAA,EACH;AACA,OAAK;AACP;;;ACjDA,SAASA,cAAa,KAA0B;AAC9C,QAAM,oBAAoB,wBAAwB;AAClD,SAAO,IAAI,eAAuB;AAAA,IAChC,MAAM,KAAK,YAA2B;AAV1C;AAWM,YAAM,EAAE,OAAO,KAAK,IAAI,MAAM,IAAI,KAAK;AAEvC,UAAI,MAAM;AACR,mBAAW,MAAM;AACjB;AAAA,MACF;AAEA,YAAM,OAAO,mBAAkB,iBAAM,UAAN,mBAAa,SAAb,YAAqB,EAAE;AACtD,UAAI,CAAC;AAAM;AAGX,UAAI,MAAM,kBAAkB,QAAQ,MAAM,eAAe,SAAS,GAAG;AACnE;AAAA,MACF;AAKA,UAAI,SAAS,UAAU,SAAS,mBAAmB,SAAS,WAAW;AACrE;AAAA,MACF;AAEA,iBAAW,QAAQ,IAAI;AAAA,IACzB;AAAA,EACF,CAAC;AACH;AAEO,SAAS,kBACd,KACA,WACgB;AAChB,SAAOA,cAAa,GAAG,EACpB,YAAY,2BAA2B,SAAS,CAAC,EACjD;AAAA,IACC,4BAA4B,uCAAW,uBAAuB;AAAA,EAChE;AACJ;;;ACzCA,IAAM,cAAc,IAAI,YAAY,OAAO;AAE3C,eAAe,aACb,OACA,YACA;AACA,aAAW,QAAQ,OAAO;AACxB,UAAM,EAAE,MAAM,YAAY,IAAI,KAAK,MAAM,IAAI;AAG7C,QAAI,CAAC,aAAa;AAChB,iBAAW,QAAQ,IAAI;AAAA,IACzB;AAAA,EACF;AACF;AAEA,eAAe,oBACb,QACA,YACA;AACA,MAAI,UAAU;AAEd,SAAO,MAAM;AACX,UAAM,EAAE,OAAO,OAAO,KAAK,IAAI,MAAM,OAAO,KAAK;AACjD,QAAI,MAAM;AACR;AAAA,IACF;AAEA,eAAW,YAAY,OAAO,OAAO,EAAE,QAAQ,KAAK,CAAC;AAErD,UAAM,aAAa,QAAQ,MAAM,aAAa;AAC9C,cAAU,WAAW,IAAI,KAAK;AAE9B,UAAM,aAAa,YAAY,UAAU;AAAA,EAC3C
;AAEA,MAAI,SAAS;AACX,UAAM,aAAa,CAAC,OAAO;AAC3B,UAAM,aAAa,YAAY,UAAU;AAAA,EAC3C;AAEA,aAAW,MAAM;AACnB;AAEA,SAASC,cAAa,KAAe;AAlDrC;AAmDE,QAAM,UAAS,SAAI,SAAJ,mBAAU;AAEzB,SAAO,IAAI,eAAuB;AAAA,IAChC,MAAM,MAAM,YAA2B;AACrC,UAAI,CAAC,QAAQ;AACX,mBAAW,MAAM;AACjB;AAAA,MACF;AAEA,YAAM,oBAAoB,QAAQ,UAAU;AAAA,IAC9C;AAAA,EACF,CAAC;AACH;AAEO,SAAS,aACd,QACA,WACgB;AAChB,SAAOA,cAAa,MAAM,EACvB,YAAY,2BAA2B,SAAS,CAAC,EACjD;AAAA,IACC,4BAA4B,uCAAW,uBAAuB;AAAA,EAChE;AACJ;;;AC/BA,SAAS,uBAAwD;AAC/D,MAAI,WAAW;AAEf,SAAO,UAAQ;AACb,UAAM,OAAO,KAAK,MAAM,IAAc;AAGtC,QAAI,WAAW,MAAM;AACnB,YAAM,IAAI,MAAM,GAAG,KAAK,MAAM,SAAS,KAAK,MAAM,SAAS;AAAA,IAC7D;AAGA,QAAI,EAAE,gBAAgB,OAAO;AAC3B;AAAA,IACF;AAMA,UAAM,OAAO,KAAK;AAClB,QACE,CAAC,YACA,KAAK,SAAS,SAAS,UAAU,KAAK,WAAW,QAAQ,GAC1D;AACA,YAAM,QAAQ,KAAK,MAAM,SAAS,MAAM;AACxC,iBAAW;AAEX,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AACF;AAEA,gBAAgBC,YAAW,QAAwC;AACjE,mBAAiB,SAAS,QAAQ;AAChC,UAAM,OAAO,MAAM;AACnB,QAAI;AAAM,YAAM;AAAA,EAClB;AACF;AAOO,SAAS,gBACd,KACA,IACgB;AAChB,MAAI,OAAO,iBAAiB,KAAK;AAC/B,WAAO,0BAA0BA,YAAW,GAAG,CAAC,EAC7C,YAAY,2BAA2B,EAAE,CAAC,EAC1C,YAAY,4BAA4B,yBAAI,uBAAuB,CAAC;AAAA,EACzE,OAAO;AACL,WAAO,SAAS,KAAK,qBAAqB,GAAG,EAAE,EAAE;AAAA,MAC/C,4BAA4B,yBAAI,uBAAuB;AAAA,IACzD;AAAA,EACF;AACF;;;ACjGO,SAAS,gBAAgB,WAAyC;AACvE,QAAM,SAAS,IAAI,gBAAgB;AACnC,QAAM,SAAS,OAAO,SAAS,UAAU;AAEzC,QAAM,OAAO,oBAAI,IAAI;AAErB,QAAM,cAAc,OAAO,GAAU,UAAkB;AACrD,SAAK,OAAO,KAAK;AACjB,UAAM,OAAO;AACb,UAAM,OAAO,MAAM,CAAC;AAAA,EACtB;AAEA,QAAM,cAAc,OAAO,UAAkB;AAC3C,SAAK,IAAI,KAAK;AAAA,EAChB;AAEA,QAAM,YAAY,OAAO,UAAkB;AACzC,SAAK,OAAO,KAAK;AAEjB,QAAI,KAAK,SAAS,GAAG;AACnB,YAAM,OAAO;AACb,YAAM,OAAO,MAAM;AAAA,IACrB;AAAA,EACF;AAEA,SAAO;AAAA,IACL,QAAQ,OAAO,SACZ,YAAY,2BAA2B,SAAS,CAAC,EACjD;AAAA,MACC,4BAA4B,uCAAW,uBAAuB;AAAA,IAChE;AAAA,IACF;AAAA,IACA,UAAU;AAAA,MACR,mBAAmB,OAAO,UAAkB;AAC1C,cAAM,OAAO;AACb,cAAM,OAAO,MAAM,KAAK;AAAA,MAC1B;AAAA,MACA,gBAAgB,OAAO,MAAW,UAAoB,UAAkB;AACtE,oBAAY,KAAK;AAAA,MACnB;AAAA,MACA,cAAc,OAAO,SAAc,UAAkB;AACnD,cAAM,UAAU,KAAK;AAAA,MACvB;AAAA,MACA,gBAAgB,OAAO,GAAU,UAAkB;AACjD,cAAM,YAAY,GAAG,KAAK;AAAA,MAC5B;AAAA,MACA,kBAAkB,OAAO,QAAa,SAAc,UAAkB;AACpE,oBAAY,KAAK;AAAA,MACnB;AAAA,MACA,gBAAgB,OAAO,UAAe,UAAkB;AACtD,cAAM,UAAU,KAAK;AAAA,MACvB;AAAA,MACA,kBAAkB,OAAO,GAAU,UAAkB;AACnD,cAAM,YAAY,GAAG,KAAK;AAAA,MAC5B;AAAA,MACA,iBAAiB,OAAO,OAAY,QAAgB,UAAkB;AACpE,oBAAY,KAAK;AAAA,MACnB;AAAA,MACA,eAAe,OAAO,SAAiB,UAAkB;AACvD,cAAM,UAAU,KAAK;AAAA,MACvB;AAAA,MACA,iBAAiB,OAAO,GAAU,UAAkB;AAClD,cAAM,YAAY,GAAG,KAAK;AAAA,MAC5B;AAAA,IACF;AAAA,EACF;AACF;;;ACzBA,eAAsB,gBACpB,KACA,IACA,SAGyB;AArD3B;AAsDE,QAAM,OAAM,SAAI,SAAJ,mBAAU;AAEtB,MAAI,CAAC,KAAK;AACR,QAAI,IAAI;AAAO,YAAM,IAAI,MAAM,IAAI,KAAK;AAAA;AACnC,YAAM,IAAI,MAAM,0CAA0C;AAAA,EACjE;AAEA,QAAM,cAAc,MAAM,MAAM,KAAK;AAAA,IACnC,QAAQ;AAAA,IACR,SAAS;AAAA,MACP,QAAQ;AAAA,MACR,GAAG,mCAAS;AAAA,IACd;AAAA,EACF,CAAC;AAED,SAAO,SAAS,aAAa,QAAW,EAAE,EAAE;AAAA,IAC1C,4BAA4B,yBAAI,uBAAuB;AAAA,EACzD;AACF;;;ACtEA,IAAM,UAAU,KAAK,WAAW,CAAC;AAGjC,SAAS,aAAa,QAAsB,aAAqB;AAC/D,QAAM,qBAAqB,IAAI,WAAW,WAAW;AAErD,MAAI,SAAS;AACb,aAAW,SAAS,QAAQ;AAC1B,uBAAmB,IAAI,OAAO,MAAM;AACpC,cAAU,MAAM;AAAA,EAClB;AACA,SAAO,SAAS;AAEhB,SAAO;AACT;AAEA,gBAAuB,eACrB,QACA;AAAA,EACE;AACF,IAEI,CAAC,GAC2B;AAIhC,QAAM,UAAU,IAAI,YAAY;AAChC,QAAM,SAAuB,CAAC;AAC9B,MAAI,cAAc;AAElB,SAAO,MAAM;AACX,UAAM,EAAE,MAAM,IAAI,MAAM,OAAO,KAAK;AAEpC,QAAI,OAAO;AACT,aAAO,KAAK,KAAK;AACjB,qBAAe,MAAM;AACrB,UAAI,MAAM,MAAM,SAAS,CAAC,MAAM,SAAS;AAEvC;AAAA,MACF;AAAA,IACF;AAEA,QAAI,OAAO,WAAW,GAAG;AACvB;AAAA,IACF;AAEA,UAAM,qBAAqB,aAAa,QAAQ,WAAW;AAC3D,kBAAc;AAEd,UAAMC,eAAc,QACjB,OAAO,oBAAoB,EAAE,QAAQ,KAAK,CAAC,EAC3C,MAAM,IAAI,EACV,OAAO,UAAQ,SAAS,EAAE,EAC1B,IAAI,eAAe;AAEtB,eAAW,c
AAcA,cAAa;AACpC,YAAM;AAAA,IACR;AAGA,QAAI,0CAAe;AACjB,aAAO,OAAO;AACd;AAAA,IACF;AAAA,EACF;AACF;;;ACvDA,eAAsB,qBAAqB;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,aAAa;AAAA,EACb,iBAAiB,MAAM,oBAAI,KAAK;AAClC,GASG;AACD,QAAM,YAAY,eAAe;AACjC,QAAM,YAAuB;AAAA,IAC3B,MAAM,CAAC;AAAA,EACT;AAGA,mBAAiB,EAAE,MAAM,MAAM,KAAK,eAAe,QAAQ;AAAA,IACzD,WAAW,OAAM,yDAAoB,aAAY;AAAA,EACnD,CAAC,GAAG;AACF,QAAI,SAAS,QAAQ;AACnB,UAAI,UAAU,MAAM,GAAG;AACrB,kBAAU,MAAM,IAAI;AAAA,UAClB,GAAG,UAAU,MAAM;AAAA,UACnB,UAAU,UAAU,MAAM,EAAE,WAAW,MAAM;AAAA,QAC/C;AAAA,MACF,OAAO;AACL,kBAAU,MAAM,IAAI;AAAA,UAClB,IAAI,WAAW;AAAA,UACf,MAAM;AAAA,UACN,SAAS;AAAA,UACT;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,QAAI,sBAAsC;AAE1C,QAAI,SAAS,iBAAiB;AAC5B,gBAAU,eAAe,IAAI;AAAA,QAC3B,IAAI,WAAW;AAAA,QACf,MAAM;AAAA,QACN,SAAS;AAAA,QACT,eAAe,MAAM;AAAA,QACrB,MAAM,MAAM,cAAc;AAAA,QAC1B;AAAA,MACF;AAEA,4BAAsB,UAAU,eAAe;AAAA,IACjD;AAEA,QAAI,SAAS,QAAQ;AACnB,gBAAU,MAAM,EAAE,KAAK,GAAG,KAAK;AAAA,IACjC;AAEA,UAAM,kBAAkB,UAAU,MAAM;AAGxC,UAAM,SAAS,CAAC,qBAAqB,eAAe,EAAE;AAAA,MACpD;AAAA,IACF;AAEA,WAAO,QAAQ,CAAC,GAAG,UAAU,MAAM,CAAC,CAAC;AAAA,EACvC;AAEA,uCAAW;AAEX,SAAO;AAAA,IACL,UAAU,CAAC,UAAU,MAAM,UAAU,aAAa,EAAE;AAAA,MAClD;AAAA,IACF;AAAA,IACA,MAAM,UAAU;AAAA,EAClB;AACF;;;AC/DO,IAAM,sCAAN,MAA0C;AAAA,EAC/C,YACE,KACA,SAQA;AAxCJ;AAyCI,QAAI,cAA+C,MAAM;AAAA,IAAC;AAC1D,QAAI,OAAO,IAAI,QAA0B,aAAW;AAClD,oBAAc;AAAA,IAChB,CAAC;AAED,QAAI,mCAAS,MAAM;AACjB,YAAM,kBAA8C,IAAI;AAAA,QACtD,QAAQ,KAAK;AAAA,MACf;AAEA,UAAI,cAAmC;AAGvC,2BAAqB;AAAA,QACnB,QAAQ,gBAAgB,UAAU;AAAA,QAClC,QAAQ,CAAC,QAAQ,SAAS;AAxDlC,cAAAC,KAAA;AAyDU,gBAAMC,YAAU,MAAAD,MAAA,OAAO,CAAC,MAAR,gBAAAA,IAAW,YAAX,YAAsB;AACtC,gBAAM,OAAK,wCAAS,OAAT,iCAAc,EAAE,SAAAC,UAAS,KAAK,OAAMA;AAC/C,gBAAM,UAAmB,EAAE,IAAI,SAAAA,SAAQ;AAEvC,gBAAM,kBAAkB;AACxB,gBAAM,UAAU,IAAI,QAA0B,aAAW;AACvD,0BAAc;AAAA,UAChB,CAAC;AAED,0BAAgB;AAAA,YACd,MAAM;AAAA,YACN,GAAG;AAAA,UACL,CAAC;AAED,wBAAc;AAAA,QAChB;AAAA,QACA,aAAY,aAAQ,eAAR,YAAsB;AAAA,QAClC,UAAU,MAAM;AAGd,cAAI,gBAAgB,QAAW;AAC7B,wBAAY;AAAA,cACV,MAAM;AAAA,cACN,GAAG;AAAA,YACL,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF,CAAC;AAED,aAAO;AAAA,IACT;AAEA,QAAI,UAAU;AAEd,UAAM,SAAS,mBAAmB;AAClC,UAAM,SAAS,IAAI,UAAU;AAC7B,mBAAe,YAAY;AA7F/B,UAAAD;AA8FM,YAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,UAAI,CAAC,MAAM;AACT,mBAAW,OAAO,KAAK;AAAA,MACzB;AAKA,YAAM,OAAKA,MAAA,mCAAS,OAAT,gBAAAA,IAAA,cAAc,EAAE,QAAQ,OAAM;AAEzC,YAAM,UAAmB;AAAA,QACvB;AAAA,QACA;AAAA,MACF;AAEA,YAAM,kBAAkB;AACxB,YAAM,UAAU,OACZ,OACA,IAAI,QAA0B,aAAW;AACvC,sBAAc;AAAA,MAChB,CAAC;AACL,sBAAgB;AAAA,QACd,MAAM;AAAA,QACN,GAAG;AAAA,MACL,CAAC;AAED,UAAI,MAAM;AACR;AAAA,MACF;AAEA,YAAM,UAAU;AAAA,IAClB;AACA,cAAU;AAEV,WAAO;AAAA,EACT;AACF;;;AC/HO,SAAS,+BACd,EAAE,UAAU,UAAU,GACtBE,UAMU;AACV,QAAM,SAAS,IAAI,eAAe;AAAA,IAChC,MAAM,MAAM,YAAY;AAb5B;AAcM,YAAM,cAAc,IAAI,YAAY;AAEpC,YAAM,cAAc,CAAC,YAA8B;AACjD,mBAAW;AAAA,UACT,YAAY,OAAO,iBAAiB,qBAAqB,OAAO,CAAC;AAAA,QACnE;AAAA,MACF;AAEA,YAAM,kBAAkB,CAAC,YAAyB;AAChD,mBAAW;AAAA,UACT,YAAY,OAAO,iBAAiB,gBAAgB,OAAO,CAAC;AAAA,QAC9D;AAAA,MACF;AAEA,YAAM,YAAY,CAAC,iBAAyB;AAC1C,mBAAW;AAAA,UACT,YAAY,OAAO,iBAAiB,SAAS,YAAY,CAAC;AAAA,QAC5D;AAAA,MACF;AAGA,iBAAW;AAAA,QACT,YAAY;AAAA,UACV,iBAAiB,0BAA0B;AAAA,YACzC;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAEA,UAAI;AACF,cAAMA,SAAQ;AAAA,UACZ;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH,SAAS,OAAP;AACA,mBAAW,WAAc,YAAd,YAAyB,GAAG,OAAO;AAAA,MAChD,UAAE;AACA,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF;AAAA,IACA,KAAK,YAAY;AAAA,IAAC;AAAA,IAClB,SAAS;AAAA,IAAC;AAAA,EACZ,CAAC;AAED,SAAO,IAAI,SAAS,QAAQ;AAAA,IAC1B,QAAQ;AAAA,IACR,SAAS;AAAA,MACP,gBAAgB;AAAA,IAClB;AAAA,EACF,CAAC;AACH;","names":["createParser","createParser","streamable","streamParts
","_a","content","process"]}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "ai",
- "version": "2.2.27",
+ "version": "2.2.29",
  "license": "Apache-2.0",
  "sideEffects": false,
  "main": "./dist/index.js",
@@ -66,13 +66,13 @@
  "devDependencies": {
  "@anthropic-ai/sdk": "0.10.0",
  "@aws-sdk/client-bedrock-runtime": "3.451.0",
- "@edge-runtime/jest-environment": "1.1.0-beta.31",
+ "@edge-runtime/vm": "^3.1.7",
  "@huggingface/inference": "2.6.4",
+ "@solidjs/testing-library": "0.8.4",
  "@testing-library/jest-dom": "^6.1.4",
  "@testing-library/react": "^14.0.0",
  "@testing-library/user-event": "^14.5.1",
  "@testing-library/vue": "^8.0.1",
- "@types/jest": "29.2.0",
  "@types/node": "^17.0.12",
  "@types/react": "^18.2.8",
  "@types/react-dom": "^18.2.0",
@@ -84,8 +84,10 @@
  "msw": "2.0.9",
  "openai": "4.16.1",
  "react-dom": "^18.2.0",
+ "solid-js": "1.8.7",
  "tsup": "^6.7.0",
  "typescript": "5.1.3",
+ "vite-plugin-solid": "2.7.2",
  "@vercel/ai-tsconfig": "0.0.0",
  "eslint-config-vercel-ai": "0.0.0"
  },
@@ -138,8 +140,16 @@
  "type-check": "tsc --noEmit",
  "prettier-check": "prettier --check \"./**/*.ts*\"",
  "test": "pnpm test:node && pnpm test:edge && pnpm test:ui",
- "test:edge": "vitest --config vitest.edge.config.js --run",
- "test:node": "vitest --config vitest.node.config.js --run",
- "test:ui": "vitest --config vitest.ui.config.js --run"
+ "test:edge": "vitest --config vitest.edge.config.js --run --threads=false",
+ "test:node": "vitest --config vitest.node.config.js --run --threads=false",
+ "test:ui": "pnpm test:ui:react && pnpm test:ui:vue && pnpm test:ui:solid",
+ "test:ui:react": "vitest --config vitest.ui.react.config.js --run",
+ "test:ui:solid": "vitest --config vitest.ui.solid.config.js --run",
+ "test:ui:vue": "vitest --config vitest.ui.vue.config.js --run",
+ "test:edge:watch": "vitest --config vitest.edge.config.js --threads=false",
+ "test:node:watch": "vitest --config vitest.node.config.js --threads=false",
+ "test:ui:react:watch": "vitest --config vitest.ui.react.config.js",
+ "test:ui:solid:watch": "vitest --config vitest.ui.solid.config.js",
+ "test:ui:vue:watch": "vitest --config vitest.ui.vue.config.js"
  }
  }
@@ -19,7 +19,7 @@ interface Message {
  createdAt?: Date;
  content: string;
  ui?: string | JSX.Element | JSX.Element[] | null | undefined;
- role: 'system' | 'user' | 'assistant' | 'function';
+ role: 'system' | 'user' | 'assistant' | 'function' | 'data';
  /**
  * If the message has a role of `function`, the `name` field is the name of the function.
  * Otherwise, the name field should not be set.
@@ -31,7 +31,11 @@ interface Message {
  * not be set.
  */
  function_call?: string | FunctionCall;
+ data?: JSONValue;
  }
+ type JSONValue = null | string | number | boolean | {
+ [x: string]: JSONValue;
+ } | Array<JSONValue>;

  /**
  * A prompt constructor for Anthropic models.
@@ -59,4 +63,155 @@ declare function experimental_buildOpenAssistantPrompt(messages: Pick<Message, '
  */
  declare function experimental_buildLlama2Prompt(messages: Pick<Message, 'content' | 'role'>[]): string;

- export { experimental_buildAnthropicPrompt, experimental_buildLlama2Prompt, experimental_buildOpenAssistantPrompt, experimental_buildStarChatBetaPrompt };
+ declare function experimental_buildOpenAIMessages(messages: Message[]): ChatCompletionMessageParam[];
+ type ChatCompletionMessageParam = ChatCompletionSystemMessageParam | ChatCompletionUserMessageParam | ChatCompletionAssistantMessageParam | ChatCompletionToolMessageParam | ChatCompletionFunctionMessageParam;
+ interface ChatCompletionSystemMessageParam {
+ /**
+ * The contents of the system message.
+ */
+ content: string | null;
+ /**
+ * The role of the messages author, in this case `system`.
+ */
+ role: 'system';
+ }
+ interface ChatCompletionUserMessageParam {
+ /**
+ * The contents of the user message.
+ */
+ content: string | Array<ChatCompletionContentPart> | null;
+ /**
+ * The role of the messages author, in this case `user`.
+ */
+ role: 'user';
+ }
+ type ChatCompletionContentPart = ChatCompletionContentPartText | ChatCompletionContentPartImage;
+ interface ChatCompletionContentPartText {
+ /**
+ * The text content.
+ */
+ text: string;
+ /**
+ * The type of the content part.
+ */
+ type: 'text';
+ }
+ interface ChatCompletionContentPartImage {
+ image_url: ChatCompletionContentPartImage.ImageURL;
+ /**
+ * The type of the content part.
+ */
+ type: 'image_url';
+ }
+ declare namespace ChatCompletionContentPartImage {
+ interface ImageURL {
+ /**
+ * Specifies the detail level of the image.
+ */
+ detail?: 'auto' | 'low' | 'high';
+ /**
+ * Either a URL of the image or the base64 encoded image data.
+ */
+ url?: string;
+ }
+ }
+ interface ChatCompletionAssistantMessageParam {
+ /**
+ * The contents of the assistant message.
+ */
+ content: string | null;
+ /**
+ * The role of the messages author, in this case `assistant`.
+ */
+ role: 'assistant';
+ /**
+ * Deprecated and replaced by `tool_calls`. The name and arguments of a function
+ * that should be called, as generated by the model.
+ */
+ function_call?: ChatCompletionAssistantMessageParam.FunctionCall;
+ /**
+ * The tool calls generated by the model, such as function calls.
+ */
+ tool_calls?: Array<ChatCompletionMessageToolCall>;
+ }
+ declare namespace ChatCompletionAssistantMessageParam {
+ /**
+ * Deprecated and replaced by `tool_calls`. The name and arguments of a function
+ * that should be called, as generated by the model.
+ */
+ interface FunctionCall {
+ /**
+ * The arguments to call the function with, as generated by the model in JSON
+ * format. Note that the model does not always generate valid JSON, and may
+ * hallucinate parameters not defined by your function schema. Validate the
+ * arguments in your code before calling your function.
+ */
+ arguments: string;
+ /**
+ * The name of the function to call.
+ */
+ name: string;
+ }
+ }
+ interface ChatCompletionMessageToolCall {
+ /**
+ * The ID of the tool call.
+ */
+ id: string;
+ /**
+ * The function that the model called.
+ */
+ function: ChatCompletionMessageToolCall.Function;
+ /**
+ * The type of the tool. Currently, only `function` is supported.
+ */
+ type: 'function';
+ }
+ declare namespace ChatCompletionMessageToolCall {
+ /**
+ * The function that the model called.
+ */
+ interface Function {
+ /**
+ * The arguments to call the function with, as generated by the model in JSON
+ * format. Note that the model does not always generate valid JSON, and may
+ * hallucinate parameters not defined by your function schema. Validate the
+ * arguments in your code before calling your function.
+ */
+ arguments: string;
+ /**
+ * The name of the function to call.
+ */
+ name: string;
+ }
+ }
+ interface ChatCompletionToolMessageParam {
+ /**
+ * The contents of the tool message.
+ */
+ content: string | null;
+ /**
+ * The role of the messages author, in this case `tool`.
+ */
+ role: 'tool';
+ /**
+ * Tool call that this message is responding to.
+ */
+ tool_call_id: string;
+ }
+ interface ChatCompletionFunctionMessageParam {
+ /**
+ * The return value from the function call, to return to the model.
+ */
+ content: string | null;
+ /**
+ * The name of the function to call.
+ */
+ name: string;
+ /**
+ * The role of the messages author, in this case `function`.
+ */
+ role: 'function';
+ }
+
+ export { ChatCompletionAssistantMessageParam, ChatCompletionContentPart, ChatCompletionContentPartImage, ChatCompletionContentPartText, ChatCompletionFunctionMessageParam, ChatCompletionMessageParam, ChatCompletionMessageToolCall, ChatCompletionSystemMessageParam, ChatCompletionToolMessageParam, ChatCompletionUserMessageParam, experimental_buildAnthropicPrompt, experimental_buildLlama2Prompt, experimental_buildOpenAIMessages, experimental_buildOpenAssistantPrompt, experimental_buildStarChatBetaPrompt };
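
Note: the declarations added above introduce `experimental_buildOpenAIMessages`, which maps the package's `Message[]` shape onto the OpenAI chat-completion parameter types copied into this file. The following is a minimal usage sketch, not part of the published diff; it assumes the helper is imported from the `ai/prompts` entry point and uses the `openai` client listed in the devDependencies, with purely illustrative message values.

// Sketch only: the `ai/prompts` import path and the sample messages are assumptions.
import OpenAI from 'openai';
import type { Message } from 'ai';
import { experimental_buildOpenAIMessages } from 'ai/prompts';

const history: Message[] = [
  { id: '1', role: 'system', content: 'You are a terse assistant.' },
  { id: '2', role: 'user', content: 'Summarize this release.' },
];

const openai = new OpenAI();

// The helper throws for malformed function_call values and for the
// unsupported 'data' role, matching the compiled checks later in this diff.
const completion = await openai.chat.completions.create({
  model: 'gpt-3.5-turbo',
  messages: experimental_buildOpenAIMessages(history),
});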
@@ -22,6 +22,7 @@ var prompts_exports = {};
  __export(prompts_exports, {
  experimental_buildAnthropicPrompt: () => experimental_buildAnthropicPrompt,
  experimental_buildLlama2Prompt: () => experimental_buildLlama2Prompt,
+ experimental_buildOpenAIMessages: () => experimental_buildOpenAIMessages,
  experimental_buildOpenAssistantPrompt: () => experimental_buildOpenAssistantPrompt,
  experimental_buildStarChatBetaPrompt: () => experimental_buildStarChatBetaPrompt
  });
@@ -97,10 +98,55 @@ ${content}
  });
  return startPrompt + conversation.join("") + endPrompt;
  }
+
+ // prompts/openai.tsx
+ function experimental_buildOpenAIMessages(messages) {
+ return messages.map((message) => {
+ switch (message.role) {
+ case "system":
+ case "user":
+ return {
+ role: message.role,
+ content: message.content
+ };
+ case "assistant": {
+ const function_call = message.function_call;
+ if (function_call !== void 0 && (typeof function_call === "string" || function_call.arguments === void 0 || function_call.name === void 0)) {
+ throw new Error(
+ "Invalid function call in message. Expected a function call object"
+ );
+ }
+ return {
+ role: message.role,
+ content: message.content,
+ function_call: function_call === void 0 ? void 0 : {
+ name: function_call.name,
+ arguments: function_call.arguments
+ }
+ };
+ }
+ case "function": {
+ if (message.name === void 0) {
+ throw new Error("Invalid function call in message. Expected a name");
+ }
+ return {
+ role: message.role,
+ content: message.content,
+ name: message.name
+ };
+ }
+ case "data": {
+ throw "unsupported message role 'data'";
+ }
+ }
+ });
+ }
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
  experimental_buildAnthropicPrompt,
  experimental_buildLlama2Prompt,
+ experimental_buildOpenAIMessages,
  experimental_buildOpenAssistantPrompt,
  experimental_buildStarChatBetaPrompt
  });
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../index.ts","../anthropic.ts","../huggingface.ts","../openai.tsx"],"sourcesContent":["export * from './anthropic';\nexport * from './huggingface';\nexport * from './openai';\n","import { Message } from '../shared/types';\n\n/**\n * A prompt constructor for Anthropic models.\n * Does not support `function` messages.\n * @see https://docs.anthropic.com/claude/reference/getting-started-with-the-api\n */\nexport function experimental_buildAnthropicPrompt(\n messages: Pick<Message, 'content' | 'role'>[],\n) {\n return (\n messages.map(({ content, role }) => {\n if (role === 'user') {\n return `\\n\\nHuman: ${content}`;\n } else {\n return `\\n\\nAssistant: ${content}`;\n }\n }) + '\\n\\nAssistant:'\n );\n}\n","import { Message } from '../shared/types';\n\n/**\n * A prompt constructor for the HuggingFace StarChat Beta model.\n * Does not support `function` messages.\n * @see https://huggingface.co/HuggingFaceH4/starchat-beta\n */\nexport function experimental_buildStarChatBetaPrompt(\n messages: Pick<Message, 'content' | 'role'>[],\n) {\n return (\n messages\n .map(({ content, role }) => {\n if (role === 'user') {\n return `<|user|>\\n${content}<|end|>\\n`;\n } else if (role === 'assistant') {\n return `<|assistant|>\\n${content}<|end|>\\n`;\n } else if (role === 'system') {\n return `<|system|>\\n${content}<|end|>\\n`;\n } else if (role === 'function') {\n throw new Error('StarChat Beta does not support function calls.');\n }\n })\n .join('') + '<|assistant|>'\n );\n}\n\n/**\n * A prompt constructor for HuggingFace OpenAssistant models.\n * Does not support `function` or `system` messages.\n * @see https://huggingface.co/OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5\n */\nexport function experimental_buildOpenAssistantPrompt(\n messages: Pick<Message, 'content' | 'role'>[],\n) {\n return (\n messages\n .map(({ content, role }) => {\n if (role === 'user') {\n return `<|prompter|>${content}<|endoftext|>`;\n } else if (role === 'function') {\n throw new Error('OpenAssistant does not support function calls.');\n } else if (role === 'system') {\n throw new Error('OpenAssistant does not support system messages.');\n } else {\n return `<|assistant|>${content}<|endoftext|>`;\n }\n })\n .join('') + '<|assistant|>'\n );\n}\n\n/**\n * A prompt constructor for HuggingFace LLama 2 chat models.\n * Does not support `function` messages.\n * @see https://huggingface.co/meta-llama/Llama-2-70b-chat-hf and https://huggingface.co/blog/llama2#how-to-prompt-llama-2\n */\nexport function experimental_buildLlama2Prompt(\n messages: Pick<Message, 'content' | 'role'>[],\n) {\n const startPrompt = `<s>[INST] `;\n const endPrompt = ` [/INST]`;\n const conversation = messages.map(({ content, role }, index) => {\n if (role === 'user') {\n return content.trim();\n } else if (role === 'assistant') {\n return ` [/INST] ${content}</s><s>[INST] `;\n } else if (role === 'function') {\n throw new Error('Llama 2 does not support function calls.');\n } else if (role === 'system' && index === 0) {\n return `<<SYS>>\\n${content}\\n<</SYS>>\\n\\n`;\n } else {\n throw new Error(`Invalid message role: ${role}`);\n }\n });\n\n return startPrompt + conversation.join('') + endPrompt;\n}\n","import { Message } from '../shared/types';\n\nexport function experimental_buildOpenAIMessages(\n messages: Message[],\n): ChatCompletionMessageParam[] {\n return messages.map(message => {\n switch (message.role) {\n case 'system':\n case 'user':\n return {\n role: message.role,\n content: message.content,\n } satisfies 
ChatCompletionMessageParam;\n\n case 'assistant': {\n const function_call = message.function_call;\n\n if (\n function_call !== undefined &&\n (typeof function_call === 'string' ||\n function_call.arguments === undefined ||\n function_call.name === undefined)\n ) {\n throw new Error(\n 'Invalid function call in message. Expected a function call object',\n );\n }\n\n return {\n role: message.role,\n content: message.content,\n function_call:\n function_call === undefined\n ? undefined\n : {\n name: function_call.name!,\n arguments: function_call.arguments!,\n },\n } satisfies ChatCompletionMessageParam;\n }\n\n case 'function': {\n if (message.name === undefined) {\n throw new Error('Invalid function call in message. Expected a name');\n }\n\n return {\n role: message.role,\n content: message.content,\n name: message.name,\n } satisfies ChatCompletionMessageParam;\n }\n\n case 'data': {\n throw \"unsupported message role 'data'\";\n }\n }\n });\n}\n\n// copy of open ai messages (so we don't have a dependency on the openai package)\nexport type ChatCompletionMessageParam =\n | ChatCompletionSystemMessageParam\n | ChatCompletionUserMessageParam\n | ChatCompletionAssistantMessageParam\n | ChatCompletionToolMessageParam\n | ChatCompletionFunctionMessageParam;\n\nexport interface ChatCompletionSystemMessageParam {\n /**\n * The contents of the system message.\n */\n content: string | null;\n\n /**\n * The role of the messages author, in this case `system`.\n */\n role: 'system';\n}\n\nexport interface ChatCompletionUserMessageParam {\n /**\n * The contents of the user message.\n */\n content: string | Array<ChatCompletionContentPart> | null;\n\n /**\n * The role of the messages author, in this case `user`.\n */\n role: 'user';\n}\n\nexport type ChatCompletionContentPart =\n | ChatCompletionContentPartText\n | ChatCompletionContentPartImage;\n\nexport interface ChatCompletionContentPartText {\n /**\n * The text content.\n */\n text: string;\n\n /**\n * The type of the content part.\n */\n type: 'text';\n}\n\nexport interface ChatCompletionContentPartImage {\n image_url: ChatCompletionContentPartImage.ImageURL;\n\n /**\n * The type of the content part.\n */\n type: 'image_url';\n}\n\nexport namespace ChatCompletionContentPartImage {\n export interface ImageURL {\n /**\n * Specifies the detail level of the image.\n */\n detail?: 'auto' | 'low' | 'high';\n\n /**\n * Either a URL of the image or the base64 encoded image data.\n */\n url?: string;\n }\n}\n\nexport interface ChatCompletionAssistantMessageParam {\n /**\n * The contents of the assistant message.\n */\n content: string | null;\n\n /**\n * The role of the messages author, in this case `assistant`.\n */\n role: 'assistant';\n\n /**\n * Deprecated and replaced by `tool_calls`. The name and arguments of a function\n * that should be called, as generated by the model.\n */\n function_call?: ChatCompletionAssistantMessageParam.FunctionCall;\n\n /**\n * The tool calls generated by the model, such as function calls.\n */\n tool_calls?: Array<ChatCompletionMessageToolCall>;\n}\n\nexport namespace ChatCompletionAssistantMessageParam {\n /**\n * Deprecated and replaced by `tool_calls`. The name and arguments of a function\n * that should be called, as generated by the model.\n */\n export interface FunctionCall {\n /**\n * The arguments to call the function with, as generated by the model in JSON\n * format. Note that the model does not always generate valid JSON, and may\n * hallucinate parameters not defined by your function schema. 
Validate the\n * arguments in your code before calling your function.\n */\n arguments: string;\n\n /**\n * The name of the function to call.\n */\n name: string;\n }\n}\n\nexport interface ChatCompletionMessageToolCall {\n /**\n * The ID of the tool call.\n */\n id: string;\n\n /**\n * The function that the model called.\n */\n function: ChatCompletionMessageToolCall.Function;\n\n /**\n * The type of the tool. Currently, only `function` is supported.\n */\n type: 'function';\n}\n\nexport namespace ChatCompletionMessageToolCall {\n /**\n * The function that the model called.\n */\n export interface Function {\n /**\n * The arguments to call the function with, as generated by the model in JSON\n * format. Note that the model does not always generate valid JSON, and may\n * hallucinate parameters not defined by your function schema. Validate the\n * arguments in your code before calling your function.\n */\n arguments: string;\n\n /**\n * The name of the function to call.\n */\n name: string;\n }\n}\n\nexport interface ChatCompletionToolMessageParam {\n /**\n * The contents of the tool message.\n */\n content: string | null;\n\n /**\n * The role of the messages author, in this case `tool`.\n */\n role: 'tool';\n\n /**\n * Tool call that this message is responding to.\n */\n tool_call_id: string;\n}\n\nexport interface ChatCompletionFunctionMessageParam {\n /**\n * The return value from the function call, to return to the model.\n */\n content: string | null;\n\n /**\n * The name of the function to call.\n */\n name: string;\n\n /**\n * The role of the messages author, in this case `function`.\n */\n role: 'function';\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACOO,SAAS,kCACd,UACA;AACA,SACE,SAAS,IAAI,CAAC,EAAE,SAAS,KAAK,MAAM;AAClC,QAAI,SAAS,QAAQ;AACnB,aAAO;AAAA;AAAA,SAAc;AAAA,IACvB,OAAO;AACL,aAAO;AAAA;AAAA,aAAkB;AAAA,IAC3B;AAAA,EACF,CAAC,IAAI;AAET;;;ACZO,SAAS,qCACd,UACA;AACA,SACE,SACG,IAAI,CAAC,EAAE,SAAS,KAAK,MAAM;AAC1B,QAAI,SAAS,QAAQ;AACnB,aAAO;AAAA,EAAa;AAAA;AAAA,IACtB,WAAW,SAAS,aAAa;AAC/B,aAAO;AAAA,EAAkB;AAAA;AAAA,IAC3B,WAAW,SAAS,UAAU;AAC5B,aAAO;AAAA,EAAe;AAAA;AAAA,IACxB,WAAW,SAAS,YAAY;AAC9B,YAAM,IAAI,MAAM,gDAAgD;AAAA,IAClE;AAAA,EACF,CAAC,EACA,KAAK,EAAE,IAAI;AAElB;AAOO,SAAS,sCACd,UACA;AACA,SACE,SACG,IAAI,CAAC,EAAE,SAAS,KAAK,MAAM;AAC1B,QAAI,SAAS,QAAQ;AACnB,aAAO,eAAe;AAAA,IACxB,WAAW,SAAS,YAAY;AAC9B,YAAM,IAAI,MAAM,gDAAgD;AAAA,IAClE,WAAW,SAAS,UAAU;AAC5B,YAAM,IAAI,MAAM,iDAAiD;AAAA,IACnE,OAAO;AACL,aAAO,gBAAgB;AAAA,IACzB;AAAA,EACF,CAAC,EACA,KAAK,EAAE,IAAI;AAElB;AAOO,SAAS,+BACd,UACA;AACA,QAAM,cAAc;AACpB,QAAM,YAAY;AAClB,QAAM,eAAe,SAAS,IAAI,CAAC,EAAE,SAAS,KAAK,GAAG,UAAU;AAC9D,QAAI,SAAS,QAAQ;AACnB,aAAO,QAAQ,KAAK;AAAA,IACtB,WAAW,SAAS,aAAa;AAC/B,aAAO,YAAY;AAAA,IACrB,WAAW,SAAS,YAAY;AAC9B,YAAM,IAAI,MAAM,0CAA0C;AAAA,IAC5D,WAAW,SAAS,YAAY,UAAU,GAAG;AAC3C,aAAO;AAAA,EAAY;AAAA;AAAA;AAAA;AAAA,IACrB,OAAO;AACL,YAAM,IAAI,MAAM,yBAAyB,MAAM;AAAA,IACjD;AAAA,EACF,CAAC;AAED,SAAO,cAAc,aAAa,KAAK,EAAE,IAAI;AAC/C;;;AC3EO,SAAS,iCACd,UAC8B;AAC9B,SAAO,SAAS,IAAI,aAAW;AAC7B,YAAQ,QAAQ,MAAM;AAAA,MACpB,KAAK;AAAA,MACL,KAAK;AACH,eAAO;AAAA,UACL,MAAM,QAAQ;AAAA,UACd,SAAS,QAAQ;AAAA,QACnB;AAAA,MAEF,KAAK,aAAa;AAChB,cAAM,gBAAgB,QAAQ;AAE9B,YACE,kBAAkB,WACjB,OAAO,kBAAkB,YACxB,cAAc,cAAc,UAC5B,cAAc,SAAS,SACzB;AACA,gBAAM,IAAI;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAEA,eAAO;AAAA,UACL,MAAM,QAAQ;AAAA,UACd,SAAS,QAAQ;AAAA,UACjB,eACE,kBAAkB,SACd,SACA;AAAA,YACE,MAAM,cAAc;AAAA,YACpB,WAAW,cAAc;AAAA,UAC3B;AAAA,QACR;AAAA,MACF;AAAA,MAEA,KAAK,YAAY;AACf,YAAI,QAAQ,SAAS,QAAW;AAC9B,gBAAM,IAAI,MAAM,mDAAmD;AAAA,QACrE;AAEA,eAAO;AAA
A,UACL,MAAM,QAAQ;AAAA,UACd,SAAS,QAAQ;AAAA,UACjB,MAAM,QAAQ;AAAA,QAChB;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF,CAAC;AACH;","names":[]}
@@ -68,9 +68,54 @@ ${content}
  });
  return startPrompt + conversation.join("") + endPrompt;
  }
+
+ // prompts/openai.tsx
+ function experimental_buildOpenAIMessages(messages) {
+ return messages.map((message) => {
+ switch (message.role) {
+ case "system":
+ case "user":
+ return {
+ role: message.role,
+ content: message.content
+ };
+ case "assistant": {
+ const function_call = message.function_call;
+ if (function_call !== void 0 && (typeof function_call === "string" || function_call.arguments === void 0 || function_call.name === void 0)) {
+ throw new Error(
+ "Invalid function call in message. Expected a function call object"
+ );
+ }
+ return {
+ role: message.role,
+ content: message.content,
+ function_call: function_call === void 0 ? void 0 : {
+ name: function_call.name,
+ arguments: function_call.arguments
+ }
+ };
+ }
+ case "function": {
+ if (message.name === void 0) {
+ throw new Error("Invalid function call in message. Expected a name");
+ }
+ return {
+ role: message.role,
+ content: message.content,
+ name: message.name
+ };
+ }
+ case "data": {
+ throw "unsupported message role 'data'";
+ }
+ }
+ });
+ }
  export {
  experimental_buildAnthropicPrompt,
  experimental_buildLlama2Prompt,
+ experimental_buildOpenAIMessages,
  experimental_buildOpenAssistantPrompt,
  experimental_buildStarChatBetaPrompt
  };
+ //# sourceMappingURL=index.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../anthropic.ts","../huggingface.ts","../openai.tsx"],"sourcesContent":["import { Message } from '../shared/types';\n\n/**\n * A prompt constructor for Anthropic models.\n * Does not support `function` messages.\n * @see https://docs.anthropic.com/claude/reference/getting-started-with-the-api\n */\nexport function experimental_buildAnthropicPrompt(\n messages: Pick<Message, 'content' | 'role'>[],\n) {\n return (\n messages.map(({ content, role }) => {\n if (role === 'user') {\n return `\\n\\nHuman: ${content}`;\n } else {\n return `\\n\\nAssistant: ${content}`;\n }\n }) + '\\n\\nAssistant:'\n );\n}\n","import { Message } from '../shared/types';\n\n/**\n * A prompt constructor for the HuggingFace StarChat Beta model.\n * Does not support `function` messages.\n * @see https://huggingface.co/HuggingFaceH4/starchat-beta\n */\nexport function experimental_buildStarChatBetaPrompt(\n messages: Pick<Message, 'content' | 'role'>[],\n) {\n return (\n messages\n .map(({ content, role }) => {\n if (role === 'user') {\n return `<|user|>\\n${content}<|end|>\\n`;\n } else if (role === 'assistant') {\n return `<|assistant|>\\n${content}<|end|>\\n`;\n } else if (role === 'system') {\n return `<|system|>\\n${content}<|end|>\\n`;\n } else if (role === 'function') {\n throw new Error('StarChat Beta does not support function calls.');\n }\n })\n .join('') + '<|assistant|>'\n );\n}\n\n/**\n * A prompt constructor for HuggingFace OpenAssistant models.\n * Does not support `function` or `system` messages.\n * @see https://huggingface.co/OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5\n */\nexport function experimental_buildOpenAssistantPrompt(\n messages: Pick<Message, 'content' | 'role'>[],\n) {\n return (\n messages\n .map(({ content, role }) => {\n if (role === 'user') {\n return `<|prompter|>${content}<|endoftext|>`;\n } else if (role === 'function') {\n throw new Error('OpenAssistant does not support function calls.');\n } else if (role === 'system') {\n throw new Error('OpenAssistant does not support system messages.');\n } else {\n return `<|assistant|>${content}<|endoftext|>`;\n }\n })\n .join('') + '<|assistant|>'\n );\n}\n\n/**\n * A prompt constructor for HuggingFace LLama 2 chat models.\n * Does not support `function` messages.\n * @see https://huggingface.co/meta-llama/Llama-2-70b-chat-hf and https://huggingface.co/blog/llama2#how-to-prompt-llama-2\n */\nexport function experimental_buildLlama2Prompt(\n messages: Pick<Message, 'content' | 'role'>[],\n) {\n const startPrompt = `<s>[INST] `;\n const endPrompt = ` [/INST]`;\n const conversation = messages.map(({ content, role }, index) => {\n if (role === 'user') {\n return content.trim();\n } else if (role === 'assistant') {\n return ` [/INST] ${content}</s><s>[INST] `;\n } else if (role === 'function') {\n throw new Error('Llama 2 does not support function calls.');\n } else if (role === 'system' && index === 0) {\n return `<<SYS>>\\n${content}\\n<</SYS>>\\n\\n`;\n } else {\n throw new Error(`Invalid message role: ${role}`);\n }\n });\n\n return startPrompt + conversation.join('') + endPrompt;\n}\n","import { Message } from '../shared/types';\n\nexport function experimental_buildOpenAIMessages(\n messages: Message[],\n): ChatCompletionMessageParam[] {\n return messages.map(message => {\n switch (message.role) {\n case 'system':\n case 'user':\n return {\n role: message.role,\n content: message.content,\n } satisfies ChatCompletionMessageParam;\n\n case 'assistant': {\n const function_call = message.function_call;\n\n if (\n 
function_call !== undefined &&\n (typeof function_call === 'string' ||\n function_call.arguments === undefined ||\n function_call.name === undefined)\n ) {\n throw new Error(\n 'Invalid function call in message. Expected a function call object',\n );\n }\n\n return {\n role: message.role,\n content: message.content,\n function_call:\n function_call === undefined\n ? undefined\n : {\n name: function_call.name!,\n arguments: function_call.arguments!,\n },\n } satisfies ChatCompletionMessageParam;\n }\n\n case 'function': {\n if (message.name === undefined) {\n throw new Error('Invalid function call in message. Expected a name');\n }\n\n return {\n role: message.role,\n content: message.content,\n name: message.name,\n } satisfies ChatCompletionMessageParam;\n }\n\n case 'data': {\n throw \"unsupported message role 'data'\";\n }\n }\n });\n}\n\n// copy of open ai messages (so we don't have a dependency on the openai package)\nexport type ChatCompletionMessageParam =\n | ChatCompletionSystemMessageParam\n | ChatCompletionUserMessageParam\n | ChatCompletionAssistantMessageParam\n | ChatCompletionToolMessageParam\n | ChatCompletionFunctionMessageParam;\n\nexport interface ChatCompletionSystemMessageParam {\n /**\n * The contents of the system message.\n */\n content: string | null;\n\n /**\n * The role of the messages author, in this case `system`.\n */\n role: 'system';\n}\n\nexport interface ChatCompletionUserMessageParam {\n /**\n * The contents of the user message.\n */\n content: string | Array<ChatCompletionContentPart> | null;\n\n /**\n * The role of the messages author, in this case `user`.\n */\n role: 'user';\n}\n\nexport type ChatCompletionContentPart =\n | ChatCompletionContentPartText\n | ChatCompletionContentPartImage;\n\nexport interface ChatCompletionContentPartText {\n /**\n * The text content.\n */\n text: string;\n\n /**\n * The type of the content part.\n */\n type: 'text';\n}\n\nexport interface ChatCompletionContentPartImage {\n image_url: ChatCompletionContentPartImage.ImageURL;\n\n /**\n * The type of the content part.\n */\n type: 'image_url';\n}\n\nexport namespace ChatCompletionContentPartImage {\n export interface ImageURL {\n /**\n * Specifies the detail level of the image.\n */\n detail?: 'auto' | 'low' | 'high';\n\n /**\n * Either a URL of the image or the base64 encoded image data.\n */\n url?: string;\n }\n}\n\nexport interface ChatCompletionAssistantMessageParam {\n /**\n * The contents of the assistant message.\n */\n content: string | null;\n\n /**\n * The role of the messages author, in this case `assistant`.\n */\n role: 'assistant';\n\n /**\n * Deprecated and replaced by `tool_calls`. The name and arguments of a function\n * that should be called, as generated by the model.\n */\n function_call?: ChatCompletionAssistantMessageParam.FunctionCall;\n\n /**\n * The tool calls generated by the model, such as function calls.\n */\n tool_calls?: Array<ChatCompletionMessageToolCall>;\n}\n\nexport namespace ChatCompletionAssistantMessageParam {\n /**\n * Deprecated and replaced by `tool_calls`. The name and arguments of a function\n * that should be called, as generated by the model.\n */\n export interface FunctionCall {\n /**\n * The arguments to call the function with, as generated by the model in JSON\n * format. Note that the model does not always generate valid JSON, and may\n * hallucinate parameters not defined by your function schema. 
Validate the\n * arguments in your code before calling your function.\n */\n arguments: string;\n\n /**\n * The name of the function to call.\n */\n name: string;\n }\n}\n\nexport interface ChatCompletionMessageToolCall {\n /**\n * The ID of the tool call.\n */\n id: string;\n\n /**\n * The function that the model called.\n */\n function: ChatCompletionMessageToolCall.Function;\n\n /**\n * The type of the tool. Currently, only `function` is supported.\n */\n type: 'function';\n}\n\nexport namespace ChatCompletionMessageToolCall {\n /**\n * The function that the model called.\n */\n export interface Function {\n /**\n * The arguments to call the function with, as generated by the model in JSON\n * format. Note that the model does not always generate valid JSON, and may\n * hallucinate parameters not defined by your function schema. Validate the\n * arguments in your code before calling your function.\n */\n arguments: string;\n\n /**\n * The name of the function to call.\n */\n name: string;\n }\n}\n\nexport interface ChatCompletionToolMessageParam {\n /**\n * The contents of the tool message.\n */\n content: string | null;\n\n /**\n * The role of the messages author, in this case `tool`.\n */\n role: 'tool';\n\n /**\n * Tool call that this message is responding to.\n */\n tool_call_id: string;\n}\n\nexport interface ChatCompletionFunctionMessageParam {\n /**\n * The return value from the function call, to return to the model.\n */\n content: string | null;\n\n /**\n * The name of the function to call.\n */\n name: string;\n\n /**\n * The role of the messages author, in this case `function`.\n */\n role: 'function';\n}\n"],"mappings":";AAOO,SAAS,kCACd,UACA;AACA,SACE,SAAS,IAAI,CAAC,EAAE,SAAS,KAAK,MAAM;AAClC,QAAI,SAAS,QAAQ;AACnB,aAAO;AAAA;AAAA,SAAc;AAAA,IACvB,OAAO;AACL,aAAO;AAAA;AAAA,aAAkB;AAAA,IAC3B;AAAA,EACF,CAAC,IAAI;AAET;;;ACZO,SAAS,qCACd,UACA;AACA,SACE,SACG,IAAI,CAAC,EAAE,SAAS,KAAK,MAAM;AAC1B,QAAI,SAAS,QAAQ;AACnB,aAAO;AAAA,EAAa;AAAA;AAAA,IACtB,WAAW,SAAS,aAAa;AAC/B,aAAO;AAAA,EAAkB;AAAA;AAAA,IAC3B,WAAW,SAAS,UAAU;AAC5B,aAAO;AAAA,EAAe;AAAA;AAAA,IACxB,WAAW,SAAS,YAAY;AAC9B,YAAM,IAAI,MAAM,gDAAgD;AAAA,IAClE;AAAA,EACF,CAAC,EACA,KAAK,EAAE,IAAI;AAElB;AAOO,SAAS,sCACd,UACA;AACA,SACE,SACG,IAAI,CAAC,EAAE,SAAS,KAAK,MAAM;AAC1B,QAAI,SAAS,QAAQ;AACnB,aAAO,eAAe;AAAA,IACxB,WAAW,SAAS,YAAY;AAC9B,YAAM,IAAI,MAAM,gDAAgD;AAAA,IAClE,WAAW,SAAS,UAAU;AAC5B,YAAM,IAAI,MAAM,iDAAiD;AAAA,IACnE,OAAO;AACL,aAAO,gBAAgB;AAAA,IACzB;AAAA,EACF,CAAC,EACA,KAAK,EAAE,IAAI;AAElB;AAOO,SAAS,+BACd,UACA;AACA,QAAM,cAAc;AACpB,QAAM,YAAY;AAClB,QAAM,eAAe,SAAS,IAAI,CAAC,EAAE,SAAS,KAAK,GAAG,UAAU;AAC9D,QAAI,SAAS,QAAQ;AACnB,aAAO,QAAQ,KAAK;AAAA,IACtB,WAAW,SAAS,aAAa;AAC/B,aAAO,YAAY;AAAA,IACrB,WAAW,SAAS,YAAY;AAC9B,YAAM,IAAI,MAAM,0CAA0C;AAAA,IAC5D,WAAW,SAAS,YAAY,UAAU,GAAG;AAC3C,aAAO;AAAA,EAAY;AAAA;AAAA;AAAA;AAAA,IACrB,OAAO;AACL,YAAM,IAAI,MAAM,yBAAyB,MAAM;AAAA,IACjD;AAAA,EACF,CAAC;AAED,SAAO,cAAc,aAAa,KAAK,EAAE,IAAI;AAC/C;;;AC3EO,SAAS,iCACd,UAC8B;AAC9B,SAAO,SAAS,IAAI,aAAW;AAC7B,YAAQ,QAAQ,MAAM;AAAA,MACpB,KAAK;AAAA,MACL,KAAK;AACH,eAAO;AAAA,UACL,MAAM,QAAQ;AAAA,UACd,SAAS,QAAQ;AAAA,QACnB;AAAA,MAEF,KAAK,aAAa;AAChB,cAAM,gBAAgB,QAAQ;AAE9B,YACE,kBAAkB,WACjB,OAAO,kBAAkB,YACxB,cAAc,cAAc,UAC5B,cAAc,SAAS,SACzB;AACA,gBAAM,IAAI;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAEA,eAAO;AAAA,UACL,MAAM,QAAQ;AAAA,UACd,SAAS,QAAQ;AAAA,UACjB,eACE,kBAAkB,SACd,SACA;AAAA,YACE,MAAM,cAAc;AAAA,YACpB,WAAW,cAAc;AAAA,UAC3B;AAAA,QACR;AAAA,MACF;AAAA,MAEA,KAAK,YAAY;AACf,YAAI,QAAQ,SAAS,QAAW;AAC9B,gBAAM,IAAI,MAAM,mDAAmD;AAAA,QACrE;AAEA,eAAO;AAAA,UACL,MAAM,QAAQ;AAAA,UACd,SAAS,QAAQ;AAAA,UACjB,MAAM,QAAQ;AAAA,QAC
hB;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF,CAAC;AACH;","names":[]}