@assistant-ui/react 0.4.3 → 0.4.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/edge.js CHANGED
@@ -63,19 +63,13 @@ function assistantEncoderStream() {
  case "finish": {
  const { type, ...rest } = chunk;
  controller.enqueue(
- formatStreamPart(
- "F" /* Finish */,
- JSON.stringify(rest)
- )
+ formatStreamPart("F" /* Finish */, rest)
  );
  break;
  }
  case "error": {
  controller.enqueue(
- formatStreamPart(
- "E" /* Error */,
- JSON.stringify(chunk.error)
- )
+ formatStreamPart("E" /* Error */, chunk.error)
  );
  break;
  }
@@ -87,64 +81,12 @@ function assistantEncoderStream() {
  }
  });
  }
- function formatStreamPart(code, value) {
+ function formatStreamPart(...[code, value]) {
  return `${code}:${JSON.stringify(value)}
  `;
  }

- // src/runtimes/edge/createEdgeRuntimeAPI.ts
- var createEdgeRuntimeAPI = ({ model }) => {
- const POST = async (request) => {
- const { system, messages, tools } = await request.json();
- const { stream } = await streamMessage({
- model,
- abortSignal: request.signal,
- ...system ? { system } : void 0,
- messages,
- tools
- });
- return new Response(stream, {
- headers: {
- contentType: "text/plain; charset=utf-8"
- }
- });
- };
- return { POST };
- };
- async function streamMessage({
- model,
- system,
- messages,
- tools,
- toolChoice,
- ...options
- }) {
- const { stream, warnings, rawResponse } = await model.doStream({
- inputFormat: "messages",
- mode: {
- type: "regular",
- ...tools ? { tools } : void 0,
- ...toolChoice ? { toolChoice } : void 0
- },
- prompt: convertToLanguageModelPrompt(system, messages),
- ...options
- });
- return {
- stream: stream.pipeThrough(assistantEncoderStream()).pipeThrough(new TextEncoderStream()),
- warnings,
- rawResponse
- };
- }
- function convertToLanguageModelPrompt(system, messages) {
- const languageModelMessages = [];
- if (system != null) {
- languageModelMessages.push({ role: "system", content: system });
- }
- languageModelMessages.push(
- ...messages.flatMap(convertToLanguageModelMessage)
- );
- return languageModelMessages;
- }
+ // src/runtimes/edge/converters/toLanguageModelMessages.ts
  var assistantMessageSplitter = () => {
  const stash = [];
  let assistantMessage = {
@@ -196,66 +138,120 @@ var assistantMessageSplitter = () => {
  }
  };
  };
- function convertToLanguageModelMessage(message) {
- const role = message.role;
- switch (role) {
- case "system": {
- return [{ role: "system", content: message.content[0].text }];
- }
- case "user": {
- const msg = {
- role: "user",
- content: message.content.map(
- (part) => {
- const type = part.type;
- switch (type) {
- case "text": {
- return part;
- }
- case "image": {
- return {
- type: "image",
- image: new URL(part.image)
- };
- }
- default: {
- const unhandledType = type;
- throw new Error(
- `Unspported content part type: ${unhandledType}`
- );
+ function toLanguageModelMessages(message) {
+ return message.flatMap((message2) => {
+ const role = message2.role;
+ switch (role) {
+ case "system": {
+ return [{ role: "system", content: message2.content[0].text }];
+ }
+ case "user": {
+ const msg = {
+ role: "user",
+ content: message2.content.map(
+ (part) => {
+ const type = part.type;
+ switch (type) {
+ case "text": {
+ return part;
+ }
+ case "image": {
+ return {
+ type: "image",
+ image: new URL(part.image)
+ };
+ }
+ default: {
+ const unhandledType = type;
+ throw new Error(
+ `Unspported content part type: ${unhandledType}`
+ );
+ }
  }
  }
- }
- )
- };
- return [msg];
- }
- case "assistant": {
- const splitter = assistantMessageSplitter();
- for (const part of message.content) {
- const type = part.type;
- switch (type) {
- case "text": {
- splitter.addTextContentPart(part);
- break;
- }
- case "tool-call": {
- splitter.addToolCallPart(part);
- break;
- }
- default: {
- const _exhaustiveCheck = type;
- throw new Error(`Unhandled content part type: ${_exhaustiveCheck}`);
+ )
+ };
+ return [msg];
+ }
+ case "assistant": {
+ const splitter = assistantMessageSplitter();
+ for (const part of message2.content) {
+ const type = part.type;
+ switch (type) {
+ case "text": {
+ splitter.addTextContentPart(part);
+ break;
+ }
+ case "tool-call": {
+ splitter.addToolCallPart(part);
+ break;
+ }
+ default: {
+ const unhandledType = type;
+ throw new Error(`Unhandled content part type: ${unhandledType}`);
+ }
  }
  }
+ return splitter.getMessages();
+ }
+ default: {
+ const unhandledRole = role;
+ throw new Error(`Unknown message role: ${unhandledRole}`);
  }
- return splitter.getMessages();
- }
- default: {
- const unhandledRole = role;
- throw new Error(`Unknown message role: ${unhandledRole}`);
  }
+ });
+ }
+
+ // src/runtimes/edge/createEdgeRuntimeAPI.ts
+ var createEdgeRuntimeAPI = ({ model }) => {
+ const POST = async (request) => {
+ const { system, messages, tools } = await request.json();
+ const { stream } = await streamMessage({
+ model,
+ abortSignal: request.signal,
+ ...system ? { system } : void 0,
+ messages,
+ tools
+ });
+ return new Response(stream, {
+ headers: {
+ contentType: "text/plain; charset=utf-8"
+ }
+ });
+ };
+ return { POST };
+ };
+ async function streamMessage({
+ model,
+ system,
+ messages,
+ tools,
+ toolChoice,
+ ...options
+ }) {
+ const { stream, warnings, rawResponse } = await model.doStream({
+ inputFormat: "messages",
+ mode: {
+ type: "regular",
+ ...tools ? { tools } : void 0,
+ ...toolChoice ? { toolChoice } : void 0
+ },
+ prompt: convertToLanguageModelPrompt(system, messages),
+ ...options
+ });
+ return {
+ stream: stream.pipeThrough(assistantEncoderStream()).pipeThrough(new TextEncoderStream()),
+ warnings,
+ rawResponse
+ };
+ }
+ function convertToLanguageModelPrompt(system, messages) {
+ const languageModelMessages = [];
+ if (system != null) {
+ languageModelMessages.push({ role: "system", content: system });
  }
+ languageModelMessages.push(...toLanguageModelMessages(messages));
+ return languageModelMessages;
  }
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
package/dist/edge.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/edge.ts","../src/runtimes/edge/streams/assistantEncoderStream.ts","../src/runtimes/edge/createEdgeRuntimeAPI.ts"],"sourcesContent":["export { createEdgeRuntimeAPI } from \"./runtimes/edge/createEdgeRuntimeAPI\";\n","import { AssistantStreamChunkType } from \"./AssistantStreamChunkType\";\nimport { LanguageModelV1StreamPart } from \"@ai-sdk/provider\";\n\nexport function assistantEncoderStream() {\n const toolCalls = new Set<string>();\n return new TransformStream<LanguageModelV1StreamPart, string>({\n transform(chunk, controller) {\n const chunkType = chunk.type;\n switch (chunkType) {\n case \"text-delta\": {\n controller.enqueue(\n formatStreamPart(\n AssistantStreamChunkType.TextDelta,\n chunk.textDelta,\n ),\n );\n break;\n }\n case \"tool-call-delta\": {\n if (!toolCalls.has(chunk.toolCallId)) {\n toolCalls.add(chunk.toolCallId);\n controller.enqueue(\n formatStreamPart(AssistantStreamChunkType.ToolCallBegin, {\n id: chunk.toolCallId,\n name: chunk.toolName,\n }),\n );\n }\n\n controller.enqueue(\n formatStreamPart(\n AssistantStreamChunkType.ToolCallArgsTextDelta,\n chunk.argsTextDelta,\n ),\n );\n break;\n }\n\n // ignore\n case \"tool-call\":\n break;\n\n case \"finish\": {\n const { type, ...rest } = chunk;\n controller.enqueue(\n formatStreamPart(\n AssistantStreamChunkType.Finish,\n JSON.stringify(rest),\n ),\n );\n break;\n }\n\n case \"error\": {\n controller.enqueue(\n formatStreamPart(\n AssistantStreamChunkType.Error,\n JSON.stringify(chunk.error),\n ),\n );\n break;\n }\n default: {\n const unhandledType: never = chunkType;\n throw new Error(`Unhandled chunk type: ${unhandledType}`);\n }\n }\n },\n });\n}\n\nexport function formatStreamPart(\n code: AssistantStreamChunkType,\n value: any,\n): string {\n return `${code}:${JSON.stringify(value)}\\n`;\n}\n","import {\n LanguageModelV1,\n LanguageModelV1ToolChoice,\n LanguageModelV1FunctionTool,\n LanguageModelV1Message,\n LanguageModelV1Prompt,\n LanguageModelV1CallOptions,\n LanguageModelV1TextPart,\n LanguageModelV1CallWarning,\n LanguageModelV1ToolCallPart,\n LanguageModelV1ToolResultPart,\n LanguageModelV1ImagePart,\n} from \"@ai-sdk/provider\";\nimport {\n CoreThreadMessage,\n TextContentPart,\n ToolCallContentPart,\n} from \"../../types/AssistantTypes\";\nimport { assistantEncoderStream } from \"./streams/assistantEncoderStream\";\nimport { EdgeRuntimeRequestOptions } from \"./EdgeRuntimeRequestOptions\";\n\nexport const createEdgeRuntimeAPI = ({ model }: { model: LanguageModelV1 }) => {\n const POST = async (request: Request) => {\n const { system, messages, tools } =\n (await request.json()) as EdgeRuntimeRequestOptions;\n\n const { stream } = await streamMessage({\n model,\n abortSignal: request.signal,\n\n ...(system ? 
{ system } : undefined),\n messages,\n tools,\n });\n\n return new Response(stream, {\n headers: {\n contentType: \"text/plain; charset=utf-8\",\n },\n });\n };\n return { POST };\n};\n\ntype StreamMessageResult = {\n stream: ReadableStream<Uint8Array>;\n warnings: LanguageModelV1CallWarning[] | undefined;\n rawResponse: unknown;\n};\n\nasync function streamMessage({\n model,\n system,\n messages,\n tools,\n toolChoice,\n ...options\n}: Omit<LanguageModelV1CallOptions, \"inputFormat\" | \"mode\" | \"prompt\"> & {\n model: LanguageModelV1;\n system?: string;\n messages: CoreThreadMessage[];\n tools?: LanguageModelV1FunctionTool[];\n toolChoice?: LanguageModelV1ToolChoice;\n}): Promise<StreamMessageResult> {\n const { stream, warnings, rawResponse } = await model.doStream({\n inputFormat: \"messages\",\n mode: {\n type: \"regular\",\n ...(tools ? { tools } : undefined),\n ...(toolChoice ? { toolChoice } : undefined),\n },\n prompt: convertToLanguageModelPrompt(system, messages),\n ...options,\n });\n\n return {\n stream: stream\n .pipeThrough(assistantEncoderStream())\n .pipeThrough(new TextEncoderStream()),\n warnings,\n rawResponse,\n };\n}\n\nexport function convertToLanguageModelPrompt(\n system: string | undefined,\n messages: CoreThreadMessage[],\n): LanguageModelV1Prompt {\n const languageModelMessages: LanguageModelV1Prompt = [];\n\n if (system != null) {\n languageModelMessages.push({ role: \"system\", content: system });\n }\n languageModelMessages.push(\n ...messages.flatMap(convertToLanguageModelMessage),\n );\n\n return languageModelMessages;\n}\n\nconst assistantMessageSplitter = () => {\n const stash: LanguageModelV1Message[] = [];\n let assistantMessage = {\n role: \"assistant\" as const,\n content: [] as (LanguageModelV1TextPart | LanguageModelV1ToolCallPart)[],\n };\n let toolMessage = {\n role: \"tool\" as const,\n content: [] as LanguageModelV1ToolResultPart[],\n };\n\n return {\n addTextContentPart: (part: TextContentPart) => {\n if (toolMessage.content.length > 0) {\n stash.push(assistantMessage);\n stash.push(toolMessage);\n\n assistantMessage = {\n role: \"assistant\" as const,\n content: [] as (\n | LanguageModelV1TextPart\n | LanguageModelV1ToolCallPart\n )[],\n };\n\n toolMessage = {\n role: \"tool\" as const,\n content: [] as LanguageModelV1ToolResultPart[],\n };\n }\n\n assistantMessage.content.push(part);\n },\n addToolCallPart: (part: ToolCallContentPart) => {\n assistantMessage.content.push({\n type: \"tool-call\",\n toolCallId: part.toolCallId,\n toolName: part.toolName,\n args: part.args,\n });\n if (part.result) {\n toolMessage.content.push({\n type: \"tool-result\",\n toolCallId: part.toolCallId,\n toolName: part.toolName,\n result: part.result,\n // isError\n });\n }\n },\n getMessages: () => {\n if (toolMessage.content.length > 0) {\n return [...stash, assistantMessage, toolMessage];\n }\n\n return [...stash, assistantMessage];\n },\n };\n};\n\nexport function convertToLanguageModelMessage(\n message: CoreThreadMessage,\n): LanguageModelV1Message[] {\n const role = message.role;\n switch (role) {\n case \"system\": {\n return [{ role: \"system\", content: message.content[0].text }];\n }\n\n case \"user\": {\n const msg: LanguageModelV1Message = {\n role: \"user\",\n content: message.content.map(\n (part): LanguageModelV1TextPart | LanguageModelV1ImagePart => {\n const type = part.type;\n switch (type) {\n case \"text\": {\n return part;\n }\n\n case \"image\": {\n return {\n type: \"image\",\n image: new URL(part.image),\n };\n }\n\n default: {\n const 
unhandledType: never = type;\n throw new Error(\n `Unspported content part type: ${unhandledType}`,\n );\n }\n }\n },\n ),\n };\n return [msg];\n }\n\n case \"assistant\": {\n const splitter = assistantMessageSplitter();\n for (const part of message.content) {\n const type = part.type;\n switch (type) {\n case \"text\": {\n splitter.addTextContentPart(part);\n break;\n }\n case \"tool-call\": {\n splitter.addToolCallPart(part);\n break;\n }\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unhandled content part type: ${_exhaustiveCheck}`);\n }\n }\n }\n return splitter.getMessages();\n }\n\n default: {\n const unhandledRole: never = role;\n throw new Error(`Unknown message role: ${unhandledRole}`);\n }\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACGO,SAAS,yBAAyB;AACvC,QAAM,YAAY,oBAAI,IAAY;AAClC,SAAO,IAAI,gBAAmD;AAAA,IAC5D,UAAU,OAAO,YAAY;AAC3B,YAAM,YAAY,MAAM;AACxB,cAAQ,WAAW;AAAA,QACjB,KAAK,cAAc;AACjB,qBAAW;AAAA,YACT;AAAA;AAAA,cAEE,MAAM;AAAA,YACR;AAAA,UACF;AACA;AAAA,QACF;AAAA,QACA,KAAK,mBAAmB;AACtB,cAAI,CAAC,UAAU,IAAI,MAAM,UAAU,GAAG;AACpC,sBAAU,IAAI,MAAM,UAAU;AAC9B,uBAAW;AAAA,cACT,0CAAyD;AAAA,gBACvD,IAAI,MAAM;AAAA,gBACV,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH;AAAA,UACF;AAEA,qBAAW;AAAA,YACT;AAAA;AAAA,cAEE,MAAM;AAAA,YACR;AAAA,UACF;AACA;AAAA,QACF;AAAA,QAGA,KAAK;AACH;AAAA,QAEF,KAAK,UAAU;AACb,gBAAM,EAAE,MAAM,GAAG,KAAK,IAAI;AAC1B,qBAAW;AAAA,YACT;AAAA;AAAA,cAEE,KAAK,UAAU,IAAI;AAAA,YACrB;AAAA,UACF;AACA;AAAA,QACF;AAAA,QAEA,KAAK,SAAS;AACZ,qBAAW;AAAA,YACT;AAAA;AAAA,cAEE,KAAK,UAAU,MAAM,KAAK;AAAA,YAC5B;AAAA,UACF;AACA;AAAA,QACF;AAAA,QACA,SAAS;AACP,gBAAM,gBAAuB;AAC7B,gBAAM,IAAI,MAAM,yBAAyB,aAAa,EAAE;AAAA,QAC1D;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAEO,SAAS,iBACd,MACA,OACQ;AACR,SAAO,GAAG,IAAI,IAAI,KAAK,UAAU,KAAK,CAAC;AAAA;AACzC;;;ACvDO,IAAM,uBAAuB,CAAC,EAAE,MAAM,MAAkC;AAC7E,QAAM,OAAO,OAAO,YAAqB;AACvC,UAAM,EAAE,QAAQ,UAAU,MAAM,IAC7B,MAAM,QAAQ,KAAK;AAEtB,UAAM,EAAE,OAAO,IAAI,MAAM,cAAc;AAAA,MACrC;AAAA,MACA,aAAa,QAAQ;AAAA,MAErB,GAAI,SAAS,EAAE,OAAO,IAAI;AAAA,MAC1B;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO,IAAI,SAAS,QAAQ;AAAA,MAC1B,SAAS;AAAA,QACP,aAAa;AAAA,MACf;AAAA,IACF,CAAC;AAAA,EACH;AACA,SAAO,EAAE,KAAK;AAChB;AAQA,eAAe,cAAc;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,GAAG;AACL,GAMiC;AAC/B,QAAM,EAAE,QAAQ,UAAU,YAAY,IAAI,MAAM,MAAM,SAAS;AAAA,IAC7D,aAAa;AAAA,IACb,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,GAAI,QAAQ,EAAE,MAAM,IAAI;AAAA,MACxB,GAAI,aAAa,EAAE,WAAW,IAAI;AAAA,IACpC;AAAA,IACA,QAAQ,6BAA6B,QAAQ,QAAQ;AAAA,IACrD,GAAG;AAAA,EACL,CAAC;AAED,SAAO;AAAA,IACL,QAAQ,OACL,YAAY,uBAAuB,CAAC,EACpC,YAAY,IAAI,kBAAkB,CAAC;AAAA,IACtC;AAAA,IACA;AAAA,EACF;AACF;AAEO,SAAS,6BACd,QACA,UACuB;AACvB,QAAM,wBAA+C,CAAC;AAEtD,MAAI,UAAU,MAAM;AAClB,0BAAsB,KAAK,EAAE,MAAM,UAAU,SAAS,OAAO,CAAC;AAAA,EAChE;AACA,wBAAsB;AAAA,IACpB,GAAG,SAAS,QAAQ,6BAA6B;AAAA,EACnD;AAEA,SAAO;AACT;AAEA,IAAM,2BAA2B,MAAM;AACrC,QAAM,QAAkC,CAAC;AACzC,MAAI,mBAAmB;AAAA,IACrB,MAAM;AAAA,IACN,SAAS,CAAC;AAAA,EACZ;AACA,MAAI,cAAc;AAAA,IAChB,MAAM;AAAA,IACN,SAAS,CAAC;AAAA,EACZ;AAEA,SAAO;AAAA,IACL,oBAAoB,CAAC,SAA0B;AAC7C,UAAI,YAAY,QAAQ,SAAS,GAAG;AAClC,cAAM,KAAK,gBAAgB;AAC3B,cAAM,KAAK,WAAW;AAEtB,2BAAmB;AAAA,UACjB,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,QAIZ;AAEA,sBAAc;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,QACZ;AAAA,MACF;AAEA,uBAAiB,QAAQ,KAAK,IAAI;AAAA,IACpC;AAAA,IACA,iBAAiB,CAAC,SAA8B;AAC9C,uBAAiB,QAAQ,KAAK;AAAA,QAC5B,MAAM;AAAA,QACN,YAAY,KAAK;AAAA,QACjB,UAAU,KAAK;AAAA,QACf,MAAM,KAAK;AAAA,MACb,CAAC;AACD,UAAI,KAAK,QAAQ;AACf,oBAAY,QAAQ,KAAK;AAAA,UACvB,MAAM;AAAA,UACN,YAAY,KAAK;AAAA,UACjB,UAAU,KAAK;AAAA,UACf,QAAQ,KAAK;AAAA;AAAA,QAEf,CAAC;AAAA,MACH;AAAA,IACF;
AAAA,IACA,aAAa,MAAM;AACjB,UAAI,YAAY,QAAQ,SAAS,GAAG;AAClC,eAAO,CAAC,GAAG,OAAO,kBAAkB,WAAW;AAAA,MACjD;AAEA,aAAO,CAAC,GAAG,OAAO,gBAAgB;AAAA,IACpC;AAAA,EACF;AACF;AAEO,SAAS,8BACd,SAC0B;AAC1B,QAAM,OAAO,QAAQ;AACrB,UAAQ,MAAM;AAAA,IACZ,KAAK,UAAU;AACb,aAAO,CAAC,EAAE,MAAM,UAAU,SAAS,QAAQ,QAAQ,CAAC,EAAE,KAAK,CAAC;AAAA,IAC9D;AAAA,IAEA,KAAK,QAAQ;AACX,YAAM,MAA8B;AAAA,QAClC,MAAM;AAAA,QACN,SAAS,QAAQ,QAAQ;AAAA,UACvB,CAAC,SAA6D;AAC5D,kBAAM,OAAO,KAAK;AAClB,oBAAQ,MAAM;AAAA,cACZ,KAAK,QAAQ;AACX,uBAAO;AAAA,cACT;AAAA,cAEA,KAAK,SAAS;AACZ,uBAAO;AAAA,kBACL,MAAM;AAAA,kBACN,OAAO,IAAI,IAAI,KAAK,KAAK;AAAA,gBAC3B;AAAA,cACF;AAAA,cAEA,SAAS;AACP,sBAAM,gBAAuB;AAC7B,sBAAM,IAAI;AAAA,kBACR,iCAAiC,aAAa;AAAA,gBAChD;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AACA,aAAO,CAAC,GAAG;AAAA,IACb;AAAA,IAEA,KAAK,aAAa;AAChB,YAAM,WAAW,yBAAyB;AAC1C,iBAAW,QAAQ,QAAQ,SAAS;AAClC,cAAM,OAAO,KAAK;AAClB,gBAAQ,MAAM;AAAA,UACZ,KAAK,QAAQ;AACX,qBAAS,mBAAmB,IAAI;AAChC;AAAA,UACF;AAAA,UACA,KAAK,aAAa;AAChB,qBAAS,gBAAgB,IAAI;AAC7B;AAAA,UACF;AAAA,UACA,SAAS;AACP,kBAAM,mBAA0B;AAChC,kBAAM,IAAI,MAAM,gCAAgC,gBAAgB,EAAE;AAAA,UACpE;AAAA,QACF;AAAA,MACF;AACA,aAAO,SAAS,YAAY;AAAA,IAC9B;AAAA,IAEA,SAAS;AACP,YAAM,gBAAuB;AAC7B,YAAM,IAAI,MAAM,yBAAyB,aAAa,EAAE;AAAA,IAC1D;AAAA,EACF;AACF;","names":[]}
+ {"version":3,"sources":["../src/edge.ts","../src/runtimes/edge/streams/assistantEncoderStream.ts","../src/runtimes/edge/converters/toLanguageModelMessages.ts","../src/runtimes/edge/createEdgeRuntimeAPI.ts"],"sourcesContent":["export { createEdgeRuntimeAPI } from \"./runtimes/edge/createEdgeRuntimeAPI\";\n","import {\n AssistantStreamChunkTuple,\n AssistantStreamChunkType,\n} from \"./AssistantStreamChunkType\";\nimport { LanguageModelV1StreamPart } from \"@ai-sdk/provider\";\n\nexport function assistantEncoderStream() {\n const toolCalls = new Set<string>();\n return new TransformStream<LanguageModelV1StreamPart, string>({\n transform(chunk, controller) {\n const chunkType = chunk.type;\n switch (chunkType) {\n case \"text-delta\": {\n controller.enqueue(\n formatStreamPart(\n AssistantStreamChunkType.TextDelta,\n chunk.textDelta,\n ),\n );\n break;\n }\n case \"tool-call-delta\": {\n if (!toolCalls.has(chunk.toolCallId)) {\n toolCalls.add(chunk.toolCallId);\n controller.enqueue(\n formatStreamPart(AssistantStreamChunkType.ToolCallBegin, {\n id: chunk.toolCallId,\n name: chunk.toolName,\n }),\n );\n }\n\n controller.enqueue(\n formatStreamPart(\n AssistantStreamChunkType.ToolCallArgsTextDelta,\n chunk.argsTextDelta,\n ),\n );\n break;\n }\n\n // ignore\n case \"tool-call\":\n break;\n\n case \"finish\": {\n const { type, ...rest } = chunk;\n controller.enqueue(\n formatStreamPart(AssistantStreamChunkType.Finish, rest),\n );\n break;\n }\n\n case \"error\": {\n controller.enqueue(\n formatStreamPart(AssistantStreamChunkType.Error, chunk.error),\n );\n break;\n }\n default: {\n const unhandledType: never = chunkType;\n throw new Error(`Unhandled chunk type: ${unhandledType}`);\n }\n }\n },\n });\n}\n\nexport function formatStreamPart(\n ...[code, value]: AssistantStreamChunkTuple\n): string {\n return `${code}:${JSON.stringify(value)}\\n`;\n}\n","import {\n LanguageModelV1ImagePart,\n LanguageModelV1Message,\n LanguageModelV1TextPart,\n LanguageModelV1ToolCallPart,\n LanguageModelV1ToolResultPart,\n} from \"@ai-sdk/provider\";\nimport { CoreMessage, ThreadMessage } from \"../../../types\";\nimport { TextContentPart, ToolCallContentPart } from \"../../../types\";\n\nconst assistantMessageSplitter = () => {\n const stash: LanguageModelV1Message[] = [];\n let assistantMessage = {\n role: \"assistant\" as const,\n content: [] as (LanguageModelV1TextPart | LanguageModelV1ToolCallPart)[],\n };\n let toolMessage = {\n role: \"tool\" as const,\n content: [] as LanguageModelV1ToolResultPart[],\n };\n\n return {\n addTextContentPart: (part: TextContentPart) => {\n if (toolMessage.content.length > 0) {\n stash.push(assistantMessage);\n stash.push(toolMessage);\n\n assistantMessage = {\n role: \"assistant\" as const,\n content: [] as (\n | LanguageModelV1TextPart\n | LanguageModelV1ToolCallPart\n )[],\n };\n\n toolMessage = {\n role: \"tool\" as const,\n content: [] as LanguageModelV1ToolResultPart[],\n };\n }\n\n assistantMessage.content.push(part);\n },\n addToolCallPart: (part: ToolCallContentPart) => {\n assistantMessage.content.push({\n type: \"tool-call\",\n toolCallId: part.toolCallId,\n toolName: part.toolName,\n args: part.args,\n });\n if (part.result) {\n toolMessage.content.push({\n type: \"tool-result\",\n toolCallId: part.toolCallId,\n toolName: part.toolName,\n result: part.result,\n // isError\n });\n }\n },\n getMessages: () => {\n if (toolMessage.content.length > 0) {\n return [...stash, assistantMessage, toolMessage];\n }\n\n return [...stash, assistantMessage];\n },\n 
};\n};\n\nexport function toLanguageModelMessages(\n message: readonly CoreMessage[] | readonly ThreadMessage[],\n): LanguageModelV1Message[] {\n return message.flatMap((message) => {\n const role = message.role;\n switch (role) {\n case \"system\": {\n return [{ role: \"system\", content: message.content[0].text }];\n }\n\n case \"user\": {\n const msg: LanguageModelV1Message = {\n role: \"user\",\n content: message.content.map(\n (part): LanguageModelV1TextPart | LanguageModelV1ImagePart => {\n const type = part.type;\n switch (type) {\n case \"text\": {\n return part;\n }\n\n case \"image\": {\n return {\n type: \"image\",\n image: new URL(part.image),\n };\n }\n\n default: {\n const unhandledType: \"ui\" = type;\n throw new Error(\n `Unspported content part type: ${unhandledType}`,\n );\n }\n }\n },\n ),\n };\n return [msg];\n }\n\n case \"assistant\": {\n const splitter = assistantMessageSplitter();\n for (const part of message.content) {\n const type = part.type;\n switch (type) {\n case \"text\": {\n splitter.addTextContentPart(part);\n break;\n }\n case \"tool-call\": {\n splitter.addToolCallPart(part);\n break;\n }\n default: {\n const unhandledType: \"ui\" = type;\n throw new Error(`Unhandled content part type: ${unhandledType}`);\n }\n }\n }\n return splitter.getMessages();\n }\n\n default: {\n const unhandledRole: never = role;\n throw new Error(`Unknown message role: ${unhandledRole}`);\n }\n }\n });\n}\n","import {\n LanguageModelV1,\n LanguageModelV1ToolChoice,\n LanguageModelV1FunctionTool,\n LanguageModelV1Prompt,\n LanguageModelV1CallOptions,\n LanguageModelV1CallWarning,\n} from \"@ai-sdk/provider\";\nimport { CoreMessage } from \"../../types/AssistantTypes\";\nimport { assistantEncoderStream } from \"./streams/assistantEncoderStream\";\nimport { EdgeRuntimeRequestOptions } from \"./EdgeRuntimeRequestOptions\";\nimport { toLanguageModelMessages } from \"./converters/toLanguageModelMessages\";\n\nexport const createEdgeRuntimeAPI = ({ model }: { model: LanguageModelV1 }) => {\n const POST = async (request: Request) => {\n const { system, messages, tools } =\n (await request.json()) as EdgeRuntimeRequestOptions;\n\n const { stream } = await streamMessage({\n model,\n abortSignal: request.signal,\n\n ...(system ? { system } : undefined),\n messages,\n tools,\n });\n\n return new Response(stream, {\n headers: {\n contentType: \"text/plain; charset=utf-8\",\n },\n });\n };\n return { POST };\n};\n\ntype StreamMessageResult = {\n stream: ReadableStream<Uint8Array>;\n warnings: LanguageModelV1CallWarning[] | undefined;\n rawResponse: unknown;\n};\n\nasync function streamMessage({\n model,\n system,\n messages,\n tools,\n toolChoice,\n ...options\n}: Omit<LanguageModelV1CallOptions, \"inputFormat\" | \"mode\" | \"prompt\"> & {\n model: LanguageModelV1;\n system?: string;\n messages: CoreMessage[];\n tools?: LanguageModelV1FunctionTool[];\n toolChoice?: LanguageModelV1ToolChoice;\n}): Promise<StreamMessageResult> {\n const { stream, warnings, rawResponse } = await model.doStream({\n inputFormat: \"messages\",\n mode: {\n type: \"regular\",\n ...(tools ? { tools } : undefined),\n ...(toolChoice ? 
{ toolChoice } : undefined),\n },\n prompt: convertToLanguageModelPrompt(system, messages),\n ...options,\n });\n\n return {\n stream: stream\n .pipeThrough(assistantEncoderStream())\n .pipeThrough(new TextEncoderStream()),\n warnings,\n rawResponse,\n };\n}\n\nexport function convertToLanguageModelPrompt(\n system: string | undefined,\n messages: CoreMessage[],\n): LanguageModelV1Prompt {\n const languageModelMessages: LanguageModelV1Prompt = [];\n\n if (system != null) {\n languageModelMessages.push({ role: \"system\", content: system });\n }\n languageModelMessages.push(...toLanguageModelMessages(messages));\n\n return languageModelMessages;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACMO,SAAS,yBAAyB;AACvC,QAAM,YAAY,oBAAI,IAAY;AAClC,SAAO,IAAI,gBAAmD;AAAA,IAC5D,UAAU,OAAO,YAAY;AAC3B,YAAM,YAAY,MAAM;AACxB,cAAQ,WAAW;AAAA,QACjB,KAAK,cAAc;AACjB,qBAAW;AAAA,YACT;AAAA;AAAA,cAEE,MAAM;AAAA,YACR;AAAA,UACF;AACA;AAAA,QACF;AAAA,QACA,KAAK,mBAAmB;AACtB,cAAI,CAAC,UAAU,IAAI,MAAM,UAAU,GAAG;AACpC,sBAAU,IAAI,MAAM,UAAU;AAC9B,uBAAW;AAAA,cACT,0CAAyD;AAAA,gBACvD,IAAI,MAAM;AAAA,gBACV,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH;AAAA,UACF;AAEA,qBAAW;AAAA,YACT;AAAA;AAAA,cAEE,MAAM;AAAA,YACR;AAAA,UACF;AACA;AAAA,QACF;AAAA,QAGA,KAAK;AACH;AAAA,QAEF,KAAK,UAAU;AACb,gBAAM,EAAE,MAAM,GAAG,KAAK,IAAI;AAC1B,qBAAW;AAAA,YACT,mCAAkD,IAAI;AAAA,UACxD;AACA;AAAA,QACF;AAAA,QAEA,KAAK,SAAS;AACZ,qBAAW;AAAA,YACT,kCAAiD,MAAM,KAAK;AAAA,UAC9D;AACA;AAAA,QACF;AAAA,QACA,SAAS;AACP,gBAAM,gBAAuB;AAC7B,gBAAM,IAAI,MAAM,yBAAyB,aAAa,EAAE;AAAA,QAC1D;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAEO,SAAS,oBACX,CAAC,MAAM,KAAK,GACP;AACR,SAAO,GAAG,IAAI,IAAI,KAAK,UAAU,KAAK,CAAC;AAAA;AACzC;;;AC9DA,IAAM,2BAA2B,MAAM;AACrC,QAAM,QAAkC,CAAC;AACzC,MAAI,mBAAmB;AAAA,IACrB,MAAM;AAAA,IACN,SAAS,CAAC;AAAA,EACZ;AACA,MAAI,cAAc;AAAA,IAChB,MAAM;AAAA,IACN,SAAS,CAAC;AAAA,EACZ;AAEA,SAAO;AAAA,IACL,oBAAoB,CAAC,SAA0B;AAC7C,UAAI,YAAY,QAAQ,SAAS,GAAG;AAClC,cAAM,KAAK,gBAAgB;AAC3B,cAAM,KAAK,WAAW;AAEtB,2BAAmB;AAAA,UACjB,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,QAIZ;AAEA,sBAAc;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,QACZ;AAAA,MACF;AAEA,uBAAiB,QAAQ,KAAK,IAAI;AAAA,IACpC;AAAA,IACA,iBAAiB,CAAC,SAA8B;AAC9C,uBAAiB,QAAQ,KAAK;AAAA,QAC5B,MAAM;AAAA,QACN,YAAY,KAAK;AAAA,QACjB,UAAU,KAAK;AAAA,QACf,MAAM,KAAK;AAAA,MACb,CAAC;AACD,UAAI,KAAK,QAAQ;AACf,oBAAY,QAAQ,KAAK;AAAA,UACvB,MAAM;AAAA,UACN,YAAY,KAAK;AAAA,UACjB,UAAU,KAAK;AAAA,UACf,QAAQ,KAAK;AAAA;AAAA,QAEf,CAAC;AAAA,MACH;AAAA,IACF;AAAA,IACA,aAAa,MAAM;AACjB,UAAI,YAAY,QAAQ,SAAS,GAAG;AAClC,eAAO,CAAC,GAAG,OAAO,kBAAkB,WAAW;AAAA,MACjD;AAEA,aAAO,CAAC,GAAG,OAAO,gBAAgB;AAAA,IACpC;AAAA,EACF;AACF;AAEO,SAAS,wBACd,SAC0B;AAC1B,SAAO,QAAQ,QAAQ,CAACA,aAAY;AAClC,UAAM,OAAOA,SAAQ;AACrB,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,eAAO,CAAC,EAAE,MAAM,UAAU,SAASA,SAAQ,QAAQ,CAAC,EAAE,KAAK,CAAC;AAAA,MAC9D;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,MAA8B;AAAA,UAClC,MAAM;AAAA,UACN,SAASA,SAAQ,QAAQ;AAAA,YACvB,CAAC,SAA6D;AAC5D,oBAAM,OAAO,KAAK;AAClB,sBAAQ,MAAM;AAAA,gBACZ,KAAK,QAAQ;AACX,yBAAO;AAAA,gBACT;AAAA,gBAEA,KAAK,SAAS;AACZ,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,OAAO,IAAI,IAAI,KAAK,KAAK;AAAA,kBAC3B;AAAA,gBACF;AAAA,gBAEA,SAAS;AACP,wBAAM,gBAAsB;AAC5B,wBAAM,IAAI;AAAA,oBACR,iCAAiC,aAAa;AAAA,kBAChD;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,QACF;AACA,eAAO,CAAC,GAAG;AAAA,MACb;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,WAAW,yBAAyB;AAC1C,mBAAW,QAAQA,SAAQ,SAAS;AAClC,gBAAM,OAAO,KAAK;AAClB,kBAAQ,MAAM;AAAA,YACZ,KAAK,QAAQ;AACX,uBAAS,mBAAmB,IAAI;AAChC;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,uBAAS,gBAAgB,IAAI;AAC7B;AAAA,YACF;AAAA,YACA,SAAS;AACP,oBAAM,gBAAsB;AAC5B,oBAAM,IAAI,MAAM,gCAAgC,aAAa,EAAE;AAAA,YACjE;AAAA,UACF;AAAA,QACF;AACA,eAAO,SAAS,Y
AAY;AAAA,MAC9B;AAAA,MAEA,SAAS;AACP,cAAM,gBAAuB;AAC7B,cAAM,IAAI,MAAM,yBAAyB,aAAa,EAAE;AAAA,MAC1D;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;AC9HO,IAAM,uBAAuB,CAAC,EAAE,MAAM,MAAkC;AAC7E,QAAM,OAAO,OAAO,YAAqB;AACvC,UAAM,EAAE,QAAQ,UAAU,MAAM,IAC7B,MAAM,QAAQ,KAAK;AAEtB,UAAM,EAAE,OAAO,IAAI,MAAM,cAAc;AAAA,MACrC;AAAA,MACA,aAAa,QAAQ;AAAA,MAErB,GAAI,SAAS,EAAE,OAAO,IAAI;AAAA,MAC1B;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO,IAAI,SAAS,QAAQ;AAAA,MAC1B,SAAS;AAAA,QACP,aAAa;AAAA,MACf;AAAA,IACF,CAAC;AAAA,EACH;AACA,SAAO,EAAE,KAAK;AAChB;AAQA,eAAe,cAAc;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,GAAG;AACL,GAMiC;AAC/B,QAAM,EAAE,QAAQ,UAAU,YAAY,IAAI,MAAM,MAAM,SAAS;AAAA,IAC7D,aAAa;AAAA,IACb,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,GAAI,QAAQ,EAAE,MAAM,IAAI;AAAA,MACxB,GAAI,aAAa,EAAE,WAAW,IAAI;AAAA,IACpC;AAAA,IACA,QAAQ,6BAA6B,QAAQ,QAAQ;AAAA,IACrD,GAAG;AAAA,EACL,CAAC;AAED,SAAO;AAAA,IACL,QAAQ,OACL,YAAY,uBAAuB,CAAC,EACpC,YAAY,IAAI,kBAAkB,CAAC;AAAA,IACtC;AAAA,IACA;AAAA,EACF;AACF;AAEO,SAAS,6BACd,QACA,UACuB;AACvB,QAAM,wBAA+C,CAAC;AAEtD,MAAI,UAAU,MAAM;AAClB,0BAAsB,KAAK,EAAE,MAAM,UAAU,SAAS,OAAO,CAAC;AAAA,EAChE;AACA,wBAAsB,KAAK,GAAG,wBAAwB,QAAQ,CAAC;AAE/D,SAAO;AACT;","names":["message"]}
package/dist/edge.mjs CHANGED
@@ -37,19 +37,13 @@ function assistantEncoderStream() {
  case "finish": {
  const { type, ...rest } = chunk;
  controller.enqueue(
- formatStreamPart(
- "F" /* Finish */,
- JSON.stringify(rest)
- )
+ formatStreamPart("F" /* Finish */, rest)
  );
  break;
  }
  case "error": {
  controller.enqueue(
- formatStreamPart(
- "E" /* Error */,
- JSON.stringify(chunk.error)
- )
+ formatStreamPart("E" /* Error */, chunk.error)
  );
  break;
  }
@@ -61,64 +55,12 @@ function assistantEncoderStream() {
  }
  });
  }
- function formatStreamPart(code, value) {
+ function formatStreamPart(...[code, value]) {
  return `${code}:${JSON.stringify(value)}
  `;
  }

- // src/runtimes/edge/createEdgeRuntimeAPI.ts
- var createEdgeRuntimeAPI = ({ model }) => {
- const POST = async (request) => {
- const { system, messages, tools } = await request.json();
- const { stream } = await streamMessage({
- model,
- abortSignal: request.signal,
- ...system ? { system } : void 0,
- messages,
- tools
- });
- return new Response(stream, {
- headers: {
- contentType: "text/plain; charset=utf-8"
- }
- });
- };
- return { POST };
- };
- async function streamMessage({
- model,
- system,
- messages,
- tools,
- toolChoice,
- ...options
- }) {
- const { stream, warnings, rawResponse } = await model.doStream({
- inputFormat: "messages",
- mode: {
- type: "regular",
- ...tools ? { tools } : void 0,
- ...toolChoice ? { toolChoice } : void 0
- },
- prompt: convertToLanguageModelPrompt(system, messages),
- ...options
- });
- return {
- stream: stream.pipeThrough(assistantEncoderStream()).pipeThrough(new TextEncoderStream()),
- warnings,
- rawResponse
- };
- }
- function convertToLanguageModelPrompt(system, messages) {
- const languageModelMessages = [];
- if (system != null) {
- languageModelMessages.push({ role: "system", content: system });
- }
- languageModelMessages.push(
- ...messages.flatMap(convertToLanguageModelMessage)
- );
- return languageModelMessages;
- }
+ // src/runtimes/edge/converters/toLanguageModelMessages.ts
  var assistantMessageSplitter = () => {
  const stash = [];
  let assistantMessage = {
@@ -170,66 +112,120 @@ var assistantMessageSplitter = () => {
  }
  };
  };
- function convertToLanguageModelMessage(message) {
- const role = message.role;
- switch (role) {
- case "system": {
- return [{ role: "system", content: message.content[0].text }];
- }
- case "user": {
- const msg = {
- role: "user",
- content: message.content.map(
- (part) => {
- const type = part.type;
- switch (type) {
- case "text": {
- return part;
- }
- case "image": {
- return {
- type: "image",
- image: new URL(part.image)
- };
- }
- default: {
- const unhandledType = type;
- throw new Error(
- `Unspported content part type: ${unhandledType}`
- );
+ function toLanguageModelMessages(message) {
+ return message.flatMap((message2) => {
+ const role = message2.role;
+ switch (role) {
+ case "system": {
+ return [{ role: "system", content: message2.content[0].text }];
+ }
+ case "user": {
+ const msg = {
+ role: "user",
+ content: message2.content.map(
+ (part) => {
+ const type = part.type;
+ switch (type) {
+ case "text": {
+ return part;
+ }
+ case "image": {
+ return {
+ type: "image",
+ image: new URL(part.image)
+ };
+ }
+ default: {
+ const unhandledType = type;
+ throw new Error(
+ `Unspported content part type: ${unhandledType}`
+ );
+ }
  }
  }
- }
- )
- };
- return [msg];
- }
- case "assistant": {
- const splitter = assistantMessageSplitter();
- for (const part of message.content) {
- const type = part.type;
- switch (type) {
- case "text": {
- splitter.addTextContentPart(part);
- break;
- }
- case "tool-call": {
- splitter.addToolCallPart(part);
- break;
- }
- default: {
- const _exhaustiveCheck = type;
- throw new Error(`Unhandled content part type: ${_exhaustiveCheck}`);
+ )
+ };
+ return [msg];
+ }
+ case "assistant": {
+ const splitter = assistantMessageSplitter();
+ for (const part of message2.content) {
+ const type = part.type;
+ switch (type) {
+ case "text": {
+ splitter.addTextContentPart(part);
+ break;
+ }
+ case "tool-call": {
+ splitter.addToolCallPart(part);
+ break;
+ }
+ default: {
+ const unhandledType = type;
+ throw new Error(`Unhandled content part type: ${unhandledType}`);
+ }
  }
  }
+ return splitter.getMessages();
+ }
+ default: {
+ const unhandledRole = role;
+ throw new Error(`Unknown message role: ${unhandledRole}`);
  }
- return splitter.getMessages();
- }
- default: {
- const unhandledRole = role;
- throw new Error(`Unknown message role: ${unhandledRole}`);
  }
+ });
+ }
+
+ // src/runtimes/edge/createEdgeRuntimeAPI.ts
+ var createEdgeRuntimeAPI = ({ model }) => {
+ const POST = async (request) => {
+ const { system, messages, tools } = await request.json();
+ const { stream } = await streamMessage({
+ model,
+ abortSignal: request.signal,
+ ...system ? { system } : void 0,
+ messages,
+ tools
+ });
+ return new Response(stream, {
+ headers: {
+ contentType: "text/plain; charset=utf-8"
+ }
+ });
+ };
+ return { POST };
+ };
+ async function streamMessage({
+ model,
+ system,
+ messages,
+ tools,
+ toolChoice,
+ ...options
+ }) {
+ const { stream, warnings, rawResponse } = await model.doStream({
+ inputFormat: "messages",
+ mode: {
+ type: "regular",
+ ...tools ? { tools } : void 0,
+ ...toolChoice ? { toolChoice } : void 0
+ },
+ prompt: convertToLanguageModelPrompt(system, messages),
+ ...options
+ });
+ return {
+ stream: stream.pipeThrough(assistantEncoderStream()).pipeThrough(new TextEncoderStream()),
+ warnings,
+ rawResponse
+ };
+ }
+ function convertToLanguageModelPrompt(system, messages) {
+ const languageModelMessages = [];
+ if (system != null) {
+ languageModelMessages.push({ role: "system", content: system });
  }
+ languageModelMessages.push(...toLanguageModelMessages(messages));
+ return languageModelMessages;
  }
  export {
  createEdgeRuntimeAPI
package/dist/edge.mjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/runtimes/edge/streams/assistantEncoderStream.ts","../src/runtimes/edge/createEdgeRuntimeAPI.ts"],"sourcesContent":["import { AssistantStreamChunkType } from \"./AssistantStreamChunkType\";\nimport { LanguageModelV1StreamPart } from \"@ai-sdk/provider\";\n\nexport function assistantEncoderStream() {\n const toolCalls = new Set<string>();\n return new TransformStream<LanguageModelV1StreamPart, string>({\n transform(chunk, controller) {\n const chunkType = chunk.type;\n switch (chunkType) {\n case \"text-delta\": {\n controller.enqueue(\n formatStreamPart(\n AssistantStreamChunkType.TextDelta,\n chunk.textDelta,\n ),\n );\n break;\n }\n case \"tool-call-delta\": {\n if (!toolCalls.has(chunk.toolCallId)) {\n toolCalls.add(chunk.toolCallId);\n controller.enqueue(\n formatStreamPart(AssistantStreamChunkType.ToolCallBegin, {\n id: chunk.toolCallId,\n name: chunk.toolName,\n }),\n );\n }\n\n controller.enqueue(\n formatStreamPart(\n AssistantStreamChunkType.ToolCallArgsTextDelta,\n chunk.argsTextDelta,\n ),\n );\n break;\n }\n\n // ignore\n case \"tool-call\":\n break;\n\n case \"finish\": {\n const { type, ...rest } = chunk;\n controller.enqueue(\n formatStreamPart(\n AssistantStreamChunkType.Finish,\n JSON.stringify(rest),\n ),\n );\n break;\n }\n\n case \"error\": {\n controller.enqueue(\n formatStreamPart(\n AssistantStreamChunkType.Error,\n JSON.stringify(chunk.error),\n ),\n );\n break;\n }\n default: {\n const unhandledType: never = chunkType;\n throw new Error(`Unhandled chunk type: ${unhandledType}`);\n }\n }\n },\n });\n}\n\nexport function formatStreamPart(\n code: AssistantStreamChunkType,\n value: any,\n): string {\n return `${code}:${JSON.stringify(value)}\\n`;\n}\n","import {\n LanguageModelV1,\n LanguageModelV1ToolChoice,\n LanguageModelV1FunctionTool,\n LanguageModelV1Message,\n LanguageModelV1Prompt,\n LanguageModelV1CallOptions,\n LanguageModelV1TextPart,\n LanguageModelV1CallWarning,\n LanguageModelV1ToolCallPart,\n LanguageModelV1ToolResultPart,\n LanguageModelV1ImagePart,\n} from \"@ai-sdk/provider\";\nimport {\n CoreThreadMessage,\n TextContentPart,\n ToolCallContentPart,\n} from \"../../types/AssistantTypes\";\nimport { assistantEncoderStream } from \"./streams/assistantEncoderStream\";\nimport { EdgeRuntimeRequestOptions } from \"./EdgeRuntimeRequestOptions\";\n\nexport const createEdgeRuntimeAPI = ({ model }: { model: LanguageModelV1 }) => {\n const POST = async (request: Request) => {\n const { system, messages, tools } =\n (await request.json()) as EdgeRuntimeRequestOptions;\n\n const { stream } = await streamMessage({\n model,\n abortSignal: request.signal,\n\n ...(system ? { system } : undefined),\n messages,\n tools,\n });\n\n return new Response(stream, {\n headers: {\n contentType: \"text/plain; charset=utf-8\",\n },\n });\n };\n return { POST };\n};\n\ntype StreamMessageResult = {\n stream: ReadableStream<Uint8Array>;\n warnings: LanguageModelV1CallWarning[] | undefined;\n rawResponse: unknown;\n};\n\nasync function streamMessage({\n model,\n system,\n messages,\n tools,\n toolChoice,\n ...options\n}: Omit<LanguageModelV1CallOptions, \"inputFormat\" | \"mode\" | \"prompt\"> & {\n model: LanguageModelV1;\n system?: string;\n messages: CoreThreadMessage[];\n tools?: LanguageModelV1FunctionTool[];\n toolChoice?: LanguageModelV1ToolChoice;\n}): Promise<StreamMessageResult> {\n const { stream, warnings, rawResponse } = await model.doStream({\n inputFormat: \"messages\",\n mode: {\n type: \"regular\",\n ...(tools ? 
{ tools } : undefined),\n ...(toolChoice ? { toolChoice } : undefined),\n },\n prompt: convertToLanguageModelPrompt(system, messages),\n ...options,\n });\n\n return {\n stream: stream\n .pipeThrough(assistantEncoderStream())\n .pipeThrough(new TextEncoderStream()),\n warnings,\n rawResponse,\n };\n}\n\nexport function convertToLanguageModelPrompt(\n system: string | undefined,\n messages: CoreThreadMessage[],\n): LanguageModelV1Prompt {\n const languageModelMessages: LanguageModelV1Prompt = [];\n\n if (system != null) {\n languageModelMessages.push({ role: \"system\", content: system });\n }\n languageModelMessages.push(\n ...messages.flatMap(convertToLanguageModelMessage),\n );\n\n return languageModelMessages;\n}\n\nconst assistantMessageSplitter = () => {\n const stash: LanguageModelV1Message[] = [];\n let assistantMessage = {\n role: \"assistant\" as const,\n content: [] as (LanguageModelV1TextPart | LanguageModelV1ToolCallPart)[],\n };\n let toolMessage = {\n role: \"tool\" as const,\n content: [] as LanguageModelV1ToolResultPart[],\n };\n\n return {\n addTextContentPart: (part: TextContentPart) => {\n if (toolMessage.content.length > 0) {\n stash.push(assistantMessage);\n stash.push(toolMessage);\n\n assistantMessage = {\n role: \"assistant\" as const,\n content: [] as (\n | LanguageModelV1TextPart\n | LanguageModelV1ToolCallPart\n )[],\n };\n\n toolMessage = {\n role: \"tool\" as const,\n content: [] as LanguageModelV1ToolResultPart[],\n };\n }\n\n assistantMessage.content.push(part);\n },\n addToolCallPart: (part: ToolCallContentPart) => {\n assistantMessage.content.push({\n type: \"tool-call\",\n toolCallId: part.toolCallId,\n toolName: part.toolName,\n args: part.args,\n });\n if (part.result) {\n toolMessage.content.push({\n type: \"tool-result\",\n toolCallId: part.toolCallId,\n toolName: part.toolName,\n result: part.result,\n // isError\n });\n }\n },\n getMessages: () => {\n if (toolMessage.content.length > 0) {\n return [...stash, assistantMessage, toolMessage];\n }\n\n return [...stash, assistantMessage];\n },\n };\n};\n\nexport function convertToLanguageModelMessage(\n message: CoreThreadMessage,\n): LanguageModelV1Message[] {\n const role = message.role;\n switch (role) {\n case \"system\": {\n return [{ role: \"system\", content: message.content[0].text }];\n }\n\n case \"user\": {\n const msg: LanguageModelV1Message = {\n role: \"user\",\n content: message.content.map(\n (part): LanguageModelV1TextPart | LanguageModelV1ImagePart => {\n const type = part.type;\n switch (type) {\n case \"text\": {\n return part;\n }\n\n case \"image\": {\n return {\n type: \"image\",\n image: new URL(part.image),\n };\n }\n\n default: {\n const unhandledType: never = type;\n throw new Error(\n `Unspported content part type: ${unhandledType}`,\n );\n }\n }\n },\n ),\n };\n return [msg];\n }\n\n case \"assistant\": {\n const splitter = assistantMessageSplitter();\n for (const part of message.content) {\n const type = part.type;\n switch (type) {\n case \"text\": {\n splitter.addTextContentPart(part);\n break;\n }\n case \"tool-call\": {\n splitter.addToolCallPart(part);\n break;\n }\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unhandled content part type: ${_exhaustiveCheck}`);\n }\n }\n }\n return splitter.getMessages();\n }\n\n default: {\n const unhandledRole: never = role;\n throw new Error(`Unknown message role: ${unhandledRole}`);\n }\n 
}\n}\n"],"mappings":";AAGO,SAAS,yBAAyB;AACvC,QAAM,YAAY,oBAAI,IAAY;AAClC,SAAO,IAAI,gBAAmD;AAAA,IAC5D,UAAU,OAAO,YAAY;AAC3B,YAAM,YAAY,MAAM;AACxB,cAAQ,WAAW;AAAA,QACjB,KAAK,cAAc;AACjB,qBAAW;AAAA,YACT;AAAA;AAAA,cAEE,MAAM;AAAA,YACR;AAAA,UACF;AACA;AAAA,QACF;AAAA,QACA,KAAK,mBAAmB;AACtB,cAAI,CAAC,UAAU,IAAI,MAAM,UAAU,GAAG;AACpC,sBAAU,IAAI,MAAM,UAAU;AAC9B,uBAAW;AAAA,cACT,0CAAyD;AAAA,gBACvD,IAAI,MAAM;AAAA,gBACV,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH;AAAA,UACF;AAEA,qBAAW;AAAA,YACT;AAAA;AAAA,cAEE,MAAM;AAAA,YACR;AAAA,UACF;AACA;AAAA,QACF;AAAA,QAGA,KAAK;AACH;AAAA,QAEF,KAAK,UAAU;AACb,gBAAM,EAAE,MAAM,GAAG,KAAK,IAAI;AAC1B,qBAAW;AAAA,YACT;AAAA;AAAA,cAEE,KAAK,UAAU,IAAI;AAAA,YACrB;AAAA,UACF;AACA;AAAA,QACF;AAAA,QAEA,KAAK,SAAS;AACZ,qBAAW;AAAA,YACT;AAAA;AAAA,cAEE,KAAK,UAAU,MAAM,KAAK;AAAA,YAC5B;AAAA,UACF;AACA;AAAA,QACF;AAAA,QACA,SAAS;AACP,gBAAM,gBAAuB;AAC7B,gBAAM,IAAI,MAAM,yBAAyB,aAAa,EAAE;AAAA,QAC1D;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAEO,SAAS,iBACd,MACA,OACQ;AACR,SAAO,GAAG,IAAI,IAAI,KAAK,UAAU,KAAK,CAAC;AAAA;AACzC;;;ACvDO,IAAM,uBAAuB,CAAC,EAAE,MAAM,MAAkC;AAC7E,QAAM,OAAO,OAAO,YAAqB;AACvC,UAAM,EAAE,QAAQ,UAAU,MAAM,IAC7B,MAAM,QAAQ,KAAK;AAEtB,UAAM,EAAE,OAAO,IAAI,MAAM,cAAc;AAAA,MACrC;AAAA,MACA,aAAa,QAAQ;AAAA,MAErB,GAAI,SAAS,EAAE,OAAO,IAAI;AAAA,MAC1B;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO,IAAI,SAAS,QAAQ;AAAA,MAC1B,SAAS;AAAA,QACP,aAAa;AAAA,MACf;AAAA,IACF,CAAC;AAAA,EACH;AACA,SAAO,EAAE,KAAK;AAChB;AAQA,eAAe,cAAc;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,GAAG;AACL,GAMiC;AAC/B,QAAM,EAAE,QAAQ,UAAU,YAAY,IAAI,MAAM,MAAM,SAAS;AAAA,IAC7D,aAAa;AAAA,IACb,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,GAAI,QAAQ,EAAE,MAAM,IAAI;AAAA,MACxB,GAAI,aAAa,EAAE,WAAW,IAAI;AAAA,IACpC;AAAA,IACA,QAAQ,6BAA6B,QAAQ,QAAQ;AAAA,IACrD,GAAG;AAAA,EACL,CAAC;AAED,SAAO;AAAA,IACL,QAAQ,OACL,YAAY,uBAAuB,CAAC,EACpC,YAAY,IAAI,kBAAkB,CAAC;AAAA,IACtC;AAAA,IACA;AAAA,EACF;AACF;AAEO,SAAS,6BACd,QACA,UACuB;AACvB,QAAM,wBAA+C,CAAC;AAEtD,MAAI,UAAU,MAAM;AAClB,0BAAsB,KAAK,EAAE,MAAM,UAAU,SAAS,OAAO,CAAC;AAAA,EAChE;AACA,wBAAsB;AAAA,IACpB,GAAG,SAAS,QAAQ,6BAA6B;AAAA,EACnD;AAEA,SAAO;AACT;AAEA,IAAM,2BAA2B,MAAM;AACrC,QAAM,QAAkC,CAAC;AACzC,MAAI,mBAAmB;AAAA,IACrB,MAAM;AAAA,IACN,SAAS,CAAC;AAAA,EACZ;AACA,MAAI,cAAc;AAAA,IAChB,MAAM;AAAA,IACN,SAAS,CAAC;AAAA,EACZ;AAEA,SAAO;AAAA,IACL,oBAAoB,CAAC,SAA0B;AAC7C,UAAI,YAAY,QAAQ,SAAS,GAAG;AAClC,cAAM,KAAK,gBAAgB;AAC3B,cAAM,KAAK,WAAW;AAEtB,2BAAmB;AAAA,UACjB,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,QAIZ;AAEA,sBAAc;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,QACZ;AAAA,MACF;AAEA,uBAAiB,QAAQ,KAAK,IAAI;AAAA,IACpC;AAAA,IACA,iBAAiB,CAAC,SAA8B;AAC9C,uBAAiB,QAAQ,KAAK;AAAA,QAC5B,MAAM;AAAA,QACN,YAAY,KAAK;AAAA,QACjB,UAAU,KAAK;AAAA,QACf,MAAM,KAAK;AAAA,MACb,CAAC;AACD,UAAI,KAAK,QAAQ;AACf,oBAAY,QAAQ,KAAK;AAAA,UACvB,MAAM;AAAA,UACN,YAAY,KAAK;AAAA,UACjB,UAAU,KAAK;AAAA,UACf,QAAQ,KAAK;AAAA;AAAA,QAEf,CAAC;AAAA,MACH;AAAA,IACF;AAAA,IACA,aAAa,MAAM;AACjB,UAAI,YAAY,QAAQ,SAAS,GAAG;AAClC,eAAO,CAAC,GAAG,OAAO,kBAAkB,WAAW;AAAA,MACjD;AAEA,aAAO,CAAC,GAAG,OAAO,gBAAgB;AAAA,IACpC;AAAA,EACF;AACF;AAEO,SAAS,8BACd,SAC0B;AAC1B,QAAM,OAAO,QAAQ;AACrB,UAAQ,MAAM;AAAA,IACZ,KAAK,UAAU;AACb,aAAO,CAAC,EAAE,MAAM,UAAU,SAAS,QAAQ,QAAQ,CAAC,EAAE,KAAK,CAAC;AAAA,IAC9D;AAAA,IAEA,KAAK,QAAQ;AACX,YAAM,MAA8B;AAAA,QAClC,MAAM;AAAA,QACN,SAAS,QAAQ,QAAQ;AAAA,UACvB,CAAC,SAA6D;AAC5D,kBAAM,OAAO,KAAK;AAClB,oBAAQ,MAAM;AAAA,cACZ,KAAK,QAAQ;AACX,uBAAO;AAAA,cACT;AAAA,cAEA,KAAK,SAAS;AACZ,uBAAO;AAAA,kBACL,MAAM;AAAA,kBACN,OAAO,IAAI,IAAI,KAAK,KAAK;AAAA,gBAC3B;AAAA,cACF;AAAA,cAEA,SAAS;AACP,sBAAM,gBAAuB;AAC7B,sBAAM,IAAI;AAAA,kBACR,iCAAiC,aAAa;AAAA,gBAChD;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AACA,aAAO,CAAC,GAAG;AAAA,IACb;AAAA,IAEA,KAAK,aAAa;A
AChB,YAAM,WAAW,yBAAyB;AAC1C,iBAAW,QAAQ,QAAQ,SAAS;AAClC,cAAM,OAAO,KAAK;AAClB,gBAAQ,MAAM;AAAA,UACZ,KAAK,QAAQ;AACX,qBAAS,mBAAmB,IAAI;AAChC;AAAA,UACF;AAAA,UACA,KAAK,aAAa;AAChB,qBAAS,gBAAgB,IAAI;AAC7B;AAAA,UACF;AAAA,UACA,SAAS;AACP,kBAAM,mBAA0B;AAChC,kBAAM,IAAI,MAAM,gCAAgC,gBAAgB,EAAE;AAAA,UACpE;AAAA,QACF;AAAA,MACF;AACA,aAAO,SAAS,YAAY;AAAA,IAC9B;AAAA,IAEA,SAAS;AACP,YAAM,gBAAuB;AAC7B,YAAM,IAAI,MAAM,yBAAyB,aAAa,EAAE;AAAA,IAC1D;AAAA,EACF;AACF;","names":[]}
+ {"version":3,"sources":["../src/runtimes/edge/streams/assistantEncoderStream.ts","../src/runtimes/edge/converters/toLanguageModelMessages.ts","../src/runtimes/edge/createEdgeRuntimeAPI.ts"],"sourcesContent":["import {\n AssistantStreamChunkTuple,\n AssistantStreamChunkType,\n} from \"./AssistantStreamChunkType\";\nimport { LanguageModelV1StreamPart } from \"@ai-sdk/provider\";\n\nexport function assistantEncoderStream() {\n const toolCalls = new Set<string>();\n return new TransformStream<LanguageModelV1StreamPart, string>({\n transform(chunk, controller) {\n const chunkType = chunk.type;\n switch (chunkType) {\n case \"text-delta\": {\n controller.enqueue(\n formatStreamPart(\n AssistantStreamChunkType.TextDelta,\n chunk.textDelta,\n ),\n );\n break;\n }\n case \"tool-call-delta\": {\n if (!toolCalls.has(chunk.toolCallId)) {\n toolCalls.add(chunk.toolCallId);\n controller.enqueue(\n formatStreamPart(AssistantStreamChunkType.ToolCallBegin, {\n id: chunk.toolCallId,\n name: chunk.toolName,\n }),\n );\n }\n\n controller.enqueue(\n formatStreamPart(\n AssistantStreamChunkType.ToolCallArgsTextDelta,\n chunk.argsTextDelta,\n ),\n );\n break;\n }\n\n // ignore\n case \"tool-call\":\n break;\n\n case \"finish\": {\n const { type, ...rest } = chunk;\n controller.enqueue(\n formatStreamPart(AssistantStreamChunkType.Finish, rest),\n );\n break;\n }\n\n case \"error\": {\n controller.enqueue(\n formatStreamPart(AssistantStreamChunkType.Error, chunk.error),\n );\n break;\n }\n default: {\n const unhandledType: never = chunkType;\n throw new Error(`Unhandled chunk type: ${unhandledType}`);\n }\n }\n },\n });\n}\n\nexport function formatStreamPart(\n ...[code, value]: AssistantStreamChunkTuple\n): string {\n return `${code}:${JSON.stringify(value)}\\n`;\n}\n","import {\n LanguageModelV1ImagePart,\n LanguageModelV1Message,\n LanguageModelV1TextPart,\n LanguageModelV1ToolCallPart,\n LanguageModelV1ToolResultPart,\n} from \"@ai-sdk/provider\";\nimport { CoreMessage, ThreadMessage } from \"../../../types\";\nimport { TextContentPart, ToolCallContentPart } from \"../../../types\";\n\nconst assistantMessageSplitter = () => {\n const stash: LanguageModelV1Message[] = [];\n let assistantMessage = {\n role: \"assistant\" as const,\n content: [] as (LanguageModelV1TextPart | LanguageModelV1ToolCallPart)[],\n };\n let toolMessage = {\n role: \"tool\" as const,\n content: [] as LanguageModelV1ToolResultPart[],\n };\n\n return {\n addTextContentPart: (part: TextContentPart) => {\n if (toolMessage.content.length > 0) {\n stash.push(assistantMessage);\n stash.push(toolMessage);\n\n assistantMessage = {\n role: \"assistant\" as const,\n content: [] as (\n | LanguageModelV1TextPart\n | LanguageModelV1ToolCallPart\n )[],\n };\n\n toolMessage = {\n role: \"tool\" as const,\n content: [] as LanguageModelV1ToolResultPart[],\n };\n }\n\n assistantMessage.content.push(part);\n },\n addToolCallPart: (part: ToolCallContentPart) => {\n assistantMessage.content.push({\n type: \"tool-call\",\n toolCallId: part.toolCallId,\n toolName: part.toolName,\n args: part.args,\n });\n if (part.result) {\n toolMessage.content.push({\n type: \"tool-result\",\n toolCallId: part.toolCallId,\n toolName: part.toolName,\n result: part.result,\n // isError\n });\n }\n },\n getMessages: () => {\n if (toolMessage.content.length > 0) {\n return [...stash, assistantMessage, toolMessage];\n }\n\n return [...stash, assistantMessage];\n },\n };\n};\n\nexport function toLanguageModelMessages(\n message: readonly CoreMessage[] | readonly 
ThreadMessage[],\n): LanguageModelV1Message[] {\n return message.flatMap((message) => {\n const role = message.role;\n switch (role) {\n case \"system\": {\n return [{ role: \"system\", content: message.content[0].text }];\n }\n\n case \"user\": {\n const msg: LanguageModelV1Message = {\n role: \"user\",\n content: message.content.map(\n (part): LanguageModelV1TextPart | LanguageModelV1ImagePart => {\n const type = part.type;\n switch (type) {\n case \"text\": {\n return part;\n }\n\n case \"image\": {\n return {\n type: \"image\",\n image: new URL(part.image),\n };\n }\n\n default: {\n const unhandledType: \"ui\" = type;\n throw new Error(\n `Unspported content part type: ${unhandledType}`,\n );\n }\n }\n },\n ),\n };\n return [msg];\n }\n\n case \"assistant\": {\n const splitter = assistantMessageSplitter();\n for (const part of message.content) {\n const type = part.type;\n switch (type) {\n case \"text\": {\n splitter.addTextContentPart(part);\n break;\n }\n case \"tool-call\": {\n splitter.addToolCallPart(part);\n break;\n }\n default: {\n const unhandledType: \"ui\" = type;\n throw new Error(`Unhandled content part type: ${unhandledType}`);\n }\n }\n }\n return splitter.getMessages();\n }\n\n default: {\n const unhandledRole: never = role;\n throw new Error(`Unknown message role: ${unhandledRole}`);\n }\n }\n });\n}\n","import {\n LanguageModelV1,\n LanguageModelV1ToolChoice,\n LanguageModelV1FunctionTool,\n LanguageModelV1Prompt,\n LanguageModelV1CallOptions,\n LanguageModelV1CallWarning,\n} from \"@ai-sdk/provider\";\nimport { CoreMessage } from \"../../types/AssistantTypes\";\nimport { assistantEncoderStream } from \"./streams/assistantEncoderStream\";\nimport { EdgeRuntimeRequestOptions } from \"./EdgeRuntimeRequestOptions\";\nimport { toLanguageModelMessages } from \"./converters/toLanguageModelMessages\";\n\nexport const createEdgeRuntimeAPI = ({ model }: { model: LanguageModelV1 }) => {\n const POST = async (request: Request) => {\n const { system, messages, tools } =\n (await request.json()) as EdgeRuntimeRequestOptions;\n\n const { stream } = await streamMessage({\n model,\n abortSignal: request.signal,\n\n ...(system ? { system } : undefined),\n messages,\n tools,\n });\n\n return new Response(stream, {\n headers: {\n contentType: \"text/plain; charset=utf-8\",\n },\n });\n };\n return { POST };\n};\n\ntype StreamMessageResult = {\n stream: ReadableStream<Uint8Array>;\n warnings: LanguageModelV1CallWarning[] | undefined;\n rawResponse: unknown;\n};\n\nasync function streamMessage({\n model,\n system,\n messages,\n tools,\n toolChoice,\n ...options\n}: Omit<LanguageModelV1CallOptions, \"inputFormat\" | \"mode\" | \"prompt\"> & {\n model: LanguageModelV1;\n system?: string;\n messages: CoreMessage[];\n tools?: LanguageModelV1FunctionTool[];\n toolChoice?: LanguageModelV1ToolChoice;\n}): Promise<StreamMessageResult> {\n const { stream, warnings, rawResponse } = await model.doStream({\n inputFormat: \"messages\",\n mode: {\n type: \"regular\",\n ...(tools ? { tools } : undefined),\n ...(toolChoice ? 
{ toolChoice } : undefined),\n },\n prompt: convertToLanguageModelPrompt(system, messages),\n ...options,\n });\n\n return {\n stream: stream\n .pipeThrough(assistantEncoderStream())\n .pipeThrough(new TextEncoderStream()),\n warnings,\n rawResponse,\n };\n}\n\nexport function convertToLanguageModelPrompt(\n system: string | undefined,\n messages: CoreMessage[],\n): LanguageModelV1Prompt {\n const languageModelMessages: LanguageModelV1Prompt = [];\n\n if (system != null) {\n languageModelMessages.push({ role: \"system\", content: system });\n }\n languageModelMessages.push(...toLanguageModelMessages(messages));\n\n return languageModelMessages;\n}\n"],"mappings":";AAMO,SAAS,yBAAyB;AACvC,QAAM,YAAY,oBAAI,IAAY;AAClC,SAAO,IAAI,gBAAmD;AAAA,IAC5D,UAAU,OAAO,YAAY;AAC3B,YAAM,YAAY,MAAM;AACxB,cAAQ,WAAW;AAAA,QACjB,KAAK,cAAc;AACjB,qBAAW;AAAA,YACT;AAAA;AAAA,cAEE,MAAM;AAAA,YACR;AAAA,UACF;AACA;AAAA,QACF;AAAA,QACA,KAAK,mBAAmB;AACtB,cAAI,CAAC,UAAU,IAAI,MAAM,UAAU,GAAG;AACpC,sBAAU,IAAI,MAAM,UAAU;AAC9B,uBAAW;AAAA,cACT,0CAAyD;AAAA,gBACvD,IAAI,MAAM;AAAA,gBACV,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH;AAAA,UACF;AAEA,qBAAW;AAAA,YACT;AAAA;AAAA,cAEE,MAAM;AAAA,YACR;AAAA,UACF;AACA;AAAA,QACF;AAAA,QAGA,KAAK;AACH;AAAA,QAEF,KAAK,UAAU;AACb,gBAAM,EAAE,MAAM,GAAG,KAAK,IAAI;AAC1B,qBAAW;AAAA,YACT,mCAAkD,IAAI;AAAA,UACxD;AACA;AAAA,QACF;AAAA,QAEA,KAAK,SAAS;AACZ,qBAAW;AAAA,YACT,kCAAiD,MAAM,KAAK;AAAA,UAC9D;AACA;AAAA,QACF;AAAA,QACA,SAAS;AACP,gBAAM,gBAAuB;AAC7B,gBAAM,IAAI,MAAM,yBAAyB,aAAa,EAAE;AAAA,QAC1D;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAEO,SAAS,oBACX,CAAC,MAAM,KAAK,GACP;AACR,SAAO,GAAG,IAAI,IAAI,KAAK,UAAU,KAAK,CAAC;AAAA;AACzC;;;AC9DA,IAAM,2BAA2B,MAAM;AACrC,QAAM,QAAkC,CAAC;AACzC,MAAI,mBAAmB;AAAA,IACrB,MAAM;AAAA,IACN,SAAS,CAAC;AAAA,EACZ;AACA,MAAI,cAAc;AAAA,IAChB,MAAM;AAAA,IACN,SAAS,CAAC;AAAA,EACZ;AAEA,SAAO;AAAA,IACL,oBAAoB,CAAC,SAA0B;AAC7C,UAAI,YAAY,QAAQ,SAAS,GAAG;AAClC,cAAM,KAAK,gBAAgB;AAC3B,cAAM,KAAK,WAAW;AAEtB,2BAAmB;AAAA,UACjB,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,QAIZ;AAEA,sBAAc;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,QACZ;AAAA,MACF;AAEA,uBAAiB,QAAQ,KAAK,IAAI;AAAA,IACpC;AAAA,IACA,iBAAiB,CAAC,SAA8B;AAC9C,uBAAiB,QAAQ,KAAK;AAAA,QAC5B,MAAM;AAAA,QACN,YAAY,KAAK;AAAA,QACjB,UAAU,KAAK;AAAA,QACf,MAAM,KAAK;AAAA,MACb,CAAC;AACD,UAAI,KAAK,QAAQ;AACf,oBAAY,QAAQ,KAAK;AAAA,UACvB,MAAM;AAAA,UACN,YAAY,KAAK;AAAA,UACjB,UAAU,KAAK;AAAA,UACf,QAAQ,KAAK;AAAA;AAAA,QAEf,CAAC;AAAA,MACH;AAAA,IACF;AAAA,IACA,aAAa,MAAM;AACjB,UAAI,YAAY,QAAQ,SAAS,GAAG;AAClC,eAAO,CAAC,GAAG,OAAO,kBAAkB,WAAW;AAAA,MACjD;AAEA,aAAO,CAAC,GAAG,OAAO,gBAAgB;AAAA,IACpC;AAAA,EACF;AACF;AAEO,SAAS,wBACd,SAC0B;AAC1B,SAAO,QAAQ,QAAQ,CAACA,aAAY;AAClC,UAAM,OAAOA,SAAQ;AACrB,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,eAAO,CAAC,EAAE,MAAM,UAAU,SAASA,SAAQ,QAAQ,CAAC,EAAE,KAAK,CAAC;AAAA,MAC9D;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,MAA8B;AAAA,UAClC,MAAM;AAAA,UACN,SAASA,SAAQ,QAAQ;AAAA,YACvB,CAAC,SAA6D;AAC5D,oBAAM,OAAO,KAAK;AAClB,sBAAQ,MAAM;AAAA,gBACZ,KAAK,QAAQ;AACX,yBAAO;AAAA,gBACT;AAAA,gBAEA,KAAK,SAAS;AACZ,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,OAAO,IAAI,IAAI,KAAK,KAAK;AAAA,kBAC3B;AAAA,gBACF;AAAA,gBAEA,SAAS;AACP,wBAAM,gBAAsB;AAC5B,wBAAM,IAAI;AAAA,oBACR,iCAAiC,aAAa;AAAA,kBAChD;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,QACF;AACA,eAAO,CAAC,GAAG;AAAA,MACb;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,WAAW,yBAAyB;AAC1C,mBAAW,QAAQA,SAAQ,SAAS;AAClC,gBAAM,OAAO,KAAK;AAClB,kBAAQ,MAAM;AAAA,YACZ,KAAK,QAAQ;AACX,uBAAS,mBAAmB,IAAI;AAChC;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,uBAAS,gBAAgB,IAAI;AAC7B;AAAA,YACF;AAAA,YACA,SAAS;AACP,oBAAM,gBAAsB;AAC5B,oBAAM,IAAI,MAAM,gCAAgC,aAAa,EAAE;AAAA,YACjE;AAAA,UACF;AAAA,QACF;AACA,eAAO,SAAS,YAAY;AAAA,MAC9B;AAAA,MAEA,SAAS;AACP,cAAM,gBAAuB
;AAC7B,cAAM,IAAI,MAAM,yBAAyB,aAAa,EAAE;AAAA,MAC1D;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;AC9HO,IAAM,uBAAuB,CAAC,EAAE,MAAM,MAAkC;AAC7E,QAAM,OAAO,OAAO,YAAqB;AACvC,UAAM,EAAE,QAAQ,UAAU,MAAM,IAC7B,MAAM,QAAQ,KAAK;AAEtB,UAAM,EAAE,OAAO,IAAI,MAAM,cAAc;AAAA,MACrC;AAAA,MACA,aAAa,QAAQ;AAAA,MAErB,GAAI,SAAS,EAAE,OAAO,IAAI;AAAA,MAC1B;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO,IAAI,SAAS,QAAQ;AAAA,MAC1B,SAAS;AAAA,QACP,aAAa;AAAA,MACf;AAAA,IACF,CAAC;AAAA,EACH;AACA,SAAO,EAAE,KAAK;AAChB;AAQA,eAAe,cAAc;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,GAAG;AACL,GAMiC;AAC/B,QAAM,EAAE,QAAQ,UAAU,YAAY,IAAI,MAAM,MAAM,SAAS;AAAA,IAC7D,aAAa;AAAA,IACb,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,GAAI,QAAQ,EAAE,MAAM,IAAI;AAAA,MACxB,GAAI,aAAa,EAAE,WAAW,IAAI;AAAA,IACpC;AAAA,IACA,QAAQ,6BAA6B,QAAQ,QAAQ;AAAA,IACrD,GAAG;AAAA,EACL,CAAC;AAED,SAAO;AAAA,IACL,QAAQ,OACL,YAAY,uBAAuB,CAAC,EACpC,YAAY,IAAI,kBAAkB,CAAC;AAAA,IACtC;AAAA,IACA;AAAA,EACF;AACF;AAEO,SAAS,6BACd,QACA,UACuB;AACvB,QAAM,wBAA+C,CAAC;AAEtD,MAAI,UAAU,MAAM;AAClB,0BAAsB,KAAK,EAAE,MAAM,UAAU,SAAS,OAAO,CAAC;AAAA,EAChE;AACA,wBAAsB,KAAK,GAAG,wBAAwB,QAAQ,CAAC;AAE/D,SAAO;AACT;","names":["message"]}