@providerprotocol/ai 0.0.1 → 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,537 @@
1
+ import {
2
+ createProvider
3
+ } from "../chunk-Y6Q7JCNP.js";
4
+ import {
5
+ AssistantMessage,
6
+ isAssistantMessage,
7
+ isToolResultMessage,
8
+ isUserMessage
9
+ } from "../chunk-QUUX4G7U.js";
10
+ import {
11
+ UPPError,
12
+ doFetch,
13
+ doStreamFetch,
14
+ normalizeHttpError,
15
+ resolveApiKey
16
+ } from "../chunk-SUNYWHTH.js";
17
+
18
+ // src/providers/ollama/transform.ts
19
/**
 * Build an Ollama /api/chat request body from a UPP LLM request.
 * Runtime tuning parameters are gathered under `options`; a handful of
 * request-level fields (keep_alive, think, logprobs, top_logprobs) stay
 * at the top level, matching the Ollama API shape.
 */
function transformRequest(request, modelId) {
  const params = request.params ?? {};
  const ollamaRequest = {
    model: modelId,
    messages: transformMessages(request.messages, request.system)
  };
  // Sampling / runner parameters that belong under `options`.
  const optionKeys = [
    "num_predict", "temperature", "top_p", "top_k", "min_p", "typical_p",
    "repeat_penalty", "repeat_last_n", "presence_penalty", "frequency_penalty",
    "mirostat", "mirostat_eta", "mirostat_tau", "penalize_newline", "stop",
    "seed", "num_keep", "num_ctx", "num_batch", "num_thread", "num_gpu",
    "main_gpu", "low_vram", "f16_kv", "use_mmap", "use_mlock", "vocab_only",
    "numa", "tfs_z"
  ];
  const options = {};
  for (const key of optionKeys) {
    if (params[key] !== void 0) options[key] = params[key];
  }
  if (Object.keys(options).length > 0) {
    ollamaRequest.options = options;
  }
  // Top-level (non-`options`) request parameters.
  for (const key of ["keep_alive", "think", "logprobs", "top_logprobs"]) {
    if (params[key] !== void 0) ollamaRequest[key] = params[key];
  }
  if (request.tools && request.tools.length > 0) {
    ollamaRequest.tools = request.tools.map(transformTool);
  }
  // Structured output: Ollama accepts a schema via the `format` field.
  if (request.structure) {
    ollamaRequest.format = request.structure;
  }
  return ollamaRequest;
}
78
/**
 * Convert UPP messages (plus an optional system prompt) into Ollama's
 * message array. User image blocks become base64 `images` entries;
 * URL images degrade to a textual marker since Ollama has no URL support.
 */
function transformMessages(messages, system) {
  const out = [];
  // System prompt is sent as the leading "system" message.
  if (system) {
    out.push({ role: "system", content: system });
  }
  for (const msg of messages) {
    if (isUserMessage(msg)) {
      const texts = [];
      const images = [];
      for (const block of msg.content) {
        if (block.type === "text") {
          texts.push(block.text);
          continue;
        }
        if (block.type !== "image") continue;
        const source = block.source;
        if (source.type === "base64") {
          images.push(source.data);
        } else if (source.type === "bytes") {
          // Ollama wants base64 strings; encode the raw bytes.
          let binary = "";
          for (const byte of source.data) binary += String.fromCharCode(byte);
          images.push(btoa(binary));
        } else if (source.type === "url") {
          // No direct URL-image support; surface the URL as text instead.
          texts.push(`[Image: ${source.url}]`);
        }
      }
      const userMessage = { role: "user", content: texts.join("\n") };
      if (images.length > 0) userMessage.images = images;
      out.push(userMessage);
    } else if (isAssistantMessage(msg)) {
      const parts = [];
      for (const block of msg.content) {
        if (block.type === "text") parts.push(block.text);
      }
      const assistantMessage = { role: "assistant", content: parts.join("\n") };
      if (msg.toolCalls && msg.toolCalls.length > 0) {
        assistantMessage.tool_calls = msg.toolCalls.map((call) => ({
          function: {
            name: call.toolName,
            arguments: call.arguments
          }
        }));
      }
      out.push(assistantMessage);
    } else if (isToolResultMessage(msg)) {
      // Each tool result becomes its own "tool"-role message.
      for (const result of msg.results) {
        out.push({
          role: "tool",
          tool_name: result.toolCallId,
          // In our UPP, toolCallId maps to tool name for Ollama
          content: typeof result.result === "string" ? result.result : JSON.stringify(result.result)
        });
      }
    }
  }
  return out;
}
143
/**
 * Map a UPP Tool definition onto Ollama's function-tool schema.
 * Only `properties` and `required` are carried over from the UPP
 * parameter schema; the wrapper is always `{ type: "object" }`.
 */
function transformTool(tool) {
  const { name, description, parameters } = tool;
  return {
    type: "function",
    function: {
      name,
      description,
      parameters: {
        type: "object",
        properties: parameters.properties,
        required: parameters.required
      }
    }
  };
}
157
/**
 * Transform a non-streaming Ollama /api/chat response into a UPP LLMResponse.
 *
 * - Text content becomes a single text block; the raw text is also
 *   speculatively JSON.parse'd so structured-output callers receive `data`.
 * - Ollama has no tool-call IDs, so the function name doubles as toolCallId.
 * - Stop-reason mapping now mirrors the streaming path
 *   (buildResponseFromState): "length" -> max_tokens, any tool calls ->
 *   tool_use, otherwise end_turn.
 */
function transformResponse(data) {
  const textContent = [];
  const toolCalls = [];
  let structuredData;
  if (data.message.content) {
    textContent.push({ type: "text", text: data.message.content });
    try {
      structuredData = JSON.parse(data.message.content);
    } catch {
      // Not JSON — fine; the reply may simply be plain text.
    }
  }
  if (data.message.tool_calls) {
    for (const call of data.message.tool_calls) {
      toolCalls.push({
        toolCallId: call.function.name,
        // Ollama doesn't have separate IDs, use name
        toolName: call.function.name,
        arguments: call.function.arguments
      });
    }
  }
  const message = new AssistantMessage(
    textContent,
    toolCalls.length > 0 ? toolCalls : void 0,
    {
      metadata: {
        ollama: {
          model: data.model,
          created_at: data.created_at,
          done_reason: data.done_reason,
          thinking: data.message.thinking,
          total_duration: data.total_duration,
          load_duration: data.load_duration,
          prompt_eval_duration: data.prompt_eval_duration,
          eval_duration: data.eval_duration,
          logprobs: data.logprobs
        }
      }
    }
  );
  const usage = {
    inputTokens: data.prompt_eval_count ?? 0,
    outputTokens: data.eval_count ?? 0,
    totalTokens: (data.prompt_eval_count ?? 0) + (data.eval_count ?? 0)
  };
  // FIX: previously done_reason === "stop" forced "end_turn" even when tool
  // calls were present, diverging from the streaming path
  // (buildResponseFromState) which reports "tool_use" in that case. Check
  // tool calls before falling back to end_turn so both paths agree.
  let stopReason = "end_turn";
  if (data.done_reason === "length") {
    stopReason = "max_tokens";
  } else if (toolCalls.length > 0) {
    stopReason = "tool_use";
  }
  return {
    message,
    usage,
    stopReason,
    data: structuredData
  };
}
217
/**
 * Create a fresh accumulator for a streaming Ollama response.
 * transformStreamChunk mutates this object as chunks arrive;
 * buildResponseFromState converts it into the final LLMResponse.
 */
function createStreamState() {
  return {
    // Filled in from the first chunk.
    model: "",
    createdAt: "",
    isFirstChunk: true,
    // Accumulated text and reasoning deltas.
    content: "",
    thinking: "",
    // Tool calls collected across all chunks.
    toolCalls: [],
    // Metrics from the final (done) chunk.
    doneReason: null,
    promptEvalCount: 0,
    evalCount: 0,
    totalDuration: 0
  };
}
231
/**
 * Convert one Ollama NDJSON stream chunk into zero or more UPP stream
 * events, accumulating text/thinking/tool-call state along the way.
 * Event order per chunk: message_start (first chunk only), text_delta,
 * reasoning_delta, tool_call_delta(s), then message_stop on the final chunk.
 */
function transformStreamChunk(chunk, state) {
  const events = [];
  // The very first chunk opens the logical message.
  if (state.isFirstChunk) {
    state.isFirstChunk = false;
    state.model = chunk.model;
    state.createdAt = chunk.created_at;
    events.push({ type: "message_start", index: 0, delta: {} });
  }
  const message = chunk.message;
  if (message) {
    if (message.content) {
      state.content += message.content;
      events.push({ type: "text_delta", index: 0, delta: { text: message.content } });
    }
    if (message.thinking) {
      state.thinking += message.thinking;
      events.push({ type: "reasoning_delta", index: 0, delta: { text: message.thinking } });
    }
    // Tool calls typically arrive in the final chunk; index tracks the
    // position of each call within the accumulated list.
    for (const call of message.tool_calls ?? []) {
      state.toolCalls.push({
        name: call.function.name,
        args: call.function.arguments
      });
      events.push({
        type: "tool_call_delta",
        index: state.toolCalls.length - 1,
        delta: {
          toolCallId: call.function.name,
          toolName: call.function.name,
          argumentsJson: JSON.stringify(call.function.arguments)
        }
      });
    }
  }
  // The final chunk carries usage metrics and the done reason.
  if (chunk.done) {
    state.doneReason = chunk.done_reason ?? null;
    state.promptEvalCount = chunk.prompt_eval_count ?? 0;
    state.evalCount = chunk.eval_count ?? 0;
    state.totalDuration = chunk.total_duration ?? 0;
    events.push({ type: "message_stop", index: 0, delta: {} });
  }
  return events;
}
283
/**
 * Assemble the final LLMResponse from the accumulated stream state.
 * Mirrors transformResponse for the non-streaming path, but only has the
 * subset of metadata that streaming exposes.
 */
function buildResponseFromState(state) {
  const textContent = [];
  const toolCalls = [];
  let structuredData;
  if (state.content) {
    textContent.push({ type: "text", text: state.content });
    // Opportunistically parse the accumulated text for structured output.
    try {
      structuredData = JSON.parse(state.content);
    } catch {
      // Not JSON — that's fine.
    }
  }
  for (const { name, args } of state.toolCalls) {
    toolCalls.push({ toolCallId: name, toolName: name, arguments: args });
  }
  const metadata = {
    ollama: {
      model: state.model,
      created_at: state.createdAt,
      done_reason: state.doneReason,
      thinking: state.thinking || void 0,
      total_duration: state.totalDuration
    }
  };
  const message = new AssistantMessage(
    textContent,
    toolCalls.length > 0 ? toolCalls : void 0,
    { metadata }
  );
  const usage = {
    inputTokens: state.promptEvalCount,
    outputTokens: state.evalCount,
    totalTokens: state.promptEvalCount + state.evalCount
  };
  let stopReason;
  if (state.doneReason === "length") {
    stopReason = "max_tokens";
  } else if (toolCalls.length > 0) {
    stopReason = "tool_use";
  } else {
    stopReason = "end_turn";
  }
  return { message, usage, stopReason, data: structuredData };
}
334
+
335
+ // src/providers/ollama/llm.ts
336
// Default local Ollama server address, used when config.baseUrl is absent.
var OLLAMA_DEFAULT_URL = "http://localhost:11434";
// Capability flags advertised by this provider.
// NOTE (from the original TS source embedded in the sourcemap): tool calling
// is deliberately disabled here — Ollama recommends its OpenAI-compatible
// API (/v1/chat/completions) for tools; use the OpenAI provider with baseUrl
// pointed at Ollama instead.
var OLLAMA_CAPABILITIES = {
  streaming: true,
  tools: false,
  structuredOutput: true,
  imageInput: true,
  videoInput: false,
  audioInput: false
};
345
/**
 * Parse Ollama's newline-delimited JSON stream into chunk objects.
 * Incomplete trailing data is buffered between reads; blank and
 * malformed lines are skipped; any leftover buffer is parsed at EOF.
 * The reader lock is always released, even on error.
 */
async function* parseOllamaStream(body) {
  const reader = body.getReader();
  const decoder = new TextDecoder();
  let pending = "";
  // Returns the parsed object, or undefined for blank/invalid lines.
  const parseLine = (text) => {
    const trimmed = text.trim();
    if (!trimmed) return void 0;
    try {
      return JSON.parse(trimmed);
    } catch {
      return void 0;
    }
  };
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      pending += decoder.decode(value, { stream: true });
      const pieces = pending.split("\n");
      // The last piece may be an incomplete line — keep it buffered.
      pending = pieces.pop() ?? "";
      for (const piece of pieces) {
        const chunk = parseLine(piece);
        if (chunk !== void 0) yield chunk;
      }
    }
    // Flush whatever remains after the stream ends.
    const tail = parseLine(pending);
    if (tail !== void 0) yield tail;
  } finally {
    reader.releaseLock();
  }
}
377
/**
 * Create the Ollama LLM handler consumed by createProvider().
 * The provider reference is injected after construction via _setProvider;
 * bind() then produces a model object exposing complete() and stream().
 */
function createLLMHandler() {
  // Injected by createProvider() after construction.
  let providerRef = null;
  return {
    _setProvider(provider) {
      providerRef = provider;
    },
    /**
     * Bind a model id, returning an object with complete()/stream().
     * Throws UPPError if the handler was not wired through createProvider().
     */
    bind(modelId) {
      if (!providerRef) {
        throw new UPPError(
          "Provider reference not set. Handler must be used with createProvider().",
          "INVALID_REQUEST",
          "ollama",
          "llm"
        );
      }
      const model = {
        modelId,
        capabilities: OLLAMA_CAPABILITIES,
        get provider() {
          return providerRef;
        },
        // Non-streaming completion: POST /api/chat with stream=false.
        async complete(request) {
          // Ollama doesn't require an API key by default; resolution failure
          // is deliberately swallowed and auth is simply omitted.
          let apiKey;
          try {
            apiKey = await resolveApiKey(
              request.config,
              "OLLAMA_API_KEY",
              "ollama",
              "llm"
            );
          } catch {
          }
          const baseUrl = request.config.baseUrl ?? OLLAMA_DEFAULT_URL;
          const url = `${baseUrl}/api/chat`;
          const body = transformRequest(request, modelId);
          body.stream = false;
          const headers = {
            "Content-Type": "application/json"
          };
          if (apiKey) {
            headers["Authorization"] = `Bearer ${apiKey}`;
          }
          const response = await doFetch(
            url,
            {
              method: "POST",
              headers,
              body: JSON.stringify(body),
              signal: request.signal
            },
            request.config,
            "ollama",
            "llm"
          );
          const data = await response.json();
          return transformResponse(data);
        },
        // Streaming completion: returns an async-iterable of stream events
        // plus a `response` promise that settles with the final LLMResponse.
        stream(request) {
          const state = createStreamState();
          let responseResolve;
          let responseReject;
          const responsePromise = new Promise((resolve, reject) => {
            responseResolve = resolve;
            responseReject = reject;
          });
          // NOTE: every failure path rejects responsePromise BEFORE throwing,
          // so consumers of either the iterator or the promise see the error.
          async function* generateEvents() {
            try {
              // API key is optional for Ollama (see complete()).
              let apiKey;
              try {
                apiKey = await resolveApiKey(
                  request.config,
                  "OLLAMA_API_KEY",
                  "ollama",
                  "llm"
                );
              } catch {
              }
              const baseUrl = request.config.baseUrl ?? OLLAMA_DEFAULT_URL;
              const url = `${baseUrl}/api/chat`;
              const body = transformRequest(request, modelId);
              body.stream = true;
              const headers = {
                "Content-Type": "application/json"
              };
              if (apiKey) {
                headers["Authorization"] = `Bearer ${apiKey}`;
              }
              const response = await doStreamFetch(
                url,
                {
                  method: "POST",
                  headers,
                  body: JSON.stringify(body),
                  signal: request.signal
                },
                request.config,
                "ollama",
                "llm"
              );
              if (!response.ok) {
                const error = await normalizeHttpError(response, "ollama", "llm");
                responseReject(error);
                throw error;
              }
              if (!response.body) {
                const error = new UPPError(
                  "No response body for streaming request",
                  "PROVIDER_ERROR",
                  "ollama",
                  "llm"
                );
                responseReject(error);
                throw error;
              }
              // Consume Ollama's newline-delimited JSON stream; chunks may
              // carry an in-band `error` field which we surface as UPPError.
              for await (const chunk of parseOllamaStream(response.body)) {
                if ("error" in chunk && typeof chunk.error === "string") {
                  const error = new UPPError(
                    chunk.error,
                    "PROVIDER_ERROR",
                    "ollama",
                    "llm"
                  );
                  responseReject(error);
                  throw error;
                }
                const events = transformStreamChunk(chunk, state);
                for (const event of events) {
                  yield event;
                }
              }
              // Stream drained cleanly — build and publish the final response.
              responseResolve(buildResponseFromState(state));
            } catch (error) {
              // Redundant reject for already-rejected paths is a no-op.
              responseReject(error);
              throw error;
            }
          }
          return {
            [Symbol.asyncIterator]() {
              return generateEvents();
            },
            response: responsePromise
          };
        }
      };
      return model;
    }
  };
}
525
+
526
+ // src/providers/ollama/index.ts
527
/**
 * Ollama provider — LLM modality backed by a local Ollama server.
 * Per the original TS source (embedded in the sourcemap):
 *   const model = llm(ollama('llama3.2'));
 * with an optional custom server via config `baseUrl`.
 */
var ollama = createProvider({
  name: "ollama",
  version: "1.0.0",
  modalities: {
    llm: createLLMHandler()
  }
});
export {
  ollama
};
537
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/providers/ollama/transform.ts","../../src/providers/ollama/llm.ts","../../src/providers/ollama/index.ts"],"sourcesContent":["import type { LLMRequest, LLMResponse } from '../../types/llm.ts';\nimport type { Message } from '../../types/messages.ts';\nimport type { StreamEvent } from '../../types/stream.ts';\nimport type { Tool, ToolCall } from '../../types/tool.ts';\nimport type { TokenUsage } from '../../types/turn.ts';\nimport type { ContentBlock, TextBlock, ImageBlock } from '../../types/content.ts';\nimport {\n AssistantMessage,\n isUserMessage,\n isAssistantMessage,\n isToolResultMessage,\n} from '../../types/messages.ts';\nimport type {\n OllamaLLMParams,\n OllamaRequest,\n OllamaMessage,\n OllamaTool,\n OllamaResponse,\n OllamaStreamChunk,\n OllamaToolCall,\n OllamaOptions,\n} from './types.ts';\n\n/**\n * Transform UPP request to Ollama format\n */\nexport function transformRequest<TParams extends OllamaLLMParams>(\n request: LLMRequest<TParams>,\n modelId: string\n): OllamaRequest {\n const params = (request.params ?? 
{}) as OllamaLLMParams;\n\n const ollamaRequest: OllamaRequest = {\n model: modelId,\n messages: transformMessages(request.messages, request.system),\n };\n\n // Build options object for runtime parameters\n const options: OllamaOptions = {};\n\n if (params.num_predict !== undefined) options.num_predict = params.num_predict;\n if (params.temperature !== undefined) options.temperature = params.temperature;\n if (params.top_p !== undefined) options.top_p = params.top_p;\n if (params.top_k !== undefined) options.top_k = params.top_k;\n if (params.min_p !== undefined) options.min_p = params.min_p;\n if (params.typical_p !== undefined) options.typical_p = params.typical_p;\n if (params.repeat_penalty !== undefined) options.repeat_penalty = params.repeat_penalty;\n if (params.repeat_last_n !== undefined) options.repeat_last_n = params.repeat_last_n;\n if (params.presence_penalty !== undefined) options.presence_penalty = params.presence_penalty;\n if (params.frequency_penalty !== undefined) options.frequency_penalty = params.frequency_penalty;\n if (params.mirostat !== undefined) options.mirostat = params.mirostat;\n if (params.mirostat_eta !== undefined) options.mirostat_eta = params.mirostat_eta;\n if (params.mirostat_tau !== undefined) options.mirostat_tau = params.mirostat_tau;\n if (params.penalize_newline !== undefined) options.penalize_newline = params.penalize_newline;\n if (params.stop !== undefined) options.stop = params.stop;\n if (params.seed !== undefined) options.seed = params.seed;\n if (params.num_keep !== undefined) options.num_keep = params.num_keep;\n if (params.num_ctx !== undefined) options.num_ctx = params.num_ctx;\n if (params.num_batch !== undefined) options.num_batch = params.num_batch;\n if (params.num_thread !== undefined) options.num_thread = params.num_thread;\n if (params.num_gpu !== undefined) options.num_gpu = params.num_gpu;\n if (params.main_gpu !== undefined) options.main_gpu = params.main_gpu;\n if (params.low_vram !== undefined) 
options.low_vram = params.low_vram;\n if (params.f16_kv !== undefined) options.f16_kv = params.f16_kv;\n if (params.use_mmap !== undefined) options.use_mmap = params.use_mmap;\n if (params.use_mlock !== undefined) options.use_mlock = params.use_mlock;\n if (params.vocab_only !== undefined) options.vocab_only = params.vocab_only;\n if (params.numa !== undefined) options.numa = params.numa;\n if (params.tfs_z !== undefined) options.tfs_z = params.tfs_z;\n\n if (Object.keys(options).length > 0) {\n ollamaRequest.options = options;\n }\n\n // Top-level parameters\n if (params.keep_alive !== undefined) {\n ollamaRequest.keep_alive = params.keep_alive;\n }\n if (params.think !== undefined) {\n ollamaRequest.think = params.think;\n }\n if (params.logprobs !== undefined) {\n ollamaRequest.logprobs = params.logprobs;\n }\n if (params.top_logprobs !== undefined) {\n ollamaRequest.top_logprobs = params.top_logprobs;\n }\n\n // Tools\n if (request.tools && request.tools.length > 0) {\n ollamaRequest.tools = request.tools.map(transformTool);\n }\n\n // Structured output via format field\n if (request.structure) {\n ollamaRequest.format = request.structure as unknown as Record<string, unknown>;\n }\n\n return ollamaRequest;\n}\n\n/**\n * Transform UPP Messages to Ollama messages\n */\nfunction transformMessages(messages: Message[], system?: string): OllamaMessage[] {\n const ollamaMessages: OllamaMessage[] = [];\n\n // System prompt as first message\n if (system) {\n ollamaMessages.push({\n role: 'system',\n content: system,\n });\n }\n\n for (const msg of messages) {\n if (isUserMessage(msg)) {\n const textContent: string[] = [];\n const images: string[] = [];\n\n for (const block of msg.content) {\n if (block.type === 'text') {\n textContent.push(block.text);\n } else if (block.type === 'image') {\n const imageBlock = block as ImageBlock;\n if (imageBlock.source.type === 'base64') {\n images.push(imageBlock.source.data);\n } else if (imageBlock.source.type === 'bytes') {\n // 
Convert bytes to base64\n const base64 = btoa(\n Array.from(imageBlock.source.data)\n .map((b) => String.fromCharCode(b))\n .join('')\n );\n images.push(base64);\n } else if (imageBlock.source.type === 'url') {\n // Ollama doesn't support URL images directly\n // Would need to fetch and convert, for now just add as text\n textContent.push(`[Image: ${imageBlock.source.url}]`);\n }\n }\n }\n\n const message: OllamaMessage = {\n role: 'user',\n content: textContent.join('\\n'),\n };\n\n if (images.length > 0) {\n message.images = images;\n }\n\n ollamaMessages.push(message);\n } else if (isAssistantMessage(msg)) {\n const textContent = msg.content\n .filter((block): block is TextBlock => block.type === 'text')\n .map((block) => block.text)\n .join('\\n');\n\n const message: OllamaMessage = {\n role: 'assistant',\n content: textContent,\n };\n\n // Add tool calls if present\n if (msg.toolCalls && msg.toolCalls.length > 0) {\n message.tool_calls = msg.toolCalls.map((call) => ({\n function: {\n name: call.toolName,\n arguments: call.arguments,\n },\n }));\n }\n\n ollamaMessages.push(message);\n } else if (isToolResultMessage(msg)) {\n // Tool results are sent as 'tool' role messages\n for (const result of msg.results) {\n ollamaMessages.push({\n role: 'tool',\n tool_name: result.toolCallId, // In our UPP, toolCallId maps to tool name for Ollama\n content:\n typeof result.result === 'string'\n ? 
result.result\n : JSON.stringify(result.result),\n });\n }\n }\n }\n\n return ollamaMessages;\n}\n\n/**\n * Transform a UPP Tool to Ollama format\n */\nfunction transformTool(tool: Tool): OllamaTool {\n return {\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: {\n type: 'object',\n properties: tool.parameters.properties,\n required: tool.parameters.required,\n },\n },\n };\n}\n\n/**\n * Transform Ollama response to UPP LLMResponse\n */\nexport function transformResponse(data: OllamaResponse): LLMResponse {\n const textContent: TextBlock[] = [];\n const toolCalls: ToolCall[] = [];\n let structuredData: unknown;\n\n // Add main content\n if (data.message.content) {\n textContent.push({ type: 'text', text: data.message.content });\n\n // Try to parse as JSON for structured output\n try {\n structuredData = JSON.parse(data.message.content);\n } catch {\n // Not valid JSON - that's fine, might not be structured output\n }\n }\n\n // Extract tool calls\n if (data.message.tool_calls) {\n for (const call of data.message.tool_calls) {\n toolCalls.push({\n toolCallId: call.function.name, // Ollama doesn't have separate IDs, use name\n toolName: call.function.name,\n arguments: call.function.arguments,\n });\n }\n }\n\n const message = new AssistantMessage(\n textContent,\n toolCalls.length > 0 ? toolCalls : undefined,\n {\n metadata: {\n ollama: {\n model: data.model,\n created_at: data.created_at,\n done_reason: data.done_reason,\n thinking: data.message.thinking,\n total_duration: data.total_duration,\n load_duration: data.load_duration,\n prompt_eval_duration: data.prompt_eval_duration,\n eval_duration: data.eval_duration,\n logprobs: data.logprobs,\n },\n },\n }\n );\n\n // Calculate token usage\n const usage: TokenUsage = {\n inputTokens: data.prompt_eval_count ?? 0,\n outputTokens: data.eval_count ?? 0,\n totalTokens: (data.prompt_eval_count ?? 0) + (data.eval_count ?? 
0),\n };\n\n // Map done_reason to standard stop reason\n let stopReason = 'end_turn';\n if (data.done_reason === 'length') {\n stopReason = 'max_tokens';\n } else if (data.done_reason === 'stop') {\n stopReason = 'end_turn';\n } else if (toolCalls.length > 0) {\n stopReason = 'tool_use';\n }\n\n return {\n message,\n usage,\n stopReason,\n data: structuredData,\n };\n}\n\n/**\n * State for accumulating streaming response\n */\nexport interface StreamState {\n model: string;\n content: string;\n thinking: string;\n toolCalls: Array<{ name: string; args: Record<string, unknown> }>;\n doneReason: string | null;\n promptEvalCount: number;\n evalCount: number;\n totalDuration: number;\n isFirstChunk: boolean;\n createdAt: string;\n}\n\n/**\n * Create initial stream state\n */\nexport function createStreamState(): StreamState {\n return {\n model: '',\n content: '',\n thinking: '',\n toolCalls: [],\n doneReason: null,\n promptEvalCount: 0,\n evalCount: 0,\n totalDuration: 0,\n isFirstChunk: true,\n createdAt: '',\n };\n}\n\n/**\n * Transform Ollama stream chunk to UPP StreamEvents\n */\nexport function transformStreamChunk(\n chunk: OllamaStreamChunk,\n state: StreamState\n): StreamEvent[] {\n const events: StreamEvent[] = [];\n\n // First chunk - emit message start\n if (state.isFirstChunk) {\n state.model = chunk.model;\n state.createdAt = chunk.created_at;\n events.push({ type: 'message_start', index: 0, delta: {} });\n state.isFirstChunk = false;\n }\n\n // Process message content\n if (chunk.message) {\n // Text content delta\n if (chunk.message.content) {\n state.content += chunk.message.content;\n events.push({\n type: 'text_delta',\n index: 0,\n delta: { text: chunk.message.content },\n });\n }\n\n // Thinking content delta\n if (chunk.message.thinking) {\n state.thinking += chunk.message.thinking;\n events.push({\n type: 'reasoning_delta',\n index: 0,\n delta: { text: chunk.message.thinking },\n });\n }\n\n // Tool calls (typically come in final chunk)\n if 
(chunk.message.tool_calls) {\n for (const call of chunk.message.tool_calls) {\n state.toolCalls.push({\n name: call.function.name,\n args: call.function.arguments,\n });\n events.push({\n type: 'tool_call_delta',\n index: state.toolCalls.length - 1,\n delta: {\n toolCallId: call.function.name,\n toolName: call.function.name,\n argumentsJson: JSON.stringify(call.function.arguments),\n },\n });\n }\n }\n }\n\n // Final chunk with metrics\n if (chunk.done) {\n state.doneReason = chunk.done_reason ?? null;\n state.promptEvalCount = chunk.prompt_eval_count ?? 0;\n state.evalCount = chunk.eval_count ?? 0;\n state.totalDuration = chunk.total_duration ?? 0;\n events.push({ type: 'message_stop', index: 0, delta: {} });\n }\n\n return events;\n}\n\n/**\n * Build LLMResponse from accumulated stream state\n */\nexport function buildResponseFromState(state: StreamState): LLMResponse {\n const textContent: TextBlock[] = [];\n const toolCalls: ToolCall[] = [];\n let structuredData: unknown;\n\n if (state.content) {\n textContent.push({ type: 'text', text: state.content });\n\n // Try to parse as JSON for structured output\n try {\n structuredData = JSON.parse(state.content);\n } catch {\n // Not valid JSON - that's fine\n }\n }\n\n for (const tc of state.toolCalls) {\n toolCalls.push({\n toolCallId: tc.name,\n toolName: tc.name,\n arguments: tc.args,\n });\n }\n\n const message = new AssistantMessage(\n textContent,\n toolCalls.length > 0 ? 
toolCalls : undefined,\n {\n metadata: {\n ollama: {\n model: state.model,\n created_at: state.createdAt,\n done_reason: state.doneReason,\n thinking: state.thinking || undefined,\n total_duration: state.totalDuration,\n },\n },\n }\n );\n\n const usage: TokenUsage = {\n inputTokens: state.promptEvalCount,\n outputTokens: state.evalCount,\n totalTokens: state.promptEvalCount + state.evalCount,\n };\n\n // Map done_reason to standard stop reason\n let stopReason = 'end_turn';\n if (state.doneReason === 'length') {\n stopReason = 'max_tokens';\n } else if (toolCalls.length > 0) {\n stopReason = 'tool_use';\n }\n\n return {\n message,\n usage,\n stopReason,\n data: structuredData,\n };\n}\n","import type {\n LLMHandler,\n BoundLLMModel,\n LLMRequest,\n LLMResponse,\n LLMStreamResult,\n LLMCapabilities,\n} from '../../types/llm.ts';\nimport type { StreamEvent } from '../../types/stream.ts';\nimport type { LLMProvider } from '../../types/provider.ts';\nimport { UPPError } from '../../types/errors.ts';\nimport { resolveApiKey } from '../../http/keys.ts';\nimport { doFetch, doStreamFetch } from '../../http/fetch.ts';\nimport { normalizeHttpError } from '../../http/errors.ts';\nimport type { OllamaLLMParams, OllamaResponse, OllamaStreamChunk } from './types.ts';\nimport {\n transformRequest,\n transformResponse,\n transformStreamChunk,\n createStreamState,\n buildResponseFromState,\n} from './transform.ts';\n\nconst OLLAMA_DEFAULT_URL = 'http://localhost:11434';\n\n/**\n * Ollama API capabilities\n * Note: Tool calling is disabled - Ollama recommends using their\n * OpenAI-compatible API (/v1/chat/completions) for tool calling.\n * Use the OpenAI provider with baseUrl pointed to Ollama for tools.\n */\nconst OLLAMA_CAPABILITIES: LLMCapabilities = {\n streaming: true,\n tools: false,\n structuredOutput: true,\n imageInput: true,\n videoInput: false,\n audioInput: false,\n};\n\n/**\n * Parse Ollama's newline-delimited JSON stream\n */\nasync function* parseOllamaStream(\n 
body: ReadableStream<Uint8Array>\n): AsyncGenerator<OllamaStreamChunk, void, unknown> {\n const reader = body.getReader();\n const decoder = new TextDecoder();\n let buffer = '';\n\n try {\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n\n buffer += decoder.decode(value, { stream: true });\n\n // Process complete lines (Ollama uses newline-delimited JSON)\n const lines = buffer.split('\\n');\n buffer = lines.pop() ?? ''; // Keep incomplete line in buffer\n\n for (const line of lines) {\n const trimmed = line.trim();\n if (!trimmed) continue;\n\n try {\n const chunk = JSON.parse(trimmed) as OllamaStreamChunk;\n yield chunk;\n } catch {\n // Skip invalid JSON lines\n }\n }\n }\n\n // Process any remaining buffer\n if (buffer.trim()) {\n try {\n const chunk = JSON.parse(buffer.trim()) as OllamaStreamChunk;\n yield chunk;\n } catch {\n // Skip invalid JSON\n }\n }\n } finally {\n reader.releaseLock();\n }\n}\n\n/**\n * Create Ollama LLM handler\n */\nexport function createLLMHandler(): LLMHandler<OllamaLLMParams> {\n // Provider reference injected by createProvider() after construction\n let providerRef: LLMProvider<OllamaLLMParams> | null = null;\n\n return {\n _setProvider(provider: LLMProvider<OllamaLLMParams>) {\n providerRef = provider;\n },\n\n bind(modelId: string): BoundLLMModel<OllamaLLMParams> {\n // Use the injected provider reference (set by createProvider)\n if (!providerRef) {\n throw new UPPError(\n 'Provider reference not set. 
Handler must be used with createProvider().',\n 'INVALID_REQUEST',\n 'ollama',\n 'llm'\n );\n }\n\n const model: BoundLLMModel<OllamaLLMParams> = {\n modelId,\n capabilities: OLLAMA_CAPABILITIES,\n\n get provider(): LLMProvider<OllamaLLMParams> {\n return providerRef!;\n },\n\n async complete(request: LLMRequest<OllamaLLMParams>): Promise<LLMResponse> {\n // Ollama doesn't require an API key by default, but may use one for auth\n let apiKey: string | undefined;\n try {\n apiKey = await resolveApiKey(\n request.config,\n 'OLLAMA_API_KEY',\n 'ollama',\n 'llm'\n );\n } catch {\n // API key is optional for Ollama\n }\n\n const baseUrl = request.config.baseUrl ?? OLLAMA_DEFAULT_URL;\n const url = `${baseUrl}/api/chat`;\n const body = transformRequest(request, modelId);\n body.stream = false;\n\n const headers: Record<string, string> = {\n 'Content-Type': 'application/json',\n };\n\n if (apiKey) {\n headers['Authorization'] = `Bearer ${apiKey}`;\n }\n\n const response = await doFetch(\n url,\n {\n method: 'POST',\n headers,\n body: JSON.stringify(body),\n signal: request.signal,\n },\n request.config,\n 'ollama',\n 'llm'\n );\n\n const data = (await response.json()) as OllamaResponse;\n return transformResponse(data);\n },\n\n stream(request: LLMRequest<OllamaLLMParams>): LLMStreamResult {\n const state = createStreamState();\n let responseResolve: (value: LLMResponse) => void;\n let responseReject: (error: Error) => void;\n\n const responsePromise = new Promise<LLMResponse>((resolve, reject) => {\n responseResolve = resolve;\n responseReject = reject;\n });\n\n async function* generateEvents(): AsyncGenerator<StreamEvent, void, unknown> {\n try {\n // Ollama doesn't require an API key by default\n let apiKey: string | undefined;\n try {\n apiKey = await resolveApiKey(\n request.config,\n 'OLLAMA_API_KEY',\n 'ollama',\n 'llm'\n );\n } catch {\n // API key is optional for Ollama\n }\n\n const baseUrl = request.config.baseUrl ?? 
OLLAMA_DEFAULT_URL;\n const url = `${baseUrl}/api/chat`;\n const body = transformRequest(request, modelId);\n body.stream = true;\n\n const headers: Record<string, string> = {\n 'Content-Type': 'application/json',\n };\n\n if (apiKey) {\n headers['Authorization'] = `Bearer ${apiKey}`;\n }\n\n const response = await doStreamFetch(\n url,\n {\n method: 'POST',\n headers,\n body: JSON.stringify(body),\n signal: request.signal,\n },\n request.config,\n 'ollama',\n 'llm'\n );\n\n if (!response.ok) {\n const error = await normalizeHttpError(response, 'ollama', 'llm');\n responseReject(error);\n throw error;\n }\n\n if (!response.body) {\n const error = new UPPError(\n 'No response body for streaming request',\n 'PROVIDER_ERROR',\n 'ollama',\n 'llm'\n );\n responseReject(error);\n throw error;\n }\n\n // Parse Ollama's newline-delimited JSON stream\n for await (const chunk of parseOllamaStream(response.body)) {\n // Check for error in chunk\n if ('error' in chunk && typeof (chunk as Record<string, unknown>).error === 'string') {\n const error = new UPPError(\n (chunk as Record<string, unknown>).error as string,\n 'PROVIDER_ERROR',\n 'ollama',\n 'llm'\n );\n responseReject(error);\n throw error;\n }\n\n const events = transformStreamChunk(chunk, state);\n for (const event of events) {\n yield event;\n }\n }\n\n // Build final response\n responseResolve(buildResponseFromState(state));\n } catch (error) {\n responseReject(error as Error);\n throw error;\n }\n }\n\n return {\n [Symbol.asyncIterator]() {\n return generateEvents();\n },\n response: responsePromise,\n };\n },\n };\n\n return model;\n },\n };\n}\n","import { createProvider } from '../../core/provider.ts';\nimport { createLLMHandler } from './llm.ts';\n\n/**\n * Ollama provider\n * Supports LLM modality with local Ollama models\n *\n * Ollama is a local LLM server that supports many open-source models including:\n * - Llama 3.x\n * - Mistral\n * - Mixtral\n * - Gemma\n * - Qwen\n * - DeepSeek\n * - Phi\n * - And 
many more\n *\n * @example\n * ```ts\n * import { llm } from 'provider-protocol';\n * import { ollama } from 'provider-protocol/ollama';\n *\n * const model = llm(ollama('llama3.2'));\n * const result = await model.generate('Hello, how are you?');\n * ```\n *\n * @example Custom server URL\n * ```ts\n * const model = llm(ollama('llama3.2'), {\n * baseUrl: 'http://my-ollama-server:11434',\n * });\n * ```\n */\nexport const ollama = createProvider({\n name: 'ollama',\n version: '1.0.0',\n modalities: {\n llm: createLLMHandler(),\n },\n});\n\n// Re-export types\nexport type { OllamaLLMParams } from './types.ts';\n"],"mappings":";;;;;;;;;;;;;;;;;;AA0BO,SAAS,iBACd,SACA,SACe;AACf,QAAM,SAAU,QAAQ,UAAU,CAAC;AAEnC,QAAM,gBAA+B;AAAA,IACnC,OAAO;AAAA,IACP,UAAU,kBAAkB,QAAQ,UAAU,QAAQ,MAAM;AAAA,EAC9D;AAGA,QAAM,UAAyB,CAAC;AAEhC,MAAI,OAAO,gBAAgB,OAAW,SAAQ,cAAc,OAAO;AACnE,MAAI,OAAO,gBAAgB,OAAW,SAAQ,cAAc,OAAO;AACnE,MAAI,OAAO,UAAU,OAAW,SAAQ,QAAQ,OAAO;AACvD,MAAI,OAAO,UAAU,OAAW,SAAQ,QAAQ,OAAO;AACvD,MAAI,OAAO,UAAU,OAAW,SAAQ,QAAQ,OAAO;AACvD,MAAI,OAAO,cAAc,OAAW,SAAQ,YAAY,OAAO;AAC/D,MAAI,OAAO,mBAAmB,OAAW,SAAQ,iBAAiB,OAAO;AACzE,MAAI,OAAO,kBAAkB,OAAW,SAAQ,gBAAgB,OAAO;AACvE,MAAI,OAAO,qBAAqB,OAAW,SAAQ,mBAAmB,OAAO;AAC7E,MAAI,OAAO,sBAAsB,OAAW,SAAQ,oBAAoB,OAAO;AAC/E,MAAI,OAAO,aAAa,OAAW,SAAQ,WAAW,OAAO;AAC7D,MAAI,OAAO,iBAAiB,OAAW,SAAQ,eAAe,OAAO;AACrE,MAAI,OAAO,iBAAiB,OAAW,SAAQ,eAAe,OAAO;AACrE,MAAI,OAAO,qBAAqB,OAAW,SAAQ,mBAAmB,OAAO;AAC7E,MAAI,OAAO,SAAS,OAAW,SAAQ,OAAO,OAAO;AACrD,MAAI,OAAO,SAAS,OAAW,SAAQ,OAAO,OAAO;AACrD,MAAI,OAAO,aAAa,OAAW,SAAQ,WAAW,OAAO;AAC7D,MAAI,OAAO,YAAY,OAAW,SAAQ,UAAU,OAAO;AAC3D,MAAI,OAAO,cAAc,OAAW,SAAQ,YAAY,OAAO;AAC/D,MAAI,OAAO,eAAe,OAAW,SAAQ,aAAa,OAAO;AACjE,MAAI,OAAO,YAAY,OAAW,SAAQ,UAAU,OAAO;AAC3D,MAAI,OAAO,aAAa,OAAW,SAAQ,WAAW,OAAO;AAC7D,MAAI,OAAO,aAAa,OAAW,SAAQ,WAAW,OAAO;AAC7D,MAAI,OAAO,WAAW,OAAW,SAAQ,SAAS,OAAO;AACzD,MAAI,OAAO,aAAa,OAAW,SAAQ,WAAW,OAAO;AAC7D,MAAI,OAAO,cAAc,OAAW,SAAQ,YAAY,OAAO;AAC/D,MAAI,OAAO,eAAe,OAAW,SAAQ,aAAa,OAAO;AACjE,MAAI,OAAO,SAAS,OAAW,SAAQ,OAAO,OAAO;AACrD,MA
AI,OAAO,UAAU,OAAW,SAAQ,QAAQ,OAAO;AAEvD,MAAI,OAAO,KAAK,OAAO,EAAE,SAAS,GAAG;AACnC,kBAAc,UAAU;AAAA,EAC1B;AAGA,MAAI,OAAO,eAAe,QAAW;AACnC,kBAAc,aAAa,OAAO;AAAA,EACpC;AACA,MAAI,OAAO,UAAU,QAAW;AAC9B,kBAAc,QAAQ,OAAO;AAAA,EAC/B;AACA,MAAI,OAAO,aAAa,QAAW;AACjC,kBAAc,WAAW,OAAO;AAAA,EAClC;AACA,MAAI,OAAO,iBAAiB,QAAW;AACrC,kBAAc,eAAe,OAAO;AAAA,EACtC;AAGA,MAAI,QAAQ,SAAS,QAAQ,MAAM,SAAS,GAAG;AAC7C,kBAAc,QAAQ,QAAQ,MAAM,IAAI,aAAa;AAAA,EACvD;AAGA,MAAI,QAAQ,WAAW;AACrB,kBAAc,SAAS,QAAQ;AAAA,EACjC;AAEA,SAAO;AACT;AAKA,SAAS,kBAAkB,UAAqB,QAAkC;AAChF,QAAM,iBAAkC,CAAC;AAGzC,MAAI,QAAQ;AACV,mBAAe,KAAK;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAEA,aAAW,OAAO,UAAU;AAC1B,QAAI,cAAc,GAAG,GAAG;AACtB,YAAM,cAAwB,CAAC;AAC/B,YAAM,SAAmB,CAAC;AAE1B,iBAAW,SAAS,IAAI,SAAS;AAC/B,YAAI,MAAM,SAAS,QAAQ;AACzB,sBAAY,KAAK,MAAM,IAAI;AAAA,QAC7B,WAAW,MAAM,SAAS,SAAS;AACjC,gBAAM,aAAa;AACnB,cAAI,WAAW,OAAO,SAAS,UAAU;AACvC,mBAAO,KAAK,WAAW,OAAO,IAAI;AAAA,UACpC,WAAW,WAAW,OAAO,SAAS,SAAS;AAE7C,kBAAM,SAAS;AAAA,cACb,MAAM,KAAK,WAAW,OAAO,IAAI,EAC9B,IAAI,CAAC,MAAM,OAAO,aAAa,CAAC,CAAC,EACjC,KAAK,EAAE;AAAA,YACZ;AACA,mBAAO,KAAK,MAAM;AAAA,UACpB,WAAW,WAAW,OAAO,SAAS,OAAO;AAG3C,wBAAY,KAAK,WAAW,WAAW,OAAO,GAAG,GAAG;AAAA,UACtD;AAAA,QACF;AAAA,MACF;AAEA,YAAM,UAAyB;AAAA,QAC7B,MAAM;AAAA,QACN,SAAS,YAAY,KAAK,IAAI;AAAA,MAChC;AAEA,UAAI,OAAO,SAAS,GAAG;AACrB,gBAAQ,SAAS;AAAA,MACnB;AAEA,qBAAe,KAAK,OAAO;AAAA,IAC7B,WAAW,mBAAmB,GAAG,GAAG;AAClC,YAAM,cAAc,IAAI,QACrB,OAAO,CAAC,UAA8B,MAAM,SAAS,MAAM,EAC3D,IAAI,CAAC,UAAU,MAAM,IAAI,EACzB,KAAK,IAAI;AAEZ,YAAM,UAAyB;AAAA,QAC7B,MAAM;AAAA,QACN,SAAS;AAAA,MACX;AAGA,UAAI,IAAI,aAAa,IAAI,UAAU,SAAS,GAAG;AAC7C,gBAAQ,aAAa,IAAI,UAAU,IAAI,CAAC,UAAU;AAAA,UAChD,UAAU;AAAA,YACR,MAAM,KAAK;AAAA,YACX,WAAW,KAAK;AAAA,UAClB;AAAA,QACF,EAAE;AAAA,MACJ;AAEA,qBAAe,KAAK,OAAO;AAAA,IAC7B,WAAW,oBAAoB,GAAG,GAAG;AAEnC,iBAAW,UAAU,IAAI,SAAS;AAChC,uBAAe,KAAK;AAAA,UAClB,MAAM;AAAA,UACN,WAAW,OAAO;AAAA;AAAA,UAClB,SACE,OAAO,OAAO,WAAW,WACrB,OAAO,SACP,KAAK,UAAU,OAAO,MAAM;AAAA,QACpC,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAAS,cAAc,MAAw
B;AAC7C,SAAO;AAAA,IACL,MAAM;AAAA,IACN,UAAU;AAAA,MACR,MAAM,KAAK;AAAA,MACX,aAAa,KAAK;AAAA,MAClB,YAAY;AAAA,QACV,MAAM;AAAA,QACN,YAAY,KAAK,WAAW;AAAA,QAC5B,UAAU,KAAK,WAAW;AAAA,MAC5B;AAAA,IACF;AAAA,EACF;AACF;AAKO,SAAS,kBAAkB,MAAmC;AACnE,QAAM,cAA2B,CAAC;AAClC,QAAM,YAAwB,CAAC;AAC/B,MAAI;AAGJ,MAAI,KAAK,QAAQ,SAAS;AACxB,gBAAY,KAAK,EAAE,MAAM,QAAQ,MAAM,KAAK,QAAQ,QAAQ,CAAC;AAG7D,QAAI;AACF,uBAAiB,KAAK,MAAM,KAAK,QAAQ,OAAO;AAAA,IAClD,QAAQ;AAAA,IAER;AAAA,EACF;AAGA,MAAI,KAAK,QAAQ,YAAY;AAC3B,eAAW,QAAQ,KAAK,QAAQ,YAAY;AAC1C,gBAAU,KAAK;AAAA,QACb,YAAY,KAAK,SAAS;AAAA;AAAA,QAC1B,UAAU,KAAK,SAAS;AAAA,QACxB,WAAW,KAAK,SAAS;AAAA,MAC3B,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,UAAU,IAAI;AAAA,IAClB;AAAA,IACA,UAAU,SAAS,IAAI,YAAY;AAAA,IACnC;AAAA,MACE,UAAU;AAAA,QACR,QAAQ;AAAA,UACN,OAAO,KAAK;AAAA,UACZ,YAAY,KAAK;AAAA,UACjB,aAAa,KAAK;AAAA,UAClB,UAAU,KAAK,QAAQ;AAAA,UACvB,gBAAgB,KAAK;AAAA,UACrB,eAAe,KAAK;AAAA,UACpB,sBAAsB,KAAK;AAAA,UAC3B,eAAe,KAAK;AAAA,UACpB,UAAU,KAAK;AAAA,QACjB;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,QAAM,QAAoB;AAAA,IACxB,aAAa,KAAK,qBAAqB;AAAA,IACvC,cAAc,KAAK,cAAc;AAAA,IACjC,cAAc,KAAK,qBAAqB,MAAM,KAAK,cAAc;AAAA,EACnE;AAGA,MAAI,aAAa;AACjB,MAAI,KAAK,gBAAgB,UAAU;AACjC,iBAAa;AAAA,EACf,WAAW,KAAK,gBAAgB,QAAQ;AACtC,iBAAa;AAAA,EACf,WAAW,UAAU,SAAS,GAAG;AAC/B,iBAAa;AAAA,EACf;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM;AAAA,EACR;AACF;AAqBO,SAAS,oBAAiC;AAC/C,SAAO;AAAA,IACL,OAAO;AAAA,IACP,SAAS;AAAA,IACT,UAAU;AAAA,IACV,WAAW,CAAC;AAAA,IACZ,YAAY;AAAA,IACZ,iBAAiB;AAAA,IACjB,WAAW;AAAA,IACX,eAAe;AAAA,IACf,cAAc;AAAA,IACd,WAAW;AAAA,EACb;AACF;AAKO,SAAS,qBACd,OACA,OACe;AACf,QAAM,SAAwB,CAAC;AAG/B,MAAI,MAAM,cAAc;AACtB,UAAM,QAAQ,MAAM;AACpB,UAAM,YAAY,MAAM;AACxB,WAAO,KAAK,EAAE,MAAM,iBAAiB,OAAO,GAAG,OAAO,CAAC,EAAE,CAAC;AAC1D,UAAM,eAAe;AAAA,EACvB;AAGA,MAAI,MAAM,SAAS;AAEjB,QAAI,MAAM,QAAQ,SAAS;AACzB,YAAM,WAAW,MAAM,QAAQ;AAC/B,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO;AAAA,QACP,OAAO,EAAE,MAAM,MAAM,QAAQ,QAAQ;AAAA,MACvC,CAAC;AAAA,IACH;AAGA,QAAI,MAAM,QAAQ,UAAU;AAC1B,YAAM,YAAY,MAAM,QAAQ;AAChC,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OA
AO;AAAA,QACP,OAAO,EAAE,MAAM,MAAM,QAAQ,SAAS;AAAA,MACxC,CAAC;AAAA,IACH;AAGA,QAAI,MAAM,QAAQ,YAAY;AAC5B,iBAAW,QAAQ,MAAM,QAAQ,YAAY;AAC3C,cAAM,UAAU,KAAK;AAAA,UACnB,MAAM,KAAK,SAAS;AAAA,UACpB,MAAM,KAAK,SAAS;AAAA,QACtB,CAAC;AACD,eAAO,KAAK;AAAA,UACV,MAAM;AAAA,UACN,OAAO,MAAM,UAAU,SAAS;AAAA,UAChC,OAAO;AAAA,YACL,YAAY,KAAK,SAAS;AAAA,YAC1B,UAAU,KAAK,SAAS;AAAA,YACxB,eAAe,KAAK,UAAU,KAAK,SAAS,SAAS;AAAA,UACvD;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAGA,MAAI,MAAM,MAAM;AACd,UAAM,aAAa,MAAM,eAAe;AACxC,UAAM,kBAAkB,MAAM,qBAAqB;AACnD,UAAM,YAAY,MAAM,cAAc;AACtC,UAAM,gBAAgB,MAAM,kBAAkB;AAC9C,WAAO,KAAK,EAAE,MAAM,gBAAgB,OAAO,GAAG,OAAO,CAAC,EAAE,CAAC;AAAA,EAC3D;AAEA,SAAO;AACT;AAKO,SAAS,uBAAuB,OAAiC;AACtE,QAAM,cAA2B,CAAC;AAClC,QAAM,YAAwB,CAAC;AAC/B,MAAI;AAEJ,MAAI,MAAM,SAAS;AACjB,gBAAY,KAAK,EAAE,MAAM,QAAQ,MAAM,MAAM,QAAQ,CAAC;AAGtD,QAAI;AACF,uBAAiB,KAAK,MAAM,MAAM,OAAO;AAAA,IAC3C,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,aAAW,MAAM,MAAM,WAAW;AAChC,cAAU,KAAK;AAAA,MACb,YAAY,GAAG;AAAA,MACf,UAAU,GAAG;AAAA,MACb,WAAW,GAAG;AAAA,IAChB,CAAC;AAAA,EACH;AAEA,QAAM,UAAU,IAAI;AAAA,IAClB;AAAA,IACA,UAAU,SAAS,IAAI,YAAY;AAAA,IACnC;AAAA,MACE,UAAU;AAAA,QACR,QAAQ;AAAA,UACN,OAAO,MAAM;AAAA,UACb,YAAY,MAAM;AAAA,UAClB,aAAa,MAAM;AAAA,UACnB,UAAU,MAAM,YAAY;AAAA,UAC5B,gBAAgB,MAAM;AAAA,QACxB;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,QAAoB;AAAA,IACxB,aAAa,MAAM;AAAA,IACnB,cAAc,MAAM;AAAA,IACpB,aAAa,MAAM,kBAAkB,MAAM;AAAA,EAC7C;AAGA,MAAI,aAAa;AACjB,MAAI,MAAM,eAAe,UAAU;AACjC,iBAAa;AAAA,EACf,WAAW,UAAU,SAAS,GAAG;AAC/B,iBAAa;AAAA,EACf;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM;AAAA,EACR;AACF;;;AChbA,IAAM,qBAAqB;AAQ3B,IAAM,sBAAuC;AAAA,EAC3C,WAAW;AAAA,EACX,OAAO;AAAA,EACP,kBAAkB;AAAA,EAClB,YAAY;AAAA,EACZ,YAAY;AAAA,EACZ,YAAY;AACd;AAKA,gBAAgB,kBACd,MACkD;AAClD,QAAM,SAAS,KAAK,UAAU;AAC9B,QAAM,UAAU,IAAI,YAAY;AAChC,MAAI,SAAS;AAEb,MAAI;AACF,WAAO,MAAM;AACX,YAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,UAAI,KAAM;AAEV,gBAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,KAAK,CAAC;AAGhD,YAAM,QAAQ,OAAO,MAAM,IAAI;AAC/B,eAAS,MAAM,IAAI,KAAK;AAExB,iBAAW,QAAQ,OAAO;AACxB,cAAM,UAAU
,KAAK,KAAK;AAC1B,YAAI,CAAC,QAAS;AAEd,YAAI;AACF,gBAAM,QAAQ,KAAK,MAAM,OAAO;AAChC,gBAAM;AAAA,QACR,QAAQ;AAAA,QAER;AAAA,MACF;AAAA,IACF;AAGA,QAAI,OAAO,KAAK,GAAG;AACjB,UAAI;AACF,cAAM,QAAQ,KAAK,MAAM,OAAO,KAAK,CAAC;AACtC,cAAM;AAAA,MACR,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,EACF,UAAE;AACA,WAAO,YAAY;AAAA,EACrB;AACF;AAKO,SAAS,mBAAgD;AAE9D,MAAI,cAAmD;AAEvD,SAAO;AAAA,IACL,aAAa,UAAwC;AACnD,oBAAc;AAAA,IAChB;AAAA,IAEA,KAAK,SAAiD;AAEpD,UAAI,CAAC,aAAa;AAChB,cAAM,IAAI;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAEA,YAAM,QAAwC;AAAA,QAC5C;AAAA,QACA,cAAc;AAAA,QAEd,IAAI,WAAyC;AAC3C,iBAAO;AAAA,QACT;AAAA,QAEA,MAAM,SAAS,SAA4D;AAEzE,cAAI;AACJ,cAAI;AACF,qBAAS,MAAM;AAAA,cACb,QAAQ;AAAA,cACR;AAAA,cACA;AAAA,cACA;AAAA,YACF;AAAA,UACF,QAAQ;AAAA,UAER;AAEA,gBAAM,UAAU,QAAQ,OAAO,WAAW;AAC1C,gBAAM,MAAM,GAAG,OAAO;AACtB,gBAAM,OAAO,iBAAiB,SAAS,OAAO;AAC9C,eAAK,SAAS;AAEd,gBAAM,UAAkC;AAAA,YACtC,gBAAgB;AAAA,UAClB;AAEA,cAAI,QAAQ;AACV,oBAAQ,eAAe,IAAI,UAAU,MAAM;AAAA,UAC7C;AAEA,gBAAM,WAAW,MAAM;AAAA,YACrB;AAAA,YACA;AAAA,cACE,QAAQ;AAAA,cACR;AAAA,cACA,MAAM,KAAK,UAAU,IAAI;AAAA,cACzB,QAAQ,QAAQ;AAAA,YAClB;AAAA,YACA,QAAQ;AAAA,YACR;AAAA,YACA;AAAA,UACF;AAEA,gBAAM,OAAQ,MAAM,SAAS,KAAK;AAClC,iBAAO,kBAAkB,IAAI;AAAA,QAC/B;AAAA,QAEA,OAAO,SAAuD;AAC5D,gBAAM,QAAQ,kBAAkB;AAChC,cAAI;AACJ,cAAI;AAEJ,gBAAM,kBAAkB,IAAI,QAAqB,CAAC,SAAS,WAAW;AACpE,8BAAkB;AAClB,6BAAiB;AAAA,UACnB,CAAC;AAED,0BAAgB,iBAA6D;AAC3E,gBAAI;AAEF,kBAAI;AACJ,kBAAI;AACF,yBAAS,MAAM;AAAA,kBACb,QAAQ;AAAA,kBACR;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF;AAAA,cACF,QAAQ;AAAA,cAER;AAEA,oBAAM,UAAU,QAAQ,OAAO,WAAW;AAC1C,oBAAM,MAAM,GAAG,OAAO;AACtB,oBAAM,OAAO,iBAAiB,SAAS,OAAO;AAC9C,mBAAK,SAAS;AAEd,oBAAM,UAAkC;AAAA,gBACtC,gBAAgB;AAAA,cAClB;AAEA,kBAAI,QAAQ;AACV,wBAAQ,eAAe,IAAI,UAAU,MAAM;AAAA,cAC7C;AAEA,oBAAM,WAAW,MAAM;AAAA,gBACrB;AAAA,gBACA;AAAA,kBACE,QAAQ;AAAA,kBACR;AAAA,kBACA,MAAM,KAAK,UAAU,IAAI;AAAA,kBACzB,QAAQ,QAAQ;AAAA,gBAClB;AAAA,gBACA,QAAQ;AAAA,gBACR;AAAA,gBACA;AAAA,cACF;AAEA,kBAAI,CAAC,SAAS,IAAI;AAChB,sBAAM,QAAQ,MAAM,mBAAmB,UAAU,UAAU,KAAK;AAChE,+BAAe,KAAK;AACpB,sBAAM;AAAA,cACR;AAE
A,kBAAI,CAAC,SAAS,MAAM;AAClB,sBAAM,QAAQ,IAAI;AAAA,kBAChB;AAAA,kBACA;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF;AACA,+BAAe,KAAK;AACpB,sBAAM;AAAA,cACR;AAGA,+BAAiB,SAAS,kBAAkB,SAAS,IAAI,GAAG;AAE1D,oBAAI,WAAW,SAAS,OAAQ,MAAkC,UAAU,UAAU;AACpF,wBAAM,QAAQ,IAAI;AAAA,oBACf,MAAkC;AAAA,oBACnC;AAAA,oBACA;AAAA,oBACA;AAAA,kBACF;AACA,iCAAe,KAAK;AACpB,wBAAM;AAAA,gBACR;AAEA,sBAAM,SAAS,qBAAqB,OAAO,KAAK;AAChD,2BAAW,SAAS,QAAQ;AAC1B,wBAAM;AAAA,gBACR;AAAA,cACF;AAGA,8BAAgB,uBAAuB,KAAK,CAAC;AAAA,YAC/C,SAAS,OAAO;AACd,6BAAe,KAAc;AAC7B,oBAAM;AAAA,YACR;AAAA,UACF;AAEA,iBAAO;AAAA,YACL,CAAC,OAAO,aAAa,IAAI;AACvB,qBAAO,eAAe;AAAA,YACxB;AAAA,YACA,UAAU;AAAA,UACZ;AAAA,QACF;AAAA,MACF;AAEA,aAAO;AAAA,IACT;AAAA,EACF;AACF;;;AC9OO,IAAM,SAAS,eAAe;AAAA,EACnC,MAAM;AAAA,EACN,SAAS;AAAA,EACT,YAAY;AAAA,IACV,KAAK,iBAAiB;AAAA,EACxB;AACF,CAAC;","names":[]}
@@ -4,14 +4,16 @@ import {
4
4
  isToolResultMessage,
5
5
  isUserMessage
6
6
  } from "../chunk-QUUX4G7U.js";
7
+ import {
8
+ parseSSEStream
9
+ } from "../chunk-X5G4EHL7.js";
7
10
  import {
8
11
  UPPError,
9
12
  doFetch,
10
13
  doStreamFetch,
11
14
  normalizeHttpError,
12
- parseSSEStream,
13
15
  resolveApiKey
14
- } from "../chunk-FTFX2VET.js";
16
+ } from "../chunk-SUNYWHTH.js";
15
17
 
16
18
  // src/providers/openai/transform.completions.ts
17
19
  function transformRequest(request, modelId) {