ollama-ai-provider-v2 2.0.0-beta.0 → 3.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -7,20 +7,6 @@ Use Ollama with the Vercel AI SDK, implementing the official Ollama API. This pr
  [![Node.js](https://img.shields.io/badge/Node.js-18+-green.svg)](https://nodejs.org/)
  [![License: Apache-2.0](https://img.shields.io/badge/License-Apache--2.0-yellow.svg)](https://opensource.org/licenses/Apache-2.0)

- ## 🎉 AI SDK 6 Beta Support
-
- This provider now supports **AI SDK 6 Beta** features including:
-
- - **🤖 Agent Abstraction** - Build complex agents with `ToolLoopAgent`
- - **🔐 Tool Approval** - Request user confirmation before executing tools
- - **📊 Structured Output** - Generate typed data alongside tool calling
- - **⚡ Enhanced Performance** - Optimized for the latest AI SDK features
-
- ```bash
- # Install AI SDK 6 Beta + Ollama Provider
- npm install ai@beta ollama-ai-provider-v2
- ```
-
  ## Why Choose Ollama Provider V2?

  - ✅ **Minimal Dependencies** - Lean codebase with just 2 core dependencies
@@ -206,147 +192,6 @@ ollama serve
  ollama pull llama3.2
  ```

- # AI SDK 6 Beta examples
- ## Agent Abstraction
-
- AI SDK 6 introduces the `ToolLoopAgent` class for building agents with full control over execution flow.
-
- ### Basic Agent
-
- ```typescript
- import { ToolLoopAgent } from 'ai';
- import { ollama } from 'ollama-ai-provider-v2';
-
- const weatherAgent = new ToolLoopAgent({
-   model: ollama('llama3.3:70b'),
-   instructions: 'You are a helpful weather assistant.',
-   tools: {
-     weather: weatherTool,
-   },
- });
-
- const result = await weatherAgent.generate({
-   prompt: 'What is the weather in San Francisco?',
- });
- ```
-
- ### Agent with Call Options
-
- Use call options to pass runtime configuration to agents:
-
- ```typescript
- import { ToolLoopAgent } from 'ai';
- import { ollama } from 'ollama-ai-provider-v2';
- import { z } from 'zod';
-
- const supportAgent = new ToolLoopAgent({
-   model: ollama('qwen2.5:32b'),
-   callOptionsSchema: z.object({
-     userId: z.string(),
-     accountType: z.enum(['free', 'pro', 'enterprise']),
-   }),
-   instructions: 'You are a helpful customer support agent.',
-   prepareCall: ({ options, ...settings }) => ({
-     ...settings,
-     instructions: `${settings.instructions}
-
- User context:
- - Account type: ${options.accountType}
- - User ID: ${options.userId}
-
- Adjust your response based on the user's account level.`,
-   }),
- });
-
- const result = await supportAgent.generate({
-   prompt: 'How do I upgrade my account?',
-   options: {
-     userId: 'user_123',
-     accountType: 'free',
-   },
- });
- ```
-
- ## Tool Execution Approval
-
- AI SDK 6 allows you to require user approval before executing tools.
-
- ### Basic Tool Approval
-
- ```typescript
- import { tool } from 'ai';
- import { z } from 'zod';
-
- export const weatherTool = tool({
-   description: 'Get the weather in a location',
-   inputSchema: z.object({
-     city: z.string(),
-   }),
-   needsApproval: true, // Always require approval
-   execute: async ({ city }) => {
-     const weather = await fetchWeather(city);
-     return weather;
-   },
- });
- ```
-
- ### Dynamic Approval
-
- Make approval decisions based on tool input:
-
- ```typescript
- export const paymentTool = tool({
-   description: 'Process a payment',
-   inputSchema: z.object({
-     amount: z.number(),
-     recipient: z.string(),
-   }),
-   needsApproval: async ({ amount }) => amount > 1000, // Only large payments
-   execute: async ({ amount, recipient }) => {
-     return await processPayment(amount, recipient);
-   },
- });
- ```
-
- ## UI Integration
-
- ### Server-side API Route
-
- ```typescript
- import { createAgentUIStreamResponse } from 'ai';
- import { weatherAgent } from '@/lib/agents';
-
- export async function POST(request: Request) {
-   const { messages } = await request.json();
-
-   return createAgentUIStreamResponse({
-     agent: weatherAgent,
-     messages,
-   });
- }
- ```
-
- ### Client-side with Type Safety
-
- ```typescript
- import { useChat } from '@ai-sdk/react';
- import { InferAgentUIMessage } from 'ai';
- import { weatherAgent } from '@/lib/agents';
-
- type WeatherAgentUIMessage = InferAgentUIMessage<typeof weatherAgent>;
-
- export function WeatherChat() {
-   const { messages, sendMessage } = useChat<WeatherAgentUIMessage>();
-
-   return (
-     <div>
-       {/* Your chat UI */}
-     </div>
-   );
- }
- ```
-
-
  ## Contributing

  Contributions are welcome! Here's how to get started:
package/dist/index.d.mts CHANGED
@@ -1,4 +1,4 @@
- import { ProviderV2, LanguageModelV2, EmbeddingModelV2 } from '@ai-sdk/provider';
+ import { ProviderV3, LanguageModelV3, EmbeddingModelV3 } from '@ai-sdk/provider';
  import { FetchFunction } from '@ai-sdk/provider-utils';
  import { z } from 'zod/v4';

@@ -65,36 +65,44 @@ interface OllamaEmbeddingSettings {
  monitor and detect abuse. Learn more.
  */
  user?: string;
+ /**
+ Whether to truncate input text to fit within model's context length.
+ */
+ truncate?: boolean;
+ /**
+ How long to keep the model loaded in memory (e.g., "5m" for 5 minutes).
+ */
+ keepAlive?: string;
  }

- interface OllamaProvider extends ProviderV2 {
- (modelId: OllamaChatModelId): LanguageModelV2;
+ interface OllamaProvider extends ProviderV3 {
+ (modelId: OllamaChatModelId): LanguageModelV3;
  /**
  Creates an Ollama model for text generation.
  */
- languageModel(modelId: OllamaChatModelId): LanguageModelV2;
+ languageModel(modelId: OllamaChatModelId): LanguageModelV3;
  /**
  Creates an Ollama chat model for text generation.
  */
- chat(modelId: OllamaChatModelId, settings?: OllamaProviderOptions): LanguageModelV2;
+ chat(modelId: OllamaChatModelId, settings?: OllamaProviderOptions): LanguageModelV3;
  /**
  Creates an Ollama completion model for text generation.
  */
- completion(modelId: OllamaCompletionModelId, settings?: OllamaCompletionSettings): LanguageModelV2;
+ completion(modelId: OllamaCompletionModelId, settings?: OllamaCompletionSettings): LanguageModelV3;
  /**
  Creates a model for text embeddings.
  */
- embedding(modelId: OllamaEmbeddingModelId, settings?: OllamaEmbeddingSettings): EmbeddingModelV2<string>;
+ embedding(modelId: OllamaEmbeddingModelId, settings?: OllamaEmbeddingSettings): EmbeddingModelV3;
  /**
  Creates a model for text embeddings.

  @deprecated Use `textEmbeddingModel` instead.
  */
- textEmbedding(modelId: OllamaEmbeddingModelId, settings?: OllamaEmbeddingSettings): EmbeddingModelV2<string>;
+ textEmbedding(modelId: OllamaEmbeddingModelId, settings?: OllamaEmbeddingSettings): EmbeddingModelV3;
  /**
  Creates a model for text embeddings.
  */
- textEmbeddingModel(modelId: OllamaEmbeddingModelId, settings?: OllamaEmbeddingSettings): EmbeddingModelV2<string>;
+ textEmbeddingModel(modelId: OllamaEmbeddingModelId, settings?: OllamaEmbeddingSettings): EmbeddingModelV3;
  }
  interface OllamaProviderSettings {
  /**
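
For context on the two fields added to `OllamaEmbeddingSettings` above, here is a minimal usage sketch. The settings object is passed to `ollama.embedding()` per the signature in this file; the model id and input value are illustrative, and `embed` comes from the AI SDK:

```typescript
import { embed } from 'ai';
import { ollama } from 'ollama-ai-provider-v2';

// Illustrative sketch: 'nomic-embed-text' is an example model id, not part of this diff.
const embeddingModel = ollama.embedding('nomic-embed-text', {
  truncate: true,   // clip inputs that exceed the model's context length
  keepAlive: '5m',  // keep the model loaded in memory for 5 minutes
});

const { embedding } = await embed({
  model: embeddingModel,
  value: 'sunny day at the beach',
});
```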
package/dist/index.d.ts CHANGED
@@ -1,4 +1,4 @@
- import { ProviderV2, LanguageModelV2, EmbeddingModelV2 } from '@ai-sdk/provider';
+ import { ProviderV3, LanguageModelV3, EmbeddingModelV3 } from '@ai-sdk/provider';
  import { FetchFunction } from '@ai-sdk/provider-utils';
  import { z } from 'zod/v4';

@@ -65,36 +65,44 @@ interface OllamaEmbeddingSettings {
  monitor and detect abuse. Learn more.
  */
  user?: string;
+ /**
+ Whether to truncate input text to fit within model's context length.
+ */
+ truncate?: boolean;
+ /**
+ How long to keep the model loaded in memory (e.g., "5m" for 5 minutes).
+ */
+ keepAlive?: string;
  }

- interface OllamaProvider extends ProviderV2 {
- (modelId: OllamaChatModelId): LanguageModelV2;
+ interface OllamaProvider extends ProviderV3 {
+ (modelId: OllamaChatModelId): LanguageModelV3;
  /**
  Creates an Ollama model for text generation.
  */
- languageModel(modelId: OllamaChatModelId): LanguageModelV2;
+ languageModel(modelId: OllamaChatModelId): LanguageModelV3;
  /**
  Creates an Ollama chat model for text generation.
  */
- chat(modelId: OllamaChatModelId, settings?: OllamaProviderOptions): LanguageModelV2;
+ chat(modelId: OllamaChatModelId, settings?: OllamaProviderOptions): LanguageModelV3;
  /**
  Creates an Ollama completion model for text generation.
  */
- completion(modelId: OllamaCompletionModelId, settings?: OllamaCompletionSettings): LanguageModelV2;
+ completion(modelId: OllamaCompletionModelId, settings?: OllamaCompletionSettings): LanguageModelV3;
  /**
  Creates a model for text embeddings.
  */
- embedding(modelId: OllamaEmbeddingModelId, settings?: OllamaEmbeddingSettings): EmbeddingModelV2<string>;
+ embedding(modelId: OllamaEmbeddingModelId, settings?: OllamaEmbeddingSettings): EmbeddingModelV3;
  /**
  Creates a model for text embeddings.

  @deprecated Use `textEmbeddingModel` instead.
  */
- textEmbedding(modelId: OllamaEmbeddingModelId, settings?: OllamaEmbeddingSettings): EmbeddingModelV2<string>;
+ textEmbedding(modelId: OllamaEmbeddingModelId, settings?: OllamaEmbeddingSettings): EmbeddingModelV3;
  /**
  Creates a model for text embeddings.
  */
- textEmbeddingModel(modelId: OllamaEmbeddingModelId, settings?: OllamaEmbeddingSettings): EmbeddingModelV2<string>;
+ textEmbeddingModel(modelId: OllamaEmbeddingModelId, settings?: OllamaEmbeddingSettings): EmbeddingModelV3;
  }
  interface OllamaProviderSettings {
  /**
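
As in the `.d.mts` file above, the `OllamaProvider` method signatures are unchanged apart from returning `LanguageModelV3`/`EmbeddingModelV3`, so existing call sites keep the same shape. A minimal sketch of typical usage against the updated types, with an illustrative model id:

```typescript
import { generateText } from 'ai';
import { ollama } from 'ollama-ai-provider-v2';

// Illustrative sketch: 'llama3.2' is an example model id, not part of this diff.
const { text } = await generateText({
  model: ollama('llama3.2'), // returns a LanguageModelV3 in 3.0.1
  prompt: 'Explain the difference between chat and completion models.',
});

console.log(text);
```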