@teamflojo/floimg-ollama 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,138 @@
1
+ # @teamflojo/floimg-ollama
2
+
3
+ Local AI provider for floimg using [Ollama](https://ollama.ai). Run vision and text models locally with no API key required.
4
+
5
+ ## Features
6
+
7
+ - **Vision Analysis** - Analyze images with LLaVA or other vision models
8
+ - **Text Generation** - Generate text with Llama, Mistral, or other models
9
+ - **100% Local** - No API keys, no data leaves your machine
10
+ - **Privacy-First** - Perfect for sensitive content
11
+
12
+ ## Prerequisites
13
+
14
+ 1. Install [Ollama](https://ollama.ai)
15
+ 2. Pull the models you want to use:
16
+
17
+ ```bash
18
+ # Vision model
19
+ ollama pull llava
20
+
21
+ # Text model
22
+ ollama pull llama3.2
23
+ ```
24
+
25
+ ## Installation
26
+
27
+ ```bash
28
+ npm install @teamflojo/floimg-ollama
29
+ ```
30
+
31
+ ## Usage
32
+
33
+ ### Vision Analysis
34
+
35
+ ```typescript
36
+ import createClient from "@teamflojo/floimg";
37
+ import { ollamaVision } from "@teamflojo/floimg-ollama";
38
+
39
+ const client = createClient();
40
+ client.registerVisionProvider(ollamaVision({ model: "llava" }));
41
+
42
+ const result = await client.analyzeImage({
43
+ provider: "ollama-vision",
44
+ blob: imageBlob,
45
+ params: {
46
+ prompt: "What objects are in this image?",
47
+ outputFormat: "json"
48
+ }
49
+ });
50
+
51
+ console.log(result.content);
52
+ ```
53
+
54
+ ### Text Generation
55
+
56
+ ```typescript
57
+ import createClient from "@teamflojo/floimg";
58
+ import { ollamaText } from "@teamflojo/floimg-ollama";
59
+
60
+ const client = createClient();
61
+ client.registerTextProvider(ollamaText({ model: "llama3.2" }));
62
+
63
+ const result = await client.generateText({
64
+ provider: "ollama-text",
65
+ params: {
66
+ prompt: "Write a creative caption for a sunset photo",
67
+ temperature: 0.8
68
+ }
69
+ });
70
+
71
+ console.log(result.content);
72
+ ```
73
+
74
+ ### Register Both Providers
75
+
76
+ ```typescript
77
+ import createClient from "@teamflojo/floimg";
78
+ import ollama from "@teamflojo/floimg-ollama";
79
+
80
+ const client = createClient();
81
+ const [vision, text] = ollama({
82
+ baseUrl: "http://localhost:11434",
83
+ visionModel: "llava",
84
+ textModel: "llama3.2"
85
+ });
86
+
87
+ client.registerVisionProvider(vision);
88
+ client.registerTextProvider(text);
89
+ ```
90
+
91
+ ### In Pipelines
92
+
93
+ ```typescript
94
+ await client.run({
95
+ name: "local-image-analysis",
96
+ steps: [
97
+ { kind: "generate", generator: "qr", params: { text: "hello" }, out: "qr" },
98
+ { kind: "vision", in: "qr", provider: "ollama-vision",
99
+ params: { prompt: "What does this QR code contain?" }, out: "analysis" },
100
+ { kind: "text", in: "analysis", provider: "ollama-text",
101
+ params: { prompt: "Summarize this in one sentence" }, out: "summary" }
102
+ ]
103
+ });
104
+ ```
105
+
106
+ ## Configuration
107
+
108
+ ### Vision Provider
109
+
110
+ | Option | Default | Description |
111
+ |--------|---------|-------------|
112
+ | `baseUrl` | `http://localhost:11434` | Ollama server URL |
113
+ | `model` | `llava` | Vision model to use |
114
+
115
+ ### Text Provider
116
+
117
+ | Option | Default | Description |
118
+ |--------|---------|-------------|
119
+ | `baseUrl` | `http://localhost:11434` | Ollama server URL |
120
+ | `model` | `llama3.2` | Text model to use |
121
+
122
+ ## Supported Models
123
+
124
+ ### Vision Models
125
+ - `llava` - LLaVA (default)
126
+ - `llava:13b` - LLaVA 13B
127
+ - `bakllava` - BakLLaVA
128
+
129
+ ### Text Models
130
+ - `llama3.2` - Llama 3.2 (default)
131
+ - `llama3.1` - Llama 3.1
132
+ - `mistral` - Mistral 7B
133
+ - `mixtral` - Mixtral 8x7B
134
+ - `codellama` - Code Llama
135
+
136
+ ## License
137
+
138
+ MIT
@@ -0,0 +1,96 @@
1
+ import type { VisionProvider, VisionProviderSchema, TextProvider, TextProviderSchema } from "@teamflojo/floimg";
2
/** Options shared by every Ollama provider in this package. */
export interface OllamaConfig {
    /** Ollama server URL (default: http://localhost:11434) */
    baseUrl?: string;
}
/** Construction options for the vision provider. */
export interface OllamaVisionConfig extends OllamaConfig {
    /** Vision model to use (default: llava) */
    model?: string;
}
/** Construction options for the text provider. */
export interface OllamaTextConfig extends OllamaConfig {
    /** Text model to use (default: llama3.2) */
    model?: string;
}
/** Per-call parameters accepted by the vision provider's `analyze()`. */
export interface OllamaVisionParams {
    /** What to analyze or ask about the image (default: "Describe this image in detail."). */
    prompt?: string;
    /** "json" asks the model for JSON and the provider attempts to parse it; default "text". */
    outputFormat?: "text" | "json";
}
18
/**
 * Schema for the Ollama Vision provider
 *
 * Parameter metadata (titles, descriptions, defaults) for hosts that
 * render forms or validate `analyze()` params.
 */
export declare const ollamaVisionSchema: VisionProviderSchema;
/**
 * Ollama Vision provider for local image analysis
 *
 * Uses LLaVA or other vision-capable models running locally via Ollama.
 * No API key required - runs entirely on your machine.
 *
 * @param config - server URL and model selection; see {@link OllamaVisionConfig}
 * @returns a provider registered under the name "ollama-vision"
 *
 * @example
 * ```typescript
 * import { ollamaVision } from "@teamflojo/floimg-ollama";
 *
 * const client = createClient();
 * client.registerVisionProvider(ollamaVision({ model: "llava" }));
 *
 * const result = await client.analyzeImage({
 *   provider: "ollama-vision",
 *   blob: imageBlob,
 *   params: { prompt: "What objects are in this image?" }
 * });
 * ```
 */
export declare function ollamaVision(config?: OllamaVisionConfig): VisionProvider;
43
/** Per-call parameters accepted by the text provider's `generate()`. */
export interface OllamaTextParams {
    /** The prompt for text generation (required). */
    prompt: string;
    /** Optional system prompt to guide the model's behavior. */
    systemPrompt?: string;
    /** Optional context from a previous step (e.g. vision analysis), prepended to the prompt. */
    context?: string;
    /** "json" asks the model for JSON and the provider attempts to parse it; default "text". */
    outputFormat?: "text" | "json";
    /** Creativity level, 0-2 (default 0.7). */
    temperature?: number;
}
/**
 * Schema for the Ollama Text provider
 *
 * Parameter metadata (titles, descriptions, defaults) for hosts that
 * render forms or validate `generate()` params.
 */
export declare const ollamaTextSchema: TextProviderSchema;
/**
 * Ollama Text provider for local text generation
 *
 * Uses Llama, Mistral, or other text models running locally via Ollama.
 * No API key required - runs entirely on your machine.
 *
 * @param config - server URL and model selection; see {@link OllamaTextConfig}
 * @returns a provider registered under the name "ollama-text"
 *
 * @example
 * ```typescript
 * import { ollamaText } from "@teamflojo/floimg-ollama";
 *
 * const client = createClient();
 * client.registerTextProvider(ollamaText({ model: "llama3.2" }));
 *
 * const result = await client.generateText({
 *   provider: "ollama-text",
 *   params: {
 *     prompt: "Write a creative description for this image",
 *     context: "A serene mountain lake at sunset"
 *   }
 * });
 * ```
 */
export declare function ollamaText(config?: OllamaTextConfig): TextProvider;
77
/**
 * Create both vision and text providers with shared config
 *
 * Returns the pair as a tuple: `[visionProvider, textProvider]`.
 *
 * @example
 * ```typescript
 * import ollama from "@teamflojo/floimg-ollama";
 *
 * const [vision, text] = ollama({ baseUrl: "http://localhost:11434" });
 * client.registerVisionProvider(vision);
 * client.registerTextProvider(text);
 * ```
 */
export default function ollama(config?: OllamaConfig & {
    visionModel?: string;
    textModel?: string;
}): [VisionProvider, TextProvider];
export { ollama };
96
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EACV,cAAc,EACd,oBAAoB,EACpB,YAAY,EACZ,kBAAkB,EAGnB,MAAM,mBAAmB,CAAC;AAM3B,MAAM,WAAW,YAAY;IAC3B,0DAA0D;IAC1D,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED,MAAM,WAAW,kBAAmB,SAAQ,YAAY;IACtD,2CAA2C;IAC3C,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,gBAAiB,SAAQ,YAAY;IACpD,4CAA4C;IAC5C,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAMD,MAAM,WAAW,kBAAkB;IACjC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,YAAY,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;CAChC;AAED;;GAEG;AACH,eAAO,MAAM,kBAAkB,EAAE,oBAkBhC,CAAC;AAEF;;;;;;;;;;;;;;;;;;;GAmBG;AACH,wBAAgB,YAAY,CAAC,MAAM,GAAE,kBAAuB,GAAG,cAAc,CA+D5E;AAMD,MAAM,WAAW,gBAAgB;IAC/B,MAAM,EAAE,MAAM,CAAC;IACf,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,YAAY,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;IAC/B,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB;AAED;;GAEG;AACH,eAAO,MAAM,gBAAgB,EAAE,kBAkC9B,CAAC;AAEF;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,wBAAgB,UAAU,CAAC,MAAM,GAAE,gBAAqB,GAAG,YAAY,CAmFtE;AAMD;;;;;;;;;;;;;GAaG;AACH,MAAM,CAAC,OAAO,UAAU,MAAM,CAC5B,MAAM,GAAE,YAAY,GAAG;IAAE,WAAW,CAAC,EAAE,MAAM,CAAC;IAAC,SAAS,CAAC,EAAE,MAAM,CAAA;CAAO,GACvE,CAAC,cAAc,EAAE,YAAY,CAAC,CAOhC;AAED,OAAO,EAAE,MAAM,EAAE,CAAC"}
package/dist/index.js ADDED
@@ -0,0 +1,247 @@
1
+ import { Ollama } from "ollama";
2
/**
 * Schema for the Ollama Vision provider
 *
 * Parameter metadata consumed by floimg hosts (titles, descriptions,
 * defaults). The defaults here mirror the runtime defaults applied in
 * `ollamaVision().analyze()`.
 */
export const ollamaVisionSchema = {
    name: "ollama-vision",
    description: "Analyze images locally using Ollama with LLaVA or similar vision models",
    parameters: {
        // Free-form instruction or question applied to the image.
        prompt: {
            type: "string",
            title: "Prompt",
            description: "What to analyze or ask about the image",
            default: "Describe this image in detail.",
        },
        // "json" asks the model for JSON; the provider attempts to parse the reply.
        outputFormat: {
            type: "string",
            title: "Output Format",
            description: "Response format: plain text or structured JSON",
            enum: ["text", "json"],
            default: "text",
        },
    },
};
24
/**
 * Ollama Vision provider for local image analysis
 *
 * Uses LLaVA or other vision-capable models running locally via Ollama.
 * No API key required - runs entirely on your machine.
 *
 * @example
 * ```typescript
 * import { ollamaVision } from "@teamflojo/floimg-ollama";
 *
 * const client = createClient();
 * client.registerVisionProvider(ollamaVision({ model: "llava" }));
 *
 * const result = await client.analyzeImage({
 *   provider: "ollama-vision",
 *   blob: imageBlob,
 *   params: { prompt: "What objects are in this image?" }
 * });
 * ```
 */
export function ollamaVision(config = {}) {
    const { baseUrl = "http://localhost:11434", model = "llava" } = config;
    const client = new Ollama({ host: baseUrl });
    return {
        name: "ollama-vision",
        schema: ollamaVisionSchema,
        /**
         * Analyze a single image with the configured vision model.
         *
         * @param input - image blob; assumes `input.bytes` is a Node Buffer — TODO confirm against floimg's blob type
         * @param params - see `ollamaVisionSchema.parameters` (all optional)
         * @returns result whose `type` is "json" only when the model reply parsed as JSON
         */
        async analyze(input, params = {}) {
            const { prompt = "Describe this image in detail.", outputFormat = "text", } = params;
            // Ollama's chat API accepts images as base64-encoded strings.
            const base64 = input.bytes.toString("base64");
            // When JSON output is requested, steer the model via the prompt.
            const userPrompt = outputFormat === "json"
                ? `${prompt}\n\nRespond with a valid JSON object containing your analysis.`
                : prompt;
            const response = await client.chat({
                model,
                messages: [
                    {
                        role: "user",
                        content: userPrompt,
                        images: [base64],
                    },
                ],
            });
            const content = response.message.content;
            // Try to parse JSON if requested. Track success with a flag so that
            // valid-but-falsy JSON (null, 0, false, "") is still reported as JSON.
            // (Previously `type: parsed ? "json" : "text"` misclassified those.)
            let parsed;
            let parsedOk = false;
            if (outputFormat === "json") {
                try {
                    parsed = JSON.parse(content);
                    parsedOk = true;
                }
                catch {
                    // Model ignored the JSON instruction; fall back to plain text.
                }
            }
            return {
                type: parsedOk ? "json" : "text",
                content,
                parsed,
                source: `ai:ollama-vision:${model}`,
                metadata: {
                    model,
                    prompt,
                    localExecution: true,
                },
            };
        },
    };
}
93
/**
 * Schema for the Ollama Text provider
 *
 * Parameter metadata consumed by floimg hosts. `prompt` is the only
 * required parameter (see `requiredParameters`); defaults mirror the
 * runtime defaults applied in `ollamaText().generate()`.
 */
export const ollamaTextSchema = {
    name: "ollama-text",
    description: "Generate text locally using Ollama with Llama, Mistral, or other models",
    parameters: {
        prompt: {
            type: "string",
            title: "Prompt",
            description: "The prompt for text generation",
        },
        systemPrompt: {
            type: "string",
            title: "System Prompt",
            description: "Optional system prompt to guide the model's behavior",
        },
        // Prepended to the user prompt at generation time.
        context: {
            type: "string",
            title: "Context",
            description: "Optional context from a previous step (e.g., vision analysis)",
        },
        // "json" asks the model for JSON; the provider attempts to parse the reply.
        outputFormat: {
            type: "string",
            title: "Output Format",
            description: "Response format: plain text or structured JSON",
            enum: ["text", "json"],
            default: "text",
        },
        temperature: {
            type: "number",
            title: "Temperature",
            description: "Creativity level (0-2)",
            default: 0.7,
        },
    },
    requiredParameters: ["prompt"],
};
131
/**
 * Ollama Text provider for local text generation
 *
 * Uses Llama, Mistral, or other text models running locally via Ollama.
 * No API key required - runs entirely on your machine.
 *
 * @example
 * ```typescript
 * import { ollamaText } from "@teamflojo/floimg-ollama";
 *
 * const client = createClient();
 * client.registerTextProvider(ollamaText({ model: "llama3.2" }));
 *
 * const result = await client.generateText({
 *   provider: "ollama-text",
 *   params: {
 *     prompt: "Write a creative description for this image",
 *     context: "A serene mountain lake at sunset"
 *   }
 * });
 * ```
 */
export function ollamaText(config = {}) {
    const { baseUrl = "http://localhost:11434", model = "llama3.2" } = config;
    const client = new Ollama({ host: baseUrl });
    return {
        name: "ollama-text",
        schema: ollamaTextSchema,
        /**
         * Generate text with the configured model.
         *
         * @param params - see `ollamaTextSchema.parameters`; `prompt` is required
         * @returns result whose `type` is "json" only when the model reply parsed as JSON
         * @throws Error when `prompt` is missing
         */
        async generate(params = {}) {
            const { prompt, systemPrompt, context, outputFormat = "text", temperature = 0.7, } = params;
            // Defaulting params to {} above lets a missing params object reach this
            // intended error instead of failing with a destructuring TypeError.
            if (!prompt) {
                throw new Error("prompt is required for Ollama text generation");
            }
            // Build messages
            const messages = [];
            // Add system prompt if provided; nudge toward JSON when requested.
            if (systemPrompt) {
                let system = systemPrompt;
                if (outputFormat === "json") {
                    system += " Always respond with valid JSON.";
                }
                messages.push({ role: "system", content: system });
            }
            else if (outputFormat === "json") {
                messages.push({
                    role: "system",
                    content: "You are a helpful assistant. Always respond with valid JSON.",
                });
            }
            // Build user message with optional context
            let userMessage = prompt;
            if (context) {
                userMessage = `Context from previous analysis:\n${context}\n\n${prompt}`;
            }
            if (outputFormat === "json") {
                userMessage += "\n\nRespond with a JSON object.";
            }
            messages.push({ role: "user", content: userMessage });
            const response = await client.chat({
                model,
                messages,
                options: {
                    temperature,
                },
            });
            const content = response.message.content;
            // Try to parse JSON if requested. Track success with a flag so that
            // valid-but-falsy JSON (null, 0, false, "") is still reported as JSON.
            // (Previously `type: parsed ? "json" : "text"` misclassified those.)
            let parsed;
            let parsedOk = false;
            if (outputFormat === "json") {
                try {
                    parsed = JSON.parse(content);
                    parsedOk = true;
                }
                catch {
                    // Model ignored the JSON instruction; fall back to plain text.
                }
            }
            return {
                type: parsedOk ? "json" : "text",
                content,
                parsed,
                source: `ai:ollama-text:${model}`,
                metadata: {
                    model,
                    prompt,
                    temperature,
                    localExecution: true,
                },
            };
        },
    };
}
222
// ============================================================================
// Convenience Exports
// ============================================================================
/**
 * Create both vision and text providers with shared config
 *
 * Returns the pair as `[visionProvider, textProvider]`.
 *
 * @example
 * ```typescript
 * import ollama from "@teamflojo/floimg-ollama";
 *
 * const [vision, text] = ollama({ baseUrl: "http://localhost:11434" });
 * client.registerVisionProvider(vision);
 * client.registerTextProvider(text);
 * ```
 */
export default function ollama(config = {}) {
    const { baseUrl, visionModel, textModel } = config;
    const vision = ollamaVision({ baseUrl, model: visionModel });
    const text = ollamaText({ baseUrl, model: textModel });
    return [vision, text];
}
export { ollama };
247
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAC;AAsChC;;GAEG;AACH,MAAM,CAAC,MAAM,kBAAkB,GAAyB;IACtD,IAAI,EAAE,eAAe;IACrB,WAAW,EAAE,yEAAyE;IACtF,UAAU,EAAE;QACV,MAAM,EAAE;YACN,IAAI,EAAE,QAAQ;YACd,KAAK,EAAE,QAAQ;YACf,WAAW,EAAE,wCAAwC;YACrD,OAAO,EAAE,gCAAgC;SAC1C;QACD,YAAY,EAAE;YACZ,IAAI,EAAE,QAAQ;YACd,KAAK,EAAE,eAAe;YACtB,WAAW,EAAE,gDAAgD;YAC7D,IAAI,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC;YACtB,OAAO,EAAE,MAAM;SAChB;KACF;CACF,CAAC;AAEF;;;;;;;;;;;;;;;;;;;GAmBG;AACH,MAAM,UAAU,YAAY,CAAC,SAA6B,EAAE;IAC1D,MAAM,EAAE,OAAO,GAAG,wBAAwB,EAAE,KAAK,GAAG,OAAO,EAAE,GAAG,MAAM,CAAC;IAEvE,MAAM,MAAM,GAAG,IAAI,MAAM,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC;IAE7C,OAAO;QACL,IAAI,EAAE,eAAe;QACrB,MAAM,EAAE,kBAAkB;QAE1B,KAAK,CAAC,OAAO,CACX,KAAgB,EAChB,MAA+B;YAE/B,MAAM,EACJ,MAAM,GAAG,gCAAgC,EACzC,YAAY,GAAG,MAAM,GACtB,GAAG,MAAqC,CAAC;YAE1C,0BAA0B;YAC1B,MAAM,MAAM,GAAG,KAAK,CAAC,KAAK,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;YAE9C,4CAA4C;YAC5C,MAAM,UAAU,GACd,YAAY,KAAK,MAAM;gBACrB,CAAC,CAAC,GAAG,MAAM,gEAAgE;gBAC3E,CAAC,CAAC,MAAM,CAAC;YAEb,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC;gBACjC,KAAK;gBACL,QAAQ,EAAE;oBACR;wBACE,IAAI,EAAE,MAAM;wBACZ,OAAO,EAAE,UAAU;wBACnB,MAAM,EAAE,CAAC,MAAM,CAAC;qBACjB;iBACF;aACF,CAAC,CAAC;YAEH,MAAM,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC;YAEzC,iCAAiC;YACjC,IAAI,MAA2C,CAAC;YAChD,IAAI,YAAY,KAAK,MAAM,EAAE,CAAC;gBAC5B,IAAI,CAAC;oBACH,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;gBAC/B,CAAC;gBAAC,MAAM,CAAC;oBACP,uCAAuC;gBACzC,CAAC;YACH,CAAC;YAED,OAAO;gBACL,IAAI,EAAE,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM;gBAC9B,OAAO;gBACP,MAAM;gBACN,MAAM,EAAE,oBAAoB,KAAK,EAAE;gBACnC,QAAQ,EAAE;oBACR,KAAK;oBACL,MAAM;oBACN,cAAc,EAAE,IAAI;iBACrB;aACF,CAAC;QACJ,CAAC;KACF,CAAC;AACJ,CAAC;AAcD;;GAEG;AACH,MAAM,CAAC,MAAM,gBAAgB,GAAuB;IAClD,IAAI,EAAE,aAAa;IACnB,WAAW,EAAE,yEAAyE;IACtF,UAAU,EAAE;QACV,MAAM,EAAE;YACN,IAAI,EAAE,QAAQ;YACd,KAAK,EAAE,QAAQ;YACf,WAAW,EAAE,gCAAgC;SAC9C;QACD,YAAY,EAAE;YACZ,IAAI,EAAE,QAAQ;YACd,KAAK,EAAE,eAAe;YACt
B,WAAW,EAAE,sDAAsD;SACpE;QACD,OAAO,EAAE;YACP,IAAI,EAAE,QAAQ;YACd,KAAK,EAAE,SAAS;YAChB,WAAW,EAAE,+DAA+D;SAC7E;QACD,YAAY,EAAE;YACZ,IAAI,EAAE,QAAQ;YACd,KAAK,EAAE,eAAe;YACtB,WAAW,EAAE,gDAAgD;YAC7D,IAAI,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC;YACtB,OAAO,EAAE,MAAM;SAChB;QACD,WAAW,EAAE;YACX,IAAI,EAAE,QAAQ;YACd,KAAK,EAAE,aAAa;YACpB,WAAW,EAAE,wBAAwB;YACrC,OAAO,EAAE,GAAG;SACb;KACF;IACD,kBAAkB,EAAE,CAAC,QAAQ,CAAC;CAC/B,CAAC;AAEF;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,MAAM,UAAU,UAAU,CAAC,SAA2B,EAAE;IACtD,MAAM,EAAE,OAAO,GAAG,wBAAwB,EAAE,KAAK,GAAG,UAAU,EAAE,GAAG,MAAM,CAAC;IAE1E,MAAM,MAAM,GAAG,IAAI,MAAM,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC;IAE7C,OAAO;QACL,IAAI,EAAE,aAAa;QACnB,MAAM,EAAE,gBAAgB;QAExB,KAAK,CAAC,QAAQ,CAAC,MAA+B;YAC5C,MAAM,EACJ,MAAM,EACN,YAAY,EACZ,OAAO,EACP,YAAY,GAAG,MAAM,EACrB,WAAW,GAAG,GAAG,GAClB,GAAG,MAAmC,CAAC;YAExC,IAAI,CAAC,MAAM,EAAE,CAAC;gBACZ,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC;YACnE,CAAC;YAED,iBAAiB;YACjB,MAAM,QAAQ,GAAwD,EAAE,CAAC;YAEzE,gCAAgC;YAChC,IAAI,YAAY,EAAE,CAAC;gBACjB,IAAI,MAAM,GAAG,YAAY,CAAC;gBAC1B,IAAI,YAAY,KAAK,MAAM,EAAE,CAAC;oBAC5B,MAAM,IAAI,kCAAkC,CAAC;gBAC/C,CAAC;gBACD,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;YACrD,CAAC;iBAAM,IAAI,YAAY,KAAK,MAAM,EAAE,CAAC;gBACnC,QAAQ,CAAC,IAAI,CAAC;oBACZ,IAAI,EAAE,QAAQ;oBACd,OAAO,EAAE,8DAA8D;iBACxE,CAAC,CAAC;YACL,CAAC;YAED,2CAA2C;YAC3C,IAAI,WAAW,GAAG,MAAM,CAAC;YACzB,IAAI,OAAO,EAAE,CAAC;gBACZ,WAAW,GAAG,oCAAoC,OAAO,OAAO,MAAM,EAAE,CAAC;YAC3E,CAAC;YACD,IAAI,YAAY,KAAK,MAAM,EAAE,CAAC;gBAC5B,WAAW,IAAI,iCAAiC,CAAC;YACnD,CAAC;YACD,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,WAAW,EAAE,CAAC,CAAC;YAEtD,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC;gBACjC,KAAK;gBACL,QAAQ;gBACR,OAAO,EAAE;oBACP,WAAW;iBACZ;aACF,CAAC,CAAC;YAEH,MAAM,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC;YAEzC,iCAAiC;YACjC,IAAI,MAA2C,CAAC;YAChD,IAAI,YAAY,KAAK,MAAM,EAAE,CAAC;gBAC5B,IAAI,CAAC;oBACH,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;gBAC/B,CAAC;gBAAC,MAAM,CAAC;oBACP,uCAAuC;gBACzC,CAAC;YACH,CAAC;YAED,OAAO;gBACL,IAAI,EAAE,MAAM,CAAC,
CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM;gBAC9B,OAAO;gBACP,MAAM;gBACN,MAAM,EAAE,kBAAkB,KAAK,EAAE;gBACjC,QAAQ,EAAE;oBACR,KAAK;oBACL,MAAM;oBACN,WAAW;oBACX,cAAc,EAAE,IAAI;iBACrB;aACF,CAAC;QACJ,CAAC;KACF,CAAC;AACJ,CAAC;AAED,+EAA+E;AAC/E,sBAAsB;AACtB,+EAA+E;AAE/E;;;;;;;;;;;;;GAaG;AACH,MAAM,CAAC,OAAO,UAAU,MAAM,CAC5B,SAAsE,EAAE;IAExE,MAAM,EAAE,OAAO,EAAE,WAAW,EAAE,SAAS,EAAE,GAAG,MAAM,CAAC;IAEnD,OAAO;QACL,YAAY,CAAC,EAAE,OAAO,EAAE,KAAK,EAAE,WAAW,EAAE,CAAC;QAC7C,UAAU,CAAC,EAAE,OAAO,EAAE,KAAK,EAAE,SAAS,EAAE,CAAC;KAC1C,CAAC;AACJ,CAAC;AAED,OAAO,EAAE,MAAM,EAAE,CAAC"}
package/package.json ADDED
@@ -0,0 +1,59 @@
1
+ {
2
+ "name": "@teamflojo/floimg-ollama",
3
+ "version": "0.2.0",
4
+ "description": "Ollama local AI provider for floimg - vision and text generation with local models",
5
+ "type": "module",
6
+ "main": "./dist/index.js",
7
+ "types": "./dist/index.d.ts",
8
+ "exports": {
9
+ ".": {
10
+ "types": "./dist/index.d.ts",
11
+ "import": "./dist/index.js"
12
+ }
13
+ },
14
+ "files": [
15
+ "dist",
16
+ "README.md",
17
+ "LICENSE"
18
+ ],
19
+ "scripts": {
20
+ "build": "tsc",
21
+ "dev": "tsc --watch",
22
+ "test": "vitest",
23
+ "typecheck": "tsc --noEmit",
24
+ "clean": "rm -rf dist",
25
+ "prepublishOnly": "npm run build"
26
+ },
27
+ "keywords": [
28
+ "floimg",
29
+ "ollama",
30
+ "llama",
31
+ "llava",
32
+ "local-ai",
33
+ "vision",
34
+ "text-generation",
35
+ "image-analysis"
36
+ ],
37
+ "author": "Brett Cooke",
38
+ "license": "MIT",
39
+ "repository": {
40
+ "type": "git",
41
+ "url": "https://github.com/TeamFlojo/floimg.git",
42
+ "directory": "packages/floimg-ollama"
43
+ },
44
+ "peerDependencies": {
45
+ "@teamflojo/floimg": "^0.1.0"
46
+ },
47
+ "dependencies": {
48
+ "ollama": "^0.5.11"
49
+ },
50
+ "devDependencies": {
51
+ "@types/node": "^22.10.2",
52
+ "@teamflojo/floimg": "workspace:*",
53
+ "typescript": "^5.7.2",
54
+ "vitest": "^2.1.8"
55
+ },
56
+ "engines": {
57
+ "node": ">=18.0.0"
58
+ }
59
+ }