@memberjunction/ai-ollama 2.85.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts ADDED
@@ -0,0 +1,3 @@
+ export * from './models/ollama-llm';
+ export * from './models/ollama-embeddings';
+ //# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,qBAAqB,CAAC;AACpC,cAAc,4BAA4B,CAAC"}
package/dist/index.js ADDED
@@ -0,0 +1,24 @@
+ "use strict";
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+     if (k2 === undefined) k2 = k;
+     var desc = Object.getOwnPropertyDescriptor(m, k);
+     if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+         desc = { enumerable: true, get: function() { return m[k]; } };
+     }
+     Object.defineProperty(o, k2, desc);
+ }) : (function(o, m, k, k2) {
+     if (k2 === undefined) k2 = k;
+     o[k2] = m[k];
+ }));
+ var __exportStar = (this && this.__exportStar) || function(m, exports) {
+     for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ __exportStar(require("./models/ollama-llm"), exports);
+ __exportStar(require("./models/ollama-embeddings"), exports);
+ const ollama_llm_1 = require("./models/ollama-llm");
+ const ollama_embeddings_1 = require("./models/ollama-embeddings");
+ // Prevent tree shaking
+ (0, ollama_llm_1.LoadOllamaLLM)();
+ (0, ollama_embeddings_1.LoadOllamaEmbedding)();
+ //# sourceMappingURL=index.js.map
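
The LoadOllamaLLM() and LoadOllamaEmbedding() calls at the bottom of index.js exist purely so the side-effectful @RegisterClass decorators survive bundler tree shaking. A minimal consumer sketch in TypeScript; the MJGlobal.Instance.ClassFactory.CreateInstance resolution shown here is the usual MemberJunction pattern from @memberjunction/global and is assumed rather than shown in this diff:

    import { BaseLLM, BaseEmbeddings } from '@memberjunction/ai';
    import { MJGlobal } from '@memberjunction/global';
    import { LoadOllamaLLM, LoadOllamaEmbedding } from '@memberjunction/ai-ollama';

    // Calling the no-op loaders from your own entry point guarantees the decorated
    // classes stay in the bundle even if nothing references them directly.
    LoadOllamaLLM();
    LoadOllamaEmbedding();

    // Resolve the providers by the keys they were registered under ("OllamaLLM", "OllamaEmbedding")
    const llm = MJGlobal.Instance.ClassFactory.CreateInstance<BaseLLM>(BaseLLM, 'OllamaLLM', '');
    const embedder = MJGlobal.Instance.ClassFactory.CreateInstance<BaseEmbeddings>(BaseEmbeddings, 'OllamaEmbedding', '');
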
package/dist/index.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA,sDAAoC;AACpC,6DAA2C;AAE3C,oDAAoD;AACpD,kEAAiE;AAEjE,uBAAuB;AACvB,IAAA,0BAAa,GAAE,CAAC;AAChB,IAAA,uCAAmB,GAAE,CAAC"}
package/dist/models/ollama-embeddings.d.ts ADDED
@@ -0,0 +1,58 @@
+ import { BaseEmbeddings, EmbedTextParams, EmbedTextResult, EmbedTextsParams, EmbedTextsResult } from '@memberjunction/ai';
+ import { Ollama } from 'ollama';
+ /**
+  * Ollama implementation of the BaseEmbeddings class for local embedding generation
+  * Supports various embedding models like nomic-embed-text, mxbai-embed-large, etc.
+  */
+ export declare class OllamaEmbedding extends BaseEmbeddings {
+     private _client;
+     private _baseUrl;
+     private _keepAlive;
+     constructor(apiKey?: string);
+     /**
+      * Read only getter method to get the Ollama client instance
+      */
+     get OllamaClient(): Ollama;
+     /**
+      * Read only getter method to get the Ollama client instance
+      */
+     get client(): Ollama;
+     /**
+      * Override SetAdditionalSettings to handle Ollama specific settings
+      */
+     SetAdditionalSettings(settings: Record<string, any>): void;
+     /**
+      * Embed a single text string using Ollama
+      */
+     EmbedText(params: EmbedTextParams): Promise<EmbedTextResult>;
+     /**
+      * Embed multiple texts in batch using Ollama
+      * Note: Ollama doesn't have native batch support, so we process sequentially
+      * For better performance, consider running multiple Ollama instances or using async processing
+      */
+     EmbedTexts(params: EmbedTextsParams): Promise<EmbedTextsResult>;
+     /**
+      * Ensure a model is available locally, pulling it if necessary
+      */
+     private ensureModelAvailable;
+     /**
+      * Get available embedding models
+      * Required by BaseEmbeddings abstract class
+      */
+     GetEmbeddingModels(): Promise<any>;
+     /**
+      * List available embedding models in Ollama
+      */
+     listEmbeddingModels(): Promise<string[]>;
+     /**
+      * Get information about a specific embedding model
+      */
+     getModelInfo(modelName: string): Promise<any>;
+     /**
+      * Get the dimension size for a specific embedding model
+      * This is useful for setting up vector databases
+      */
+     getEmbeddingDimension(modelName: string): Promise<number | null>;
+ }
+ export declare function LoadOllamaEmbedding(): void;
+ //# sourceMappingURL=ollama-embeddings.d.ts.map
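
The declaration file above is the package's whole embedding surface. A minimal usage sketch, assuming a local Ollama server on the default port and assuming EmbedTextParams can be satisfied with a plain object literal carrying the text and model fields the implementation reads:

    import { OllamaEmbedding } from '@memberjunction/ai-ollama';

    async function embedOne(): Promise<number> {
        const embedder = new OllamaEmbedding(); // no API key is needed for a local server
        // Optional: point at a non-default host and keep the model resident for 10 minutes
        embedder.SetAdditionalSettings({ baseUrl: 'http://localhost:11434', keepAlive: '10m' });
        const result = await embedder.EmbedText({ text: 'hello world', model: 'nomic-embed-text' });
        return result.vector.length; // e.g. 768 for nomic-embed-text
    }
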
package/dist/models/ollama-embeddings.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"ollama-embeddings.d.ts","sourceRoot":"","sources":["../../src/models/ollama-embeddings.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,eAAe,EAAE,eAAe,EAAE,gBAAgB,EAAE,gBAAgB,EAAc,MAAM,oBAAoB,CAAC;AAEtI,OAAO,EAAE,MAAM,EAAyC,MAAM,QAAQ,CAAC;AAEvE;;;GAGG;AACH,qBACa,eAAgB,SAAQ,cAAc;IAC/C,OAAO,CAAC,OAAO,CAAS;IACxB,OAAO,CAAC,QAAQ,CAAoC;IACpD,OAAO,CAAC,UAAU,CAAyB;gBAE/B,MAAM,CAAC,EAAE,MAAM;IAK3B;;OAEG;IACH,IAAW,YAAY,IAAI,MAAM,CAEhC;IAED;;OAEG;IACH,IAAW,MAAM,IAAI,MAAM,CAE1B;IAED;;OAEG;IACa,qBAAqB,CAAC,QAAQ,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI;IAc1E;;OAEG;IACU,SAAS,CAAC,MAAM,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;IAmDzE;;;;OAIG;IACU,UAAU,CAAC,MAAM,EAAE,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;IAqD5E;;OAEG;YACW,oBAAoB;IAmBlC;;;OAGG;IACU,kBAAkB,IAAI,OAAO,CAAC,GAAG,CAAC;IAuB/C;;OAEG;IACU,mBAAmB,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;IAiBrD;;OAEG;IACU,YAAY,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC;IAU1D;;;OAGG;IACU,qBAAqB,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC;CAehF;AAED,wBAAgB,mBAAmB,SAElC"}
package/dist/models/ollama-embeddings.js ADDED
@@ -0,0 +1,239 @@
+ "use strict";
+ var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
+     var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
+     if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
+     else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
+     return c > 3 && r && Object.defineProperty(target, key, r), r;
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.LoadOllamaEmbedding = exports.OllamaEmbedding = void 0;
+ const ai_1 = require("@memberjunction/ai");
+ const global_1 = require("@memberjunction/global");
+ const ollama_1 = require("ollama");
+ /**
+  * Ollama implementation of the BaseEmbeddings class for local embedding generation
+  * Supports various embedding models like nomic-embed-text, mxbai-embed-large, etc.
+  */
+ let OllamaEmbedding = class OllamaEmbedding extends ai_1.BaseEmbeddings {
+     constructor(apiKey) {
+         super(apiKey || ''); // Ollama doesn't require API key for local usage
+         this._baseUrl = 'http://localhost:11434';
+         this._keepAlive = '5m'; // Default keep model loaded for 5 minutes
+         this._client = new ollama_1.Ollama({ host: this._baseUrl });
+     }
+     /**
+      * Read only getter method to get the Ollama client instance
+      */
+     get OllamaClient() {
+         return this._client;
+     }
+     /**
+      * Read only getter method to get the Ollama client instance
+      */
+     get client() {
+         return this.OllamaClient;
+     }
+     /**
+      * Override SetAdditionalSettings to handle Ollama specific settings
+      */
+     SetAdditionalSettings(settings) {
+         super.SetAdditionalSettings(settings);
+         // Handle Ollama-specific settings
+         if (settings.baseUrl || settings.host) {
+             this._baseUrl = settings.baseUrl || settings.host;
+             this._client = new ollama_1.Ollama({ host: this._baseUrl });
+         }
+         if (settings.keepAlive !== undefined) {
+             this._keepAlive = settings.keepAlive;
+         }
+     }
+     /**
+      * Embed a single text string using Ollama
+      */
+     async EmbedText(params) {
+         if (!params.model) {
+             throw new Error('Model name is required for Ollama embedding provider');
+         }
+         const startTime = new Date();
+         try {
+             // Ensure the model is available
+             await this.ensureModelAvailable(params.model);
+             // Create embeddings request
+             const embeddingsRequest = {
+                 model: params.model,
+                 prompt: params.text,
+                 keep_alive: this._keepAlive
+             };
+             // Additional options can be passed through additionalParams if needed
+             if (params.additionalParams) {
+                 Object.assign(embeddingsRequest, params.additionalParams);
+             }
+             // Make the embeddings request
+             const response = await this.client.embeddings(embeddingsRequest);
+             const endTime = new Date();
+             // Return the embedding result
+             const result = {
+                 object: 'object',
+                 model: params.model,
+                 ModelUsage: new ai_1.ModelUsage(response.prompt_eval_count || 0, 0),
+                 vector: response.embedding
+             };
+             return result;
+         }
+         catch (error) {
+             const endTime = new Date();
+             const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred';
+             // On error, return a minimal valid result structure
+             // The BaseEmbeddings class expects a specific format
+             throw error;
+         }
+     }
+     /**
+      * Embed multiple texts in batch using Ollama
+      * Note: Ollama doesn't have native batch support, so we process sequentially
+      * For better performance, consider running multiple Ollama instances or using async processing
+      */
+     async EmbedTexts(params) {
+         if (!params.model) {
+             throw new Error('Model name is required for Ollama embedding provider');
+         }
+         const startTime = new Date();
+         const embeddings = [];
+         let totalPromptTokens = 0;
+         try {
+             // Ensure the model is available
+             await this.ensureModelAvailable(params.model);
+             // Process each text sequentially
+             // Note: Ollama doesn't support true batch processing, but we can optimize by keeping the model loaded
+             for (const text of params.texts) {
+                 const embeddingsRequest = {
+                     model: params.model,
+                     prompt: text,
+                     keep_alive: this._keepAlive
+                 };
+                 // Additional options can be passed through additionalParams if needed
+                 if (params.additionalParams) {
+                     Object.assign(embeddingsRequest, params.additionalParams);
+                 }
+                 const response = await this.client.embeddings(embeddingsRequest);
+                 embeddings.push(response.embedding);
+                 totalPromptTokens += response.prompt_eval_count || 0;
+             }
+             const endTime = new Date();
+             // Return the batch embedding result
+             const result = {
+                 object: 'list',
+                 model: params.model,
+                 ModelUsage: new ai_1.ModelUsage(totalPromptTokens, 0),
+                 vectors: embeddings
+             };
+             return result;
+         }
+         catch (error) {
+             const endTime = new Date();
+             const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred';
+             // On error, throw to let caller handle
+             throw error;
+         }
+     }
+     /**
+      * Ensure a model is available locally, pulling it if necessary
+      */
+     async ensureModelAvailable(modelName) {
+         try {
+             // Check if model is available
+             const models = await this.client.list();
+             const isAvailable = models.models.some((m) => m.name === modelName || m.name.startsWith(modelName + ':'));
+             if (!isAvailable) {
+                 console.log(`Model ${modelName} not found locally. Attempting to pull...`);
+                 await this.client.pull({ model: modelName, stream: false });
+                 console.log(`Model ${modelName} pulled successfully.`);
+             }
+         }
+         catch (error) {
+             // If we can't check or pull, continue anyway - the embeddings call will fail with a clear error
+             console.warn(`Could not verify model availability: ${error}`);
+         }
+     }
+     /**
+      * Get available embedding models
+      * Required by BaseEmbeddings abstract class
+      */
+     async GetEmbeddingModels() {
+         try {
+             const models = await this.client.list();
+             // Filter for common embedding models
+             const embeddingKeywords = ['embed', 'e5', 'bge', 'gte', 'nomic', 'mxbai', 'all-minilm'];
+             const embeddingModels = models.models
+                 .filter((m) => embeddingKeywords.some(keyword => m.name.toLowerCase().includes(keyword)))
+                 .map((m) => ({
+                     name: m.name,
+                     size: m.size,
+                     modified: m.modified_at
+                 }));
+             return embeddingModels;
+         }
+         catch (error) {
+             console.error('Failed to get embedding models:', error);
+             return [];
+         }
+     }
+     /**
+      * List available embedding models in Ollama
+      */
+     async listEmbeddingModels() {
+         try {
+             const models = await this.client.list();
+             // Filter for common embedding models (this is a heuristic as Ollama doesn't strictly categorize)
+             const embeddingKeywords = ['embed', 'e5', 'bge', 'gte', 'nomic', 'mxbai', 'all-minilm'];
+             return models.models
+                 .map((m) => m.name)
+                 .filter((name) => embeddingKeywords.some(keyword => name.toLowerCase().includes(keyword)));
+         }
+         catch (error) {
+             console.error('Failed to list embedding models:', error);
+             return [];
+         }
+     }
+     /**
+      * Get information about a specific embedding model
+      */
+     async getModelInfo(modelName) {
+         try {
+             const response = await this.client.show({ model: modelName });
+             return response;
+         }
+         catch (error) {
+             console.error(`Failed to get info for model ${modelName}:`, error);
+             return null;
+         }
+     }
+     /**
+      * Get the dimension size for a specific embedding model
+      * This is useful for setting up vector databases
+      */
+     async getEmbeddingDimension(modelName) {
+         try {
+             // Generate a sample embedding to get dimensions
+             const response = await this.client.embeddings({
+                 model: modelName,
+                 prompt: "test",
+                 keep_alive: 0 // Don't keep model loaded for this test
+             });
+             return response.embedding.length;
+         }
+         catch (error) {
+             console.error(`Failed to get embedding dimension for ${modelName}:`, error);
+             return null;
+         }
+     }
+ };
+ exports.OllamaEmbedding = OllamaEmbedding;
+ exports.OllamaEmbedding = OllamaEmbedding = __decorate([
+     (0, global_1.RegisterClass)(ai_1.BaseEmbeddings, "OllamaEmbedding")
+ ], OllamaEmbedding);
+ function LoadOllamaEmbedding() {
+     // this does nothing but prevents the class from being removed by the tree shaker
+ }
+ exports.LoadOllamaEmbedding = LoadOllamaEmbedding;
+ //# sourceMappingURL=ollama-embeddings.js.map
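
Two practical consequences of the implementation above: EmbedTexts issues one HTTP round trip per input (keep_alive is what keeps the model warm between them), and getEmbeddingDimension spends one throwaway "test" embedding to discover the vector width. A sketch that uses both to size a vector store before bulk-embedding, under the same plain-object-params assumption as the earlier example:

    import { OllamaEmbedding } from '@memberjunction/ai-ollama';

    async function embedCorpus(docs: string[]) {
        const embedder = new OllamaEmbedding();
        // Discover the vector width first, e.g. to create a pgvector column of the right size
        const dim = await embedder.getEmbeddingDimension('nomic-embed-text'); // e.g. 768, or null on failure
        // Sequential under the hood: expect docs.length round trips to the Ollama server
        const batch = await embedder.EmbedTexts({ texts: docs, model: 'nomic-embed-text' });
        return { dim, vectors: batch.vectors };
    }
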
package/dist/models/ollama-embeddings.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"ollama-embeddings.js","sourceRoot":"","sources":["../../src/models/ollama-embeddings.ts"],"names":[],"mappings":";;;;;;;;;AAAA,2CAAsI;AACtI,mDAAuD;AACvD,mCAAuE;AAEvE;;;GAGG;AAEI,IAAM,eAAe,GAArB,MAAM,eAAgB,SAAQ,mBAAc;IAK/C,YAAY,MAAe;QACvB,KAAK,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC,CAAC,iDAAiD;QAJlE,aAAQ,GAAW,wBAAwB,CAAC;QAC5C,eAAU,GAAoB,IAAI,CAAC,CAAC,0CAA0C;QAIlF,IAAI,CAAC,OAAO,GAAG,IAAI,eAAM,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;IACvD,CAAC;IAED;;OAEG;IACH,IAAW,YAAY;QACnB,OAAO,IAAI,CAAC,OAAO,CAAC;IACxB,CAAC;IAED;;OAEG;IACH,IAAW,MAAM;QACb,OAAO,IAAI,CAAC,YAAY,CAAC;IAC7B,CAAC;IAED;;OAEG;IACa,qBAAqB,CAAC,QAA6B;QAC/D,KAAK,CAAC,qBAAqB,CAAC,QAAQ,CAAC,CAAC;QAEtC,kCAAkC;QAClC,IAAI,QAAQ,CAAC,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,CAAC;YACpC,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC,OAAO,IAAI,QAAQ,CAAC,IAAI,CAAC;YAClD,IAAI,CAAC,OAAO,GAAG,IAAI,eAAM,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;QACvD,CAAC;QAED,IAAI,QAAQ,CAAC,SAAS,KAAK,SAAS,EAAE,CAAC;YACnC,IAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,SAAS,CAAC;QACzC,CAAC;IACL,CAAC;IAED;;OAEG;IACI,KAAK,CAAC,SAAS,CAAC,MAAuB;QAC1C,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC;YAChB,MAAM,IAAI,KAAK,CAAC,sDAAsD,CAAC,CAAC;QAC5E,CAAC;QAED,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;QAE7B,IAAI,CAAC;YACD,gCAAgC;YAChC,MAAM,IAAI,CAAC,oBAAoB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YAE9C,4BAA4B;YAC5B,MAAM,iBAAiB,GAAsB;gBACzC,KAAK,EAAE,MAAM,CAAC,KAAK;gBACnB,MAAM,EAAE,MAAM,CAAC,IAAI;gBACnB,UAAU,EAAE,IAAI,CAAC,UAAU;aAC9B,CAAC;YAEF,sEAAsE;YACtE,IAAK,MAAc,CAAC,gBAAgB,EAAE,CAAC;gBACnC,MAAM,CAAC,MAAM,CAAC,iBAAiB,EAAG,MAAc,CAAC,gBAAgB,CAAC,CAAC;YACvE,CAAC;YAED,8BAA8B;YAC9B,MAAM,QAAQ,GAAuB,MAAM,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,iBAAiB,CAAC,CAAC;YAErF,MAAM,OAAO,GAAG,IAAI,IAAI,EAAE,CAAC;YAE3B,8BAA8B;YAC9B,MAAM,MAAM,GAAoB;gBAC5B,MAAM,EAAE,QAAQ;gBAChB,KAAK,EAAE,MAAM,CAAC,KAAK;gBACnB,UAAU,EAAE,IAAI,eAAU,CACrB,QAAgB,CAAC,iBAAiB,IAAI,CAAC,EACxC,CAAC,CACJ;gBACD,MAAM,EAAE,QAAQ,CAAC,SAAS;aAC7B,CAAC;YAEF,OAAO,MAAM,CAAC;QAElB,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,MAAM,OAAO,GAAG,IAAI,IAAI,EAAE,CAAC;YAC3B,MAAM,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,wBAAwB,CAAC;YAEvF,oDAAoD;YACpD,qDAAqD;YACrD,MAAM,KAAK,CAAC;QAChB,CAAC;IACL,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,UAAU,CAAC,MAAwB;QAC5C,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC;YAChB,MAAM,IAAI,KAAK,CAAC,sDAAsD,CAAC,CAAC;QAC5E,CAAC;QAED,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;QAC7B,MAAM,UAAU,GAAe,EAAE,CAAC;QAClC,IAAI,iBAAiB,GAAG,CAAC,CAAC;QAE1B,IAAI,CAAC;YACD,gCAAgC;YAChC,MAAM,IAAI,CAAC,oBAAoB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YAE9C,iCAAiC;YACjC,sGAAsG;YACtG,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,KAAK,EAAE,CAAC;gBAC9B,MAAM,iBAAiB,GAAsB;oBACzC,KAAK,EAAE,MAAM,CAAC,KAAK;oBACnB,MAAM,EAAE,IAAI;oBACZ,UAAU,EAAE,IAAI,CAAC,UAAU;iBAC9B,CAAC;gBAEF,sEAAsE;gBACtE,IAAK,MAAc,CAAC,gBAAgB,EAAE,CAAC;oBACnC,MAAM,CAAC,MAAM,CAAC,iBAAiB,EAAG,MAAc,CAAC,gBAAgB,CAAC,CAAC;gBACvE,CAAC;gBAED,MAAM,QAAQ,GAAuB,MAAM,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,iBAAiB,CAAC,CAAC;gBACrF,UAAU,CAAC,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC;gBACpC,iBAAiB,IAAK,QAAgB,CAAC,iBAAiB,IAAI,CAAC,CAAC;YAClE,CAAC;YAED,MAAM,OAAO,GAAG,IAAI,IAAI,EAAE,CAAC;YAE3B,oCAAoC;YACpC,MAAM,MAAM,GAAqB;gBAC7B,MAAM,EAAE,MAAM;gBACd,KAAK,EAAE,MAAM,CAAC,KAAK;gBACnB,UAAU,EAAE,IAAI,eAAU,CAAC,iBAAiB,EAAE,CAAC,CAAC;gBAChD,OAAO,EAAE,UAAU;aACtB,CAAC;YAEF,OAAO,MAAM,CAAC;QAElB,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,MAAM,OAAO,GAAG,IAAI,IAAI,EAAE,CAAC;YAC3B,MAAM,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,wBAAwB,CAAC;YAEvF,uCAAuC;YACvC,MAAM,KAAK,CAAC;QAChB,CAAC;IACL,CAAC;IAED;;OAEG;IACK,KAAK,CAAC,oBAAoB,CAAC,SAAiB;QAChD,IAAI,CAAC;YACD,8BAA8B;YAC9B,MAAM,MAAM,GAAG,MA
AM,IAAI,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC;YACxC,MAAM,WAAW,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAM,EAAE,EAAE,CAC9C,CAAC,CAAC,IAAI,KAAK,SAAS,IAAI,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,SAAS,GAAG,GAAG,CAAC,CAC7D,CAAC;YAEF,IAAI,CAAC,WAAW,EAAE,CAAC;gBACf,OAAO,CAAC,GAAG,CAAC,SAAS,SAAS,2CAA2C,CAAC,CAAC;gBAC3E,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,CAAC;gBAC5D,OAAO,CAAC,GAAG,CAAC,SAAS,SAAS,uBAAuB,CAAC,CAAC;YAC3D,CAAC;QACL,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,gGAAgG;YAChG,OAAO,CAAC,IAAI,CAAC,wCAAwC,KAAK,EAAE,CAAC,CAAC;QAClE,CAAC;IACL,CAAC;IAED;;;OAGG;IACI,KAAK,CAAC,kBAAkB;QAC3B,IAAI,CAAC;YACD,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC;YACxC,qCAAqC;YACrC,MAAM,iBAAiB,GAAG,CAAC,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;YAExF,MAAM,eAAe,GAAG,MAAM,CAAC,MAAM;iBAChC,MAAM,CAAC,CAAC,CAAM,EAAE,EAAE,CACf,iBAAiB,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAC5E;iBACA,GAAG,CAAC,CAAC,CAAM,EAAE,EAAE,CAAC,CAAC;gBACd,IAAI,EAAE,CAAC,CAAC,IAAI;gBACZ,IAAI,EAAE,CAAC,CAAC,IAAI;gBACZ,QAAQ,EAAE,CAAC,CAAC,WAAW;aAC1B,CAAC,CAAC,CAAC;YAER,OAAO,eAAe,CAAC;QAC3B,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,OAAO,CAAC,KAAK,CAAC,iCAAiC,EAAE,KAAK,CAAC,CAAC;YACxD,OAAO,EAAE,CAAC;QACd,CAAC;IACL,CAAC;IAED;;OAEG;IACI,KAAK,CAAC,mBAAmB;QAC5B,IAAI,CAAC;YACD,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC;YACxC,iGAAiG;YACjG,MAAM,iBAAiB,GAAG,CAAC,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;YAExF,OAAO,MAAM,CAAC,MAAM;iBACf,GAAG,CAAC,CAAC,CAAM,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC;iBACvB,MAAM,CAAC,CAAC,IAAY,EAAE,EAAE,CACrB,iBAAiB,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAC1E,CAAC;QACV,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,OAAO,CAAC,KAAK,CAAC,kCAAkC,EAAE,KAAK,CAAC,CAAC;YACzD,OAAO,EAAE,CAAC;QACd,CAAC;IACL,CAAC;IAED;;OAEG;IACI,KAAK,CAAC,YAAY,CAAC,SAAiB;QACvC,IAAI,CAAC;YACD,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,KAAK,EAAE,SAAS,EAAE,CAAC,CAAC;YAC9D,OAAO,QAAQ,CAAC;QACpB,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,OAAO,CAAC,KAAK,CAAC,gCAAgC,SAAS,GAAG,EAAE,KAAK,CAAC,CAAC;YACnE,OAAO,IAAI,CAAC;QAChB,CAAC;IACL,CAAC;IAED;;;OAGG;IACI,KAAK,CAAC,qBAAqB,CAAC,SAAiB;QAChD,IAAI,CAAC;YACD,gDAAgD;YAChD,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC;gBAC1C,KAAK,EAAE,SAAS;gBAChB,MAAM,EAAE,MAAM;gBACd,UAAU,EAAE,CAAC,CAAC,wCAAwC;aACzD,CAAC,CAAC;YAEH,OAAO,QAAQ,CAAC,SAAS,CAAC,MAAM,CAAC;QACrC,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,OAAO,CAAC,KAAK,CAAC,yCAAyC,SAAS,GAAG,EAAE,KAAK,CAAC,CAAC;YAC5E,OAAO,IAAI,CAAC;QAChB,CAAC;IACL,CAAC;CACJ,CAAA;AA9PY,0CAAe;0BAAf,eAAe;IAD3B,IAAA,sBAAa,EAAC,mBAAc,EAAE,iBAAiB,CAAC;GACpC,eAAe,CA8P3B;AAED,SAAgB,mBAAmB;IAC/B,iFAAiF;AACrF,CAAC;AAFD,kDAEC"}
package/dist/models/ollama-llm.d.ts ADDED
@@ -0,0 +1,80 @@
+ import { BaseLLM, ChatParams, ChatResult, ClassifyParams, ClassifyResult, SummarizeParams, SummarizeResult } from '@memberjunction/ai';
+ import { Ollama } from 'ollama';
+ /**
+  * Ollama implementation of the BaseLLM class for local LLM inference
+  * Supports chat, generation, and streaming with various open-source models
+  */
+ export declare class OllamaLLM extends BaseLLM {
+     private _client;
+     private _baseUrl;
+     private _keepAlive;
+     constructor(apiKey?: string);
+     /**
+      * Read only getter method to get the Ollama client instance
+      */
+     get OllamaClient(): Ollama;
+     /**
+      * Read only getter method to get the Ollama client instance
+      */
+     get client(): Ollama;
+     /**
+      * Ollama supports streaming
+      */
+     get SupportsStreaming(): boolean;
+     /**
+      * Check if the provider supports thinking models
+      * Ollama can support thinking models depending on the loaded model
+      */
+     protected supportsThinkingModels(): boolean;
+     /**
+      * Override SetAdditionalSettings to handle Ollama specific settings
+      */
+     SetAdditionalSettings(settings: Record<string, any>): void;
+     /**
+      * Implementation of non-streaming chat completion for Ollama
+      */
+     protected nonStreamingChatCompletion(params: ChatParams): Promise<ChatResult>;
+     /**
+      * Create a streaming request for Ollama
+      */
+     protected createStreamingRequest(params: ChatParams): Promise<any>;
+     /**
+      * Process a streaming chunk from Ollama
+      */
+     protected processStreamingChunk(chunk: any): {
+         content: string;
+         finishReason?: string;
+         usage?: any;
+     };
+     /**
+      * Create the final response from streaming results for Ollama
+      */
+     protected finalizeStreamingResponse(accumulatedContent: string | null | undefined, lastChunk: any | null | undefined, usage: any | null | undefined): ChatResult;
+     /**
+      * Generate endpoint implementation for Ollama (alternative to chat)
+      * This can be useful for simple completion tasks
+      */
+     generate(params: {
+         model: string;
+         prompt: string;
+         temperature?: number;
+         maxOutputTokens?: number;
+         stream?: boolean;
+     }): Promise<any>;
+     /**
+      * List available models in Ollama
+      */
+     listModels(): Promise<any>;
+     /**
+      * Pull a model from Ollama registry
+      */
+     pullModel(modelName: string): Promise<void>;
+     /**
+      * Check if a model is available locally
+      */
+     isModelAvailable(modelName: string): Promise<boolean>;
+     SummarizeText(_params: SummarizeParams): Promise<SummarizeResult>;
+     ClassifyText(_params: ClassifyParams): Promise<ClassifyResult>;
+ }
+ export declare function LoadOllamaLLM(): void;
+ //# sourceMappingURL=ollama-llm.d.ts.map
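
For ordinary chat, the public entry point is inherited from BaseLLM rather than declared in this file. The sketch below assumes that entry point is ChatCompletion(params) as in MemberJunction's other providers, and that ChatParams can be instantiated and populated field by field; neither assumption is shown in this diff:

    import { ChatParams, ChatMessageRole } from '@memberjunction/ai';
    import { OllamaLLM } from '@memberjunction/ai-ollama';

    async function ask(question: string): Promise<string> {
        const llm = new OllamaLLM(); // local server, so the API key stays empty
        const params = new ChatParams();
        params.model = 'llama3.1';
        params.messages = [{ role: ChatMessageRole.user, content: question }];
        params.temperature = 0.2;
        const result = await llm.ChatCompletion(params); // assumed BaseLLM entry point
        if (!result.success) throw new Error(result.errorMessage);
        return result.data.choices[0].message.content as string;
    }
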
package/dist/models/ollama-llm.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"ollama-llm.d.ts","sourceRoot":"","sources":["../../src/models/ollama-llm.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,UAAU,EAAE,UAAU,EAAqC,cAAc,EAAE,cAAc,EAAE,eAAe,EAAE,eAAe,EAAc,MAAM,oBAAoB,CAAC;AAEtL,OAAO,EAAE,MAAM,EAAgE,MAAM,QAAQ,CAAC;AAE9F;;;GAGG;AACH,qBACa,SAAU,SAAQ,OAAO;IAClC,OAAO,CAAC,OAAO,CAAS;IACxB,OAAO,CAAC,QAAQ,CAAoC;IACpD,OAAO,CAAC,UAAU,CAAyB;gBAE/B,MAAM,CAAC,EAAE,MAAM;IAK3B;;OAEG;IACH,IAAW,YAAY,IAAI,MAAM,CAEhC;IAED;;OAEG;IACH,IAAW,MAAM,IAAI,MAAM,CAE1B;IAED;;OAEG;IACH,IAAoB,iBAAiB,IAAI,OAAO,CAE/C;IAED;;;OAGG;IACH,SAAS,CAAC,sBAAsB,IAAI,OAAO;IAI3C;;OAEG;IACa,qBAAqB,CAAC,QAAQ,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI;IAc1E;;OAEG;cACa,0BAA0B,CAAC,MAAM,EAAE,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;IAoKnF;;OAEG;cACa,sBAAsB,CAAC,MAAM,EAAE,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC;IA8FxE;;OAEG;IACH,SAAS,CAAC,qBAAqB,CAAC,KAAK,EAAE,GAAG,GAAG;QACzC,OAAO,EAAE,MAAM,CAAC;QAChB,YAAY,CAAC,EAAE,MAAM,CAAC;QACtB,KAAK,CAAC,EAAE,GAAG,CAAC;KACf;IAqCD;;OAEG;IACH,SAAS,CAAC,yBAAyB,CAC/B,kBAAkB,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,EAC7C,SAAS,EAAE,GAAG,GAAG,IAAI,GAAG,SAAS,EACjC,KAAK,EAAE,GAAG,GAAG,IAAI,GAAG,SAAS,GAC9B,UAAU;IA4Db;;;OAGG;IACU,QAAQ,CAAC,MAAM,EAAE;QAC1B,KAAK,EAAE,MAAM,CAAC;QACd,MAAM,EAAE,MAAM,CAAC;QACf,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,eAAe,CAAC,EAAE,MAAM,CAAC;QACzB,MAAM,CAAC,EAAE,OAAO,CAAC;KACpB,GAAG,OAAO,CAAC,GAAG,CAAC;IA0BhB;;OAEG;IACU,UAAU,IAAI,OAAO,CAAC,GAAG,CAAC;IAIvC;;OAEG;IACU,SAAS,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAIxD;;OAEG;IACU,gBAAgB,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IASrD,aAAa,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;IAIjE,YAAY,CAAC,OAAO,EAAE,cAAc,GAAG,OAAO,CAAC,cAAc,CAAC;CAG9E;AAED,wBAAgB,aAAa,SAE5B"}
package/dist/models/ollama-llm.js ADDED
@@ -0,0 +1,464 @@
+ "use strict";
+ var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
+     var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
+     if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
+     else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
+     return c > 3 && r && Object.defineProperty(target, key, r), r;
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.LoadOllamaLLM = exports.OllamaLLM = void 0;
+ const ai_1 = require("@memberjunction/ai");
+ const global_1 = require("@memberjunction/global");
+ const ollama_1 = require("ollama");
+ /**
+  * Ollama implementation of the BaseLLM class for local LLM inference
+  * Supports chat, generation, and streaming with various open-source models
+  */
+ let OllamaLLM = class OllamaLLM extends ai_1.BaseLLM {
+     constructor(apiKey) {
+         super(apiKey || ''); // Ollama doesn't require API key for local usage
+         this._baseUrl = 'http://localhost:11434';
+         this._keepAlive = '5m'; // Default keep model loaded for 5 minutes
+         this._client = new ollama_1.Ollama({ host: this._baseUrl });
+     }
+     /**
+      * Read only getter method to get the Ollama client instance
+      */
+     get OllamaClient() {
+         return this._client;
+     }
+     /**
+      * Read only getter method to get the Ollama client instance
+      */
+     get client() {
+         return this.OllamaClient;
+     }
+     /**
+      * Ollama supports streaming
+      */
+     get SupportsStreaming() {
+         return true;
+     }
+     /**
+      * Check if the provider supports thinking models
+      * Ollama can support thinking models depending on the loaded model
+      */
+     supportsThinkingModels() {
+         return true;
+     }
+     /**
+      * Override SetAdditionalSettings to handle Ollama specific settings
+      */
+     SetAdditionalSettings(settings) {
+         super.SetAdditionalSettings(settings);
+         // Handle Ollama-specific settings
+         if (settings.baseUrl || settings.host) {
+             this._baseUrl = settings.baseUrl || settings.host;
+             this._client = new ollama_1.Ollama({ host: this._baseUrl });
+         }
+         if (settings.keepAlive !== undefined) {
+             this._keepAlive = settings.keepAlive;
+         }
+     }
+     /**
+      * Implementation of non-streaming chat completion for Ollama
+      */
+     async nonStreamingChatCompletion(params) {
+         const startTime = new Date();
+         try {
+             // Convert MJ messages to Ollama format
+             const messages = params.messages.map(m => ({
+                 role: m.role,
+                 content: Array.isArray(m.content) ?
+                     m.content.map(block => {
+                         if (typeof block === 'string') {
+                             return block;
+                         }
+                         else if (block.type === 'text') {
+                             return block.content;
+                         }
+                         else {
+                             // For other content types including images
+                             return block.content;
+                         }
+                     }).join('\n') :
+                     m.content
+             }));
+             // Create chat request parameters
+             const chatRequest = {
+                 model: params.model,
+                 messages: messages,
+                 stream: false,
+                 options: {
+                     temperature: params.temperature
+                 },
+                 keep_alive: this._keepAlive
+             };
+             // Add optional parameters
+             if (params.maxOutputTokens != null && params.maxOutputTokens > 0) {
+                 chatRequest.options = {
+                     ...chatRequest.options,
+                     num_predict: params.maxOutputTokens
+                 };
+             }
+             if (params.topP != null) {
+                 chatRequest.options = {
+                     ...chatRequest.options,
+                     top_p: params.topP
+                 };
+             }
+             if (params.topK != null) {
+                 chatRequest.options = {
+                     ...chatRequest.options,
+                     top_k: params.topK
+                 };
+             }
+             if (params.seed != null) {
+                 chatRequest.options = {
+                     ...chatRequest.options,
+                     seed: params.seed
+                 };
+             }
+             if (params.stopSequences != null && params.stopSequences.length > 0) {
+                 chatRequest.options = {
+                     ...chatRequest.options,
+                     stop: params.stopSequences
+                 };
+             }
+             if (params.frequencyPenalty != null) {
+                 chatRequest.options = {
+                     ...chatRequest.options,
+                     frequency_penalty: params.frequencyPenalty
+                 };
+             }
+             if (params.presencePenalty != null) {
+                 chatRequest.options = {
+                     ...chatRequest.options,
+                     presence_penalty: params.presencePenalty
+                 };
+             }
+             // Handle response format
+             switch (params.responseFormat) {
+                 case 'JSON':
+                     // Ollama supports JSON mode through format parameter
+                     chatRequest.format = 'json';
+                     break;
+                 case 'ModelSpecific':
+                     if (params.modelSpecificResponseFormat) {
+                         chatRequest.format = params.modelSpecificResponseFormat;
+                     }
+                     break;
+             }
+             // Make the chat completion request
+             const response = await this.client.chat(chatRequest);
+             const endTime = new Date();
+             // Process thinking content if present (for models that support it)
+             let content = response.message.content;
+             let thinking = undefined;
+             if (this.supportsThinkingModels() && content) {
+                 const extracted = this.extractThinkingFromContent(content);
+                 content = extracted.content;
+                 thinking = extracted.thinking;
+             }
+             const choices = [{
+                 message: {
+                     role: ai_1.ChatMessageRole.assistant,
+                     content: content,
+                     thinking: thinking
+                 },
+                 finish_reason: response.done ? 'stop' : 'length',
+                 index: 0
+             }];
+             // Create ModelUsage from Ollama response
+             const usage = new ai_1.ModelUsage(response.prompt_eval_count || 0, response.eval_count || 0);
+             const result = {
+                 success: true,
+                 statusText: "OK",
+                 startTime: startTime,
+                 endTime: endTime,
+                 timeElapsed: endTime.getTime() - startTime.getTime(),
+                 data: {
+                     choices: choices,
+                     usage: usage
+                 },
+                 errorMessage: "",
+                 exception: null,
+             };
+             // Add model-specific response details
+             result.modelSpecificResponseDetails = {
+                 provider: 'ollama',
+                 model: params.model,
+                 total_duration: response.total_duration,
+                 load_duration: response.load_duration,
+                 prompt_eval_duration: response.prompt_eval_duration,
+                 eval_duration: response.eval_duration
+             };
+             return result;
+         }
+         catch (error) {
+             const endTime = new Date();
+             const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred';
+             return {
+                 success: false,
+                 statusText: "Error",
+                 startTime: startTime,
+                 endTime: endTime,
+                 timeElapsed: endTime.getTime() - startTime.getTime(),
+                 data: {
+                     choices: [],
+                     usage: new ai_1.ModelUsage(0, 0)
+                 },
+                 errorMessage: errorMessage,
+                 exception: error,
+             };
+         }
+     }
+     /**
+      * Create a streaming request for Ollama
+      */
+     async createStreamingRequest(params) {
+         // Initialize streaming state for thinking extraction if supported
+         if (this.supportsThinkingModels()) {
+             this.initializeThinkingStreamState();
+         }
+         // Convert MJ messages to Ollama format
+         const messages = params.messages.map(m => ({
+             role: m.role,
+             content: Array.isArray(m.content) ?
+                 m.content.map(block => {
+                     if (typeof block === 'string') {
+                         return block;
+                     }
+                     else if (block.type === 'text') {
+                         return block.content;
+                     }
+                     else {
+                         return block.content;
+                     }
+                 }).join('\n') :
+                 m.content
+         }));
+         // Create streaming chat request parameters
+         const chatRequest = {
+             model: params.model,
+             messages: messages,
+             stream: true,
+             options: {
+                 temperature: params.temperature
+             },
+             keep_alive: this._keepAlive
+         };
+         // Add optional parameters
+         if (params.maxOutputTokens != null && params.maxOutputTokens > 0) {
+             chatRequest.options = {
+                 ...chatRequest.options,
+                 num_predict: params.maxOutputTokens
+             };
+         }
+         if (params.topP != null) {
+             chatRequest.options = {
+                 ...chatRequest.options,
+                 top_p: params.topP
+             };
+         }
+         if (params.topK != null) {
+             chatRequest.options = {
+                 ...chatRequest.options,
+                 top_k: params.topK
+             };
+         }
+         if (params.seed != null) {
+             chatRequest.options = {
+                 ...chatRequest.options,
+                 seed: params.seed
+             };
+         }
+         if (params.stopSequences != null && params.stopSequences.length > 0) {
+             chatRequest.options = {
+                 ...chatRequest.options,
+                 stop: params.stopSequences
+             };
+         }
+         if (params.frequencyPenalty != null) {
+             chatRequest.options = {
+                 ...chatRequest.options,
+                 frequency_penalty: params.frequencyPenalty
+             };
+         }
+         if (params.presencePenalty != null) {
+             chatRequest.options = {
+                 ...chatRequest.options,
+                 presence_penalty: params.presencePenalty
+             };
+         }
+         // Handle response format
+         switch (params.responseFormat) {
+             case 'JSON':
+                 chatRequest.format = 'json';
+                 break;
+             case 'ModelSpecific':
+                 if (params.modelSpecificResponseFormat) {
+                     chatRequest.format = params.modelSpecificResponseFormat;
+                 }
+                 break;
+         }
+         // Return the streaming response
+         // Cast stream to true for TypeScript overload resolution
+         return this.client.chat({ ...chatRequest, stream: true });
+     }
+     /**
+      * Process a streaming chunk from Ollama
+      */
+     processStreamingChunk(chunk) {
+         let content = '';
+         let finishReason = undefined;
+         let usage = undefined;
+         // Ollama streaming chunks have a specific format
+         if (chunk && typeof chunk === 'object') {
+             if (chunk.message && chunk.message.content) {
+                 const rawContent = chunk.message.content;
+                 // Process the content with thinking extraction if supported
+                 content = this.supportsThinkingModels()
+                     ? this.processStreamChunkWithThinking(rawContent)
+                     : rawContent;
+             }
+             // Check if this is the final chunk
+             if (chunk.done === true) {
+                 finishReason = 'stop';
+                 // Extract usage information from final chunk
+                 if (chunk.prompt_eval_count || chunk.eval_count) {
+                     usage = {
+                         promptTokens: chunk.prompt_eval_count || 0,
+                         completionTokens: chunk.eval_count || 0
+                     };
+                 }
+             }
+         }
+         return {
+             content,
+             finishReason,
+             usage
+         };
+     }
+     /**
+      * Create the final response from streaming results for Ollama
+      */
+     finalizeStreamingResponse(accumulatedContent, lastChunk, usage) {
+         // Extract finish reason from last chunk if available
+         let finishReason = 'stop';
+         if (lastChunk?.done === false) {
+             finishReason = 'length';
+         }
+         // Extract usage metrics from accumulated usage or last chunk
+         let promptTokens = 0;
+         let completionTokens = 0;
+         if (usage) {
+             promptTokens = usage.promptTokens || 0;
+             completionTokens = usage.completionTokens || 0;
+         }
+         else if (lastChunk) {
+             promptTokens = lastChunk.prompt_eval_count || 0;
+             completionTokens = lastChunk.eval_count || 0;
+         }
+         // Create dates (will be overridden by base class)
+         const now = new Date();
+         // Create a proper ChatResult instance with constructor params
+         const result = new ai_1.ChatResult(true, now, now);
+         // Get thinking content from streaming state if available
+         const thinkingContent = this.thinkingStreamState?.accumulatedThinking.trim();
+         // Set all properties
+         result.data = {
+             choices: [{
+                 message: this.addThinkingToMessage({
+                     role: ai_1.ChatMessageRole.assistant,
+                     content: accumulatedContent ? accumulatedContent : ''
+                 }, thinkingContent),
+                 finish_reason: finishReason,
+                 index: 0
+             }],
+             usage: new ai_1.ModelUsage(promptTokens, completionTokens)
+         };
+         result.statusText = 'success';
+         result.errorMessage = null;
+         result.exception = null;
+         // Add Ollama-specific details if available
+         if (lastChunk) {
+             result.modelSpecificResponseDetails = {
+                 provider: 'ollama',
+                 model: lastChunk.model,
+                 total_duration: lastChunk.total_duration,
+                 load_duration: lastChunk.load_duration,
+                 prompt_eval_duration: lastChunk.prompt_eval_duration,
+                 eval_duration: lastChunk.eval_duration
+             };
+         }
+         return result;
+     }
+     /**
+      * Generate endpoint implementation for Ollama (alternative to chat)
+      * This can be useful for simple completion tasks
+      */
+     async generate(params) {
+         const generateRequest = {
+             model: params.model,
+             prompt: params.prompt,
+             stream: params.stream || false,
+             options: {
+                 temperature: params.temperature
+             },
+             keep_alive: this._keepAlive
+         };
+         if (params.maxOutputTokens) {
+             generateRequest.options = {
+                 ...generateRequest.options,
+                 num_predict: params.maxOutputTokens
+             };
+         }
+         // Handle TypeScript overload by explicitly typing based on stream value
+         if (params.stream) {
+             return await this.client.generate({ ...generateRequest, stream: true });
+         }
+         else {
+             return await this.client.generate({ ...generateRequest, stream: false });
+         }
+     }
+     /**
+      * List available models in Ollama
+      */
+     async listModels() {
+         return await this.client.list();
+     }
+     /**
+      * Pull a model from Ollama registry
+      */
+     async pullModel(modelName) {
+         await this.client.pull({ model: modelName, stream: false });
+     }
+     /**
+      * Check if a model is available locally
+      */
+     async isModelAvailable(modelName) {
+         try {
+             const models = await this.listModels();
+             return models.models.some((m) => m.name === modelName || m.name.startsWith(modelName + ':'));
+         }
+         catch {
+             return false;
+         }
+     }
+     async SummarizeText(_params) {
+         throw new Error("Method not implemented. Use Chat with a summarization prompt instead.");
+     }
+     async ClassifyText(_params) {
+         throw new Error("Method not implemented. Use Chat with a classification prompt instead.");
+     }
+ };
+ exports.OllamaLLM = OllamaLLM;
+ exports.OllamaLLM = OllamaLLM = __decorate([
+     (0, global_1.RegisterClass)(ai_1.BaseLLM, "OllamaLLM")
+ ], OllamaLLM);
+ function LoadOllamaLLM() {
+     // this does nothing but prevents the class from being removed by the tree shaker
+ }
+ exports.LoadOllamaLLM = LoadOllamaLLM;
+ //# sourceMappingURL=ollama-llm.js.map
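
The convenience helpers compiled above chain naturally into a cold-start-safe completion flow. Everything in this sketch is declared in the file itself except the response field, which comes from the ollama client's GenerateResponse type:

    import { OllamaLLM } from '@memberjunction/ai-ollama';

    async function completeLocally(prompt: string): Promise<string> {
        const llm = new OllamaLLM();
        // Pull on demand so a machine that has never seen the model still works
        // (blocking, non-streaming pull, as in ensureModelAvailable above)
        if (!(await llm.isModelAvailable('llama3.1'))) {
            await llm.pullModel('llama3.1');
        }
        // generate() targets Ollama's /api/generate endpoint instead of /api/chat
        const res = await llm.generate({ model: 'llama3.1', prompt, maxOutputTokens: 128 });
        return res.response; // completion text per the ollama client's GenerateResponse
    }
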
package/dist/models/ollama-llm.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"ollama-llm.js","sourceRoot":"","sources":["../../src/models/ollama-llm.ts"],"names":[],"mappings":";;;;;;;;;AAAA,2CAAsL;AACtL,mDAAuD;AACvD,mCAA8F;AAE9F;;;GAGG;AAEI,IAAM,SAAS,GAAf,MAAM,SAAU,SAAQ,YAAO;IAKlC,YAAY,MAAe;QACvB,KAAK,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC,CAAC,iDAAiD;QAJlE,aAAQ,GAAW,wBAAwB,CAAC;QAC5C,eAAU,GAAoB,IAAI,CAAC,CAAC,0CAA0C;QAIlF,IAAI,CAAC,OAAO,GAAG,IAAI,eAAM,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;IACvD,CAAC;IAED;;OAEG;IACH,IAAW,YAAY;QACnB,OAAO,IAAI,CAAC,OAAO,CAAC;IACxB,CAAC;IAED;;OAEG;IACH,IAAW,MAAM;QACb,OAAO,IAAI,CAAC,YAAY,CAAC;IAC7B,CAAC;IAED;;OAEG;IACH,IAAoB,iBAAiB;QACjC,OAAO,IAAI,CAAC;IAChB,CAAC;IAED;;;OAGG;IACO,sBAAsB;QAC5B,OAAO,IAAI,CAAC;IAChB,CAAC;IAED;;OAEG;IACa,qBAAqB,CAAC,QAA6B;QAC/D,KAAK,CAAC,qBAAqB,CAAC,QAAQ,CAAC,CAAC;QAEtC,kCAAkC;QAClC,IAAI,QAAQ,CAAC,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,CAAC;YACpC,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC,OAAO,IAAI,QAAQ,CAAC,IAAI,CAAC;YAClD,IAAI,CAAC,OAAO,GAAG,IAAI,eAAM,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;QACvD,CAAC;QAED,IAAI,QAAQ,CAAC,SAAS,KAAK,SAAS,EAAE,CAAC;YACnC,IAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,SAAS,CAAC;QACzC,CAAC;IACL,CAAC;IAED;;OAEG;IACO,KAAK,CAAC,0BAA0B,CAAC,MAAkB;QACzD,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;QAE7B,IAAI,CAAC;YACD,uCAAuC;YACvC,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;gBACvC,IAAI,EAAE,CAAC,CAAC,IAAuC;gBAC/C,OAAO,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;oBAC/B,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE;wBAClB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE,CAAC;4BAC5B,OAAO,KAAK,CAAC;wBACjB,CAAC;6BAAM,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;4BAC/B,OAAO,KAAK,CAAC,OAAO,CAAC;wBACzB,CAAC;6BAAM,CAAC;4BACJ,2CAA2C;4BAC3C,OAAO,KAAK,CAAC,OAAO,CAAC;wBACzB,CAAC;oBACL,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;oBACf,CAAC,CAAC,OAAO;aAChB,CAAC,CAAC,CAAC;YAEJ,iCAAiC;YACjC,MAAM,WAAW,GAAqC;gBAClD,KAAK,EAAE,MAAM,CAAC,KAAK;gBACnB,QAAQ,EAAE,QAAQ;gBAClB,MAAM,EAAE,KAAK;gBACb,OAAO,EAAE;oBACL,WAAW,EAAE,MAAM,CAAC,WAAW;iBAClC;gBACD,UAAU,EAAE,IAAI,CAAC,UAAU;aAC9B,CAAC;YAEF,0BAA0B;YAC1B,IAAI,MAAM,CAAC,eAAe,IAAI,IAAI,IAAI,MAAM,CAAC,eAAe,GAAG,CAAC,EAAE,CAAC;gBAC/D,WAAW,CAAC,OAAO,GAAG;oBAClB,GAAG,WAAW,CAAC,OAAO;oBACtB,WAAW,EAAE,MAAM,CAAC,eAAe;iBACtC,CAAC;YACN,CAAC;YACD,IAAI,MAAM,CAAC,IAAI,IAAI,IAAI,EAAE,CAAC;gBACtB,WAAW,CAAC,OAAO,GAAG;oBAClB,GAAG,WAAW,CAAC,OAAO;oBACtB,KAAK,EAAE,MAAM,CAAC,IAAI;iBACrB,CAAC;YACN,CAAC;YACD,IAAI,MAAM,CAAC,IAAI,IAAI,IAAI,EAAE,CAAC;gBACtB,WAAW,CAAC,OAAO,GAAG;oBAClB,GAAG,WAAW,CAAC,OAAO;oBACtB,KAAK,EAAE,MAAM,CAAC,IAAI;iBACrB,CAAC;YACN,CAAC;YACD,IAAI,MAAM,CAAC,IAAI,IAAI,IAAI,EAAE,CAAC;gBACtB,WAAW,CAAC,OAAO,GAAG;oBAClB,GAAG,WAAW,CAAC,OAAO;oBACtB,IAAI,EAAE,MAAM,CAAC,IAAI;iBACpB,CAAC;YACN,CAAC;YACD,IAAI,MAAM,CAAC,aAAa,IAAI,IAAI,IAAI,MAAM,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBAClE,WAAW,CAAC,OAAO,GAAG;oBAClB,GAAG,WAAW,CAAC,OAAO;oBACtB,IAAI,EAAE,MAAM,CAAC,aAAa;iBAC7B,CAAC;YACN,CAAC;YACD,IAAI,MAAM,CAAC,gBAAgB,IAAI,IAAI,EAAE,CAAC;gBAClC,WAAW,CAAC,OAAO,GAAG;oBAClB,GAAG,WAAW,CAAC,OAAO;oBACtB,iBAAiB,EAAE,MAAM,CAAC,gBAAgB;iBAC7C,CAAC;YACN,CAAC;YACD,IAAI,MAAM,CAAC,eAAe,IAAI,IAAI,EAAE,CAAC;gBACjC,WAAW,CAAC,OAAO,GAAG;oBAClB,GAAG,WAAW,CAAC,OAAO;oBACtB,gBAAgB,EAAE,MAAM,CAAC,eAAe;iBAC3C,CAAC;YACN,CAAC;YAED,yBAAyB;YACzB,QAAQ,MAAM,CAAC,cAAc,EAAE,CAAC;gBAC5B,KAAK,MAAM;oBACP,qDAAqD;oBACrD,WAAW,CAAC,MAAM,GAAG,MAAM,CAAC;oBAC5B,MAAM;gBACV,KAAK,eAAe;oBAChB,IAAI,MAAM,CAAC,2BAA2B,EAAE,CAAC;wBACrC,WAAW,CAAC,MAAM,GAAG,MAAM,CAAC,2BAA2B,CAAC;oBAC5D,CAAC;oBACD,MAAM;YACd,CAAC;YAED,mCAAmC;YACnC,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,WAAW,CAAiB,CAAC;YACrE,MAAM,OAAO,GAAG,IAAI,IAAI,EAAE,CAAC;YA
E3B,mEAAmE;YACnE,IAAI,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC;YACvC,IAAI,QAAQ,GAAuB,SAAS,CAAC;YAE7C,IAAI,IAAI,CAAC,sBAAsB,EAAE,IAAI,OAAO,EAAE,CAAC;gBAC3C,MAAM,SAAS,GAAG,IAAI,CAAC,0BAA0B,CAAC,OAAO,CAAC,CAAC;gBAC3D,OAAO,GAAG,SAAS,CAAC,OAAO,CAAC;gBAC5B,QAAQ,GAAG,SAAS,CAAC,QAAQ,CAAC;YAClC,CAAC;YAED,MAAM,OAAO,GAAuB,CAAC;oBACjC,OAAO,EAAE;wBACL,IAAI,EAAE,oBAAe,CAAC,SAAS;wBAC/B,OAAO,EAAE,OAAO;wBAChB,QAAQ,EAAE,QAAQ;qBACrB;oBACD,aAAa,EAAE,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ;oBAChD,KAAK,EAAE,CAAC;iBACX,CAAC,CAAC;YAEH,yCAAyC;YACzC,MAAM,KAAK,GAAG,IAAI,eAAU,CACxB,QAAQ,CAAC,iBAAiB,IAAI,CAAC,EAC/B,QAAQ,CAAC,UAAU,IAAI,CAAC,CAC3B,CAAC;YAEF,MAAM,MAAM,GAAG;gBACX,OAAO,EAAE,IAAI;gBACb,UAAU,EAAE,IAAI;gBAChB,SAAS,EAAE,SAAS;gBACpB,OAAO,EAAE,OAAO;gBAChB,WAAW,EAAE,OAAO,CAAC,OAAO,EAAE,GAAG,SAAS,CAAC,OAAO,EAAE;gBACpD,IAAI,EAAE;oBACF,OAAO,EAAE,OAAO;oBAChB,KAAK,EAAE,KAAK;iBACf;gBACD,YAAY,EAAE,EAAE;gBAChB,SAAS,EAAE,IAAI;aACJ,CAAC;YAEhB,sCAAsC;YACtC,MAAM,CAAC,4BAA4B,GAAG;gBAClC,QAAQ,EAAE,QAAQ;gBAClB,KAAK,EAAE,MAAM,CAAC,KAAK;gBACnB,cAAc,EAAE,QAAQ,CAAC,cAAc;gBACvC,aAAa,EAAE,QAAQ,CAAC,aAAa;gBACrC,oBAAoB,EAAE,QAAQ,CAAC,oBAAoB;gBACnD,aAAa,EAAE,QAAQ,CAAC,aAAa;aACxC,CAAC;YAEF,OAAO,MAAM,CAAC;QAClB,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,MAAM,OAAO,GAAG,IAAI,IAAI,EAAE,CAAC;YAC3B,MAAM,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,wBAAwB,CAAC;YAEvF,OAAO;gBACH,OAAO,EAAE,KAAK;gBACd,UAAU,EAAE,OAAO;gBACnB,SAAS,EAAE,SAAS;gBACpB,OAAO,EAAE,OAAO;gBAChB,WAAW,EAAE,OAAO,CAAC,OAAO,EAAE,GAAG,SAAS,CAAC,OAAO,EAAE;gBACpD,IAAI,EAAE;oBACF,OAAO,EAAE,EAAE;oBACX,KAAK,EAAE,IAAI,eAAU,CAAC,CAAC,EAAE,CAAC,CAAC;iBAC9B;gBACD,YAAY,EAAE,YAAY;gBAC1B,SAAS,EAAE,KAAK;aACL,CAAC;QACpB,CAAC;IACL,CAAC;IAED;;OAEG;IACO,KAAK,CAAC,sBAAsB,CAAC,MAAkB;QACrD,kEAAkE;QAClE,IAAI,IAAI,CAAC,sBAAsB,EAAE,EAAE,CAAC;YAChC,IAAI,CAAC,6BAA6B,EAAE,CAAC;QACzC,CAAC;QAED,uCAAuC;QACvC,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;YACvC,IAAI,EAAE,CAAC,CAAC,IAAuC;YAC/C,OAAO,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;gBAC/B,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE;oBAClB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE,CAAC;wBAC5B,OAAO,KAAK,CAAC;oBACjB,CAAC;yBAAM,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;wBAC/B,OAAO,KAAK,CAAC,OAAO,CAAC;oBACzB,CAAC;yBAAM,CAAC;wBACJ,OAAO,KAAK,CAAC,OAAO,CAAC;oBACzB,CAAC;gBACL,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;gBACf,CAAC,CAAC,OAAO;SAChB,CAAC,CAAC,CAAC;QAEJ,2CAA2C;QAC3C,MAAM,WAAW,GAAgB;YAC7B,KAAK,EAAE,MAAM,CAAC,KAAK;YACnB,QAAQ,EAAE,QAAQ;YAClB,MAAM,EAAE,IAAI;YACZ,OAAO,EAAE;gBACL,WAAW,EAAE,MAAM,CAAC,WAAW;aAClC;YACD,UAAU,EAAE,IAAI,CAAC,UAAU;SAC9B,CAAC;QAEF,0BAA0B;QAC1B,IAAI,MAAM,CAAC,eAAe,IAAI,IAAI,IAAI,MAAM,CAAC,eAAe,GAAG,CAAC,EAAE,CAAC;YAC/D,WAAW,CAAC,OAAO,GAAG;gBAClB,GAAG,WAAW,CAAC,OAAO;gBACtB,WAAW,EAAE,MAAM,CAAC,eAAe;aACtC,CAAC;QACN,CAAC;QACD,IAAI,MAAM,CAAC,IAAI,IAAI,IAAI,EAAE,CAAC;YACtB,WAAW,CAAC,OAAO,GAAG;gBAClB,GAAG,WAAW,CAAC,OAAO;gBACtB,KAAK,EAAE,MAAM,CAAC,IAAI;aACrB,CAAC;QACN,CAAC;QACD,IAAI,MAAM,CAAC,IAAI,IAAI,IAAI,EAAE,CAAC;YACtB,WAAW,CAAC,OAAO,GAAG;gBAClB,GAAG,WAAW,CAAC,OAAO;gBACtB,KAAK,EAAE,MAAM,CAAC,IAAI;aACrB,CAAC;QACN,CAAC;QACD,IAAI,MAAM,CAAC,IAAI,IAAI,IAAI,EAAE,CAAC;YACtB,WAAW,CAAC,OAAO,GAAG;gBAClB,GAAG,WAAW,CAAC,OAAO;gBACtB,IAAI,EAAE,MAAM,CAAC,IAAI;aACpB,CAAC;QACN,CAAC;QACD,IAAI,MAAM,CAAC,aAAa,IAAI,IAAI,IAAI,MAAM,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YAClE,WAAW,CAAC,OAAO,GAAG;gBAClB,GAAG,WAAW,CAAC,OAAO;gBACtB,IAAI,EAAE,MAAM,CAAC,aAAa;aAC7B,CAAC;QACN,CAAC;QACD,IAAI,MAAM,CAAC,gBAAgB,IAAI,IAAI,EAAE,CAAC;YAClC,WAAW,CAAC,OAAO,GAAG;gBAClB,GAAG,WAAW,CAAC,OAAO;gBACtB,iBAAiB,EAAE,MAAM,CAAC,gBAAgB
;aAC7C,CAAC;QACN,CAAC;QACD,IAAI,MAAM,CAAC,eAAe,IAAI,IAAI,EAAE,CAAC;YACjC,WAAW,CAAC,OAAO,GAAG;gBAClB,GAAG,WAAW,CAAC,OAAO;gBACtB,gBAAgB,EAAE,MAAM,CAAC,eAAe;aAC3C,CAAC;QACN,CAAC;QAED,yBAAyB;QACzB,QAAQ,MAAM,CAAC,cAAc,EAAE,CAAC;YAC5B,KAAK,MAAM;gBACP,WAAW,CAAC,MAAM,GAAG,MAAM,CAAC;gBAC5B,MAAM;YACV,KAAK,eAAe;gBAChB,IAAI,MAAM,CAAC,2BAA2B,EAAE,CAAC;oBACrC,WAAW,CAAC,MAAM,GAAG,MAAM,CAAC,2BAA2B,CAAC;gBAC5D,CAAC;gBACD,MAAM;QACd,CAAC;QAED,gCAAgC;QAChC,yDAAyD;QACzD,OAAO,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,GAAG,WAAW,EAAE,MAAM,EAAE,IAAI,EAAoC,CAAC,CAAC;IAChG,CAAC;IAED;;OAEG;IACO,qBAAqB,CAAC,KAAU;QAKtC,IAAI,OAAO,GAAG,EAAE,CAAC;QACjB,IAAI,YAAY,GAAG,SAAS,CAAC;QAC7B,IAAI,KAAK,GAAG,SAAS,CAAC;QAEtB,iDAAiD;QACjD,IAAI,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE,CAAC;YACrC,IAAI,KAAK,CAAC,OAAO,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,EAAE,CAAC;gBACzC,MAAM,UAAU,GAAG,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC;gBAEzC,4DAA4D;gBAC5D,OAAO,GAAG,IAAI,CAAC,sBAAsB,EAAE;oBACnC,CAAC,CAAC,IAAI,CAAC,8BAA8B,CAAC,UAAU,CAAC;oBACjD,CAAC,CAAC,UAAU,CAAC;YACrB,CAAC;YAED,mCAAmC;YACnC,IAAI,KAAK,CAAC,IAAI,KAAK,IAAI,EAAE,CAAC;gBACtB,YAAY,GAAG,MAAM,CAAC;gBAEtB,6CAA6C;gBAC7C,IAAI,KAAK,CAAC,iBAAiB,IAAI,KAAK,CAAC,UAAU,EAAE,CAAC;oBAC9C,KAAK,GAAG;wBACJ,YAAY,EAAE,KAAK,CAAC,iBAAiB,IAAI,CAAC;wBAC1C,gBAAgB,EAAE,KAAK,CAAC,UAAU,IAAI,CAAC;qBAC1C,CAAC;gBACN,CAAC;YACL,CAAC;QACL,CAAC;QAED,OAAO;YACH,OAAO;YACP,YAAY;YACZ,KAAK;SACR,CAAC;IACN,CAAC;IAED;;OAEG;IACO,yBAAyB,CAC/B,kBAA6C,EAC7C,SAAiC,EACjC,KAA6B;QAE7B,qDAAqD;QACrD,IAAI,YAAY,GAAG,MAAM,CAAC;QAC1B,IAAI,SAAS,EAAE,IAAI,KAAK,KAAK,EAAE,CAAC;YAC5B,YAAY,GAAG,QAAQ,CAAC;QAC5B,CAAC;QAED,6DAA6D;QAC7D,IAAI,YAAY,GAAG,CAAC,CAAC;QACrB,IAAI,gBAAgB,GAAG,CAAC,CAAC;QAEzB,IAAI,KAAK,EAAE,CAAC;YACR,YAAY,GAAG,KAAK,CAAC,YAAY,IAAI,CAAC,CAAC;YACvC,gBAAgB,GAAG,KAAK,CAAC,gBAAgB,IAAI,CAAC,CAAC;QACnD,CAAC;aAAM,IAAI,SAAS,EAAE,CAAC;YACnB,YAAY,GAAG,SAAS,CAAC,iBAAiB,IAAI,CAAC,CAAC;YAChD,gBAAgB,GAAG,SAAS,CAAC,UAAU,IAAI,CAAC,CAAC;QACjD,CAAC;QAED,kDAAkD;QAClD,MAAM,GAAG,GAAG,IAAI,IAAI,EAAE,CAAC;QAEvB,8DAA8D;QAC9D,MAAM,MAAM,GAAG,IAAI,eAAU,CAAC,IAAI,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;QAE9C,yDAAyD;QACzD,MAAM,eAAe,GAAG,IAAI,CAAC,mBAAmB,EAAE,mBAAmB,CAAC,IAAI,EAAE,CAAC;QAE7E,qBAAqB;QACrB,MAAM,CAAC,IAAI,GAAG;YACV,OAAO,EAAE,CAAC;oBACN,OAAO,EAAE,IAAI,CAAC,oBAAoB,CAAC;wBAC/B,IAAI,EAAE,oBAAe,CAAC,SAAS;wBAC/B,OAAO,EAAE,kBAAkB,CAAC,CAAC,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE;qBACxD,EAAE,eAAe,CAAC;oBACnB,aAAa,EAAE,YAAY;oBAC3B,KAAK,EAAE,CAAC;iBACX,CAAC;YACF,KAAK,EAAE,IAAI,eAAU,CAAC,YAAY,EAAE,gBAAgB,CAAC;SACxD,CAAC;QAEF,MAAM,CAAC,UAAU,GAAG,SAAS,CAAC;QAC9B,MAAM,CAAC,YAAY,GAAG,IAAI,CAAC;QAC3B,MAAM,CAAC,SAAS,GAAG,IAAI,CAAC;QAExB,2CAA2C;QAC3C,IAAI,SAAS,EAAE,CAAC;YACZ,MAAM,CAAC,4BAA4B,GAAG;gBAClC,QAAQ,EAAE,QAAQ;gBAClB,KAAK,EAAE,SAAS,CAAC,KAAK;gBACtB,cAAc,EAAE,SAAS,CAAC,cAAc;gBACxC,aAAa,EAAE,SAAS,CAAC,aAAa;gBACtC,oBAAoB,EAAE,SAAS,CAAC,oBAAoB;gBACpD,aAAa,EAAE,SAAS,CAAC,aAAa;aACzC,CAAC;QACN,CAAC;QAED,OAAO,MAAM,CAAC;IAClB,CAAC;IAED;;;OAGG;IACI,KAAK,CAAC,QAAQ,CAAC,MAMrB;QACG,MAAM,eAAe,GAAoB;YACrC,KAAK,EAAE,MAAM,CAAC,KAAK;YACnB,MAAM,EAAE,MAAM,CAAC,MAAM;YACrB,MAAM,EAAE,MAAM,CAAC,MAAM,IAAI,KAAK;YAC9B,OAAO,EAAE;gBACL,WAAW,EAAE,MAAM,CAAC,WAAW;aAClC;YACD,UAAU,EAAE,IAAI,CAAC,UAAU;SAC9B,CAAC;QAEF,IAAI,MAAM,CAAC,eAAe,EAAE,CAAC;YACzB,eAAe,CAAC,OAAO,GAAG;gBACtB,GAAG,eAAe,CAAC,OAAO;gBAC1B,WAAW,EAAE,MAAM,CAAC,eAAe;aACtC,CAAC;QACN,CAAC;QAED,wEAAwE;QACxE,IAAI,MAAM,CAAC,MAAM,EAAE,CAAC;YAChB,OAAO,MAAM,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,GAAG,eAAe,EAAE,MAAM,EAAE,IAAI,EAAwC,CAAC,CAAC;QAClH,CAAC;aAAM,CAAC;YACJ,OAAO,MAAM,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,GAAG,eAAe,EAAE,MAAM,EAAE,KAAK,EAAyC,CAAC,CAAC;QACpH,CAAC;IACL,CAAC;IAED;;OAEG;I
ACI,KAAK,CAAC,UAAU;QACnB,OAAO,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC;IACpC,CAAC;IAED;;OAEG;IACI,KAAK,CAAC,SAAS,CAAC,SAAiB;QACpC,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,CAAC;IAChE,CAAC;IAED;;OAEG;IACI,KAAK,CAAC,gBAAgB,CAAC,SAAiB;QAC3C,IAAI,CAAC;YACD,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,UAAU,EAAE,CAAC;YACvC,OAAO,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAM,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,SAAS,IAAI,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,SAAS,GAAG,GAAG,CAAC,CAAC,CAAC;QACtG,CAAC;QAAC,MAAM,CAAC;YACL,OAAO,KAAK,CAAC;QACjB,CAAC;IACL,CAAC;IAEM,KAAK,CAAC,aAAa,CAAC,OAAwB;QAC/C,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;IAC7F,CAAC;IAEM,KAAK,CAAC,YAAY,CAAC,OAAuB;QAC7C,MAAM,IAAI,KAAK,CAAC,wEAAwE,CAAC,CAAC;IAC9F,CAAC;CACJ,CAAA;AApfY,8BAAS;oBAAT,SAAS;IADrB,IAAA,sBAAa,EAAC,YAAO,EAAE,WAAW,CAAC;GACvB,SAAS,CAofrB;AAED,SAAgB,aAAa;IACzB,iFAAiF;AACrF,CAAC;AAFD,sCAEC"}
package/package.json ADDED
@@ -0,0 +1,26 @@
+ {
+   "name": "@memberjunction/ai-ollama",
+   "version": "2.85.0",
+   "description": "MemberJunction Wrapper for Ollama - Local Inference",
+   "main": "dist/index.js",
+   "types": "dist/index.d.ts",
+   "files": [
+     "/dist"
+   ],
+   "scripts": {
+     "start": "ts-node-dev src/index.ts",
+     "build": "tsc",
+     "test": "echo \"Error: no test specified\" && exit 1"
+   },
+   "author": "MemberJunction.com",
+   "license": "ISC",
+   "devDependencies": {
+     "ts-node-dev": "^2.0.0",
+     "typescript": "^5.4.5"
+   },
+   "dependencies": {
+     "@memberjunction/ai": "2.85.0",
+     "@memberjunction/global": "2.85.0",
+     "ollama": "^0.5.17"
+   }
+ }
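
The manifest ships only the compiled dist folder, exposes a CommonJS entry via "main" with matching "types", and pins both MemberJunction dependencies to the same 2.85.0 release as the package itself. A quick smoke test of what those fields resolve to:

    // Resolves through "main": "dist/index.js" (and "types": "dist/index.d.ts" for the compiler)
    import { OllamaLLM, OllamaEmbedding } from '@memberjunction/ai-ollama';

    console.log(typeof OllamaLLM, typeof OllamaEmbedding); // "function" "function"
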