@contractspec/lib.ai-providers 1.57.0 → 1.58.0

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/dist/factory.js CHANGED
@@ -1,226 +1,505 @@
- import { DEFAULT_MODELS, getModelsForProvider } from "./models.js";
+ // @bun
+ // src/models.ts
+ var DEFAULT_MODELS = {
+ ollama: "llama3.2",
+ openai: "gpt-4o",
+ anthropic: "claude-sonnet-4-20250514",
+ mistral: "mistral-large-latest",
+ gemini: "gemini-2.0-flash"
+ };
+ var MODELS = [
+ {
+ id: "llama3.2",
+ name: "Llama 3.2",
+ provider: "ollama",
+ contextWindow: 128000,
+ capabilities: {
+ vision: false,
+ tools: true,
+ reasoning: false,
+ streaming: true
+ }
+ },
+ {
+ id: "codellama",
+ name: "Code Llama",
+ provider: "ollama",
+ contextWindow: 16000,
+ capabilities: {
+ vision: false,
+ tools: false,
+ reasoning: false,
+ streaming: true
+ }
+ },
+ {
+ id: "deepseek-coder",
+ name: "DeepSeek Coder",
+ provider: "ollama",
+ contextWindow: 16000,
+ capabilities: {
+ vision: false,
+ tools: false,
+ reasoning: false,
+ streaming: true
+ }
+ },
+ {
+ id: "mistral",
+ name: "Mistral 7B",
+ provider: "ollama",
+ contextWindow: 32000,
+ capabilities: {
+ vision: false,
+ tools: false,
+ reasoning: false,
+ streaming: true
+ }
+ },
+ {
+ id: "gpt-4o",
+ name: "GPT-4o",
+ provider: "openai",
+ contextWindow: 128000,
+ capabilities: {
+ vision: true,
+ tools: true,
+ reasoning: false,
+ streaming: true
+ },
+ costPerMillion: { input: 2.5, output: 10 }
+ },
+ {
+ id: "gpt-4o-mini",
+ name: "GPT-4o Mini",
+ provider: "openai",
+ contextWindow: 128000,
+ capabilities: {
+ vision: true,
+ tools: true,
+ reasoning: false,
+ streaming: true
+ },
+ costPerMillion: { input: 0.15, output: 0.6 }
+ },
+ {
+ id: "o1",
+ name: "o1",
+ provider: "openai",
+ contextWindow: 200000,
+ capabilities: {
+ vision: true,
+ tools: true,
+ reasoning: true,
+ streaming: true
+ },
+ costPerMillion: { input: 15, output: 60 }
+ },
+ {
+ id: "o1-mini",
+ name: "o1 Mini",
+ provider: "openai",
+ contextWindow: 128000,
+ capabilities: {
+ vision: false,
+ tools: true,
+ reasoning: true,
+ streaming: true
+ },
+ costPerMillion: { input: 3, output: 12 }
+ },
+ {
+ id: "claude-sonnet-4-20250514",
+ name: "Claude Sonnet 4",
+ provider: "anthropic",
+ contextWindow: 200000,
+ capabilities: {
+ vision: true,
+ tools: true,
+ reasoning: true,
+ streaming: true
+ },
+ costPerMillion: { input: 3, output: 15 }
+ },
+ {
+ id: "claude-3-5-sonnet-20241022",
+ name: "Claude 3.5 Sonnet",
+ provider: "anthropic",
+ contextWindow: 200000,
+ capabilities: {
+ vision: true,
+ tools: true,
+ reasoning: false,
+ streaming: true
+ },
+ costPerMillion: { input: 3, output: 15 }
+ },
+ {
+ id: "claude-3-5-haiku-20241022",
+ name: "Claude 3.5 Haiku",
+ provider: "anthropic",
+ contextWindow: 200000,
+ capabilities: {
+ vision: true,
+ tools: true,
+ reasoning: false,
+ streaming: true
+ },
+ costPerMillion: { input: 0.8, output: 4 }
+ },
+ {
+ id: "mistral-large-latest",
+ name: "Mistral Large",
+ provider: "mistral",
+ contextWindow: 128000,
+ capabilities: {
+ vision: false,
+ tools: true,
+ reasoning: false,
+ streaming: true
+ },
+ costPerMillion: { input: 2, output: 6 }
+ },
+ {
+ id: "codestral-latest",
+ name: "Codestral",
+ provider: "mistral",
+ contextWindow: 32000,
+ capabilities: {
+ vision: false,
+ tools: true,
+ reasoning: false,
+ streaming: true
+ },
+ costPerMillion: { input: 0.2, output: 0.6 }
+ },
+ {
+ id: "mistral-small-latest",
+ name: "Mistral Small",
+ provider: "mistral",
+ contextWindow: 32000,
+ capabilities: {
+ vision: false,
+ tools: true,
+ reasoning: false,
+ streaming: true
+ },
+ costPerMillion: { input: 0.2, output: 0.6 }
+ },
+ {
+ id: "gemini-2.0-flash",
+ name: "Gemini 2.0 Flash",
+ provider: "gemini",
+ contextWindow: 1e6,
+ capabilities: {
+ vision: true,
+ tools: true,
+ reasoning: false,
+ streaming: true
+ },
+ costPerMillion: { input: 0.075, output: 0.3 }
+ },
+ {
+ id: "gemini-2.5-pro-preview-06-05",
+ name: "Gemini 2.5 Pro",
+ provider: "gemini",
+ contextWindow: 1e6,
+ capabilities: {
+ vision: true,
+ tools: true,
+ reasoning: true,
+ streaming: true
+ },
+ costPerMillion: { input: 1.25, output: 10 }
+ },
+ {
+ id: "gemini-2.5-flash-preview-05-20",
+ name: "Gemini 2.5 Flash",
+ provider: "gemini",
+ contextWindow: 1e6,
+ capabilities: {
+ vision: true,
+ tools: true,
+ reasoning: true,
+ streaming: true
+ },
+ costPerMillion: { input: 0.15, output: 0.6 }
+ }
+ ];
+ function getModelsForProvider(provider) {
+ return MODELS.filter((m) => m.provider === provider);
+ }
+ function getModelInfo(modelId) {
+ return MODELS.find((m) => m.id === modelId);
+ }
+ function getRecommendedModels(provider) {
+ const normalizedProvider = provider === "claude" ? "anthropic" : provider === "custom" ? "openai" : provider;
+ return getModelsForProvider(normalizedProvider).map((m) => m.id);
+ }
+ function getDefaultModel(provider) {
+ return DEFAULT_MODELS[provider];
+ }
+
+ // src/factory.ts
  import { anthropic } from "@ai-sdk/anthropic";
  import { google } from "@ai-sdk/google";
  import { mistral } from "@ai-sdk/mistral";
  import { openai } from "@ai-sdk/openai";
  import { ollama } from "ollama-ai-provider";
-
- //#region src/factory.ts
- /**
- * Base provider implementation
- */
- var BaseProvider = class {
- name;
- model;
- mode;
- config;
- cachedModel = null;
- constructor(config) {
- this.name = config.provider;
- this.model = config.model ?? DEFAULT_MODELS[config.provider];
- this.mode = this.determineMode(config);
- this.config = config;
- }
- getModel() {
- if (!this.cachedModel) this.cachedModel = this.createModel();
- return this.cachedModel;
- }
- async listModels() {
- if (this.name === "ollama") return this.listOllamaModels();
- return getModelsForProvider(this.name);
- }
- async validate() {
- if (this.name === "ollama") return this.validateOllama();
- if (this.mode === "byok" && !this.config.apiKey) return {
- valid: false,
- error: `API key required for ${this.name}`
- };
- if (this.mode === "managed" && !this.config.proxyUrl && !this.config.organizationId) return {
- valid: false,
- error: "Managed mode requires proxyUrl or organizationId"
- };
- return { valid: true };
- }
- determineMode(config) {
- if (config.provider === "ollama") return "local";
- if (config.apiKey) return "byok";
- return "managed";
- }
- createModel() {
- const { baseUrl, proxyUrl } = this.config;
- switch (this.name) {
- case "ollama": {
- const originalBaseUrl = process.env.OLLAMA_BASE_URL;
- if (baseUrl && baseUrl !== "http://localhost:11434") process.env.OLLAMA_BASE_URL = baseUrl;
- const ollamaModel = ollama(this.model);
- if (originalBaseUrl !== void 0) process.env.OLLAMA_BASE_URL = originalBaseUrl;
- else if (baseUrl && baseUrl !== "http://localhost:11434") delete process.env.OLLAMA_BASE_URL;
- return ollamaModel;
- }
- case "openai":
- if (this.mode === "managed") {
- const originalBaseUrl = process.env.OPENAI_BASE_URL;
- if (proxyUrl) process.env.OPENAI_BASE_URL = proxyUrl;
- const model = openai(this.model);
- if (originalBaseUrl !== void 0) process.env.OPENAI_BASE_URL = originalBaseUrl;
- else if (proxyUrl) delete process.env.OPENAI_BASE_URL;
- return model;
- }
- return openai(this.model);
- case "anthropic":
- if (this.mode === "managed") {
- const originalBaseUrl = process.env.OPENAI_BASE_URL;
- if (proxyUrl) process.env.OPENAI_BASE_URL = proxyUrl;
- const model = openai(this.model);
- if (originalBaseUrl !== void 0) process.env.OPENAI_BASE_URL = originalBaseUrl;
- else if (proxyUrl) delete process.env.OPENAI_BASE_URL;
- return model;
- }
- return anthropic(this.model);
- case "mistral":
- if (this.mode === "managed") {
- const originalBaseUrl = process.env.OPENAI_BASE_URL;
- if (proxyUrl) process.env.OPENAI_BASE_URL = proxyUrl;
- const model = openai(this.model);
- if (originalBaseUrl !== void 0) process.env.OPENAI_BASE_URL = originalBaseUrl;
- else if (proxyUrl) delete process.env.OPENAI_BASE_URL;
- return model;
- }
- return mistral(this.model);
- case "gemini":
- if (this.mode === "managed") {
- const originalBaseUrl = process.env.OPENAI_BASE_URL;
- if (proxyUrl) process.env.OPENAI_BASE_URL = proxyUrl;
- const model = openai(this.model);
- if (originalBaseUrl !== void 0) process.env.OPENAI_BASE_URL = originalBaseUrl;
- else if (proxyUrl) delete process.env.OPENAI_BASE_URL;
- return model;
- }
- return google(this.model);
- default: throw new Error(`Unknown provider: ${this.name}`);
- }
- }
- async listOllamaModels() {
- try {
- const baseUrl = this.config.baseUrl ?? "http://localhost:11434";
- const response = await fetch(`${baseUrl}/api/tags`);
- if (!response.ok) return getModelsForProvider("ollama");
- return ((await response.json()).models ?? []).map((m) => ({
- id: m.name,
- name: m.name,
- provider: "ollama",
- contextWindow: 8e3,
- capabilities: {
- vision: false,
- tools: false,
- reasoning: false,
- streaming: true
- }
- }));
- } catch {
- return getModelsForProvider("ollama");
- }
- }
- async validateOllama() {
- try {
- const baseUrl = this.config.baseUrl ?? "http://localhost:11434";
- const response = await fetch(`${baseUrl}/api/tags`);
- if (!response.ok) return {
- valid: false,
- error: `Ollama server returned ${response.status}`
- };
- const models = (await response.json()).models ?? [];
- if (!models.some((m) => m.name === this.model)) return {
- valid: false,
- error: `Model "${this.model}" not found. Available: ${models.map((m) => m.name).join(", ")}`
- };
- return { valid: true };
- } catch (error) {
- return {
- valid: false,
- error: `Cannot connect to Ollama at ${this.config.baseUrl ?? "http://localhost:11434"}: ${error instanceof Error ? error.message : String(error)}`
- };
- }
- }
- };
- /**
- * Create a provider from configuration
- */
+ class BaseProvider {
+ name;
+ model;
+ mode;
+ config;
+ cachedModel = null;
+ constructor(config) {
+ this.name = config.provider;
+ this.model = config.model ?? DEFAULT_MODELS[config.provider];
+ this.mode = this.determineMode(config);
+ this.config = config;
+ }
+ getModel() {
+ if (!this.cachedModel) {
+ this.cachedModel = this.createModel();
+ }
+ return this.cachedModel;
+ }
+ async listModels() {
+ if (this.name === "ollama") {
+ return this.listOllamaModels();
+ }
+ return getModelsForProvider(this.name);
+ }
+ async validate() {
+ if (this.name === "ollama") {
+ return this.validateOllama();
+ }
+ if (this.mode === "byok" && !this.config.apiKey) {
+ return {
+ valid: false,
+ error: `API key required for ${this.name}`
+ };
+ }
+ if (this.mode === "managed" && !this.config.proxyUrl && !this.config.organizationId) {
+ return {
+ valid: false,
+ error: "Managed mode requires proxyUrl or organizationId"
+ };
+ }
+ return { valid: true };
+ }
+ determineMode(config) {
+ if (config.provider === "ollama")
+ return "local";
+ if (config.apiKey)
+ return "byok";
+ return "managed";
+ }
+ createModel() {
+ const { baseUrl, proxyUrl } = this.config;
+ switch (this.name) {
+ case "ollama": {
+ const originalBaseUrl = process.env.OLLAMA_BASE_URL;
+ if (baseUrl && baseUrl !== "http://localhost:11434") {
+ process.env.OLLAMA_BASE_URL = baseUrl;
+ }
+ const ollamaModel = ollama(this.model);
+ if (originalBaseUrl !== undefined) {
+ process.env.OLLAMA_BASE_URL = originalBaseUrl;
+ } else if (baseUrl && baseUrl !== "http://localhost:11434") {
+ delete process.env.OLLAMA_BASE_URL;
+ }
+ return ollamaModel;
+ }
+ case "openai":
+ if (this.mode === "managed") {
+ const originalBaseUrl = process.env.OPENAI_BASE_URL;
+ if (proxyUrl) {
+ process.env.OPENAI_BASE_URL = proxyUrl;
+ }
+ const model = openai(this.model);
+ if (originalBaseUrl !== undefined) {
+ process.env.OPENAI_BASE_URL = originalBaseUrl;
+ } else if (proxyUrl) {
+ delete process.env.OPENAI_BASE_URL;
+ }
+ return model;
+ }
+ return openai(this.model);
+ case "anthropic":
+ if (this.mode === "managed") {
+ const originalBaseUrl = process.env.OPENAI_BASE_URL;
+ if (proxyUrl) {
+ process.env.OPENAI_BASE_URL = proxyUrl;
+ }
+ const model = openai(this.model);
+ if (originalBaseUrl !== undefined) {
+ process.env.OPENAI_BASE_URL = originalBaseUrl;
+ } else if (proxyUrl) {
+ delete process.env.OPENAI_BASE_URL;
+ }
+ return model;
+ }
+ return anthropic(this.model);
+ case "mistral":
+ if (this.mode === "managed") {
+ const originalBaseUrl = process.env.OPENAI_BASE_URL;
+ if (proxyUrl) {
+ process.env.OPENAI_BASE_URL = proxyUrl;
+ }
+ const model = openai(this.model);
+ if (originalBaseUrl !== undefined) {
+ process.env.OPENAI_BASE_URL = originalBaseUrl;
+ } else if (proxyUrl) {
+ delete process.env.OPENAI_BASE_URL;
+ }
+ return model;
+ }
+ return mistral(this.model);
+ case "gemini":
+ if (this.mode === "managed") {
+ const originalBaseUrl = process.env.OPENAI_BASE_URL;
+ if (proxyUrl) {
+ process.env.OPENAI_BASE_URL = proxyUrl;
+ }
+ const model = openai(this.model);
+ if (originalBaseUrl !== undefined) {
+ process.env.OPENAI_BASE_URL = originalBaseUrl;
+ } else if (proxyUrl) {
+ delete process.env.OPENAI_BASE_URL;
+ }
+ return model;
+ }
+ return google(this.model);
+ default:
+ throw new Error(`Unknown provider: ${this.name}`);
+ }
+ }
+ async listOllamaModels() {
+ try {
+ const baseUrl = this.config.baseUrl ?? "http://localhost:11434";
+ const response = await fetch(`${baseUrl}/api/tags`);
+ if (!response.ok) {
+ return getModelsForProvider("ollama");
+ }
+ const data = await response.json();
+ const models = data.models ?? [];
+ return models.map((m) => ({
+ id: m.name,
+ name: m.name,
+ provider: "ollama",
+ contextWindow: 8000,
+ capabilities: {
+ vision: false,
+ tools: false,
+ reasoning: false,
+ streaming: true
+ }
+ }));
+ } catch {
+ return getModelsForProvider("ollama");
+ }
+ }
+ async validateOllama() {
+ try {
+ const baseUrl = this.config.baseUrl ?? "http://localhost:11434";
+ const response = await fetch(`${baseUrl}/api/tags`);
+ if (!response.ok) {
+ return {
+ valid: false,
+ error: `Ollama server returned ${response.status}`
+ };
+ }
+ const data = await response.json();
+ const models = data.models ?? [];
+ const hasModel = models.some((m) => m.name === this.model);
+ if (!hasModel) {
+ return {
+ valid: false,
+ error: `Model "${this.model}" not found. Available: ${models.map((m) => m.name).join(", ")}`
+ };
+ }
+ return { valid: true };
+ } catch (error) {
+ const baseUrl = this.config.baseUrl ?? "http://localhost:11434";
+ return {
+ valid: false,
+ error: `Cannot connect to Ollama at ${baseUrl}: ${error instanceof Error ? error.message : String(error)}`
+ };
+ }
+ }
+ }
  function createProvider(config) {
- return new BaseProvider(config);
+ return new BaseProvider(config);
  }
- /**
- * Create a provider from environment variables
- */
  function createProviderFromEnv() {
- const provider = process.env.CONTRACTSPEC_AI_PROVIDER ?? "openai";
- const model = process.env.CONTRACTSPEC_AI_MODEL;
- let apiKey;
- switch (provider) {
- case "openai":
- apiKey = process.env.OPENAI_API_KEY;
- break;
- case "anthropic":
- apiKey = process.env.ANTHROPIC_API_KEY;
- break;
- case "mistral":
- apiKey = process.env.MISTRAL_API_KEY;
- break;
- case "gemini":
- apiKey = process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY;
- break;
- case "ollama": break;
- }
- return createProvider({
- provider,
- model,
- apiKey,
- baseUrl: process.env.OLLAMA_BASE_URL,
- proxyUrl: process.env.CONTRACTSPEC_AI_PROXY_URL,
- organizationId: process.env.CONTRACTSPEC_ORG_ID
- });
+ const provider = process.env.CONTRACTSPEC_AI_PROVIDER ?? "openai";
+ const model = process.env.CONTRACTSPEC_AI_MODEL;
+ let apiKey;
+ switch (provider) {
+ case "openai":
+ apiKey = process.env.OPENAI_API_KEY;
+ break;
+ case "anthropic":
+ apiKey = process.env.ANTHROPIC_API_KEY;
+ break;
+ case "mistral":
+ apiKey = process.env.MISTRAL_API_KEY;
+ break;
+ case "gemini":
+ apiKey = process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY;
+ break;
+ case "ollama":
+ break;
+ }
+ return createProvider({
+ provider,
+ model,
+ apiKey,
+ baseUrl: process.env.OLLAMA_BASE_URL,
+ proxyUrl: process.env.CONTRACTSPEC_AI_PROXY_URL,
+ organizationId: process.env.CONTRACTSPEC_ORG_ID
+ });
  }
- /**
- * Get all available providers with their status
- */
  function getAvailableProviders() {
- const providers = [];
- providers.push({
- provider: "ollama",
- available: true,
- mode: "local"
- });
- const openaiKey = process.env.OPENAI_API_KEY;
- providers.push({
- provider: "openai",
- available: Boolean(openaiKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),
- mode: openaiKey ? "byok" : "managed",
- reason: !openaiKey ? "Set OPENAI_API_KEY for BYOK mode" : void 0
- });
- const anthropicKey = process.env.ANTHROPIC_API_KEY;
- providers.push({
- provider: "anthropic",
- available: Boolean(anthropicKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),
- mode: anthropicKey ? "byok" : "managed",
- reason: !anthropicKey ? "Set ANTHROPIC_API_KEY for BYOK mode" : void 0
- });
- const mistralKey = process.env.MISTRAL_API_KEY;
- providers.push({
- provider: "mistral",
- available: Boolean(mistralKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),
- mode: mistralKey ? "byok" : "managed",
- reason: !mistralKey ? "Set MISTRAL_API_KEY for BYOK mode" : void 0
- });
- const geminiKey = process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY;
- providers.push({
- provider: "gemini",
- available: Boolean(geminiKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),
- mode: geminiKey ? "byok" : "managed",
- reason: !geminiKey ? "Set GOOGLE_API_KEY for BYOK mode" : void 0
- });
- return providers;
+ const providers = [];
+ providers.push({
+ provider: "ollama",
+ available: true,
+ mode: "local"
+ });
+ const openaiKey = process.env.OPENAI_API_KEY;
+ providers.push({
+ provider: "openai",
+ available: Boolean(openaiKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),
+ mode: openaiKey ? "byok" : "managed",
+ reason: !openaiKey ? "Set OPENAI_API_KEY for BYOK mode" : undefined
+ });
+ const anthropicKey = process.env.ANTHROPIC_API_KEY;
+ providers.push({
+ provider: "anthropic",
+ available: Boolean(anthropicKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),
+ mode: anthropicKey ? "byok" : "managed",
+ reason: !anthropicKey ? "Set ANTHROPIC_API_KEY for BYOK mode" : undefined
+ });
+ const mistralKey = process.env.MISTRAL_API_KEY;
+ providers.push({
+ provider: "mistral",
+ available: Boolean(mistralKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),
+ mode: mistralKey ? "byok" : "managed",
+ reason: !mistralKey ? "Set MISTRAL_API_KEY for BYOK mode" : undefined
+ });
+ const geminiKey = process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY;
+ providers.push({
+ provider: "gemini",
+ available: Boolean(geminiKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),
+ mode: geminiKey ? "byok" : "managed",
+ reason: !geminiKey ? "Set GOOGLE_API_KEY for BYOK mode" : undefined
+ });
+ return providers;
  }
-
- //#endregion
- export { createProvider, createProviderFromEnv, getAvailableProviders };
- //# sourceMappingURL=factory.js.map
+ export {
+ getAvailableProviders,
+ createProviderFromEnv,
+ createProvider
+ };
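
For orientation, a minimal consumer-side sketch of the exports added above. The import specifier assumes the package root re-exports dist/factory.js, which this diff does not show; the config fields, modes, and return shapes are taken directly from the factory code in the diff.

  import { createProvider, createProviderFromEnv, getAvailableProviders } from "@contractspec/lib.ai-providers";

  // An explicit apiKey selects "byok" mode; omitting it selects "managed";
  // provider "ollama" always runs in "local" mode.
  const provider = createProvider({
    provider: "anthropic",
    model: "claude-sonnet-4-20250514",
    apiKey: process.env.ANTHROPIC_API_KEY,
  });

  const status = await provider.validate();      // { valid: boolean, error?: string }
  if (status.valid) {
    const model = provider.getModel();            // AI SDK model instance, cached after first call
    const models = await provider.listModels();   // static catalog, or live /api/tags list for ollama
  }

  // Environment-driven construction (CONTRACTSPEC_AI_PROVIDER, CONTRACTSPEC_AI_MODEL,
  // provider API keys, OLLAMA_BASE_URL, CONTRACTSPEC_AI_PROXY_URL, CONTRACTSPEC_ORG_ID).
  const envProvider = createProviderFromEnv();

  // Per-provider availability report: { provider, available, mode, reason? }.
  console.log(getAvailableProviders());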