@contractspec/lib.ai-providers 1.57.0 → 1.58.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/browser/factory.js +504 -0
- package/dist/browser/index.js +641 -0
- package/dist/browser/legacy.js +572 -0
- package/dist/browser/models.js +248 -0
- package/dist/browser/types.js +0 -0
- package/dist/browser/validation.js +563 -0
- package/dist/factory.d.ts +4 -8
- package/dist/factory.d.ts.map +1 -1
- package/dist/factory.js +494 -215
- package/dist/index.d.ts +11 -6
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +641 -5
- package/dist/legacy.d.ts +19 -11
- package/dist/legacy.d.ts.map +1 -1
- package/dist/legacy.js +566 -66
- package/dist/models.d.ts +10 -11
- package/dist/models.d.ts.map +1 -1
- package/dist/models.js +239 -290
- package/dist/node/factory.js +504 -0
- package/dist/node/index.js +641 -0
- package/dist/node/legacy.js +572 -0
- package/dist/node/models.js +248 -0
- package/dist/node/types.js +0 -0
- package/dist/node/validation.js +563 -0
- package/dist/types.d.ts +76 -77
- package/dist/types.d.ts.map +1 -1
- package/dist/types.js +1 -0
- package/dist/validation.d.ts +13 -14
- package/dist/validation.d.ts.map +1 -1
- package/dist/validation.js +556 -53
- package/package.json +59 -24
- package/dist/factory.js.map +0 -1
- package/dist/legacy.js.map +0 -1
- package/dist/models.js.map +0 -1
- package/dist/validation.js.map +0 -1
|
@@ -0,0 +1,504 @@
|
|
|
1
|
+
// src/models.ts
// Default model id chosen for each provider when the caller does not pass one
// (see BaseProvider's constructor). Keys are the provider names accepted by
// createProvider(); each value matches an `id` in the MODELS catalog below.
var DEFAULT_MODELS = {
  ollama: "llama3.2",
  openai: "gpt-4o",
  anthropic: "claude-sonnet-4-20250514",
  mistral: "mistral-large-latest",
  gemini: "gemini-2.0-flash"
};
|
|
9
|
+
// Static catalog of known models across all supported providers.
// contextWindow is the model's context size in tokens; costPerMillion is the
// price per million input/output tokens (presumably USD — not stated in
// source) and is absent for local Ollama models.
var MODELS = [
  {
    id: "llama3.2",
    name: "Llama 3.2",
    provider: "ollama",
    contextWindow: 128000,
    capabilities: {
      vision: false,
      tools: true,
      reasoning: false,
      streaming: true
    }
  },
  {
    id: "codellama",
    name: "Code Llama",
    provider: "ollama",
    contextWindow: 16000,
    capabilities: {
      vision: false,
      tools: false,
      reasoning: false,
      streaming: true
    }
  },
  {
    id: "deepseek-coder",
    name: "DeepSeek Coder",
    provider: "ollama",
    contextWindow: 16000,
    capabilities: {
      vision: false,
      tools: false,
      reasoning: false,
      streaming: true
    }
  },
  {
    id: "mistral",
    name: "Mistral 7B",
    provider: "ollama",
    contextWindow: 32000,
    capabilities: {
      vision: false,
      tools: false,
      reasoning: false,
      streaming: true
    }
  },
  {
    id: "gpt-4o",
    name: "GPT-4o",
    provider: "openai",
    contextWindow: 128000,
    capabilities: {
      vision: true,
      tools: true,
      reasoning: false,
      streaming: true
    },
    costPerMillion: { input: 2.5, output: 10 }
  },
  {
    id: "gpt-4o-mini",
    name: "GPT-4o Mini",
    provider: "openai",
    contextWindow: 128000,
    capabilities: {
      vision: true,
      tools: true,
      reasoning: false,
      streaming: true
    },
    costPerMillion: { input: 0.15, output: 0.6 }
  },
  {
    id: "o1",
    name: "o1",
    provider: "openai",
    contextWindow: 200000,
    capabilities: {
      vision: true,
      tools: true,
      reasoning: true,
      streaming: true
    },
    costPerMillion: { input: 15, output: 60 }
  },
  {
    id: "o1-mini",
    name: "o1 Mini",
    provider: "openai",
    contextWindow: 128000,
    capabilities: {
      vision: false,
      tools: true,
      reasoning: true,
      streaming: true
    },
    costPerMillion: { input: 3, output: 12 }
  },
  {
    id: "claude-sonnet-4-20250514",
    name: "Claude Sonnet 4",
    provider: "anthropic",
    contextWindow: 200000,
    capabilities: {
      vision: true,
      tools: true,
      reasoning: true,
      streaming: true
    },
    costPerMillion: { input: 3, output: 15 }
  },
  {
    id: "claude-3-5-sonnet-20241022",
    name: "Claude 3.5 Sonnet",
    provider: "anthropic",
    contextWindow: 200000,
    capabilities: {
      vision: true,
      tools: true,
      reasoning: false,
      streaming: true
    },
    costPerMillion: { input: 3, output: 15 }
  },
  {
    id: "claude-3-5-haiku-20241022",
    name: "Claude 3.5 Haiku",
    provider: "anthropic",
    contextWindow: 200000,
    capabilities: {
      vision: true,
      tools: true,
      reasoning: false,
      streaming: true
    },
    costPerMillion: { input: 0.8, output: 4 }
  },
  {
    id: "mistral-large-latest",
    name: "Mistral Large",
    provider: "mistral",
    contextWindow: 128000,
    capabilities: {
      vision: false,
      tools: true,
      reasoning: false,
      streaming: true
    },
    costPerMillion: { input: 2, output: 6 }
  },
  {
    id: "codestral-latest",
    name: "Codestral",
    provider: "mistral",
    contextWindow: 32000,
    capabilities: {
      vision: false,
      tools: true,
      reasoning: false,
      streaming: true
    },
    costPerMillion: { input: 0.2, output: 0.6 }
  },
  {
    id: "mistral-small-latest",
    name: "Mistral Small",
    provider: "mistral",
    contextWindow: 32000,
    capabilities: {
      vision: false,
      tools: true,
      reasoning: false,
      streaming: true
    },
    costPerMillion: { input: 0.2, output: 0.6 }
  },
  {
    id: "gemini-2.0-flash",
    name: "Gemini 2.0 Flash",
    provider: "gemini",
    contextWindow: 1e6,
    capabilities: {
      vision: true,
      tools: true,
      reasoning: false,
      streaming: true
    },
    costPerMillion: { input: 0.075, output: 0.3 }
  },
  {
    id: "gemini-2.5-pro-preview-06-05",
    name: "Gemini 2.5 Pro",
    provider: "gemini",
    contextWindow: 1e6,
    capabilities: {
      vision: true,
      tools: true,
      reasoning: true,
      streaming: true
    },
    costPerMillion: { input: 1.25, output: 10 }
  },
  {
    id: "gemini-2.5-flash-preview-05-20",
    name: "Gemini 2.5 Flash",
    provider: "gemini",
    contextWindow: 1e6,
    capabilities: {
      vision: true,
      tools: true,
      reasoning: true,
      streaming: true
    },
    costPerMillion: { input: 0.15, output: 0.6 }
  }
];
|
|
228
|
+
/**
 * Collect every catalog entry belonging to one provider.
 * @param {string} provider - Provider name compared against each entry's `provider` field.
 * @returns {Array<object>} Matching model descriptors, in catalog order.
 */
function getModelsForProvider(provider) {
  const matches = [];
  for (const model of MODELS) {
    if (model.provider === provider) {
      matches.push(model);
    }
  }
  return matches;
}
|
|
231
|
+
/**
 * Look up a single catalog entry by its model id.
 * @param {string} modelId - The `id` field to search for.
 * @returns {object|undefined} The first matching descriptor, or undefined when unknown.
 */
function getModelInfo(modelId) {
  for (const model of MODELS) {
    if (model.id === modelId) {
      return model;
    }
  }
  return undefined;
}
|
|
234
|
+
/**
 * Model ids recommended for a provider, with legacy aliases resolved:
 * "claude" maps to "anthropic" and "custom" maps to "openai".
 * @param {string} provider - Provider name or legacy alias.
 * @returns {Array<string>} Catalog model ids for the resolved provider.
 */
function getRecommendedModels(provider) {
  let normalizedProvider = provider;
  if (provider === "claude") {
    normalizedProvider = "anthropic";
  } else if (provider === "custom") {
    normalizedProvider = "openai";
  }
  const ids = [];
  for (const { id } of getModelsForProvider(normalizedProvider)) {
    ids.push(id);
  }
  return ids;
}
|
|
238
|
+
/**
 * Default model id for a provider, per the DEFAULT_MODELS table.
 * @param {string} provider - Provider name used as the lookup key.
 * @returns {string|undefined} The default model id, or undefined for unknown providers.
 */
function getDefaultModel(provider) {
  const { [provider]: defaultModel } = DEFAULT_MODELS;
  return defaultModel;
}
|
|
241
|
+
|
|
242
|
+
// src/factory.ts
|
|
243
|
+
import { anthropic } from "@ai-sdk/anthropic";
|
|
244
|
+
import { google } from "@ai-sdk/google";
|
|
245
|
+
import { mistral } from "@ai-sdk/mistral";
|
|
246
|
+
import { openai } from "@ai-sdk/openai";
|
|
247
|
+
import { ollama } from "ollama-ai-provider";
|
|
248
|
+
// Concrete provider implementation shared by every supported backend.
// Mode semantics (see determineMode): "local" = Ollama, "byok" = the caller
// supplied an API key, "managed" = traffic is routed through a proxy.
class BaseProvider {
  name;
  model;
  mode;
  config;
  cachedModel = null;
  constructor(config) {
    this.name = config.provider;
    // Fall back to the provider's catalog default when no model is requested.
    this.model = config.model ?? DEFAULT_MODELS[config.provider];
    this.mode = this.determineMode(config);
    this.config = config;
  }
  // Lazily create and memoize the underlying AI-SDK model instance.
  getModel() {
    if (!this.cachedModel) {
      this.cachedModel = this.createModel();
    }
    return this.cachedModel;
  }
  // List models: live query for Ollama, static catalog for cloud providers.
  async listModels() {
    if (this.name === "ollama") {
      return this.listOllamaModels();
    }
    return getModelsForProvider(this.name);
  }
  // Check that this provider is usable with the current configuration.
  // Resolves to { valid: boolean, error?: string }; never rejects.
  async validate() {
    if (this.name === "ollama") {
      return this.validateOllama();
    }
    if (this.mode === "byok" && !this.config.apiKey) {
      return {
        valid: false,
        error: `API key required for ${this.name}`
      };
    }
    if (this.mode === "managed" && !this.config.proxyUrl && !this.config.organizationId) {
      return {
        valid: false,
        error: "Managed mode requires proxyUrl or organizationId"
      };
    }
    return { valid: true };
  }
  // ollama -> "local"; an explicit apiKey -> "byok"; otherwise "managed".
  determineMode(config) {
    if (config.provider === "ollama")
      return "local";
    if (config.apiKey)
      return "byok";
    return "managed";
  }
  // Temporarily point `envVar` at `url` (when truthy) while `create` runs,
  // then restore the previous value — or remove the variable if it was unset.
  // The try/finally guarantees the environment is restored even when `create`
  // throws; the previous copy-pasted version leaked the override on errors.
  #withTemporaryEnv(envVar, url, create) {
    const originalValue = process.env[envVar];
    if (url) {
      process.env[envVar] = url;
    }
    try {
      return create();
    } finally {
      if (originalValue !== undefined) {
        process.env[envVar] = originalValue;
      } else if (url) {
        delete process.env[envVar];
      }
    }
  }
  // In managed mode every cloud provider routes through an OpenAI-compatible
  // proxy, which is why openai() is used (and OPENAI_BASE_URL overridden)
  // even for anthropic / mistral / gemini.
  #createManagedProxyModel() {
    return this.#withTemporaryEnv("OPENAI_BASE_URL", this.config.proxyUrl, () => openai(this.model));
  }
  // Instantiate the AI-SDK model for this provider/mode combination.
  createModel() {
    const { baseUrl } = this.config;
    switch (this.name) {
      case "ollama": {
        // Only override OLLAMA_BASE_URL when a non-default server address was configured.
        const override = baseUrl && baseUrl !== "http://localhost:11434" ? baseUrl : undefined;
        return this.#withTemporaryEnv("OLLAMA_BASE_URL", override, () => ollama(this.model));
      }
      case "openai":
        if (this.mode === "managed") {
          return this.#createManagedProxyModel();
        }
        return openai(this.model);
      case "anthropic":
        if (this.mode === "managed") {
          return this.#createManagedProxyModel();
        }
        return anthropic(this.model);
      case "mistral":
        if (this.mode === "managed") {
          return this.#createManagedProxyModel();
        }
        return mistral(this.model);
      case "gemini":
        if (this.mode === "managed") {
          return this.#createManagedProxyModel();
        }
        return google(this.model);
      default:
        throw new Error(`Unknown provider: ${this.name}`);
    }
  }
  // Live model listing from a local Ollama server (GET /api/tags); falls back
  // to the static catalog when the server is unreachable or returns an error.
  async listOllamaModels() {
    try {
      const baseUrl = this.config.baseUrl ?? "http://localhost:11434";
      const response = await fetch(`${baseUrl}/api/tags`);
      if (!response.ok) {
        return getModelsForProvider("ollama");
      }
      const data = await response.json();
      const models = data.models ?? [];
      // Capabilities and context size of arbitrary local models are unknown,
      // so conservative defaults are reported for each discovered model.
      return models.map((m) => ({
        id: m.name,
        name: m.name,
        provider: "ollama",
        contextWindow: 8000,
        capabilities: {
          vision: false,
          tools: false,
          reasoning: false,
          streaming: true
        }
      }));
    } catch {
      return getModelsForProvider("ollama");
    }
  }
  // Verify the Ollama server is reachable and actually hosts this.model.
  async validateOllama() {
    try {
      const baseUrl = this.config.baseUrl ?? "http://localhost:11434";
      const response = await fetch(`${baseUrl}/api/tags`);
      if (!response.ok) {
        return {
          valid: false,
          error: `Ollama server returned ${response.status}`
        };
      }
      const data = await response.json();
      const models = data.models ?? [];
      const hasModel = models.some((m) => m.name === this.model);
      if (!hasModel) {
        return {
          valid: false,
          error: `Model "${this.model}" not found. Available: ${models.map((m) => m.name).join(", ")}`
        };
      }
      return { valid: true };
    } catch (error) {
      const baseUrl = this.config.baseUrl ?? "http://localhost:11434";
      return {
        valid: false,
        error: `Cannot connect to Ollama at ${baseUrl}: ${error instanceof Error ? error.message : String(error)}`
      };
    }
  }
}
|
|
431
|
+
/**
 * Construct a provider from an explicit configuration object.
 * @param {object} config - Provider configuration ({ provider, model?, apiKey?, baseUrl?, proxyUrl?, organizationId? }).
 * @returns {BaseProvider} A provider instance wrapping the requested backend.
 */
function createProvider(config) {
  const provider = new BaseProvider(config);
  return provider;
}
|
|
434
|
+
/**
 * Build a provider entirely from environment variables.
 * CONTRACTSPEC_AI_PROVIDER selects the backend (default "openai") and
 * CONTRACTSPEC_AI_MODEL optionally selects the model; the API key is read
 * from the matching provider-specific variable.
 * @returns {BaseProvider} The configured provider instance.
 */
function createProviderFromEnv() {
  const provider = process.env.CONTRACTSPEC_AI_PROVIDER ?? "openai";
  const model = process.env.CONTRACTSPEC_AI_MODEL;
  // Resolve the per-provider API key; ollama is local and needs none.
  let apiKey;
  if (provider === "openai") {
    apiKey = process.env.OPENAI_API_KEY;
  } else if (provider === "anthropic") {
    apiKey = process.env.ANTHROPIC_API_KEY;
  } else if (provider === "mistral") {
    apiKey = process.env.MISTRAL_API_KEY;
  } else if (provider === "gemini") {
    apiKey = process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY;
  }
  return createProvider({
    provider,
    model,
    apiKey,
    baseUrl: process.env.OLLAMA_BASE_URL,
    proxyUrl: process.env.CONTRACTSPEC_AI_PROXY_URL,
    organizationId: process.env.CONTRACTSPEC_ORG_ID
  });
}
|
|
463
|
+
/**
 * Report availability of every supported provider based on the current
 * environment. Ollama is always available locally; each cloud provider is
 * available when its API key is set (BYOK) or when a proxy URL is configured
 * (managed). The previous version copy-pasted the same push block four
 * times; this builds the same list from a data table.
 * @returns {Array<{provider: string, available: boolean, mode: string, reason?: string}>}
 *   Entries in fixed order: ollama, openai, anthropic, mistral, gemini.
 */
function getAvailableProviders() {
  const proxyConfigured = Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL);
  // One row per cloud provider: where its key comes from and the BYOK hint
  // shown when the key is missing.
  const cloudProviders = [
    {
      provider: "openai",
      apiKey: process.env.OPENAI_API_KEY,
      hint: "Set OPENAI_API_KEY for BYOK mode"
    },
    {
      provider: "anthropic",
      apiKey: process.env.ANTHROPIC_API_KEY,
      hint: "Set ANTHROPIC_API_KEY for BYOK mode"
    },
    {
      provider: "mistral",
      apiKey: process.env.MISTRAL_API_KEY,
      hint: "Set MISTRAL_API_KEY for BYOK mode"
    },
    {
      provider: "gemini",
      apiKey: process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY,
      hint: "Set GOOGLE_API_KEY for BYOK mode"
    }
  ];
  const providers = [
    {
      provider: "ollama",
      available: true,
      mode: "local"
    }
  ];
  for (const { provider, apiKey, hint } of cloudProviders) {
    providers.push({
      provider,
      available: Boolean(apiKey) || proxyConfigured,
      mode: apiKey ? "byok" : "managed",
      reason: !apiKey ? hint : undefined
    });
  }
  return providers;
}
|
|
500
|
+
export {
|
|
501
|
+
getAvailableProviders,
|
|
502
|
+
createProviderFromEnv,
|
|
503
|
+
createProvider
|
|
504
|
+
};
|