@lssm/lib.ai-providers 0.0.0-canary-20251217063201 → 0.0.0-canary-20251217073102

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/factory.js CHANGED
@@ -1 +1,225 @@
1
- import{DEFAULT_MODELS as e,getModelsForProvider as t}from"./models.js";import{anthropic as n}from"@ai-sdk/anthropic";import{google as r}from"@ai-sdk/google";import{mistral as i}from"@ai-sdk/mistral";import{openai as a}from"@ai-sdk/openai";import{ollama as o}from"ollama-ai-provider";var s=class{name;model;mode;config;cachedModel=null;constructor(t){this.name=t.provider,this.model=t.model??e[t.provider],this.mode=this.determineMode(t),this.config=t}determineMode(e){return e.provider===`ollama`?`local`:e.apiKey?`byok`:`managed`}getModel(){return this.cachedModel||=this.createModel(),this.cachedModel}createModel(){let{apiKey:e,baseUrl:t,proxyUrl:s,organizationId:c}=this.config;switch(this.name){case`ollama`:{let e=process.env.OLLAMA_BASE_URL;t&&t!==`http://localhost:11434`&&(process.env.OLLAMA_BASE_URL=t);let n=o(this.model);return e===void 0?t&&t!==`http://localhost:11434`&&delete process.env.OLLAMA_BASE_URL:process.env.OLLAMA_BASE_URL=e,n}case`openai`:if(this.mode===`managed`){let e=process.env.OPENAI_BASE_URL;s&&(process.env.OPENAI_BASE_URL=s);let t=a(this.model);return e===void 0?s&&delete process.env.OPENAI_BASE_URL:process.env.OPENAI_BASE_URL=e,t}return a(this.model);case`anthropic`:if(this.mode===`managed`){let e=process.env.OPENAI_BASE_URL;s&&(process.env.OPENAI_BASE_URL=s);let t=a(this.model);return e===void 0?s&&delete process.env.OPENAI_BASE_URL:process.env.OPENAI_BASE_URL=e,t}return n(this.model);case`mistral`:if(this.mode===`managed`){let e=process.env.OPENAI_BASE_URL;s&&(process.env.OPENAI_BASE_URL=s);let t=a(this.model);return e===void 0?s&&delete process.env.OPENAI_BASE_URL:process.env.OPENAI_BASE_URL=e,t}return i(this.model);case`gemini`:if(this.mode===`managed`){let e=process.env.OPENAI_BASE_URL;s&&(process.env.OPENAI_BASE_URL=s);let t=a(this.model);return e===void 0?s&&delete process.env.OPENAI_BASE_URL:process.env.OPENAI_BASE_URL=e,t}return r(this.model);default:throw Error(`Unknown provider: ${this.name}`)}}async listModels(){return 
this.name===`ollama`?this.listOllamaModels():t(this.name)}async listOllamaModels(){try{let e=this.config.baseUrl??`http://localhost:11434`,n=await fetch(`${e}/api/tags`);return n.ok?((await n.json()).models??[]).map(e=>({id:e.name,name:e.name,provider:`ollama`,contextWindow:8e3,capabilities:{vision:!1,tools:!1,reasoning:!1,streaming:!0}})):t(`ollama`)}catch{return t(`ollama`)}}async validate(){return this.name===`ollama`?this.validateOllama():this.mode===`byok`&&!this.config.apiKey?{valid:!1,error:`API key required for ${this.name}`}:this.mode===`managed`&&!this.config.proxyUrl&&!this.config.organizationId?{valid:!1,error:`Managed mode requires proxyUrl or organizationId`}:{valid:!0}}async validateOllama(){try{let e=this.config.baseUrl??`http://localhost:11434`,t=await fetch(`${e}/api/tags`);if(!t.ok)return{valid:!1,error:`Ollama server returned ${t.status}`};let n=(await t.json()).models??[];return n.some(e=>e.name===this.model)?{valid:!0}:{valid:!1,error:`Model "${this.model}" not found. 
Available: ${n.map(e=>e.name).join(`, `)}`}}catch(e){return{valid:!1,error:`Cannot connect to Ollama at ${this.config.baseUrl??`http://localhost:11434`}: ${e instanceof Error?e.message:String(e)}`}}}};function c(e){return new s(e)}function l(){let e=process.env.CONTRACTSPEC_AI_PROVIDER??`openai`,t=process.env.CONTRACTSPEC_AI_MODEL,n;switch(e){case`openai`:n=process.env.OPENAI_API_KEY;break;case`anthropic`:n=process.env.ANTHROPIC_API_KEY;break;case`mistral`:n=process.env.MISTRAL_API_KEY;break;case`gemini`:n=process.env.GOOGLE_API_KEY??process.env.GEMINI_API_KEY;break;case`ollama`:break}return c({provider:e,model:t,apiKey:n,baseUrl:process.env.OLLAMA_BASE_URL,proxyUrl:process.env.CONTRACTSPEC_AI_PROXY_URL,organizationId:process.env.CONTRACTSPEC_ORG_ID})}function u(){let e=[];e.push({provider:`ollama`,available:!0,mode:`local`});let t=process.env.OPENAI_API_KEY;e.push({provider:`openai`,available:!!t||!!process.env.CONTRACTSPEC_AI_PROXY_URL,mode:t?`byok`:`managed`,reason:t?void 0:`Set OPENAI_API_KEY for BYOK mode`});let n=process.env.ANTHROPIC_API_KEY;e.push({provider:`anthropic`,available:!!n||!!process.env.CONTRACTSPEC_AI_PROXY_URL,mode:n?`byok`:`managed`,reason:n?void 0:`Set ANTHROPIC_API_KEY for BYOK mode`});let r=process.env.MISTRAL_API_KEY;e.push({provider:`mistral`,available:!!r||!!process.env.CONTRACTSPEC_AI_PROXY_URL,mode:r?`byok`:`managed`,reason:r?void 0:`Set MISTRAL_API_KEY for BYOK mode`});let i=process.env.GOOGLE_API_KEY??process.env.GEMINI_API_KEY;return e.push({provider:`gemini`,available:!!i||!!process.env.CONTRACTSPEC_AI_PROXY_URL,mode:i?`byok`:`managed`,reason:i?void 0:`Set GOOGLE_API_KEY for BYOK mode`}),e}export{c as createProvider,l as createProviderFromEnv,u as getAvailableProviders};
1
+ import { DEFAULT_MODELS, getModelsForProvider } from "./models.js";
2
+ import { anthropic } from "@ai-sdk/anthropic";
3
+ import { google } from "@ai-sdk/google";
4
+ import { mistral } from "@ai-sdk/mistral";
5
+ import { openai } from "@ai-sdk/openai";
6
+ import { ollama } from "ollama-ai-provider";
7
+
8
+ //#region src/factory.ts
9
/**
 * Base provider implementation.
 *
 * Wraps one of the AI SDK provider factories (openai / anthropic /
 * mistral / google / ollama) behind a uniform interface: lazy model
 * creation, model listing, and configuration validation.
 */
var BaseProvider = class {
	name;
	model;
	mode;
	config;
	cachedModel = null;
	constructor(config) {
		this.name = config.provider;
		this.model = config.model ?? DEFAULT_MODELS[config.provider];
		this.mode = this.determineMode(config);
		this.config = config;
	}
	/**
	 * Resolve the operating mode:
	 * - "local": ollama, no credentials needed
	 * - "byok": an explicit API key was supplied
	 * - "managed": no key; requests are expected to go through a proxy
	 */
	determineMode(config) {
		if (config.provider === "ollama") return "local";
		return config.apiKey ? "byok" : "managed";
	}
	/** Lazily create and cache the underlying language model. */
	getModel() {
		this.cachedModel ??= this.createModel();
		return this.cachedModel;
	}
	/**
	 * Temporarily override an environment variable while running `factory`,
	 * then restore the previous state. The AI SDK factories read their base
	 * URL from the environment, so this is how per-provider base URLs are
	 * injected. Restoration happens in `finally` so an exception thrown by
	 * the factory cannot leak the override (the original inline version
	 * leaked on throw).
	 *
	 * NOTE(review): this pattern is still not safe under concurrent
	 * createModel() calls in the same process — the env var is global state.
	 */
	#withTempEnv(name, value, factory) {
		if (value === undefined) return factory();
		const saved = process.env[name];
		process.env[name] = value;
		try {
			return factory();
		} finally {
			if (saved === undefined) delete process.env[name];
			else process.env[name] = saved;
		}
	}
	/**
	 * Managed mode routes every provider through the OpenAI client pointed
	 * at the configured proxy URL — presumably the proxy speaks an
	 * OpenAI-compatible protocol on behalf of all providers (TODO confirm;
	 * this matches the original code, which calls openai() even for
	 * anthropic/mistral/gemini in managed mode).
	 */
	#createManagedModel(proxyUrl) {
		return this.#withTempEnv("OPENAI_BASE_URL", proxyUrl || undefined, () =>
			openai(this.model));
	}
	/** Instantiate the model for the configured provider and mode. */
	createModel() {
		const { baseUrl, proxyUrl } = this.config;
		const DEFAULT_OLLAMA_URL = "http://localhost:11434";
		switch (this.name) {
			case "ollama": {
				// Only override the env var for a non-default server URL,
				// mirroring the original behavior.
				const override = baseUrl && baseUrl !== DEFAULT_OLLAMA_URL
					? baseUrl
					: undefined;
				return this.#withTempEnv("OLLAMA_BASE_URL", override, () =>
					ollama(this.model));
			}
			case "openai":
				return this.mode === "managed"
					? this.#createManagedModel(proxyUrl)
					: openai(this.model);
			case "anthropic":
				return this.mode === "managed"
					? this.#createManagedModel(proxyUrl)
					: anthropic(this.model);
			case "mistral":
				return this.mode === "managed"
					? this.#createManagedModel(proxyUrl)
					: mistral(this.model);
			case "gemini":
				return this.mode === "managed"
					? this.#createManagedModel(proxyUrl)
					: google(this.model);
			default: throw new Error(`Unknown provider: ${this.name}`);
		}
	}
	/** List models: live query for ollama, static catalog otherwise. */
	async listModels() {
		if (this.name === "ollama") return this.listOllamaModels();
		return getModelsForProvider(this.name);
	}
	/**
	 * Query the local Ollama server for installed models; fall back to the
	 * static catalog when the server is unreachable or returns an error.
	 */
	async listOllamaModels() {
		try {
			const baseUrl = this.config.baseUrl ?? "http://localhost:11434";
			const response = await fetch(`${baseUrl}/api/tags`);
			if (!response.ok) return getModelsForProvider("ollama");
			const tags = (await response.json()).models ?? [];
			return tags.map((tag) => ({
				id: tag.name,
				name: tag.name,
				provider: "ollama",
				// /api/tags does not report a context window; 8k is a
				// conservative placeholder.
				contextWindow: 8e3,
				capabilities: {
					vision: false,
					tools: false,
					reasoning: false,
					streaming: true
				}
			}));
		} catch {
			return getModelsForProvider("ollama");
		}
	}
	/**
	 * Check the configuration is usable:
	 * - ollama: probe the server and the requested model
	 * - byok: an API key must be present
	 * - managed: a proxyUrl or organizationId must be present
	 */
	async validate() {
		if (this.name === "ollama") return this.validateOllama();
		if (this.mode === "byok" && !this.config.apiKey) return {
			valid: false,
			error: `API key required for ${this.name}`
		};
		if (this.mode === "managed" && !this.config.proxyUrl && !this.config.organizationId) return {
			valid: false,
			error: "Managed mode requires proxyUrl or organizationId"
		};
		return { valid: true };
	}
	/** Probe the Ollama server and confirm the configured model exists. */
	async validateOllama() {
		try {
			const baseUrl = this.config.baseUrl ?? "http://localhost:11434";
			const response = await fetch(`${baseUrl}/api/tags`);
			if (!response.ok) return {
				valid: false,
				error: `Ollama server returned ${response.status}`
			};
			const models = (await response.json()).models ?? [];
			if (!models.some((m) => m.name === this.model)) return {
				valid: false,
				error: `Model "${this.model}" not found. Available: ${models.map((m) => m.name).join(", ")}`
			};
			return { valid: true };
		} catch (error) {
			return {
				valid: false,
				error: `Cannot connect to Ollama at ${this.config.baseUrl ?? "http://localhost:11434"}: ${error instanceof Error ? error.message : String(error)}`
			};
		}
	}
};
146
/**
 * Create a provider from configuration
 *
 * Thin factory over BaseProvider so callers never construct it directly.
 */
function createProvider(config) {
	const provider = new BaseProvider(config);
	return provider;
}
152
/**
 * Create a provider from environment variables
 *
 * Provider and model come from CONTRACTSPEC_AI_PROVIDER / _AI_MODEL;
 * the API key is resolved from the conventional env var per provider
 * (ollama needs none).
 */
function createProviderFromEnv() {
	const provider = process.env.CONTRACTSPEC_AI_PROVIDER ?? "openai";
	const model = process.env.CONTRACTSPEC_AI_MODEL;
	const keyResolvers = {
		openai: () => process.env.OPENAI_API_KEY,
		anthropic: () => process.env.ANTHROPIC_API_KEY,
		mistral: () => process.env.MISTRAL_API_KEY,
		gemini: () => process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY
	};
	const apiKey = Object.hasOwn(keyResolvers, provider)
		? keyResolvers[provider]()
		: undefined;
	return createProvider({
		provider,
		model,
		apiKey,
		baseUrl: process.env.OLLAMA_BASE_URL,
		proxyUrl: process.env.CONTRACTSPEC_AI_PROXY_URL,
		organizationId: process.env.CONTRACTSPEC_ORG_ID
	});
}
183
/**
 * Get all available providers with their status
 *
 * Returns one entry per supported provider. Ollama is always available
 * (local mode). A remote provider is available when either its API key
 * env var is set (BYOK) or a proxy URL is configured (managed); the
 * `reason` field carries the BYOK hint when no key is set, and is
 * explicitly `undefined` otherwise (preserving the original shape).
 */
function getAvailableProviders() {
	const proxyConfigured = Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL);
	// [provider, api key value, env var named in the BYOK hint]
	const remoteProviders = [
		["openai", process.env.OPENAI_API_KEY, "OPENAI_API_KEY"],
		["anthropic", process.env.ANTHROPIC_API_KEY, "ANTHROPIC_API_KEY"],
		["mistral", process.env.MISTRAL_API_KEY, "MISTRAL_API_KEY"],
		["gemini", process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY, "GOOGLE_API_KEY"]
	];
	const providers = [{
		provider: "ollama",
		available: true,
		mode: "local"
	}];
	for (const [provider, apiKey, envVar] of remoteProviders) {
		providers.push({
			provider,
			available: Boolean(apiKey) || proxyConfigured,
			mode: apiKey ? "byok" : "managed",
			reason: apiKey ? undefined : `Set ${envVar} for BYOK mode`
		});
	}
	return providers;
}
223
+
224
+ //#endregion
225
+ export { createProvider, createProviderFromEnv, getAvailableProviders };
package/dist/index.js CHANGED
@@ -1 +1,6 @@
1
- import{DEFAULT_MODELS as e,MODELS as t,getDefaultModel as n,getModelInfo as r,getModelsForProvider as i,getRecommendedModels as a}from"./models.js";import{createProvider as o,createProviderFromEnv as s,getAvailableProviders as c}from"./factory.js";import{getEnvVarName as l,hasCredentials as u,isOllamaRunning as d,listOllamaModels as f,validateProvider as p}from"./validation.js";import{getAIProvider as m,validateProvider as h}from"./legacy.js";export{e as DEFAULT_MODELS,t as MODELS,o as createProvider,s as createProviderFromEnv,m as getAIProvider,c as getAvailableProviders,n as getDefaultModel,l as getEnvVarName,r as getModelInfo,i as getModelsForProvider,a as getRecommendedModels,u as hasCredentials,d as isOllamaRunning,f as listOllamaModels,h as validateLegacyProvider,p as validateProvider};
1
+ import { DEFAULT_MODELS, MODELS, getDefaultModel, getModelInfo, getModelsForProvider, getRecommendedModels } from "./models.js";
2
+ import { createProvider, createProviderFromEnv, getAvailableProviders } from "./factory.js";
3
+ import { getEnvVarName, hasCredentials, isOllamaRunning, listOllamaModels, validateProvider as validateProvider$1 } from "./validation.js";
4
+ import { getAIProvider, validateProvider } from "./legacy.js";
5
+
6
+ export { DEFAULT_MODELS, MODELS, createProvider, createProviderFromEnv, getAIProvider, getAvailableProviders, getDefaultModel, getEnvVarName, getModelInfo, getModelsForProvider, getRecommendedModels, hasCredentials, isOllamaRunning, listOllamaModels, validateProvider as validateLegacyProvider, validateProvider$1 as validateProvider };
package/dist/legacy.js CHANGED
@@ -1 +1,72 @@
1
- import{getRecommendedModels as e}from"./models.js";import{createProvider as t}from"./factory.js";function n(e){switch(e){case`claude`:return`anthropic`;case`custom`:return`openai`;default:return e}}function r(e){let r=n(e.aiProvider),i;switch(r){case`openai`:i=process.env.OPENAI_API_KEY;break;case`anthropic`:i=process.env.ANTHROPIC_API_KEY;break;case`mistral`:i=process.env.MISTRAL_API_KEY;break;case`gemini`:i=process.env.GOOGLE_API_KEY??process.env.GEMINI_API_KEY;break}return t({provider:r,model:e.aiModel,apiKey:i,baseUrl:e.customEndpoint}).getModel()}async function i(e){let t=n(e.aiProvider);return t===`ollama`?{success:!0}:t===`anthropic`&&!process.env.ANTHROPIC_API_KEY?{success:!1,error:`ANTHROPIC_API_KEY environment variable not set`}:t===`openai`&&!process.env.OPENAI_API_KEY?{success:!1,error:`OPENAI_API_KEY environment variable not set`}:t===`mistral`&&!process.env.MISTRAL_API_KEY?{success:!1,error:`MISTRAL_API_KEY environment variable not set`}:t===`gemini`&&!process.env.GOOGLE_API_KEY&&!process.env.GEMINI_API_KEY?{success:!1,error:`GOOGLE_API_KEY or GEMINI_API_KEY environment variable not set`}:{success:!0}}export{r as getAIProvider,e as getRecommendedModels,i as validateProvider};
1
+ import { getRecommendedModels } from "./models.js";
2
+ import { createProvider } from "./factory.js";
3
+
4
+ //#region src/legacy.ts
5
/**
 * Map legacy provider names to new ones
 *
 * "claude" -> "anthropic", "custom" -> "openai"; anything else passes
 * through unchanged.
 */
function mapLegacyProvider(legacy) {
	const aliases = {
		claude: "anthropic",
		custom: "openai"
	};
	return Object.hasOwn(aliases, legacy) ? aliases[legacy] : legacy;
}
15
/**
 * Get AI provider from legacy Config type
 *
 * @deprecated Use createProvider() instead
 */
function getAIProvider(config) {
	const provider = mapLegacyProvider(config.aiProvider);
	// Resolve the API key from the conventional env var for each provider;
	// ollama (and unknown providers) get no key.
	const keyResolvers = {
		openai: () => process.env.OPENAI_API_KEY,
		anthropic: () => process.env.ANTHROPIC_API_KEY,
		mistral: () => process.env.MISTRAL_API_KEY,
		gemini: () => process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY
	};
	const apiKey = Object.hasOwn(keyResolvers, provider)
		? keyResolvers[provider]()
		: undefined;
	const legacyProvider = createProvider({
		provider,
		model: config.aiModel,
		apiKey,
		baseUrl: config.customEndpoint
	});
	return legacyProvider.getModel();
}
44
/**
 * Validate provider from legacy Config type
 *
 * @deprecated Use validateProvider() from './validation' instead
 */
async function validateProvider(config) {
	// Legacy aliases: "claude" -> anthropic, "custom" -> openai.
	const aliases = {
		claude: "anthropic",
		custom: "openai"
	};
	const provider = Object.hasOwn(aliases, config.aiProvider)
		? aliases[config.aiProvider]
		: config.aiProvider;
	// Providers that need at least one of these env vars to be set;
	// ollama (and unknown providers) have no requirement.
	const requirements = {
		anthropic: {
			vars: ["ANTHROPIC_API_KEY"],
			error: "ANTHROPIC_API_KEY environment variable not set"
		},
		openai: {
			vars: ["OPENAI_API_KEY"],
			error: "OPENAI_API_KEY environment variable not set"
		},
		mistral: {
			vars: ["MISTRAL_API_KEY"],
			error: "MISTRAL_API_KEY environment variable not set"
		},
		gemini: {
			vars: ["GOOGLE_API_KEY", "GEMINI_API_KEY"],
			error: "GOOGLE_API_KEY or GEMINI_API_KEY environment variable not set"
		}
	};
	const requirement = Object.hasOwn(requirements, provider)
		? requirements[provider]
		: undefined;
	if (requirement && !requirement.vars.some((name) => process.env[name])) {
		return {
			success: false,
			error: requirement.error
		};
	}
	return { success: true };
}
70
+
71
+ //#endregion
72
+ export { getAIProvider, getRecommendedModels, validateProvider };
package/dist/models.js CHANGED
@@ -1 +1,299 @@
1
- const e={ollama:`llama3.2`,openai:`gpt-4o`,anthropic:`claude-sonnet-4-20250514`,mistral:`mistral-large-latest`,gemini:`gemini-2.0-flash`},t=[{id:`llama3.2`,name:`Llama 3.2`,provider:`ollama`,contextWindow:128e3,capabilities:{vision:!1,tools:!0,reasoning:!1,streaming:!0}},{id:`codellama`,name:`Code Llama`,provider:`ollama`,contextWindow:16e3,capabilities:{vision:!1,tools:!1,reasoning:!1,streaming:!0}},{id:`deepseek-coder`,name:`DeepSeek Coder`,provider:`ollama`,contextWindow:16e3,capabilities:{vision:!1,tools:!1,reasoning:!1,streaming:!0}},{id:`mistral`,name:`Mistral 7B`,provider:`ollama`,contextWindow:32e3,capabilities:{vision:!1,tools:!1,reasoning:!1,streaming:!0}},{id:`gpt-4o`,name:`GPT-4o`,provider:`openai`,contextWindow:128e3,capabilities:{vision:!0,tools:!0,reasoning:!1,streaming:!0},costPerMillion:{input:2.5,output:10}},{id:`gpt-4o-mini`,name:`GPT-4o Mini`,provider:`openai`,contextWindow:128e3,capabilities:{vision:!0,tools:!0,reasoning:!1,streaming:!0},costPerMillion:{input:.15,output:.6}},{id:`o1`,name:`o1`,provider:`openai`,contextWindow:2e5,capabilities:{vision:!0,tools:!0,reasoning:!0,streaming:!0},costPerMillion:{input:15,output:60}},{id:`o1-mini`,name:`o1 Mini`,provider:`openai`,contextWindow:128e3,capabilities:{vision:!1,tools:!0,reasoning:!0,streaming:!0},costPerMillion:{input:3,output:12}},{id:`claude-sonnet-4-20250514`,name:`Claude Sonnet 4`,provider:`anthropic`,contextWindow:2e5,capabilities:{vision:!0,tools:!0,reasoning:!0,streaming:!0},costPerMillion:{input:3,output:15}},{id:`claude-3-5-sonnet-20241022`,name:`Claude 3.5 Sonnet`,provider:`anthropic`,contextWindow:2e5,capabilities:{vision:!0,tools:!0,reasoning:!1,streaming:!0},costPerMillion:{input:3,output:15}},{id:`claude-3-5-haiku-20241022`,name:`Claude 3.5 Haiku`,provider:`anthropic`,contextWindow:2e5,capabilities:{vision:!0,tools:!0,reasoning:!1,streaming:!0},costPerMillion:{input:.8,output:4}},{id:`mistral-large-latest`,name:`Mistral 
Large`,provider:`mistral`,contextWindow:128e3,capabilities:{vision:!1,tools:!0,reasoning:!1,streaming:!0},costPerMillion:{input:2,output:6}},{id:`codestral-latest`,name:`Codestral`,provider:`mistral`,contextWindow:32e3,capabilities:{vision:!1,tools:!0,reasoning:!1,streaming:!0},costPerMillion:{input:.2,output:.6}},{id:`mistral-small-latest`,name:`Mistral Small`,provider:`mistral`,contextWindow:32e3,capabilities:{vision:!1,tools:!0,reasoning:!1,streaming:!0},costPerMillion:{input:.2,output:.6}},{id:`gemini-2.0-flash`,name:`Gemini 2.0 Flash`,provider:`gemini`,contextWindow:1e6,capabilities:{vision:!0,tools:!0,reasoning:!1,streaming:!0},costPerMillion:{input:.075,output:.3}},{id:`gemini-2.5-pro-preview-06-05`,name:`Gemini 2.5 Pro`,provider:`gemini`,contextWindow:1e6,capabilities:{vision:!0,tools:!0,reasoning:!0,streaming:!0},costPerMillion:{input:1.25,output:10}},{id:`gemini-2.5-flash-preview-05-20`,name:`Gemini 2.5 Flash`,provider:`gemini`,contextWindow:1e6,capabilities:{vision:!0,tools:!0,reasoning:!0,streaming:!0},costPerMillion:{input:.15,output:.6}}];function n(e){return t.filter(t=>t.provider===e)}function r(e){return t.find(t=>t.id===e)}function i(e){return n(e===`claude`?`anthropic`:e===`custom`?`openai`:e).map(e=>e.id)}function a(t){return e[t]}export{e as DEFAULT_MODELS,t as MODELS,a as getDefaultModel,r as getModelInfo,n as getModelsForProvider,i as getRecommendedModels};
1
+ //#region src/models.ts
2
/**
 * Default models per provider
 *
 * Used as the fallback model id when a caller supplies no explicit
 * model; a lookup with an unknown provider key yields undefined.
 */
const DEFAULT_MODELS = {
	ollama: "llama3.2",
	openai: "gpt-4o",
	anthropic: "claude-sonnet-4-20250514",
	mistral: "mistral-large-latest",
	gemini: "gemini-2.0-flash"
};
12
/**
 * Build one catalog entry.
 *
 * caps is [vision, tools, reasoning]; every listed model supports
 * streaming. cost, when given, is the price per million tokens
 * ({ input, output }); local Ollama models carry no cost and the key
 * is omitted entirely for them.
 */
const defineModel = (id, name, provider, contextWindow, caps, cost) => {
	const [vision, tools, reasoning] = caps;
	return {
		id,
		name,
		provider,
		contextWindow,
		capabilities: {
			vision,
			tools,
			reasoning,
			streaming: true
		},
		...(cost ? { costPerMillion: cost } : {})
	};
};
/**
 * All recommended models with metadata
 */
const MODELS = [
	defineModel("llama3.2", "Llama 3.2", "ollama", 128e3, [false, true, false]),
	defineModel("codellama", "Code Llama", "ollama", 16e3, [false, false, false]),
	defineModel("deepseek-coder", "DeepSeek Coder", "ollama", 16e3, [false, false, false]),
	defineModel("mistral", "Mistral 7B", "ollama", 32e3, [false, false, false]),
	defineModel("gpt-4o", "GPT-4o", "openai", 128e3, [true, true, false], { input: 2.5, output: 10 }),
	defineModel("gpt-4o-mini", "GPT-4o Mini", "openai", 128e3, [true, true, false], { input: .15, output: .6 }),
	defineModel("o1", "o1", "openai", 2e5, [true, true, true], { input: 15, output: 60 }),
	defineModel("o1-mini", "o1 Mini", "openai", 128e3, [false, true, true], { input: 3, output: 12 }),
	defineModel("claude-sonnet-4-20250514", "Claude Sonnet 4", "anthropic", 2e5, [true, true, true], { input: 3, output: 15 }),
	defineModel("claude-3-5-sonnet-20241022", "Claude 3.5 Sonnet", "anthropic", 2e5, [true, true, false], { input: 3, output: 15 }),
	defineModel("claude-3-5-haiku-20241022", "Claude 3.5 Haiku", "anthropic", 2e5, [true, true, false], { input: .8, output: 4 }),
	defineModel("mistral-large-latest", "Mistral Large", "mistral", 128e3, [false, true, false], { input: 2, output: 6 }),
	defineModel("codestral-latest", "Codestral", "mistral", 32e3, [false, true, false], { input: .2, output: .6 }),
	defineModel("mistral-small-latest", "Mistral Small", "mistral", 32e3, [false, true, false], { input: .2, output: .6 }),
	defineModel("gemini-2.0-flash", "Gemini 2.0 Flash", "gemini", 1e6, [true, true, false], { input: .075, output: .3 }),
	defineModel("gemini-2.5-pro-preview-06-05", "Gemini 2.5 Pro", "gemini", 1e6, [true, true, true], { input: 1.25, output: 10 }),
	defineModel("gemini-2.5-flash-preview-05-20", "Gemini 2.5 Flash", "gemini", 1e6, [true, true, true], { input: .15, output: .6 })
];
273
/**
 * Get models for a specific provider
 */
function getModelsForProvider(provider) {
	const matching = [];
	for (const entry of MODELS) {
		if (entry.provider === provider) matching.push(entry);
	}
	return matching;
}
279
/**
 * Get model info by ID
 *
 * Returns undefined when no catalog entry matches.
 */
function getModelInfo(modelId) {
	for (const entry of MODELS) {
		if (entry.id === modelId) return entry;
	}
	return undefined;
}
285
/**
 * Get recommended models for a provider (legacy format)
 *
 * Accepts the legacy aliases "claude" and "custom" and returns the
 * matching catalog model ids.
 */
function getRecommendedModels(provider) {
	let resolved;
	switch (provider) {
		case "claude":
			resolved = "anthropic";
			break;
		case "custom":
			resolved = "openai";
			break;
		default:
			resolved = provider;
	}
	return getModelsForProvider(resolved).map((entry) => entry.id);
}
291
/**
 * Get default model for a provider
 *
 * Plain lookup into DEFAULT_MODELS; returns undefined for a provider
 * that has no entry.
 */
function getDefaultModel(provider) {
	return DEFAULT_MODELS[provider];
}
297
+
298
+ //#endregion
299
+ export { DEFAULT_MODELS, MODELS, getDefaultModel, getModelInfo, getModelsForProvider, getRecommendedModels };
@@ -1 +1,60 @@
1
- import{createProvider as e}from"./factory.js";async function t(t){return e(t).validate()}function n(e){switch(e){case`ollama`:return!0;case`openai`:return!!process.env.OPENAI_API_KEY;case`anthropic`:return!!process.env.ANTHROPIC_API_KEY;case`mistral`:return!!process.env.MISTRAL_API_KEY;case`gemini`:return!!(process.env.GOOGLE_API_KEY??process.env.GEMINI_API_KEY);default:return!1}}function r(e){switch(e){case`ollama`:return null;case`openai`:return`OPENAI_API_KEY`;case`anthropic`:return`ANTHROPIC_API_KEY`;case`mistral`:return`MISTRAL_API_KEY`;case`gemini`:return`GOOGLE_API_KEY`;default:return null}}async function i(e=`http://localhost:11434`){try{return(await fetch(`${e}/api/tags`)).ok}catch{return!1}}async function a(e=`http://localhost:11434`){try{let t=await fetch(`${e}/api/tags`);return t.ok?((await t.json()).models??[]).map(e=>e.name):[]}catch{return[]}}export{r as getEnvVarName,n as hasCredentials,i as isOllamaRunning,a as listOllamaModels,t as validateProvider};
1
+ import { createProvider } from "./factory.js";
2
+
3
+ //#region src/validation.ts
4
/**
 * Validate a provider configuration
 *
 * Delegates to the provider instance's own validate() method.
 */
async function validateProvider(config) {
	const provider = createProvider(config);
	return provider.validate();
}
10
/**
 * Check if a provider has required credentials
 *
 * ollama never needs credentials; unknown providers report false.
 */
function hasCredentials(provider) {
	const lookups = {
		ollama: () => true,
		openai: () => Boolean(process.env.OPENAI_API_KEY),
		anthropic: () => Boolean(process.env.ANTHROPIC_API_KEY),
		mistral: () => Boolean(process.env.MISTRAL_API_KEY),
		gemini: () => Boolean(process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY)
	};
	return Object.hasOwn(lookups, provider) ? lookups[provider]() : false;
}
23
/**
 * Get the environment variable name for a provider's API key
 *
 * Returns null for ollama (no key needed) and for unknown providers.
 */
function getEnvVarName(provider) {
	const envVars = {
		openai: "OPENAI_API_KEY",
		anthropic: "ANTHROPIC_API_KEY",
		mistral: "MISTRAL_API_KEY",
		gemini: "GOOGLE_API_KEY"
	};
	return Object.hasOwn(envVars, provider) ? envVars[provider] : null;
}
36
/**
 * Check if Ollama is running
 *
 * Resolves true only when GET {baseUrl}/api/tags answers with an OK
 * status; any network failure resolves false.
 */
async function isOllamaRunning(baseUrl = "http://localhost:11434") {
	let reachable = false;
	try {
		const response = await fetch(`${baseUrl}/api/tags`);
		reachable = response.ok;
	} catch {
		// Connection failure means the server is not running.
	}
	return reachable;
}
46
/**
 * List available Ollama models
 *
 * Returns the model names reported by GET {baseUrl}/api/tags, or an
 * empty array on any HTTP or network failure.
 */
async function listOllamaModels(baseUrl = "http://localhost:11434") {
	try {
		const response = await fetch(`${baseUrl}/api/tags`);
		if (!response.ok) return [];
		const payload = await response.json();
		const entries = payload.models ?? [];
		return entries.map((entry) => entry.name);
	} catch {
		return [];
	}
}
58
+
59
+ //#endregion
60
+ export { getEnvVarName, hasCredentials, isOllamaRunning, listOllamaModels, validateProvider };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@lssm/lib.ai-providers",
3
- "version": "0.0.0-canary-20251217063201",
3
+ "version": "0.0.0-canary-20251217073102",
4
4
  "type": "module",
5
5
  "main": "./dist/index.js",
6
6
  "module": "./dist/index.js",
@@ -32,8 +32,8 @@
32
32
  "zod": "^4.1.13"
33
33
  },
34
34
  "devDependencies": {
35
- "@lssm/tool.tsdown": "0.0.0-canary-20251217063201",
36
- "@lssm/tool.typescript": "0.0.0-canary-20251217063201",
35
+ "@lssm/tool.tsdown": "0.0.0-canary-20251217073102",
36
+ "@lssm/tool.typescript": "0.0.0-canary-20251217073102",
37
37
  "tsdown": "^0.17.4",
38
38
  "typescript": "^5.9.3"
39
39
  },