@contractspec/lib.ai-providers 1.44.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +93 -0
- package/dist/factory.d.ts +19 -0
- package/dist/factory.d.ts.map +1 -0
- package/dist/factory.js +226 -0
- package/dist/factory.js.map +1 -0
- package/dist/index.d.ts +6 -0
- package/dist/index.js +6 -0
- package/dist/legacy.d.ts +24 -0
- package/dist/legacy.d.ts.map +1 -0
- package/dist/legacy.js +73 -0
- package/dist/legacy.js.map +1 -0
- package/dist/models.d.ts +31 -0
- package/dist/models.d.ts.map +1 -0
- package/dist/models.js +300 -0
- package/dist/models.js.map +1 -0
- package/dist/types.d.ts +113 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +0 -0
- package/dist/validation.d.ts +35 -0
- package/dist/validation.d.ts.map +1 -0
- package/dist/validation.js +61 -0
- package/dist/validation.js.map +1 -0
- package/package.json +69 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 Chaman Ventures, SASU
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
# @contractspec/lib.ai-providers
|
|
2
|
+
|
|
3
|
+
Website: https://contractspec.io/
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
**Unified AI provider abstraction** for ContractSpec applications.
|
|
7
|
+
|
|
8
|
+
## Overview
|
|
9
|
+
|
|
10
|
+
This library provides a consistent interface for working with multiple LLM providers across ContractSpec. It's used by:
|
|
11
|
+
|
|
12
|
+
- `@contractspec/module.ai-chat` - Vibe coding chat
|
|
13
|
+
- `@contractspec/bundle.workspace` - CLI AI features
|
|
14
|
+
- `@contractspec/lib.ai-agent` - Agent orchestration
|
|
15
|
+
|
|
16
|
+
## Supported Providers
|
|
17
|
+
|
|
18
|
+
| Provider | Local | BYOK | Managed |
|
|
19
|
+
|----------|-------|------|---------|
|
|
20
|
+
| Ollama | ✅ | - | - |
|
|
21
|
+
| OpenAI | - | ✅ | ✅ |
|
|
22
|
+
| Anthropic | - | ✅ | ✅ |
|
|
23
|
+
| Mistral | - | ✅ | ✅ |
|
|
24
|
+
| Google Gemini | - | ✅ | ✅ |
|
|
25
|
+
|
|
26
|
+
## Usage
|
|
27
|
+
|
|
28
|
+
### Basic Provider Creation
|
|
29
|
+
|
|
30
|
+
```typescript
|
|
31
|
+
import { createProvider, type ProviderConfig } from '@contractspec/lib.ai-providers';
|
|
32
|
+
|
|
33
|
+
// Ollama (local)
|
|
34
|
+
const ollamaProvider = createProvider({
|
|
35
|
+
provider: 'ollama',
|
|
36
|
+
model: 'llama3.2',
|
|
37
|
+
});
|
|
38
|
+
|
|
39
|
+
// OpenAI (BYOK)
|
|
40
|
+
const openaiProvider = createProvider({
|
|
41
|
+
provider: 'openai',
|
|
42
|
+
apiKey: process.env.OPENAI_API_KEY,
|
|
43
|
+
model: 'gpt-4o',
|
|
44
|
+
});
|
|
45
|
+
|
|
46
|
+
// Use the model
|
|
47
|
+
const model = openaiProvider.getModel();
|
|
48
|
+
```
|
|
49
|
+
|
|
50
|
+
### From Environment
|
|
51
|
+
|
|
52
|
+
```typescript
|
|
53
|
+
import { createProviderFromEnv } from '@contractspec/lib.ai-providers';
|
|
54
|
+
|
|
55
|
+
// Reads from CONTRACTSPEC_AI_PROVIDER, OPENAI_API_KEY, etc.
|
|
56
|
+
const provider = createProviderFromEnv();
|
|
57
|
+
```
|
|
58
|
+
|
|
59
|
+
### Legacy Config Support
|
|
60
|
+
|
|
61
|
+
```typescript
|
|
62
|
+
import { getAIProvider } from '@contractspec/lib.ai-providers';
|
|
63
|
+
import type { Config } from '@contractspec/bundle.workspace';
|
|
64
|
+
|
|
65
|
+
// Backwards compatible with existing Config type
|
|
66
|
+
const model = getAIProvider(config);
|
|
67
|
+
```
|
|
68
|
+
|
|
69
|
+
## Provider Modes
|
|
70
|
+
|
|
71
|
+
- **Local**: Run models locally (Ollama only)
|
|
72
|
+
- **BYOK**: Bring Your Own Key for cloud providers
|
|
73
|
+
- **Managed**: Use ContractSpec-managed keys via API proxy
|
|
74
|
+
|
|
75
|
+
## API
|
|
76
|
+
|
|
77
|
+
### Types
|
|
78
|
+
|
|
79
|
+
- `ProviderName` - Supported provider names
|
|
80
|
+
- `ProviderMode` - local | byok | managed
|
|
81
|
+
- `ProviderConfig` - Configuration for creating a provider
|
|
82
|
+
- `Provider` - Provider interface with getModel()
|
|
83
|
+
- `ModelInfo` - Model metadata (context window, capabilities)
|
|
84
|
+
|
|
85
|
+
### Functions
|
|
86
|
+
|
|
87
|
+
- `createProvider(config)` - Create a provider instance
|
|
88
|
+
- `createProviderFromEnv()` - Create from environment variables
|
|
89
|
+
- `getAIProvider(config)` - Legacy compatibility function
|
|
90
|
+
- `validateProvider(config)` - Check if provider is properly configured
|
|
91
|
+
- `getRecommendedModels(provider)` - Get recommended models for a provider
|
|
92
|
+
- `getAvailableProviders()` - List available providers with status
|
|
93
|
+
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import { Provider, ProviderAvailability, ProviderConfig } from "./types.js";
|
|
2
|
+
|
|
3
|
+
//#region src/factory.d.ts
|
|
4
|
+
|
|
5
|
+
/**
|
|
6
|
+
* Create a provider from configuration
|
|
7
|
+
*/
|
|
8
|
+
declare function createProvider(config: ProviderConfig): Provider;
|
|
9
|
+
/**
|
|
10
|
+
* Create a provider from environment variables
|
|
11
|
+
*/
|
|
12
|
+
declare function createProviderFromEnv(): Provider;
|
|
13
|
+
/**
|
|
14
|
+
* Get all available providers with their status
|
|
15
|
+
*/
|
|
16
|
+
declare function getAvailableProviders(): ProviderAvailability[];
|
|
17
|
+
//#endregion
|
|
18
|
+
export { createProvider, createProviderFromEnv, getAvailableProviders };
|
|
19
|
+
//# sourceMappingURL=factory.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"factory.d.ts","names":[],"sources":["../src/factory.ts"],"sourcesContent":[],"mappings":";;;;;;AAuQA;AAOgB,iBAPA,cAAA,CAOyB,MAAA,EAPF,cAOU,CAAA,EAPO,QAOP;AAqCjD;;;iBArCgB,qBAAA,CAAA,GAAyB;;;;iBAqCzB,qBAAA,CAAA,GAAyB"}
|
package/dist/factory.js
ADDED
|
@@ -0,0 +1,226 @@
|
|
|
1
|
+
import { DEFAULT_MODELS, getModelsForProvider } from "./models.js";
|
|
2
|
+
import { anthropic } from "@ai-sdk/anthropic";
|
|
3
|
+
import { google } from "@ai-sdk/google";
|
|
4
|
+
import { mistral } from "@ai-sdk/mistral";
|
|
5
|
+
import { openai } from "@ai-sdk/openai";
|
|
6
|
+
import { ollama } from "ollama-ai-provider";
|
|
7
|
+
|
|
8
|
+
//#region src/factory.ts
|
|
9
|
+
/**
|
|
10
|
+
* Base provider implementation
|
|
11
|
+
*/
|
|
12
|
+
var BaseProvider = class {
|
|
13
|
+
name;
|
|
14
|
+
model;
|
|
15
|
+
mode;
|
|
16
|
+
config;
|
|
17
|
+
cachedModel = null;
|
|
18
|
+
constructor(config) {
|
|
19
|
+
this.name = config.provider;
|
|
20
|
+
this.model = config.model ?? DEFAULT_MODELS[config.provider];
|
|
21
|
+
this.mode = this.determineMode(config);
|
|
22
|
+
this.config = config;
|
|
23
|
+
}
|
|
24
|
+
getModel() {
|
|
25
|
+
if (!this.cachedModel) this.cachedModel = this.createModel();
|
|
26
|
+
return this.cachedModel;
|
|
27
|
+
}
|
|
28
|
+
async listModels() {
|
|
29
|
+
if (this.name === "ollama") return this.listOllamaModels();
|
|
30
|
+
return getModelsForProvider(this.name);
|
|
31
|
+
}
|
|
32
|
+
async validate() {
|
|
33
|
+
if (this.name === "ollama") return this.validateOllama();
|
|
34
|
+
if (this.mode === "byok" && !this.config.apiKey) return {
|
|
35
|
+
valid: false,
|
|
36
|
+
error: `API key required for ${this.name}`
|
|
37
|
+
};
|
|
38
|
+
if (this.mode === "managed" && !this.config.proxyUrl && !this.config.organizationId) return {
|
|
39
|
+
valid: false,
|
|
40
|
+
error: "Managed mode requires proxyUrl or organizationId"
|
|
41
|
+
};
|
|
42
|
+
return { valid: true };
|
|
43
|
+
}
|
|
44
|
+
determineMode(config) {
|
|
45
|
+
if (config.provider === "ollama") return "local";
|
|
46
|
+
if (config.apiKey) return "byok";
|
|
47
|
+
return "managed";
|
|
48
|
+
}
|
|
49
|
+
createModel() {
|
|
50
|
+
const { baseUrl, proxyUrl } = this.config;
|
|
51
|
+
switch (this.name) {
|
|
52
|
+
case "ollama": {
|
|
53
|
+
const originalBaseUrl = process.env.OLLAMA_BASE_URL;
|
|
54
|
+
if (baseUrl && baseUrl !== "http://localhost:11434") process.env.OLLAMA_BASE_URL = baseUrl;
|
|
55
|
+
const ollamaModel = ollama(this.model);
|
|
56
|
+
if (originalBaseUrl !== void 0) process.env.OLLAMA_BASE_URL = originalBaseUrl;
|
|
57
|
+
else if (baseUrl && baseUrl !== "http://localhost:11434") delete process.env.OLLAMA_BASE_URL;
|
|
58
|
+
return ollamaModel;
|
|
59
|
+
}
|
|
60
|
+
case "openai":
|
|
61
|
+
if (this.mode === "managed") {
|
|
62
|
+
const originalBaseUrl = process.env.OPENAI_BASE_URL;
|
|
63
|
+
if (proxyUrl) process.env.OPENAI_BASE_URL = proxyUrl;
|
|
64
|
+
const model = openai(this.model);
|
|
65
|
+
if (originalBaseUrl !== void 0) process.env.OPENAI_BASE_URL = originalBaseUrl;
|
|
66
|
+
else if (proxyUrl) delete process.env.OPENAI_BASE_URL;
|
|
67
|
+
return model;
|
|
68
|
+
}
|
|
69
|
+
return openai(this.model);
|
|
70
|
+
case "anthropic":
|
|
71
|
+
if (this.mode === "managed") {
|
|
72
|
+
const originalBaseUrl = process.env.OPENAI_BASE_URL;
|
|
73
|
+
if (proxyUrl) process.env.OPENAI_BASE_URL = proxyUrl;
|
|
74
|
+
const model = openai(this.model);
|
|
75
|
+
if (originalBaseUrl !== void 0) process.env.OPENAI_BASE_URL = originalBaseUrl;
|
|
76
|
+
else if (proxyUrl) delete process.env.OPENAI_BASE_URL;
|
|
77
|
+
return model;
|
|
78
|
+
}
|
|
79
|
+
return anthropic(this.model);
|
|
80
|
+
case "mistral":
|
|
81
|
+
if (this.mode === "managed") {
|
|
82
|
+
const originalBaseUrl = process.env.OPENAI_BASE_URL;
|
|
83
|
+
if (proxyUrl) process.env.OPENAI_BASE_URL = proxyUrl;
|
|
84
|
+
const model = openai(this.model);
|
|
85
|
+
if (originalBaseUrl !== void 0) process.env.OPENAI_BASE_URL = originalBaseUrl;
|
|
86
|
+
else if (proxyUrl) delete process.env.OPENAI_BASE_URL;
|
|
87
|
+
return model;
|
|
88
|
+
}
|
|
89
|
+
return mistral(this.model);
|
|
90
|
+
case "gemini":
|
|
91
|
+
if (this.mode === "managed") {
|
|
92
|
+
const originalBaseUrl = process.env.OPENAI_BASE_URL;
|
|
93
|
+
if (proxyUrl) process.env.OPENAI_BASE_URL = proxyUrl;
|
|
94
|
+
const model = openai(this.model);
|
|
95
|
+
if (originalBaseUrl !== void 0) process.env.OPENAI_BASE_URL = originalBaseUrl;
|
|
96
|
+
else if (proxyUrl) delete process.env.OPENAI_BASE_URL;
|
|
97
|
+
return model;
|
|
98
|
+
}
|
|
99
|
+
return google(this.model);
|
|
100
|
+
default: throw new Error(`Unknown provider: ${this.name}`);
|
|
101
|
+
}
|
|
102
|
+
}
|
|
103
|
+
async listOllamaModels() {
|
|
104
|
+
try {
|
|
105
|
+
const baseUrl = this.config.baseUrl ?? "http://localhost:11434";
|
|
106
|
+
const response = await fetch(`${baseUrl}/api/tags`);
|
|
107
|
+
if (!response.ok) return getModelsForProvider("ollama");
|
|
108
|
+
return ((await response.json()).models ?? []).map((m) => ({
|
|
109
|
+
id: m.name,
|
|
110
|
+
name: m.name,
|
|
111
|
+
provider: "ollama",
|
|
112
|
+
contextWindow: 8e3,
|
|
113
|
+
capabilities: {
|
|
114
|
+
vision: false,
|
|
115
|
+
tools: false,
|
|
116
|
+
reasoning: false,
|
|
117
|
+
streaming: true
|
|
118
|
+
}
|
|
119
|
+
}));
|
|
120
|
+
} catch {
|
|
121
|
+
return getModelsForProvider("ollama");
|
|
122
|
+
}
|
|
123
|
+
}
|
|
124
|
+
async validateOllama() {
|
|
125
|
+
try {
|
|
126
|
+
const baseUrl = this.config.baseUrl ?? "http://localhost:11434";
|
|
127
|
+
const response = await fetch(`${baseUrl}/api/tags`);
|
|
128
|
+
if (!response.ok) return {
|
|
129
|
+
valid: false,
|
|
130
|
+
error: `Ollama server returned ${response.status}`
|
|
131
|
+
};
|
|
132
|
+
const models = (await response.json()).models ?? [];
|
|
133
|
+
if (!models.some((m) => m.name === this.model)) return {
|
|
134
|
+
valid: false,
|
|
135
|
+
error: `Model "${this.model}" not found. Available: ${models.map((m) => m.name).join(", ")}`
|
|
136
|
+
};
|
|
137
|
+
return { valid: true };
|
|
138
|
+
} catch (error) {
|
|
139
|
+
return {
|
|
140
|
+
valid: false,
|
|
141
|
+
error: `Cannot connect to Ollama at ${this.config.baseUrl ?? "http://localhost:11434"}: ${error instanceof Error ? error.message : String(error)}`
|
|
142
|
+
};
|
|
143
|
+
}
|
|
144
|
+
}
|
|
145
|
+
};
|
|
146
|
+
/**
|
|
147
|
+
* Create a provider from configuration
|
|
148
|
+
*/
|
|
149
|
+
function createProvider(config) {
|
|
150
|
+
return new BaseProvider(config);
|
|
151
|
+
}
|
|
152
|
+
/**
|
|
153
|
+
* Create a provider from environment variables
|
|
154
|
+
*/
|
|
155
|
+
function createProviderFromEnv() {
|
|
156
|
+
const provider = process.env.CONTRACTSPEC_AI_PROVIDER ?? "openai";
|
|
157
|
+
const model = process.env.CONTRACTSPEC_AI_MODEL;
|
|
158
|
+
let apiKey;
|
|
159
|
+
switch (provider) {
|
|
160
|
+
case "openai":
|
|
161
|
+
apiKey = process.env.OPENAI_API_KEY;
|
|
162
|
+
break;
|
|
163
|
+
case "anthropic":
|
|
164
|
+
apiKey = process.env.ANTHROPIC_API_KEY;
|
|
165
|
+
break;
|
|
166
|
+
case "mistral":
|
|
167
|
+
apiKey = process.env.MISTRAL_API_KEY;
|
|
168
|
+
break;
|
|
169
|
+
case "gemini":
|
|
170
|
+
apiKey = process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY;
|
|
171
|
+
break;
|
|
172
|
+
case "ollama": break;
|
|
173
|
+
}
|
|
174
|
+
return createProvider({
|
|
175
|
+
provider,
|
|
176
|
+
model,
|
|
177
|
+
apiKey,
|
|
178
|
+
baseUrl: process.env.OLLAMA_BASE_URL,
|
|
179
|
+
proxyUrl: process.env.CONTRACTSPEC_AI_PROXY_URL,
|
|
180
|
+
organizationId: process.env.CONTRACTSPEC_ORG_ID
|
|
181
|
+
});
|
|
182
|
+
}
|
|
183
|
+
/**
|
|
184
|
+
* Get all available providers with their status
|
|
185
|
+
*/
|
|
186
|
+
function getAvailableProviders() {
|
|
187
|
+
const providers = [];
|
|
188
|
+
providers.push({
|
|
189
|
+
provider: "ollama",
|
|
190
|
+
available: true,
|
|
191
|
+
mode: "local"
|
|
192
|
+
});
|
|
193
|
+
const openaiKey = process.env.OPENAI_API_KEY;
|
|
194
|
+
providers.push({
|
|
195
|
+
provider: "openai",
|
|
196
|
+
available: Boolean(openaiKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),
|
|
197
|
+
mode: openaiKey ? "byok" : "managed",
|
|
198
|
+
reason: !openaiKey ? "Set OPENAI_API_KEY for BYOK mode" : void 0
|
|
199
|
+
});
|
|
200
|
+
const anthropicKey = process.env.ANTHROPIC_API_KEY;
|
|
201
|
+
providers.push({
|
|
202
|
+
provider: "anthropic",
|
|
203
|
+
available: Boolean(anthropicKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),
|
|
204
|
+
mode: anthropicKey ? "byok" : "managed",
|
|
205
|
+
reason: !anthropicKey ? "Set ANTHROPIC_API_KEY for BYOK mode" : void 0
|
|
206
|
+
});
|
|
207
|
+
const mistralKey = process.env.MISTRAL_API_KEY;
|
|
208
|
+
providers.push({
|
|
209
|
+
provider: "mistral",
|
|
210
|
+
available: Boolean(mistralKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),
|
|
211
|
+
mode: mistralKey ? "byok" : "managed",
|
|
212
|
+
reason: !mistralKey ? "Set MISTRAL_API_KEY for BYOK mode" : void 0
|
|
213
|
+
});
|
|
214
|
+
const geminiKey = process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY;
|
|
215
|
+
providers.push({
|
|
216
|
+
provider: "gemini",
|
|
217
|
+
available: Boolean(geminiKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),
|
|
218
|
+
mode: geminiKey ? "byok" : "managed",
|
|
219
|
+
reason: !geminiKey ? "Set GOOGLE_API_KEY for BYOK mode" : void 0
|
|
220
|
+
});
|
|
221
|
+
return providers;
|
|
222
|
+
}
|
|
223
|
+
|
|
224
|
+
//#endregion
|
|
225
|
+
export { createProvider, createProviderFromEnv, getAvailableProviders };
|
|
226
|
+
//# sourceMappingURL=factory.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"factory.js","names":["apiKey: string | undefined","providers: ProviderAvailability[]"],"sources":["../src/factory.ts"],"sourcesContent":["/**\n * Provider factory and creation utilities\n */\nimport type { LanguageModel } from 'ai';\nimport { anthropic } from '@ai-sdk/anthropic';\nimport { google } from '@ai-sdk/google';\nimport { mistral } from '@ai-sdk/mistral';\nimport { openai } from '@ai-sdk/openai';\nimport { ollama } from 'ollama-ai-provider';\nimport type {\n ModelInfo,\n Provider,\n ProviderAvailability,\n ProviderConfig,\n ProviderMode,\n ProviderName,\n} from './types';\nimport { DEFAULT_MODELS, getModelsForProvider } from './models';\n\n/**\n * Base provider implementation\n */\nclass BaseProvider implements Provider {\n readonly name: ProviderName;\n readonly model: string;\n readonly mode: ProviderMode;\n\n private readonly config: ProviderConfig;\n private cachedModel: LanguageModel | null = null;\n\n constructor(config: ProviderConfig) {\n this.name = config.provider;\n this.model = config.model ?? 
DEFAULT_MODELS[config.provider];\n this.mode = this.determineMode(config);\n this.config = config;\n }\n\n getModel(): LanguageModel {\n if (!this.cachedModel) {\n this.cachedModel = this.createModel();\n }\n return this.cachedModel;\n }\n\n async listModels(): Promise<ModelInfo[]> {\n if (this.name === 'ollama') {\n return this.listOllamaModels();\n }\n return getModelsForProvider(this.name);\n }\n\n async validate(): Promise<{ valid: boolean; error?: string }> {\n if (this.name === 'ollama') {\n return this.validateOllama();\n }\n\n if (this.mode === 'byok' && !this.config.apiKey) {\n return {\n valid: false,\n error: `API key required for ${this.name}`,\n };\n }\n\n if (\n this.mode === 'managed' &&\n !this.config.proxyUrl &&\n !this.config.organizationId\n ) {\n return {\n valid: false,\n error: 'Managed mode requires proxyUrl or organizationId',\n };\n }\n\n return { valid: true };\n }\n\n private determineMode(config: ProviderConfig): ProviderMode {\n if (config.provider === 'ollama') return 'local';\n if (config.apiKey) return 'byok';\n return 'managed';\n }\n\n private createModel(): LanguageModel {\n const { baseUrl, proxyUrl } = this.config;\n\n switch (this.name) {\n case 'ollama': {\n // For Ollama, set the base URL via environment variable\n const originalBaseUrl = process.env.OLLAMA_BASE_URL;\n if (baseUrl && baseUrl !== 'http://localhost:11434') {\n process.env.OLLAMA_BASE_URL = baseUrl;\n }\n\n const ollamaModel = ollama(this.model);\n\n // Restore original environment variable\n if (originalBaseUrl !== undefined) {\n process.env.OLLAMA_BASE_URL = originalBaseUrl;\n } else if (baseUrl && baseUrl !== 'http://localhost:11434') {\n delete process.env.OLLAMA_BASE_URL;\n }\n\n return ollamaModel as unknown as LanguageModel;\n }\n\n case 'openai':\n if (this.mode === 'managed') {\n // For managed mode, use proxy URL via environment variable\n const originalBaseUrl = process.env.OPENAI_BASE_URL;\n if (proxyUrl) {\n process.env.OPENAI_BASE_URL = proxyUrl;\n 
}\n\n const model = openai(this.model);\n\n // Restore original environment variable\n if (originalBaseUrl !== undefined) {\n process.env.OPENAI_BASE_URL = originalBaseUrl;\n } else if (proxyUrl) {\n delete process.env.OPENAI_BASE_URL;\n }\n\n return model;\n }\n return openai(this.model);\n\n case 'anthropic':\n if (this.mode === 'managed') {\n // For managed mode with Anthropic, we use the proxy via openai\n const originalBaseUrl = process.env.OPENAI_BASE_URL;\n if (proxyUrl) {\n process.env.OPENAI_BASE_URL = proxyUrl;\n }\n\n const model = openai(this.model);\n\n // Restore original environment variable\n if (originalBaseUrl !== undefined) {\n process.env.OPENAI_BASE_URL = originalBaseUrl;\n } else if (proxyUrl) {\n delete process.env.OPENAI_BASE_URL;\n }\n\n return model;\n }\n return anthropic(this.model);\n\n case 'mistral':\n if (this.mode === 'managed') {\n // For managed mode with Mistral, we use the proxy via openai\n const originalBaseUrl = process.env.OPENAI_BASE_URL;\n if (proxyUrl) {\n process.env.OPENAI_BASE_URL = proxyUrl;\n }\n\n const model = openai(this.model);\n\n // Restore original environment variable\n if (originalBaseUrl !== undefined) {\n process.env.OPENAI_BASE_URL = originalBaseUrl;\n } else if (proxyUrl) {\n delete process.env.OPENAI_BASE_URL;\n }\n\n return model;\n }\n return mistral(this.model);\n\n case 'gemini':\n if (this.mode === 'managed') {\n // For managed mode with Gemini, we use the proxy via openai\n const originalBaseUrl = process.env.OPENAI_BASE_URL;\n if (proxyUrl) {\n process.env.OPENAI_BASE_URL = proxyUrl;\n }\n\n const model = openai(this.model);\n\n // Restore original environment variable\n if (originalBaseUrl !== undefined) {\n process.env.OPENAI_BASE_URL = originalBaseUrl;\n } else if (proxyUrl) {\n delete process.env.OPENAI_BASE_URL;\n }\n\n return model;\n }\n return google(this.model);\n\n default:\n throw new Error(`Unknown provider: ${this.name}`);\n }\n }\n\n private async listOllamaModels(): 
Promise<ModelInfo[]> {\n try {\n const baseUrl = this.config.baseUrl ?? 'http://localhost:11434';\n const response = await fetch(`${baseUrl}/api/tags`);\n if (!response.ok) {\n return getModelsForProvider('ollama');\n }\n\n const data = (await response.json()) as {\n models?: { name: string; size?: number }[];\n };\n const models = data.models ?? [];\n\n return models.map((m) => ({\n id: m.name,\n name: m.name,\n provider: 'ollama' as const,\n contextWindow: 8000,\n capabilities: {\n vision: false,\n tools: false,\n reasoning: false,\n streaming: true,\n },\n }));\n } catch {\n return getModelsForProvider('ollama');\n }\n }\n\n private async validateOllama(): Promise<{ valid: boolean; error?: string }> {\n try {\n const baseUrl = this.config.baseUrl ?? 'http://localhost:11434';\n const response = await fetch(`${baseUrl}/api/tags`);\n if (!response.ok) {\n return {\n valid: false,\n error: `Ollama server returned ${response.status}`,\n };\n }\n\n const data = (await response.json()) as {\n models?: { name: string }[];\n };\n const models = data.models ?? [];\n const hasModel = models.some((m) => m.name === this.model);\n\n if (!hasModel) {\n return {\n valid: false,\n error: `Model \"${this.model}\" not found. Available: ${models.map((m) => m.name).join(', ')}`,\n };\n }\n\n return { valid: true };\n } catch (error) {\n const baseUrl = this.config.baseUrl ?? 'http://localhost:11434';\n return {\n valid: false,\n error: `Cannot connect to Ollama at ${baseUrl}: ${error instanceof Error ? error.message : String(error)}`,\n };\n }\n }\n}\n\n/**\n * Create a provider from configuration\n */\nexport function createProvider(config: ProviderConfig): Provider {\n return new BaseProvider(config);\n}\n\n/**\n * Create a provider from environment variables\n */\nexport function createProviderFromEnv(): Provider {\n const provider =\n (process.env.CONTRACTSPEC_AI_PROVIDER as ProviderName) ?? 
'openai';\n const model = process.env.CONTRACTSPEC_AI_MODEL;\n\n let apiKey: string | undefined;\n switch (provider) {\n case 'openai':\n apiKey = process.env.OPENAI_API_KEY;\n break;\n case 'anthropic':\n apiKey = process.env.ANTHROPIC_API_KEY;\n break;\n case 'mistral':\n apiKey = process.env.MISTRAL_API_KEY;\n break;\n case 'gemini':\n apiKey = process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY;\n break;\n case 'ollama':\n // No API key needed\n break;\n }\n\n return createProvider({\n provider,\n model,\n apiKey,\n baseUrl: process.env.OLLAMA_BASE_URL,\n proxyUrl: process.env.CONTRACTSPEC_AI_PROXY_URL,\n organizationId: process.env.CONTRACTSPEC_ORG_ID,\n });\n}\n\n/**\n * Get all available providers with their status\n */\nexport function getAvailableProviders(): ProviderAvailability[] {\n const providers: ProviderAvailability[] = [];\n\n // Ollama (local)\n providers.push({\n provider: 'ollama',\n available: true,\n mode: 'local',\n });\n\n // OpenAI\n const openaiKey = process.env.OPENAI_API_KEY;\n providers.push({\n provider: 'openai',\n available:\n Boolean(openaiKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),\n mode: openaiKey ? 'byok' : 'managed',\n reason: !openaiKey ? 'Set OPENAI_API_KEY for BYOK mode' : undefined,\n });\n\n // Anthropic\n const anthropicKey = process.env.ANTHROPIC_API_KEY;\n providers.push({\n provider: 'anthropic',\n available:\n Boolean(anthropicKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),\n mode: anthropicKey ? 'byok' : 'managed',\n reason: !anthropicKey ? 'Set ANTHROPIC_API_KEY for BYOK mode' : undefined,\n });\n\n // Mistral\n const mistralKey = process.env.MISTRAL_API_KEY;\n providers.push({\n provider: 'mistral',\n available:\n Boolean(mistralKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),\n mode: mistralKey ? 'byok' : 'managed',\n reason: !mistralKey ? 'Set MISTRAL_API_KEY for BYOK mode' : undefined,\n });\n\n // Gemini\n const geminiKey = process.env.GOOGLE_API_KEY ?? 
process.env.GEMINI_API_KEY;\n providers.push({\n provider: 'gemini',\n available:\n Boolean(geminiKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),\n mode: geminiKey ? 'byok' : 'managed',\n reason: !geminiKey ? 'Set GOOGLE_API_KEY for BYOK mode' : undefined,\n });\n\n return providers;\n}\n"],"mappings":";;;;;;;;;;;AAsBA,IAAM,eAAN,MAAuC;CACrC,AAAS;CACT,AAAS;CACT,AAAS;CAET,AAAiB;CACjB,AAAQ,cAAoC;CAE5C,YAAY,QAAwB;AAClC,OAAK,OAAO,OAAO;AACnB,OAAK,QAAQ,OAAO,SAAS,eAAe,OAAO;AACnD,OAAK,OAAO,KAAK,cAAc,OAAO;AACtC,OAAK,SAAS;;CAGhB,WAA0B;AACxB,MAAI,CAAC,KAAK,YACR,MAAK,cAAc,KAAK,aAAa;AAEvC,SAAO,KAAK;;CAGd,MAAM,aAAmC;AACvC,MAAI,KAAK,SAAS,SAChB,QAAO,KAAK,kBAAkB;AAEhC,SAAO,qBAAqB,KAAK,KAAK;;CAGxC,MAAM,WAAwD;AAC5D,MAAI,KAAK,SAAS,SAChB,QAAO,KAAK,gBAAgB;AAG9B,MAAI,KAAK,SAAS,UAAU,CAAC,KAAK,OAAO,OACvC,QAAO;GACL,OAAO;GACP,OAAO,wBAAwB,KAAK;GACrC;AAGH,MACE,KAAK,SAAS,aACd,CAAC,KAAK,OAAO,YACb,CAAC,KAAK,OAAO,eAEb,QAAO;GACL,OAAO;GACP,OAAO;GACR;AAGH,SAAO,EAAE,OAAO,MAAM;;CAGxB,AAAQ,cAAc,QAAsC;AAC1D,MAAI,OAAO,aAAa,SAAU,QAAO;AACzC,MAAI,OAAO,OAAQ,QAAO;AAC1B,SAAO;;CAGT,AAAQ,cAA6B;EACnC,MAAM,EAAE,SAAS,aAAa,KAAK;AAEnC,UAAQ,KAAK,MAAb;GACE,KAAK,UAAU;IAEb,MAAM,kBAAkB,QAAQ,IAAI;AACpC,QAAI,WAAW,YAAY,yBACzB,SAAQ,IAAI,kBAAkB;IAGhC,MAAM,cAAc,OAAO,KAAK,MAAM;AAGtC,QAAI,oBAAoB,OACtB,SAAQ,IAAI,kBAAkB;aACrB,WAAW,YAAY,yBAChC,QAAO,QAAQ,IAAI;AAGrB,WAAO;;GAGT,KAAK;AACH,QAAI,KAAK,SAAS,WAAW;KAE3B,MAAM,kBAAkB,QAAQ,IAAI;AACpC,SAAI,SACF,SAAQ,IAAI,kBAAkB;KAGhC,MAAM,QAAQ,OAAO,KAAK,MAAM;AAGhC,SAAI,oBAAoB,OACtB,SAAQ,IAAI,kBAAkB;cACrB,SACT,QAAO,QAAQ,IAAI;AAGrB,YAAO;;AAET,WAAO,OAAO,KAAK,MAAM;GAE3B,KAAK;AACH,QAAI,KAAK,SAAS,WAAW;KAE3B,MAAM,kBAAkB,QAAQ,IAAI;AACpC,SAAI,SACF,SAAQ,IAAI,kBAAkB;KAGhC,MAAM,QAAQ,OAAO,KAAK,MAAM;AAGhC,SAAI,oBAAoB,OACtB,SAAQ,IAAI,kBAAkB;cACrB,SACT,QAAO,QAAQ,IAAI;AAGrB,YAAO;;AAET,WAAO,UAAU,KAAK,MAAM;GAE9B,KAAK;AACH,QAAI,KAAK,SAAS,WAAW;KAE3B,MAAM,kBAAkB,QAAQ,IAAI;AACpC,SAAI,SACF,SAAQ,IAAI,kBAAkB;KAGhC,MAAM,QAAQ,OAAO,KAAK,MAAM;AAGhC,SAAI,oBAAoB,OACtB,SAAQ,IAAI,kBAAkB;cACrB,SACT,QAAO,QAAQ,IAAI;AAGrB,YAAO;;
AAET,WAAO,QAAQ,KAAK,MAAM;GAE5B,KAAK;AACH,QAAI,KAAK,SAAS,WAAW;KAE3B,MAAM,kBAAkB,QAAQ,IAAI;AACpC,SAAI,SACF,SAAQ,IAAI,kBAAkB;KAGhC,MAAM,QAAQ,OAAO,KAAK,MAAM;AAGhC,SAAI,oBAAoB,OACtB,SAAQ,IAAI,kBAAkB;cACrB,SACT,QAAO,QAAQ,IAAI;AAGrB,YAAO;;AAET,WAAO,OAAO,KAAK,MAAM;GAE3B,QACE,OAAM,IAAI,MAAM,qBAAqB,KAAK,OAAO;;;CAIvD,MAAc,mBAAyC;AACrD,MAAI;GACF,MAAM,UAAU,KAAK,OAAO,WAAW;GACvC,MAAM,WAAW,MAAM,MAAM,GAAG,QAAQ,WAAW;AACnD,OAAI,CAAC,SAAS,GACZ,QAAO,qBAAqB,SAAS;AAQvC,YALc,MAAM,SAAS,MAAM,EAGf,UAAU,EAAE,EAElB,KAAK,OAAO;IACxB,IAAI,EAAE;IACN,MAAM,EAAE;IACR,UAAU;IACV,eAAe;IACf,cAAc;KACZ,QAAQ;KACR,OAAO;KACP,WAAW;KACX,WAAW;KACZ;IACF,EAAE;UACG;AACN,UAAO,qBAAqB,SAAS;;;CAIzC,MAAc,iBAA8D;AAC1E,MAAI;GACF,MAAM,UAAU,KAAK,OAAO,WAAW;GACvC,MAAM,WAAW,MAAM,MAAM,GAAG,QAAQ,WAAW;AACnD,OAAI,CAAC,SAAS,GACZ,QAAO;IACL,OAAO;IACP,OAAO,0BAA0B,SAAS;IAC3C;GAMH,MAAM,UAHQ,MAAM,SAAS,MAAM,EAGf,UAAU,EAAE;AAGhC,OAAI,CAFa,OAAO,MAAM,MAAM,EAAE,SAAS,KAAK,MAAM,CAGxD,QAAO;IACL,OAAO;IACP,OAAO,UAAU,KAAK,MAAM,0BAA0B,OAAO,KAAK,MAAM,EAAE,KAAK,CAAC,KAAK,KAAK;IAC3F;AAGH,UAAO,EAAE,OAAO,MAAM;WACf,OAAO;AAEd,UAAO;IACL,OAAO;IACP,OAAO,+BAHO,KAAK,OAAO,WAAW,yBAGS,IAAI,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;IACzG;;;;;;;AAQP,SAAgB,eAAe,QAAkC;AAC/D,QAAO,IAAI,aAAa,OAAO;;;;;AAMjC,SAAgB,wBAAkC;CAChD,MAAM,WACH,QAAQ,IAAI,4BAA6C;CAC5D,MAAM,QAAQ,QAAQ,IAAI;CAE1B,IAAIA;AACJ,SAAQ,UAAR;EACE,KAAK;AACH,YAAS,QAAQ,IAAI;AACrB;EACF,KAAK;AACH,YAAS,QAAQ,IAAI;AACrB;EACF,KAAK;AACH,YAAS,QAAQ,IAAI;AACrB;EACF,KAAK;AACH,YAAS,QAAQ,IAAI,kBAAkB,QAAQ,IAAI;AACnD;EACF,KAAK,SAEH;;AAGJ,QAAO,eAAe;EACpB;EACA;EACA;EACA,SAAS,QAAQ,IAAI;EACrB,UAAU,QAAQ,IAAI;EACtB,gBAAgB,QAAQ,IAAI;EAC7B,CAAC;;;;;AAMJ,SAAgB,wBAAgD;CAC9D,MAAMC,YAAoC,EAAE;AAG5C,WAAU,KAAK;EACb,UAAU;EACV,WAAW;EACX,MAAM;EACP,CAAC;CAGF,MAAM,YAAY,QAAQ,IAAI;AAC9B,WAAU,KAAK;EACb,UAAU;EACV,WACE,QAAQ,UAAU,IAAI,QAAQ,QAAQ,IAAI,0BAA0B;EACtE,MAAM,YAAY,SAAS;EAC3B,QAAQ,CAAC,YAAY,qCAAqC;EAC3D,CAAC;CAGF,MAAM,eAAe,QAAQ,IAAI;AACjC,WAAU,KAAK;EACb,UAAU;EACV,WACE,QAAQ,aAAa,IAAI,QAAQ,QAAQ,IAAI,0BAA0B;EACzE,MAAM,eAAe,SAAS;EAC9B,QAAQ,CAAC,eAAe,wCAAwC;EA
CjE,CAAC;CAGF,MAAM,aAAa,QAAQ,IAAI;AAC/B,WAAU,KAAK;EACb,UAAU;EACV,WACE,QAAQ,WAAW,IAAI,QAAQ,QAAQ,IAAI,0BAA0B;EACvE,MAAM,aAAa,SAAS;EAC5B,QAAQ,CAAC,aAAa,sCAAsC;EAC7D,CAAC;CAGF,MAAM,YAAY,QAAQ,IAAI,kBAAkB,QAAQ,IAAI;AAC5D,WAAU,KAAK;EACb,UAAU;EACV,WACE,QAAQ,UAAU,IAAI,QAAQ,QAAQ,IAAI,0BAA0B;EACtE,MAAM,YAAY,SAAS;EAC3B,QAAQ,CAAC,YAAY,qCAAqC;EAC3D,CAAC;AAEF,QAAO"}
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
import { LegacyConfig, LegacyProviderName, ModelCapabilities, ModelInfo, Provider, ProviderAvailability, ProviderConfig, ProviderMode, ProviderName } from "./types.js";
|
|
2
|
+
import { createProvider, createProviderFromEnv, getAvailableProviders } from "./factory.js";
|
|
3
|
+
import { DEFAULT_MODELS, MODELS, getDefaultModel, getModelInfo, getModelsForProvider, getRecommendedModels } from "./models.js";
|
|
4
|
+
import { ValidationResult, getEnvVarName, hasCredentials, isOllamaRunning, listOllamaModels, validateProvider as validateProvider$1 } from "./validation.js";
|
|
5
|
+
import { getAIProvider, validateProvider } from "./legacy.js";
|
|
6
|
+
export { DEFAULT_MODELS, LegacyConfig, LegacyProviderName, MODELS, ModelCapabilities, ModelInfo, Provider, ProviderAvailability, ProviderConfig, ProviderMode, ProviderName, ValidationResult, createProvider, createProviderFromEnv, getAIProvider, getAvailableProviders, getDefaultModel, getEnvVarName, getModelInfo, getModelsForProvider, getRecommendedModels, hasCredentials, isOllamaRunning, listOllamaModels, validateProvider as validateLegacyProvider, validateProvider$1 as validateProvider };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
import { DEFAULT_MODELS, MODELS, getDefaultModel, getModelInfo, getModelsForProvider, getRecommendedModels } from "./models.js";
|
|
2
|
+
import { createProvider, createProviderFromEnv, getAvailableProviders } from "./factory.js";
|
|
3
|
+
import { getEnvVarName, hasCredentials, isOllamaRunning, listOllamaModels, validateProvider as validateProvider$1 } from "./validation.js";
|
|
4
|
+
import { getAIProvider, validateProvider } from "./legacy.js";
|
|
5
|
+
|
|
6
|
+
export { DEFAULT_MODELS, MODELS, createProvider, createProviderFromEnv, getAIProvider, getAvailableProviders, getDefaultModel, getEnvVarName, getModelInfo, getModelsForProvider, getRecommendedModels, hasCredentials, isOllamaRunning, listOllamaModels, validateProvider as validateLegacyProvider, validateProvider$1 as validateProvider };
|
package/dist/legacy.d.ts
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { LegacyConfig } from "./types.js";
|
|
2
|
+
import { getRecommendedModels } from "./models.js";
|
|
3
|
+
import { LanguageModel } from "ai";
|
|
4
|
+
|
|
5
|
+
//#region src/legacy.d.ts
|
|
6
|
+
|
|
7
|
+
/**
|
|
8
|
+
* Get AI provider from legacy Config type
|
|
9
|
+
*
|
|
10
|
+
* @deprecated Use createProvider() instead
|
|
11
|
+
*/
|
|
12
|
+
declare function getAIProvider(config: LegacyConfig): LanguageModel;
|
|
13
|
+
/**
|
|
14
|
+
* Validate provider from legacy Config type
|
|
15
|
+
*
|
|
16
|
+
* @deprecated Use validateProvider() from './validation' instead
|
|
17
|
+
*/
|
|
18
|
+
declare function validateProvider(config: LegacyConfig): Promise<{
|
|
19
|
+
success: boolean;
|
|
20
|
+
error?: string;
|
|
21
|
+
}>;
|
|
22
|
+
//#endregion
|
|
23
|
+
export { getAIProvider, getRecommendedModels, validateProvider };
|
|
24
|
+
//# sourceMappingURL=legacy.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"legacy.d.ts","names":[],"sources":["../src/legacy.ts"],"sourcesContent":[],"mappings":";;;;;;;;;;;iBA8BgB,aAAA,SAAsB,eAAe;;;;;;iBAmC/B,gBAAA,SACZ,eACP"}
|
package/dist/legacy.js
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
import { getRecommendedModels } from "./models.js";
|
|
2
|
+
import { createProvider } from "./factory.js";
|
|
3
|
+
|
|
4
|
+
//#region src/legacy.ts
|
|
5
|
+
/**
 * Map legacy provider names to new ones.
 *
 * "claude" was renamed to "anthropic"; "custom" endpoints speak the
 * OpenAI-compatible API and therefore map to "openai". Any other name
 * is already a current provider name and passes through unchanged.
 */
function mapLegacyProvider(legacy) {
  if (legacy === "claude") return "anthropic";
  if (legacy === "custom") return "openai";
  return legacy;
}
|
|
15
|
+
/**
 * Get AI provider from legacy Config type.
 *
 * Normalizes the legacy provider name, resolves the matching API key
 * from the environment (gemini falls back from GOOGLE_API_KEY to
 * GEMINI_API_KEY), then builds a provider and returns its language model.
 *
 * @deprecated Use createProvider() instead
 */
function getAIProvider(config) {
  const provider = mapLegacyProvider(config.aiProvider);
  // Resolve credentials from the environment; Ollama needs none.
  let apiKey;
  if (provider === "openai") {
    apiKey = process.env.OPENAI_API_KEY;
  } else if (provider === "anthropic") {
    apiKey = process.env.ANTHROPIC_API_KEY;
  } else if (provider === "mistral") {
    apiKey = process.env.MISTRAL_API_KEY;
  } else if (provider === "gemini") {
    apiKey = process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY;
  }
  const instance = createProvider({
    provider,
    model: config.aiModel,
    apiKey,
    baseUrl: config.customEndpoint
  });
  return instance.getModel();
}
|
|
44
|
+
/**
 * Validate provider from legacy Config type.
 *
 * Only checks that the expected API-key environment variable is set;
 * Ollama is assumed reachable and always validates successfully.
 *
 * @deprecated Use validateProvider() from './validation' instead
 */
async function validateProvider(config) {
  const provider = mapLegacyProvider(config.aiProvider);
  // Local Ollama needs no credentials, so nothing to check here.
  if (provider === "ollama") return { success: true };
  // Each cloud provider is satisfied if ANY of its listed env vars is set.
  const requirements = [
    ["anthropic", ["ANTHROPIC_API_KEY"]],
    ["openai", ["OPENAI_API_KEY"]],
    ["mistral", ["MISTRAL_API_KEY"]],
    ["gemini", ["GOOGLE_API_KEY", "GEMINI_API_KEY"]]
  ];
  for (const [name, vars] of requirements) {
    if (provider !== name) continue;
    if (vars.some((v) => process.env[v])) break;
    return {
      success: false,
      error: `${vars.join(" or ")} environment variable not set`
    };
  }
  return { success: true };
}
|
|
70
|
+
|
|
71
|
+
//#endregion
|
|
72
|
+
export { getAIProvider, getRecommendedModels, validateProvider };
|
|
73
|
+
//# sourceMappingURL=legacy.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"legacy.js","names":["apiKey: string | undefined"],"sources":["../src/legacy.ts"],"sourcesContent":["/**\n * Legacy compatibility layer\n *\n * Provides backwards-compatible functions for existing code\n * that uses the old provider API from contractspec-workspace.\n */\nimport type { LanguageModel } from 'ai';\nimport type { LegacyConfig, ProviderName } from './types';\nimport { createProvider } from './factory';\nimport { getRecommendedModels as getModels } from './models';\n\n/**\n * Map legacy provider names to new ones\n */\nfunction mapLegacyProvider(legacy: LegacyConfig['aiProvider']): ProviderName {\n switch (legacy) {\n case 'claude':\n return 'anthropic';\n case 'custom':\n return 'openai'; // Custom endpoints use OpenAI-compatible API\n default:\n return legacy as ProviderName;\n }\n}\n\n/**\n * Get AI provider from legacy Config type\n *\n * @deprecated Use createProvider() instead\n */\nexport function getAIProvider(config: LegacyConfig): LanguageModel {\n const provider = mapLegacyProvider(config.aiProvider);\n\n // Get API key from environment\n let apiKey: string | undefined;\n switch (provider) {\n case 'openai':\n apiKey = process.env.OPENAI_API_KEY;\n break;\n case 'anthropic':\n apiKey = process.env.ANTHROPIC_API_KEY;\n break;\n case 'mistral':\n apiKey = process.env.MISTRAL_API_KEY;\n break;\n case 'gemini':\n apiKey = process.env.GOOGLE_API_KEY ?? 
process.env.GEMINI_API_KEY;\n break;\n }\n\n const instance = createProvider({\n provider,\n model: config.aiModel,\n apiKey,\n baseUrl: config.customEndpoint,\n });\n\n return instance.getModel();\n}\n\n/**\n * Validate provider from legacy Config type\n *\n * @deprecated Use validateProvider() from './validation' instead\n */\nexport async function validateProvider(\n config: LegacyConfig\n): Promise<{ success: boolean; error?: string }> {\n const provider = mapLegacyProvider(config.aiProvider);\n\n // For Ollama, we can't easily validate without making a request\n if (provider === 'ollama') {\n return { success: true };\n }\n\n // For cloud providers, check API key exists\n if (provider === 'anthropic' && !process.env.ANTHROPIC_API_KEY) {\n return {\n success: false,\n error: 'ANTHROPIC_API_KEY environment variable not set',\n };\n }\n\n if (provider === 'openai' && !process.env.OPENAI_API_KEY) {\n return {\n success: false,\n error: 'OPENAI_API_KEY environment variable not set',\n };\n }\n\n if (provider === 'mistral' && !process.env.MISTRAL_API_KEY) {\n return {\n success: false,\n error: 'MISTRAL_API_KEY environment variable not set',\n };\n }\n\n if (\n provider === 'gemini' &&\n !process.env.GOOGLE_API_KEY &&\n !process.env.GEMINI_API_KEY\n ) {\n return {\n success: false,\n error: 'GOOGLE_API_KEY or GEMINI_API_KEY environment variable not set',\n };\n }\n\n return { success: true };\n}\n\n/**\n * Get recommended models for legacy provider name\n *\n * @deprecated Use getModelsForProvider() instead\n */\nexport { getModels as getRecommendedModels 
};\n"],"mappings":";;;;;;;AAcA,SAAS,kBAAkB,QAAkD;AAC3E,SAAQ,QAAR;EACE,KAAK,SACH,QAAO;EACT,KAAK,SACH,QAAO;EACT,QACE,QAAO;;;;;;;;AASb,SAAgB,cAAc,QAAqC;CACjE,MAAM,WAAW,kBAAkB,OAAO,WAAW;CAGrD,IAAIA;AACJ,SAAQ,UAAR;EACE,KAAK;AACH,YAAS,QAAQ,IAAI;AACrB;EACF,KAAK;AACH,YAAS,QAAQ,IAAI;AACrB;EACF,KAAK;AACH,YAAS,QAAQ,IAAI;AACrB;EACF,KAAK;AACH,YAAS,QAAQ,IAAI,kBAAkB,QAAQ,IAAI;AACnD;;AAUJ,QAPiB,eAAe;EAC9B;EACA,OAAO,OAAO;EACd;EACA,SAAS,OAAO;EACjB,CAAC,CAEc,UAAU;;;;;;;AAQ5B,eAAsB,iBACpB,QAC+C;CAC/C,MAAM,WAAW,kBAAkB,OAAO,WAAW;AAGrD,KAAI,aAAa,SACf,QAAO,EAAE,SAAS,MAAM;AAI1B,KAAI,aAAa,eAAe,CAAC,QAAQ,IAAI,kBAC3C,QAAO;EACL,SAAS;EACT,OAAO;EACR;AAGH,KAAI,aAAa,YAAY,CAAC,QAAQ,IAAI,eACxC,QAAO;EACL,SAAS;EACT,OAAO;EACR;AAGH,KAAI,aAAa,aAAa,CAAC,QAAQ,IAAI,gBACzC,QAAO;EACL,SAAS;EACT,OAAO;EACR;AAGH,KACE,aAAa,YACb,CAAC,QAAQ,IAAI,kBACb,CAAC,QAAQ,IAAI,eAEb,QAAO;EACL,SAAS;EACT,OAAO;EACR;AAGH,QAAO,EAAE,SAAS,MAAM"}
|
package/dist/models.d.ts
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
import { ModelInfo, ProviderName } from "./types.js";
|
|
2
|
+
|
|
3
|
+
//#region src/models.d.ts
|
|
4
|
+
|
|
5
|
+
/**
|
|
6
|
+
* Default models per provider
|
|
7
|
+
*/
|
|
8
|
+
declare const DEFAULT_MODELS: Record<ProviderName, string>;
|
|
9
|
+
/**
|
|
10
|
+
* All recommended models with metadata
|
|
11
|
+
*/
|
|
12
|
+
declare const MODELS: ModelInfo[];
|
|
13
|
+
/**
|
|
14
|
+
* Get models for a specific provider
|
|
15
|
+
*/
|
|
16
|
+
declare function getModelsForProvider(provider: ProviderName): ModelInfo[];
|
|
17
|
+
/**
|
|
18
|
+
* Get model info by ID
|
|
19
|
+
*/
|
|
20
|
+
declare function getModelInfo(modelId: string): ModelInfo | undefined;
|
|
21
|
+
/**
|
|
22
|
+
* Get recommended models for a provider (legacy format)
|
|
23
|
+
*/
|
|
24
|
+
declare function getRecommendedModels(provider: ProviderName | 'claude' | 'custom'): string[];
|
|
25
|
+
/**
|
|
26
|
+
* Get default model for a provider
|
|
27
|
+
*/
|
|
28
|
+
declare function getDefaultModel(provider: ProviderName): string;
|
|
29
|
+
//#endregion
|
|
30
|
+
export { DEFAULT_MODELS, MODELS, getDefaultModel, getModelInfo, getModelsForProvider, getRecommendedModels };
|
|
31
|
+
//# sourceMappingURL=models.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"models.d.ts","names":[],"sources":["../src/models.ts"],"sourcesContent":[],"mappings":";;;;AAmBA;AAwOA;AAOA;AAOgB,cAjQH,cAiQuB,EAjQP,MAkQjB,CAlQwB,YAkQZ,EAAA,MAAA,CAAA;AAkBxB;;;cAzQa,QAAQ;;;;iBAwOL,oBAAA,WAA+B,eAAe;;;;iBAO9C,YAAA,mBAA+B;;;;iBAO/B,oBAAA,WACJ;;;;iBAkBI,eAAA,WAA0B"}
|
package/dist/models.js
ADDED
|
@@ -0,0 +1,300 @@
|
|
|
1
|
+
//#region src/models.ts
|
|
2
|
+
/**
 * Default models per provider.
 *
 * Used as the fallback model id when a configuration omits an explicit one.
 */
const DEFAULT_MODELS = Object.fromEntries([
  ["ollama", "llama3.2"],
  ["openai", "gpt-4o"],
  ["anthropic", "claude-sonnet-4-20250514"],
  ["mistral", "mistral-large-latest"],
  ["gemini", "gemini-2.0-flash"]
]);
|
|
12
|
+
/**
 * All recommended models with metadata.
 *
 * Entries are grouped by provider. Local Ollama models carry no
 * costPerMillion entry (the key is omitted entirely, not set to undefined).
 */
const defineModel = (id, name, provider, contextWindow, capabilities, costPerMillion) => ({
  id,
  name,
  provider,
  contextWindow,
  capabilities,
  // Only attach pricing when known; free/local models omit the key.
  ...(costPerMillion ? { costPerMillion } : {})
});

const MODELS = [
  // Ollama (local)
  defineModel("llama3.2", "Llama 3.2", "ollama", 128000, { vision: false, tools: true, reasoning: false, streaming: true }),
  defineModel("codellama", "Code Llama", "ollama", 16000, { vision: false, tools: false, reasoning: false, streaming: true }),
  defineModel("deepseek-coder", "DeepSeek Coder", "ollama", 16000, { vision: false, tools: false, reasoning: false, streaming: true }),
  defineModel("mistral", "Mistral 7B", "ollama", 32000, { vision: false, tools: false, reasoning: false, streaming: true }),
  // OpenAI
  defineModel("gpt-4o", "GPT-4o", "openai", 128000, { vision: true, tools: true, reasoning: false, streaming: true }, { input: 2.5, output: 10 }),
  defineModel("gpt-4o-mini", "GPT-4o Mini", "openai", 128000, { vision: true, tools: true, reasoning: false, streaming: true }, { input: 0.15, output: 0.6 }),
  defineModel("o1", "o1", "openai", 200000, { vision: true, tools: true, reasoning: true, streaming: true }, { input: 15, output: 60 }),
  defineModel("o1-mini", "o1 Mini", "openai", 128000, { vision: false, tools: true, reasoning: true, streaming: true }, { input: 3, output: 12 }),
  // Anthropic
  defineModel("claude-sonnet-4-20250514", "Claude Sonnet 4", "anthropic", 200000, { vision: true, tools: true, reasoning: true, streaming: true }, { input: 3, output: 15 }),
  defineModel("claude-3-5-sonnet-20241022", "Claude 3.5 Sonnet", "anthropic", 200000, { vision: true, tools: true, reasoning: false, streaming: true }, { input: 3, output: 15 }),
  defineModel("claude-3-5-haiku-20241022", "Claude 3.5 Haiku", "anthropic", 200000, { vision: true, tools: true, reasoning: false, streaming: true }, { input: 0.8, output: 4 }),
  // Mistral
  defineModel("mistral-large-latest", "Mistral Large", "mistral", 128000, { vision: false, tools: true, reasoning: false, streaming: true }, { input: 2, output: 6 }),
  defineModel("codestral-latest", "Codestral", "mistral", 32000, { vision: false, tools: true, reasoning: false, streaming: true }, { input: 0.2, output: 0.6 }),
  defineModel("mistral-small-latest", "Mistral Small", "mistral", 32000, { vision: false, tools: true, reasoning: false, streaming: true }, { input: 0.2, output: 0.6 }),
  // Gemini
  defineModel("gemini-2.0-flash", "Gemini 2.0 Flash", "gemini", 1000000, { vision: true, tools: true, reasoning: false, streaming: true }, { input: 0.075, output: 0.3 }),
  defineModel("gemini-2.5-pro-preview-06-05", "Gemini 2.5 Pro", "gemini", 1000000, { vision: true, tools: true, reasoning: true, streaming: true }, { input: 1.25, output: 10 }),
  defineModel("gemini-2.5-flash-preview-05-20", "Gemini 2.5 Flash", "gemini", 1000000, { vision: true, tools: true, reasoning: true, streaming: true }, { input: 0.15, output: 0.6 })
];
|
|
273
|
+
/**
 * Get models for a specific provider.
 *
 * Returns every entry of MODELS whose provider matches, preserving
 * the catalog's declaration order.
 */
function getModelsForProvider(provider) {
  const matches = [];
  for (const entry of MODELS) {
    if (entry.provider === provider) matches.push(entry);
  }
  return matches;
}
|
|
279
|
+
/**
 * Get model info by ID.
 *
 * Returns the first catalog entry with a matching id, or undefined
 * when the id is unknown.
 */
function getModelInfo(modelId) {
  for (const entry of MODELS) {
    if (entry.id === modelId) return entry;
  }
  return undefined;
}
|
|
285
|
+
/**
 * Get recommended models for a provider (legacy format).
 *
 * Accepts legacy names ("claude", "custom") as well as current ones
 * and returns the matching model ids only.
 */
function getRecommendedModels(provider) {
  // Normalize legacy aliases to their current provider names.
  let normalized = provider;
  if (provider === "claude") normalized = "anthropic";
  else if (provider === "custom") normalized = "openai";
  return getModelsForProvider(normalized).map((entry) => entry.id);
}
|
|
291
|
+
/**
 * Get default model for a provider.
 *
 * Looks the provider up in DEFAULT_MODELS; an unknown provider name
 * yields undefined.
 */
function getDefaultModel(provider) {
  const { [provider]: defaultModel } = DEFAULT_MODELS;
  return defaultModel;
}
|
|
297
|
+
|
|
298
|
+
//#endregion
|
|
299
|
+
export { DEFAULT_MODELS, MODELS, getDefaultModel, getModelInfo, getModelsForProvider, getRecommendedModels };
|
|
300
|
+
//# sourceMappingURL=models.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"models.js","names":["DEFAULT_MODELS: Record<ProviderName, string>","MODELS: ModelInfo[]"],"sources":["../src/models.ts"],"sourcesContent":["/**\n * Model definitions and recommendations\n */\nimport type { ModelInfo, ProviderName } from './types';\n\n/**\n * Default models per provider\n */\nexport const DEFAULT_MODELS: Record<ProviderName, string> = {\n ollama: 'llama3.2',\n openai: 'gpt-4o',\n anthropic: 'claude-sonnet-4-20250514',\n mistral: 'mistral-large-latest',\n gemini: 'gemini-2.0-flash',\n};\n\n/**\n * All recommended models with metadata\n */\nexport const MODELS: ModelInfo[] = [\n // Ollama\n {\n id: 'llama3.2',\n name: 'Llama 3.2',\n provider: 'ollama',\n contextWindow: 128000,\n capabilities: {\n vision: false,\n tools: true,\n reasoning: false,\n streaming: true,\n },\n },\n {\n id: 'codellama',\n name: 'Code Llama',\n provider: 'ollama',\n contextWindow: 16000,\n capabilities: {\n vision: false,\n tools: false,\n reasoning: false,\n streaming: true,\n },\n },\n {\n id: 'deepseek-coder',\n name: 'DeepSeek Coder',\n provider: 'ollama',\n contextWindow: 16000,\n capabilities: {\n vision: false,\n tools: false,\n reasoning: false,\n streaming: true,\n },\n },\n {\n id: 'mistral',\n name: 'Mistral 7B',\n provider: 'ollama',\n contextWindow: 32000,\n capabilities: {\n vision: false,\n tools: false,\n reasoning: false,\n streaming: true,\n },\n },\n\n // OpenAI\n {\n id: 'gpt-4o',\n name: 'GPT-4o',\n provider: 'openai',\n contextWindow: 128000,\n capabilities: {\n vision: true,\n tools: true,\n reasoning: false,\n streaming: true,\n },\n costPerMillion: { input: 2.5, output: 10 },\n },\n {\n id: 'gpt-4o-mini',\n name: 'GPT-4o Mini',\n provider: 'openai',\n contextWindow: 128000,\n capabilities: {\n vision: true,\n tools: true,\n reasoning: false,\n streaming: true,\n },\n costPerMillion: { input: 0.15, output: 0.6 },\n },\n {\n id: 'o1',\n name: 'o1',\n provider: 'openai',\n contextWindow: 200000,\n capabilities: {\n vision: true,\n 
tools: true,\n reasoning: true,\n streaming: true,\n },\n costPerMillion: { input: 15, output: 60 },\n },\n {\n id: 'o1-mini',\n name: 'o1 Mini',\n provider: 'openai',\n contextWindow: 128000,\n capabilities: {\n vision: false,\n tools: true,\n reasoning: true,\n streaming: true,\n },\n costPerMillion: { input: 3, output: 12 },\n },\n\n // Anthropic\n {\n id: 'claude-sonnet-4-20250514',\n name: 'Claude Sonnet 4',\n provider: 'anthropic',\n contextWindow: 200000,\n capabilities: {\n vision: true,\n tools: true,\n reasoning: true,\n streaming: true,\n },\n costPerMillion: { input: 3, output: 15 },\n },\n {\n id: 'claude-3-5-sonnet-20241022',\n name: 'Claude 3.5 Sonnet',\n provider: 'anthropic',\n contextWindow: 200000,\n capabilities: {\n vision: true,\n tools: true,\n reasoning: false,\n streaming: true,\n },\n costPerMillion: { input: 3, output: 15 },\n },\n {\n id: 'claude-3-5-haiku-20241022',\n name: 'Claude 3.5 Haiku',\n provider: 'anthropic',\n contextWindow: 200000,\n capabilities: {\n vision: true,\n tools: true,\n reasoning: false,\n streaming: true,\n },\n costPerMillion: { input: 0.8, output: 4 },\n },\n\n // Mistral\n {\n id: 'mistral-large-latest',\n name: 'Mistral Large',\n provider: 'mistral',\n contextWindow: 128000,\n capabilities: {\n vision: false,\n tools: true,\n reasoning: false,\n streaming: true,\n },\n costPerMillion: { input: 2, output: 6 },\n },\n {\n id: 'codestral-latest',\n name: 'Codestral',\n provider: 'mistral',\n contextWindow: 32000,\n capabilities: {\n vision: false,\n tools: true,\n reasoning: false,\n streaming: true,\n },\n costPerMillion: { input: 0.2, output: 0.6 },\n },\n {\n id: 'mistral-small-latest',\n name: 'Mistral Small',\n provider: 'mistral',\n contextWindow: 32000,\n capabilities: {\n vision: false,\n tools: true,\n reasoning: false,\n streaming: true,\n },\n costPerMillion: { input: 0.2, output: 0.6 },\n },\n\n // Gemini\n {\n id: 'gemini-2.0-flash',\n name: 'Gemini 2.0 Flash',\n provider: 'gemini',\n contextWindow: 
1000000,\n capabilities: {\n vision: true,\n tools: true,\n reasoning: false,\n streaming: true,\n },\n costPerMillion: { input: 0.075, output: 0.3 },\n },\n {\n id: 'gemini-2.5-pro-preview-06-05',\n name: 'Gemini 2.5 Pro',\n provider: 'gemini',\n contextWindow: 1000000,\n capabilities: {\n vision: true,\n tools: true,\n reasoning: true,\n streaming: true,\n },\n costPerMillion: { input: 1.25, output: 10 },\n },\n {\n id: 'gemini-2.5-flash-preview-05-20',\n name: 'Gemini 2.5 Flash',\n provider: 'gemini',\n contextWindow: 1000000,\n capabilities: {\n vision: true,\n tools: true,\n reasoning: true,\n streaming: true,\n },\n costPerMillion: { input: 0.15, output: 0.6 },\n },\n];\n\n/**\n * Get models for a specific provider\n */\nexport function getModelsForProvider(provider: ProviderName): ModelInfo[] {\n return MODELS.filter((m) => m.provider === provider);\n}\n\n/**\n * Get model info by ID\n */\nexport function getModelInfo(modelId: string): ModelInfo | undefined {\n return MODELS.find((m) => m.id === modelId);\n}\n\n/**\n * Get recommended models for a provider (legacy format)\n */\nexport function getRecommendedModels(\n provider: ProviderName | 'claude' | 'custom'\n): string[] {\n // Handle legacy provider names\n const normalizedProvider =\n provider === 'claude'\n ? 'anthropic'\n : provider === 'custom'\n ? 
'openai'\n : provider;\n\n return getModelsForProvider(normalizedProvider as ProviderName).map(\n (m) => m.id\n );\n}\n\n/**\n * Get default model for a provider\n */\nexport function getDefaultModel(provider: ProviderName): string {\n return DEFAULT_MODELS[provider];\n}\n"],"mappings":";;;;AAQA,MAAaA,iBAA+C;CAC1D,QAAQ;CACR,QAAQ;CACR,WAAW;CACX,SAAS;CACT,QAAQ;CACT;;;;AAKD,MAAaC,SAAsB;CAEjC;EACE,IAAI;EACJ,MAAM;EACN,UAAU;EACV,eAAe;EACf,cAAc;GACZ,QAAQ;GACR,OAAO;GACP,WAAW;GACX,WAAW;GACZ;EACF;CACD;EACE,IAAI;EACJ,MAAM;EACN,UAAU;EACV,eAAe;EACf,cAAc;GACZ,QAAQ;GACR,OAAO;GACP,WAAW;GACX,WAAW;GACZ;EACF;CACD;EACE,IAAI;EACJ,MAAM;EACN,UAAU;EACV,eAAe;EACf,cAAc;GACZ,QAAQ;GACR,OAAO;GACP,WAAW;GACX,WAAW;GACZ;EACF;CACD;EACE,IAAI;EACJ,MAAM;EACN,UAAU;EACV,eAAe;EACf,cAAc;GACZ,QAAQ;GACR,OAAO;GACP,WAAW;GACX,WAAW;GACZ;EACF;CAGD;EACE,IAAI;EACJ,MAAM;EACN,UAAU;EACV,eAAe;EACf,cAAc;GACZ,QAAQ;GACR,OAAO;GACP,WAAW;GACX,WAAW;GACZ;EACD,gBAAgB;GAAE,OAAO;GAAK,QAAQ;GAAI;EAC3C;CACD;EACE,IAAI;EACJ,MAAM;EACN,UAAU;EACV,eAAe;EACf,cAAc;GACZ,QAAQ;GACR,OAAO;GACP,WAAW;GACX,WAAW;GACZ;EACD,gBAAgB;GAAE,OAAO;GAAM,QAAQ;GAAK;EAC7C;CACD;EACE,IAAI;EACJ,MAAM;EACN,UAAU;EACV,eAAe;EACf,cAAc;GACZ,QAAQ;GACR,OAAO;GACP,WAAW;GACX,WAAW;GACZ;EACD,gBAAgB;GAAE,OAAO;GAAI,QAAQ;GAAI;EAC1C;CACD;EACE,IAAI;EACJ,MAAM;EACN,UAAU;EACV,eAAe;EACf,cAAc;GACZ,QAAQ;GACR,OAAO;GACP,WAAW;GACX,WAAW;GACZ;EACD,gBAAgB;GAAE,OAAO;GAAG,QAAQ;GAAI;EACzC;CAGD;EACE,IAAI;EACJ,MAAM;EACN,UAAU;EACV,eAAe;EACf,cAAc;GACZ,QAAQ;GACR,OAAO;GACP,WAAW;GACX,WAAW;GACZ;EACD,gBAAgB;GAAE,OAAO;GAAG,QAAQ;GAAI;EACzC;CACD;EACE,IAAI;EACJ,MAAM;EACN,UAAU;EACV,eAAe;EACf,cAAc;GACZ,QAAQ;GACR,OAAO;GACP,WAAW;GACX,WAAW;GACZ;EACD,gBAAgB;GAAE,OAAO;GAAG,QAAQ;GAAI;EACzC;CACD;EACE,IAAI;EACJ,MAAM;EACN,UAAU;EACV,eAAe;EACf,cAAc;GACZ,QAAQ;GACR,OAAO;GACP,WAAW;GACX,WAAW;GACZ;EACD,gBAAgB;GAAE,OAAO;GAAK,QAAQ;GAAG;EAC1C;CAGD;EACE,IAAI;EACJ,MAAM;EACN,UAAU;EACV,eAAe;EACf,cAAc;GACZ,QAAQ;GACR,OAAO;GACP,WAAW;GACX,WAAW;GACZ;EACD,gBAAgB;GAAE,OAAO;GAAG,QAAQ;GAAG;EACxC;CACD;EACE,IAAI;EACJ,MAAM;EACN,UAAU;EACV,eAAe;EACf
,cAAc;GACZ,QAAQ;GACR,OAAO;GACP,WAAW;GACX,WAAW;GACZ;EACD,gBAAgB;GAAE,OAAO;GAAK,QAAQ;GAAK;EAC5C;CACD;EACE,IAAI;EACJ,MAAM;EACN,UAAU;EACV,eAAe;EACf,cAAc;GACZ,QAAQ;GACR,OAAO;GACP,WAAW;GACX,WAAW;GACZ;EACD,gBAAgB;GAAE,OAAO;GAAK,QAAQ;GAAK;EAC5C;CAGD;EACE,IAAI;EACJ,MAAM;EACN,UAAU;EACV,eAAe;EACf,cAAc;GACZ,QAAQ;GACR,OAAO;GACP,WAAW;GACX,WAAW;GACZ;EACD,gBAAgB;GAAE,OAAO;GAAO,QAAQ;GAAK;EAC9C;CACD;EACE,IAAI;EACJ,MAAM;EACN,UAAU;EACV,eAAe;EACf,cAAc;GACZ,QAAQ;GACR,OAAO;GACP,WAAW;GACX,WAAW;GACZ;EACD,gBAAgB;GAAE,OAAO;GAAM,QAAQ;GAAI;EAC5C;CACD;EACE,IAAI;EACJ,MAAM;EACN,UAAU;EACV,eAAe;EACf,cAAc;GACZ,QAAQ;GACR,OAAO;GACP,WAAW;GACX,WAAW;GACZ;EACD,gBAAgB;GAAE,OAAO;GAAM,QAAQ;GAAK;EAC7C;CACF;;;;AAKD,SAAgB,qBAAqB,UAAqC;AACxE,QAAO,OAAO,QAAQ,MAAM,EAAE,aAAa,SAAS;;;;;AAMtD,SAAgB,aAAa,SAAwC;AACnE,QAAO,OAAO,MAAM,MAAM,EAAE,OAAO,QAAQ;;;;;AAM7C,SAAgB,qBACd,UACU;AASV,QAAO,qBANL,aAAa,WACT,cACA,aAAa,WACX,WACA,SAEuD,CAAC,KAC7D,MAAM,EAAE,GACV;;;;;AAMH,SAAgB,gBAAgB,UAAgC;AAC9D,QAAO,eAAe"}
|
package/dist/types.d.ts
ADDED
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
import { LanguageModel } from "ai";
|
|
2
|
+
|
|
3
|
+
//#region src/types.d.ts
|
|
4
|
+
|
|
5
|
+
/**
|
|
6
|
+
* Supported AI providers
|
|
7
|
+
*/
|
|
8
|
+
type ProviderName = 'ollama' | 'openai' | 'anthropic' | 'mistral' | 'gemini';
|
|
9
|
+
/**
|
|
10
|
+
* Legacy provider names (for backwards compatibility)
|
|
11
|
+
*/
|
|
12
|
+
type LegacyProviderName = 'claude' | 'openai' | 'ollama' | 'custom';
|
|
13
|
+
/**
|
|
14
|
+
* Provider mode determines how API keys are resolved
|
|
15
|
+
*/
|
|
16
|
+
type ProviderMode = 'local' | 'byok' | 'managed';
|
|
17
|
+
/**
|
|
18
|
+
* Configuration for creating a provider
|
|
19
|
+
*/
|
|
20
|
+
interface ProviderConfig {
|
|
21
|
+
/** Which provider to use */
|
|
22
|
+
provider: ProviderName;
|
|
23
|
+
/** Specific model to use (optional, uses provider default) */
|
|
24
|
+
model?: string;
|
|
25
|
+
/** API key for BYOK mode */
|
|
26
|
+
apiKey?: string;
|
|
27
|
+
/** Custom base URL (for Ollama or custom endpoints) */
|
|
28
|
+
baseUrl?: string;
|
|
29
|
+
/** API proxy URL for managed mode */
|
|
30
|
+
proxyUrl?: string;
|
|
31
|
+
/** Organization/tenant ID for managed mode */
|
|
32
|
+
organizationId?: string;
|
|
33
|
+
}
|
|
34
|
+
/**
|
|
35
|
+
* Model capability flags
|
|
36
|
+
*/
|
|
37
|
+
interface ModelCapabilities {
|
|
38
|
+
/** Supports image/vision input */
|
|
39
|
+
vision: boolean;
|
|
40
|
+
/** Supports tool/function calling */
|
|
41
|
+
tools: boolean;
|
|
42
|
+
/** Supports extended thinking/reasoning */
|
|
43
|
+
reasoning: boolean;
|
|
44
|
+
/** Supports streaming */
|
|
45
|
+
streaming: boolean;
|
|
46
|
+
}
|
|
47
|
+
/**
|
|
48
|
+
* Model information
|
|
49
|
+
*/
|
|
50
|
+
interface ModelInfo {
|
|
51
|
+
/** Model identifier */
|
|
52
|
+
id: string;
|
|
53
|
+
/** Human-readable name */
|
|
54
|
+
name: string;
|
|
55
|
+
/** Provider this model belongs to */
|
|
56
|
+
provider: ProviderName;
|
|
57
|
+
/** Context window size in tokens */
|
|
58
|
+
contextWindow: number;
|
|
59
|
+
/** Model capabilities */
|
|
60
|
+
capabilities: ModelCapabilities;
|
|
61
|
+
/** Approximate cost per million tokens (input, output) */
|
|
62
|
+
costPerMillion?: {
|
|
63
|
+
input: number;
|
|
64
|
+
output: number;
|
|
65
|
+
};
|
|
66
|
+
}
|
|
67
|
+
/**
|
|
68
|
+
* AI Provider interface
|
|
69
|
+
*/
|
|
70
|
+
interface Provider {
|
|
71
|
+
/** Provider name */
|
|
72
|
+
readonly name: ProviderName;
|
|
73
|
+
/** Current model ID */
|
|
74
|
+
readonly model: string;
|
|
75
|
+
/** Provider mode */
|
|
76
|
+
readonly mode: ProviderMode;
|
|
77
|
+
/**
|
|
78
|
+
* Get the underlying AI SDK language model
|
|
79
|
+
*/
|
|
80
|
+
getModel(): LanguageModel;
|
|
81
|
+
/**
|
|
82
|
+
* List available models for this provider
|
|
83
|
+
*/
|
|
84
|
+
listModels(): Promise<ModelInfo[]>;
|
|
85
|
+
/**
|
|
86
|
+
* Validate the provider configuration/credentials
|
|
87
|
+
*/
|
|
88
|
+
validate(): Promise<{
|
|
89
|
+
valid: boolean;
|
|
90
|
+
error?: string;
|
|
91
|
+
}>;
|
|
92
|
+
}
|
|
93
|
+
/**
|
|
94
|
+
* Provider availability info
|
|
95
|
+
*/
|
|
96
|
+
interface ProviderAvailability {
|
|
97
|
+
provider: ProviderName;
|
|
98
|
+
available: boolean;
|
|
99
|
+
mode: ProviderMode;
|
|
100
|
+
reason?: string;
|
|
101
|
+
}
|
|
102
|
+
/**
|
|
103
|
+
* Legacy Config type for backwards compatibility
|
|
104
|
+
* This matches the Config type from contractspec-workspace
|
|
105
|
+
*/
|
|
106
|
+
interface LegacyConfig {
|
|
107
|
+
aiProvider: LegacyProviderName;
|
|
108
|
+
aiModel?: string;
|
|
109
|
+
customEndpoint?: string;
|
|
110
|
+
}
|
|
111
|
+
//#endregion
|
|
112
|
+
export { LegacyConfig, LegacyProviderName, ModelCapabilities, ModelInfo, Provider, ProviderAvailability, ProviderConfig, ProviderMode, ProviderName };
|
|
113
|
+
//# sourceMappingURL=types.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"types.d.ts","names":[],"sources":["../src/types.ts"],"sourcesContent":[],"mappings":";;;;AAkBA;AAKA;AAQA;AAkBiB,KAzCL,YAAA,GAyCsB,QAAA,GAAA,QAAA,GAAA,WAAA,GAAA,SAAA,GAAA,QAAA;AAclC;AAkBA;;AAMiB,KArEL,kBAAA,GAqEK,QAAA,GAAA,QAAA,GAAA,QAAA,GAAA,QAAA;;;;AAeH,KA/EF,YAAA,GA+EE,OAAA,GAAA,MAAA,GAAA,SAAA;;AAMd;AAWA;UAxFiB,cAAA;;YAEL;;;;;;;;;;;;;;;UAgBK,iBAAA;;;;;;;;;;;;;UAcA,SAAA;;;;;;YAML;;;;gBAII;;;;;;;;;;UAQC,QAAA;;iBAEA;;;;iBAIA;;;;cAKH;;;;gBAKE,QAAQ;;;;cAKV;;;;;;;;UAMG,oBAAA;YACL;;QAEJ;;;;;;;UAQS,YAAA;cACH"}
|
package/dist/types.js
ADDED
|
File without changes
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import { ProviderConfig, ProviderName } from "./types.js";
|
|
2
|
+
|
|
3
|
+
//#region src/validation.d.ts
|
|
4
|
+
|
|
5
|
+
/**
|
|
6
|
+
* Validation result
|
|
7
|
+
*/
|
|
8
|
+
interface ValidationResult {
|
|
9
|
+
valid: boolean;
|
|
10
|
+
error?: string;
|
|
11
|
+
warnings?: string[];
|
|
12
|
+
}
|
|
13
|
+
/**
|
|
14
|
+
* Validate a provider configuration
|
|
15
|
+
*/
|
|
16
|
+
declare function validateProvider(config: ProviderConfig): Promise<ValidationResult>;
|
|
17
|
+
/**
|
|
18
|
+
* Check if a provider has required credentials
|
|
19
|
+
*/
|
|
20
|
+
declare function hasCredentials(provider: ProviderName): boolean;
|
|
21
|
+
/**
|
|
22
|
+
* Get the environment variable name for a provider's API key
|
|
23
|
+
*/
|
|
24
|
+
declare function getEnvVarName(provider: ProviderName): string | null;
|
|
25
|
+
/**
|
|
26
|
+
* Check if Ollama is running
|
|
27
|
+
*/
|
|
28
|
+
declare function isOllamaRunning(baseUrl?: string): Promise<boolean>;
|
|
29
|
+
/**
|
|
30
|
+
* List available Ollama models
|
|
31
|
+
*/
|
|
32
|
+
declare function listOllamaModels(baseUrl?: string): Promise<string[]>;
|
|
33
|
+
//#endregion
|
|
34
|
+
export { ValidationResult, getEnvVarName, hasCredentials, isOllamaRunning, listOllamaModels, validateProvider };
|
|
35
|
+
//# sourceMappingURL=validation.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"validation.d.ts","names":[],"sources":["../src/validation.ts"],"sourcesContent":[],"mappings":";;;;AAkBA;;;AAEG,UAXc,gBAAA,CAWd;EAAO,KAAA,EAAA,OAAA;EAQM,KAAA,CAAA,EAAA,MAAA;EAoBA,QAAA,CAAA,EAAA,MAAa,EAAA;AAoB7B;AAcA;;;iBAhEsB,gBAAA,SACZ,iBACP,QAAQ;;;;iBAQK,cAAA,WAAyB;;;;iBAoBzB,aAAA,WAAwB;;;;iBAoBlB,eAAA,oBAEnB;;;;iBAYmB,gBAAA,oBAEnB"}
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
import { createProvider } from "./factory.js";
|
|
2
|
+
|
|
3
|
+
//#region src/validation.ts
|
|
4
|
+
/**
 * Validate a provider configuration
 *
 * Builds the concrete provider for the given config and delegates to
 * that provider's own validate() implementation.
 *
 * @param {ProviderConfig} config - Provider configuration to check.
 * @returns {Promise<ValidationResult>} Outcome of the provider's self-check.
 */
async function validateProvider(config) {
	const provider = createProvider(config);
	return provider.validate();
}
|
|
10
|
+
/**
 * Check if a provider has required credentials
 *
 * Inspects the API-key environment variable(s) associated with the
 * provider. Ollama runs locally and needs no credentials; unknown
 * provider names report false.
 *
 * @param {ProviderName} provider - Provider identifier.
 * @returns {boolean} true when credentials are present (or not needed).
 */
function hasCredentials(provider) {
	// Guard-clause chain, one provider per line.
	if (provider === "ollama") return true; // local runtime, no API key
	if (provider === "openai") return Boolean(process.env.OPENAI_API_KEY);
	if (provider === "anthropic") return Boolean(process.env.ANTHROPIC_API_KEY);
	if (provider === "mistral") return Boolean(process.env.MISTRAL_API_KEY);
	if (provider === "gemini") {
		// Google key takes precedence; GEMINI_API_KEY is the fallback alias.
		return Boolean(process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY);
	}
	return false; // unknown provider
}
|
|
23
|
+
/**
 * Lookup table: provider name -> environment variable holding its API key.
 * A Map (rather than a plain object) guarantees unknown keys — including
 * names like "constructor" — resolve to undefined, never to inherited
 * Object.prototype members.
 */
const PROVIDER_ENV_VARS = new Map([
	["openai", "OPENAI_API_KEY"],
	["anthropic", "ANTHROPIC_API_KEY"],
	["mistral", "MISTRAL_API_KEY"],
	["gemini", "GOOGLE_API_KEY"],
]);
/**
 * Get the environment variable name for a provider's API key
 *
 * @param {ProviderName} provider - Provider identifier.
 * @returns {string | null} Env var name, or null when the provider needs
 *   no key (ollama) or is unknown.
 */
function getEnvVarName(provider) {
	return PROVIDER_ENV_VARS.get(provider) ?? null;
}
|
|
36
|
+
/**
 * Check if Ollama is running
 *
 * Probes the Ollama HTTP API (`GET {baseUrl}/api/tags`). Any network
 * failure, non-OK status, invalid URL, or timeout is reported as
 * "not running" rather than thrown.
 *
 * @param {string} [baseUrl="http://localhost:11434"] - Ollama server base URL.
 * @param {number} [timeoutMs=2000] - Max time to wait for the probe; without
 *   this bound a black-holed host could hang the caller indefinitely.
 * @returns {Promise<boolean>} true if the server answered with an OK status.
 */
async function isOllamaRunning(baseUrl = "http://localhost:11434", timeoutMs = 2000) {
	try {
		const response = await fetch(`${baseUrl}/api/tags`, {
			// Bound the probe so an unresponsive host cannot stall callers.
			signal: AbortSignal.timeout(timeoutMs),
		});
		return response.ok;
	} catch {
		// Connection refused, DNS failure, bad URL, abort — all mean "not running".
		return false;
	}
}
|
|
46
|
+
/**
 * List available Ollama models
 *
 * Queries `GET {baseUrl}/api/tags` and extracts the model names. Any
 * network failure, non-OK status, or timeout yields an empty array
 * rather than an exception.
 *
 * @param {string} [baseUrl="http://localhost:11434"] - Ollama server base URL.
 * @param {number} [timeoutMs=2000] - Max time to wait before giving up;
 *   mirrors the bound used by isOllamaRunning.
 * @returns {Promise<string[]>} Model names, or [] when unavailable.
 */
async function listOllamaModels(baseUrl = "http://localhost:11434", timeoutMs = 2000) {
	try {
		const response = await fetch(`${baseUrl}/api/tags`, {
			// Bound the request so an unresponsive host cannot stall callers.
			signal: AbortSignal.timeout(timeoutMs),
		});
		if (!response.ok) return [];
		// Expected payload shape: { models?: { name: string }[] } — TODO confirm
		// against the Ollama /api/tags response format.
		const data = await response.json();
		return (data.models ?? []).map((m) => m.name);
	} catch {
		return [];
	}
}
|
|
58
|
+
|
|
59
|
+
//#endregion
|
|
60
|
+
export { getEnvVarName, hasCredentials, isOllamaRunning, listOllamaModels, validateProvider };
|
|
61
|
+
//# sourceMappingURL=validation.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"validation.js","names":[],"sources":["../src/validation.ts"],"sourcesContent":["/**\n * Provider validation utilities\n */\nimport type { ProviderName, ProviderConfig } from './types';\nimport { createProvider } from './factory';\n\n/**\n * Validation result\n */\nexport interface ValidationResult {\n valid: boolean;\n error?: string;\n warnings?: string[];\n}\n\n/**\n * Validate a provider configuration\n */\nexport async function validateProvider(\n config: ProviderConfig\n): Promise<ValidationResult> {\n const provider = createProvider(config);\n return provider.validate();\n}\n\n/**\n * Check if a provider has required credentials\n */\nexport function hasCredentials(provider: ProviderName): boolean {\n switch (provider) {\n case 'ollama':\n return true; // No credentials needed\n case 'openai':\n return Boolean(process.env.OPENAI_API_KEY);\n case 'anthropic':\n return Boolean(process.env.ANTHROPIC_API_KEY);\n case 'mistral':\n return Boolean(process.env.MISTRAL_API_KEY);\n case 'gemini':\n return Boolean(process.env.GOOGLE_API_KEY ?? 
process.env.GEMINI_API_KEY);\n default:\n return false;\n }\n}\n\n/**\n * Get the environment variable name for a provider's API key\n */\nexport function getEnvVarName(provider: ProviderName): string | null {\n switch (provider) {\n case 'ollama':\n return null;\n case 'openai':\n return 'OPENAI_API_KEY';\n case 'anthropic':\n return 'ANTHROPIC_API_KEY';\n case 'mistral':\n return 'MISTRAL_API_KEY';\n case 'gemini':\n return 'GOOGLE_API_KEY';\n default:\n return null;\n }\n}\n\n/**\n * Check if Ollama is running\n */\nexport async function isOllamaRunning(\n baseUrl = 'http://localhost:11434'\n): Promise<boolean> {\n try {\n const response = await fetch(`${baseUrl}/api/tags`);\n return response.ok;\n } catch {\n return false;\n }\n}\n\n/**\n * List available Ollama models\n */\nexport async function listOllamaModels(\n baseUrl = 'http://localhost:11434'\n): Promise<string[]> {\n try {\n const response = await fetch(`${baseUrl}/api/tags`);\n if (!response.ok) return [];\n\n const data = (await response.json()) as {\n models?: { name: string }[];\n };\n return (data.models ?? []).map((m) => m.name);\n } catch {\n return [];\n }\n}\n"],"mappings":";;;;;;AAkBA,eAAsB,iBACpB,QAC2B;AAE3B,QADiB,eAAe,OAAO,CACvB,UAAU;;;;;AAM5B,SAAgB,eAAe,UAAiC;AAC9D,SAAQ,UAAR;EACE,KAAK,SACH,QAAO;EACT,KAAK,SACH,QAAO,QAAQ,QAAQ,IAAI,eAAe;EAC5C,KAAK,YACH,QAAO,QAAQ,QAAQ,IAAI,kBAAkB;EAC/C,KAAK,UACH,QAAO,QAAQ,QAAQ,IAAI,gBAAgB;EAC7C,KAAK,SACH,QAAO,QAAQ,QAAQ,IAAI,kBAAkB,QAAQ,IAAI,eAAe;EAC1E,QACE,QAAO;;;;;;AAOb,SAAgB,cAAc,UAAuC;AACnE,SAAQ,UAAR;EACE,KAAK,SACH,QAAO;EACT,KAAK,SACH,QAAO;EACT,KAAK,YACH,QAAO;EACT,KAAK,UACH,QAAO;EACT,KAAK,SACH,QAAO;EACT,QACE,QAAO;;;;;;AAOb,eAAsB,gBACpB,UAAU,0BACQ;AAClB,KAAI;AAEF,UADiB,MAAM,MAAM,GAAG,QAAQ,WAAW,EACnC;SACV;AACN,SAAO;;;;;;AAOX,eAAsB,iBACpB,UAAU,0BACS;AACnB,KAAI;EACF,MAAM,WAAW,MAAM,MAAM,GAAG,QAAQ,WAAW;AACnD,MAAI,CAAC,SAAS,GAAI,QAAO,EAAE;AAK3B,WAHc,MAAM,SAAS,MAAM,EAGtB,UAAU,EAAE,EAAE,KAAK,MAAM,EAAE,KAAK;SACvC;AACN,SAAO,EAAE"}
|
package/package.json
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@contractspec/lib.ai-providers",
|
|
3
|
+
"version": "1.44.0",
|
|
4
|
+
"type": "module",
|
|
5
|
+
"main": "./dist/index.js",
|
|
6
|
+
"module": "./dist/index.js",
|
|
7
|
+
"types": "./dist/index.d.ts",
|
|
8
|
+
"files": [
|
|
9
|
+
"dist",
|
|
10
|
+
"README.md"
|
|
11
|
+
],
|
|
12
|
+
"scripts": {
|
|
13
|
+
"publish:pkg": "bun publish --tolerate-republish --ignore-scripts --verbose",
|
|
14
|
+
"publish:pkg:canary": "bun publish:pkg --tag canary",
|
|
15
|
+
"build": "bun build:types && bun build:bundle",
|
|
16
|
+
"build:bundle": "tsdown",
|
|
17
|
+
"build:types": "tsc --noEmit",
|
|
18
|
+
"dev": "bun build:bundle --watch",
|
|
19
|
+
"clean": "rimraf dist .turbo",
|
|
20
|
+
"lint": "bun lint:fix",
|
|
21
|
+
"lint:fix": "eslint src --fix",
|
|
22
|
+
"lint:check": "eslint src",
|
|
23
|
+
"test": "bun test"
|
|
24
|
+
},
|
|
25
|
+
"dependencies": {
|
|
26
|
+
"@ai-sdk/anthropic": "3.0.1",
|
|
27
|
+
"@ai-sdk/google": "3.0.1",
|
|
28
|
+
"@ai-sdk/mistral": "3.0.1",
|
|
29
|
+
"@ai-sdk/openai": "3.0.1",
|
|
30
|
+
"ai": "6.0.3",
|
|
31
|
+
"ollama-ai-provider": "^1.2.0",
|
|
32
|
+
"zod": "^4.1.13"
|
|
33
|
+
},
|
|
34
|
+
"devDependencies": {
|
|
35
|
+
"@contractspec/tool.tsdown": "1.44.0",
|
|
36
|
+
"@contractspec/tool.typescript": "1.44.0",
|
|
37
|
+
"tsdown": "^0.18.3",
|
|
38
|
+
"typescript": "^5.9.3"
|
|
39
|
+
},
|
|
40
|
+
"exports": {
|
|
41
|
+
".": "./dist/index.js",
|
|
42
|
+
"./factory": "./dist/factory.js",
|
|
43
|
+
"./legacy": "./dist/legacy.js",
|
|
44
|
+
"./models": "./dist/models.js",
|
|
45
|
+
"./types": "./dist/types.js",
|
|
46
|
+
"./validation": "./dist/validation.js",
|
|
47
|
+
"./*": "./*"
|
|
48
|
+
},
|
|
49
|
+
"publishConfig": {
|
|
50
|
+
"access": "public",
|
|
51
|
+
"exports": {
|
|
52
|
+
".": "./dist/index.js",
|
|
53
|
+
"./factory": "./dist/factory.js",
|
|
54
|
+
"./legacy": "./dist/legacy.js",
|
|
55
|
+
"./models": "./dist/models.js",
|
|
56
|
+
"./types": "./dist/types.js",
|
|
57
|
+
"./validation": "./dist/validation.js",
|
|
58
|
+
"./*": "./*"
|
|
59
|
+
},
|
|
60
|
+
"registry": "https://registry.npmjs.org/"
|
|
61
|
+
},
|
|
62
|
+
"license": "MIT",
|
|
63
|
+
"repository": {
|
|
64
|
+
"type": "git",
|
|
65
|
+
"url": "https://github.com/lssm-tech/contractspec.git",
|
|
66
|
+
"directory": "packages/libs/ai-providers"
|
|
67
|
+
},
|
|
68
|
+
"homepage": "https://contractspec.io"
|
|
69
|
+
}
|