@contractspec/lib.ai-providers 1.57.0 → 1.58.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,248 @@
// src/models.ts

/**
 * Default model id for each supported provider, used as the fallback
 * when no model is specified (see getDefaultModel).
 */
var DEFAULT_MODELS = {
  ollama: "llama3.2",
  openai: "gpt-4o",
  anthropic: "claude-sonnet-4-20250514",
  mistral: "mistral-large-latest",
  gemini: "gemini-2.0-flash"
};
// Shorthand for a capabilities record; every cataloged model streams.
const caps = (vision, tools, reasoning) => ({
  vision,
  tools,
  reasoning,
  streaming: true
});

/**
 * Catalog of every model known to this package, across all providers.
 * `costPerMillion` is priced per million tokens; entries without it
 * (the ollama models) carry no cost information.
 */
var MODELS = [
  { id: "llama3.2", name: "Llama 3.2", provider: "ollama", contextWindow: 128000, capabilities: caps(false, true, false) },
  { id: "codellama", name: "Code Llama", provider: "ollama", contextWindow: 16000, capabilities: caps(false, false, false) },
  { id: "deepseek-coder", name: "DeepSeek Coder", provider: "ollama", contextWindow: 16000, capabilities: caps(false, false, false) },
  { id: "mistral", name: "Mistral 7B", provider: "ollama", contextWindow: 32000, capabilities: caps(false, false, false) },
  { id: "gpt-4o", name: "GPT-4o", provider: "openai", contextWindow: 128000, capabilities: caps(true, true, false), costPerMillion: { input: 2.5, output: 10 } },
  { id: "gpt-4o-mini", name: "GPT-4o Mini", provider: "openai", contextWindow: 128000, capabilities: caps(true, true, false), costPerMillion: { input: 0.15, output: 0.6 } },
  { id: "o1", name: "o1", provider: "openai", contextWindow: 200000, capabilities: caps(true, true, true), costPerMillion: { input: 15, output: 60 } },
  { id: "o1-mini", name: "o1 Mini", provider: "openai", contextWindow: 128000, capabilities: caps(false, true, true), costPerMillion: { input: 3, output: 12 } },
  { id: "claude-sonnet-4-20250514", name: "Claude Sonnet 4", provider: "anthropic", contextWindow: 200000, capabilities: caps(true, true, true), costPerMillion: { input: 3, output: 15 } },
  { id: "claude-3-5-sonnet-20241022", name: "Claude 3.5 Sonnet", provider: "anthropic", contextWindow: 200000, capabilities: caps(true, true, false), costPerMillion: { input: 3, output: 15 } },
  { id: "claude-3-5-haiku-20241022", name: "Claude 3.5 Haiku", provider: "anthropic", contextWindow: 200000, capabilities: caps(true, true, false), costPerMillion: { input: 0.8, output: 4 } },
  { id: "mistral-large-latest", name: "Mistral Large", provider: "mistral", contextWindow: 128000, capabilities: caps(false, true, false), costPerMillion: { input: 2, output: 6 } },
  { id: "codestral-latest", name: "Codestral", provider: "mistral", contextWindow: 32000, capabilities: caps(false, true, false), costPerMillion: { input: 0.2, output: 0.6 } },
  { id: "mistral-small-latest", name: "Mistral Small", provider: "mistral", contextWindow: 32000, capabilities: caps(false, true, false), costPerMillion: { input: 0.2, output: 0.6 } },
  { id: "gemini-2.0-flash", name: "Gemini 2.0 Flash", provider: "gemini", contextWindow: 1e6, capabilities: caps(true, true, false), costPerMillion: { input: 0.075, output: 0.3 } },
  { id: "gemini-2.5-pro-preview-06-05", name: "Gemini 2.5 Pro", provider: "gemini", contextWindow: 1e6, capabilities: caps(true, true, true), costPerMillion: { input: 1.25, output: 10 } },
  { id: "gemini-2.5-flash-preview-05-20", name: "Gemini 2.5 Flash", provider: "gemini", contextWindow: 1e6, capabilities: caps(true, true, true), costPerMillion: { input: 0.15, output: 0.6 } }
];
/**
 * All cataloged models belonging to the given provider.
 * Returns an empty array when the provider has no models in MODELS.
 */
function getModelsForProvider(provider) {
  const matches = [];
  for (const model of MODELS) {
    if (model.provider === provider) {
      matches.push(model);
    }
  }
  return matches;
}
/**
 * Look up a model's catalog entry by its id.
 * Returns undefined when the id is not present in MODELS.
 */
function getModelInfo(modelId) {
  for (const model of MODELS) {
    if (model.id === modelId) {
      return model;
    }
  }
  return undefined;
}
/**
 * Ids of the models recommended for a provider.
 * "claude" is treated as an alias for "anthropic" and "custom" as an
 * alias for "openai"; any other provider value passes through unchanged.
 */
function getRecommendedModels(provider) {
  const aliases = { claude: "anthropic", custom: "openai" };
  const normalizedProvider = aliases[provider] ?? provider;
  const ids = [];
  for (const model of getModelsForProvider(normalizedProvider)) {
    ids.push(model.id);
  }
  return ids;
}
/**
 * The default model id configured for a provider, or undefined when
 * the provider has no entry in DEFAULT_MODELS.
 */
function getDefaultModel(provider) {
  const { [provider]: defaultModel } = DEFAULT_MODELS;
  return defaultModel;
}
241
+ export {
242
+ getRecommendedModels,
243
+ getModelsForProvider,
244
+ getModelInfo,
245
+ getDefaultModel,
246
+ MODELS,
247
+ DEFAULT_MODELS
248
+ };
File without changes