@botpress/zai 1.1.0 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/models.ts DELETED
@@ -1,394 +0,0 @@
-
-// This file is generated. Do not edit it manually.
-// See 'scripts/update-models.ts'
-
-/* eslint-disable */
-/* tslint:disable */
-
-export const Models = [
-  {
-    "id": "anthropic__claude-3-haiku-20240307",
-    "name": "Claude 3 Haiku",
-    "integration": "anthropic",
-    "input": {
-      "maxTokens": 200000
-    },
-    "output": {
-      "maxTokens": 4096
-    }
-  },
-  {
-    "id": "anthropic__claude-3-5-sonnet-20240620",
-    "name": "Claude 3.5 Sonnet",
-    "integration": "anthropic",
-    "input": {
-      "maxTokens": 200000
-    },
-    "output": {
-      "maxTokens": 4096
-    }
-  },
-  {
-    "id": "cerebras__llama3.1-70b",
-    "name": "Llama 3.1 70B",
-    "integration": "cerebras",
-    "input": {
-      "maxTokens": 8192
-    },
-    "output": {
-      "maxTokens": 8192
-    }
-  },
-  {
-    "id": "cerebras__llama3.1-8b",
-    "name": "Llama 3.1 8B",
-    "integration": "cerebras",
-    "input": {
-      "maxTokens": 8192
-    },
-    "output": {
-      "maxTokens": 8192
-    }
-  },
-  {
-    "id": "fireworks-ai__accounts/fireworks/models/deepseek-coder-v2-instruct",
-    "name": "DeepSeek Coder V2 Instruct",
-    "integration": "fireworks-ai",
-    "input": {
-      "maxTokens": 131072
-    },
-    "output": {
-      "maxTokens": 131072
-    }
-  },
-  {
-    "id": "fireworks-ai__accounts/fireworks/models/deepseek-coder-v2-lite-instruct",
-    "name": "DeepSeek Coder V2 Lite",
-    "integration": "fireworks-ai",
-    "input": {
-      "maxTokens": 163840
-    },
-    "output": {
-      "maxTokens": 163840
-    }
-  },
-  {
-    "id": "fireworks-ai__accounts/fireworks/models/firellava-13b",
-    "name": "FireLLaVA-13B",
-    "integration": "fireworks-ai",
-    "input": {
-      "maxTokens": 4096
-    },
-    "output": {
-      "maxTokens": 4096
-    }
-  },
-  {
-    "id": "fireworks-ai__accounts/fireworks/models/firefunction-v2",
-    "name": "Firefunction V2",
-    "integration": "fireworks-ai",
-    "input": {
-      "maxTokens": 8192
-    },
-    "output": {
-      "maxTokens": 8192
-    }
-  },
-  {
-    "id": "fireworks-ai__accounts/fireworks/models/gemma2-9b-it",
-    "name": "Gemma 2 9B Instruct",
-    "integration": "fireworks-ai",
-    "input": {
-      "maxTokens": 8192
-    },
-    "output": {
-      "maxTokens": 8192
-    }
-  },
-  {
-    "id": "fireworks-ai__accounts/fireworks/models/llama-v3p1-405b-instruct",
-    "name": "Llama 3.1 405B Instruct",
-    "integration": "fireworks-ai",
-    "input": {
-      "maxTokens": 131072
-    },
-    "output": {
-      "maxTokens": 131072
-    }
-  },
-  {
-    "id": "fireworks-ai__accounts/fireworks/models/llama-v3p1-70b-instruct",
-    "name": "Llama 3.1 70B Instruct",
-    "integration": "fireworks-ai",
-    "input": {
-      "maxTokens": 131072
-    },
-    "output": {
-      "maxTokens": 131072
-    }
-  },
-  {
-    "id": "fireworks-ai__accounts/fireworks/models/llama-v3p1-8b-instruct",
-    "name": "Llama 3.1 8B Instruct",
-    "integration": "fireworks-ai",
-    "input": {
-      "maxTokens": 131072
-    },
-    "output": {
-      "maxTokens": 131072
-    }
-  },
-  {
-    "id": "fireworks-ai__accounts/fireworks/models/mixtral-8x22b-instruct",
-    "name": "Mixtral MoE 8x22B Instruct",
-    "integration": "fireworks-ai",
-    "input": {
-      "maxTokens": 65536
-    },
-    "output": {
-      "maxTokens": 65536
-    }
-  },
-  {
-    "id": "fireworks-ai__accounts/fireworks/models/mixtral-8x7b-instruct",
-    "name": "Mixtral MoE 8x7B Instruct",
-    "integration": "fireworks-ai",
-    "input": {
-      "maxTokens": 32768
-    },
-    "output": {
-      "maxTokens": 32768
-    }
-  },
-  {
-    "id": "fireworks-ai__accounts/fireworks/models/mythomax-l2-13b",
-    "name": "MythoMax L2 13b",
-    "integration": "fireworks-ai",
-    "input": {
-      "maxTokens": 4096
-    },
-    "output": {
-      "maxTokens": 4096
-    }
-  },
-  {
-    "id": "fireworks-ai__accounts/fireworks/models/qwen2-72b-instruct",
-    "name": "Qwen2 72b Instruct",
-    "integration": "fireworks-ai",
-    "input": {
-      "maxTokens": 32768
-    },
-    "output": {
-      "maxTokens": 32768
-    }
-  },
-  {
-    "id": "groq__gemma2-9b-it",
-    "name": "Gemma2 9B",
-    "integration": "groq",
-    "input": {
-      "maxTokens": 8192
-    },
-    "output": {
-      "maxTokens": 8192
-    }
-  },
-  {
-    "id": "groq__llama3-70b-8192",
-    "name": "LLaMA 3 70B",
-    "integration": "groq",
-    "input": {
-      "maxTokens": 8192
-    },
-    "output": {
-      "maxTokens": 8192
-    }
-  },
-  {
-    "id": "groq__llama3-8b-8192",
-    "name": "LLaMA 3 8B",
-    "integration": "groq",
-    "input": {
-      "maxTokens": 8192
-    },
-    "output": {
-      "maxTokens": 8192
-    }
-  },
-  {
-    "id": "groq__llama-3.1-70b-versatile",
-    "name": "LLaMA 3.1 70B",
-    "integration": "groq",
-    "input": {
-      "maxTokens": 128000
-    },
-    "output": {
-      "maxTokens": 8192
-    }
-  },
-  {
-    "id": "groq__llama-3.1-8b-instant",
-    "name": "LLaMA 3.1 8B",
-    "integration": "groq",
-    "input": {
-      "maxTokens": 128000
-    },
-    "output": {
-      "maxTokens": 8192
-    }
-  },
-  {
-    "id": "groq__llama-3.2-11b-vision-preview",
-    "name": "LLaMA 3.2 11B Vision",
-    "integration": "groq",
-    "input": {
-      "maxTokens": 128000
-    },
-    "output": {
-      "maxTokens": 8192
-    }
-  },
-  {
-    "id": "groq__llama-3.2-1b-preview",
-    "name": "LLaMA 3.2 1B",
-    "integration": "groq",
-    "input": {
-      "maxTokens": 128000
-    },
-    "output": {
-      "maxTokens": 8192
-    }
-  },
-  {
-    "id": "groq__llama-3.2-3b-preview",
-    "name": "LLaMA 3.2 3B",
-    "integration": "groq",
-    "input": {
-      "maxTokens": 128000
-    },
-    "output": {
-      "maxTokens": 8192
-    }
-  },
-  {
-    "id": "groq__llama-3.2-90b-vision-preview",
-    "name": "LLaMA 3.2 90B Vision",
-    "integration": "groq",
-    "input": {
-      "maxTokens": 128000
-    },
-    "output": {
-      "maxTokens": 8192
-    }
-  },
-  {
-    "id": "groq__llama-3.3-70b-versatile",
-    "name": "LLaMA 3.3 70B",
-    "integration": "groq",
-    "input": {
-      "maxTokens": 128000
-    },
-    "output": {
-      "maxTokens": 32768
-    }
-  },
-  {
-    "id": "groq__mixtral-8x7b-32768",
-    "name": "Mixtral 8x7B",
-    "integration": "groq",
-    "input": {
-      "maxTokens": 32768
-    },
-    "output": {
-      "maxTokens": 32768
-    }
-  },
-  {
-    "id": "openai__o1-2024-12-17",
-    "name": "GPT o1",
-    "integration": "openai",
-    "input": {
-      "maxTokens": 200000
-    },
-    "output": {
-      "maxTokens": 100000
-    }
-  },
-  {
-    "id": "openai__o1-mini-2024-09-12",
-    "name": "GPT o1-mini",
-    "integration": "openai",
-    "input": {
-      "maxTokens": 128000
-    },
-    "output": {
-      "maxTokens": 65536
-    }
-  },
-  {
-    "id": "openai__gpt-3.5-turbo-0125",
-    "name": "GPT-3.5 Turbo",
-    "integration": "openai",
-    "input": {
-      "maxTokens": 128000
-    },
-    "output": {
-      "maxTokens": 4096
-    }
-  },
-  {
-    "id": "openai__gpt-4-turbo-2024-04-09",
-    "name": "GPT-4 Turbo",
-    "integration": "openai",
-    "input": {
-      "maxTokens": 128000
-    },
-    "output": {
-      "maxTokens": 4096
-    }
-  },
-  {
-    "id": "openai__gpt-4o-2024-08-06",
-    "name": "GPT-4o (August 2024)",
-    "integration": "openai",
-    "input": {
-      "maxTokens": 128000
-    },
-    "output": {
-      "maxTokens": 16384
-    }
-  },
-  {
-    "id": "openai__gpt-4o-2024-05-13",
-    "name": "GPT-4o (May 2024)",
-    "integration": "openai",
-    "input": {
-      "maxTokens": 128000
-    },
-    "output": {
-      "maxTokens": 4096
-    }
-  },
-  {
-    "id": "openai__gpt-4o-2024-11-20",
-    "name": "GPT-4o (November 2024)",
-    "integration": "openai",
-    "input": {
-      "maxTokens": 128000
-    },
-    "output": {
-      "maxTokens": 16384
-    }
-  },
-  {
-    "id": "openai__gpt-4o-mini-2024-07-18",
-    "name": "GPT-4o Mini",
-    "integration": "openai",
-    "input": {
-      "maxTokens": 128000
-    },
-    "output": {
-      "maxTokens": 16384
-    }
-  }
-] as const
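
For context on what was removed: the deleted module exported a hard-coded model catalog as a readonly (`as const`) array, where each entry carries an `id`, a display `name`, the owning `integration`, and `input`/`output` token limits. The sketch below is illustrative only, not @botpress/zai API: it uses a trimmed copy of two entries from the deleted file, and the `ModelId` type and `getModel` helper are hypothetical names showing how an `as const` catalog like this can be consumed with literal-type safety.

// Trimmed, illustrative copy of two entries from the deleted models.ts.
const Models = [
  {
    id: 'anthropic__claude-3-5-sonnet-20240620',
    name: 'Claude 3.5 Sonnet',
    integration: 'anthropic',
    input: { maxTokens: 200000 },
    output: { maxTokens: 4096 },
  },
  {
    id: 'openai__gpt-4o-mini-2024-07-18',
    name: 'GPT-4o Mini',
    integration: 'openai',
    input: { maxTokens: 128000 },
    output: { maxTokens: 16384 },
  },
] as const

// `as const` keeps every field as a literal type, so a union of valid
// model ids can be derived directly from the data. (Hypothetical helper.)
type ModelId = (typeof Models)[number]['id']

function getModel(id: ModelId) {
  return Models.find((m) => m.id === id)
}

const model = getModel('openai__gpt-4o-mini-2024-07-18')
if (model) {
  console.log(model.name, model.input.maxTokens) // GPT-4o Mini 128000
}

With this file gone from 1.2.0, a consumer relying on a static catalog of this shape would presumably need to obtain model metadata another way; the diff itself does not show the replacement.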