@chainfuse/types 1.6.7 → 1.6.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -11,6 +11,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 32000;
 readonly info: "https://huggingface.co/qwen/qwen1.5-0.5b-chat";
 };
 }, {
@@ -21,6 +22,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 8192;
 readonly lora: true;
 };
 }, {
@@ -31,6 +33,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 readonly info: "https://huggingface.co/Nexusflow/Starling-LM-7B-beta";
 readonly max_batch_prefill_tokens: 8192;
 readonly max_input_length: 3072;
@@ -43,6 +46,7 @@ export declare const workersAiCatalog: {
 readonly description: "Generation over generation, Meta Llama 3 demonstrates state-of-the-art performance on a wide range of industry benchmarks and offers new capabilities, including improved reasoning.";
 readonly tags: readonly [];
 readonly properties: {
+readonly context_window: 7968;
 readonly info: "https://llama.meta.com";
 readonly terms: "https://llama.meta.com/llama3/license/#";
 };
@@ -53,6 +57,7 @@ export declare const workersAiCatalog: {
 readonly description: "The Llama 3.2 instruction-tuned text only models are optimized for multilingual dialogue use cases, including agentic retrieval and summarization tasks.";
 readonly tags: readonly [];
 readonly properties: {
+readonly context_window: 128000;
 readonly terms: "https://github.com/meta-llama/llama-models/blob/main/models/llama3_2/LICENSE";
 };
 }, {
@@ -63,6 +68,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 };
 }, {
 readonly id: "d2ba5c6b-bbb7-49d6-b466-900654870cd6";
@@ -72,6 +78,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 };
 }, {
 readonly id: "ca54bcd6-0d98-4739-9b3b-5c8b4402193d";
@@ -81,6 +88,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 readonly info: "https://ai.meta.com/llama/";
 readonly terms: "https://ai.meta.com/resources/models-and-libraries/llama-downloads/";
 };
@@ -92,6 +100,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 2824;
 readonly info: "https://mistral.ai/news/announcing-mistral-7b/";
 readonly lora: true;
 };
@@ -103,6 +112,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 15000;
 readonly lora: true;
 };
 }, {
@@ -113,6 +123,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 2048;
 readonly info: "https://huggingface.co/TinyLlama/TinyLlama-1.1B-Chat-v1.0";
 };
 }, {
@@ -123,6 +134,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 3072;
 readonly info: "https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2";
 readonly lora: true;
 readonly max_batch_prefill_tokens: 8192;
@@ -137,6 +149,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 15000;
 };
 }, {
 readonly id: "ad01ab83-baf8-4e7b-8fed-a0a219d4eb45";
@@ -145,6 +158,7 @@ export declare const workersAiCatalog: {
 readonly description: "DeepSeek-R1-Distill-Qwen-32B is a model distilled from DeepSeek-R1 based on Qwen2.5. It outperforms OpenAI-o1-mini across various benchmarks, achieving new state-of-the-art results for dense models.";
 readonly tags: readonly [];
 readonly properties: {
+readonly context_window: 80000;
 readonly terms: "https://github.com/deepseek-ai/DeepSeek-R1/blob/main/LICENSE";
 };
 }, {
@@ -155,6 +169,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 readonly info: "https://huggingface.co/TheBloke/DiscoLM_German_7b_v1-AWQ";
 };
 }, {
@@ -163,7 +178,9 @@ export declare const workersAiCatalog: {
 readonly name: "@cf/meta/llama-2-7b-chat-int8";
 readonly description: "Quantized (int8) generative text model with 7 billion parameters from Meta";
 readonly tags: readonly [];
-readonly properties: {};
+readonly properties: {
+readonly context_window: 8192;
+};
 }, {
 readonly id: "9b9c87c6-d4b7-494c-b177-87feab5904db";
 readonly source: 1;
@@ -171,6 +188,7 @@ export declare const workersAiCatalog: {
 readonly description: "Llama 3.1 8B quantized to FP8 precision";
 readonly tags: readonly [];
 readonly properties: {
+readonly context_window: 32000;
 readonly terms: "https://github.com/meta-llama/llama-models/blob/main/models/llama3_1/LICENSE";
 };
 }, {
@@ -181,6 +199,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 readonly info: "https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.1-AWQ";
 };
 }, {
@@ -191,6 +210,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 20000;
 readonly info: "https://huggingface.co/qwen/qwen1.5-7b-chat-awq";
 };
 }, {
@@ -200,6 +220,7 @@ export declare const workersAiCatalog: {
 readonly description: "The Llama 3.2 instruction-tuned text only models are optimized for multilingual dialogue use cases, including agentic retrieval and summarization tasks.";
 readonly tags: readonly [];
 readonly properties: {
+readonly context_window: 128000;
 readonly terms: "https://github.com/meta-llama/llama-models/blob/main/models/llama3_2/LICENSE";
 };
 }, {
@@ -210,6 +231,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 readonly info: "https://huggingface.co/TheBloke/Llama-2-13B-chat-AWQ";
 };
 }, {
@@ -220,6 +242,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 readonly terms: "https://huggingface.co/TheBloke/deepseek-coder-6.7B-base-AWQ";
 };
 }, {
@@ -230,6 +253,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 8192;
 readonly lora: true;
 };
 }, {
@@ -239,6 +263,7 @@ export declare const workersAiCatalog: {
 readonly description: "Llama 3.3 70B quantized to fp8 precision, optimized to be faster.";
 readonly tags: readonly [];
 readonly properties: {
+readonly context_window: 24000;
 readonly terms: "https://github.com/meta-llama/llama-models/blob/main/models/llama3_3/LICENSE";
 };
 }, {
@@ -249,6 +274,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 };
 }, {
 readonly id: "60474554-f03b-4ff4-8ecc-c1b7c71d7b29";
@@ -258,6 +284,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 readonly terms: "https://huggingface.co/TheBloke/deepseek-coder-6.7B-instruct-AWQ";
 };
 }, {
@@ -268,6 +295,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 readonly info: "https://huggingface.co/deepseek-ai/deepseek-math-7b-instruct";
 readonly terms: "https://github.com/deepseek-ai/DeepSeek-Math/blob/main/LICENSE-MODEL";
 };
@@ -279,6 +307,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 readonly info: "https://huggingface.co/tiiuae/falcon-7b-instruct";
 };
 }, {
@@ -289,6 +318,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 24000;
 readonly function_calling: true;
 readonly info: "https://huggingface.co/NousResearch/Hermes-2-Pro-Mistral-7B";
 };
@@ -299,6 +329,7 @@ export declare const workersAiCatalog: {
 readonly description: "The Meta Llama 3.1 collection of multilingual large language models (LLMs) is a collection of pretrained and instruction tuned generative models. The Llama 3.1 instruction tuned text only models are optimized for multilingual dialogue use cases and outperform many of the available open source and closed chat models on common industry benchmarks.";
 readonly tags: readonly [];
 readonly properties: {
+readonly context_window: 7968;
 readonly terms: "https://github.com/meta-llama/llama-models/blob/main/models/llama3_1/LICENSE";
 };
 }, {
@@ -308,6 +339,7 @@ export declare const workersAiCatalog: {
 readonly description: "Quantized (int4) generative text model with 8 billion parameters from Meta.\n";
 readonly tags: readonly [];
 readonly properties: {
+readonly context_window: 8192;
 readonly terms: "https://github.com/meta-llama/llama-models/blob/main/models/llama3_1/LICENSE";
 };
 }, {
@@ -318,6 +350,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 readonly info: "https://huggingface.co/TheBloke/zephyr-7B-beta-AWQ";
 };
 }, {
@@ -328,6 +361,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 3500;
 readonly lora: true;
 };
 }, {
@@ -338,6 +372,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 32000;
 readonly info: "https://huggingface.co/qwen/qwen1.5-1.8b-chat";
 };
 }, {
@@ -347,6 +382,7 @@ export declare const workersAiCatalog: {
 readonly description: "Quantized (int4) generative text model with 8 billion parameters from Meta.";
 readonly tags: readonly [];
 readonly properties: {
+readonly context_window: 8192;
 readonly info: "https://llama.meta.com";
 readonly terms: "https://llama.meta.com/llama3/license/#";
 };
@@ -367,6 +403,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 10000;
 readonly info: "https://huggingface.co/defog/sqlcoder-7b-2";
 readonly terms: "https://creativecommons.org/licenses/by-sa/4.0/deed.en";
 };
@@ -378,6 +415,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 2048;
 readonly info: "https://huggingface.co/microsoft/phi-2";
 };
 }, {
@@ -386,7 +424,9 @@ export declare const workersAiCatalog: {
 readonly name: "@hf/meta-llama/meta-llama-3-8b-instruct";
 readonly description: "Generation over generation, Meta Llama 3 demonstrates state-of-the-art performance on a wide range of industry benchmarks and offers new capabilities, including improved reasoning.\t";
 readonly tags: readonly [];
-readonly properties: {};
+readonly properties: {
+readonly context_window: 8192;
+};
 }, {
 readonly id: "0f002249-7d86-4698-aabf-8529ed86cefb";
 readonly source: 2;
@@ -395,11 +435,9 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 8192;
 readonly info: "https://ai.google.dev/gemma/docs";
 readonly lora: true;
-readonly max_batch_prefill_tokens: 2048;
-readonly max_input_length: 1512;
-readonly max_total_tokens: 2048;
 readonly terms: "https://ai.google.dev/gemma/terms";
 };
 }, {
@@ -410,6 +448,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 7500;
 readonly info: "https://huggingface.co/qwen/qwen1.5-14b-chat-awq";
 };
 }, {
@@ -420,6 +459,7 @@ export declare const workersAiCatalog: {
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 8192;
 readonly info: "https://huggingface.co/openchat/openchat-3.5-0106";
 };
 }];
@@ -12,6 +12,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 32000,
 info: 'https://huggingface.co/qwen/qwen1.5-0.5b-chat',
 },
 },
@@ -23,6 +24,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 8192,
 lora: true,
 },
 },
@@ -34,6 +36,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 4096,
 info: 'https://huggingface.co/Nexusflow/Starling-LM-7B-beta',
 max_batch_prefill_tokens: 8192,
 max_input_length: 3072,
@@ -47,6 +50,7 @@ export const workersAiCatalog = {
 description: 'Generation over generation, Meta Llama 3 demonstrates state-of-the-art performance on a wide range of industry benchmarks and offers new capabilities, including improved reasoning.',
 tags: [],
 properties: {
+context_window: 7968,
 info: 'https://llama.meta.com',
 terms: 'https://llama.meta.com/llama3/license/#',
 },
@@ -58,6 +62,7 @@ export const workersAiCatalog = {
 description: 'The Llama 3.2 instruction-tuned text only models are optimized for multilingual dialogue use cases, including agentic retrieval and summarization tasks.',
 tags: [],
 properties: {
+context_window: 128000,
 terms: 'https://github.com/meta-llama/llama-models/blob/main/models/llama3_2/LICENSE',
 },
 },
@@ -69,6 +74,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 4096,
 },
 },
 {
@@ -79,6 +85,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 4096,
 },
 },
 {
@@ -89,6 +96,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 4096,
 info: 'https://ai.meta.com/llama/',
 terms: 'https://ai.meta.com/resources/models-and-libraries/llama-downloads/',
 },
@@ -101,6 +109,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 2824,
 info: 'https://mistral.ai/news/announcing-mistral-7b/',
 lora: true,
 },
@@ -113,6 +122,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 15000,
 lora: true,
 },
 },
@@ -124,6 +134,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 2048,
 info: 'https://huggingface.co/TinyLlama/TinyLlama-1.1B-Chat-v1.0',
 },
 },
@@ -135,6 +146,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 3072,
 info: 'https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2',
 lora: true,
 max_batch_prefill_tokens: 8192,
@@ -150,6 +162,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 15000,
 },
 },
 {
@@ -159,6 +172,7 @@ export const workersAiCatalog = {
 description: 'DeepSeek-R1-Distill-Qwen-32B is a model distilled from DeepSeek-R1 based on Qwen2.5. It outperforms OpenAI-o1-mini across various benchmarks, achieving new state-of-the-art results for dense models.',
 tags: [],
 properties: {
+context_window: 80000,
 terms: 'https://github.com/deepseek-ai/DeepSeek-R1/blob/main/LICENSE',
 },
 },
@@ -170,6 +184,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 4096,
 info: 'https://huggingface.co/TheBloke/DiscoLM_German_7b_v1-AWQ',
 },
 },
@@ -179,7 +194,9 @@ export const workersAiCatalog = {
 name: '@cf/meta/llama-2-7b-chat-int8',
 description: 'Quantized (int8) generative text model with 7 billion parameters from Meta',
 tags: [],
-properties: {},
+properties: {
+context_window: 8192,
+},
 },
 {
 id: '9b9c87c6-d4b7-494c-b177-87feab5904db',
@@ -188,6 +205,7 @@ export const workersAiCatalog = {
 description: 'Llama 3.1 8B quantized to FP8 precision',
 tags: [],
 properties: {
+context_window: 32000,
 terms: 'https://github.com/meta-llama/llama-models/blob/main/models/llama3_1/LICENSE',
 },
 },
@@ -199,6 +217,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 4096,
 info: 'https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.1-AWQ',
 },
 },
@@ -210,6 +229,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 20000,
 info: 'https://huggingface.co/qwen/qwen1.5-7b-chat-awq',
 },
 },
@@ -220,6 +240,7 @@ export const workersAiCatalog = {
 description: 'The Llama 3.2 instruction-tuned text only models are optimized for multilingual dialogue use cases, including agentic retrieval and summarization tasks.',
 tags: [],
 properties: {
+context_window: 128000,
 terms: 'https://github.com/meta-llama/llama-models/blob/main/models/llama3_2/LICENSE',
 },
 },
@@ -231,6 +252,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 4096,
 info: 'https://huggingface.co/TheBloke/Llama-2-13B-chat-AWQ',
 },
 },
@@ -242,6 +264,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 4096,
 terms: 'https://huggingface.co/TheBloke/deepseek-coder-6.7B-base-AWQ',
 },
 },
@@ -253,6 +276,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 8192,
 lora: true,
 },
 },
@@ -263,6 +287,7 @@ export const workersAiCatalog = {
 description: 'Llama 3.3 70B quantized to fp8 precision, optimized to be faster.',
 tags: [],
 properties: {
+context_window: 24000,
 terms: 'https://github.com/meta-llama/llama-models/blob/main/models/llama3_3/LICENSE',
 },
 },
@@ -274,6 +299,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 4096,
 },
 },
 {
@@ -284,6 +310,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 4096,
 terms: 'https://huggingface.co/TheBloke/deepseek-coder-6.7B-instruct-AWQ',
 },
 },
@@ -295,6 +322,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 4096,
 info: 'https://huggingface.co/deepseek-ai/deepseek-math-7b-instruct',
 terms: 'https://github.com/deepseek-ai/DeepSeek-Math/blob/main/LICENSE-MODEL',
 },
@@ -307,6 +335,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 4096,
 info: 'https://huggingface.co/tiiuae/falcon-7b-instruct',
 },
 },
@@ -318,6 +347,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 24000,
 function_calling: true,
 info: 'https://huggingface.co/NousResearch/Hermes-2-Pro-Mistral-7B',
 },
@@ -329,6 +359,7 @@ export const workersAiCatalog = {
 description: 'The Meta Llama 3.1 collection of multilingual large language models (LLMs) is a collection of pretrained and instruction tuned generative models. The Llama 3.1 instruction tuned text only models are optimized for multilingual dialogue use cases and outperform many of the available open source and closed chat models on common industry benchmarks.',
 tags: [],
 properties: {
+context_window: 7968,
 terms: 'https://github.com/meta-llama/llama-models/blob/main/models/llama3_1/LICENSE',
 },
 },
@@ -339,6 +370,7 @@ export const workersAiCatalog = {
 description: 'Quantized (int4) generative text model with 8 billion parameters from Meta.\n',
 tags: [],
 properties: {
+context_window: 8192,
 terms: 'https://github.com/meta-llama/llama-models/blob/main/models/llama3_1/LICENSE',
 },
 },
@@ -350,6 +382,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 4096,
 info: 'https://huggingface.co/TheBloke/zephyr-7B-beta-AWQ',
 },
 },
@@ -361,6 +394,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 3500,
 lora: true,
 },
 },
@@ -372,6 +406,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 32000,
 info: 'https://huggingface.co/qwen/qwen1.5-1.8b-chat',
 },
 },
@@ -382,6 +417,7 @@ export const workersAiCatalog = {
 description: 'Quantized (int4) generative text model with 8 billion parameters from Meta.',
 tags: [],
 properties: {
+context_window: 8192,
 info: 'https://llama.meta.com',
 terms: 'https://llama.meta.com/llama3/license/#',
 },
@@ -404,6 +440,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 10000,
 info: 'https://huggingface.co/defog/sqlcoder-7b-2',
 terms: 'https://creativecommons.org/licenses/by-sa/4.0/deed.en',
 },
@@ -416,6 +453,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 2048,
 info: 'https://huggingface.co/microsoft/phi-2',
 },
 },
@@ -425,7 +463,9 @@ export const workersAiCatalog = {
 name: '@hf/meta-llama/meta-llama-3-8b-instruct',
 description: 'Generation over generation, Meta Llama 3 demonstrates state-of-the-art performance on a wide range of industry benchmarks and offers new capabilities, including improved reasoning.\t',
 tags: [],
-properties: {},
+properties: {
+context_window: 8192,
+},
 },
 {
 id: '0f002249-7d86-4698-aabf-8529ed86cefb',
@@ -435,11 +475,9 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 8192,
 info: 'https://ai.google.dev/gemma/docs',
 lora: true,
-max_batch_prefill_tokens: 2048,
-max_input_length: 1512,
-max_total_tokens: 2048,
 terms: 'https://ai.google.dev/gemma/terms',
 },
 },
@@ -451,6 +489,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 7500,
 info: 'https://huggingface.co/qwen/qwen1.5-14b-chat-awq',
 },
 },
@@ -462,6 +501,7 @@ export const workersAiCatalog = {
 tags: [],
 properties: {
 beta: true,
+context_window: 8192,
 info: 'https://huggingface.co/openchat/openchat-3.5-0106',
 },
 },
package/dist/super-ai/index.d.ts CHANGED
@@ -50,6 +50,7 @@ export declare const possibilities_mc_generic: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 24000;
 readonly function_calling: true;
 readonly info: "https://huggingface.co/NousResearch/Hermes-2-Pro-Mistral-7B";
 };
@@ -78,6 +79,7 @@ export declare const possibilities_mc_extraction: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 24000;
 readonly function_calling: true;
 readonly info: "https://huggingface.co/NousResearch/Hermes-2-Pro-Mistral-7B";
 };
@@ -93,6 +95,7 @@ export declare const possibilities_mc_tagging: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 24000;
 readonly function_calling: true;
 readonly info: "https://huggingface.co/NousResearch/Hermes-2-Pro-Mistral-7B";
 };
@@ -108,6 +111,7 @@ export declare const possibilities_mc_sentiment: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 24000;
 readonly function_calling: true;
 readonly info: "https://huggingface.co/NousResearch/Hermes-2-Pro-Mistral-7B";
 };
@@ -123,6 +127,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 32000;
 readonly info: "https://huggingface.co/qwen/qwen1.5-0.5b-chat";
 };
 }, {
@@ -133,6 +138,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 8192;
 readonly lora: true;
 };
 }, {
@@ -143,6 +149,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 readonly info: "https://huggingface.co/Nexusflow/Starling-LM-7B-beta";
 readonly max_batch_prefill_tokens: 8192;
 readonly max_input_length: 3072;
@@ -155,6 +162,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly description: "Generation over generation, Meta Llama 3 demonstrates state-of-the-art performance on a wide range of industry benchmarks and offers new capabilities, including improved reasoning.";
 readonly tags: readonly [];
 readonly properties: {
+readonly context_window: 7968;
 readonly info: "https://llama.meta.com";
 readonly terms: "https://llama.meta.com/llama3/license/#";
 };
@@ -165,6 +173,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly description: "The Llama 3.2 instruction-tuned text only models are optimized for multilingual dialogue use cases, including agentic retrieval and summarization tasks.";
 readonly tags: readonly [];
 readonly properties: {
+readonly context_window: 128000;
 readonly terms: "https://github.com/meta-llama/llama-models/blob/main/models/llama3_2/LICENSE";
 };
 }, {
@@ -175,6 +184,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 };
 }, {
 readonly id: "d2ba5c6b-bbb7-49d6-b466-900654870cd6";
@@ -184,6 +194,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 };
 }, {
 readonly id: "ca54bcd6-0d98-4739-9b3b-5c8b4402193d";
@@ -193,6 +204,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 readonly info: "https://ai.meta.com/llama/";
 readonly terms: "https://ai.meta.com/resources/models-and-libraries/llama-downloads/";
 };
@@ -204,6 +216,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 2824;
 readonly info: "https://mistral.ai/news/announcing-mistral-7b/";
 readonly lora: true;
 };
@@ -215,6 +228,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 15000;
 readonly lora: true;
 };
 }, {
@@ -225,6 +239,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 2048;
 readonly info: "https://huggingface.co/TinyLlama/TinyLlama-1.1B-Chat-v1.0";
 };
 }, {
@@ -235,6 +250,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 3072;
 readonly info: "https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2";
 readonly lora: true;
 readonly max_batch_prefill_tokens: 8192;
@@ -249,6 +265,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 15000;
 };
 }, {
 readonly id: "ad01ab83-baf8-4e7b-8fed-a0a219d4eb45";
@@ -257,6 +274,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly description: "DeepSeek-R1-Distill-Qwen-32B is a model distilled from DeepSeek-R1 based on Qwen2.5. It outperforms OpenAI-o1-mini across various benchmarks, achieving new state-of-the-art results for dense models.";
 readonly tags: readonly [];
 readonly properties: {
+readonly context_window: 80000;
 readonly terms: "https://github.com/deepseek-ai/DeepSeek-R1/blob/main/LICENSE";
 };
 }, {
@@ -267,6 +285,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 readonly info: "https://huggingface.co/TheBloke/DiscoLM_German_7b_v1-AWQ";
 };
 }, {
@@ -275,7 +294,9 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly name: "@cf/meta/llama-2-7b-chat-int8";
 readonly description: "Quantized (int8) generative text model with 7 billion parameters from Meta";
 readonly tags: readonly [];
-readonly properties: {};
+readonly properties: {
+readonly context_window: 8192;
+};
 }, {
 readonly id: "9b9c87c6-d4b7-494c-b177-87feab5904db";
 readonly source: 1;
@@ -283,6 +304,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly description: "Llama 3.1 8B quantized to FP8 precision";
 readonly tags: readonly [];
 readonly properties: {
+readonly context_window: 32000;
 readonly terms: "https://github.com/meta-llama/llama-models/blob/main/models/llama3_1/LICENSE";
 };
 }, {
@@ -293,6 +315,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 readonly info: "https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.1-AWQ";
 };
 }, {
@@ -303,6 +326,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 20000;
 readonly info: "https://huggingface.co/qwen/qwen1.5-7b-chat-awq";
 };
 }, {
@@ -312,6 +336,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly description: "The Llama 3.2 instruction-tuned text only models are optimized for multilingual dialogue use cases, including agentic retrieval and summarization tasks.";
 readonly tags: readonly [];
 readonly properties: {
+readonly context_window: 128000;
 readonly terms: "https://github.com/meta-llama/llama-models/blob/main/models/llama3_2/LICENSE";
 };
 }, {
@@ -322,6 +347,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 readonly info: "https://huggingface.co/TheBloke/Llama-2-13B-chat-AWQ";
 };
 }, {
@@ -332,6 +358,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 readonly terms: "https://huggingface.co/TheBloke/deepseek-coder-6.7B-base-AWQ";
 };
 }, {
@@ -342,6 +369,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 8192;
 readonly lora: true;
 };
 }, {
@@ -351,6 +379,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly description: "Llama 3.3 70B quantized to fp8 precision, optimized to be faster.";
 readonly tags: readonly [];
 readonly properties: {
+readonly context_window: 24000;
 readonly terms: "https://github.com/meta-llama/llama-models/blob/main/models/llama3_3/LICENSE";
 };
 }, {
@@ -361,6 +390,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 };
 }, {
 readonly id: "60474554-f03b-4ff4-8ecc-c1b7c71d7b29";
@@ -370,6 +400,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 readonly terms: "https://huggingface.co/TheBloke/deepseek-coder-6.7B-instruct-AWQ";
 };
 }, {
@@ -380,6 +411,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 readonly info: "https://huggingface.co/deepseek-ai/deepseek-math-7b-instruct";
 readonly terms: "https://github.com/deepseek-ai/DeepSeek-Math/blob/main/LICENSE-MODEL";
 };
@@ -391,6 +423,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 readonly info: "https://huggingface.co/tiiuae/falcon-7b-instruct";
 };
 }, {
@@ -401,6 +434,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 24000;
 readonly function_calling: true;
 readonly info: "https://huggingface.co/NousResearch/Hermes-2-Pro-Mistral-7B";
 };
@@ -411,6 +445,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly description: "The Meta Llama 3.1 collection of multilingual large language models (LLMs) is a collection of pretrained and instruction tuned generative models. The Llama 3.1 instruction tuned text only models are optimized for multilingual dialogue use cases and outperform many of the available open source and closed chat models on common industry benchmarks.";
 readonly tags: readonly [];
 readonly properties: {
+readonly context_window: 7968;
 readonly terms: "https://github.com/meta-llama/llama-models/blob/main/models/llama3_1/LICENSE";
 };
 }, {
@@ -420,6 +455,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly description: "Quantized (int4) generative text model with 8 billion parameters from Meta.\n";
 readonly tags: readonly [];
 readonly properties: {
+readonly context_window: 8192;
 readonly terms: "https://github.com/meta-llama/llama-models/blob/main/models/llama3_1/LICENSE";
 };
 }, {
@@ -430,6 +466,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 4096;
 readonly info: "https://huggingface.co/TheBloke/zephyr-7B-beta-AWQ";
 };
 }, {
@@ -440,6 +477,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 3500;
 readonly lora: true;
 };
 }, {
@@ -450,6 +488,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 32000;
 readonly info: "https://huggingface.co/qwen/qwen1.5-1.8b-chat";
 };
 }, {
@@ -459,6 +498,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly description: "Quantized (int4) generative text model with 8 billion parameters from Meta.";
 readonly tags: readonly [];
 readonly properties: {
+readonly context_window: 8192;
 readonly info: "https://llama.meta.com";
 readonly terms: "https://llama.meta.com/llama3/license/#";
 };
@@ -479,6 +519,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 10000;
 readonly info: "https://huggingface.co/defog/sqlcoder-7b-2";
 readonly terms: "https://creativecommons.org/licenses/by-sa/4.0/deed.en";
 };
@@ -490,6 +531,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 2048;
 readonly info: "https://huggingface.co/microsoft/phi-2";
 };
 }, {
@@ -498,7 +540,9 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly name: "@hf/meta-llama/meta-llama-3-8b-instruct";
 readonly description: "Generation over generation, Meta Llama 3 demonstrates state-of-the-art performance on a wide range of industry benchmarks and offers new capabilities, including improved reasoning.\t";
 readonly tags: readonly [];
-readonly properties: {};
+readonly properties: {
+readonly context_window: 8192;
+};
 }, {
 readonly id: "0f002249-7d86-4698-aabf-8529ed86cefb";
 readonly source: 2;
@@ -507,11 +551,9 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 8192;
 readonly info: "https://ai.google.dev/gemma/docs";
 readonly lora: true;
-readonly max_batch_prefill_tokens: 2048;
-readonly max_input_length: 1512;
-readonly max_total_tokens: 2048;
 readonly terms: "https://ai.google.dev/gemma/terms";
 };
 }, {
@@ -522,6 +564,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 7500;
 readonly info: "https://huggingface.co/qwen/qwen1.5-14b-chat-awq";
 };
 }, {
@@ -532,6 +575,7 @@ export declare const possibilities_mc_safety: readonly [...{
 readonly tags: readonly [];
 readonly properties: {
 readonly beta: true;
+readonly context_window: 8192;
 readonly info: "https://huggingface.co/openchat/openchat-3.5-0106";
 };
 }];
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@chainfuse/types",
-"version": "1.6.7",
+"version": "1.6.8",
 "description": "",
 "author": "ChainFuse",
 "homepage": "https://github.com/ChainFuse/packages/tree/main/packages/types#readme",
@@ -86,8 +86,8 @@
 },
 "prettier": "@demosjarco/prettier-config",
 "devDependencies": {
-"@cloudflare/workers-types": "^4.
+"@cloudflare/workers-types": "^4.20250224.0",
 "@types/json-schema": "^7.0.15"
 },
-"gitHead": "
+"gitHead": "ab7a78396cb7871e1b4b78f9ffd7a0973839964e"
 }
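Taken together, the substantive change in 1.6.8 is that most model entries in these catalogs gain a numeric context_window property (and the Gemma entry drops its max_* token fields). As a rough illustration of how downstream code might consume the new field, here is a minimal TypeScript sketch; the ModelEntry shape and the contextWindowOf helper are simplified assumptions for this example, not types or functions exported by @chainfuse/types.

// Hypothetical, simplified shape for illustration only; it mirrors the fragments
// visible in the diff above rather than the package's actual exported types.
interface ModelEntry {
  name: string;
  properties: {
    beta?: boolean;
    context_window?: number; // added for most entries in 1.6.8
    lora?: boolean;
    info?: string;
    terms?: string;
  };
}

// Read a model's context window, falling back to a conservative default for
// any entry that still has no context_window property.
function contextWindowOf(model: ModelEntry, fallback = 4096): number {
  return model.properties.context_window ?? fallback;
}

// Example with an entry shaped like the catalog data in this diff.
const llama2Int8: ModelEntry = {
  name: '@cf/meta/llama-2-7b-chat-int8',
  properties: { context_window: 8192 },
};
contextWindowOf(llama2Int8); // 8192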