@funkai/models 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.generated/entries.json +23 -0
- package/.generated/req.txt +1 -0
- package/.turbo/turbo-build.log +145 -0
- package/.turbo/turbo-typecheck.log +4 -0
- package/CHANGELOG.md +23 -0
- package/README.md +95 -0
- package/dist/alibaba-B6q4Ng1R.mjs +957 -0
- package/dist/alibaba-B6q4Ng1R.mjs.map +1 -0
- package/dist/amazon-bedrock-Cv9AHQBH.mjs +2070 -0
- package/dist/amazon-bedrock-Cv9AHQBH.mjs.map +1 -0
- package/dist/anthropic-yB7ST97_.mjs +651 -0
- package/dist/anthropic-yB7ST97_.mjs.map +1 -0
- package/dist/cerebras-COfl7XM-.mjs +95 -0
- package/dist/cerebras-COfl7XM-.mjs.map +1 -0
- package/dist/cohere-B7TgO0hT.mjs +271 -0
- package/dist/cohere-B7TgO0hT.mjs.map +1 -0
- package/dist/deepinfra-B0GxUwCG.mjs +636 -0
- package/dist/deepinfra-B0GxUwCG.mjs.map +1 -0
- package/dist/deepseek-D64ZEsvS.mjs +50 -0
- package/dist/deepseek-D64ZEsvS.mjs.map +1 -0
- package/dist/fireworks-ai-DJYvdAi_.mjs +304 -0
- package/dist/fireworks-ai-DJYvdAi_.mjs.map +1 -0
- package/dist/google-BypRl349.mjs +833 -0
- package/dist/google-BypRl349.mjs.map +1 -0
- package/dist/google-vertex-DbS-zTGD.mjs +730 -0
- package/dist/google-vertex-DbS-zTGD.mjs.map +1 -0
- package/dist/groq-ei_PerYi.mjs +381 -0
- package/dist/groq-ei_PerYi.mjs.map +1 -0
- package/dist/huggingface-DaM1EeLP.mjs +456 -0
- package/dist/huggingface-DaM1EeLP.mjs.map +1 -0
- package/dist/inception-CspEzqNV.mjs +101 -0
- package/dist/inception-CspEzqNV.mjs.map +1 -0
- package/dist/index.d.mts +30314 -0
- package/dist/index.d.mts.map +1 -0
- package/dist/index.mjs +271 -0
- package/dist/index.mjs.map +1 -0
- package/dist/llama-Cf3-koap.mjs +161 -0
- package/dist/llama-Cf3-koap.mjs.map +1 -0
- package/dist/mistral-BI9MdAO4.mjs +579 -0
- package/dist/mistral-BI9MdAO4.mjs.map +1 -0
- package/dist/nvidia-COHacuoa.mjs +1625 -0
- package/dist/nvidia-COHacuoa.mjs.map +1 -0
- package/dist/openai-C0nCfZUq.mjs +1023 -0
- package/dist/openai-C0nCfZUq.mjs.map +1 -0
- package/dist/openrouter-DSFzxKQb.mjs +4608 -0
- package/dist/openrouter-DSFzxKQb.mjs.map +1 -0
- package/dist/perplexity-zeZ2WlBU.mjs +96 -0
- package/dist/perplexity-zeZ2WlBU.mjs.map +1 -0
- package/dist/providers/alibaba.d.mts +1795 -0
- package/dist/providers/alibaba.d.mts.map +1 -0
- package/dist/providers/alibaba.mjs +39 -0
- package/dist/providers/alibaba.mjs.map +1 -0
- package/dist/providers/amazon-bedrock.d.mts +3713 -0
- package/dist/providers/amazon-bedrock.d.mts.map +1 -0
- package/dist/providers/amazon-bedrock.mjs +39 -0
- package/dist/providers/amazon-bedrock.mjs.map +1 -0
- package/dist/providers/anthropic.d.mts +1109 -0
- package/dist/providers/anthropic.d.mts.map +1 -0
- package/dist/providers/anthropic.mjs +39 -0
- package/dist/providers/anthropic.mjs.map +1 -0
- package/dist/providers/cerebras.d.mts +219 -0
- package/dist/providers/cerebras.d.mts.map +1 -0
- package/dist/providers/cerebras.mjs +39 -0
- package/dist/providers/cerebras.mjs.map +1 -0
- package/dist/providers/cohere.d.mts +555 -0
- package/dist/providers/cohere.d.mts.map +1 -0
- package/dist/providers/cohere.mjs +39 -0
- package/dist/providers/cohere.mjs.map +1 -0
- package/dist/providers/deepinfra.d.mts +1245 -0
- package/dist/providers/deepinfra.d.mts.map +1 -0
- package/dist/providers/deepinfra.mjs +39 -0
- package/dist/providers/deepinfra.mjs.map +1 -0
- package/dist/providers/deepseek.d.mts +139 -0
- package/dist/providers/deepseek.d.mts.map +1 -0
- package/dist/providers/deepseek.mjs +39 -0
- package/dist/providers/deepseek.mjs.map +1 -0
- package/dist/providers/fireworks-ai.d.mts +611 -0
- package/dist/providers/fireworks-ai.d.mts.map +1 -0
- package/dist/providers/fireworks-ai.mjs +39 -0
- package/dist/providers/fireworks-ai.mjs.map +1 -0
- package/dist/providers/google-vertex.d.mts +1227 -0
- package/dist/providers/google-vertex.d.mts.map +1 -0
- package/dist/providers/google-vertex.mjs +39 -0
- package/dist/providers/google-vertex.mjs.map +1 -0
- package/dist/providers/google.d.mts +1359 -0
- package/dist/providers/google.d.mts.map +1 -0
- package/dist/providers/google.mjs +39 -0
- package/dist/providers/google.mjs.map +1 -0
- package/dist/providers/groq.d.mts +765 -0
- package/dist/providers/groq.d.mts.map +1 -0
- package/dist/providers/groq.mjs +39 -0
- package/dist/providers/groq.mjs.map +1 -0
- package/dist/providers/huggingface.d.mts +901 -0
- package/dist/providers/huggingface.d.mts.map +1 -0
- package/dist/providers/huggingface.mjs +39 -0
- package/dist/providers/huggingface.mjs.map +1 -0
- package/dist/providers/inception.d.mts +231 -0
- package/dist/providers/inception.d.mts.map +1 -0
- package/dist/providers/inception.mjs +39 -0
- package/dist/providers/inception.mjs.map +1 -0
- package/dist/providers/llama.d.mts +345 -0
- package/dist/providers/llama.d.mts.map +1 -0
- package/dist/providers/llama.mjs +39 -0
- package/dist/providers/llama.mjs.map +1 -0
- package/dist/providers/mistral.d.mts +1143 -0
- package/dist/providers/mistral.d.mts.map +1 -0
- package/dist/providers/mistral.mjs +39 -0
- package/dist/providers/mistral.mjs.map +1 -0
- package/dist/providers/nvidia.d.mts +3117 -0
- package/dist/providers/nvidia.d.mts.map +1 -0
- package/dist/providers/nvidia.mjs +39 -0
- package/dist/providers/nvidia.mjs.map +1 -0
- package/dist/providers/openai.d.mts +1963 -0
- package/dist/providers/openai.d.mts.map +1 -0
- package/dist/providers/openai.mjs +39 -0
- package/dist/providers/openai.mjs.map +1 -0
- package/dist/providers/openrouter.d.mts +8531 -0
- package/dist/providers/openrouter.d.mts.map +1 -0
- package/dist/providers/openrouter.mjs +39 -0
- package/dist/providers/openrouter.mjs.map +1 -0
- package/dist/providers/perplexity.d.mts +221 -0
- package/dist/providers/perplexity.d.mts.map +1 -0
- package/dist/providers/perplexity.mjs +39 -0
- package/dist/providers/perplexity.mjs.map +1 -0
- package/dist/providers/togetherai.d.mts +767 -0
- package/dist/providers/togetherai.d.mts.map +1 -0
- package/dist/providers/togetherai.mjs +39 -0
- package/dist/providers/togetherai.mjs.map +1 -0
- package/dist/providers/xai.d.mts +1161 -0
- package/dist/providers/xai.d.mts.map +1 -0
- package/dist/providers/xai.mjs +39 -0
- package/dist/providers/xai.mjs.map +1 -0
- package/dist/togetherai-BvcxUfPE.mjs +382 -0
- package/dist/togetherai-BvcxUfPE.mjs.map +1 -0
- package/dist/types-DjdaZckF.d.mts +71 -0
- package/dist/types-DjdaZckF.d.mts.map +1 -0
- package/dist/xai-fSuAkQJo.mjs +587 -0
- package/dist/xai-fSuAkQJo.mjs.map +1 -0
- package/docs/catalog/filtering.md +102 -0
- package/docs/catalog/overview.md +168 -0
- package/docs/catalog/providers.md +73 -0
- package/docs/cost/overview.md +125 -0
- package/docs/guides/filter-models.md +113 -0
- package/docs/guides/setup-resolver.md +106 -0
- package/docs/guides/track-costs.md +133 -0
- package/docs/overview.md +139 -0
- package/docs/provider/configuration.md +100 -0
- package/docs/provider/openrouter.md +105 -0
- package/docs/provider/overview.md +131 -0
- package/docs/troubleshooting.md +100 -0
- package/package.json +142 -0
- package/providers.json +39 -0
- package/scripts/generate-models.ts +392 -0
- package/src/catalog/index.test.ts +124 -0
- package/src/catalog/index.ts +65 -0
- package/src/catalog/providers/alibaba.ts +468 -0
- package/src/catalog/providers/amazon-bedrock.ts +941 -0
- package/src/catalog/providers/anthropic.ts +270 -0
- package/src/catalog/providers/cerebras.ts +61 -0
- package/src/catalog/providers/cohere.ts +149 -0
- package/src/catalog/providers/deepinfra.ts +325 -0
- package/src/catalog/providers/deepseek.ts +39 -0
- package/src/catalog/providers/fireworks-ai.ts +160 -0
- package/src/catalog/providers/google-vertex.ts +314 -0
- package/src/catalog/providers/google.ts +347 -0
- package/src/catalog/providers/groq.ts +204 -0
- package/src/catalog/providers/huggingface.ts +237 -0
- package/src/catalog/providers/inception.ts +61 -0
- package/src/catalog/providers/index.ts +59 -0
- package/src/catalog/providers/llama.ts +94 -0
- package/src/catalog/providers/mistral.ts +303 -0
- package/src/catalog/providers/nvidia.ts +820 -0
- package/src/catalog/providers/openai.ts +501 -0
- package/src/catalog/providers/openrouter.ts +2201 -0
- package/src/catalog/providers/perplexity.ts +61 -0
- package/src/catalog/providers/togetherai.ts +204 -0
- package/src/catalog/providers/xai.ts +292 -0
- package/src/catalog/types.ts +86 -0
- package/src/cost/calculate.test.ts +157 -0
- package/src/cost/calculate.ts +43 -0
- package/src/cost/index.ts +2 -0
- package/src/cost/types.ts +25 -0
- package/src/index.ts +25 -0
- package/src/provider/index.ts +9 -0
- package/src/provider/openrouter.test.ts +125 -0
- package/src/provider/openrouter.ts +110 -0
- package/src/provider/resolver.test.ts +138 -0
- package/src/provider/resolver.ts +125 -0
- package/src/provider/types.ts +39 -0
- package/src/providers/alibaba.ts +65 -0
- package/src/providers/amazon-bedrock.ts +67 -0
- package/src/providers/anthropic.ts +65 -0
- package/src/providers/cerebras.ts +65 -0
- package/src/providers/cohere.ts +65 -0
- package/src/providers/deepinfra.ts +65 -0
- package/src/providers/deepseek.ts +65 -0
- package/src/providers/fireworks-ai.ts +65 -0
- package/src/providers/google-vertex.ts +67 -0
- package/src/providers/google.ts +65 -0
- package/src/providers/groq.ts +65 -0
- package/src/providers/huggingface.ts +67 -0
- package/src/providers/inception.ts +65 -0
- package/src/providers/llama.ts +65 -0
- package/src/providers/mistral.ts +65 -0
- package/src/providers/nvidia.ts +65 -0
- package/src/providers/openai.ts +65 -0
- package/src/providers/openrouter.ts +67 -0
- package/src/providers/perplexity.ts +67 -0
- package/src/providers/togetherai.ts +65 -0
- package/src/providers/xai.ts +65 -0
- package/tsconfig.json +25 -0
- package/tsdown.config.ts +23 -0
- package/vitest.config.ts +29 -0
|
@@ -0,0 +1,204 @@
|
|
|
1
|
+
// ──────────────────────────────────────────────────────────────
|
|
2
|
+
// ███████╗██╗ ██╗███╗ ██╗██╗ ██╗ █████╗ ██╗
|
|
3
|
+
// ██╔════╝██║ ██║████╗ ██║██║ ██╔╝██╔══██╗██║
|
|
4
|
+
// █████╗ ██║ ██║██╔██╗ ██║█████╔╝ ███████║██║
|
|
5
|
+
// ██╔══╝ ██║ ██║██║╚██╗██║██╔═██╗ ██╔══██║██║
|
|
6
|
+
// ██║ ╚██████╔╝██║ ╚████║██║ ██╗██║ ██║██║
|
|
7
|
+
// ╚═╝ ╚═════╝ ╚═╝ ╚═══╝╚═╝ ╚═╝╚═╝ ╚═╝╚═╝
|
|
8
|
+
//
|
|
9
|
+
// AUTO-GENERATED — DO NOT EDIT
|
|
10
|
+
// Source: https://models.dev
|
|
11
|
+
// Update: pnpm --filter=@funkai/models generate:models
|
|
12
|
+
// ──────────────────────────────────────────────────────────────
|
|
13
|
+
|
|
14
|
+
import type { ModelDefinition } from "../types.js";
|
|
15
|
+
|
|
16
|
+
export const GROQ_MODELS = [
|
|
17
|
+
{
|
|
18
|
+
id: "llama3-70b-8192",
|
|
19
|
+
name: "Llama 3 70B",
|
|
20
|
+
provider: "groq",
|
|
21
|
+
family: "llama",
|
|
22
|
+
pricing: { input: 5.9e-7, output: 7.9e-7 },
|
|
23
|
+
contextWindow: 8192,
|
|
24
|
+
maxOutput: 8192,
|
|
25
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
26
|
+
capabilities: { reasoning: false, toolCall: true, attachment: false, structuredOutput: false },
|
|
27
|
+
},
|
|
28
|
+
{
|
|
29
|
+
id: "qwen-qwq-32b",
|
|
30
|
+
name: "Qwen QwQ 32B",
|
|
31
|
+
provider: "groq",
|
|
32
|
+
family: "qwen",
|
|
33
|
+
pricing: { input: 2.9e-7, output: 3.9e-7 },
|
|
34
|
+
contextWindow: 131072,
|
|
35
|
+
maxOutput: 16384,
|
|
36
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
37
|
+
capabilities: { reasoning: true, toolCall: true, attachment: false, structuredOutput: false },
|
|
38
|
+
},
|
|
39
|
+
{
|
|
40
|
+
id: "llama-3.1-8b-instant",
|
|
41
|
+
name: "Llama 3.1 8B Instant",
|
|
42
|
+
provider: "groq",
|
|
43
|
+
family: "llama",
|
|
44
|
+
pricing: { input: 5e-8, output: 8e-8 },
|
|
45
|
+
contextWindow: 131072,
|
|
46
|
+
maxOutput: 131072,
|
|
47
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
48
|
+
capabilities: { reasoning: false, toolCall: true, attachment: false, structuredOutput: false },
|
|
49
|
+
},
|
|
50
|
+
{
|
|
51
|
+
id: "llama-guard-3-8b",
|
|
52
|
+
name: "Llama Guard 3 8B",
|
|
53
|
+
provider: "groq",
|
|
54
|
+
family: "llama",
|
|
55
|
+
pricing: { input: 2e-7, output: 2e-7 },
|
|
56
|
+
contextWindow: 8192,
|
|
57
|
+
maxOutput: 8192,
|
|
58
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
59
|
+
capabilities: { reasoning: false, toolCall: false, attachment: false, structuredOutput: false },
|
|
60
|
+
},
|
|
61
|
+
{
|
|
62
|
+
id: "deepseek-r1-distill-llama-70b",
|
|
63
|
+
name: "DeepSeek R1 Distill Llama 70B",
|
|
64
|
+
provider: "groq",
|
|
65
|
+
family: "deepseek-thinking",
|
|
66
|
+
pricing: { input: 7.5e-7, output: 9.9e-7 },
|
|
67
|
+
contextWindow: 131072,
|
|
68
|
+
maxOutput: 8192,
|
|
69
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
70
|
+
capabilities: { reasoning: true, toolCall: true, attachment: false, structuredOutput: false },
|
|
71
|
+
},
|
|
72
|
+
{
|
|
73
|
+
id: "llama3-8b-8192",
|
|
74
|
+
name: "Llama 3 8B",
|
|
75
|
+
provider: "groq",
|
|
76
|
+
family: "llama",
|
|
77
|
+
pricing: { input: 5e-8, output: 8e-8 },
|
|
78
|
+
contextWindow: 8192,
|
|
79
|
+
maxOutput: 8192,
|
|
80
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
81
|
+
capabilities: { reasoning: false, toolCall: true, attachment: false, structuredOutput: false },
|
|
82
|
+
},
|
|
83
|
+
{
|
|
84
|
+
id: "mistral-saba-24b",
|
|
85
|
+
name: "Mistral Saba 24B",
|
|
86
|
+
provider: "groq",
|
|
87
|
+
family: "mistral",
|
|
88
|
+
pricing: { input: 7.9e-7, output: 7.9e-7 },
|
|
89
|
+
contextWindow: 32768,
|
|
90
|
+
maxOutput: 32768,
|
|
91
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
92
|
+
capabilities: { reasoning: false, toolCall: true, attachment: false, structuredOutput: false },
|
|
93
|
+
},
|
|
94
|
+
{
|
|
95
|
+
id: "llama-3.3-70b-versatile",
|
|
96
|
+
name: "Llama 3.3 70B Versatile",
|
|
97
|
+
provider: "groq",
|
|
98
|
+
family: "llama",
|
|
99
|
+
pricing: { input: 5.9e-7, output: 7.9e-7 },
|
|
100
|
+
contextWindow: 131072,
|
|
101
|
+
maxOutput: 32768,
|
|
102
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
103
|
+
capabilities: { reasoning: false, toolCall: true, attachment: false, structuredOutput: false },
|
|
104
|
+
},
|
|
105
|
+
{
|
|
106
|
+
id: "gemma2-9b-it",
|
|
107
|
+
name: "Gemma 2 9B",
|
|
108
|
+
provider: "groq",
|
|
109
|
+
family: "gemma",
|
|
110
|
+
pricing: { input: 2e-7, output: 2e-7 },
|
|
111
|
+
contextWindow: 8192,
|
|
112
|
+
maxOutput: 8192,
|
|
113
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
114
|
+
capabilities: { reasoning: false, toolCall: true, attachment: false, structuredOutput: false },
|
|
115
|
+
},
|
|
116
|
+
{
|
|
117
|
+
id: "moonshotai/kimi-k2-instruct",
|
|
118
|
+
name: "Kimi K2 Instruct",
|
|
119
|
+
provider: "groq",
|
|
120
|
+
family: "kimi",
|
|
121
|
+
pricing: { input: 0.000001, output: 0.000003 },
|
|
122
|
+
contextWindow: 131072,
|
|
123
|
+
maxOutput: 16384,
|
|
124
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
125
|
+
capabilities: { reasoning: false, toolCall: true, attachment: false, structuredOutput: false },
|
|
126
|
+
},
|
|
127
|
+
{
|
|
128
|
+
id: "moonshotai/kimi-k2-instruct-0905",
|
|
129
|
+
name: "Kimi K2 Instruct 0905",
|
|
130
|
+
provider: "groq",
|
|
131
|
+
family: "kimi",
|
|
132
|
+
pricing: { input: 0.000001, output: 0.000003 },
|
|
133
|
+
contextWindow: 262144,
|
|
134
|
+
maxOutput: 16384,
|
|
135
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
136
|
+
capabilities: { reasoning: false, toolCall: true, attachment: false, structuredOutput: true },
|
|
137
|
+
},
|
|
138
|
+
{
|
|
139
|
+
id: "qwen/qwen3-32b",
|
|
140
|
+
name: "Qwen3 32B",
|
|
141
|
+
provider: "groq",
|
|
142
|
+
family: "qwen",
|
|
143
|
+
pricing: { input: 2.9e-7, output: 5.9e-7 },
|
|
144
|
+
contextWindow: 131072,
|
|
145
|
+
maxOutput: 16384,
|
|
146
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
147
|
+
capabilities: { reasoning: true, toolCall: true, attachment: false, structuredOutput: false },
|
|
148
|
+
},
|
|
149
|
+
{
|
|
150
|
+
id: "meta-llama/llama-4-scout-17b-16e-instruct",
|
|
151
|
+
name: "Llama 4 Scout 17B",
|
|
152
|
+
provider: "groq",
|
|
153
|
+
family: "llama",
|
|
154
|
+
pricing: { input: 1.1e-7, output: 3.4e-7 },
|
|
155
|
+
contextWindow: 131072,
|
|
156
|
+
maxOutput: 8192,
|
|
157
|
+
modalities: { input: ["text", "image"], output: ["text"] },
|
|
158
|
+
capabilities: { reasoning: false, toolCall: true, attachment: false, structuredOutput: true },
|
|
159
|
+
},
|
|
160
|
+
{
|
|
161
|
+
id: "meta-llama/llama-guard-4-12b",
|
|
162
|
+
name: "Llama Guard 4 12B",
|
|
163
|
+
provider: "groq",
|
|
164
|
+
family: "llama",
|
|
165
|
+
pricing: { input: 2e-7, output: 2e-7 },
|
|
166
|
+
contextWindow: 131072,
|
|
167
|
+
maxOutput: 1024,
|
|
168
|
+
modalities: { input: ["text", "image"], output: ["text"] },
|
|
169
|
+
capabilities: { reasoning: false, toolCall: false, attachment: false, structuredOutput: false },
|
|
170
|
+
},
|
|
171
|
+
{
|
|
172
|
+
id: "meta-llama/llama-4-maverick-17b-128e-instruct",
|
|
173
|
+
name: "Llama 4 Maverick 17B",
|
|
174
|
+
provider: "groq",
|
|
175
|
+
family: "llama",
|
|
176
|
+
pricing: { input: 2e-7, output: 6e-7 },
|
|
177
|
+
contextWindow: 131072,
|
|
178
|
+
maxOutput: 8192,
|
|
179
|
+
modalities: { input: ["text", "image"], output: ["text"] },
|
|
180
|
+
capabilities: { reasoning: false, toolCall: true, attachment: false, structuredOutput: true },
|
|
181
|
+
},
|
|
182
|
+
{
|
|
183
|
+
id: "openai/gpt-oss-120b",
|
|
184
|
+
name: "GPT OSS 120B",
|
|
185
|
+
provider: "groq",
|
|
186
|
+
family: "gpt-oss",
|
|
187
|
+
pricing: { input: 1.5e-7, output: 6e-7 },
|
|
188
|
+
contextWindow: 131072,
|
|
189
|
+
maxOutput: 65536,
|
|
190
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
191
|
+
capabilities: { reasoning: true, toolCall: true, attachment: false, structuredOutput: true },
|
|
192
|
+
},
|
|
193
|
+
{
|
|
194
|
+
id: "openai/gpt-oss-20b",
|
|
195
|
+
name: "GPT OSS 20B",
|
|
196
|
+
provider: "groq",
|
|
197
|
+
family: "gpt-oss",
|
|
198
|
+
pricing: { input: 7.5e-8, output: 3e-7 },
|
|
199
|
+
contextWindow: 131072,
|
|
200
|
+
maxOutput: 65536,
|
|
201
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
202
|
+
capabilities: { reasoning: true, toolCall: true, attachment: false, structuredOutput: true },
|
|
203
|
+
},
|
|
204
|
+
] as const satisfies readonly ModelDefinition[];
|
|
@@ -0,0 +1,237 @@
|
|
|
1
|
+
// ──────────────────────────────────────────────────────────────
|
|
2
|
+
// ███████╗██╗ ██╗███╗ ██╗██╗ ██╗ █████╗ ██╗
|
|
3
|
+
// ██╔════╝██║ ██║████╗ ██║██║ ██╔╝██╔══██╗██║
|
|
4
|
+
// █████╗ ██║ ██║██╔██╗ ██║█████╔╝ ███████║██║
|
|
5
|
+
// ██╔══╝ ██║ ██║██║╚██╗██║██╔═██╗ ██╔══██║██║
|
|
6
|
+
// ██║ ╚██████╔╝██║ ╚████║██║ ██╗██║ ██║██║
|
|
7
|
+
// ╚═╝ ╚═════╝ ╚═╝ ╚═══╝╚═╝ ╚═╝╚═╝ ╚═╝╚═╝
|
|
8
|
+
//
|
|
9
|
+
// AUTO-GENERATED — DO NOT EDIT
|
|
10
|
+
// Source: https://models.dev
|
|
11
|
+
// Update: pnpm --filter=@funkai/models generate:models
|
|
12
|
+
// ──────────────────────────────────────────────────────────────
|
|
13
|
+
|
|
14
|
+
import type { ModelDefinition } from "../types.js";
|
|
15
|
+
|
|
16
|
+
export const HUGGINGFACE_MODELS = [
|
|
17
|
+
{
|
|
18
|
+
id: "zai-org/GLM-4.7-Flash",
|
|
19
|
+
name: "GLM-4.7-Flash",
|
|
20
|
+
provider: "huggingface",
|
|
21
|
+
family: "glm",
|
|
22
|
+
pricing: { input: 0, output: 0 },
|
|
23
|
+
contextWindow: 200000,
|
|
24
|
+
maxOutput: 128000,
|
|
25
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
26
|
+
capabilities: { reasoning: true, toolCall: true, attachment: false, structuredOutput: false },
|
|
27
|
+
},
|
|
28
|
+
{
|
|
29
|
+
id: "zai-org/GLM-4.7",
|
|
30
|
+
name: "GLM-4.7",
|
|
31
|
+
provider: "huggingface",
|
|
32
|
+
family: "glm",
|
|
33
|
+
pricing: { input: 6e-7, output: 0.0000022, cacheRead: 1.1e-7 },
|
|
34
|
+
contextWindow: 204800,
|
|
35
|
+
maxOutput: 131072,
|
|
36
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
37
|
+
capabilities: { reasoning: true, toolCall: true, attachment: false, structuredOutput: false },
|
|
38
|
+
},
|
|
39
|
+
{
|
|
40
|
+
id: "zai-org/GLM-5",
|
|
41
|
+
name: "GLM-5",
|
|
42
|
+
provider: "huggingface",
|
|
43
|
+
family: "glm",
|
|
44
|
+
pricing: { input: 0.000001, output: 0.0000032, cacheRead: 2e-7 },
|
|
45
|
+
contextWindow: 202752,
|
|
46
|
+
maxOutput: 131072,
|
|
47
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
48
|
+
capabilities: { reasoning: true, toolCall: true, attachment: false, structuredOutput: false },
|
|
49
|
+
},
|
|
50
|
+
{
|
|
51
|
+
id: "XiaomiMiMo/MiMo-V2-Flash",
|
|
52
|
+
name: "MiMo-V2-Flash",
|
|
53
|
+
provider: "huggingface",
|
|
54
|
+
family: "mimo",
|
|
55
|
+
pricing: { input: 1e-7, output: 3e-7 },
|
|
56
|
+
contextWindow: 262144,
|
|
57
|
+
maxOutput: 4096,
|
|
58
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
59
|
+
capabilities: { reasoning: true, toolCall: true, attachment: false, structuredOutput: false },
|
|
60
|
+
},
|
|
61
|
+
{
|
|
62
|
+
id: "MiniMaxAI/MiniMax-M2.5",
|
|
63
|
+
name: "MiniMax-M2.5",
|
|
64
|
+
provider: "huggingface",
|
|
65
|
+
family: "minimax",
|
|
66
|
+
pricing: { input: 3e-7, output: 0.0000012, cacheRead: 3e-8 },
|
|
67
|
+
contextWindow: 204800,
|
|
68
|
+
maxOutput: 131072,
|
|
69
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
70
|
+
capabilities: { reasoning: true, toolCall: true, attachment: false, structuredOutput: false },
|
|
71
|
+
},
|
|
72
|
+
{
|
|
73
|
+
id: "MiniMaxAI/MiniMax-M2.1",
|
|
74
|
+
name: "MiniMax-M2.1",
|
|
75
|
+
provider: "huggingface",
|
|
76
|
+
family: "minimax",
|
|
77
|
+
pricing: { input: 3e-7, output: 0.0000012 },
|
|
78
|
+
contextWindow: 204800,
|
|
79
|
+
maxOutput: 131072,
|
|
80
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
81
|
+
capabilities: { reasoning: true, toolCall: true, attachment: false, structuredOutput: false },
|
|
82
|
+
},
|
|
83
|
+
{
|
|
84
|
+
id: "deepseek-ai/DeepSeek-R1-0528",
|
|
85
|
+
name: "DeepSeek-R1-0528",
|
|
86
|
+
provider: "huggingface",
|
|
87
|
+
family: "deepseek-thinking",
|
|
88
|
+
pricing: { input: 0.000003, output: 0.000005 },
|
|
89
|
+
contextWindow: 163840,
|
|
90
|
+
maxOutput: 163840,
|
|
91
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
92
|
+
capabilities: { reasoning: true, toolCall: true, attachment: false, structuredOutput: false },
|
|
93
|
+
},
|
|
94
|
+
{
|
|
95
|
+
id: "deepseek-ai/DeepSeek-V3.2",
|
|
96
|
+
name: "DeepSeek-V3.2",
|
|
97
|
+
provider: "huggingface",
|
|
98
|
+
family: "deepseek",
|
|
99
|
+
pricing: { input: 2.8e-7, output: 4e-7 },
|
|
100
|
+
contextWindow: 163840,
|
|
101
|
+
maxOutput: 65536,
|
|
102
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
103
|
+
capabilities: { reasoning: true, toolCall: true, attachment: false, structuredOutput: false },
|
|
104
|
+
},
|
|
105
|
+
{
|
|
106
|
+
id: "moonshotai/Kimi-K2-Instruct",
|
|
107
|
+
name: "Kimi-K2-Instruct",
|
|
108
|
+
provider: "huggingface",
|
|
109
|
+
family: "kimi",
|
|
110
|
+
pricing: { input: 0.000001, output: 0.000003 },
|
|
111
|
+
contextWindow: 131072,
|
|
112
|
+
maxOutput: 16384,
|
|
113
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
114
|
+
capabilities: { reasoning: false, toolCall: true, attachment: false, structuredOutput: false },
|
|
115
|
+
},
|
|
116
|
+
{
|
|
117
|
+
id: "moonshotai/Kimi-K2-Instruct-0905",
|
|
118
|
+
name: "Kimi-K2-Instruct-0905",
|
|
119
|
+
provider: "huggingface",
|
|
120
|
+
family: "kimi",
|
|
121
|
+
pricing: { input: 0.000001, output: 0.000003 },
|
|
122
|
+
contextWindow: 262144,
|
|
123
|
+
maxOutput: 16384,
|
|
124
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
125
|
+
capabilities: { reasoning: false, toolCall: true, attachment: false, structuredOutput: false },
|
|
126
|
+
},
|
|
127
|
+
{
|
|
128
|
+
id: "moonshotai/Kimi-K2.5",
|
|
129
|
+
name: "Kimi-K2.5",
|
|
130
|
+
provider: "huggingface",
|
|
131
|
+
family: "kimi",
|
|
132
|
+
pricing: { input: 6e-7, output: 0.000003, cacheRead: 1e-7 },
|
|
133
|
+
contextWindow: 262144,
|
|
134
|
+
maxOutput: 262144,
|
|
135
|
+
modalities: { input: ["text", "image", "video"], output: ["text"] },
|
|
136
|
+
capabilities: { reasoning: true, toolCall: true, attachment: true, structuredOutput: false },
|
|
137
|
+
},
|
|
138
|
+
{
|
|
139
|
+
id: "moonshotai/Kimi-K2-Thinking",
|
|
140
|
+
name: "Kimi-K2-Thinking",
|
|
141
|
+
provider: "huggingface",
|
|
142
|
+
family: "kimi-thinking",
|
|
143
|
+
pricing: { input: 6e-7, output: 0.0000025, cacheRead: 1.5e-7 },
|
|
144
|
+
contextWindow: 262144,
|
|
145
|
+
maxOutput: 262144,
|
|
146
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
147
|
+
capabilities: { reasoning: true, toolCall: true, attachment: false, structuredOutput: false },
|
|
148
|
+
},
|
|
149
|
+
{
|
|
150
|
+
id: "Qwen/Qwen3-Next-80B-A3B-Instruct",
|
|
151
|
+
name: "Qwen3-Next-80B-A3B-Instruct",
|
|
152
|
+
provider: "huggingface",
|
|
153
|
+
family: "qwen",
|
|
154
|
+
pricing: { input: 2.5e-7, output: 0.000001 },
|
|
155
|
+
contextWindow: 262144,
|
|
156
|
+
maxOutput: 66536,
|
|
157
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
158
|
+
capabilities: { reasoning: false, toolCall: true, attachment: false, structuredOutput: false },
|
|
159
|
+
},
|
|
160
|
+
{
|
|
161
|
+
id: "Qwen/Qwen3.5-397B-A17B",
|
|
162
|
+
name: "Qwen3.5-397B-A17B",
|
|
163
|
+
provider: "huggingface",
|
|
164
|
+
family: "qwen",
|
|
165
|
+
pricing: { input: 6e-7, output: 0.0000036 },
|
|
166
|
+
contextWindow: 262144,
|
|
167
|
+
maxOutput: 32768,
|
|
168
|
+
modalities: { input: ["text", "image"], output: ["text"] },
|
|
169
|
+
capabilities: { reasoning: true, toolCall: true, attachment: true, structuredOutput: false },
|
|
170
|
+
},
|
|
171
|
+
{
|
|
172
|
+
id: "Qwen/Qwen3-235B-A22B-Thinking-2507",
|
|
173
|
+
name: "Qwen3-235B-A22B-Thinking-2507",
|
|
174
|
+
provider: "huggingface",
|
|
175
|
+
family: "qwen",
|
|
176
|
+
pricing: { input: 3e-7, output: 0.000003 },
|
|
177
|
+
contextWindow: 262144,
|
|
178
|
+
maxOutput: 131072,
|
|
179
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
180
|
+
capabilities: { reasoning: true, toolCall: true, attachment: false, structuredOutput: false },
|
|
181
|
+
},
|
|
182
|
+
{
|
|
183
|
+
id: "Qwen/Qwen3-Coder-Next",
|
|
184
|
+
name: "Qwen3-Coder-Next",
|
|
185
|
+
provider: "huggingface",
|
|
186
|
+
family: "qwen",
|
|
187
|
+
pricing: { input: 2e-7, output: 0.0000015 },
|
|
188
|
+
contextWindow: 262144,
|
|
189
|
+
maxOutput: 65536,
|
|
190
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
191
|
+
capabilities: { reasoning: false, toolCall: true, attachment: false, structuredOutput: false },
|
|
192
|
+
},
|
|
193
|
+
{
|
|
194
|
+
id: "Qwen/Qwen3-Coder-480B-A35B-Instruct",
|
|
195
|
+
name: "Qwen3-Coder-480B-A35B-Instruct",
|
|
196
|
+
provider: "huggingface",
|
|
197
|
+
family: "qwen",
|
|
198
|
+
pricing: { input: 0.000002, output: 0.000002 },
|
|
199
|
+
contextWindow: 262144,
|
|
200
|
+
maxOutput: 66536,
|
|
201
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
202
|
+
capabilities: { reasoning: false, toolCall: true, attachment: false, structuredOutput: false },
|
|
203
|
+
},
|
|
204
|
+
{
|
|
205
|
+
id: "Qwen/Qwen3-Embedding-4B",
|
|
206
|
+
name: "Qwen 3 Embedding 4B",
|
|
207
|
+
provider: "huggingface",
|
|
208
|
+
family: "qwen",
|
|
209
|
+
pricing: { input: 1e-8, output: 0 },
|
|
210
|
+
contextWindow: 32000,
|
|
211
|
+
maxOutput: 2048,
|
|
212
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
213
|
+
capabilities: { reasoning: false, toolCall: false, attachment: false, structuredOutput: false },
|
|
214
|
+
},
|
|
215
|
+
{
|
|
216
|
+
id: "Qwen/Qwen3-Embedding-8B",
|
|
217
|
+
name: "Qwen 3 Embedding 8B",
|
|
218
|
+
provider: "huggingface",
|
|
219
|
+
family: "qwen",
|
|
220
|
+
pricing: { input: 1e-8, output: 0 },
|
|
221
|
+
contextWindow: 32000,
|
|
222
|
+
maxOutput: 4096,
|
|
223
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
224
|
+
capabilities: { reasoning: false, toolCall: false, attachment: false, structuredOutput: false },
|
|
225
|
+
},
|
|
226
|
+
{
|
|
227
|
+
id: "Qwen/Qwen3-Next-80B-A3B-Thinking",
|
|
228
|
+
name: "Qwen3-Next-80B-A3B-Thinking",
|
|
229
|
+
provider: "huggingface",
|
|
230
|
+
family: "qwen",
|
|
231
|
+
pricing: { input: 3e-7, output: 0.000002 },
|
|
232
|
+
contextWindow: 262144,
|
|
233
|
+
maxOutput: 131072,
|
|
234
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
235
|
+
capabilities: { reasoning: false, toolCall: true, attachment: false, structuredOutput: false },
|
|
236
|
+
},
|
|
237
|
+
] as const satisfies readonly ModelDefinition[];
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
// ──────────────────────────────────────────────────────────────
|
|
2
|
+
// ███████╗██╗ ██╗███╗ ██╗██╗ ██╗ █████╗ ██╗
|
|
3
|
+
// ██╔════╝██║ ██║████╗ ██║██║ ██╔╝██╔══██╗██║
|
|
4
|
+
// █████╗ ██║ ██║██╔██╗ ██║█████╔╝ ███████║██║
|
|
5
|
+
// ██╔══╝ ██║ ██║██║╚██╗██║██╔═██╗ ██╔══██║██║
|
|
6
|
+
// ██║ ╚██████╔╝██║ ╚████║██║ ██╗██║ ██║██║
|
|
7
|
+
// ╚═╝ ╚═════╝ ╚═╝ ╚═══╝╚═╝ ╚═╝╚═╝ ╚═╝╚═╝
|
|
8
|
+
//
|
|
9
|
+
// AUTO-GENERATED — DO NOT EDIT
|
|
10
|
+
// Source: https://models.dev
|
|
11
|
+
// Update: pnpm --filter=@funkai/models generate:models
|
|
12
|
+
// ──────────────────────────────────────────────────────────────
|
|
13
|
+
|
|
14
|
+
import type { ModelDefinition } from "../types.js";
|
|
15
|
+
|
|
16
|
+
export const INCEPTION_MODELS = [
|
|
17
|
+
{
|
|
18
|
+
id: "mercury-2",
|
|
19
|
+
name: "Mercury 2",
|
|
20
|
+
provider: "inception",
|
|
21
|
+
family: "mercury",
|
|
22
|
+
pricing: { input: 2.5e-7, output: 7.5e-7, cacheRead: 2.5e-8 },
|
|
23
|
+
contextWindow: 128000,
|
|
24
|
+
maxOutput: 50000,
|
|
25
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
26
|
+
capabilities: { reasoning: true, toolCall: true, attachment: false, structuredOutput: true },
|
|
27
|
+
},
|
|
28
|
+
{
|
|
29
|
+
id: "mercury",
|
|
30
|
+
name: "Mercury",
|
|
31
|
+
provider: "inception",
|
|
32
|
+
family: "mercury",
|
|
33
|
+
pricing: { input: 2.5e-7, output: 0.000001, cacheRead: 2.5e-7, cacheWrite: 0.000001 },
|
|
34
|
+
contextWindow: 128000,
|
|
35
|
+
maxOutput: 16384,
|
|
36
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
37
|
+
capabilities: { reasoning: false, toolCall: true, attachment: false, structuredOutput: false },
|
|
38
|
+
},
|
|
39
|
+
{
|
|
40
|
+
id: "mercury-edit",
|
|
41
|
+
name: "Mercury Edit",
|
|
42
|
+
provider: "inception",
|
|
43
|
+
family: "",
|
|
44
|
+
pricing: { input: 2.5e-7, output: 7.5e-7, cacheRead: 2.5e-8 },
|
|
45
|
+
contextWindow: 128000,
|
|
46
|
+
maxOutput: 8192,
|
|
47
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
48
|
+
capabilities: { reasoning: true, toolCall: false, attachment: false, structuredOutput: false },
|
|
49
|
+
},
|
|
50
|
+
{
|
|
51
|
+
id: "mercury-coder",
|
|
52
|
+
name: "Mercury Coder",
|
|
53
|
+
provider: "inception",
|
|
54
|
+
family: "mercury",
|
|
55
|
+
pricing: { input: 2.5e-7, output: 0.000001, cacheRead: 2.5e-7, cacheWrite: 0.000001 },
|
|
56
|
+
contextWindow: 128000,
|
|
57
|
+
maxOutput: 16384,
|
|
58
|
+
modalities: { input: ["text"], output: ["text"] },
|
|
59
|
+
capabilities: { reasoning: false, toolCall: true, attachment: false, structuredOutput: false },
|
|
60
|
+
},
|
|
61
|
+
] as const satisfies readonly ModelDefinition[];
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
// ──────────────────────────────────────────────────────────────
|
|
2
|
+
// ███████╗██╗ ██╗███╗ ██╗██╗ ██╗ █████╗ ██╗
|
|
3
|
+
// ██╔════╝██║ ██║████╗ ██║██║ ██╔╝██╔══██╗██║
|
|
4
|
+
// █████╗ ██║ ██║██╔██╗ ██║█████╔╝ ███████║██║
|
|
5
|
+
// ██╔══╝ ██║ ██║██║╚██╗██║██╔═██╗ ██╔══██║██║
|
|
6
|
+
// ██║ ╚██████╔╝██║ ╚████║██║ ██╗██║ ██║██║
|
|
7
|
+
// ╚═╝ ╚═════╝ ╚═╝ ╚═══╝╚═╝ ╚═╝╚═╝ ╚═╝╚═╝
|
|
8
|
+
//
|
|
9
|
+
// AUTO-GENERATED — DO NOT EDIT
|
|
10
|
+
// Source: https://models.dev
|
|
11
|
+
// Update: pnpm --filter=@funkai/models generate:models
|
|
12
|
+
// ──────────────────────────────────────────────────────────────
|
|
13
|
+
|
|
14
|
+
import type { ModelDefinition } from "../types.js";
|
|
15
|
+
import { ALIBABA_MODELS } from "./alibaba.js";
|
|
16
|
+
import { AMAZON_BEDROCK_MODELS } from "./amazon-bedrock.js";
|
|
17
|
+
import { ANTHROPIC_MODELS } from "./anthropic.js";
|
|
18
|
+
import { CEREBRAS_MODELS } from "./cerebras.js";
|
|
19
|
+
import { COHERE_MODELS } from "./cohere.js";
|
|
20
|
+
import { DEEPINFRA_MODELS } from "./deepinfra.js";
|
|
21
|
+
import { DEEPSEEK_MODELS } from "./deepseek.js";
|
|
22
|
+
import { FIREWORKS_AI_MODELS } from "./fireworks-ai.js";
|
|
23
|
+
import { GOOGLE_VERTEX_MODELS } from "./google-vertex.js";
|
|
24
|
+
import { GOOGLE_MODELS } from "./google.js";
|
|
25
|
+
import { GROQ_MODELS } from "./groq.js";
|
|
26
|
+
import { HUGGINGFACE_MODELS } from "./huggingface.js";
|
|
27
|
+
import { INCEPTION_MODELS } from "./inception.js";
|
|
28
|
+
import { LLAMA_MODELS } from "./llama.js";
|
|
29
|
+
import { MISTRAL_MODELS } from "./mistral.js";
|
|
30
|
+
import { NVIDIA_MODELS } from "./nvidia.js";
|
|
31
|
+
import { OPENAI_MODELS } from "./openai.js";
|
|
32
|
+
import { OPENROUTER_MODELS } from "./openrouter.js";
|
|
33
|
+
import { PERPLEXITY_MODELS } from "./perplexity.js";
|
|
34
|
+
import { TOGETHERAI_MODELS } from "./togetherai.js";
|
|
35
|
+
import { XAI_MODELS } from "./xai.js";
|
|
36
|
+
|
|
37
|
+
// Aggregated catalog of every model definition across all supported
// providers. The spread order below is the catalog's iteration order
// (OpenAI first, Inception last) — keep it stable, since any consumer
// doing first-match lookups will observe it.
//
// `as const` keeps each entry's literal types (so literal model-id
// unions can be derived from this value), while `satisfies readonly
// ModelDefinition[]` validates every entry against the schema without
// widening the inferred type. Do not replace the literal spread with
// `.flat()`/`concat` — that would erase the literal inference.
export const MODELS = [
  ...OPENAI_MODELS,
  ...ANTHROPIC_MODELS,
  ...GOOGLE_MODELS,
  ...GOOGLE_VERTEX_MODELS,
  ...MISTRAL_MODELS,
  ...AMAZON_BEDROCK_MODELS,
  ...GROQ_MODELS,
  ...DEEPSEEK_MODELS,
  ...XAI_MODELS,
  ...COHERE_MODELS,
  ...FIREWORKS_AI_MODELS,
  ...TOGETHERAI_MODELS,
  ...DEEPINFRA_MODELS,
  ...CEREBRAS_MODELS,
  ...PERPLEXITY_MODELS,
  ...OPENROUTER_MODELS,
  ...LLAMA_MODELS,
  ...ALIBABA_MODELS,
  ...NVIDIA_MODELS,
  ...HUGGINGFACE_MODELS,
  ...INCEPTION_MODELS,
] as const satisfies readonly ModelDefinition[];
|
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
// ──────────────────────────────────────────────────────────────
|
|
2
|
+
// ███████╗██╗ ██╗███╗ ██╗██╗ ██╗ █████╗ ██╗
|
|
3
|
+
// ██╔════╝██║ ██║████╗ ██║██║ ██╔╝██╔══██╗██║
|
|
4
|
+
// █████╗ ██║ ██║██╔██╗ ██║█████╔╝ ███████║██║
|
|
5
|
+
// ██╔══╝ ██║ ██║██║╚██╗██║██╔═██╗ ██╔══██║██║
|
|
6
|
+
// ██║ ╚██████╔╝██║ ╚████║██║ ██╗██║ ██║██║
|
|
7
|
+
// ╚═╝ ╚═════╝ ╚═╝ ╚═══╝╚═╝ ╚═╝╚═╝ ╚═╝╚═╝
|
|
8
|
+
//
|
|
9
|
+
// AUTO-GENERATED — DO NOT EDIT
|
|
10
|
+
// Source: https://models.dev
|
|
11
|
+
// Update: pnpm --filter=@funkai/models generate:models
|
|
12
|
+
// ──────────────────────────────────────────────────────────────
|
|
13
|
+
|
|
14
|
+
import type { ModelDefinition } from "../types.js";
|
|
15
|
+
|
|
16
|
+
// Model catalog for the "llama" provider, generated from models.dev.
// NOTE(review): every entry reports `pricing: { input: 0, output: 0 }` —
// presumably a free tier as published upstream; confirm before using
// these figures for cost accounting.
// NOTE(review): several text-only entries (no "image" input modality)
// still set `attachment: true`; this mirrors the upstream data — verify
// against the provider if attachment support matters to callers.
export const LLAMA_MODELS = [
  {
    id: "cerebras-llama-4-maverick-17b-128e-instruct",
    name: "Cerebras-Llama-4-Maverick-17B-128E-Instruct",
    provider: "llama",
    family: "llama",
    pricing: { input: 0, output: 0 },
    contextWindow: 128000,
    maxOutput: 4096,
    modalities: { input: ["text"], output: ["text"] },
    capabilities: { reasoning: false, toolCall: true, attachment: true, structuredOutput: false },
  },
  {
    id: "llama-4-scout-17b-16e-instruct-fp8",
    name: "Llama-4-Scout-17B-16E-Instruct-FP8",
    provider: "llama",
    family: "llama",
    pricing: { input: 0, output: 0 },
    contextWindow: 128000,
    maxOutput: 4096,
    // Vision-capable variant: accepts image input alongside text.
    modalities: { input: ["text", "image"], output: ["text"] },
    capabilities: { reasoning: false, toolCall: true, attachment: true, structuredOutput: false },
  },
  {
    id: "llama-3.3-8b-instruct",
    name: "Llama-3.3-8B-Instruct",
    provider: "llama",
    family: "llama",
    pricing: { input: 0, output: 0 },
    contextWindow: 128000,
    maxOutput: 4096,
    modalities: { input: ["text"], output: ["text"] },
    capabilities: { reasoning: false, toolCall: true, attachment: true, structuredOutput: false },
  },
  {
    id: "groq-llama-4-maverick-17b-128e-instruct",
    name: "Groq-Llama-4-Maverick-17B-128E-Instruct",
    provider: "llama",
    family: "llama",
    pricing: { input: 0, output: 0 },
    contextWindow: 128000,
    maxOutput: 4096,
    modalities: { input: ["text"], output: ["text"] },
    capabilities: { reasoning: false, toolCall: true, attachment: true, structuredOutput: false },
  },
  {
    id: "llama-3.3-70b-instruct",
    name: "Llama-3.3-70B-Instruct",
    provider: "llama",
    family: "llama",
    pricing: { input: 0, output: 0 },
    contextWindow: 128000,
    maxOutput: 4096,
    modalities: { input: ["text"], output: ["text"] },
    capabilities: { reasoning: false, toolCall: true, attachment: true, structuredOutput: false },
  },
  {
    id: "cerebras-llama-4-scout-17b-16e-instruct",
    name: "Cerebras-Llama-4-Scout-17B-16E-Instruct",
    provider: "llama",
    family: "llama",
    pricing: { input: 0, output: 0 },
    contextWindow: 128000,
    maxOutput: 4096,
    modalities: { input: ["text"], output: ["text"] },
    capabilities: { reasoning: false, toolCall: true, attachment: true, structuredOutput: false },
  },
  {
    id: "llama-4-maverick-17b-128e-instruct-fp8",
    name: "Llama-4-Maverick-17B-128E-Instruct-FP8",
    provider: "llama",
    family: "llama",
    pricing: { input: 0, output: 0 },
    contextWindow: 128000,
    maxOutput: 4096,
    // Vision-capable variant: accepts image input alongside text.
    modalities: { input: ["text", "image"], output: ["text"] },
    capabilities: { reasoning: false, toolCall: true, attachment: true, structuredOutput: false },
  },
// `as const` keeps literal entry types; `satisfies` validates the shape
// against ModelDefinition without widening the inferred type.
] as const satisfies readonly ModelDefinition[];
|