cogeai 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +122 -0
- package/bin/publish.sh +27 -0
- package/bin/sync-models.js +60 -0
- package/coge.js +479 -0
- package/lib/bandit.js +198 -0
- package/lib/config.js +129 -0
- package/lib/default-config.json +422 -0
- package/lib/init-config.js +12 -0
- package/lib/model-classify.js +70 -0
- package/lib/stats.js +83 -0
- package/package.json +39 -0
- package/providers/cerebras.js +11 -0
- package/providers/cloudflare.js +41 -0
- package/providers/codestral.js +7 -0
- package/providers/cohere.js +60 -0
- package/providers/gemini.js +52 -0
- package/providers/github-models.js +29 -0
- package/providers/groq.js +11 -0
- package/providers/huggingface.js +14 -0
- package/providers/index.js +156 -0
- package/providers/mistral.js +12 -0
- package/providers/ollama.js +20 -0
- package/providers/openai-compatible.js +97 -0
- package/providers/openai.js +11 -0
- package/providers/openrouter.js +71 -0
- package/providers/vercel-ai.js +11 -0
|
@@ -0,0 +1,422 @@
|
|
|
1
|
+
{
|
|
2
|
+
"provider": "gemini",
|
|
3
|
+
"model": "gemini-2.5-flash",
|
|
4
|
+
"strategy": "auto",
|
|
5
|
+
"topModels": [
|
|
6
|
+
"gemini-2.5-flash",
|
|
7
|
+
"gemini-2.5-pro",
|
|
8
|
+
"gpt-4o-mini",
|
|
9
|
+
"gpt-4.1-mini",
|
|
10
|
+
"gpt-4o",
|
|
11
|
+
"gpt-4.1",
|
|
12
|
+
"mistral-small-latest",
|
|
13
|
+
"mistral-large-latest",
|
|
14
|
+
"codestral-latest",
|
|
15
|
+
"llama-3.3-70b-versatile",
|
|
16
|
+
"command-a-03-2025",
|
|
17
|
+
"devstral-small-latest"
|
|
18
|
+
],
|
|
19
|
+
"providers": {
|
|
20
|
+
"gemini": {
|
|
21
|
+
"default": "gemini-2.5-flash",
|
|
22
|
+
"available": [
|
|
23
|
+
"gemini-2.5-flash",
|
|
24
|
+
"gemini-2.5-pro",
|
|
25
|
+
"gemma-3-1b-it",
|
|
26
|
+
"nano-banana-pro-preview",
|
|
27
|
+
"gemini-2.0-flash",
|
|
28
|
+
"gemini-2.0-flash-001",
|
|
29
|
+
"gemini-2.0-flash-lite-001",
|
|
30
|
+
"gemini-2.0-flash-lite",
|
|
31
|
+
"gemma-3-4b-it",
|
|
32
|
+
"gemma-3-12b-it",
|
|
33
|
+
"gemma-3-27b-it",
|
|
34
|
+
"gemma-3n-e4b-it",
|
|
35
|
+
"gemma-3n-e2b-it",
|
|
36
|
+
"gemini-flash-latest",
|
|
37
|
+
"gemini-flash-lite-latest",
|
|
38
|
+
"gemini-pro-latest",
|
|
39
|
+
"gemini-2.5-flash-lite",
|
|
40
|
+
"gemini-2.5-flash-lite-preview-09-2025",
|
|
41
|
+
"gemini-3-pro-preview",
|
|
42
|
+
"gemini-3-flash-preview",
|
|
43
|
+
"gemini-3.1-pro-preview",
|
|
44
|
+
"gemini-3.1-pro-preview-customtools",
|
|
45
|
+
"gemini-robotics-er-1.5-preview",
|
|
46
|
+
"gemini-2.5-computer-use-preview-10-2025",
|
|
47
|
+
"deep-research-pro-preview-12-2025",
|
|
48
|
+
"gemini-2.5-flash-preview-tts",
|
|
49
|
+
"gemini-2.5-pro-preview-tts",
|
|
50
|
+
"gemini-2.5-flash-image",
|
|
51
|
+
"gemini-3-pro-image-preview",
|
|
52
|
+
"gemini-embedding-001",
|
|
53
|
+
"gemini-2.5-flash-native-audio-latest",
|
|
54
|
+
"gemini-2.5-flash-native-audio-preview-09-2025",
|
|
55
|
+
"gemini-2.5-flash-native-audio-preview-12-2025"
|
|
56
|
+
]
|
|
57
|
+
},
|
|
58
|
+
"openrouter": {
|
|
59
|
+
"default": "nvidia/nemotron-3-nano-30b-a3b:free",
|
|
60
|
+
"available": [
|
|
61
|
+
"nvidia/nemotron-3-nano-30b-a3b:free",
|
|
62
|
+
"google/gemini-2.5-flash:free",
|
|
63
|
+
"meta-llama/llama-3.3-70b-instruct:free"
|
|
64
|
+
]
|
|
65
|
+
},
|
|
66
|
+
"openai": {
|
|
67
|
+
"default": "gpt-4o-mini",
|
|
68
|
+
"available": [
|
|
69
|
+
"gpt-4o-mini",
|
|
70
|
+
"gpt-4o",
|
|
71
|
+
"gpt-4.1-mini",
|
|
72
|
+
"gpt-4.1"
|
|
73
|
+
]
|
|
74
|
+
},
|
|
75
|
+
"ollama": {
|
|
76
|
+
"default": "llama3.2",
|
|
77
|
+
"available": [
|
|
78
|
+
"llama3.2",
|
|
79
|
+
"llama3.1",
|
|
80
|
+
"mistral",
|
|
81
|
+
"codellama"
|
|
82
|
+
]
|
|
83
|
+
},
|
|
84
|
+
"cerebras": {
|
|
85
|
+
"default": "qwen-3-235b-a22b-instruct-2507",
|
|
86
|
+
"available": [
|
|
87
|
+
"llama3.1-8b",
|
|
88
|
+
"gpt-oss-120b",
|
|
89
|
+
"qwen-3-235b-a22b-instruct-2507",
|
|
90
|
+
"zai-glm-4.7"
|
|
91
|
+
]
|
|
92
|
+
},
|
|
93
|
+
"cloudflare": {
|
|
94
|
+
"default": "@cf/meta/llama-3.3-70b-instruct-fp8-fast",
|
|
95
|
+
"available": [
|
|
96
|
+
"@cf/google/gemma-2b-it-lora",
|
|
97
|
+
"@hf/nexusflow/starling-lm-7b-beta",
|
|
98
|
+
"@cf/meta/llama-3-8b-instruct",
|
|
99
|
+
"@cf/meta/llama-3.2-3b-instruct",
|
|
100
|
+
"@hf/thebloke/neural-chat-7b-v3-1-awq",
|
|
101
|
+
"@cf/meta/llama-guard-3-8b",
|
|
102
|
+
"@cf/meta/llama-2-7b-chat-fp16",
|
|
103
|
+
"@cf/mistral/mistral-7b-instruct-v0.1",
|
|
104
|
+
"@cf/mistral/mistral-7b-instruct-v0.2-lora",
|
|
105
|
+
"@hf/mistral/mistral-7b-instruct-v0.2",
|
|
106
|
+
"@cf/fblgit/una-cybertron-7b-v2-bf16",
|
|
107
|
+
"@cf/thebloke/discolm-german-7b-v1-awq",
|
|
108
|
+
"@cf/meta/llama-2-7b-chat-int8",
|
|
109
|
+
"@cf/meta/llama-3.1-8b-instruct-fp8",
|
|
110
|
+
"@hf/thebloke/mistral-7b-instruct-v0.1-awq",
|
|
111
|
+
"@cf/qwen/qwen1.5-7b-chat-awq",
|
|
112
|
+
"@cf/meta/llama-3.2-1b-instruct",
|
|
113
|
+
"@cf/meta-llama/llama-2-7b-chat-hf-lora",
|
|
114
|
+
"@cf/meta/llama-3.3-70b-instruct-fp8-fast",
|
|
115
|
+
"@hf/thebloke/openhermes-2.5-mistral-7b-awq",
|
|
116
|
+
"@cf/deepseek-ai/deepseek-math-7b-instruct",
|
|
117
|
+
"@cf/tiiuae/falcon-7b-instruct",
|
|
118
|
+
"@hf/nousresearch/hermes-2-pro-mistral-7b",
|
|
119
|
+
"@cf/meta/llama-3.1-8b-instruct-awq",
|
|
120
|
+
"@hf/thebloke/zephyr-7b-beta-awq",
|
|
121
|
+
"@cf/google/gemma-7b-it-lora",
|
|
122
|
+
"@cf/mistralai/mistral-small-3.1-24b-instruct",
|
|
123
|
+
"@cf/meta/llama-3-8b-instruct-awq",
|
|
124
|
+
"@cf/defog/sqlcoder-7b-2",
|
|
125
|
+
"@hf/google/gemma-7b-it",
|
|
126
|
+
"@cf/openai/gpt-oss-120b",
|
|
127
|
+
"@cf/qwen/qwen1.5-0.5b-chat",
|
|
128
|
+
"@cf/tinyllama/tinyllama-1.1b-chat-v1.0",
|
|
129
|
+
"@cf/deepseek-ai/deepseek-r1-distill-qwen-32b",
|
|
130
|
+
"@cf/zai-org/glm-4.7-flash",
|
|
131
|
+
"@hf/thebloke/llama-2-13b-chat-awq",
|
|
132
|
+
"@hf/thebloke/deepseek-coder-6.7b-base-awq",
|
|
133
|
+
"@cf/ibm-granite/granite-4.0-h-micro",
|
|
134
|
+
"@hf/thebloke/deepseek-coder-6.7b-instruct-awq",
|
|
135
|
+
"@cf/qwen/qwen2.5-coder-32b-instruct",
|
|
136
|
+
"@cf/aisingapore/gemma-sea-lion-v4-27b-it",
|
|
137
|
+
"@cf/qwen/qwen3-30b-a3b-fp8",
|
|
138
|
+
"@cf/qwen/qwen1.5-1.8b-chat",
|
|
139
|
+
"@cf/microsoft/phi-2",
|
|
140
|
+
"@cf/openai/gpt-oss-20b",
|
|
141
|
+
"@cf/qwen/qwen1.5-14b-chat-awq",
|
|
142
|
+
"@cf/openchat/openchat-3.5-0106",
|
|
143
|
+
"@cf/meta/llama-4-scout-17b-16e-instruct",
|
|
144
|
+
"@cf/google/gemma-3-12b-it",
|
|
145
|
+
"@cf/qwen/qwq-32b",
|
|
146
|
+
"@cf/meta/llama-3.2-11b-vision-instruct"
|
|
147
|
+
]
|
|
148
|
+
},
|
|
149
|
+
"cohere": {
|
|
150
|
+
"default": "command-a-03-2025",
|
|
151
|
+
"available": [
|
|
152
|
+
"command-a-03-2025",
|
|
153
|
+
"command-r-plus",
|
|
154
|
+
"command-r"
|
|
155
|
+
]
|
|
156
|
+
},
|
|
157
|
+
"github-models": {
|
|
158
|
+
"default": "openai/gpt-4o-mini",
|
|
159
|
+
"blacklist": [
|
|
160
|
+
"openai/gpt-5-mini"
|
|
161
|
+
],
|
|
162
|
+
"available": [
|
|
163
|
+
"openai/gpt-4.1-mini",
|
|
164
|
+
"openai/gpt-4.1-nano",
|
|
165
|
+
"openai/gpt-5-mini",
|
|
166
|
+
"openai/gpt-5-nano",
|
|
167
|
+
"openai/o1-mini",
|
|
168
|
+
"openai/o3-mini",
|
|
169
|
+
"openai/o4-mini",
|
|
170
|
+
"meta/meta-llama-3.1-8b-instruct",
|
|
171
|
+
"mistral-ai/ministral-3b",
|
|
172
|
+
"mistral-ai/mistral-small-2503",
|
|
173
|
+
"xai/grok-3-mini",
|
|
174
|
+
"microsoft/phi-4-mini-instruct",
|
|
175
|
+
"microsoft/phi-4-mini-reasoning",
|
|
176
|
+
"openai/gpt-4o-mini",
|
|
177
|
+
"openai/gpt-4.1",
|
|
178
|
+
"openai/gpt-5",
|
|
179
|
+
"openai/gpt-5-chat",
|
|
180
|
+
"openai/o1",
|
|
181
|
+
"openai/o1-preview",
|
|
182
|
+
"openai/o3",
|
|
183
|
+
"ai21-labs/ai21-jamba-1.5-large",
|
|
184
|
+
"cohere/cohere-command-a",
|
|
185
|
+
"cohere/cohere-command-r-08-2024",
|
|
186
|
+
"cohere/cohere-command-r-plus-08-2024",
|
|
187
|
+
"deepseek/deepseek-r1",
|
|
188
|
+
"deepseek/deepseek-r1-0528",
|
|
189
|
+
"deepseek/deepseek-v3-0324",
|
|
190
|
+
"meta/llama-3.3-70b-instruct",
|
|
191
|
+
"meta/llama-4-scout-17b-16e-instruct",
|
|
192
|
+
"meta/meta-llama-3.1-405b-instruct",
|
|
193
|
+
"mistral-ai/codestral-2501",
|
|
194
|
+
"xai/grok-3",
|
|
195
|
+
"microsoft/mai-ds-r1",
|
|
196
|
+
"microsoft/phi-4",
|
|
197
|
+
"microsoft/phi-4-reasoning",
|
|
198
|
+
"openai/gpt-4o",
|
|
199
|
+
"meta/llama-4-maverick-17b-128e-instruct-fp8",
|
|
200
|
+
"mistral-ai/mistral-medium-2505",
|
|
201
|
+
"microsoft/phi-4-multimodal-instruct",
|
|
202
|
+
"openai/text-embedding-3-large",
|
|
203
|
+
"openai/text-embedding-3-small",
|
|
204
|
+
"meta/llama-3.2-11b-vision-instruct",
|
|
205
|
+
"meta/llama-3.2-90b-vision-instruct"
|
|
206
|
+
]
|
|
207
|
+
},
|
|
208
|
+
"groq": {
|
|
209
|
+
"default": "llama-3.3-70b-versatile",
|
|
210
|
+
"available": [
|
|
211
|
+
"llama-3.3-70b-versatile",
|
|
212
|
+
"llama-3.1-8b-instant",
|
|
213
|
+
"mixtral-8x7b-32768"
|
|
214
|
+
]
|
|
215
|
+
},
|
|
216
|
+
"huggingface": {
|
|
217
|
+
"default": "meta-llama/Llama-3.1-8B-Instruct",
|
|
218
|
+
"available": [
|
|
219
|
+
"meta-llama/Llama-3.1-8B-Instruct",
|
|
220
|
+
"Qwen/Qwen3-8B",
|
|
221
|
+
"Qwen/Qwen2.5-7B-Instruct",
|
|
222
|
+
"Qwen/Qwen2.5-Coder-7B-Instruct",
|
|
223
|
+
"Qwen/Qwen3-VL-8B-Instruct",
|
|
224
|
+
"meta-llama/Llama-3.2-3B-Instruct",
|
|
225
|
+
"meta-llama/Meta-Llama-3-8B-Instruct",
|
|
226
|
+
"mistralai/Mistral-7B-Instruct-v0.2",
|
|
227
|
+
"meta-llama/Llama-3.2-1B-Instruct",
|
|
228
|
+
"nvidia/NVIDIA-Nemotron-Nano-9B-v2",
|
|
229
|
+
"Qwen/Qwen2.5-VL-7B-Instruct",
|
|
230
|
+
"allenai/Molmo2-8B",
|
|
231
|
+
"HuggingFaceTB/SmolLM3-3B",
|
|
232
|
+
"swiss-ai/Apertus-8B-Instruct-2509",
|
|
233
|
+
"allenai/Olmo-3-7B-Think",
|
|
234
|
+
"deepseek-ai/DeepSeek-R1-Distill-Qwen-7B",
|
|
235
|
+
"marin-community/marin-8b-instruct",
|
|
236
|
+
"NousResearch/Hermes-2-Pro-Llama-3-8B",
|
|
237
|
+
"Sao10K/L3-8B-Stheno-v3.2",
|
|
238
|
+
"allenai/Olmo-3-7B-Instruct",
|
|
239
|
+
"arcee-ai/Trinity-Mini",
|
|
240
|
+
"Sao10K/L3-8B-Lunaris-v1",
|
|
241
|
+
"Qwen/Qwen2.5-Coder-3B-Instruct",
|
|
242
|
+
"deepseek-ai/DeepSeek-R1-Distill-Llama-8B",
|
|
243
|
+
"Qwen/Qwen3.5-397B-A17B",
|
|
244
|
+
"zai-org/GLM-5",
|
|
245
|
+
"MiniMaxAI/MiniMax-M2.5",
|
|
246
|
+
"moonshotai/Kimi-K2.5",
|
|
247
|
+
"Qwen/Qwen3-Coder-Next",
|
|
248
|
+
"zai-org/GLM-4.7-Flash",
|
|
249
|
+
"openai/gpt-oss-120b",
|
|
250
|
+
"openai/gpt-oss-20b",
|
|
251
|
+
"deepseek-ai/DeepSeek-V3.2",
|
|
252
|
+
"google/gemma-3-27b-it",
|
|
253
|
+
"Qwen/Qwen3-4B-Instruct-2507",
|
|
254
|
+
"deepseek-ai/DeepSeek-R1",
|
|
255
|
+
"deepseek-ai/DeepSeek-V3.2-Exp",
|
|
256
|
+
"Qwen/Qwen3-Coder-Next-FP8",
|
|
257
|
+
"meta-llama/Llama-3.3-70B-Instruct",
|
|
258
|
+
"zai-org/GLM-4.7",
|
|
259
|
+
"Qwen/Qwen3-4B-Thinking-2507",
|
|
260
|
+
"moonshotai/Kimi-K2-Thinking",
|
|
261
|
+
"XiaomiMiMo/MiMo-V2-Flash",
|
|
262
|
+
"google/gemma-3n-E4B-it",
|
|
263
|
+
"Qwen/Qwen3-Next-80B-A3B-Instruct",
|
|
264
|
+
"deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
|
|
265
|
+
"Qwen/Qwen3-32B",
|
|
266
|
+
"Qwen/Qwen3-VL-30B-A3B-Instruct",
|
|
267
|
+
"Qwen/Qwen3-14B",
|
|
268
|
+
"Qwen/Qwen3-Coder-30B-A3B-Instruct",
|
|
269
|
+
"moonshotai/Kimi-K2-Instruct",
|
|
270
|
+
"MiniMaxAI/MiniMax-M2.1",
|
|
271
|
+
"meta-llama/Llama-3.1-70B-Instruct",
|
|
272
|
+
"moonshotai/Kimi-K2-Instruct-0905",
|
|
273
|
+
"Qwen/Qwen3-VL-32B-Instruct",
|
|
274
|
+
"CohereLabs/command-a-reasoning-08-2025",
|
|
275
|
+
"ServiceNow-AI/Apriel-1.6-15b-Thinker",
|
|
276
|
+
"Qwen/Qwen3-VL-235B-A22B-Thinking",
|
|
277
|
+
"meta-llama/Llama-4-Scout-17B-16E-Instruct",
|
|
278
|
+
"Qwen/Qwen2.5-Coder-32B-Instruct",
|
|
279
|
+
"Qwen/Qwen3-30B-A3B",
|
|
280
|
+
"openai/gpt-oss-safeguard-20b",
|
|
281
|
+
"Qwen/Qwen3-VL-30B-A3B-Thinking",
|
|
282
|
+
"Qwen/QwQ-32B",
|
|
283
|
+
"MiniMaxAI/MiniMax-M2",
|
|
284
|
+
"zai-org/GLM-4.6V-Flash",
|
|
285
|
+
"Qwen/Qwen2.5-VL-72B-Instruct",
|
|
286
|
+
"zai-org/AutoGLM-Phone-9B-Multilingual",
|
|
287
|
+
"meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
|
|
288
|
+
"Qwen/Qwen3-Next-80B-A3B-Thinking",
|
|
289
|
+
"zai-org/GLM-4.6V",
|
|
290
|
+
"deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B",
|
|
291
|
+
"deepseek-ai/DeepSeek-Prover-V2-671B",
|
|
292
|
+
"meta-llama/Llama-4-Maverick-17B-128E-Instruct",
|
|
293
|
+
"Qwen/Qwen3-235B-A22B-Instruct-2507",
|
|
294
|
+
"deepseek-ai/DeepSeek-V3.1-Terminus",
|
|
295
|
+
"deepseek-ai/DeepSeek-R1-0528",
|
|
296
|
+
"allenai/Olmo-3.1-32B-Think",
|
|
297
|
+
"CohereLabs/c4ai-command-r7b-12-2024",
|
|
298
|
+
"Qwen/Qwen3-235B-A22B-Thinking-2507",
|
|
299
|
+
"zai-org/GLM-4.5V",
|
|
300
|
+
"deepseek-ai/DeepSeek-V3",
|
|
301
|
+
"utter-project/EuroLLM-22B-Instruct-2512",
|
|
302
|
+
"CohereLabs/c4ai-command-a-03-2025",
|
|
303
|
+
"Qwen/Qwen3-VL-235B-A22B-Instruct",
|
|
304
|
+
"zai-org/GLM-4.5-Air-FP8",
|
|
305
|
+
"allenai/Olmo-3.1-32B-Instruct",
|
|
306
|
+
"CohereLabs/command-a-translate-08-2025",
|
|
307
|
+
"EssentialAI/rnj-1-instruct",
|
|
308
|
+
"meta-llama/Llama-Guard-4-12B",
|
|
309
|
+
"meta-llama/Meta-Llama-3-70B-Instruct",
|
|
310
|
+
"deepseek-ai/DeepSeek-V3.1",
|
|
311
|
+
"Qwen/Qwen2.5-72B-Instruct",
|
|
312
|
+
"zai-org/GLM-4-32B-0414",
|
|
313
|
+
"MiniMaxAI/MiniMax-M1-80k",
|
|
314
|
+
"aisingapore/Gemma-SEA-LION-v4-27B-IT",
|
|
315
|
+
"zai-org/GLM-4.5V-FP8",
|
|
316
|
+
"alpindale/WizardLM-2-8x22B",
|
|
317
|
+
"zai-org/GLM-4.5",
|
|
318
|
+
"swiss-ai/Apertus-70B-Instruct-2509",
|
|
319
|
+
"zai-org/GLM-4.6V-FP8",
|
|
320
|
+
"Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8",
|
|
321
|
+
"zai-org/GLM-4.5-Air",
|
|
322
|
+
"aisingapore/Qwen-SEA-LION-v4-32B-IT",
|
|
323
|
+
"zai-org/GLM-4.6",
|
|
324
|
+
"zai-org/GLM-4.6-FP8",
|
|
325
|
+
"deepcogito/cogito-671b-v2.1-FP8",
|
|
326
|
+
"Sao10K/L3-70B-Euryale-v2.1",
|
|
327
|
+
"zai-org/GLM-4.7-FP8",
|
|
328
|
+
"dicta-il/DictaLM-3.0-24B-Thinking",
|
|
329
|
+
"baidu/ERNIE-4.5-VL-424B-A47B-Base-PT",
|
|
330
|
+
"Qwen/Qwen3-Coder-480B-A35B-Instruct",
|
|
331
|
+
"deepcogito/cogito-671b-v2.1",
|
|
332
|
+
"baidu/ERNIE-4.5-300B-A47B-Base-PT",
|
|
333
|
+
"baidu/ERNIE-4.5-VL-28B-A3B-PT",
|
|
334
|
+
"Qwen/Qwen2.5-VL-32B-Instruct",
|
|
335
|
+
"baidu/ERNIE-4.5-21B-A3B-PT",
|
|
336
|
+
"tokyotech-llm/Llama-3.3-Swallow-70B-Instruct-v0.4",
|
|
337
|
+
"deepseek-ai/DeepSeek-V3-0324",
|
|
338
|
+
"CohereLabs/c4ai-command-r-08-2024",
|
|
339
|
+
"deepseek-ai/DeepSeek-R1-Distill-Llama-70B",
|
|
340
|
+
"Qwen/Qwen3-235B-A22B",
|
|
341
|
+
"katanemo/Arch-Router-1.5B",
|
|
342
|
+
"CohereLabs/aya-expanse-32b",
|
|
343
|
+
"CohereLabs/c4ai-command-r7b-arabic-02-2025",
|
|
344
|
+
"CohereLabs/aya-vision-32b",
|
|
345
|
+
"CohereLabs/command-a-vision-07-2025"
|
|
346
|
+
]
|
|
347
|
+
},
|
|
348
|
+
"codestral": {
|
|
349
|
+
"default": "codestral-latest",
|
|
350
|
+
"available": [
|
|
351
|
+
"codestral-latest"
|
|
352
|
+
]
|
|
353
|
+
},
|
|
354
|
+
"mistral": {
|
|
355
|
+
"default": "mistral-small-latest",
|
|
356
|
+
"available": [
|
|
357
|
+
"codestral-latest",
|
|
358
|
+
"devstral-small-latest",
|
|
359
|
+
"mistral-small-latest",
|
|
360
|
+
"mistral-large-latest",
|
|
361
|
+
"mistral-tiny-2407",
|
|
362
|
+
"mistral-tiny-latest",
|
|
363
|
+
"devstral-small-2507",
|
|
364
|
+
"labs-devstral-small-2512",
|
|
365
|
+
"mistral-small-2506",
|
|
366
|
+
"labs-mistral-small-creative",
|
|
367
|
+
"magistral-small-2509",
|
|
368
|
+
"magistral-small-latest",
|
|
369
|
+
"voxtral-mini-2507",
|
|
370
|
+
"voxtral-mini-latest",
|
|
371
|
+
"voxtral-small-2507",
|
|
372
|
+
"voxtral-small-latest",
|
|
373
|
+
"ministral-3b-2512",
|
|
374
|
+
"ministral-3b-latest",
|
|
375
|
+
"ministral-8b-2512",
|
|
376
|
+
"ministral-8b-latest",
|
|
377
|
+
"mistral-small-2501",
|
|
378
|
+
"voxtral-mini-2602",
|
|
379
|
+
"mistral-medium-2505",
|
|
380
|
+
"mistral-medium-2508",
|
|
381
|
+
"mistral-medium-latest",
|
|
382
|
+
"mistral-medium",
|
|
383
|
+
"mistral-vibe-cli-with-tools",
|
|
384
|
+
"open-mistral-nemo",
|
|
385
|
+
"open-mistral-nemo-2407",
|
|
386
|
+
"mistral-large-2411",
|
|
387
|
+
"pixtral-large-2411",
|
|
388
|
+
"pixtral-large-latest",
|
|
389
|
+
"mistral-large-pixtral-2411",
|
|
390
|
+
"codestral-2508",
|
|
391
|
+
"devstral-medium-2507",
|
|
392
|
+
"devstral-2512",
|
|
393
|
+
"mistral-vibe-cli-latest",
|
|
394
|
+
"devstral-medium-latest",
|
|
395
|
+
"devstral-latest",
|
|
396
|
+
"magistral-medium-2509",
|
|
397
|
+
"magistral-medium-latest",
|
|
398
|
+
"mistral-large-2512",
|
|
399
|
+
"ministral-14b-2512",
|
|
400
|
+
"ministral-14b-latest",
|
|
401
|
+
"mistral-embed-2312",
|
|
402
|
+
"mistral-embed",
|
|
403
|
+
"codestral-embed",
|
|
404
|
+
"codestral-embed-2505",
|
|
405
|
+
"mistral-moderation-2411",
|
|
406
|
+
"mistral-moderation-latest",
|
|
407
|
+
"mistral-ocr-2512",
|
|
408
|
+
"mistral-ocr-latest",
|
|
409
|
+
"mistral-ocr-2505",
|
|
410
|
+
"mistral-ocr-2503",
|
|
411
|
+
"voxtral-mini-transcribe-2507"
|
|
412
|
+
]
|
|
413
|
+
},
|
|
414
|
+
"vercel-ai": {
|
|
415
|
+
"default": "gpt-4o-mini",
|
|
416
|
+
"available": [
|
|
417
|
+
"gpt-4o-mini",
|
|
418
|
+
"gpt-4o"
|
|
419
|
+
]
|
|
420
|
+
}
|
|
421
|
+
}
|
|
422
|
+
}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
#!/usr/bin/env node
// Post-install hook: make sure a default config file exists on disk.
import { writeDefaultConfigIfMissing, getConfigPath } from "./config.js";

try {
  // writeDefaultConfigIfMissing returns truthy only when it created the file.
  if (writeDefaultConfigIfMissing()) {
    console.log("coge: created default config at", getConfigPath());
  }
} catch (err) {
  // A failed init should not crash silently — report and signal failure.
  console.error("coge: failed to init config:", err.message);
  process.exit(1);
}
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
// Model-classification heuristics shared by the provider model-list fetchers.
import { createRequire } from "module";

// Load the bundled JSON config via createRequire (this module does not use
// JSON import assertions).
const require = createRequire(import.meta.url);
const defaultConfig = require("./default-config.json");

// Model IDs/descriptions matching these patterns are not text-generation chat
// models (speech, image, embedding, moderation, OCR, etc.).
const IRRELEVANT_PATTERNS = /whisper|tts|dall-e|embed|moderation|audio|image|vision|realtime|transcri|ocr/i;
// Name fragments that suggest a small/fast model tier (incl. 1B–8B sizes).
const SMALL_PATTERNS = /\bmini\b|\bsmall\b|\bnano\b|\btiny\b|\bfast\b|\binstant\b|[_-]8b\b|[_-]7b\b|[_-]3b\b|[_-]1b\b|[_-]2b\b/i;

// Curated "top" model IDs taken from the bundled default config.
const TOP_MODELS = new Set(defaultConfig.topModels ?? []);
|
|
10
|
+
|
|
11
|
+
/**
 * Classifies a model as "top", "small", "large", or "irrelevant".
 * Precedence: curated top-model list first, then API metadata, then
 * name-based heuristics.
 * @param {string} id Model ID
 * @param {{ type?: string; description?: string }} [metadata]
 * @returns {"top" | "small" | "large" | "irrelevant"}
 */
export function classifyModel(id, metadata) {
  // Curated list wins over every heuristic.
  if (TOP_MODELS.has(id)) {
    return "top";
  }

  // Metadata type, when present, can rule the model out as non-text.
  const rawType = metadata?.type;
  if (rawType) {
    const type = rawType.toLowerCase();
    const nonText = ["audio", "image", "embed", "tts"].some((word) => type.includes(word));
    if (nonText) {
      return "irrelevant";
    }
  }

  // Metadata description may also reveal a non-chat model.
  const description = metadata?.description;
  if (description && IRRELEVANT_PATTERNS.test(description)) {
    return "irrelevant";
  }

  // Fall back to name heuristics.
  if (IRRELEVANT_PATTERNS.test(id)) {
    return "irrelevant";
  }
  return SMALL_PATTERNS.test(id) ? "small" : "large";
}
|
|
38
|
+
|
|
39
|
+
/**
 * Normalizes an `available` list entry to a { id, category } object.
 * Accepts plain strings (backward compat) as well as objects; a missing
 * category is derived via classifyModel.
 * @param {string | { id: string; category?: string }} entry
 * @returns {{ id: string; category: string }}
 */
export function normalizeAvailableEntry(entry) {
  if (typeof entry !== "string") {
    const { id, category } = entry;
    return { id, category: category ?? classifyModel(id) };
  }
  return { id: entry, category: classifyModel(entry) };
}
|
|
49
|
+
|
|
50
|
+
/**
 * Reports whether a model ID appears in a provider's blacklist.
 * A missing or non-array blacklist never matches.
 * @param {string} modelId
 * @param {string[]} [blacklist]
 * @returns {boolean}
 */
export function isBlacklisted(modelId, blacklist) {
  if (!Array.isArray(blacklist)) {
    return false;
  }
  return blacklist.some((entry) => entry === modelId);
}
|
|
59
|
+
|
|
60
|
+
// Priority order for model categories; lower sorts first.
const CATEGORY_ORDER = { top: 0, small: 1, large: 2, irrelevant: 3, blacklist: 4 };

/**
 * Sorts { id, category } entries in place by category priority:
 * top → small → large → irrelevant (→ blacklist). Categories not in
 * CATEGORY_ORDER rank like "large". Mutates and returns the same array;
 * relative order within a category is preserved (stable sort).
 * @param {{ id: string; category: string }[]} entries
 * @returns {{ id: string; category: string }[]}
 */
export function sortByCategory(entries) {
  const rank = (entry) => CATEGORY_ORDER[entry.category] ?? 2;
  return entries.sort((a, b) => rank(a) - rank(b));
}
|
package/lib/stats.js
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
import fs from "fs";
|
|
2
|
+
import path from "path";
|
|
3
|
+
import { getConfigDir } from "./config.js";
|
|
4
|
+
|
|
5
|
+
// File name of the usage-stats store inside the config directory.
const STATS_FILENAME = "stats.json";

/**
 * Resolves the absolute path of stats.json inside the config directory.
 * @returns {string}
 */
function getStatsPath() {
  const dir = getConfigDir();
  return path.join(dir, STATS_FILENAME);
}
|
|
14
|
+
|
|
15
|
+
/**
 * Loads usage stats from stats.json. A missing file yields {}; any other
 * read error (and JSON parse errors) propagates to the caller.
 * @returns {Record<string, {execute: number, copy: number, cancel: number, last_used: string}>}
 */
export function loadStats() {
  let raw;
  try {
    raw = fs.readFileSync(getStatsPath(), "utf8");
  } catch (err) {
    // "File not found" is the normal first-run case, not an error.
    if (err.code === "ENOENT") {
      return {};
    }
    throw err;
  }
  return JSON.parse(raw);
}
|
|
28
|
+
|
|
29
|
+
/**
 * Persists usage stats to stats.json, creating the config directory first
 * if it does not exist yet.
 * @param {Record<string, {execute: number, copy: number, cancel: number, last_used: string}>} stats
 */
export function saveStats(stats) {
  fs.mkdirSync(getConfigDir(), { recursive: true });
  const body = JSON.stringify(stats, null, 2) + "\n";
  fs.writeFileSync(getStatsPath(), body, "utf8");
}
|
|
38
|
+
|
|
39
|
+
/**
 * Records a user action (execute/copy/cancel) for a provider:model arm and
 * immediately persists the updated stats to disk.
 * @param {string} armKey - e.g. "gemini:gemini-2.5-flash"
 * @param {"execute"|"copy"|"cancel"} action
 */
export function recordAction(armKey, action) {
  const stats = loadStats();
  // Lazily create the arm's counter record on first use.
  const arm = stats[armKey] ?? (stats[armKey] = { execute: 0, copy: 0, cancel: 0 });
  arm[action] += 1;
  arm.last_used = new Date().toISOString();
  saveStats(stats);
}
|
|
53
|
+
|
|
54
|
+
/**
|
|
55
|
+
* Formats stats as a table string for --stats output.
|
|
56
|
+
* @param {Record<string, {execute: number, copy: number, cancel: number}>} stats
|
|
57
|
+
* @returns {string}
|
|
58
|
+
*/
|
|
59
|
+
export function formatStats(stats) {
|
|
60
|
+
const keys = Object.keys(stats);
|
|
61
|
+
if (keys.length === 0) {
|
|
62
|
+
return "No usage stats recorded yet.";
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
const header = "Provider/Model Exec Copy Cancel Total Accept%";
|
|
66
|
+
const separator = "-".repeat(header.length);
|
|
67
|
+
const lines = [header, separator];
|
|
68
|
+
|
|
69
|
+
for (const key of keys) {
|
|
70
|
+
const s = stats[key];
|
|
71
|
+
const total = s.execute + s.copy + s.cancel;
|
|
72
|
+
const acceptPct = total > 0 ? Math.round(((s.execute + s.copy) / total) * 100) : 0;
|
|
73
|
+
const line = key.padEnd(36)
|
|
74
|
+
+ String(s.execute).padStart(4) + " "
|
|
75
|
+
+ String(s.copy).padStart(4) + " "
|
|
76
|
+
+ String(s.cancel).padStart(6) + " "
|
|
77
|
+
+ String(total).padStart(5) + " "
|
|
78
|
+
+ String(acceptPct).padStart(6) + "%";
|
|
79
|
+
lines.push(line);
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
return lines.join("\n");
|
|
83
|
+
}
|
package/package.json
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "cogeai",
|
|
3
|
+
"type": "module",
|
|
4
|
+
"version": "1.0.0",
|
|
5
|
+
"description": "AI-powered command generator — describe what you want, get a shell command back",
|
|
6
|
+
"main": "coge.js",
|
|
7
|
+
"bin": {
|
|
8
|
+
"coge": "coge.js"
|
|
9
|
+
},
|
|
10
|
+
"files": [
|
|
11
|
+
"coge.js",
|
|
12
|
+
"providers/",
|
|
13
|
+
"lib/",
|
|
14
|
+
"bin/",
|
|
15
|
+
"README.md"
|
|
16
|
+
],
|
|
17
|
+
"scripts": {
|
|
18
|
+
"test": "node --test test/*.test.js",
|
|
19
|
+
"sync-models": "node bin/sync-models.js",
|
|
20
|
+
"postinstall": "node lib/init-config.js"
|
|
21
|
+
},
|
|
22
|
+
"keywords": [
|
|
23
|
+
"cli",
|
|
24
|
+
"ai",
|
|
25
|
+
"llm",
|
|
26
|
+
"command-generator",
|
|
27
|
+
"shell"
|
|
28
|
+
],
|
|
29
|
+
"author": "",
|
|
30
|
+
"license": "ISC",
|
|
31
|
+
"repository": {
|
|
32
|
+
"type": "git",
|
|
33
|
+
"url": "https://github.com/vovanmozg/coge.git"
|
|
34
|
+
},
|
|
35
|
+
"dependencies": {
|
|
36
|
+
"@google/genai": "^1.7.0",
|
|
37
|
+
"clipboardy": "^4.0.0"
|
|
38
|
+
}
|
|
39
|
+
}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
import { makeOpenAICompatible, fetchOpenAIModelIds } from "./openai-compatible.js";
|
|
2
|
+
|
|
3
|
+
/**
 * Fetches the list of available Cerebras models from the OpenAI-compatible
 * /models endpoint. Return shape is whatever fetchOpenAIModelIds yields
 * (presumably an array of model IDs — see openai-compatible.js).
 */
export async function fetchModels() {
  const apiKey = process.env.COGE_CEREBRAS_API_KEY;
  const endpoint = "https://api.cerebras.ai/v1/models";
  return fetchOpenAIModelIds(endpoint, apiKey);
}
|
|
6
|
+
|
|
7
|
+
// Cerebras provider factory: the result of makeOpenAICompatible configured
// with the Cerebras chat-completions endpoint. The API key is presumably
// resolved from COGE_CEREBRAS_API_KEY by makeOpenAICompatible — see
// openai-compatible.js for the exact contract.
export const createCerebrasProvider = makeOpenAICompatible(
  "cerebras",
  "https://api.cerebras.ai/v1/chat/completions",
  "COGE_CEREBRAS_API_KEY",
);
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
import { createOpenAICompatibleProvider } from "./openai-compatible.js";
|
|
2
|
+
|
|
3
|
+
/**
 * Fetches Cloudflare Workers AI text-generation models for the configured
 * account and classifies each by name.
 * Requires COGE_CLOUDFLARE_API_KEY and COGE_CLOUDFLARE_ACCOUNT_ID.
 * @returns {Promise<{ id: string; category: string }[]>}
 * @throws {Error} when env vars are missing or the API reports failure
 */
export async function fetchModels() {
  const accountId = process.env.COGE_CLOUDFLARE_ACCOUNT_ID;
  const apiKey = process.env.COGE_CLOUDFLARE_API_KEY;
  if (!apiKey) throw new Error("COGE_CLOUDFLARE_API_KEY not set.");
  if (!accountId) throw new Error("COGE_CLOUDFLARE_ACCOUNT_ID not set.");

  const { classifyModel } = await import("../lib/model-classify.js");

  // Build the URL via URL/searchParams so the space in "Text Generation" is
  // properly encoded — the previous string concatenation produced a raw
  // space in the query string, which is an invalid URL.
  const url = new URL(
    `https://api.cloudflare.com/client/v4/accounts/${accountId}/ai/models/search`,
  );
  url.searchParams.set("task", "Text Generation");
  url.searchParams.set("per_page", "200");

  const res = await fetch(url, { headers: { Authorization: `Bearer ${apiKey}` } });
  if (!res.ok) throw new Error(`Cloudflare API ${res.status}: ${res.statusText}`);
  const data = await res.json();
  if (!data.success) throw new Error("Cloudflare API error: " + JSON.stringify(data.errors));
  return data.result.map((m) => ({ id: m.name, category: classifyModel(m.name) }));
}
|
|
17
|
+
|
|
18
|
+
/**
 * Creates a Cloudflare Workers AI provider backed by the account-scoped
 * OpenAI-compatible chat-completions endpoint.
 * @param {string} apiKey
 * @param {string} [model]
 * @returns {import("./index.js").Provider}
 * @throws {Error} when the API key or account ID is missing
 */
export function createCloudflareProvider(apiKey, model) {
  const accountId = process.env.COGE_CLOUDFLARE_ACCOUNT_ID;
  if (!apiKey) {
    throw new Error("COGE_CLOUDFLARE_API_KEY not set.");
  }
  if (!accountId) {
    throw new Error("COGE_CLOUDFLARE_ACCOUNT_ID not set.");
  }

  const provider = createOpenAICompatibleProvider({
    name: "cloudflare",
    url: `https://api.cloudflare.com/client/v4/accounts/${accountId}/ai/v1/chat/completions`,
    envKey: "COGE_CLOUDFLARE_API_KEY",
    apiKey,
  });
  // Attach the chosen model for downstream use — presumably read by the
  // provider's completion call; verify against openai-compatible.js.
  provider._model = model;
  return provider;
}
|