@a-company/paradigm 5.9.0 → 5.9.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{accept-orchestration-GX2YRWM4.js → accept-orchestration-UQLM7PTQ.js} +4 -4
- package/dist/chunk-5OUOLN6M.js +659 -0
- package/dist/{chunk-EI32ZBE6.js → chunk-RTHA3XRE.js} +19 -672
- package/dist/{chunk-WQITYKHM.js → chunk-YRZ5RPEB.js} +7 -7
- package/dist/{diff-RQLLNAFI.js → diff-D4X53HAC.js} +4 -4
- package/dist/{docs-AIY6VNF7.js → docs-QIYKO3BR.js} +1 -1
- package/dist/index.js +19 -19
- package/dist/mcp.js +48 -12
- package/dist/model-discovery-D2H3VBGC.js +8 -0
- package/dist/{orchestrate-XZA33TJC.js → orchestrate-JLILBBJE.js} +4 -4
- package/dist/{serve-QWWJP2EW.js → serve-CAH3PHE7.js} +1 -1
- package/dist/{shift-VJUGMADR.js → shift-D2JOHHBF.js} +33 -5
- package/dist/{spawn-AW6GDECS.js → spawn-RCHNXDHE.js} +4 -4
- package/dist/{team-7HG7XK5C.js → team-O5MIIFMA.js} +6 -5
- package/package.json +1 -1
- package/dist/{chunk-LSRABQIY.js → chunk-45MUDW6E.js} +3 -3
- package/dist/{platform-server-U5L2G3EU.js → platform-server-H5YO3DQD.js} +0 -0
|
@@ -1,12 +1,12 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
2
|
import {
|
|
3
3
|
BackgroundOrchestrator
|
|
4
|
-
} from "./chunk-
|
|
5
|
-
import "./chunk-LSRABQIY.js";
|
|
6
|
-
import "./chunk-PBHIFAL4.js";
|
|
7
|
-
import "./chunk-TXESEO7Y.js";
|
|
4
|
+
} from "./chunk-YRZ5RPEB.js";
|
|
8
5
|
import "./chunk-6QC3YGB6.js";
|
|
6
|
+
import "./chunk-45MUDW6E.js";
|
|
9
7
|
import "./chunk-PMXRGPRQ.js";
|
|
8
|
+
import "./chunk-PBHIFAL4.js";
|
|
9
|
+
import "./chunk-TXESEO7Y.js";
|
|
10
10
|
import "./chunk-5JGJACDU.js";
|
|
11
11
|
import "./chunk-ZGUAAVMA.js";
|
|
12
12
|
import "./chunk-EDOAWN7J.js";
|
|
@@ -0,0 +1,659 @@
|
|
|
1
|
+
#!/usr/bin/env node

// src/core/model-discovery.ts
import { exec } from "child_process";
import { promisify } from "util";
import * as fs from "fs";
import * as path from "path";

const execAsync = promisify(exec);

// Remote manifest listing known models per provider and per environment.
const MANIFEST_URL = "https://raw.githubusercontent.com/ascend42/a-paradigm/main/models.json";
// Manifest disk-cache lifetime: 7 days, in milliseconds.
const MANIFEST_CACHE_TTL = 7 * 24 * 60 * 60 * 1e3;

/**
 * Discovers which LLM models are available in the current environment
 * (Cursor, Claude Code, VS Code, or plain provider API keys).
 *
 * Resolution order per provider: live provider API -> remote manifest
 * (cached 7 days) -> hardcoded presets. Discovery results are cached on
 * disk under `<rootDir>/.paradigm/` for 24 hours. All filesystem and
 * network failures are deliberately swallowed: discovery is best-effort
 * and always returns *some* model list.
 */
class ModelDiscovery {
  cacheFile;          // path of the 24h discovery-result cache
  manifestCacheFile;  // path of the 7-day remote-manifest cache
  cacheTTL = 24 * 60 * 60 * 1e3;
  // 24 hours

  /**
   * @param {string} _rootDir - Project root; caches live in `<root>/.paradigm/`.
   */
  constructor(_rootDir) {
    this.cacheFile = path.join(_rootDir, ".paradigm", "model-cache.json");
    this.manifestCacheFile = path.join(_rootDir, ".paradigm", "model-manifest-cache.json");
  }

  /**
   * Fetch the remote model manifest (cached for 7 days).
   * Returns null on any failure — callers fall back to hardcoded presets.
   */
  async fetchManifest() {
    // Serve from the disk cache when fresh; a missing/invalid `_fetchedAt`
    // yields NaN age, which fails the freshness check and triggers a refetch.
    try {
      if (fs.existsSync(this.manifestCacheFile)) {
        const raw = fs.readFileSync(this.manifestCacheFile, "utf8");
        const cached = JSON.parse(raw);
        const age = Date.now() - new Date(cached._fetchedAt).getTime();
        if (age < MANIFEST_CACHE_TTL) {
          return cached;
        }
      }
    } catch {
    }
    try {
      const controller = new AbortController();
      const timeout = setTimeout(() => controller.abort(), 5e3);
      let response;
      try {
        response = await fetch(MANIFEST_URL, { signal: controller.signal });
      } finally {
        // FIX: the timer was previously cleared only on fetch success; a
        // rejected fetch left a live 5s timer holding the event loop open.
        clearTimeout(timeout);
      }
      if (!response.ok) return null;
      const manifest = await response.json();
      // Best-effort cache write; failure to persist never fails the fetch.
      try {
        const dir = path.dirname(this.manifestCacheFile);
        if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true });
        fs.writeFileSync(this.manifestCacheFile, JSON.stringify({ ...manifest, _fetchedAt: new Date().toISOString() }, null, 2));
      } catch {
      }
      return manifest;
    } catch {
      return null;
    }
  }

  /**
   * Get models for a provider from the remote manifest.
   * Returns null if manifest unavailable or provider not found.
   */
  async getManifestModels(provider) {
    const manifest = await this.fetchManifest();
    if (!manifest?.providers?.[provider]) return null;
    return manifest.providers[provider];
  }

  /**
   * Get environment-specific models from the remote manifest.
   * An environment entry may carry an explicit `models` list or an
   * `include` list of provider names whose models are concatenated.
   * Returns null if manifest unavailable or environment not found.
   */
  async getManifestEnvironment(env) {
    const manifest = await this.fetchManifest();
    if (!manifest?.environments?.[env]) return null;
    const envConfig = manifest.environments[env];
    if (envConfig.models) {
      return envConfig.models;
    }
    if (envConfig.include) {
      const models = [];
      for (const providerName of envConfig.include) {
        const providerModels = manifest.providers?.[providerName];
        if (providerModels) {
          models.push(...providerModels);
        }
      }
      return models.length > 0 ? models : null;
    }
    return null;
  }

  /**
   * Discover available models, preferring the 24h disk cache, then
   * dispatching on the detected environment. The fresh result is cached.
   */
  async discover() {
    const cached = this.loadCache();
    if (cached) return cached;
    const env = this.detectEnvironment();
    let result;
    switch (env) {
      case "cursor":
        result = await this.discoverCursorModels();
        break;
      case "claude-code":
        result = this.getClaudeCodeModels();
        break;
      case "vscode":
        result = await this.getVSCodeModels();
        break;
      case "multi-provider":
        result = await this.discoverMultiProviderModels();
        break;
      default:
        result = this.getFallbackModels();
    }
    this.saveCache(result);
    return result;
  }

  /**
   * Detect the current environment from process env vars.
   * Order matters: Claude Code first, then Cursor (which masquerades as
   * VS Code), then real VS Code, then any configured API keys.
   */
  detectEnvironment() {
    if (process.env.CLAUDE_CODE === "1" || process.env.TERM_PROGRAM === "claude") {
      return "claude-code";
    }
    if (process.env.TERM_PROGRAM === "cursor" || process.env.CURSOR_SESSION || process.env.CURSOR_TRACE_ID || // Cursor sets VSCODE_* vars but with cursor in the path
    process.env.VSCODE_CWD && process.env.VSCODE_CWD.toLowerCase().includes("cursor") || process.env.VSCODE_NLS_CONFIG && process.env.VSCODE_NLS_CONFIG.toLowerCase().includes("cursor") || // Check if running in Cursor's integrated terminal
    process.env.TERM_PROGRAM === "vscode" && process.env.VSCODE_GIT_ASKPASS_NODE?.toLowerCase().includes("cursor")) {
      return "cursor";
    }
    if (process.env.TERM_PROGRAM === "vscode" || process.env.VSCODE_PID) {
      return "vscode";
    }
    const providers = this.getAvailableProviders();
    if (providers.length > 0) {
      return "multi-provider";
    }
    return "fallback";
  }

  /**
   * Get list of providers with configured API keys.
   */
  getAvailableProviders() {
    const providers = [];
    if (process.env.ANTHROPIC_API_KEY) providers.push("anthropic");
    if (process.env.OPENAI_API_KEY) providers.push("openai");
    if (process.env.GOOGLE_API_KEY || process.env.GOOGLE_GENERATIVE_AI_API_KEY) providers.push("google");
    if (process.env.XAI_API_KEY) providers.push("xai");
    if (process.env.MISTRAL_API_KEY) providers.push("mistral");
    if (process.env.DEEPSEEK_API_KEY) providers.push("deepseek");
    if (process.env.COHERE_API_KEY) providers.push("cohere");
    if (process.env.OPENROUTER_API_KEY) providers.push("openrouter");
    return providers;
  }

  /**
   * Discover models from all available providers in parallel.
   * Individual provider failures are ignored (allSettled); note that
   * providers without a discover* implementation (mistral, deepseek,
   * cohere) contribute nothing here — presumably intentional, as they
   * have no discovery endpoint wired up. TODO confirm.
   */
  async discoverMultiProviderModels() {
    const providers = this.getAvailableProviders();
    const allModels = [];
    const discoveries = await Promise.allSettled(
      providers.map((provider) => this.discoverProviderModels(provider))
    );
    for (const result of discoveries) {
      if (result.status === "fulfilled" && result.value) {
        allModels.push(...result.value.models);
      }
    }
    if (allModels.length === 0) {
      return this.getFallbackModels();
    }
    return {
      source: "multi-provider",
      models: allModels,
      cached: false,
      timestamp: new Date().toISOString()
    };
  }

  /**
   * Discover models for a specific provider; null for providers with no
   * discovery implementation.
   */
  async discoverProviderModels(provider) {
    switch (provider) {
      case "anthropic":
        return this.discoverAnthropicModels();
      case "openai":
        return this.discoverOpenAIModels();
      case "google":
        return this.discoverGoogleModels();
      case "xai":
        return this.discoverXAIModels();
      case "openrouter":
        return this.discoverOpenRouterModels();
      default:
        return null;
    }
  }

  /**
   * Discover Anthropic models via API; falls back to presets on any
   * HTTP error, empty payload, or thrown exception.
   */
  async discoverAnthropicModels() {
    try {
      const response = await fetch("https://api.anthropic.com/v1/models", {
        headers: {
          "x-api-key": process.env.ANTHROPIC_API_KEY,
          "anthropic-version": "2023-06-01"
        }
      });
      if (!response.ok) {
        return await this.getAnthropicPresets();
      }
      const data = await response.json();
      if (!data.data || data.data.length === 0) {
        return await this.getAnthropicPresets();
      }
      return {
        source: "anthropic-api",
        models: data.data.map((m) => ({
          id: m.id,
          name: m.display_name || this.formatModelName(m.id),
          provider: "anthropic",
          family: this.extractFamily(m.id, "claude")
        })),
        cached: false,
        timestamp: new Date().toISOString()
      };
    } catch {
      return await this.getAnthropicPresets();
    }
  }

  /**
   * Discover OpenAI models via API; keeps only chat-capable families
   * (gpt-4*, o1/o3/o4). Falls back to presets on failure.
   */
  async discoverOpenAIModels() {
    try {
      const response = await fetch("https://api.openai.com/v1/models", {
        headers: { "Authorization": `Bearer ${process.env.OPENAI_API_KEY}` }
      });
      if (!response.ok) {
        return await this.getOpenAIPresets();
      }
      const data = await response.json();
      if (!data.data) {
        return await this.getOpenAIPresets();
      }
      const chatModels = data.data.filter(
        (m) => m.id.includes("gpt-4") || m.id.includes("o1") || m.id.includes("o3") || m.id.includes("o4")
      );
      return {
        source: "openai",
        models: chatModels.map((m) => ({
          id: m.id,
          name: this.formatModelName(m.id),
          provider: "openai",
          family: this.extractFamily(m.id, "gpt")
        })),
        cached: false,
        timestamp: new Date().toISOString()
      };
    } catch {
      return await this.getOpenAIPresets();
    }
  }

  /**
   * Discover Google models via API; keeps only Gemini entries and strips
   * the `models/` resource prefix. Falls back to presets on failure.
   */
  async discoverGoogleModels() {
    try {
      const apiKey = process.env.GOOGLE_API_KEY || process.env.GOOGLE_GENERATIVE_AI_API_KEY;
      const response = await fetch(
        `https://generativelanguage.googleapis.com/v1beta/models?key=${apiKey}`
      );
      if (!response.ok) {
        return await this.getGooglePresets();
      }
      const data = await response.json();
      if (!data.models) {
        return await this.getGooglePresets();
      }
      return {
        source: "google",
        models: data.models.filter((m) => m.name.includes("gemini")).map((m) => ({
          id: m.name.replace("models/", ""),
          name: m.displayName || this.formatModelName(m.name),
          provider: "google",
          family: "gemini"
        })),
        cached: false,
        timestamp: new Date().toISOString()
      };
    } catch {
      return await this.getGooglePresets();
    }
  }

  /**
   * Discover xAI/Grok models via API; falls back to presets on failure.
   */
  async discoverXAIModels() {
    try {
      const response = await fetch("https://api.x.ai/v1/models", {
        headers: { "Authorization": `Bearer ${process.env.XAI_API_KEY}` }
      });
      if (!response.ok) {
        return await this.getXAIPresets();
      }
      const data = await response.json();
      if (!data.data) {
        return await this.getXAIPresets();
      }
      return {
        source: "xai",
        models: data.data.map((m) => ({
          id: m.id,
          name: m.id.includes("grok") ? `Grok ${m.id.split("-").pop()}` : m.id,
          provider: "xai",
          family: "grok"
        })),
        cached: false,
        timestamp: new Date().toISOString()
      };
    } catch {
      return await this.getXAIPresets();
    }
  }

  /**
   * Discover OpenRouter models via API. Unlike other providers this has
   * no preset fallback — failures return an empty model list. Only the
   * first 30 models are kept.
   */
  async discoverOpenRouterModels() {
    try {
      const response = await fetch("https://openrouter.ai/api/v1/models", {
        headers: { "Authorization": `Bearer ${process.env.OPENROUTER_API_KEY}` }
      });
      if (!response.ok) {
        return { source: "openrouter", models: [], cached: false, timestamp: new Date().toISOString() };
      }
      const data = await response.json();
      if (!data.data) {
        return { source: "openrouter", models: [], cached: false, timestamp: new Date().toISOString() };
      }
      return {
        source: "openrouter",
        models: data.data.slice(0, 30).map((m) => ({
          id: m.id,
          name: m.name || m.id,
          // OpenRouter ids look like "vendor/model"; the vendor is the provider.
          provider: m.id.split("/")[0] || "openrouter"
        })),
        cached: false,
        timestamp: new Date().toISOString()
      };
    } catch {
      return { source: "openrouter", models: [], cached: false, timestamp: new Date().toISOString() };
    }
  }

  /**
   * Discover Cursor models by shelling out to the Cursor CLI (5s cap);
   * on failure falls back to the remote manifest, then hardcoded presets.
   */
  async discoverCursorModels() {
    try {
      const { stdout } = await execAsync("cursor agent models --json", { timeout: 5e3 });
      const models = JSON.parse(stdout);
      return {
        source: "cursor",
        models: models.map((m) => this.normalizeModel(m)),
        cached: false,
        timestamp: new Date().toISOString()
      };
    } catch {
      const manifest = await this.getManifestEnvironment("cursor");
      return {
        source: manifest ? "cursor-manifest" : "cursor",
        models: manifest || this.getCursorPresets(),
        cached: false,
        timestamp: new Date().toISOString()
      };
    }
  }

  /**
   * Get Claude Code models (fixed list).
   */
  getClaudeCodeModels() {
    return {
      source: "claude-code",
      models: [
        { id: "opus", name: "Claude Opus", provider: "anthropic", family: "claude" },
        { id: "sonnet", name: "Claude Sonnet", provider: "anthropic", family: "claude" },
        { id: "haiku", name: "Claude Haiku", provider: "anthropic", family: "claude" }
      ],
      cached: false,
      timestamp: new Date().toISOString()
    };
  }

  /**
   * Get VSCode/Copilot models (manifest -> hardcoded).
   */
  async getVSCodeModels() {
    const manifest = await this.getManifestEnvironment("vscode");
    if (manifest) {
      return { source: "vscode-manifest", models: manifest, cached: false, timestamp: new Date().toISOString() };
    }
    return {
      source: "vscode",
      models: [
        { id: "gpt-4.1", name: "GPT-4.1", provider: "openai", family: "gpt-4.1" },
        { id: "gpt-4.1-mini", name: "GPT-4.1 Mini", provider: "openai", family: "gpt-4.1" },
        { id: "gpt-4.1-nano", name: "GPT-4.1 Nano", provider: "openai", family: "gpt-4.1" },
        { id: "o3", name: "OpenAI o3", provider: "openai", family: "o3" },
        { id: "o4-mini", name: "OpenAI o4 Mini", provider: "openai", family: "o4" },
        { id: "claude-sonnet-4-5-20250929", name: "Claude Sonnet 4.5", provider: "anthropic", family: "claude-4" }
      ],
      cached: false,
      timestamp: new Date().toISOString()
    };
  }

  /**
   * Get comprehensive preset models for Cursor.
   */
  getCursorPresets() {
    return [
      // Anthropic Claude models
      { id: "claude-opus-4-6", name: "Claude Opus 4.6", provider: "anthropic", family: "claude-4" },
      { id: "claude-sonnet-4-5-20250929", name: "Claude Sonnet 4.5", provider: "anthropic", family: "claude-4" },
      { id: "claude-haiku-4-5-20251001", name: "Claude Haiku 4.5", provider: "anthropic", family: "claude-4" },
      // OpenAI GPT models
      { id: "gpt-4.1", name: "GPT-4.1", provider: "openai", family: "gpt-4.1" },
      { id: "gpt-4.1-mini", name: "GPT-4.1 Mini", provider: "openai", family: "gpt-4.1" },
      { id: "gpt-4.1-nano", name: "GPT-4.1 Nano", provider: "openai", family: "gpt-4.1" },
      { id: "o3", name: "OpenAI o3", provider: "openai", family: "o3" },
      { id: "o4-mini", name: "OpenAI o4 Mini", provider: "openai", family: "o4" },
      { id: "o3-mini", name: "OpenAI o3 Mini", provider: "openai", family: "o3" },
      // Google Gemini models
      { id: "gemini-2.5-pro", name: "Gemini 2.5 Pro", provider: "google", family: "gemini-2.5" },
      { id: "gemini-2.5-flash", name: "Gemini 2.5 Flash", provider: "google", family: "gemini-2.5" },
      { id: "gemini-2.0-flash", name: "Gemini 2.0 Flash", provider: "google", family: "gemini-2" },
      // xAI Grok models
      { id: "grok-3", name: "Grok 3", provider: "xai", family: "grok" },
      { id: "grok-3-mini", name: "Grok 3 Mini", provider: "xai", family: "grok" },
      // Meta Llama models
      { id: "llama-4-scout", name: "Llama 4 Scout", provider: "meta", family: "llama-4" },
      { id: "llama-4-maverick", name: "Llama 4 Maverick", provider: "meta", family: "llama-4" },
      // Mistral models
      { id: "mistral-large", name: "Mistral Large", provider: "mistral", family: "mistral" },
      { id: "codestral", name: "Codestral", provider: "mistral", family: "codestral" },
      // DeepSeek models
      { id: "deepseek-r1", name: "DeepSeek R1", provider: "deepseek", family: "deepseek" },
      { id: "deepseek-v3", name: "DeepSeek V3", provider: "deepseek", family: "deepseek" },
      // Cohere models
      { id: "command-r-plus", name: "Command R+", provider: "cohere", family: "command" },
      { id: "command-r", name: "Command R", provider: "cohere", family: "command" }
    ];
  }

  /**
   * Get Anthropic preset models (manifest → hardcoded)
   */
  async getAnthropicPresets() {
    const manifest = await this.getManifestModels("anthropic");
    if (manifest) {
      return { source: "anthropic-manifest", models: manifest, cached: false, timestamp: new Date().toISOString() };
    }
    return {
      source: "anthropic-api",
      models: [
        { id: "claude-opus-4-6", name: "Claude Opus 4.6", provider: "anthropic", family: "claude-4" },
        { id: "claude-sonnet-4-5-20250929", name: "Claude Sonnet 4.5", provider: "anthropic", family: "claude-4" },
        { id: "claude-haiku-4-5-20251001", name: "Claude Haiku 4.5", provider: "anthropic", family: "claude-4" }
      ],
      cached: false,
      timestamp: new Date().toISOString()
    };
  }

  /**
   * Get OpenAI preset models (manifest → hardcoded)
   */
  async getOpenAIPresets() {
    const manifest = await this.getManifestModels("openai");
    if (manifest) {
      return { source: "openai-manifest", models: manifest, cached: false, timestamp: new Date().toISOString() };
    }
    return {
      source: "openai",
      models: [
        { id: "gpt-4.1", name: "GPT-4.1", provider: "openai", family: "gpt-4.1" },
        { id: "gpt-4.1-mini", name: "GPT-4.1 Mini", provider: "openai", family: "gpt-4.1" },
        { id: "gpt-4.1-nano", name: "GPT-4.1 Nano", provider: "openai", family: "gpt-4.1" },
        { id: "o3", name: "OpenAI o3", provider: "openai", family: "o3" },
        { id: "o4-mini", name: "OpenAI o4 Mini", provider: "openai", family: "o4" },
        { id: "o3-mini", name: "OpenAI o3 Mini", provider: "openai", family: "o3" }
      ],
      cached: false,
      timestamp: new Date().toISOString()
    };
  }

  /**
   * Get Google preset models (manifest → hardcoded)
   */
  async getGooglePresets() {
    const manifest = await this.getManifestModels("google");
    if (manifest) {
      return { source: "google-manifest", models: manifest, cached: false, timestamp: new Date().toISOString() };
    }
    return {
      source: "google",
      models: [
        { id: "gemini-2.5-pro", name: "Gemini 2.5 Pro", provider: "google", family: "gemini-2.5" },
        { id: "gemini-2.5-flash", name: "Gemini 2.5 Flash", provider: "google", family: "gemini-2.5" },
        { id: "gemini-2.0-flash", name: "Gemini 2.0 Flash", provider: "google", family: "gemini-2" }
      ],
      cached: false,
      timestamp: new Date().toISOString()
    };
  }

  /**
   * Get xAI preset models (manifest → hardcoded)
   */
  async getXAIPresets() {
    const manifest = await this.getManifestModels("xai");
    if (manifest) {
      return { source: "xai-manifest", models: manifest, cached: false, timestamp: new Date().toISOString() };
    }
    return {
      source: "xai",
      models: [
        { id: "grok-3", name: "Grok 3", provider: "xai", family: "grok" },
        { id: "grok-3-mini", name: "Grok 3 Mini", provider: "xai", family: "grok" }
      ],
      cached: false,
      timestamp: new Date().toISOString()
    };
  }

  /**
   * Get fallback models (basic Claude models)
   */
  getFallbackModels() {
    return {
      source: "fallback",
      models: [
        { id: "opus", name: "Claude Opus", provider: "anthropic", family: "claude" },
        { id: "sonnet", name: "Claude Sonnet", provider: "anthropic", family: "claude" },
        { id: "haiku", name: "Claude Haiku", provider: "anthropic", family: "claude" }
      ],
      cached: false,
      timestamp: new Date().toISOString()
    };
  }

  /**
   * Normalize a model from an external source into the internal shape;
   * missing provider becomes "unknown", missing name is derived from the id.
   */
  normalizeModel(model) {
    return {
      id: model.id,
      name: model.name || this.formatModelName(model.id),
      provider: model.provider || "unknown",
      family: model.family,
      capabilities: model.capabilities
    };
  }

  /**
   * Format a model ID into a human-readable name, e.g.
   * "mistral-large" -> "Mistral Large".
   */
  formatModelName(id) {
    return id.replace(/-/g, " ").replace(/\b\w/g, (c) => c.toUpperCase()).replace(/(\d)([a-z])/gi, "$1 $2");
  }

  /**
   * Extract model family from ID; first matching pattern wins,
   * otherwise the caller-supplied default.
   */
  extractFamily(id, defaultFamily) {
    const patterns = [
      [/claude-4|claude-opus-4|claude-sonnet-4|claude-haiku-4/i, "claude-4"],
      [/claude-3\.5|claude-3-5/i, "claude-3.5"],
      [/claude-3/i, "claude-3"],
      [/gpt-4\.1/i, "gpt-4.1"],
      [/gpt-4o/i, "gpt-4o"],
      [/gpt-4/i, "gpt-4"],
      [/o4/i, "o4"],
      [/o3/i, "o3"],
      [/o1/i, "o1"],
      [/gemini-2\.5/i, "gemini-2.5"],
      [/gemini-2/i, "gemini-2"],
      [/gemini-1\.5/i, "gemini-1.5"],
      [/grok/i, "grok"],
      [/llama-4/i, "llama-4"],
      [/llama/i, "llama"],
      [/mistral/i, "mistral"]
    ];
    for (const [pattern, family] of patterns) {
      if (pattern.test(id)) {
        return family;
      }
    }
    return defaultFamily;
  }

  /**
   * Load cached discovery results; null when absent, stale (> cacheTTL),
   * or unreadable. A cache hit is marked with `cached: true`.
   */
  loadCache() {
    try {
      if (!fs.existsSync(this.cacheFile)) {
        return null;
      }
      const content = fs.readFileSync(this.cacheFile, "utf8");
      const cached = JSON.parse(content);
      const cacheTime = new Date(cached.timestamp).getTime();
      if (Date.now() - cacheTime > this.cacheTTL) {
        return null;
      }
      return { ...cached, cached: true };
    } catch {
      return null;
    }
  }

  /**
   * Save models to cache (best-effort; write errors are swallowed).
   */
  saveCache(result) {
    try {
      const dir = path.dirname(this.cacheFile);
      if (!fs.existsSync(dir)) {
        fs.mkdirSync(dir, { recursive: true });
      }
      fs.writeFileSync(this.cacheFile, JSON.stringify(result, null, 2));
    } catch {
    }
  }

  /**
   * Clear the cache (best-effort; unlink errors are swallowed).
   */
  clearCache() {
    try {
      if (fs.existsSync(this.cacheFile)) {
        fs.unlinkSync(this.cacheFile);
      }
    } catch {
    }
  }

  /**
   * Get models grouped by tier (high/medium/low) using name/id heuristics.
   * Low-tier markers win over high-tier; anything unmatched is medium.
   */
  groupByTier(models) {
    const high = [];
    const medium = [];
    const low = [];
    const miniPattern = /\bmini\b/i;
    const flashPattern = /\bflash\b/i;
    const smallPattern = /\bsmall\b/i;
    for (const model of models) {
      const name = model.name.toLowerCase();
      const id = model.id.toLowerCase();
      const combined = `${name} ${id}`;
      const isLowTier = name.includes("haiku") || miniPattern.test(combined) || name.includes("nano") || flashPattern.test(combined) && !name.includes("flash-thinking") || smallPattern.test(combined) || name.includes("scout") || name.includes("instant");
      const isHighTier = name.includes("opus") || name.includes("gpt-4") && !miniPattern.test(combined) && !name.includes("nano") || id.includes("gpt-4.1") && !miniPattern.test(combined) && !id.includes("nano") || (id === "o3" || id.includes("o3") && !miniPattern.test(combined)) || (id === "o1" || id.includes("o1") && !miniPattern.test(combined) && !id.includes("o1-")) || // Pro models (Gemini Pro, etc.) but not mini variants
      id.includes("-pro") && !miniPattern.test(combined) || id.includes("grok-3") && !miniPattern.test(combined) || id.includes("grok-2") && !miniPattern.test(combined) || name.includes("large") || name.includes("maverick") || name.includes("command r+") || id.includes("deepseek-r1") || id.includes("deepseek-v3");
      if (isLowTier) {
        low.push(model);
      } else if (isHighTier) {
        high.push(model);
      } else {
        medium.push(model);
      }
    }
    return { high, medium, low };
  }
}

export {
  ModelDiscovery
};