@johndimm/constellations 1.0.4 → 1.0.6
This diff shows the published contents of the two package versions as they appear in their respective public registries, and is provided for informational purposes only.
- package/App.tsx +54 -2
- package/hooks/useSearchHandlers.ts +2 -2
- package/package.json +1 -1
- package/services/aiUtils.ts +31 -2
- package/services/debugLog.ts +6 -0
- package/services/deepseekService.ts +3 -3
- package/services/geminiService.ts +5 -11
- package/services/imageService.ts +6 -11
- package/services/modelsService.ts +140 -0
package/App.tsx
CHANGED
@@ -23,6 +23,53 @@ import { buildHandoffFromLiveState, type ConstellationsSessionHandoffV1 } from '
 
 const PeopleBrowserSidebar = lazy(() => import('./components/PeopleBrowserSidebar'));
 
+const WELCOME_EXAMPLES = ["Alan Turing", "The Godfather", "Iceman by Drake"];
+
+function WelcomeOverlay({ hideHeader, onSearch, onDismiss }: { hideHeader?: boolean; onSearch: (term: string) => void; onDismiss: () => void }) {
+  const [draft, setDraft] = useState('');
+  const submit = (term: string) => { const t = term.trim(); if (t) onSearch(t); };
+  return (
+    <div className="absolute inset-0 z-[150] bg-slate-950 flex flex-col items-center justify-center" style={{ paddingTop: hideHeader ? 0 : '3.5rem' }}>
+      <div className="text-center max-w-lg px-8 w-full">
+        <h1 className="text-4xl font-bold mb-3 bg-gradient-to-r from-indigo-400 via-purple-400 to-cyan-400 bg-clip-text text-transparent">
+          Constellations
+        </h1>
+        <p className="text-slate-400 mb-8 text-base leading-relaxed">
+          Start with anything. Follow the connections.
+        </p>
+        <form onSubmit={e => { e.preventDefault(); submit(draft); }} className="flex gap-2 mb-6">
+          <input
+            autoFocus
+            type="text"
+            value={draft}
+            onChange={e => setDraft(e.target.value)}
+            placeholder="Person, film, album, idea…"
+            className="flex-1 bg-slate-800 border border-slate-700 rounded-xl px-4 py-2.5 text-sm text-white placeholder-slate-500 focus:outline-none focus:border-indigo-500/60 focus:ring-1 focus:ring-indigo-500/20"
+          />
+          <button
+            type="submit"
+            disabled={!draft.trim()}
+            className="px-4 py-2.5 rounded-xl bg-indigo-600 hover:bg-indigo-500 disabled:opacity-40 disabled:cursor-not-allowed text-white text-sm font-medium transition-colors"
+          >
+            Go
+          </button>
+        </form>
+        <div className="flex flex-wrap justify-center gap-2">
+          {WELCOME_EXAMPLES.map(ex => (
+            <button key={ex} onClick={() => submit(ex)}
+              className="px-3 py-1.5 rounded-lg bg-slate-800 border border-slate-700 text-slate-300 hover:bg-slate-700 hover:text-white transition-all text-sm">
+              {ex}
+            </button>
+          ))}
+          <button onClick={onDismiss}
+            className="px-3 py-1.5 rounded-lg bg-slate-800/50 border border-slate-700/50 text-slate-500 hover:bg-slate-800 hover:text-slate-300 transition-all text-sm">
+            blank slate
+          </button>
+        </div>
+      </div>
+    </div>
+  );
+}
 
 type AppProps = {
   mode?: 'standalone' | 'extension';
@@ -36,7 +83,7 @@ type AppProps = {
    */
   showExtensionWhenPanelHidden?: boolean;
   hideSidebar?: boolean;
-  externalSearch?: { term: string; id: string | number } | null;
+  externalSearch?: { term: string; id: string | number; typeHint?: string } | null;
   onExternalSearchConsumed?: (id: string | number) => void;
   onNodeNavigate?: (node: GraphNode) => void;
   renderEvidencePopup?: (selectedLink: GraphLink | null, onClose: () => void) => React.ReactNode;
@@ -243,6 +290,7 @@ const App: React.FC<AppProps> = ({
   });
 
   const [showHelp, setShowHelp] = useState(false);
+  const [welcomeDismissed, setWelcomeDismissed] = useState(false);
 
   const {
     exploreTerm, setExploreTerm, pathStart, setPathStart, pathEnd, setPathEnd,
@@ -325,7 +373,7 @@ const App: React.FC<AppProps> = ({
   useEffect(() => {
     if (skipPlayerBootstrapRef.current) return;
     if (!externalSearch?.term) return;
-    handleStartSearchRef.current(externalSearch.term);
+    handleStartSearchRef.current(externalSearch.term, 0, externalSearch.typeHint);
     if (externalSearch?.id !== undefined) {
       onExternalSearchConsumedRef.current?.(externalSearch.id);
     }
@@ -756,6 +804,10 @@
         />
       </div>
 
+      {!embedded && nodes.length === 0 && !isProcessing && !welcomeDismissed && (
+        <WelcomeOverlay hideHeader={hideHeader} onSearch={handleStartSearch} onDismiss={() => setWelcomeDismissed(true)} />
+      )}
+
       <AppHeader
         showHeader={!hideHeader}
         panelCollapsed={panelCollapsed}
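Note: with the `externalSearch.typeHint` prop added above, a host app can disambiguate a programmatic search before the Wikipedia lookup runs. A minimal sketch of the host side (the state setter and the hint value are illustrative, not part of the package):

    // Hypothetical host-side state feeding the externalSearch prop:
    const [search, setSearch] = useState<{ term: string; id: number; typeHint?: string } | null>(null);
    setSearch({ term: 'Republic', id: Date.now(), typeHint: 'Philosophy' });
    // App.tsx then calls handleStartSearchRef.current('Republic', 0, 'Philosophy')
    // and reports consumption via onExternalSearchConsumed(id).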
package/hooks/useSearchHandlers.ts
CHANGED
@@ -101,7 +101,7 @@ export function useSearchHandlers(options: UseSearchHandlersOptions) {
     return nodeData;
   }, [cacheEnabled, cacheBaseUrl]);
 
-  const handleStartSearch = useCallback(async (term: string, recursiveDepth = 0) => {
+  const handleStartSearch = useCallback(async (term: string, recursiveDepth = 0, typeHint?: string) => {
     setIsProcessing(true);
     setError(null);
     const nextSearchId = searchIdRef.current + 1;
@@ -134,7 +134,7 @@ export function useSearchHandlers(options: UseSearchHandlersOptions) {
     // CRITICAL FIX: Only use kiosk domain context if the user hasn't provided a specific disambiguated term.
     // "Republic (Plato)" should NEVER get "Actors / Movies / TV" context.
     const hasDisambiguation = term.includes('(') && term.includes(')');
-    const wikiContext = (showControlPanel && !hasDisambiguation) ? selectedKioskDomain?.label : undefined;
+    const wikiContext = (showControlPanel && !hasDisambiguation) ? selectedKioskDomain?.label : typeHint;
 
     const wiki = await fetchWikipediaSummary(term, wikiContext);
     const canonicalTitle = (wiki.title || term).trim();
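Note: the hint only applies when kiosk context does not, and a disambiguated term bypasses both. A usage sketch (the hint string is illustrative):

    // 'Philosophy' is an example value, not one defined by the package:
    handleStartSearch('Republic', 0, 'Philosophy');
    // -> fetchWikipediaSummary('Republic', 'Philosophy') when no kiosk domain context applies;
    // handleStartSearch('Republic (Plato)', 0, 'Philosophy') ignores the hint (hasDisambiguation).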
package/package.json
CHANGED
package/services/aiUtils.ts
CHANGED
@@ -357,6 +357,7 @@ export function isRateLimitError(e: any): boolean {
 export type LlmProviderId = "gemini" | "deepseek" | "openai" | "anthropic";
 
 const BROWSER_LLM_KEY = "constellations_llm_provider";
+const BROWSER_LLM_MODEL_KEY = "constellations_llm_model";
 
 function isValidProvider(v: string): v is LlmProviderId {
   return v === "gemini" || v === "deepseek" || v === "openai" || v === "anthropic";
@@ -382,14 +383,42 @@ export function setBrowserLlmOverride(provider: LlmProviderId | null): void {
   } catch {}
 }
 
-
-
+export function getBrowserLlmModel(): string | null {
+  if (typeof window === "undefined") return null;
+  try {
+    return window.localStorage.getItem(BROWSER_LLM_MODEL_KEY)?.trim() || null;
+  } catch {}
+  return null;
+}
+
+export function setBrowserLlmModel(model: string | null): void {
+  if (typeof window === "undefined") return;
+  try {
+    if (!model) {
+      window.localStorage.removeItem(BROWSER_LLM_MODEL_KEY);
+    } else {
+      window.localStorage.setItem(BROWSER_LLM_MODEL_KEY, model);
+    }
+  } catch {}
+}
+
+// Server-side per-request overrides (Node.js module memory, set before each proxy call).
+// Intentionally simple — dev server is single-user so concurrent-request races are fine.
 let _serverLlmOverride: LlmProviderId | null = null;
+let _serverLlmModelOverride: string | null = null;
 
 export function setServerLlmOverride(provider: LlmProviderId | null): void {
   _serverLlmOverride = provider;
 }
 
+export function setServerLlmModelOverride(model: string | null): void {
+  _serverLlmModelOverride = model;
+}
+
+export function getServerLlmModelOverride(): string | null {
+  return _serverLlmModelOverride;
+}
+
 export function getLlmProvider(): LlmProviderId {
   if (_serverLlmOverride) return _serverLlmOverride;
   const browser = getBrowserLlmOverride();
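Note: a brief usage sketch of the new model-override helpers (the model strings and import path are illustrative):

    import { setBrowserLlmModel, getBrowserLlmModel, setServerLlmModelOverride } from "./services/aiUtils";

    setBrowserLlmModel("gemini-2.0-flash"); // persisted under constellations_llm_model
    getBrowserLlmModel();                   // -> "gemini-2.0-flash" (null if unset, or during SSR)
    setBrowserLlmModel(null);               // removes the stored choice

    // Server side, set just before proxying a request:
    setServerLlmModelOverride("deepseek-chat");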
package/services/deepseekService.ts
CHANGED
@@ -1,6 +1,6 @@
 "use client";
 import { GeminiResponse, PersonWorksResponse, PathResponse } from "../types";
-import { parseJsonFromModelText, withTimeout, withRetry, getEnvCacheUrl, readBundledEnv, getLlmProvider, looksLikePersonName } from "./aiUtils";
+import { parseJsonFromModelText, withTimeout, withRetry, getEnvCacheUrl, readBundledEnv, getLlmProvider, looksLikePersonName, getServerLlmModelOverride } from "./aiUtils";
 import type { LockedPair } from "./geminiService";
 
 export type { LockedPair };
@@ -61,9 +61,9 @@ async function callAltLlm(system: string, user: string, timeoutMs = TIMEOUT_MS):
   const baseUrl = isOpenAI
     ? (readBundledEnv("VITE_OPENAI_BASE_URL") || "https://api.openai.com/v1")
     : (readBundledEnv("VITE_DEEPSEEK_BASE_URL") || "https://api.deepseek.com/v1");
-  const model = isOpenAI
+  const model = getServerLlmModelOverride() || (isOpenAI
     ? (readBundledEnv("VITE_OPENAI_MODEL") || "gpt-4o-mini")
-    : (readBundledEnv("VITE_DEEPSEEK_MODEL") || "deepseek-chat");
+    : (readBundledEnv("VITE_DEEPSEEK_MODEL") || "deepseek-chat"));
   const key = isOpenAI
     ? readBundledEnv("VITE_OPENAI_API_KEY")
     : readBundledEnv("VITE_DEEPSEEK_API_KEY");
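Note: model resolution in callAltLlm is now three-tiered. A sketch of the precedence (the override value is illustrative):

    setServerLlmModelOverride("deepseek-reasoner");
    // callAltLlm now uses "deepseek-reasoner" regardless of VITE_DEEPSEEK_MODEL;
    setServerLlmModelOverride(null);
    // with no override it falls back to VITE_DEEPSEEK_MODEL, then to "deepseek-chat".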
package/services/geminiService.ts
CHANGED
@@ -1,7 +1,7 @@
 "use client";
 import { GoogleGenAI, Type } from "@google/genai";
 import { GeminiResponse, PersonWorksResponse, PathResponse } from "../types";
-import { getApiKey, getResponseText, cleanJson, parseJsonFromModelText, withTimeout, withRetry, getEnvCacheUrl, getEnvGeminiModel, getEnvGeminiModelClassify, sanitizeSearchTerm, looksLikePersonName, getLlmProvider } from "./aiUtils";
+import { getApiKey, getResponseText, cleanJson, parseJsonFromModelText, withTimeout, withRetry, getEnvCacheUrl, getEnvGeminiModel, getEnvGeminiModelClassify, sanitizeSearchTerm, looksLikePersonName, getLlmProvider, getBrowserLlmModel, getServerLlmModelOverride } from "./aiUtils";
 
 export { getApiKey, getResponseText, cleanJson, parseJsonFromModelText, withTimeout, withRetry, getEnvCacheUrl, getEnvGeminiModel, getEnvGeminiModelClassify } from "./aiUtils";
 
@@ -74,8 +74,8 @@ const CLASSIFY_TIMEOUT_MS = 15000; // 15 seconds for classification
 // Model selection (configurable via Vite env vars)
 // - VITE_GEMINI_MODEL: used for expansions + pathfinding (default)
 // - VITE_GEMINI_MODEL_CLASSIFY: optional override for classification
-const getGeminiModel = getEnvGeminiModel;
-const getGeminiModelClassify = getEnvGeminiModelClassify;
+const getGeminiModel = () => getServerLlmModelOverride() || getEnvGeminiModel();
+const getGeminiModelClassify = () => getServerLlmModelOverride() || getEnvGeminiModelClassify();
 
 // Rejects YouTube channel names, streaming platforms, and other web junk.
 function isValidEntityName(name: string): boolean {
@@ -120,7 +120,7 @@ async function callAiProxy(endpoint: string, body: any) {
   const resp = await fetch(url, {
     method: "POST",
     headers: { "Content-Type": "application/json" },
-    body: JSON.stringify({ ...body, llmProvider: getLlmProvider() })
+    body: JSON.stringify({ ...body, llmProvider: getLlmProvider(), llmModel: getBrowserLlmModel() ?? undefined })
   });
 
   if (resp.status === 404 && endpoint === "/api/ai/classify-start") {
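Note: because JSON.stringify drops keys whose value is undefined, llmModel only reaches the proxy when a browser override is stored. An illustrative payload (the model name is an example, not a package default):

    // With setBrowserLlmModel("gemini-2.5-flash") in effect:
    // POST <base>/api/ai/classify-start
    // { ...body, "llmProvider": "gemini", "llmModel": "gemini-2.5-flash" }
    // With no stored model, the llmModel key is omitted entirely.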
@@ -292,13 +292,7 @@ export const classifyStartPair = async (
     });
   }
   if (proxy) {
-    const proxyResult = await callAiProxy("/api/ai/classify-start", { term: rawTerm.trim(), wikiContext });
-    // Sanity check: if proxy says non-atomic but the term strongly looks like a person name, correct it.
-    if (!proxyResult.isAtomic && looksLikePersonName(rawTerm)) {
-      console.warn("[classifyStartPair] proxy returned isAtomic=false for apparent person name; overriding", rawTerm);
-      return { ...proxyResult, isAtomic: true, type: "Person" };
-    }
-    return proxyResult;
+    return await callAiProxy("/api/ai/classify-start", { term: rawTerm.trim(), wikiContext });
   }
 
   const needsMusic = rawTermNeedsMusicEntityExtract(rawTerm);
package/services/imageService.ts
CHANGED
@@ -10,19 +10,14 @@ export type ServerImageResult = {
 
 /**
  * Base URL for `GET /api/image` in the browser.
- * Prefer the
- *
- * when the host app can resolve Wikipedia images itself.
+ * Prefer the cache/proxy server when one is configured — it always implements /api/image.
+ * Fall back to window.location.origin for Next.js host apps that implement the route locally.
  */
 export const getImageApiBaseUrl = (cacheBaseUrl: string | undefined): string => {
-
-
-
-  return (
-    (cacheBaseUrl && cacheBaseUrl.replace(/\/$/, '')) ||
-    getEffectiveCacheBaseUrl() ||
-    ''
-  );
+  const cacheBase = (cacheBaseUrl && cacheBaseUrl.replace(/\/$/, '')) || getEffectiveCacheBaseUrl();
+  if (cacheBase) return cacheBase;
+  if (typeof window !== 'undefined') return window.location.origin;
+  return '';
 };
 
 export const fetchServerImage = async (
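Note: a sketch of the new resolution order (the URL is illustrative):

    getImageApiBaseUrl('https://cache.example.com/'); // -> 'https://cache.example.com' (trailing slash stripped)
    getImageApiBaseUrl(undefined);                    // -> getEffectiveCacheBaseUrl() result when configured
    // otherwise -> window.location.origin in a browser, or '' during SSR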
package/services/modelsService.ts
ADDED
@@ -0,0 +1,140 @@
+export interface ModelInfo {
+  provider: string;
+  id: string;
+  displayName: string;
+}
+
+interface CacheEntry {
+  models: ModelInfo[];
+  fetchedAt: number;
+}
+
+const CACHE_TTL_MS = 4 * 60 * 60 * 1000; // 4 hours
+const cache = new Map<string, CacheEntry>();
+
+function cached(provider: string): ModelInfo[] | null {
+  const entry = cache.get(provider);
+  if (entry && Date.now() - entry.fetchedAt < CACHE_TTL_MS) return entry.models;
+  return null;
+}
+
+function store(provider: string, models: ModelInfo[]): ModelInfo[] {
+  cache.set(provider, { models, fetchedAt: Date.now() });
+  return models;
+}
+
+async function fetchGeminiModels(): Promise<ModelInfo[]> {
+  const key = process.env.GEMINI_API_KEY || process.env.VITE_API_KEY || process.env.VITE_GEMINI_API_KEY || "";
+  if (!key) return [];
+  const hit = cached("gemini");
+  if (hit) return hit;
+  try {
+    const res = await fetch(`https://generativelanguage.googleapis.com/v1beta/models?key=${key}&pageSize=100`);
+    if (!res.ok) return [];
+    const data = await res.json() as { models?: any[] };
+    const models = (data.models ?? [])
+      .filter((m: any) => Array.isArray(m.supportedGenerationMethods) && m.supportedGenerationMethods.includes("generateContent"))
+      .filter((m: any) => !/embedding|aqa|vision-001/i.test(m.name ?? ""))
+      .map((m: any) => ({
+        provider: "gemini",
+        id: String(m.name ?? "").replace(/^models\//, ""),
+        displayName: m.displayName || String(m.name ?? "").replace(/^models\//, ""),
+      }))
+      .sort((a: ModelInfo, b: ModelInfo) => a.id.localeCompare(b.id));
+    return store("gemini", models);
+  } catch {
+    return [];
+  }
+}
+
+async function fetchAnthropicModels(): Promise<ModelInfo[]> {
+  const key = process.env.VITE_ANTHROPIC_API_KEY || "";
+  if (!key) return [];
+  const hit = cached("anthropic");
+  if (hit) return hit;
+  try {
+    const res = await fetch("https://api.anthropic.com/v1/models", {
+      headers: { "x-api-key": key, "anthropic-version": "2023-06-01" },
+    });
+    if (!res.ok) return [];
+    const data = await res.json() as { data?: any[] };
+    const models = (data.data ?? [])
+      .filter((m: any) => String(m.id ?? "").startsWith("claude-"))
+      .map((m: any) => ({
+        provider: "anthropic",
+        id: String(m.id ?? ""),
+        displayName: m.display_name || m.id,
+      }))
+      .sort((a: ModelInfo, b: ModelInfo) => a.id.localeCompare(b.id));
+    return store("anthropic", models);
+  } catch {
+    return [];
+  }
+}
+
+async function fetchOpenAICompatibleModels(
+  provider: "openai" | "deepseek",
+  baseUrl: string,
+  key: string,
+  filter: (id: string) => boolean
+): Promise<ModelInfo[]> {
+  if (!key) return [];
+  const hit = cached(provider);
+  if (hit) return hit;
+  try {
+    const res = await fetch(`${baseUrl.replace(/\/$/, "")}/models`, {
+      headers: { Authorization: `Bearer ${key}` },
+    });
+    if (!res.ok) return [];
+    const data = await res.json() as { data?: any[] };
+    const models = (data.data ?? [])
+      .filter((m: any) => filter(String(m.id ?? "")))
+      .map((m: any) => ({
+        provider,
+        id: String(m.id ?? ""),
+        displayName: m.id,
+      }))
+      .sort((a: ModelInfo, b: ModelInfo) => a.id.localeCompare(b.id));
+    return store(provider, models);
+  } catch {
+    return [];
+  }
+}
+
+function isOpenAIChatModel(id: string): boolean {
+  if (!/^(gpt-|o\d|chatgpt-)/i.test(id)) return false;
+  if (/instruct|whisper|dall-e|tts|embed|search|realtime|audio|preview.*audio/i.test(id)) return false;
+  return true;
+}
+
+async function fetchDeepSeekModels(): Promise<ModelInfo[]> {
+  const key = process.env.VITE_DEEPSEEK_API_KEY || "";
+  const baseUrl = process.env.VITE_DEEPSEEK_BASE_URL || "https://api.deepseek.com/v1";
+  return fetchOpenAICompatibleModels("deepseek", baseUrl, key, () => true);
+}
+
+async function fetchOpenAIModels(): Promise<ModelInfo[]> {
+  const key = process.env.VITE_OPENAI_API_KEY || "";
+  const baseUrl = process.env.VITE_OPENAI_BASE_URL || "https://api.openai.com/v1";
+  return fetchOpenAICompatibleModels("openai", baseUrl, key, isOpenAIChatModel);
+}
+
+export async function fetchAllModels(): Promise<ModelInfo[]> {
+  const results = await Promise.allSettled([
+    fetchGeminiModels(),
+    fetchAnthropicModels(),
+    fetchDeepSeekModels(),
+    fetchOpenAIModels(),
+  ]);
+  return results.flatMap(r => r.status === "fulfilled" ? r.value : []);
+}
+
+export async function fetchModelsForProvider(provider: string): Promise<ModelInfo[]> {
+  switch (provider) {
+    case "gemini": return fetchGeminiModels();
+    case "anthropic": return fetchAnthropicModels();
+    case "deepseek": return fetchDeepSeekModels();
+    case "openai": return fetchOpenAIModels();
+    default: return [];
+  }
+}
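Note: a usage sketch for the new service, e.g. from a settings UI or API route (the import path and logging are illustrative):

    import { fetchAllModels, fetchModelsForProvider } from "./services/modelsService";

    async function listModels() {
      const all = await fetchAllModels();                    // providers without API keys contribute []
      const gemini = await fetchModelsForProvider("gemini"); // per-provider results cached for 4 hours
      console.log(all.length, gemini.map(m => `${m.provider}:${m.id}`).join("\n"));
    }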