open-sse 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +180 -0
- package/config/constants.js +206 -0
- package/config/defaultThinkingSignature.js +7 -0
- package/config/ollamaModels.js +19 -0
- package/config/providerModels.js +161 -0
- package/handlers/chatCore.js +277 -0
- package/handlers/responsesHandler.js +69 -0
- package/index.js +69 -0
- package/package.json +44 -0
- package/services/accountFallback.js +148 -0
- package/services/combo.js +69 -0
- package/services/compact.js +64 -0
- package/services/model.js +109 -0
- package/services/provider.js +237 -0
- package/services/tokenRefresh.js +542 -0
- package/services/usage.js +398 -0
- package/translator/formats.js +12 -0
- package/translator/from-openai/claude.js +341 -0
- package/translator/from-openai/gemini.js +469 -0
- package/translator/from-openai/openai-responses.js +361 -0
- package/translator/helpers/claudeHelper.js +179 -0
- package/translator/helpers/geminiHelper.js +131 -0
- package/translator/helpers/openaiHelper.js +80 -0
- package/translator/helpers/responsesApiHelper.js +103 -0
- package/translator/helpers/toolCallHelper.js +111 -0
- package/translator/index.js +167 -0
- package/translator/to-openai/claude.js +238 -0
- package/translator/to-openai/gemini.js +151 -0
- package/translator/to-openai/openai-responses.js +140 -0
- package/translator/to-openai/openai.js +371 -0
- package/utils/bypassHandler.js +258 -0
- package/utils/error.js +133 -0
- package/utils/ollamaTransform.js +82 -0
- package/utils/requestLogger.js +217 -0
- package/utils/stream.js +274 -0
- package/utils/streamHandler.js +131 -0
package/README.md
ADDED
|
@@ -0,0 +1,180 @@
|
|
|
1
|
+
# open-sse
|
|
2
|
+
|
|
3
|
+
Universal AI proxy library with SSE streaming support for OpenAI, Claude, Gemini and more.
|
|
4
|
+
|
|
5
|
+
## Installation
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
npm install open-sse
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
## Features
|
|
12
|
+
|
|
13
|
+
- Multi-provider support (OpenAI, Claude, Gemini, Copilot, Codex, etc.)
|
|
14
|
+
- SSE streaming for real-time responses
|
|
15
|
+
- Automatic format translation between providers
|
|
16
|
+
- Token refresh & OAuth management
|
|
17
|
+
- Account fallback handling
|
|
18
|
+
- Combo models (route across multiple providers)
|
|
19
|
+
|
|
20
|
+
## Quick Start
|
|
21
|
+
|
|
22
|
+
### Basic Usage
|
|
23
|
+
|
|
24
|
+
```javascript
|
|
25
|
+
import { handleChatCore, getModelInfoCore } from "open-sse";
|
|
26
|
+
|
|
27
|
+
async function handleChat(request) {
|
|
28
|
+
const body = await request.json();
|
|
29
|
+
|
|
30
|
+
// Get model info (auto-detect provider)
|
|
31
|
+
const modelInfo = await getModelInfoCore(body.model);
|
|
32
|
+
|
|
33
|
+
// Provider credentials
|
|
34
|
+
const credentials = {
|
|
35
|
+
provider: modelInfo.provider,
|
|
36
|
+
accessToken: "your-token"
|
|
37
|
+
};
|
|
38
|
+
|
|
39
|
+
// Handle chat with auto translation & streaming
|
|
40
|
+
return await handleChatCore(body, credentials, console);
|
|
41
|
+
}
|
|
42
|
+
```
|
|
43
|
+
|
|
44
|
+
### With Token Refresh
|
|
45
|
+
|
|
46
|
+
```javascript
|
|
47
|
+
import { handleChatCore, isTokenExpiringSoon, refreshTokenByProvider } from "open-sse";
|
|
48
|
+
|
|
49
|
+
async function handleChat(request, credentials) {
|
|
50
|
+
const body = await request.json();
|
|
51
|
+
|
|
52
|
+
// Auto refresh if expiring
|
|
53
|
+
if (isTokenExpiringSoon(credentials)) {
|
|
54
|
+
const newTokens = await refreshTokenByProvider(
|
|
55
|
+
credentials.provider,
|
|
56
|
+
credentials,
|
|
57
|
+
console
|
|
58
|
+
);
|
|
59
|
+
credentials = { ...credentials, ...newTokens };
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
return await handleChatCore(body, credentials, console);
|
|
63
|
+
}
|
|
64
|
+
```
|
|
65
|
+
|
|
66
|
+
### Format Translation
|
|
67
|
+
|
|
68
|
+
```javascript
|
|
69
|
+
import { translateRequest, translateResponse } from "open-sse";
|
|
70
|
+
|
|
71
|
+
// OpenAI → Claude
|
|
72
|
+
const claudeRequest = await translateRequest(openAIRequest, "openai", "claude");
|
|
73
|
+
|
|
74
|
+
// Claude → OpenAI
|
|
75
|
+
const openAIResponse = await translateResponse(claudeResponse, "claude", "openai");
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
### Combo Models
|
|
79
|
+
|
|
80
|
+
```javascript
|
|
81
|
+
import { handleComboChat } from "open-sse/services/combo.js";
|
|
82
|
+
|
|
83
|
+
const models = [
|
|
84
|
+
{ provider: "claude", model: "claude-3-5-sonnet-20241022" },
|
|
85
|
+
{ provider: "openai", model: "gpt-4" }
|
|
86
|
+
];
|
|
87
|
+
|
|
88
|
+
const response = await handleComboChat(request, models, getCredentials, console);
|
|
89
|
+
```
|
|
90
|
+
|
|
91
|
+
## Main Exports
|
|
92
|
+
|
|
93
|
+
```javascript
|
|
94
|
+
// Handlers
|
|
95
|
+
import { handleChatCore, isTokenExpiringSoon } from "open-sse";
|
|
96
|
+
|
|
97
|
+
// Services
|
|
98
|
+
import { getModelInfoCore, parseModel } from "open-sse";
|
|
99
|
+
import { buildProviderUrl, buildProviderHeaders, detectFormat } from "open-sse";
|
|
100
|
+
import { refreshTokenByProvider, refreshClaudeOAuthToken } from "open-sse";
|
|
101
|
+
import { checkFallbackError, isAccountUnavailable } from "open-sse";
|
|
102
|
+
|
|
103
|
+
// Translation
|
|
104
|
+
import { translateRequest, translateResponse, needsTranslation } from "open-sse";
|
|
105
|
+
|
|
106
|
+
// Utils
|
|
107
|
+
import { errorResponse } from "open-sse";
|
|
108
|
+
import { createSSETransformStreamWithLogger } from "open-sse";
|
|
109
|
+
```
|
|
110
|
+
|
|
111
|
+
## Configuration
|
|
112
|
+
|
|
113
|
+
### Provider Models
|
|
114
|
+
|
|
115
|
+
```javascript
|
|
116
|
+
import { PROVIDER_MODELS, getProviderModels } from "open-sse";
|
|
117
|
+
|
|
118
|
+
const claudeModels = getProviderModels("claude");
|
|
119
|
+
```
|
|
120
|
+
|
|
121
|
+
### Constants
|
|
122
|
+
|
|
123
|
+
```javascript
|
|
124
|
+
import { PROVIDERS, OAUTH_ENDPOINTS, CACHE_TTL } from "open-sse";
|
|
125
|
+
```
|
|
126
|
+
|
|
127
|
+
## Examples
|
|
128
|
+
|
|
129
|
+
### Next.js API Route
|
|
130
|
+
|
|
131
|
+
```javascript
|
|
132
|
+
// app/api/chat/route.js
|
|
133
|
+
import { handleChatCore, getModelInfoCore } from "open-sse";
|
|
134
|
+
|
|
135
|
+
export async function POST(request) {
|
|
136
|
+
const body = await request.json();
|
|
137
|
+
const modelInfo = await getModelInfoCore(body.model);
|
|
138
|
+
|
|
139
|
+
const credentials = {
|
|
140
|
+
provider: modelInfo.provider,
|
|
141
|
+
accessToken: process.env.API_TOKEN
|
|
142
|
+
};
|
|
143
|
+
|
|
144
|
+
return await handleChatCore(body, credentials, console);
|
|
145
|
+
}
|
|
146
|
+
```
|
|
147
|
+
|
|
148
|
+
### Express.js
|
|
149
|
+
|
|
150
|
+
```javascript
|
|
151
|
+
import express from "express";
|
|
152
|
+
import { handleChatCore, getModelInfoCore } from "open-sse";
|
|
153
|
+
|
|
154
|
+
const app = express();
|
|
155
|
+
|
|
156
|
+
app.post("/api/chat", async (req, res) => {
|
|
157
|
+
const modelInfo = await getModelInfoCore(req.body.model);
|
|
158
|
+
|
|
159
|
+
const credentials = {
|
|
160
|
+
provider: modelInfo.provider,
|
|
161
|
+
accessToken: process.env.API_TOKEN
|
|
162
|
+
};
|
|
163
|
+
|
|
164
|
+
const response = await handleChatCore(req.body, credentials, console);
|
|
165
|
+
return res.send(response);
|
|
166
|
+
});
|
|
167
|
+
```
|
|
168
|
+
|
|
169
|
+
## Supported Providers
|
|
170
|
+
|
|
171
|
+
- OpenAI (GPT-4, GPT-3.5)
|
|
172
|
+
- Anthropic Claude (Sonnet, Opus, Haiku)
|
|
173
|
+
- Google Gemini
|
|
174
|
+
- GitHub Copilot
|
|
175
|
+
- Codex
|
|
176
|
+
- Qwen
|
|
177
|
+
|
|
178
|
+
## License
|
|
179
|
+
|
|
180
|
+
MIT
|
|
@@ -0,0 +1,206 @@
|
|
|
1
|
+
// Provider configurations
//
// One entry per upstream AI provider. Common fields:
//   baseUrl   - the upstream endpoint requests are forwarded to
//               (antigravity uses `baseUrls`, an ordered fallback list)
//   format    - wire format identifier consumed by the translator layer
//   headers   - extra static headers sent with every request to this provider
//   clientId / clientSecret / tokenUrl / authUrl - OAuth client settings
//               for providers that authenticate via OAuth.
//
// NOTE(review): the client IDs/secrets below are the public credentials of
// the respective vendor CLI tools (Claude Code, Gemini CLI, Codex, etc.) —
// presumably intentional so this proxy can impersonate those clients;
// confirm this is acceptable for the deployment.
export const PROVIDERS = {
  claude: {
    baseUrl: "https://api.anthropic.com/v1/messages",
    format: "claude",
    headers: {
      "Anthropic-Version": "2023-06-01",
      // Opt-in beta feature flags (Claude Code, OAuth, interleaved thinking, ...)
      "Anthropic-Beta": "claude-code-20250219,oauth-2025-04-20,interleaved-thinking-2025-05-14,fine-grained-tool-streaming-2025-05-14,context-management-2025-06-27",
      "Anthropic-Dangerous-Direct-Browser-Access": "true",
      // Headers below mimic the official claude-cli / Stainless SDK client
      "User-Agent": "claude-cli/1.0.83 (external, cli)",
      "X-App": "cli",
      "X-Stainless-Helper-Method": "stream",
      "X-Stainless-Retry-Count": "0",
      "X-Stainless-Runtime-Version": "v24.3.0",
      "X-Stainless-Package-Version": "0.55.1",
      "X-Stainless-Runtime": "node",
      "X-Stainless-Lang": "js",
      "X-Stainless-Arch": "arm64",
      "X-Stainless-Os": "MacOS",
      "X-Stainless-Timeout": "60"
    },
    // Claude OAuth configuration
    clientId: "9d1c250a-e61b-44d9-88ed-5944d1962f5e",
    tokenUrl: "https://console.anthropic.com/v1/oauth/token"
  },
  gemini: {
    baseUrl: "https://generativelanguage.googleapis.com/v1beta/models",
    format: "gemini",
    // Google OAuth client (shared with gemini-cli below)
    clientId: "681255809395-oo8ft2oprdrnp9e3aqf6av3hmdib135j.apps.googleusercontent.com",
    clientSecret: "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"
  },
  "gemini-cli": {
    baseUrl: "https://cloudcode-pa.googleapis.com/v1internal",
    format: "gemini-cli",
    clientId: "681255809395-oo8ft2oprdrnp9e3aqf6av3hmdib135j.apps.googleusercontent.com",
    clientSecret: "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"
  },
  codex: {
    baseUrl: "https://chatgpt.com/backend-api/codex/responses",
    format: "codex",
    headers: {
      "Version": "0.21.0",
      "Openai-Beta": "responses=experimental",
      "User-Agent": "codex_cli_rs/0.50.0 (Mac OS 26.0.1; arm64)"
    },
    // OpenAI OAuth configuration
    clientId: "app_EMoamEEZ73f0CkXaXp7hrann",
    // NOTE(review): this clientSecret is byte-identical to the GOOGLE
    // OAuth secret used by the gemini entries above ("GOCSPX-" prefix is a
    // Google convention). Looks like a copy-paste — verify whether the OpenAI
    // PKCE flow actually needs a secret here, or remove/replace it.
    clientSecret: "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",
    tokenUrl: "https://auth.openai.com/oauth/token"
  },
  qwen: {
    baseUrl: "https://portal.qwen.ai/v1/chat/completions",
    format: "openai",
    headers: {
      "User-Agent": "google-api-nodejs-client/9.15.1",
      "X-Goog-Api-Client": "gl-node/22.17.0"
    },
    // Qwen OAuth configuration
    clientId: "f0304373b74a44d2b584a3fb70ca9e56", // From CLIProxyAPI
    tokenUrl: "https://chat.qwen.ai/api/v1/oauth2/token",
    authUrl: "https://chat.qwen.ai/api/v1/oauth2/device/code"
  },
  iflow: {
    baseUrl: "https://apis.iflow.cn/v1/chat/completions",
    format: "openai",
    headers: {
      "User-Agent": "iFlow-Cli"
    },
    // iFlow OAuth configuration (from CLIProxyAPI)
    clientId: "10009311001",
    clientSecret: "4Z3YjXycVsQvyGF1etiNlIBB4RsqSDtW",
    tokenUrl: "https://iflow.cn/oauth/token",
    authUrl: "https://iflow.cn/oauth"
  },
  antigravity: {
    // Ordered fallback list: sandbox endpoint first, production second.
    baseUrls: [
      "https://daily-cloudcode-pa.sandbox.googleapis.com",
      "https://cloudcode-pa.googleapis.com"
    ],
    format: "antigravity",
    headers: {
      "User-Agent": "antigravity/1.11.5 windows/amd64"
    },
    clientId: "1071006060591-tmhssin2h21lcre235vtolojh4g403ep.apps.googleusercontent.com",
    clientSecret: "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"
  },
  openrouter: {
    baseUrl: "https://openrouter.ai/api/v1/chat/completions",
    format: "openai",
    headers: {
      // OpenRouter attribution headers (shown on their dashboard)
      "HTTP-Referer": "https://endpoint-proxy.local",
      "X-Title": "Endpoint Proxy"
    }
  },
  openai: {
    baseUrl: "https://api.openai.com/v1/chat/completions",
    format: "openai"
  },
  glm: {
    baseUrl: "https://api.z.ai/api/anthropic/v1/messages",
    format: "claude",
    headers: {
      "Anthropic-Version": "2023-06-01",
      "Anthropic-Beta": "claude-code-20250219,interleaved-thinking-2025-05-14"
    }
  },
  kimi: {
    baseUrl: "https://api.kimi.com/coding/v1/messages",
    format: "claude",
    headers: {
      "Anthropic-Version": "2023-06-01",
      "Anthropic-Beta": "claude-code-20250219,interleaved-thinking-2025-05-14"
    }
  },
  minimax: {
    baseUrl: "https://api.minimax.io/anthropic/v1/messages",
    format: "claude",
    headers: {
      "Anthropic-Version": "2023-06-01",
      "Anthropic-Beta": "claude-code-20250219,interleaved-thinking-2025-05-14"
    }
  },
  github: {
    baseUrl: "https://api.githubcopilot.com/chat/completions", // GitHub Copilot API endpoint for chat
    format: "openai", // GitHub Copilot uses OpenAI-compatible format
    headers: {
      // Headers below mimic the VS Code Copilot Chat extension
      "copilot-integration-id": "vscode-chat",
      "editor-version": "vscode/1.107.1",
      "editor-plugin-version": "copilot-chat/0.26.7",
      "user-agent": "GitHubCopilotChat/0.26.7",
      "openai-intent": "conversation-panel",
      "x-github-api-version": "2025-04-01",
      "x-vscode-user-agent-library-version": "electron-fetch",
      "X-Initiator": "user",
      "Accept": "application/json",
      "Content-Type": "application/json"
    }
  }
};
|
|
140
|
+
|
|
141
|
+
// Claude system prompt
// Injected when talking to the Claude provider. NOTE(review): presumably this
// must match Claude Code's exact prompt string for OAuth-authenticated
// requests to be accepted — confirm before editing.
export const CLAUDE_SYSTEM_PROMPT = "You are Claude Code, Anthropic's official CLI for Claude.";
|
|
143
|
+
|
|
144
|
+
// OAuth endpoints
// Token/authorization URLs per identity provider, keyed by vendor.
// `token` is the token-exchange endpoint; `auth` is the user-facing
// authorization (or device-code) endpoint. GitHub additionally exposes a
// separate `deviceCode` endpoint for the device-authorization grant.
export const OAUTH_ENDPOINTS = {
  google: {
    token: "https://oauth2.googleapis.com/token",
    auth: "https://accounts.google.com/o/oauth2/auth"
  },
  openai: {
    token: "https://auth.openai.com/oauth/token",
    auth: "https://auth.openai.com/oauth/authorize"
  },
  anthropic: {
    token: "https://console.anthropic.com/v1/oauth/token",
    auth: "https://console.anthropic.com/v1/oauth/authorize"
  },
  qwen: {
    // Qwen uses the device-authorization flow: `auth` is the device-code endpoint.
    token: "https://chat.qwen.ai/api/v1/oauth2/token", // From CLIProxyAPI
    auth: "https://chat.qwen.ai/api/v1/oauth2/device/code" // From CLIProxyAPI
  },
  iflow: {
    token: "https://iflow.cn/oauth/token",
    auth: "https://iflow.cn/oauth"
  },
  github: {
    token: "https://github.com/login/oauth/access_token",
    auth: "https://github.com/login/oauth/authorize",
    deviceCode: "https://github.com/login/device/code"
  }
};
|
|
172
|
+
|
|
173
|
+
// Cache TTLs (seconds)
// Lifetimes for cached lookups. Values are in SECONDS (not ms), unlike the
// millisecond-based COOLDOWN_MS/BACKOFF_CONFIG constants in this file.
export const CACHE_TTL = {
  userInfo: 300, // 5 minutes
  modelAlias: 3600 // 1 hour
};
|
|
178
|
+
|
|
179
|
+
// Default max tokens
// Fallback completion budget — presumably applied when a request omits
// max_tokens; confirm against the request handlers.
export const DEFAULT_MAX_TOKENS = 64000;
|
|
181
|
+
|
|
182
|
+
// Exponential backoff config for rate limits (like CLIProxyAPI)
// All durations in milliseconds. NOTE(review): presumably consumed as
// delay = base * 2^level, clamped to `max`, with the level itself capped at
// `maxLevel` — confirm against the code that reads this config.
export const BACKOFF_CONFIG = {
  base: 1000, // 1 second base
  max: 30 * 60 * 1000, // 30 minutes max
  maxLevel: 15 // Cap backoff level
};
|
|
188
|
+
|
|
189
|
+
// Error-based cooldown times (aligned with CLIProxyAPI)
// How long an account is benched after a given upstream error, in
// milliseconds. Keys map to HTTP status classes (see per-line comments).
export const COOLDOWN_MS = {
  unauthorized: 30 * 60 * 1000, // 401 → 30 min
  paymentRequired: 30 * 60 * 1000, // 402/403 → 30 min
  notFound: 12 * 60 * 60 * 1000, // 404 → 12 hours
  transient: 30 * 1000, // 408/500/502/503/504 → 30 sec (comment previously said "1 min", contradicting the value)
  requestNotAllowed: 5 * 1000, // "Request not allowed" → 5 sec
  // Legacy aliases for backward compatibility
  rateLimit: 15 * 60 * 1000, // 429 → 15 min
  serviceUnavailable: 60 * 1000, // → 1 min
  authExpired: 30 * 60 * 1000 // → 30 min (same as unauthorized)
};
|
|
201
|
+
|
|
202
|
+
// Skip patterns - requests containing these texts will bypass provider
// (e.g. chat-title generation prompts that don't need a real model call;
// presumably matched by substring — confirm against the bypass handler).
export const SKIP_PATTERNS = [
  "Please write a 5-10 word title for the following conversation:"
];
|
|
206
|
+
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
// Default signature for thinking mode when no signature from thinkingStore
// These are opaque, provider-issued base64 blobs attached to "thinking"
// content blocks; treat them as black boxes and never edit them by hand.
export const DEFAULT_THINKING_CLAUDE_SIGNATURE = "EpwGCkYIChgCKkCzVUuRrg7CcglSUWEef4rH6o35g9UYS8ZPe0/VomQTBsFx6sttYNj5l8GqgW6ejuHyYqpFToxIbZl0bw17l5dJEgzCnqDO0Z8fRlMrNgsaDLS1cnCjC53KBqE0CCIwAADQdo1eO+7qPAmo8J4WR3JPmr92S97kmvr5K1iPMiOpkZNj8mEXW8uzBoOJs/9ZKoMFiqHJ3UObwaJDqFOW70E9oCwDoc6jesaWVAEdN5vWfKMpIkjFJjECdjIdkxyJNJ8Ib8yXVal3qwE7uThoPRqSZDdHB5mmwPEjWE/90cSYCbtX2YsJki1265CabBb8/QEkODXg4kgRrL+c8e8rRXz/dr1RswvaPuzEdGKHRNi9UooNUeOK4/ebx1KkP9YZttyohN9GWqlts36kOoW0Cfie/ABDgF9g534BPth/sstxDM6d79QlRmh6NxizyTF74DXJI34u0M4tTRchqE5pAq85SgdJaa+dix1yJPMji8m6nZkwJbscJb9rdc2MKyKWjz8QL2+rTSSuZ2F1k1qSsW0xNcI7qLcI12Vncfn/VqY6YOIZy/saZBR0ezXvN6g+UYbuIdyVg7AyIFZt3nbrO7/kmOEb2VKzygwklHGEIJHfFgMpH3JSrAzbZIowVHOF7VaJ+KXRFDCFin7hHTOiOsdg+1ij1mML9Z/x/9CP4b7OUcaQm1llDZPSHc6rZMNL3DdB+fW5YfmNgKU35S+7AMtA10nVILzDAk1UV4T2K9Do09JlI6rjOs9UuULlIN2Z0eE8YTlANR6uQcw7lMcdfqYE8tke4rDKc2dDiaS5vVe45VewICNpdXGN11yw8QqH7p27CR1HtN30e0tHXOR3bIwWk/Yb6O5fTaKG6Ri8e5ZCPvdD9HqepVi188nM0iTjJqL58F3ni04ECIhcbyaQWnuTes1Kw4CMwiZDLQkk8Hgz7HkUOf1btQTF/0nhD7ry0n0hAEg2PaDM3V6TjOjf4hEldRmeqERcQF1PfgKb6ZM12rlIIfUqKACczWJSzTV158+47HX36o0cgux6nFlv/DE+sEiRVxgB";

// Fallback signature for Gemini thinking blocks (same caveat as above).
export const DEFAULT_THINKING_GEMINI_SIGNATURE = "EuwGCukGAXLI2nxwZIq54WWSoL/YN0P3TsDZ7zRnLi8g0S4aVr2HUGxvaHKySuY6HAVzcE0GPGjXrytLIldxthSvfxgUlJh6Qa9Z+Oj5QZBlYdg6HaJ6yuY5R7waE6rdwBsRf7Ft2j3DJ9rMi9qhWFqApewYtPhls3VHtuvND3l8Rm09+lbAXQs6KKWEWrxNLKTBkfpMgXhRERc/TQRMZu1twAablm6/Zk1tsYRvfWKLsNbeKF+CCojJdXJKvnR/8Ouuoa+Y2Ti20hcW7aZIIjZDFYPU//k6Ybmhg69J/imbFai2ckhfLaisqdDkdoIiBJScTOUvYqP6AE9d4MsydSC+UlhIMk4hoP76R8vUSCZRMkjOaDXstf/QoVZKbt94wyRZgAJ1G0BqI8L5ow86kLpA4wJEtxsRGymOE4bKUvApveBakYDNM9APkf+LbtbzWSseGjoZcSlycF9iN8Q2XNYKRrHbv3Lr5Y8JjdH/5y/6SHkNehTEZugaeGnSPSyCTWto1kQgHpxdWmhkLfJGNUGLmue7Mesj4TSms4J33mRpYVhNB/J333FCqIP0hr/E7BkkjEn7yZ4X7SQlh+xKPurapsnHRwiKmtsilmEFrnTE9iQr+pMr6M29qqFNv1tr5yumbaJw8JW9sB15tNsRv+dW6BjNanbsKz7HCgKUBc8tGy+7YuhXzAfViyRefcjK7eZW0Fbyt7AbybJTKz78W8NH7ye6LAwzOebXpeZ4D43fNIt8bKh26qgduSQv/7o+pAflkuqHZ99YWgHQ8h8OkZFi3eOiSYjsjhdZ/czWOdoPI/OnqIldzMPF5YlrKBLFX8VhRKVmqgsmWf5PHGulHhMkVlS+XG2UIseGy69ARa93D78Gsa+1n1kJr7EEB7Rh+27vUMxVYLdz1yMSvE5nalTAlg/ZeG8+XQ0cHuAI3KbQpHW2Q++RdXfm5JzD5WdJZUU+Zn8t8UUn85BH4RxZLeE0qJikgSsKoYVBc6YhiMjhPgkR95ReimY4Z0xCJdRo1gjexOFeODZMpQF6Yxnoic7IrdgsFA3iePTbFnPp3IAM1fAThWhXJUn3QInUOTd5o1qmTmn6REbL15g/JQNl+dqUoPkhleeb2V3kjqp1okmO3wMZbPknR3S1LZNmlS72/iBQUm+n2b/RCn4PjmM2";

// Placeholder thinking text used when no real thinking content is available.
export const DEFAULT_THINKING_TEXT = "...";
|
|
7
|
+
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
// Static model catalog shaped like an Ollama `/api/tags` response.
// NOTE(review): sizes/digests/timestamps are placeholder values, and the
// model list is hard-coded — presumably consumed by utils/ollamaTransform.js
// to present this proxy as an Ollama server; confirm against that module.
export const ollamaModels = {
  models: [
    {
      name: "llama3.2",
      modified_at: "2025-12-26T00:00:00Z",
      size: 2000000000,
      digest: "abc123def456", // placeholder digest, not a real checksum
      details: { format: "gguf", family: "llama", parameter_size: "3B", quantization_level: "Q4_K_M" }
    },
    {
      name: "qwen2.5",
      modified_at: "2025-12-26T00:00:00Z",
      size: 4000000000,
      digest: "def456abc123", // placeholder digest, not a real checksum
      details: { format: "gguf", family: "qwen", parameter_size: "7B", quantization_level: "Q4_K_M" }
    }
  ]
};
|
|
19
|
+
|
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
// Provider models - Single source of truth
// Key = alias (cc, cx, gc, qw, if, ag, gh for OAuth; id for API Key)
// Field "provider" for special cases (e.g. AntiGravity models that call different backends)
//
// Each entry is { id, name }: `id` is the wire model identifier sent
// upstream, `name` is the human-readable display label.
// Fixes over the previous revision:
//   - "Claude Sonnet 4.5 " had a trailing space in its display name
//   - "claude-opus-4-5-thinking" was labeled "Claude Opus 4.5" (missing
//     "Thinking"), inconsistent with the sonnet thinking variant.

export const PROVIDER_MODELS = {
  // OAuth Providers (using alias)
  cc: [ // Claude Code
    { id: "claude-opus-4-5-20251101", name: "Claude 4.5 Opus" },
    { id: "claude-sonnet-4-5-20250929", name: "Claude 4.5 Sonnet" },
    { id: "claude-haiku-4-5-20251001", name: "Claude 4.5 Haiku" },
  ],
  cx: [ // OpenAI Codex
    { id: "gpt-5.2-codex", name: "GPT 5.2 Codex" },
    { id: "gpt-5.2", name: "GPT 5.2" },
    { id: "gpt-5.1-codex-max", name: "GPT 5.1 Codex Max" },
    { id: "gpt-5.1-codex", name: "GPT 5.1 Codex" },
    { id: "gpt-5.1-codex-mini", name: "GPT 5.1 Codex Mini" },
    { id: "gpt-5.1", name: "GPT 5.1" },
    { id: "gpt-5-codex", name: "GPT 5 Codex" },
    { id: "gpt-5-codex-mini", name: "GPT 5 Codex Mini" },
  ],
  gc: [ // Gemini CLI
    { id: "gemini-3-flash-preview", name: "Gemini 3 Flash Preview" },
    { id: "gemini-3-pro-preview", name: "Gemini 3 Pro Preview" },
    { id: "gemini-2.5-pro", name: "Gemini 2.5 Pro" },
    { id: "gemini-2.5-flash", name: "Gemini 2.5 Flash" },
    { id: "gemini-2.5-flash-lite", name: "Gemini 2.5 Flash Lite" },
  ],
  qw: [ // Qwen Code
    { id: "qwen3-coder-plus", name: "Qwen3 Coder Plus" },
    { id: "qwen3-coder-flash", name: "Qwen3 Coder Flash" },
    { id: "vision-model", name: "Qwen3 Vision Model" },
  ],
  if: [ // iFlow AI
    { id: "qwen3-coder-plus", name: "Qwen3 Coder Plus" },
    { id: "kimi-k2", name: "Kimi K2" },
    { id: "kimi-k2-thinking", name: "Kimi K2 Thinking" },
    { id: "deepseek-r1", name: "DeepSeek R1" },
    { id: "deepseek-v3.2-chat", name: "DeepSeek V3.2 Chat" },
    { id: "deepseek-v3.2-reasoner", name: "DeepSeek V3.2 Reasoner" },
    { id: "minimax-m2", name: "MiniMax M2" },
    { id: "glm-4.6", name: "GLM 4.6" },
    { id: "glm-4.7", name: "GLM 4.7" },
  ],
  ag: [ // Antigravity - special case: models call different backends
    { id: "gemini-3-pro-low", name: "Gemini 3 Pro Low" },
    { id: "gemini-3-pro-high", name: "Gemini 3 Pro High" },
    { id: "gemini-3-flash", name: "Gemini 3 Flash" },
    { id: "gemini-2.5-flash", name: "Gemini 2.5 Flash" },
    { id: "claude-sonnet-4-5", name: "Claude Sonnet 4.5" },
    { id: "claude-sonnet-4-5-thinking", name: "Claude Sonnet 4.5 Thinking" },
    { id: "claude-opus-4-5-thinking", name: "Claude Opus 4.5 Thinking" },
  ],
  gh: [ // GitHub Copilot
    { id: "gpt-5", name: "GPT-5" },
    { id: "gpt-5-mini", name: "GPT-5 Mini" },
    // { id: "gpt-5.1", name: "GPT-5.1" },
    // { id: "gpt-5.2", name: "GPT-5.2" },
    // { id: "gpt-5-codex", name: "GPT-5 Codex" },
    { id: "gpt-5.1-codex", name: "GPT-5.1 Codex" },
    // { id: "gpt-5.1-codex-mini", name: "GPT-5.1 Codex Mini" },
    { id: "gpt-5.1-codex-max", name: "GPT-5.1 Codex Max" },
    { id: "gpt-4.1", name: "GPT-4.1" },
    { id: "claude-4.5-sonnet", name: "Claude 4.5 Sonnet" },
    { id: "claude-4.5-opus", name: "Claude 4.5 Opus" },
    { id: "claude-4.5-haiku", name: "Claude 4.5 Haiku" },
    { id: "gemini-3-pro", name: "Gemini 3 Pro" },
    { id: "gemini-3-flash", name: "Gemini 3 Flash" },
    { id: "gemini-2.5-pro", name: "Gemini 2.5 Pro" },
    { id: "grok-code-fast-1", name: "Grok Code Fast 1" },
  ],

  // API Key Providers (alias = id)
  openai: [
    { id: "gpt-4o", name: "GPT-4o" },
    { id: "gpt-4o-mini", name: "GPT-4o Mini" },
    { id: "gpt-4-turbo", name: "GPT-4 Turbo" },
    { id: "o1", name: "O1" },
    { id: "o1-mini", name: "O1 Mini" },
  ],
  anthropic: [
    { id: "claude-sonnet-4-20250514", name: "Claude Sonnet 4" },
    { id: "claude-opus-4-20250514", name: "Claude Opus 4" },
    { id: "claude-3-5-sonnet-20241022", name: "Claude 3.5 Sonnet" },
  ],
  gemini: [
    { id: "gemini-3-pro-preview", name: "Gemini 3 Pro Preview" },
    { id: "gemini-2.5-pro", name: "Gemini 2.5 Pro" },
    { id: "gemini-2.5-flash", name: "Gemini 2.5 Flash" },
    { id: "gemini-2.5-flash-lite", name: "Gemini 2.5 Flash Lite" },
  ],
  openrouter: [
    { id: "auto", name: "Auto (Best Available)" },
  ],
  glm: [
    { id: "glm-4.7", name: "GLM 4.7" },
    { id: "glm-4.6", name: "GLM 4.6" },
    { id: "glm-4.6v", name: "GLM 4.6V (Vision)" },
  ],
  kimi: [
    { id: "kimi-latest", name: "Kimi Latest" },
  ],
  minimax: [
    { id: "MiniMax-M2.1", name: "MiniMax M2.1" },
  ],
};
|
|
107
|
+
|
|
108
|
+
// Helper functions

/**
 * Look up the model catalog registered under a provider alias or id.
 * @param {string} aliasOrId - key into PROVIDER_MODELS (e.g. "cc", "openai")
 * @returns {Array<{id: string, name: string}>} the catalog, or [] if unknown
 */
export function getProviderModels(aliasOrId) {
  const catalog = PROVIDER_MODELS[aliasOrId];
  if (catalog) {
    return catalog;
  }
  return [];
}
|
|
112
|
+
|
|
113
|
+
/**
 * Return the id of the first (default) model for a provider.
 * @param {string} aliasOrId - key into PROVIDER_MODELS
 * @returns {string|null} the first model's id, or null if the provider is
 *   unknown or has an empty catalog
 */
export function getDefaultModel(aliasOrId) {
  const [firstModel] = PROVIDER_MODELS[aliasOrId] ?? [];
  return firstModel?.id || null;
}
|
|
117
|
+
|
|
118
|
+
/**
 * Check whether a model id is registered for a provider.
 * @param {string} aliasOrId - key into PROVIDER_MODELS
 * @param {string} modelId - model identifier to validate
 * @param {Set<string>} [passthroughProviders] - providers that accept any
 *   model id without validation
 * @returns {boolean} true if valid (or passthrough), false otherwise
 */
export function isValidModel(aliasOrId, modelId, passthroughProviders = new Set()) {
  // Passthrough providers skip catalog validation entirely.
  if (passthroughProviders.has(aliasOrId)) {
    return true;
  }
  const catalog = PROVIDER_MODELS[aliasOrId];
  if (!catalog) {
    return false;
  }
  for (const entry of catalog) {
    if (entry.id === modelId) {
      return true;
    }
  }
  return false;
}
|
|
124
|
+
|
|
125
|
+
/**
 * Resolve a model id to its human-readable display name.
 * Falls back to the raw id when the provider or model is unknown.
 * @param {string} aliasOrId - key into PROVIDER_MODELS
 * @param {string} modelId - model identifier to look up
 * @returns {string} display name, or modelId as-is if not found
 */
export function findModelName(aliasOrId, modelId) {
  const catalog = PROVIDER_MODELS[aliasOrId];
  if (!catalog) {
    return modelId;
  }
  const match = catalog.find((entry) => entry.id === modelId);
  return match?.name || modelId;
}
|
|
131
|
+
|
|
132
|
+
/**
 * Fetch the optional per-model `targetFormat` override for a provider/model.
 * NOTE: no entry in PROVIDER_MODELS currently declares `targetFormat`, so
 * this returns null for every model today — presumably a forward-compat hook.
 * @param {string} aliasOrId - key into PROVIDER_MODELS
 * @param {string} modelId - model identifier to look up
 * @returns {string|null} the model's targetFormat, or null if absent/unknown
 */
export function getModelTargetFormat(aliasOrId, modelId) {
  const catalog = PROVIDER_MODELS[aliasOrId];
  if (!catalog) {
    return null;
  }
  const match = catalog.find((entry) => entry.id === modelId);
  return match?.targetFormat || null;
}
|
|
138
|
+
|
|
139
|
+
// Provider ID to alias mapping
// Translates long-form provider ids (as used in PROVIDERS / credentials)
// to the short catalog aliases used as PROVIDER_MODELS keys. API-key
// providers map to themselves since their alias IS their id.
export const PROVIDER_ID_TO_ALIAS = {
  claude: "cc",
  codex: "cx",
  "gemini-cli": "gc",
  qwen: "qw",
  iflow: "if",
  antigravity: "ag",
  github: "gh",
  // API-key providers: alias = id
  openai: "openai",
  anthropic: "anthropic",
  gemini: "gemini",
  openrouter: "openrouter",
  glm: "glm",
  kimi: "kimi",
  minimax: "minimax",
};
|
|
156
|
+
|
|
157
|
+
/**
 * Get the model catalog for a long-form provider id.
 * Resolves the id through PROVIDER_ID_TO_ALIAS first; an unmapped id is
 * tried directly as a PROVIDER_MODELS key.
 * @param {string} providerId - long-form provider id (e.g. "claude")
 * @returns {Array<{id: string, name: string}>} the catalog, or [] if unknown
 */
export function getModelsByProviderId(providerId) {
  const catalogKey = PROVIDER_ID_TO_ALIAS[providerId] || providerId;
  const catalog = PROVIDER_MODELS[catalogKey];
  return catalog || [];
}
|
|
161
|
+
|