hedgequantx 2.4.43 → 2.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/package.json +1 -1
- package/src/app.js +44 -6
- package/src/menus/ai-agent.js +406 -0
- package/src/menus/dashboard.js +17 -5
- package/src/services/ai/index.js +371 -0
- package/src/services/ai/providers/index.js +490 -0
package/src/services/ai/index.js
@@ -0,0 +1,371 @@
+/**
+ * AI Service Manager
+ * Manages AI provider connections and settings
+ */
+
+const { getProviders, getProvider } = require('./providers');
+const { settings } = require('../../config');
+
+// In-memory cache of current connection
+let currentConnection = null;
+
+/**
+ * Get AI settings from storage
+ */
+const getAISettings = () => {
+  try {
+    return settings.get('ai') || {};
+  } catch {
+    return {};
+  }
+};
+
+/**
+ * Save AI settings to storage
+ */
+const saveAISettings = (aiSettings) => {
+  try {
+    settings.set('ai', aiSettings);
+  } catch (e) {
+    // Silent fail
+  }
+};
+
+/**
+ * Check if AI is connected
+ */
+const isConnected = () => {
+  const aiSettings = getAISettings();
+  return !!(aiSettings.provider && aiSettings.credentials);
+};
+
+/**
+ * Get current connection info
+ */
+const getConnection = () => {
+  const aiSettings = getAISettings();
+  if (!aiSettings.provider) return null;
+
+  const provider = getProvider(aiSettings.provider);
+  if (!provider) return null;
+
+  return {
+    provider: provider,
+    option: aiSettings.option,
+    model: aiSettings.model || provider.defaultModel,
+    connected: true
+  };
+};
+
+/**
+ * Connect to a provider
+ */
+const connect = async (providerId, optionId, credentials, model = null) => {
+  const provider = getProvider(providerId);
+  if (!provider) {
+    throw new Error('Invalid provider');
+  }
+
+  const option = provider.options.find(o => o.id === optionId);
+  if (!option) {
+    throw new Error('Invalid option');
+  }
+
+  // Save to settings
+  const aiSettings = {
+    provider: providerId,
+    option: optionId,
+    credentials: credentials,
+    model: model || provider.defaultModel
+  };
+
+  saveAISettings(aiSettings);
+  currentConnection = getConnection();
+
+  return currentConnection;
+};
+
+/**
+ * Disconnect from AI
+ */
+const disconnect = () => {
+  saveAISettings({});
+  currentConnection = null;
+};
+
+/**
+ * Get credentials (for API calls)
+ */
+const getCredentials = () => {
+  const aiSettings = getAISettings();
+  return aiSettings.credentials || null;
+};
+
+/**
+ * Validate API key with provider
+ */
+const validateConnection = async (providerId, optionId, credentials) => {
+  const provider = getProvider(providerId);
+  if (!provider) return { valid: false, error: 'Invalid provider' };
+
+  try {
+    switch (providerId) {
+      case 'anthropic':
+        return await validateAnthropic(credentials);
+      case 'openai':
+        return await validateOpenAI(credentials);
+      case 'gemini':
+        return await validateGemini(credentials);
+      case 'deepseek':
+        return await validateDeepSeek(credentials);
+      case 'groq':
+        return await validateGroq(credentials);
+      case 'ollama':
+        return await validateOllama(credentials);
+      case 'lmstudio':
+        return await validateLMStudio(credentials);
+      case 'custom':
+        return await validateCustom(credentials);
+      // OpenAI-compatible providers (use same validation)
+      case 'openrouter':
+        return await validateOpenRouter(credentials);
+      case 'xai':
+      case 'mistral':
+      case 'perplexity':
+      case 'together':
+      case 'qwen':
+      case 'moonshot':
+      case 'yi':
+      case 'zhipu':
+      case 'baichuan':
+        return await validateOpenAICompatible(provider, credentials);
+      default:
+        return { valid: false, error: 'Unknown provider' };
+    }
+  } catch (error) {
+    return { valid: false, error: error.message };
+  }
+};
+
+// Validation functions for each provider
+const validateAnthropic = async (credentials) => {
+  try {
+    const response = await fetch('https://api.anthropic.com/v1/messages', {
+      method: 'POST',
+      headers: {
+        'Content-Type': 'application/json',
+        'x-api-key': credentials.apiKey || credentials.sessionKey,
+        'anthropic-version': '2023-06-01'
+      },
+      body: JSON.stringify({
+        model: 'claude-sonnet-4-5-20250929',
+        max_tokens: 10,
+        messages: [{ role: 'user', content: 'Hi' }]
+      })
+    });
+
+    if (response.ok) {
+      return { valid: true };
+    }
+
+    const error = await response.json();
+    return { valid: false, error: error.error?.message || 'Invalid API key' };
+  } catch (e) {
+    return { valid: false, error: e.message };
+  }
+};
+
+const validateOpenAI = async (credentials) => {
+  try {
+    const response = await fetch('https://api.openai.com/v1/models', {
+      headers: {
+        'Authorization': `Bearer ${credentials.apiKey || credentials.accessToken}`
+      }
+    });
+
+    if (response.ok) {
+      return { valid: true };
+    }
+
+    return { valid: false, error: 'Invalid API key' };
+  } catch (e) {
+    return { valid: false, error: e.message };
+  }
+};
+
+const validateGemini = async (credentials) => {
+  try {
+    const response = await fetch(
+      `https://generativelanguage.googleapis.com/v1/models?key=${credentials.apiKey}`
+    );
+
+    if (response.ok) {
+      return { valid: true };
+    }
+
+    return { valid: false, error: 'Invalid API key' };
+  } catch (e) {
+    return { valid: false, error: e.message };
+  }
+};
+
+const validateDeepSeek = async (credentials) => {
+  try {
+    const response = await fetch('https://api.deepseek.com/v1/models', {
+      headers: {
+        'Authorization': `Bearer ${credentials.apiKey}`
+      }
+    });
+
+    if (response.ok) {
+      return { valid: true };
+    }
+
+    return { valid: false, error: 'Invalid API key' };
+  } catch (e) {
+    return { valid: false, error: e.message };
+  }
+};
+
+const validateGroq = async (credentials) => {
+  try {
+    const response = await fetch('https://api.groq.com/openai/v1/models', {
+      headers: {
+        'Authorization': `Bearer ${credentials.apiKey}`
+      }
+    });
+
+    if (response.ok) {
+      return { valid: true };
+    }
+
+    return { valid: false, error: 'Invalid API key' };
+  } catch (e) {
+    return { valid: false, error: e.message };
+  }
+};
+
+const validateOllama = async (credentials) => {
+  try {
+    const endpoint = credentials.endpoint || 'http://localhost:11434';
+    const response = await fetch(`${endpoint}/api/tags`);
+
+    if (response.ok) {
+      const data = await response.json();
+      return {
+        valid: true,
+        models: data.models?.map(m => m.name) || []
+      };
+    }
+
+    return { valid: false, error: 'Cannot connect to Ollama' };
+  } catch (e) {
+    return { valid: false, error: 'Ollama not running. Start with: ollama serve' };
+  }
+};
+
+const validateCustom = async (credentials) => {
+  try {
+    const response = await fetch(`${credentials.endpoint}/models`, {
+      headers: credentials.apiKey ? {
+        'Authorization': `Bearer ${credentials.apiKey}`
+      } : {}
+    });
+
+    if (response.ok) {
+      return { valid: true };
+    }
+
+    return { valid: false, error: 'Cannot connect to endpoint' };
+  } catch (e) {
+    return { valid: false, error: e.message };
+  }
+};
+
+const validateOpenRouter = async (credentials) => {
+  try {
+    const response = await fetch('https://openrouter.ai/api/v1/models', {
+      headers: {
+        'Authorization': `Bearer ${credentials.apiKey}`
+      }
+    });
+
+    if (response.ok) {
+      return { valid: true };
+    }
+
+    return { valid: false, error: 'Invalid API key' };
+  } catch (e) {
+    return { valid: false, error: e.message };
+  }
+};
+
+const validateLMStudio = async (credentials) => {
+  try {
+    const endpoint = credentials.endpoint || 'http://localhost:1234/v1';
+    const response = await fetch(`${endpoint}/models`);
+
+    if (response.ok) {
+      const data = await response.json();
+      return {
+        valid: true,
+        models: data.data?.map(m => m.id) || []
+      };
+    }
+
+    return { valid: false, error: 'Cannot connect to LM Studio' };
+  } catch (e) {
+    return { valid: false, error: 'LM Studio not running. Start local server first.' };
+  }
+};
+
+const validateOpenAICompatible = async (provider, credentials) => {
+  try {
+    const endpoint = provider.endpoint;
+    const response = await fetch(`${endpoint}/models`, {
+      headers: {
+        'Authorization': `Bearer ${credentials.apiKey}`,
+        'Content-Type': 'application/json'
+      }
+    });
+
+    if (response.ok) {
+      return { valid: true };
+    }
+
+    // Some providers don't have /models endpoint, try a simple chat
+    const chatResponse = await fetch(`${endpoint}/chat/completions`, {
+      method: 'POST',
+      headers: {
+        'Authorization': `Bearer ${credentials.apiKey}`,
+        'Content-Type': 'application/json'
+      },
+      body: JSON.stringify({
+        model: provider.defaultModel,
+        messages: [{ role: 'user', content: 'hi' }],
+        max_tokens: 5
+      })
+    });
+
+    if (chatResponse.ok) {
+      return { valid: true };
+    }
+
+    return { valid: false, error: 'Invalid API key or endpoint' };
+  } catch (e) {
+    return { valid: false, error: e.message };
+  }
+};
+
+module.exports = {
+  getProviders,
+  getProvider,
+  isConnected,
+  getConnection,
+  connect,
+  disconnect,
+  getCredentials,
+  validateConnection,
+  getAISettings,
+  saveAISettings
+};
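
For orientation, here is a minimal usage sketch of the new service from a caller's perspective. The provider id 'anthropic' and the exported function names come from the hunk above; the require path, the option id 'api-key', and the environment variable name are assumptions for illustration, since the provider definitions live in src/services/ai/providers/index.js, which is not shown in this hunk.

// Hypothetical usage sketch (not part of the package diff).
// Assumed: require path, option id 'api-key', ANTHROPIC_API_KEY env var.
const ai = require('hedgequantx/src/services/ai');

const setupAnthropic = async () => {
  // Credential shape mirrors what validateAnthropic() reads (apiKey or sessionKey).
  const credentials = { apiKey: process.env.ANTHROPIC_API_KEY };

  // validateConnection() calls the provider's API before anything is persisted.
  const check = await ai.validateConnection('anthropic', 'api-key', credentials);
  if (!check.valid) {
    console.error('Validation failed:', check.error);
    return;
  }

  // connect() persists provider/option/credentials via saveAISettings()
  // and returns the connection object built by getConnection().
  const connection = await ai.connect('anthropic', 'api-key', credentials);
  console.log('Connected with model:', connection.model);

  // Later calls can read the stored state without re-prompting.
  console.log('isConnected:', ai.isConnected());

  // disconnect() clears stored settings and the in-memory cache.
  ai.disconnect();
};

setupAnthropic().catch(console.error);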