hedgequantx 2.9.18 → 2.9.20

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/package.json +1 -1
  2. package/src/app.js +42 -64
  3. package/src/lib/m/hqx-2b.js +7 -0
  4. package/src/lib/m/index.js +138 -0
  5. package/src/lib/m/ultra-scalping.js +7 -0
  6. package/src/menus/connect.js +14 -17
  7. package/src/menus/dashboard.js +58 -76
  8. package/src/pages/accounts.js +38 -49
  9. package/src/pages/algo/copy-trading.js +546 -178
  10. package/src/pages/algo/index.js +18 -75
  11. package/src/pages/algo/one-account.js +322 -57
  12. package/src/pages/algo/ui.js +15 -15
  13. package/src/pages/orders.js +19 -22
  14. package/src/pages/positions.js +19 -22
  15. package/src/pages/stats/index.js +15 -16
  16. package/src/pages/user.js +7 -11
  17. package/src/services/ai-supervision/health.js +35 -47
  18. package/src/services/index.js +1 -9
  19. package/src/services/rithmic/accounts.js +8 -6
  20. package/src/ui/box.js +9 -5
  21. package/src/ui/index.js +5 -18
  22. package/src/ui/menu.js +4 -4
  23. package/src/pages/ai-agents-ui.js +0 -388
  24. package/src/pages/ai-agents.js +0 -494
  25. package/src/pages/ai-models.js +0 -389
  26. package/src/pages/algo/algo-executor.js +0 -307
  27. package/src/pages/algo/copy-executor.js +0 -331
  28. package/src/pages/algo/custom-strategy.js +0 -313
  29. package/src/services/ai-supervision/consensus.js +0 -284
  30. package/src/services/ai-supervision/context.js +0 -275
  31. package/src/services/ai-supervision/directive.js +0 -167
  32. package/src/services/ai-supervision/index.js +0 -309
  33. package/src/services/ai-supervision/parser.js +0 -278
  34. package/src/services/ai-supervision/symbols.js +0 -259
  35. package/src/services/cliproxy/index.js +0 -256
  36. package/src/services/cliproxy/installer.js +0 -111
  37. package/src/services/cliproxy/manager.js +0 -387
  38. package/src/services/llmproxy/index.js +0 -166
  39. package/src/services/llmproxy/manager.js +0 -411
package/src/services/llmproxy/manager.js
@@ -1,411 +0,0 @@
- /**
-  * LLM Proxy Manager
-  *
-  * Manages LiteLLM proxy server installation, configuration and lifecycle.
-  * Uses Python virtual environment for isolation.
-  */
-
- const { spawn, execSync } = require('child_process');
- const path = require('path');
- const fs = require('fs');
- const os = require('os');
- const http = require('http');
-
- // Configuration
- const LLMPROXY_DIR = path.join(os.homedir(), '.hqx', 'llmproxy');
- const VENV_DIR = path.join(LLMPROXY_DIR, 'venv');
- const ENV_FILE = path.join(LLMPROXY_DIR, '.env');
- const PID_FILE = path.join(LLMPROXY_DIR, 'llmproxy.pid');
- const LOG_FILE = path.join(LLMPROXY_DIR, 'llmproxy.log');
- const DEFAULT_PORT = 8318;
-
- /**
-  * LLM Proxy Manager Class
-  */
- class LLMProxyManager {
-   constructor() {
-     this.port = DEFAULT_PORT;
-     this.process = null;
-   }
-
-   /**
-    * Get Python executable path in venv
-    */
-   getPythonPath() {
-     const isWindows = process.platform === 'win32';
-     return isWindows
-       ? path.join(VENV_DIR, 'Scripts', 'python.exe')
-       : path.join(VENV_DIR, 'bin', 'python');
-   }
-
-   /**
-    * Get pip executable path in venv
-    */
-   getPipPath() {
-     const isWindows = process.platform === 'win32';
-     return isWindows
-       ? path.join(VENV_DIR, 'Scripts', 'pip.exe')
-       : path.join(VENV_DIR, 'bin', 'pip');
-   }
-
-   /**
-    * Check if LLM Proxy is installed
-    */
-   isInstalled() {
-     try {
-       const pythonPath = this.getPythonPath();
-       if (!fs.existsSync(pythonPath)) return false;
-
-       // Check if litellm is installed
-       execSync(`"${pythonPath}" -c "import litellm"`, { stdio: 'ignore' });
-       return true;
-     } catch {
-       return false;
-     }
-   }
-
-   /**
-    * Install LLM Proxy (Python venv + LiteLLM)
-    */
-   async install(onProgress = () => {}) {
-     try {
-       // Create directory
-       if (!fs.existsSync(LLMPROXY_DIR)) {
-         fs.mkdirSync(LLMPROXY_DIR, { recursive: true });
-       }
-
-       onProgress('Creating Python virtual environment', 10);
-
-       // Check for Python
-       let pythonCmd = 'python3';
-       try {
-         execSync('python3 --version', { stdio: 'ignore' });
-       } catch {
-         try {
-           execSync('python --version', { stdio: 'ignore' });
-           pythonCmd = 'python';
-         } catch {
-           return { success: false, error: 'Python not found. Install Python 3.8+' };
-         }
-       }
-
-       // Create venv
-       if (!fs.existsSync(VENV_DIR)) {
-         execSync(`${pythonCmd} -m venv "${VENV_DIR}"`, { stdio: 'ignore' });
-       }
-
-       onProgress('Installing LiteLLM', 40);
-
-       // Install litellm
-       const pipPath = this.getPipPath();
-       execSync(`"${pipPath}" install --upgrade pip`, { stdio: 'ignore' });
-       execSync(`"${pipPath}" install litellm[proxy]`, { stdio: 'ignore', timeout: 300000 });
-
-       onProgress('Installation complete', 100);
-
-       return { success: true };
-     } catch (error) {
-       return { success: false, error: error.message };
-     }
-   }
-
-   /**
-    * Check if LLM Proxy is running
-    */
-   async isRunning() {
-     try {
-       // Check PID file
-       if (fs.existsSync(PID_FILE)) {
-         const pid = parseInt(fs.readFileSync(PID_FILE, 'utf8').trim());
-         try {
-           process.kill(pid, 0); // Check if process exists
-           // Verify it's responding
-           const health = await this.healthCheck();
-           if (health.success) {
-             return { running: true, port: this.port, pid };
-           }
-         } catch {
-           // PID exists but process doesn't - clean up
-           fs.unlinkSync(PID_FILE);
-         }
-       }
-       return { running: false };
-     } catch {
-       return { running: false };
-     }
-   }
-
-   /**
-    * Health check - ping the proxy
-    */
-   healthCheck() {
-     return new Promise((resolve) => {
-       const req = http.request({
-         hostname: 'localhost',
-         port: this.port,
-         path: '/health',
-         method: 'GET',
-         timeout: 5000
-       }, (res) => {
-         resolve({ success: res.statusCode === 200 });
-       });
-       req.on('error', () => resolve({ success: false }));
-       req.on('timeout', () => { req.destroy(); resolve({ success: false }); });
-       req.end();
-     });
-   }
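
The probe above is a plain GET against the proxy's /health route on localhost. A standalone equivalent, assuming the default port 8318, would be:

    // Minimal sketch of the same check outside the manager (port is assumed).
    const http = require('http');
    http.get('http://localhost:8318/health', (res) => {
      console.log(res.statusCode === 200 ? 'proxy healthy' : `unhealthy: HTTP ${res.statusCode}`);
    }).on('error', () => console.log('proxy not reachable'));
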
-
-   /**
-    * Load environment variables from .env file
-    */
-   loadEnvFile() {
-     if (!fs.existsSync(ENV_FILE)) return {};
-     const content = fs.readFileSync(ENV_FILE, 'utf8');
-     const env = {};
-     for (const line of content.split('\n')) {
-       const match = line.match(/^([^=]+)=(.*)$/);
-       if (match) {
-         env[match[1].trim()] = match[2].trim();
-       }
-     }
-     return env;
-   }
-
-   /**
-    * Save environment variable to .env file
-    */
-   saveEnvVar(key, value) {
-     const env = this.loadEnvFile();
-     env[key] = value;
-     const content = Object.entries(env)
-       .map(([k, v]) => `${k}=${v}`)
-       .join('\n');
-     fs.writeFileSync(ENV_FILE, content);
-   }
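
These two helpers treat ~/.hqx/llmproxy/.env as bare KEY=VALUE lines: the ^([^=]+)=(.*)$ match skips lines without an equals sign, and neither quoting nor 'export' syntax is understood. A hypothetical file as round-tripped by saveEnvVar (key values are placeholders, not from the package):

    DEEPSEEK_API_KEY=hypothetical-key-aaa
    GROQ_API_KEY=hypothetical-key-bbb
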
-
-   /**
-    * Start LLM Proxy server
-    */
-   async start() {
-     try {
-       // Check if already running
-       const status = await this.isRunning();
-       if (status.running) {
-         return { success: true, message: 'Already running' };
-       }
-
-       if (!this.isInstalled()) {
-         return { success: false, error: 'LLM Proxy not installed. Run install() first.' };
-       }
-
-       const pythonPath = this.getPythonPath();
-       const env = { ...process.env, ...this.loadEnvFile() };
-
-       // Start LiteLLM proxy
-       const proc = spawn(pythonPath, [
-         '-m', 'litellm',
-         '--port', String(this.port),
-         '--host', '0.0.0.0'
-       ], {
-         cwd: LLMPROXY_DIR,
-         env,
-         detached: true,
-         stdio: ['ignore', 'pipe', 'pipe']
-       });
-
-       // Write logs
-       const logStream = fs.createWriteStream(LOG_FILE, { flags: 'a' });
-       proc.stdout.pipe(logStream);
-       proc.stderr.pipe(logStream);
-
-       // Save PID
-       fs.writeFileSync(PID_FILE, String(proc.pid));
-       proc.unref();
-
-       // Wait for startup
-       await new Promise(r => setTimeout(r, 3000));
-
-       // Verify running
-       const health = await this.healthCheck();
-       if (!health.success) {
-         return { success: false, error: 'Proxy started but not responding' };
-       }
-
-       return { success: true, port: this.port, pid: proc.pid };
-     } catch (error) {
-       return { success: false, error: error.message };
-     }
-   }
-
-   /**
-    * Stop LLM Proxy server
-    */
-   async stop() {
-     try {
-       if (fs.existsSync(PID_FILE)) {
-         const pid = parseInt(fs.readFileSync(PID_FILE, 'utf8').trim());
-         try {
-           process.kill(pid, 'SIGTERM');
-           await new Promise(r => setTimeout(r, 1000));
-           try { process.kill(pid, 'SIGKILL'); } catch {}
-         } catch {}
-         fs.unlinkSync(PID_FILE);
-       }
-       return { success: true };
-     } catch (error) {
-       return { success: false, error: error.message };
-     }
-   }
-
-   /**
-    * Set API key for a provider
-    */
-   async setApiKey(providerId, apiKey) {
-     try {
-       const envKey = this.getEnvKeyName(providerId);
-       this.saveEnvVar(envKey, apiKey);
-
-       // Restart proxy if running to pick up new key
-       const status = await this.isRunning();
-       if (status.running) {
-         await this.stop();
-         await this.start();
-       }
-
-       return { success: true };
-     } catch (error) {
-       return { success: false, error: error.message };
-     }
-   }
-
-   /**
-    * Get API key for a provider
-    */
-   getApiKey(providerId) {
-     const envKey = this.getEnvKeyName(providerId);
-     const env = this.loadEnvFile();
-     return env[envKey] || null;
-   }
-
-   /**
-    * Get environment variable name for provider API key
-    */
-   getEnvKeyName(providerId) {
-     const mapping = {
-       minimax: 'MINIMAX_API_KEY',
-       deepseek: 'DEEPSEEK_API_KEY',
-       groq: 'GROQ_API_KEY',
-       mistral: 'MISTRAL_API_KEY',
-       xai: 'XAI_API_KEY',
-       perplexity: 'PERPLEXITYAI_API_KEY',
-       openrouter: 'OPENROUTER_API_KEY',
-       together: 'TOGETHERAI_API_KEY',
-       fireworks: 'FIREWORKS_AI_API_KEY',
-       cohere: 'COHERE_API_KEY',
-       ai21: 'AI21_API_KEY',
-       replicate: 'REPLICATE_API_KEY',
-       anthropic: 'ANTHROPIC_API_KEY',
-       openai: 'OPENAI_API_KEY',
-       google: 'GEMINI_API_KEY',
-     };
-     return mapping[providerId] || `${providerId.toUpperCase()}_API_KEY`;
-   }
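
Providers missing from the table fall back to the uppercased convention; for example (the second id is illustrative, not from the package):

    // mgr.getEnvKeyName('together')  -> 'TOGETHERAI_API_KEY'  (from the table)
    // mgr.getEnvKeyName('novita')    -> 'NOVITA_API_KEY'      (uppercase fallback)
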
-
-   /**
-    * Test connection to a provider
-    */
-   async testConnection(providerId, modelId) {
-     try {
-       const start = Date.now();
-       const result = await this.chatCompletion(providerId, modelId, [
-         { role: 'user', content: 'Say "OK" in one word.' }
-       ], { max_tokens: 5 });
-
-       if (result.success) {
-         return { success: true, latency: Date.now() - start };
-       }
-       return { success: false, error: result.error };
-     } catch (error) {
-       return { success: false, error: error.message };
-     }
-   }
-
-   /**
-    * Make chat completion request via LLM Proxy
-    */
-   async chatCompletion(providerId, modelId, messages, options = {}) {
-     return new Promise((resolve) => {
-       const modelPrefix = this.getModelPrefix(providerId);
-       const fullModelId = modelId.includes('/') ? modelId : `${modelPrefix}${modelId}`;
-
-       const body = JSON.stringify({
-         model: fullModelId,
-         messages,
-         ...options
-       });
-
-       const req = http.request({
-         hostname: 'localhost',
-         port: this.port,
-         path: '/v1/chat/completions',
-         method: 'POST',
-         headers: {
-           'Content-Type': 'application/json',
-           'Content-Length': Buffer.byteLength(body)
-         },
-         timeout: 60000
-       }, (res) => {
-         let data = '';
-         res.on('data', chunk => data += chunk);
-         res.on('end', () => {
-           try {
-             const parsed = JSON.parse(data);
-             if (res.statusCode >= 200 && res.statusCode < 300) {
-               resolve({ success: true, response: parsed });
-             } else {
-               resolve({ success: false, error: parsed.error?.message || `HTTP ${res.statusCode}` });
-             }
-           } catch {
-             resolve({ success: false, error: 'Invalid response' });
-           }
-         });
-       });
-
-       req.on('error', (err) => resolve({ success: false, error: err.message }));
-       req.on('timeout', () => { req.destroy(); resolve({ success: false, error: 'Timeout' }); });
-       req.write(body);
-       req.end();
-     });
-   }
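
Combined with the prefix lookup below, a bare model id gains its LiteLLM prefix before the POST to /v1/chat/completions. A minimal usage sketch, inside an async function, with illustrative model and message values:

    const mgr = new LLMProxyManager();
    const result = await mgr.chatCompletion(
      'deepseek',                            // providerId, mapped to the 'deepseek/' prefix
      'deepseek-chat',                       // bare id, so it becomes 'deepseek/deepseek-chat'
      [{ role: 'user', content: 'ping' }],   // OpenAI-style message list
      { max_tokens: 8 }                      // spread verbatim into the request body
    );
    // result is { success: true, response } on 2xx, else { success: false, error }
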
-
-   /**
-    * Get LiteLLM model prefix for provider
-    */
-   getModelPrefix(providerId) {
-     const prefixes = {
-       minimax: 'minimax/',
-       deepseek: 'deepseek/',
-       groq: 'groq/',
-       mistral: 'mistral/',
-       xai: 'xai/',
-       perplexity: 'perplexity/',
-       openrouter: 'openrouter/',
-       together: 'together_ai/',
-       fireworks: 'fireworks_ai/',
-       cohere: 'cohere/',
-       anthropic: 'anthropic/',
-       openai: 'openai/',
-       google: 'gemini/',
-     };
-     return prefixes[providerId] || `${providerId}/`;
-   }
-
-   /**
-    * Get base URL for LLM Proxy
-    */
-   getBaseUrl() {
-     return `http://localhost:${this.port}`;
-   }
- }
-
- module.exports = { LLMProxyManager };
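
Reconstructed from the code above (a sketch, not an excerpt from the package), the manager's intended lifecycle was install, then key setup, then start/stop:

    const { LLMProxyManager } = require('./src/services/llmproxy/manager');

    async function demo() {
      const mgr = new LLMProxyManager();
      if (!mgr.isInstalled()) {
        await mgr.install((step, pct) => console.log(`${pct}% ${step}`));
      }
      await mgr.setApiKey('groq', 'hypothetical-key'); // writes GROQ_API_KEY, restarts if running
      const started = await mgr.start();               // spawns python -m litellm on port 8318
      console.log(started.success, mgr.getBaseUrl());  // true 'http://localhost:8318'
      await mgr.stop();                                // SIGTERM, then SIGKILL after 1s
    }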