hedgequantx 2.9.20 → 2.9.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/app.js +64 -42
- package/src/menus/connect.js +17 -14
- package/src/menus/dashboard.js +76 -58
- package/src/pages/accounts.js +49 -38
- package/src/pages/ai-agents-ui.js +388 -0
- package/src/pages/ai-agents.js +494 -0
- package/src/pages/ai-models.js +389 -0
- package/src/pages/algo/algo-executor.js +307 -0
- package/src/pages/algo/copy-executor.js +331 -0
- package/src/pages/algo/copy-trading.js +178 -546
- package/src/pages/algo/custom-strategy.js +313 -0
- package/src/pages/algo/index.js +75 -18
- package/src/pages/algo/one-account.js +57 -322
- package/src/pages/algo/ui.js +15 -15
- package/src/pages/orders.js +22 -19
- package/src/pages/positions.js +22 -19
- package/src/pages/stats/index.js +16 -15
- package/src/pages/user.js +11 -7
- package/src/services/ai-supervision/consensus.js +284 -0
- package/src/services/ai-supervision/context.js +275 -0
- package/src/services/ai-supervision/directive.js +167 -0
- package/src/services/ai-supervision/health.js +47 -35
- package/src/services/ai-supervision/index.js +359 -0
- package/src/services/ai-supervision/parser.js +278 -0
- package/src/services/ai-supervision/symbols.js +259 -0
- package/src/services/cliproxy/index.js +256 -0
- package/src/services/cliproxy/installer.js +111 -0
- package/src/services/cliproxy/manager.js +387 -0
- package/src/services/index.js +9 -1
- package/src/services/llmproxy/index.js +166 -0
- package/src/services/llmproxy/manager.js +411 -0
- package/src/services/rithmic/accounts.js +6 -8
- package/src/ui/box.js +5 -9
- package/src/ui/index.js +18 -5
- package/src/ui/menu.js +4 -4
package/src/services/llmproxy/manager.js  ADDED

@@ -0,0 +1,411 @@
+/**
+ * LLM Proxy Manager
+ *
+ * Manages LiteLLM proxy server installation, configuration and lifecycle.
+ * Uses Python virtual environment for isolation.
+ */
+
+const { spawn, execSync } = require('child_process');
+const path = require('path');
+const fs = require('fs');
+const os = require('os');
+const http = require('http');
+
+// Configuration
+const LLMPROXY_DIR = path.join(os.homedir(), '.hqx', 'llmproxy');
+const VENV_DIR = path.join(LLMPROXY_DIR, 'venv');
+const ENV_FILE = path.join(LLMPROXY_DIR, '.env');
+const PID_FILE = path.join(LLMPROXY_DIR, 'llmproxy.pid');
+const LOG_FILE = path.join(LLMPROXY_DIR, 'llmproxy.log');
+const DEFAULT_PORT = 8318;
+
+/**
+ * LLM Proxy Manager Class
+ */
+class LLMProxyManager {
+  constructor() {
+    this.port = DEFAULT_PORT;
+    this.process = null;
+  }
+
+  /**
+   * Get Python executable path in venv
+   */
+  getPythonPath() {
+    const isWindows = process.platform === 'win32';
+    return isWindows
+      ? path.join(VENV_DIR, 'Scripts', 'python.exe')
+      : path.join(VENV_DIR, 'bin', 'python');
+  }
+
+  /**
+   * Get pip executable path in venv
+   */
+  getPipPath() {
+    const isWindows = process.platform === 'win32';
+    return isWindows
+      ? path.join(VENV_DIR, 'Scripts', 'pip.exe')
+      : path.join(VENV_DIR, 'bin', 'pip');
+  }
+
+  /**
+   * Check if LLM Proxy is installed
+   */
+  isInstalled() {
+    try {
+      const pythonPath = this.getPythonPath();
+      if (!fs.existsSync(pythonPath)) return false;
+
+      // Check if litellm is installed
+      execSync(`"${pythonPath}" -c "import litellm"`, { stdio: 'ignore' });
+      return true;
+    } catch {
+      return false;
+    }
+  }
+
+  /**
+   * Install LLM Proxy (Python venv + LiteLLM)
+   */
+  async install(onProgress = () => {}) {
+    try {
+      // Create directory
+      if (!fs.existsSync(LLMPROXY_DIR)) {
+        fs.mkdirSync(LLMPROXY_DIR, { recursive: true });
+      }
+
+      onProgress('Creating Python virtual environment', 10);
+
+      // Check for Python
+      let pythonCmd = 'python3';
+      try {
+        execSync('python3 --version', { stdio: 'ignore' });
+      } catch {
+        try {
+          execSync('python --version', { stdio: 'ignore' });
+          pythonCmd = 'python';
+        } catch {
+          return { success: false, error: 'Python not found. Install Python 3.8+' };
+        }
+      }
+
+      // Create venv
+      if (!fs.existsSync(VENV_DIR)) {
+        execSync(`${pythonCmd} -m venv "${VENV_DIR}"`, { stdio: 'ignore' });
+      }
+
+      onProgress('Installing LiteLLM', 40);
+
+      // Install litellm
+      const pipPath = this.getPipPath();
+      execSync(`"${pipPath}" install --upgrade pip`, { stdio: 'ignore' });
+      execSync(`"${pipPath}" install litellm[proxy]`, { stdio: 'ignore', timeout: 300000 });
+
+      onProgress('Installation complete', 100);
+
+      return { success: true };
+    } catch (error) {
+      return { success: false, error: error.message };
+    }
+  }
+
+  /**
+   * Check if LLM Proxy is running
+   */
+  async isRunning() {
+    try {
+      // Check PID file
+      if (fs.existsSync(PID_FILE)) {
+        const pid = parseInt(fs.readFileSync(PID_FILE, 'utf8').trim());
+        try {
+          process.kill(pid, 0); // Check if process exists
+          // Verify it's responding
+          const health = await this.healthCheck();
+          if (health.success) {
+            return { running: true, port: this.port, pid };
+          }
+        } catch {
+          // PID exists but process doesn't - clean up
+          fs.unlinkSync(PID_FILE);
+        }
+      }
+      return { running: false };
+    } catch {
+      return { running: false };
+    }
+  }
+
+  /**
+   * Health check - ping the proxy
+   */
+  healthCheck() {
+    return new Promise((resolve) => {
+      const req = http.request({
+        hostname: 'localhost',
+        port: this.port,
+        path: '/health',
+        method: 'GET',
+        timeout: 5000
+      }, (res) => {
+        resolve({ success: res.statusCode === 200 });
+      });
+      req.on('error', () => resolve({ success: false }));
+      req.on('timeout', () => { req.destroy(); resolve({ success: false }); });
+      req.end();
+    });
+  }
+
+  /**
+   * Load environment variables from .env file
+   */
+  loadEnvFile() {
+    if (!fs.existsSync(ENV_FILE)) return {};
+    const content = fs.readFileSync(ENV_FILE, 'utf8');
+    const env = {};
+    for (const line of content.split('\n')) {
+      const match = line.match(/^([^=]+)=(.*)$/);
+      if (match) {
+        env[match[1].trim()] = match[2].trim();
+      }
+    }
+    return env;
+  }
+
+  /**
+   * Save environment variable to .env file
+   */
+  saveEnvVar(key, value) {
+    const env = this.loadEnvFile();
+    env[key] = value;
+    const content = Object.entries(env)
+      .map(([k, v]) => `${k}=${v}`)
+      .join('\n');
+    fs.writeFileSync(ENV_FILE, content);
+  }
+
+  /**
+   * Start LLM Proxy server
+   */
+  async start() {
+    try {
+      // Check if already running
+      const status = await this.isRunning();
+      if (status.running) {
+        return { success: true, message: 'Already running' };
+      }
+
+      if (!this.isInstalled()) {
+        return { success: false, error: 'LLM Proxy not installed. Run install() first.' };
+      }
+
+      const pythonPath = this.getPythonPath();
+      const env = { ...process.env, ...this.loadEnvFile() };
+
+      // Start LiteLLM proxy
+      const proc = spawn(pythonPath, [
+        '-m', 'litellm',
+        '--port', String(this.port),
+        '--host', '0.0.0.0'
+      ], {
+        cwd: LLMPROXY_DIR,
+        env,
+        detached: true,
+        stdio: ['ignore', 'pipe', 'pipe']
+      });
+
+      // Write logs
+      const logStream = fs.createWriteStream(LOG_FILE, { flags: 'a' });
+      proc.stdout.pipe(logStream);
+      proc.stderr.pipe(logStream);
+
+      // Save PID
+      fs.writeFileSync(PID_FILE, String(proc.pid));
+      proc.unref();
+
+      // Wait for startup
+      await new Promise(r => setTimeout(r, 3000));
+
+      // Verify running
+      const health = await this.healthCheck();
+      if (!health.success) {
+        return { success: false, error: 'Proxy started but not responding' };
+      }
+
+      return { success: true, port: this.port, pid: proc.pid };
+    } catch (error) {
+      return { success: false, error: error.message };
+    }
+  }
+
+  /**
+   * Stop LLM Proxy server
+   */
+  async stop() {
+    try {
+      if (fs.existsSync(PID_FILE)) {
+        const pid = parseInt(fs.readFileSync(PID_FILE, 'utf8').trim());
+        try {
+          process.kill(pid, 'SIGTERM');
+          await new Promise(r => setTimeout(r, 1000));
+          try { process.kill(pid, 'SIGKILL'); } catch {}
+        } catch {}
+        fs.unlinkSync(PID_FILE);
+      }
+      return { success: true };
+    } catch (error) {
+      return { success: false, error: error.message };
+    }
+  }
+
+  /**
+   * Set API key for a provider
+   */
+  async setApiKey(providerId, apiKey) {
+    try {
+      const envKey = this.getEnvKeyName(providerId);
+      this.saveEnvVar(envKey, apiKey);
+
+      // Restart proxy if running to pick up new key
+      const status = await this.isRunning();
+      if (status.running) {
+        await this.stop();
+        await this.start();
+      }
+
+      return { success: true };
+    } catch (error) {
+      return { success: false, error: error.message };
+    }
+  }
+
+  /**
+   * Get API key for a provider
+   */
+  getApiKey(providerId) {
+    const envKey = this.getEnvKeyName(providerId);
+    const env = this.loadEnvFile();
+    return env[envKey] || null;
+  }
+
+  /**
+   * Get environment variable name for provider API key
+   */
+  getEnvKeyName(providerId) {
+    const mapping = {
+      minimax: 'MINIMAX_API_KEY',
+      deepseek: 'DEEPSEEK_API_KEY',
+      groq: 'GROQ_API_KEY',
+      mistral: 'MISTRAL_API_KEY',
+      xai: 'XAI_API_KEY',
+      perplexity: 'PERPLEXITYAI_API_KEY',
+      openrouter: 'OPENROUTER_API_KEY',
+      together: 'TOGETHERAI_API_KEY',
+      fireworks: 'FIREWORKS_AI_API_KEY',
+      cohere: 'COHERE_API_KEY',
+      ai21: 'AI21_API_KEY',
+      replicate: 'REPLICATE_API_KEY',
+      anthropic: 'ANTHROPIC_API_KEY',
+      openai: 'OPENAI_API_KEY',
+      google: 'GEMINI_API_KEY',
+    };
+    return mapping[providerId] || `${providerId.toUpperCase()}_API_KEY`;
+  }
+
+  /**
+   * Test connection to a provider
+   */
+  async testConnection(providerId, modelId) {
+    try {
+      const start = Date.now();
+      const result = await this.chatCompletion(providerId, modelId, [
+        { role: 'user', content: 'Say "OK" in one word.' }
+      ], { max_tokens: 5 });
+
+      if (result.success) {
+        return { success: true, latency: Date.now() - start };
+      }
+      return { success: false, error: result.error };
+    } catch (error) {
+      return { success: false, error: error.message };
+    }
+  }
+
+  /**
+   * Make chat completion request via LLM Proxy
+   */
+  async chatCompletion(providerId, modelId, messages, options = {}) {
+    return new Promise((resolve) => {
+      const modelPrefix = this.getModelPrefix(providerId);
+      const fullModelId = modelId.includes('/') ? modelId : `${modelPrefix}${modelId}`;
+
+      const body = JSON.stringify({
+        model: fullModelId,
+        messages,
+        ...options
+      });
+
+      const req = http.request({
+        hostname: 'localhost',
+        port: this.port,
+        path: '/v1/chat/completions',
+        method: 'POST',
+        headers: {
+          'Content-Type': 'application/json',
+          'Content-Length': Buffer.byteLength(body)
+        },
+        timeout: 60000
+      }, (res) => {
+        let data = '';
+        res.on('data', chunk => data += chunk);
+        res.on('end', () => {
+          try {
+            const parsed = JSON.parse(data);
+            if (res.statusCode >= 200 && res.statusCode < 300) {
+              resolve({ success: true, response: parsed });
+            } else {
+              resolve({ success: false, error: parsed.error?.message || `HTTP ${res.statusCode}` });
+            }
+          } catch {
+            resolve({ success: false, error: 'Invalid response' });
+          }
+        });
+      });
+
+      req.on('error', (err) => resolve({ success: false, error: err.message }));
+      req.on('timeout', () => { req.destroy(); resolve({ success: false, error: 'Timeout' }); });
+      req.write(body);
+      req.end();
+    });
+  }
+
+  /**
+   * Get LiteLLM model prefix for provider
+   */
+  getModelPrefix(providerId) {
+    const prefixes = {
+      minimax: 'minimax/',
+      deepseek: 'deepseek/',
+      groq: 'groq/',
+      mistral: 'mistral/',
+      xai: 'xai/',
+      perplexity: 'perplexity/',
+      openrouter: 'openrouter/',
+      together: 'together_ai/',
+      fireworks: 'fireworks_ai/',
+      cohere: 'cohere/',
+      anthropic: 'anthropic/',
+      openai: 'openai/',
+      google: 'gemini/',
+    };
+    return prefixes[providerId] || `${providerId}/`;
+  }
+
+  /**
+   * Get base URL for LLM Proxy
+   */
+  getBaseUrl() {
+    return `http://localhost:${this.port}`;
+  }
+}
+
+module.exports = { LLMProxyManager };
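For orientation, a minimal usage sketch of the new manager follows (not part of the package diff). The require path, the deepseek provider, the deepseek-chat model name, and the env-var guard are assumptions; the method names, option shapes, and { success, ... } return values come from the file above.

// Sketch only: paths and provider/model names are assumptions; the API is as in the diff above.
const { LLMProxyManager } = require('./src/services/llmproxy/manager');

(async () => {
  const manager = new LLMProxyManager();

  // One-time setup: creates ~/.hqx/llmproxy/venv and installs litellm[proxy] into it.
  if (!manager.isInstalled()) {
    const installed = await manager.install((step, pct) => console.log(`${pct}% ${step}`));
    if (!installed.success) throw new Error(installed.error);
  }

  // Persist a provider key to ~/.hqx/llmproxy/.env (restarts the proxy if it is running).
  if (process.env.DEEPSEEK_API_KEY) {
    await manager.setApiKey('deepseek', process.env.DEEPSEEK_API_KEY);
  }

  // Start the LiteLLM proxy on localhost:8318 and route a completion through it.
  const started = await manager.start();
  if (!started.success) throw new Error(started.error);

  const result = await manager.chatCompletion('deepseek', 'deepseek-chat', [
    { role: 'user', content: 'ping' }
  ], { max_tokens: 8 });
  console.log(result.success ? result.response : result.error);
})();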
package/src/services/rithmic/accounts.js  CHANGED

@@ -245,18 +245,16 @@ const getTradingAccounts = async (service) => {
       profitAndLoss: profitAndLoss,
       openPnL: openPnL,
       todayPnL: closedPnL,
-      // Status
-
-      //
-
-
-      // Keep RMS data for reference
-      rmsStatus: rmsInfo.status || null,
-      rmsAlgorithm: rmsInfo.algorithm || null,
+      // Status from Rithmic RMS API (field 154003 - ACCOUNT_STATUS)
+      status: rmsInfo.status || null,
+      // Algorithm/Type from Rithmic RMS API (field 150142 - ALGORITHM)
+      algorithm: rmsInfo.algorithm || null,
+      // Additional RMS data from API
       lossLimit: rmsInfo.lossLimit || null,
       minAccountBalance: rmsInfo.minAccountBalance || null,
       buyLimit: rmsInfo.buyLimit || null,
       sellLimit: rmsInfo.sellLimit || null,
+      currency: rmsInfo.currency || acc.accountCurrency || null,
       platform: 'Rithmic',
       propfirm: service.propfirm.name,
     };
package/src/ui/box.js  CHANGED

@@ -10,23 +10,19 @@ let logoWidth = null;
 
 /**
  * Get logo width for consistent box sizing
+ * Fixed width of 98 to match HQX banner (logo 88 + X 8 + padding 2)
  * Adapts to terminal width for mobile devices
  */
 const getLogoWidth = () => {
-  const termWidth = process.stdout.columns ||
+  const termWidth = process.stdout.columns || 100;
 
   // Mobile: use terminal width
   if (termWidth < 60) {
     return Math.max(termWidth - 2, 40);
   }
 
-  // Desktop:
-
-    const logoText = figlet.textSync('HEDGEQUANTX', { font: 'ANSI Shadow' });
-    const lines = logoText.split('\n').filter(line => line.trim().length > 0);
-    logoWidth = Math.max(...lines.map(line => line.length)) + 4;
-  }
-  return Math.min(logoWidth, termWidth - 2);
+  // Desktop: fixed width 98 to match banner
+  return Math.min(98, termWidth - 2);
 };
 
 /**

@@ -97,7 +93,7 @@ const drawBoxSeparator = (width) => {
 const printLogo = () => {
   const logoText = figlet.textSync('HEDGEQUANTX', { font: 'ANSI Shadow' });
   console.log(chalk.cyan(logoText));
-  console.log(chalk.gray.italic('
+  console.log(chalk.gray.italic(' PROP FUTURES ALGO TRADING CLI'));
   console.log();
 };
 
package/src/ui/index.js  CHANGED

@@ -26,8 +26,7 @@ const {
 const { createBoxMenu } = require('./menu');
 
 /**
- * Display HQX Banner
- * Note: console.clear() is handled by app.js banner() to avoid terminal bugs
+ * Display HQX Banner - ALWAYS closed with bottom border
  */
 const displayBanner = () => {
   const termWidth = process.stdout.columns || 100;

@@ -70,8 +69,20 @@ const displayBanner = () => {
   }
 
   console.log(chalk.cyan('╠' + '═'.repeat(innerWidth) + '╣'));
-  const tagline = isMobile ? `HQX
-  console.log(chalk.cyan('║') + chalk.
+  const tagline = isMobile ? `HQX V${version}` : `PROP FUTURES ALGO TRADING V${version}`;
+  console.log(chalk.cyan('║') + chalk.yellow(centerText(tagline, innerWidth)) + chalk.cyan('║'));
+
+  // ALWAYS close the banner
+  console.log(chalk.cyan('╚' + '═'.repeat(innerWidth) + '╝'));
+};
+
+/**
+ * Clear screen without using alternate screen buffer
+ * Uses ANSI escape codes directly to avoid terminal state issues
+ */
+const clearScreen = () => {
+  // ESC[H = home, ESC[2J = clear screen, ESC[3J = clear scrollback
+  process.stdout.write('\x1B[H\x1B[2J\x1B[3J');
 };
 
 /**

@@ -121,5 +132,7 @@ module.exports = {
   // Stdin
   prepareStdin,
   // Banner
-  displayBanner
+  displayBanner,
+  // Screen
+  clearScreen
 };
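A minimal redraw sketch using the two exports touched by this hunk (not part of the package diff); the './src/ui' require path is an assumption, while clearScreen and displayBanner are exported by src/ui/index.js as shown above.

// Sketch only: require path is assumed.
const { clearScreen, displayBanner } = require('./src/ui');

// Clear viewport + scrollback with plain ANSI codes (no alternate screen buffer),
// then repaint the banner, which now always closes with its bottom border.
clearScreen();
displayBanner();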
package/src/ui/menu.js  CHANGED

@@ -48,7 +48,7 @@ const createBoxMenu = async (title, items, options = {}) => {
   });
 
   console.log(chalk.cyan('╠' + '═'.repeat(innerWidth) + '╣'));
-  console.log(chalk.cyan('║') + chalk.
+  console.log(chalk.cyan('║') + chalk.yellow(centerText(`PROP FUTURES ALGO TRADING V${version}`, innerWidth)) + chalk.cyan('║'));
 
   // Stats bar if provided
   if (options.statsLine) {

@@ -81,7 +81,7 @@ const createBoxMenu = async (title, items, options = {}) => {
     const isSelected = index === selectedIndex;
     const prefix = isSelected ? chalk.white('▸ ') : ' ';
     const color = item.disabled ? chalk.gray : (item.color || chalk.cyan);
-    const label = item.label + (item.disabled ? ' (
+    const label = item.label.toUpperCase() + (item.disabled ? ' (COMING SOON)' : '');
     const text = prefix + color(label);
     const visLen = text.replace(/\x1b\[[0-9;]*m/g, '').length;
     const padding = innerWidth - visLen;

@@ -96,8 +96,8 @@ const createBoxMenu = async (title, items, options = {}) => {
 
   // Footer
   console.log(chalk.cyan('╠' + '─'.repeat(innerWidth) + '╣'));
-  const footerText = options.footerText || '
-  console.log(chalk.cyan('║') + chalk.gray(centerText(footerText, innerWidth)) + chalk.cyan('║'));
+  const footerText = options.footerText || 'USE ↑↓ ARROWS TO NAVIGATE, ENTER TO SELECT';
+  console.log(chalk.cyan('║') + chalk.gray(centerText(footerText.toUpperCase(), innerWidth)) + chalk.cyan('║'));
   console.log(chalk.cyan('╚' + '═'.repeat(innerWidth) + '╝'));
 };
 
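An illustrative call of the updated menu (not part of the package diff); the item and option shapes (label, disabled, color, footerText) are inferred from the rendering code above, and whether createBoxMenu resolves to the selected item is not shown in this diff.

// Illustrative only: shapes inferred from the hunks above.
const chalk = require('chalk');
const { createBoxMenu } = require('./src/ui/menu');

(async () => {
  await createBoxMenu('MAIN MENU', [
    { label: 'Accounts', color: chalk.cyan },
    { label: 'AI Agents', color: chalk.magenta },
    { label: 'Stats', disabled: true },  // rendered in gray as 'STATS (COMING SOON)'
  ], {
    footerText: 'esc to quit',           // upper-cased by the footer renderer
  });
})();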