hedgequantx 2.6.152 → 2.6.154
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/pages/algo/custom-strategy.js +15 -14
- package/src/services/ai/client.js +26 -20
package/package.json
CHANGED

package/src/pages/algo/custom-strategy.js
CHANGED

@@ -19,7 +19,7 @@ const readline = require('readline');
 
 const { connections } = require('../../services');
 const { AlgoUI, renderSessionSummary, renderMultiSymbolSummary } = require('./ui');
-const { displayBanner } = require('../../ui');
+const { displayBanner, drawBoxHeaderContinue, drawBoxFooter, drawBoxRow, getLogoWidth, centerText } = require('../../ui');
 const { prompts } = require('../../utils');
 const { checkMarketHours } = require('../../services/projectx/market');
 const { FAST_SCALPING } = require('../../config/settings');
@@ -304,11 +304,13 @@ const customStrategyMenu = async (service) => {
   }
 
   const agentName = agents[0].name || agents[0].provider;
-
-
-
-
-
+  const boxWidth = Math.max(getLogoWidth(), 98);
+  const innerWidth = boxWidth - 2;
+
+  // Header aligned with main banner
+  drawBoxHeaderContinue('CUSTOM STRATEGY - AI GENERATED');
+  drawBoxRow(`Agent: ${agentName}`, innerWidth);
+  drawBoxFooter();
 
   // Step 1: Get strategy description from user
   console.log(chalk.yellow(' Describe your trading strategy in natural language:'));
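
Taken together, the lines added in this hunk replace the hand-drawn chalk box with the shared helpers now imported from ../../ui. A consolidated sketch of the pattern, for readers skimming the change; the helpers' rendering details are not part of this diff, so the trailing comments describe assumed behavior:

// Sketch only: same calls as the hunk above; the comments are assumptions.
const boxWidth = Math.max(getLogoWidth(), 98);            // never narrower than 98 columns or the logo banner
const innerWidth = boxWidth - 2;                          // interior width inside the border columns
drawBoxHeaderContinue('CUSTOM STRATEGY - AI GENERATED');  // top border plus section title
drawBoxRow(`Agent: ${agentName}`, innerWidth);            // one padded content row
drawBoxFooter();                                          // closing border

The STRATEGY READY hunk and executeWithCustomStrategy below reuse the same pattern.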
@@ -366,12 +368,11 @@ const customStrategyMenu = async (service) => {
   console.log(chalk.gray(` Saved to: ${filepath}\n`));
 
   // Step 4: Show strategy summary and confirm
-  console.log(
-
-
-
-
-  console.log(chalk.cyan(' ╚════════════════════════════════════════════════════════════╝\n'));
+  console.log('');
+  drawBoxHeaderContinue('STRATEGY READY');
+  drawBoxRow(`Name: ${genResult.name}`, innerWidth);
+  drawBoxRow(`File: ${filename}`, innerWidth);
+  drawBoxFooter();
 
   const confirm = await prompts.textInput(chalk.cyan(' Continue with this strategy? (Y/n): '));
   if (confirm.toLowerCase() === 'n') {
@@ -393,8 +394,8 @@ const customStrategyMenu = async (service) => {
  * Same as one-account but uses the custom strategy class
  */
 async function executeWithCustomStrategy(service, StrategyClass, strategyName) {
-
-  const
+  const boxWidth = Math.max(getLogoWidth(), 98);
+  const innerWidth = boxWidth - 2;
 
   // Get accounts
   const accountsResult = await service.getTradingAccounts();
package/src/services/ai/client.js
CHANGED

@@ -61,9 +61,10 @@ const makeRequest = (url, options) => {
  * @param {Object} agent - Agent configuration
  * @param {string} prompt - User prompt
  * @param {string} systemPrompt - System prompt
+ * @param {Object} options - Optional settings { maxTokens, temperature, timeout }
  * @returns {Promise<string|null>} Response text or null on error
  */
-const callOpenAICompatible = async (agent, prompt, systemPrompt) => {
+const callOpenAICompatible = async (agent, prompt, systemPrompt, options = {}) => {
   const provider = getProvider(agent.providerId);
   if (!provider) return null;
 
@@ -97,12 +98,12 @@ const callOpenAICompatible = async (agent, prompt, systemPrompt) => {
       { role: 'system', content: systemPrompt },
       { role: 'user', content: prompt }
     ],
-    temperature: 0.3
-    max_tokens: 500
+    temperature: options.temperature || 0.3
   };
 
   try {
-    const
+    const timeout = options.timeout || 30000;
+    const response = await makeRequest(url, { headers, body, timeout });
     return response.choices?.[0]?.message?.content || null;
   } catch (error) {
     return null;
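
Every provider helper in this file gets the same treatment: an optional options argument, options.temperature || 0.3 where the temperature was hard-coded, a per-path timeout default handed to makeRequest, and removal of the fixed 500-token output caps (the new JSDoc advertises options.maxTokens, but none of these hunks read it). A small self-contained sketch of how that || defaulting behaves; the function name is illustrative, not from the package:

// Illustrative only: mirrors the defaulting pattern added above.
const withDefaults = (options = {}) => ({
  temperature: options.temperature || 0.3, // caller override, else the previous hard-coded 0.3
  timeout: options.timeout || 30000        // 30s here and in callAnthropic; 60s in the Gemini and proxy paths
});

console.log(withDefaults());                     // { temperature: 0.3, timeout: 30000 }
console.log(withDefaults({ timeout: 120000 }));  // { temperature: 0.3, timeout: 120000 }
console.log(withDefaults({ temperature: 0 }));   // { temperature: 0.3, timeout: 30000 }, since || treats 0 as unset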
@@ -138,9 +139,10 @@ const getValidOAuthToken = async (credentials) => {
  * @param {Object} agent - Agent configuration
  * @param {string} prompt - User prompt
  * @param {string} systemPrompt - System prompt
+ * @param {Object} options - Optional settings { maxTokens, temperature, timeout }
  * @returns {Promise<string|null>} Response text or null on error
  */
-const callAnthropic = async (agent, prompt, systemPrompt) => {
+const callAnthropic = async (agent, prompt, systemPrompt, options = {}) => {
   const provider = getProvider('anthropic');
   if (!provider) return null;
 
@@ -171,7 +173,6 @@ const callAnthropic = async (agent, prompt, systemPrompt) => {
 
   const body = {
     model,
-    max_tokens: 500,
     system: systemPrompt,
     messages: [
       { role: 'user', content: prompt }
@@ -179,7 +180,8 @@ const callAnthropic = async (agent, prompt, systemPrompt) => {
   };
 
   try {
-    const
+    const timeout = options.timeout || 30000;
+    const response = await makeRequest(url, { headers, body, timeout });
     return response.content?.[0]?.text || null;
   } catch (error) {
     return null;
@@ -193,7 +195,7 @@ const callAnthropic = async (agent, prompt, systemPrompt) => {
  * @param {string} systemPrompt - System prompt
  * @returns {Promise<string|null>} Response text or null on error
  */
-const callGemini = async (agent, prompt, systemPrompt) => {
+const callGemini = async (agent, prompt, systemPrompt, options = {}) => {
   const provider = getProvider('gemini');
   if (!provider) return null;
 
@@ -213,13 +215,13 @@ const callGemini = async (agent, prompt, systemPrompt) => {
       { role: 'user', parts: [{ text: `${systemPrompt}\n\n${prompt}` }] }
     ],
     generationConfig: {
-      temperature: 0.3
-      maxOutputTokens: 500
+      temperature: 0.3
     }
   };
 
   try {
-    const
+    const timeout = options.timeout || 60000;
+    const response = await makeRequest(url, { headers, body, timeout });
     return response.candidates?.[0]?.content?.parts?.[0]?.text || null;
   } catch (error) {
     return null;
@@ -234,7 +236,7 @@ const callGemini = async (agent, prompt, systemPrompt) => {
  * @param {string} systemPrompt - System prompt
  * @returns {Promise<string|null>} Response text or null on error
  */
-const callViaProxy = async (agent, prompt, systemPrompt) => {
+const callViaProxy = async (agent, prompt, systemPrompt, options = {}) => {
   const proxyPort = agent.credentials?.proxyPort || 8317;
   const model = agent.model;
 
@@ -253,12 +255,12 @@ const callViaProxy = async (agent, prompt, systemPrompt) => {
       { role: 'system', content: systemPrompt },
       { role: 'user', content: prompt }
     ],
-    temperature: 0.3
-    max_tokens: 500
+    temperature: 0.3
   };
 
   try {
-    const
+    const timeout = options.timeout || 60000;
+    const response = await makeRequest(url, { headers, body, timeout });
     return response.choices?.[0]?.message?.content || null;
   } catch (error) {
     return null;
@@ -270,22 +272,26 @@ const callViaProxy = async (agent, prompt, systemPrompt) => {
  * @param {Object} agent - Agent with providerId and credentials
  * @param {string} prompt - User prompt
  * @param {string} systemPrompt - System prompt
+ * @param {Object} options - Optional settings { maxTokens, temperature, timeout }
  * @returns {Promise<string|null>} AI response or null on error
  */
-const callAI = async (agent, prompt, systemPrompt = '') => {
+const callAI = async (agent, prompt, systemPrompt = '', options = {}) => {
   if (!agent || !agent.providerId) return null;
 
+  // Default timeout 60s for code generation
+  const opts = { timeout: 60000, ...options };
+
   // Check if using proxy mode (subscription accounts)
   if (agent.credentials?.useProxy) {
-    return callViaProxy(agent, prompt, systemPrompt);
+    return callViaProxy(agent, prompt, systemPrompt, opts);
   }
 
   switch (agent.providerId) {
     case 'anthropic':
-      return callAnthropic(agent, prompt, systemPrompt);
+      return callAnthropic(agent, prompt, systemPrompt, opts);
 
     case 'gemini':
-      return callGemini(agent, prompt, systemPrompt);
+      return callGemini(agent, prompt, systemPrompt, opts);
 
     // All OpenAI-compatible APIs
     case 'openai':
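
callAI is the entry point the rest of the package uses, so the merge it adds sets the effective default: a 60-second timeout unless the caller supplies one, with any other option fields passed through to the provider helpers untouched. A minimal sketch of that merge plus a hypothetical call site; the prompt strings and the 120000 value are placeholders, not from the package:

// Mirrors `const opts = { timeout: 60000, ...options };` from the hunk above:
// the spread comes second, so caller-supplied fields win.
const mergeOpts = (options = {}) => ({ timeout: 60000, ...options });

console.log(mergeOpts());                      // { timeout: 60000 }
console.log(mergeOpts({ timeout: 15000 }));    // { timeout: 15000 }
console.log(mergeOpts({ temperature: 0.2 }));  // { timeout: 60000, temperature: 0.2 }

// Hypothetical call site (field access follows what callAI and the provider helpers read):
// const code = await callAI(agent, 'Generate a breakout strategy', 'You write trading strategies', { timeout: 120000 });

As before, callAI resolves to null on any provider error, so existing call sites keep working without changes.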
@@ -304,7 +310,7 @@ const callAI = async (agent, prompt, systemPrompt = '') => {
     case 'ollama':
     case 'lmstudio':
     case 'custom':
-      return callOpenAICompatible(agent, prompt, systemPrompt);
+      return callOpenAICompatible(agent, prompt, systemPrompt, opts);
 
     default:
       return null;