@darksol/terminal 0.1.1 → 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,286 @@
1
+ import fetch from 'node-fetch';
2
+ import { getKeyFromEnv, getKey, SERVICES } from '../config/keys.js';
3
+ import { getConfig } from '../config/store.js';
4
+ import { theme } from '../ui/theme.js';
5
+ import { spinner, kvDisplay, success, error, warn, info } from '../ui/components.js';
6
+ import { showSection } from '../ui/banner.js';
7
+
8
+ // ──────────────────────────────────────────────────
9
+ // LLM PROVIDER ADAPTERS
10
+ // ──────────────────────────────────────────────────
11
+
12
/**
 * Registry of supported LLM back-ends.
 *
 * Each entry adapts one HTTP API to a common shape consumed by LLMEngine:
 *   url           – chat endpoint (ollama's is filled in at init time)
 *   defaultModel  – model used when none is configured
 *   authHeader    – (apiKey) => extra request headers
 *   buildBody     – optional provider-specific request body builder
 *   parseResponse – (json) => assistant message text
 *   parseUsage    – (json) => token usage info
 */
const PROVIDERS = {
  openai: {
    url: 'https://api.openai.com/v1/chat/completions',
    defaultModel: 'gpt-4o',
    authHeader: (apiKey) => ({ 'Authorization': `Bearer ${apiKey}` }),
    parseResponse: (json) => json.choices?.[0]?.message?.content,
    parseUsage: (json) => json.usage,
  },
  anthropic: {
    url: 'https://api.anthropic.com/v1/messages',
    defaultModel: 'claude-sonnet-4-20250514',
    authHeader: (apiKey) => ({ 'x-api-key': apiKey, 'anthropic-version': '2023-06-01' }),
    // Anthropic takes the system prompt as a top-level field and accepts
    // only user/assistant roles inside `messages`.
    buildBody: (model, messages, systemPrompt) => ({
      model,
      max_tokens: 4096,
      system: systemPrompt,
      messages: messages.map(({ role, content }) => ({
        role: role === 'system' ? 'user' : role,
        content,
      })),
    }),
    parseResponse: (json) => json.content?.[0]?.text,
    parseUsage: (json) => ({ input: json.usage?.input_tokens, output: json.usage?.output_tokens }),
  },
  openrouter: {
    url: 'https://openrouter.ai/api/v1/chat/completions',
    defaultModel: 'anthropic/claude-sonnet-4-20250514',
    authHeader: (apiKey) => ({
      'Authorization': `Bearer ${apiKey}`,
      'HTTP-Referer': 'https://darksol.net',
      'X-Title': 'DARKSOL Terminal',
    }),
    parseResponse: (json) => json.choices?.[0]?.message?.content,
    parseUsage: (json) => json.usage,
  },
  ollama: {
    url: null, // patched in by LLMEngine.init() from the configured host
    defaultModel: 'llama3.1',
    authHeader: () => ({}),
    // Ollama's OpenAI-compatible endpoint uses `choices`; its native one uses `message`.
    parseResponse: (json) => json.choices?.[0]?.message?.content || json.message?.content,
    parseUsage: () => ({ input: 0, output: 0 }),
  },
};
55
+
56
+ // ──────────────────────────────────────────────────
57
+ // LLM ENGINE
58
+ // ──────────────────────────────────────────────────
59
+
60
/**
 * Provider-agnostic chat engine.
 *
 * Wraps a single LLM provider (see PROVIDERS) behind a uniform
 * chat/complete/json interface, keeps a token-budgeted conversation
 * history, and tracks cumulative usage for the session.
 */
export class LLMEngine {
  /**
   * @param {object} [opts={}]
   * @param {string} [opts.provider] - Provider id; falls back to config, then 'openai'.
   * @param {string} [opts.model]    - Model name; falls back to config, then provider default (in init()).
   * @param {string} [opts.apiKey]   - Explicit API key; skips env/vault lookup in init().
   * @param {number} [opts.maxHistory=8000]  - Rough token budget for retained history.
   * @param {number} [opts.temperature=0.7]  - Default sampling temperature.
   */
  constructor(opts = {}) {
    this.provider = opts.provider || getConfig('llm.provider') || 'openai';
    this.model = opts.model || getConfig('llm.model') || null; // resolved in init()
    this.apiKey = opts.apiKey || null;
    this.conversationHistory = [];
    this.systemPrompt = '';
    this.maxHistoryTokens = opts.maxHistory || 8000;
    this.temperature = opts.temperature ?? 0.7;

    // Cumulative usage tracking for the session.
    this.totalInputTokens = 0;
    this.totalOutputTokens = 0;
    this.totalCalls = 0;
  }

  /**
   * Initialize the engine — resolve API key and finalize provider/model.
   *
   * @param {string} [vaultPassword] - Unlocks the key vault when no env key exists.
   * @returns {Promise<this>}
   * @throws {Error} When no key is available (except for ollama) or the
   *   provider is not in PROVIDERS.
   */
  async init(vaultPassword) {
    if (!this.apiKey) {
      // Try env first, then vault.
      this.apiKey = getKeyFromEnv(this.provider);
      if (!this.apiKey && vaultPassword) {
        this.apiKey = await getKey(this.provider, vaultPassword);
      }
    }

    if (!this.apiKey && this.provider !== 'ollama') {
      throw new Error(`No API key for ${this.provider}. Run: darksol keys add ${this.provider}`);
    }

    const providerConfig = PROVIDERS[this.provider];
    if (!providerConfig) {
      throw new Error(`Unknown LLM provider: ${this.provider}. Supported: ${Object.keys(PROVIDERS).join(', ')}`);
    }

    if (!this.model) {
      this.model = providerConfig.defaultModel;
    }

    // Ollama is keyless; its "key" slot may carry a custom host URL instead.
    if (this.provider === 'ollama') {
      const host = this.apiKey || getConfig('llm.ollamaHost') || 'http://localhost:11434';
      PROVIDERS.ollama.url = `${host}/v1/chat/completions`;
      this.apiKey = 'ollama'; // placeholder so downstream header builders get a string
    }

    return this;
  }

  /**
   * Set the system prompt (persona/context for the LLM).
   * @param {string} prompt
   * @returns {this}
   */
  setSystemPrompt(prompt) {
    this.systemPrompt = prompt;
    return this;
  }

  /**
   * Send a message and get a response.
   *
   * @param {string} userMessage
   * @param {object} [opts={}]
   * @param {number}  [opts.temperature] - Override the engine default.
   * @param {number}  [opts.maxTokens]   - Response token cap (default 4096).
   * @param {boolean} [opts.json]        - Request JSON-object output (OpenAI-style APIs).
   * @param {boolean} [opts.ephemeral]   - Do not record this exchange in history.
   * @returns {Promise<{content: string, usage: object, model: string, provider: string}>}
   * @throws {Error} On a non-2xx API response or an empty/unparseable reply.
   */
  async chat(userMessage, opts = {}) {
    const providerConfig = PROVIDERS[this.provider];

    // Build messages array. Anthropic receives the system prompt as a
    // top-level field (via buildBody), not as a 'system' message.
    const messages = [];
    if (this.systemPrompt && this.provider !== 'anthropic') {
      messages.push({ role: 'system', content: this.systemPrompt });
    }

    // Add conversation history, then the new user turn.
    messages.push(...this.conversationHistory);
    messages.push({ role: 'user', content: userMessage });

    // Build request body: provider-specific override or OpenAI-compatible default.
    let body;
    if (providerConfig.buildBody) {
      body = providerConfig.buildBody(this.model, messages, this.systemPrompt);
    } else {
      body = {
        model: this.model,
        messages,
        temperature: opts.temperature ?? this.temperature,
        max_tokens: opts.maxTokens || 4096,
      };

      // JSON mode if requested.
      if (opts.json) {
        body.response_format = { type: 'json_object' };
      }
    }

    const headers = {
      'Content-Type': 'application/json',
      ...providerConfig.authHeader(this.apiKey),
    };

    const response = await fetch(providerConfig.url, {
      method: 'POST',
      headers,
      body: JSON.stringify(body),
    });

    if (!response.ok) {
      const errText = await response.text();
      throw new Error(`LLM API error (${response.status}): ${errText.slice(0, 200)}`);
    }

    const data = await response.json();
    const content = providerConfig.parseResponse(data);
    const usage = providerConfig.parseUsage(data);

    // Track usage across provider-specific field names.
    this.totalCalls++;
    if (usage) {
      this.totalInputTokens += usage.input_tokens || usage.prompt_tokens || usage.input || 0;
      this.totalOutputTokens += usage.output_tokens || usage.completion_tokens || usage.output || 0;
    }

    // Guard against unexpected response shapes: previously an undefined
    // `content` was stored in history and later crashed _trimHistory()
    // (and json() would crash calling .match on undefined).
    if (content == null) {
      throw new Error(`LLM returned no content (${this.provider}/${this.model})`);
    }

    // Store in history unless the caller asked for a one-shot exchange.
    if (!opts.ephemeral) {
      this.conversationHistory.push({ role: 'user', content: userMessage });
      this.conversationHistory.push({ role: 'assistant', content });
      this._trimHistory();
    }

    return {
      content,
      usage,
      model: this.model,
      provider: this.provider,
    };
  }

  /**
   * One-shot completion (no history).
   * @param {string} prompt
   * @param {object} [opts={}] - Same options as chat().
   */
  async complete(prompt, opts = {}) {
    return this.chat(prompt, { ...opts, ephemeral: true });
  }

  /**
   * Get a structured JSON response.
   *
   * Appends a JSON-only instruction to the prompt, strips any markdown code
   * fence from the reply, and attaches the parse result.
   *
   * @param {string} prompt
   * @param {object} [opts={}]
   * @returns {Promise<object>} chat() result plus `parsed` (object, or null
   *   when the reply was not valid JSON).
   */
  async json(prompt, opts = {}) {
    const result = await this.chat(
      prompt + '\n\nRespond with valid JSON only. No markdown, no explanation.',
      { ...opts, ephemeral: true }
    );

    try {
      // Extract JSON from response (handle markdown code blocks).
      let jsonStr = result.content;
      const match = jsonStr.match(/```(?:json)?\s*([\s\S]*?)\s*```/);
      if (match) jsonStr = match[1];

      result.parsed = JSON.parse(jsonStr.trim());
    } catch {
      // Not valid JSON — caller checks `parsed === null`.
      result.parsed = null;
    }

    return result;
  }

  /**
   * Clear conversation history.
   * @returns {this}
   */
  clearHistory() {
    this.conversationHistory = [];
    return this;
  }

  /**
   * Get cumulative usage stats for this session.
   * @returns {{calls: number, inputTokens: number, outputTokens: number, totalTokens: number, provider: string, model: string}}
   */
  getUsage() {
    return {
      calls: this.totalCalls,
      inputTokens: this.totalInputTokens,
      outputTokens: this.totalOutputTokens,
      totalTokens: this.totalInputTokens + this.totalOutputTokens,
      provider: this.provider,
      model: this.model,
    };
  }

  /**
   * Trim history to stay within the token budget (rough estimate).
   * Removes oldest user/assistant pairs first; always keeps at least one pair.
   */
  _trimHistory() {
    // Rough heuristic: 1 token ≈ 4 chars. Tolerate non-string content defensively.
    const estimateTokens = (msgs) =>
      msgs.reduce((sum, m) => sum + Math.ceil((m.content?.length ?? 0) / 4), 0);

    while (this.conversationHistory.length > 2 && estimateTokens(this.conversationHistory) > this.maxHistoryTokens) {
      // Remove oldest pair (user + assistant).
      this.conversationHistory.splice(0, 2);
    }
  }
}
264
+
265
+ // ──────────────────────────────────────────────────
266
+ // FACTORY
267
+ // ──────────────────────────────────────────────────
268
+
269
/**
 * Create and initialize an LLM engine.
 *
 * @param {object} [opts={}] - LLMEngine options plus an optional
 *   `vaultPassword` used to unlock the key vault during initialization.
 * @returns {Promise<LLMEngine>} A fully initialized engine.
 */
export async function createLLM(opts = {}) {
  // init() resolves the API key/model and returns the engine itself.
  return new LLMEngine(opts).init(opts.vaultPassword);
}
277
+
278
/**
 * Quick one-shot LLM call (auto-resolves provider/key).
 *
 * @param {string} prompt - Text to send.
 * @param {object} [opts={}] - createLLM()/chat() options.
 * @returns {Promise<object>} The completion result (history is not kept).
 */
export async function ask(prompt, opts = {}) {
  const llm = await createLLM(opts);
  return llm.complete(prompt, opts);
}
285
+
286
+ export { PROVIDERS };
@@ -0,0 +1,310 @@
1
+ import { createLLM } from './engine.js';
2
+ import { quickPrice } from '../utils/helpers.js';
3
+ import { getConfig } from '../config/store.js';
4
+ import { theme } from '../ui/theme.js';
5
+ import { spinner, kvDisplay, success, error, warn, info } from '../ui/components.js';
6
+ import { showSection } from '../ui/banner.js';
7
+
8
+ // ──────────────────────────────────────────────────
9
+ // INTENT SYSTEM PROMPT
10
+ // ──────────────────────────────────────────────────
11
+
12
// System prompt for the trading assistant. `{{chain}}`, `{{wallet}}` and
// `{{slippage}}` are placeholders substituted at runtime with the user's
// active settings (see parseIntent/startChat below).
const INTENT_SYSTEM_PROMPT = `You are DARKSOL Terminal's trading AI assistant. You help users execute trades, analyze markets, manage DCA strategies, and navigate the DARKSOL ecosystem.

CAPABILITIES:
- Parse natural language trade instructions into structured commands
- Analyze token prices, liquidity, and market conditions
- Suggest DCA strategies based on user goals
- Explain transaction results and gas costs
- Warn about risks (low liquidity, high slippage, unverified contracts)

SUPPORTED CHAINS: Base (default), Ethereum, Polygon, Arbitrum, Optimism

RESPONSE RULES:
- Be concise and direct
- Always include risk warnings for trades
- When parsing trade intent, output structured JSON
- Never reveal private keys or sensitive wallet info
- If uncertain about a token, say so
- Use plain numbers, avoid scientific notation

USER CONTEXT:
- Active chain: {{chain}}
- Active wallet: {{wallet}}
- Slippage setting: {{slippage}}%

When parsing trade instructions, respond with JSON:
{
"action": "swap|snipe|dca|transfer|info|analyze|unknown",
"tokenIn": "symbol or address",
"tokenOut": "symbol or address",
"amount": "number",
"chain": "chain name",
"confidence": 0-1,
"reasoning": "brief explanation",
"warnings": ["array of risk warnings"],
"command": "the CLI command to execute"
}`;
48
+
49
+ // ──────────────────────────────────────────────────
50
+ // INTENT PARSER
51
+ // ──────────────────────────────────────────────────
52
+
53
/**
 * Parse natural language into a trading intent.
 *
 * Enriches the user's text with live price data for token-like symbols it
 * mentions, then asks the LLM for the structured-intent JSON described in
 * INTENT_SYSTEM_PROMPT.
 *
 * @param {string} input - User's natural language input
 * @param {object} opts - { provider, model, vaultPassword }
 * @returns {Promise<object>} Parsed intent; `{ action: 'unknown', ... }` when
 *   the model's reply is not valid JSON, `{ action: 'error', error }` on failure.
 */
export async function parseIntent(input, opts = {}) {
  const spin = spinner('Understanding your intent...').start();

  try {
    const llm = await createLLM(opts);
    const chain = getConfig('chain') || 'base';
    const wallet = getConfig('activeWallet') || '(not set)';
    const slippage = getConfig('slippage') || 0.5;

    // Inject the user's live settings into the prompt template.
    const systemPrompt = INTENT_SYSTEM_PROMPT
      .replace('{{chain}}', chain)
      .replace('{{wallet}}', wallet)
      .replace('{{slippage}}', slippage);

    llm.setSystemPrompt(systemPrompt);

    // Enrich with price data if we detect up to 3 token-like symbols.
    let context = '';
    const tokenPattern = /\b([A-Z]{2,10})\b/g;
    const tokens = [...new Set(input.toUpperCase().match(tokenPattern) || [])];

    if (tokens.length > 0 && tokens.length <= 3) {
      // Skip common words (and ETH, which needs no lookup).
      const STOPWORDS = ['ETH', 'THE', 'FOR', 'AND', 'BUY', 'SELL', 'DCA', 'SWAP'];
      // Fetch quotes in parallel (previously serial), and make sure a single
      // failed lookup cannot abort the whole intent parse.
      const lookups = tokens
        .filter((t) => !STOPWORDS.includes(t))
        .map((t) => quickPrice(t).catch(() => null));
      const prices = (await Promise.all(lookups))
        .filter(Boolean)
        .map((p) => `${p.symbol}: $${p.price} (liquidity: $${p.liquidity}, 24h: ${p.change24h}%)`);

      if (prices.length > 0) {
        context = `\n\nCurrent market data:\n${prices.join('\n')}`;
      }
    }

    const prompt = `Parse this trading instruction and respond with JSON:\n\n"${input}"${context}`;
    const result = await llm.json(prompt);

    spin.succeed('Intent parsed');

    if (result.parsed) {
      return {
        ...result.parsed,
        raw: result.content,
        model: result.model,
      };
    }

    // Model did not return valid JSON — surface its text as the reasoning.
    return {
      action: 'unknown',
      reasoning: result.content,
      confidence: 0,
      raw: result.content,
      model: result.model,
    };
  } catch (err) {
    spin.fail('Intent parsing failed');
    error(err.message);
    return { action: 'error', error: err.message };
  }
}
118
+
119
+ // ──────────────────────────────────────────────────
120
+ // INTERACTIVE CHAT
121
+ // ──────────────────────────────────────────────────
122
+
123
/**
 * Start an interactive trading chat session.
 *
 * Initializes an LLM engine with the intent system prompt, then loops on an
 * inquirer text prompt: each user turn is enriched with live price data for
 * token-like symbols and sent through llm.chat() (so conversation history
 * accumulates). Typing "exit"/"quit"/"q" prints session usage and returns.
 *
 * @param {object} [opts={}] - createLLM() options ({ provider, model, vaultPassword }).
 */
export async function startChat(opts = {}) {
  showSection('DARKSOL AI — TRADING ASSISTANT');
  console.log(theme.dim(' Natural language trading. Type "exit" to quit.'));
  console.log(theme.dim(' Examples: "buy 0.1 ETH worth of VIRTUAL", "what\'s the price of AERO?"'));
  console.log('');

  const spin = spinner('Initializing AI...').start();
  let llm;

  try {
    llm = await createLLM(opts);
    const chain = getConfig('chain') || 'base';
    const wallet = getConfig('activeWallet') || '(not set)';
    const slippage = getConfig('slippage') || 0.5;

    // Fill the prompt template with the user's live settings.
    const systemPrompt = INTENT_SYSTEM_PROMPT
      .replace('{{chain}}', chain)
      .replace('{{wallet}}', wallet)
      .replace('{{slippage}}', slippage);

    llm.setSystemPrompt(systemPrompt);
    spin.succeed(`AI ready (${llm.provider}/${llm.model})`);
  } catch (err) {
    // No usable engine (missing key, unknown provider) — bail out of the session.
    spin.fail('Failed to initialize AI');
    error(err.message);
    info('Add an API key: darksol keys add openai');
    return;
  }

  // Lazy-load inquirer so it is only paid for in interactive mode.
  const inquirer = (await import('inquirer')).default;

  while (true) {
    const { input } = await inquirer.prompt([{
      type: 'input',
      name: 'input',
      message: theme.gold('You:'),
      validate: (v) => v.length > 0 || 'Say something',
    }]);

    // Exit commands: print session usage and leave the loop.
    if (['exit', 'quit', 'q'].includes(input.toLowerCase())) {
      const usage = llm.getUsage();
      console.log('');
      info(`Session: ${usage.calls} calls, ${usage.totalTokens} tokens`);
      break;
    }

    const spin2 = spinner('Thinking...').start();
    try {
      // Enrich with live price data for up to 3 detected token symbols.
      let enriched = input;
      const tokenPattern = /\b([A-Z]{2,10})\b/g;
      const tokens = [...new Set(input.toUpperCase().match(tokenPattern) || [])];
      // Common English/command words that would otherwise look like tickers.
      const skipTokens = ['ETH', 'THE', 'FOR', 'AND', 'BUY', 'SELL', 'DCA', 'SWAP', 'WHAT', 'PRICE', 'HOW', 'MUCH'];

      const priceData = [];
      for (const t of tokens.filter(t => !skipTokens.includes(t)).slice(0, 3)) {
        const p = await quickPrice(t);
        if (p) priceData.push(`${p.symbol}: $${p.price} (liq: $${p.liquidity})`);
      }

      if (priceData.length > 0) {
        enriched += `\n\n[Live data: ${priceData.join(', ')}]`;
      }

      // Full chat() call — this turn is recorded in conversation history.
      const result = await llm.chat(enriched);
      spin2.succeed('');

      // Display response
      console.log('');
      console.log(theme.gold(' DARKSOL AI:'));
      const lines = result.content.split('\n');
      for (const line of lines) {
        console.log(theme.dim(' ') + line);
      }
      console.log('');

    } catch (err) {
      // One failed turn should not end the session — report and re-prompt.
      spin2.fail('Error');
      error(err.message);
    }
  }
}
208
+
209
+ // ──────────────────────────────────────────────────
210
+ // STRATEGY ADVISOR
211
+ // ──────────────────────────────────────────────────
212
+
213
/**
 * Get a DCA strategy recommendation.
 *
 * Pulls a live quote for the token (when available) and asks the LLM for a
 * concrete plan, which is printed to the terminal.
 *
 * @param {string} tokenSymbol - Token to dollar-cost-average into.
 * @param {number|string} budget - Total budget in USD.
 * @param {string} timeframe - Desired investment horizon (e.g. "3 months").
 * @param {object} [opts={}] - createLLM() options.
 * @returns {Promise<object|undefined>} The LLM completion result, or
 *   undefined when the analysis fails (the error is printed).
 */
export async function adviseStrategy(tokenSymbol, budget, timeframe, opts = {}) {
  const spin = spinner('Analyzing strategy...').start();

  try {
    const llm = await createLLM(opts);
    llm.setSystemPrompt(`You are a DCA strategy advisor for crypto trading on Base/Ethereum.
Give specific, actionable DCA recommendations with exact amounts and intervals.
Always include risk warnings. Be concise.`);

    // Pull a live quote so the advice reflects current market conditions.
    const quote = await quickPrice(tokenSymbol);
    const priceInfo = quote
      ? `Current price: $${quote.price}, Liquidity: $${quote.liquidity}, 24h change: ${quote.change24h}%`
      : 'Price data unavailable';

    const prompt = `DCA strategy for ${tokenSymbol}:
Budget: $${budget}
Timeframe: ${timeframe}
${priceInfo}

Recommend: interval, amount per buy, total orders, entry/exit conditions, risk level.`;

    const result = await llm.complete(prompt);
    spin.succeed('Strategy ready');

    showSection(`DCA STRATEGY — ${tokenSymbol.toUpperCase()}`);
    // Print the recommendation, skipping blank lines.
    result.content
      .split('\n')
      .filter((line) => line.trim())
      .forEach((line) => console.log(' ' + line));
    console.log('');

    return result;
  } catch (err) {
    spin.fail('Strategy analysis failed');
    error(err.message);
  }
}
254
+
255
/**
 * Analyze a token for trading.
 *
 * Resolves the token via quickPrice(), shows its key stats, and asks the LLM
 * for an objective assessment which is printed to the terminal.
 *
 * @param {string} query - Token symbol or address to analyze.
 * @param {object} [opts={}] - createLLM() options.
 * @returns {Promise<object|undefined>} The LLM completion result, or
 *   undefined when the token is not found or the analysis fails.
 */
export async function analyzeToken(query, opts = {}) {
  const spin = spinner(`Analyzing ${query}...`).start();

  try {
    const llm = await createLLM(opts);
    llm.setSystemPrompt(`You are a crypto token analyst. Provide factual analysis based on on-chain data.
Include: price analysis, liquidity assessment, volume trends, risk factors.
Be objective. Never guarantee returns.`);

    // Without on-chain stats there is nothing factual to analyze.
    const stats = await quickPrice(query);
    if (!stats) {
      spin.fail('Token not found');
      return;
    }

    const prompt = `Analyze this token:
Symbol: ${stats.symbol} (${stats.name})
Chain: ${stats.chain}
Price: $${stats.price}
24h Change: ${stats.change24h}%
Liquidity: $${stats.liquidity}
24h Volume: $${stats.volume24h}
DEX: ${stats.dex}
Contract: ${stats.contract}

Provide: sentiment, liquidity assessment, risk level (1-10), key considerations.`;

    const result = await llm.complete(prompt);
    spin.succeed('Analysis ready');

    showSection(`TOKEN ANALYSIS — ${stats.symbol}`);
    kvDisplay([
      ['Price', `$${stats.price}`],
      ['24h', `${stats.change24h}%`],
      ['Liquidity', `$${stats.liquidity}`],
      ['Volume', `$${stats.volume24h}`],
    ]);
    console.log('');

    // Print the narrative analysis, skipping blank lines.
    result.content
      .split('\n')
      .filter((line) => line.trim())
      .forEach((line) => console.log(' ' + line));
    console.log('');

    return result;
  } catch (err) {
    spin.fail('Analysis failed');
    error(err.message);
  }
}
309
+
310
+ export { INTENT_SYSTEM_PROMPT };