@darksol/terminal 0.8.1 → 0.9.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/llm/engine.js CHANGED
@@ -1,19 +1,14 @@
1
1
  import fetch from 'node-fetch';
2
- import { getKeyFromEnv, getKey, SERVICES } from '../config/keys.js';
2
+ import { getKeyFromEnv, getKey } from '../config/keys.js';
3
3
  import { getConfig } from '../config/store.js';
4
- import { theme } from '../ui/theme.js';
5
- import { spinner, kvDisplay, success, error, warn, info } from '../ui/components.js';
6
- import { showSection } from '../ui/banner.js';
7
-
8
- // ──────────────────────────────────────────────────
9
- // LLM PROVIDER ADAPTERS
10
- // ──────────────────────────────────────────────────
4
+ import { SessionMemory, extractMemories, searchMemories } from '../memory/index.js';
5
+ import { formatSystemPrompt as formatSoulSystemPrompt } from '../soul/index.js';
11
6
 
12
7
  const PROVIDERS = {
13
8
  openai: {
14
9
  url: 'https://api.openai.com/v1/chat/completions',
15
10
  defaultModel: 'gpt-4o',
16
- authHeader: (key) => ({ 'Authorization': `Bearer ${key}` }),
11
+ authHeader: (key) => ({ Authorization: `Bearer ${key}` }),
17
12
  parseResponse: (data) => data.choices?.[0]?.message?.content,
18
13
  parseUsage: (data) => data.usage,
19
14
  },
@@ -25,9 +20,9 @@ const PROVIDERS = {
25
20
  model,
26
21
  max_tokens: 4096,
27
22
  system: systemPrompt,
28
- messages: messages.map(m => ({
29
- role: m.role === 'system' ? 'user' : m.role,
30
- content: m.content,
23
+ messages: messages.map((message) => ({
24
+ role: message.role === 'system' ? 'user' : message.role,
25
+ content: message.content,
31
26
  })),
32
27
  }),
33
28
  parseResponse: (data) => data.content?.[0]?.text,
@@ -37,15 +32,22 @@ const PROVIDERS = {
37
32
  url: 'https://openrouter.ai/api/v1/chat/completions',
38
33
  defaultModel: 'anthropic/claude-sonnet-4-20250514',
39
34
  authHeader: (key) => ({
40
- 'Authorization': `Bearer ${key}`,
35
+ Authorization: `Bearer ${key}`,
41
36
  'HTTP-Referer': 'https://darksol.net',
42
37
  'X-Title': 'DARKSOL Terminal',
43
38
  }),
44
39
  parseResponse: (data) => data.choices?.[0]?.message?.content,
45
40
  parseUsage: (data) => data.usage,
46
41
  },
42
+ minimax: {
43
+ url: 'https://api.minimax.io/v1/chat/completions',
44
+ defaultModel: 'MiniMax-M2.5',
45
+ authHeader: (key) => ({ Authorization: `Bearer ${key}` }),
46
+ parseResponse: (data) => data.choices?.[0]?.message?.content,
47
+ parseUsage: (data) => data.usage,
48
+ },
47
49
  ollama: {
48
- url: null, // Set from config
50
+ url: null,
49
51
  defaultModel: 'llama3.1',
50
52
  authHeader: () => ({}),
51
53
  parseResponse: (data) => data.choices?.[0]?.message?.content || data.message?.content,
@@ -60,32 +62,23 @@ const PROVIDERS = {
60
62
  },
61
63
  };
62
64
 
63
- // ──────────────────────────────────────────────────
64
- // LLM ENGINE
65
- // ──────────────────────────────────────────────────
66
-
67
65
  export class LLMEngine {
68
66
  /**
   * Build an engine instance.
   * Provider/model resolve from explicit opts first, then stored config
   * (`llm.provider` / `llm.model`), then hard defaults. Conversation
   * context is delegated to a SessionMemory instance.
   * @param {object} [opts] - provider, model, apiKey, temperature,
   *   sessionMemory, maxTurns, maxRelevantMemories
   */
  constructor(opts = {}) {
    this.provider = opts.provider || getConfig('llm.provider') || 'openai';
    this.model = opts.model || getConfig('llm.model') || null;
    this.apiKey = opts.apiKey || null;
    this.systemPrompt = '';
    // ?? (not ||) so an explicit temperature of 0 is honored.
    this.temperature = opts.temperature ?? 0.7;
    this.sessionMemory = opts.sessionMemory || new SessionMemory({ maxTurns: opts.maxTurns || 20 });
    this.maxRelevantMemories = opts.maxRelevantMemories || 5;

    // Cumulative usage counters across every call made by this engine.
    this.totalInputTokens = 0;
    this.totalOutputTokens = 0;
    this.totalCalls = 0;
  }
82
79
 
83
- /**
84
- * Initialize the engine — resolve API key
85
- */
86
80
  async init(vaultPassword) {
87
81
  if (!this.apiKey) {
88
- // Try env first, then vault
89
82
  this.apiKey = getKeyFromEnv(this.provider);
90
83
  if (!this.apiKey && vaultPassword) {
91
84
  this.apiKey = await getKey(this.provider, vaultPassword);
@@ -93,7 +86,6 @@ export class LLMEngine {
93
86
  }
94
87
 
95
88
  if (!this.apiKey && this.provider !== 'ollama') {
96
- // Try auto-stored keys as last resort
97
89
  const { getKeyAuto } = await import('../config/keys.js');
98
90
  this.apiKey = getKeyAuto(this.provider);
99
91
  }
@@ -111,47 +103,42 @@ export class LLMEngine {
111
103
  this.model = providerConfig.defaultModel;
112
104
  }
113
105
 
114
- // Ollama URL from config
115
106
  if (this.provider === 'ollama') {
116
107
  const host = this.apiKey || getConfig('llm.ollamaHost') || 'http://localhost:11434';
117
108
  PROVIDERS.ollama.url = `${host}/v1/chat/completions`;
118
- this.apiKey = 'ollama'; // placeholder
109
+ this.apiKey = 'ollama';
119
110
  }
120
111
 
121
112
  return this;
122
113
  }
123
114
 
124
- /**
125
- * Set the system prompt (persona/context for the LLM)
126
- */
127
115
  /**
   * Set the base system prompt (persona/context) used for subsequent chats.
   * @param {string} prompt
   * @returns {LLMEngine} this, for chaining
   */
  setSystemPrompt(prompt) {
    this.systemPrompt = prompt;
    return this;
  }
131
119
 
132
- /**
133
- * Send a message and get a response
134
- */
135
120
  async chat(userMessage, opts = {}) {
136
121
  const providerConfig = PROVIDERS[this.provider];
137
-
138
- // Build messages array
122
+ const systemPrompt = opts.skipContext
123
+ ? (opts.systemPrompt || this.systemPrompt || '')
124
+ : await this._buildSystemPrompt(userMessage, opts.systemPrompt);
139
125
  const messages = [];
140
- if (this.systemPrompt && this.provider !== 'anthropic') {
141
- messages.push({ role: 'system', content: this.systemPrompt });
126
+
127
+ if (systemPrompt && this.provider !== 'anthropic') {
128
+ messages.push({ role: 'system', content: systemPrompt });
142
129
  }
143
130
 
144
- // Add conversation history
145
- for (const msg of this.conversationHistory) {
146
- messages.push(msg);
131
+ if (!opts.skipContext) {
132
+ for (const message of this.sessionMemory.getContext()) {
133
+ messages.push(message);
134
+ }
147
135
  }
148
136
 
149
137
  messages.push({ role: 'user', content: userMessage });
150
138
 
151
- // Build request body
152
139
  let body;
153
140
  if (providerConfig.buildBody) {
154
- body = providerConfig.buildBody(this.model, messages, this.systemPrompt);
141
+ body = providerConfig.buildBody(this.model, messages, systemPrompt);
155
142
  } else {
156
143
  body = {
157
144
  model: this.model,
@@ -160,21 +147,17 @@ export class LLMEngine {
160
147
  max_tokens: opts.maxTokens || 4096,
161
148
  };
162
149
 
163
- // JSON mode if requested
164
150
  if (opts.json) {
165
151
  body.response_format = { type: 'json_object' };
166
152
  }
167
153
  }
168
154
 
169
- const url = providerConfig.url;
170
- const headers = {
171
- 'Content-Type': 'application/json',
172
- ...providerConfig.authHeader(this.apiKey),
173
- };
174
-
175
- const response = await fetch(url, {
155
+ const response = await fetch(providerConfig.url, {
176
156
  method: 'POST',
177
- headers,
157
+ headers: {
158
+ 'Content-Type': 'application/json',
159
+ ...providerConfig.authHeader(this.apiKey),
160
+ },
178
161
  body: JSON.stringify(body),
179
162
  });
180
163
 
@@ -187,18 +170,21 @@ export class LLMEngine {
187
170
  const content = providerConfig.parseResponse(data);
188
171
  const usage = providerConfig.parseUsage(data);
189
172
 
190
- // Track usage
191
173
  this.totalCalls++;
192
174
  if (usage) {
193
175
  this.totalInputTokens += usage.input_tokens || usage.prompt_tokens || usage.input || 0;
194
176
  this.totalOutputTokens += usage.output_tokens || usage.completion_tokens || usage.output || 0;
195
177
  }
196
178
 
197
- // Store in history
198
179
  if (!opts.ephemeral) {
199
- this.conversationHistory.push({ role: 'user', content: userMessage });
200
- this.conversationHistory.push({ role: 'assistant', content });
201
- this._trimHistory();
180
+ this.sessionMemory.addTurn('user', userMessage);
181
+ this.sessionMemory.addTurn('assistant', content);
182
+ await this.sessionMemory.compact(this);
183
+
184
+ if (!opts.skipMemoryExtraction) {
185
+ await extractMemories(userMessage, 'user');
186
+ await extractMemories(content, 'assistant');
187
+ }
202
188
  }
203
189
 
204
190
  return {
@@ -209,24 +195,17 @@ export class LLMEngine {
209
195
  };
210
196
  }
211
197
 
212
- /**
213
- * One-shot completion (no history)
214
- */
215
198
  /**
   * One-shot completion: delegates to chat() with ephemeral=true so the
   * exchange is not recorded in session memory.
   * @param {string} prompt
   * @param {object} [opts] - forwarded to chat()
   * @returns {Promise<object>} the chat() result
   */
  async complete(prompt, opts = {}) {
    return this.chat(prompt, { ...opts, ephemeral: true });
  }
218
201
 
219
- /**
220
- * Get structured JSON response
221
- */
222
202
  async json(prompt, opts = {}) {
223
203
  const result = await this.chat(
224
- prompt + '\n\nRespond with valid JSON only. No markdown, no explanation.',
204
+ `${prompt}\n\nRespond with valid JSON only. No markdown, no explanation.`,
225
205
  { ...opts, ephemeral: true }
226
206
  );
227
207
 
228
208
  try {
229
- // Extract JSON from response (handle markdown code blocks)
230
209
  let jsonStr = result.content;
231
210
  const match = jsonStr.match(/```(?:json)?\s*([\s\S]*?)\s*```/);
232
211
  if (match) jsonStr = match[1];
@@ -239,17 +218,11 @@ export class LLMEngine {
239
218
  return result;
240
219
  }
241
220
 
242
- /**
243
- * Clear conversation history
244
- */
245
221
  /**
   * Clear all in-session conversation memory (retained turns and summary).
   * @returns {LLMEngine} this, for chaining
   */
  clearHistory() {
    this.sessionMemory.clear();
    return this;
  }
249
225
 
250
- /**
251
- * Get usage stats
252
- */
253
226
  getUsage() {
254
227
  return {
255
228
  calls: this.totalCalls,
@@ -261,36 +234,37 @@ export class LLMEngine {
261
234
  };
262
235
  }
263
236
 
264
- /**
265
- * Trim history to stay within token budget (rough estimate)
266
- */
267
- _trimHistory() {
268
- // Rough: 1 token 4 chars
269
- const estimateTokens = (msgs) => msgs.reduce((sum, m) => sum + Math.ceil(m.content.length / 4), 0);
237
+ async _buildSystemPrompt(userMessage, overridePrompt) {
238
+ const parts = [];
239
+ const soulPrompt = formatSoulSystemPrompt();
240
+ if (soulPrompt) parts.push(soulPrompt);
241
+ if (overridePrompt || this.systemPrompt) parts.push(overridePrompt || this.systemPrompt);
270
242
 
271
- while (this.conversationHistory.length > 2 && estimateTokens(this.conversationHistory) > this.maxHistoryTokens) {
272
- // Remove oldest pair (user + assistant)
273
- this.conversationHistory.splice(0, 2);
243
+ const summary = this.sessionMemory.getSummary();
244
+ if (summary) {
245
+ parts.push(`Session summary:\n${summary}`);
274
246
  }
247
+
248
+ const relevantMemories = await searchMemories(userMessage);
249
+ if (relevantMemories.length > 0) {
250
+ parts.push(
251
+ `Relevant persistent memories:\n${relevantMemories
252
+ .slice(0, this.maxRelevantMemories)
253
+ .map((memory) => `- [${memory.category}] ${memory.content}`)
254
+ .join('\n')}`
255
+ );
256
+ }
257
+
258
+ return parts.filter(Boolean).join('\n\n');
275
259
  }
276
260
  }
277
261
 
278
- // ──────────────────────────────────────────────────
279
- // FACTORY
280
- // ──────────────────────────────────────────────────
281
-
282
- /**
283
- * Create and initialize an LLM engine
284
- */
285
262
  export async function createLLM(opts = {}) {
286
263
  const engine = new LLMEngine(opts);
287
264
  await engine.init(opts.vaultPassword);
288
265
  return engine;
289
266
  }
290
267
 
291
- /**
292
- * Quick one-shot LLM call (auto-resolves provider/key)
293
- */
294
268
  export async function ask(prompt, opts = {}) {
295
269
  const engine = await createLLM(opts);
296
270
  return engine.complete(prompt, opts);
@@ -0,0 +1,275 @@
1
+ import { mkdir, readFile, writeFile } from 'fs/promises';
2
+ import { existsSync } from 'fs';
3
+ import { homedir } from 'os';
4
+ import { join } from 'path';
5
+ import { randomUUID } from 'crypto';
6
+
7
// Persistent memory lives under the user's home directory, shared by all sessions.
const MEMORY_DIR = join(homedir(), '.darksol', 'memory');
const MEMORY_FILE = join(MEMORY_DIR, 'memory.json');
// The only categories saveMemory() stores verbatim; anything else maps to 'fact'.
const MEMORY_CATEGORIES = new Set(['preference', 'fact', 'decision', 'lesson']);
// Heuristic triggers used by extractMemories(); the first pattern that matches
// a segment decides its category.
const MEMORY_PATTERNS = [
  { regex: /\b(i prefer|i like|i usually|my favorite)\b/i, category: 'preference' },
  { regex: /\b(remember that|remember this|my address is|i live at|my phone number is)\b/i, category: 'fact' },
  { regex: /\b(always|never|from now on|do not|don't)\b/i, category: 'decision' },
  { regex: /\b(i learned|lesson|next time|that means)\b/i, category: 'lesson' },
];
16
+
17
/**
 * Ensure the memory directory and seed file exist.
 *
 * Uses the 'wx' flag so the seed file is created only when absent, in one
 * atomic call — avoiding the existsSync/writeFile check-then-act race when
 * two processes initialize the store concurrently.
 * @returns {Promise<void>}
 */
async function ensureMemoryStore() {
  await mkdir(MEMORY_DIR, { recursive: true });
  try {
    await writeFile(MEMORY_FILE, '[]\n', { encoding: 'utf8', flag: 'wx' });
  } catch (err) {
    // EEXIST means another caller already seeded the file — that's fine.
    if (err.code !== 'EEXIST') throw err;
  }
}
27
+
28
/**
 * Load all persistent memories from disk.
 * A missing, unreadable, or malformed store degrades to an empty list
 * rather than throwing.
 * @returns {Promise<Array<{id: string, content: string, category: string, timestamp: string, source: string}>>}
 */
export async function loadMemories() {
  await ensureMemoryStore();

  let parsed;
  try {
    parsed = JSON.parse(await readFile(MEMORY_FILE, 'utf8'));
  } catch {
    // Corrupt or unreadable store — treat as empty rather than failing.
    return [];
  }
  return Array.isArray(parsed) ? parsed : [];
}
43
+
44
/**
 * Persist the full memory list as pretty-printed JSON with a trailing newline.
 * @param {Array<object>} memories
 * @returns {Promise<void>}
 */
async function writeMemories(memories) {
  await ensureMemoryStore();
  const payload = JSON.stringify(memories, null, 2);
  await writeFile(MEMORY_FILE, `${payload}\n`, 'utf8');
}
53
+
54
/**
 * Save a memory item to disk.
 * Blank content is rejected; an unknown category falls back to 'fact';
 * a case-insensitive content duplicate returns the existing entry unchanged.
 * @param {string} content
 * @param {'preference'|'fact'|'decision'|'lesson'} category
 * @param {string} [source='user']
 * @returns {Promise<object|null>} the stored (or pre-existing) entry, or null for blank content
 */
export async function saveMemory(content, category, source = 'user') {
  const text = String(content || '').trim();
  if (!text) return null;

  const memories = await loadMemories();
  const needle = text.toLowerCase();
  const existing = memories.find((item) => item.content.toLowerCase() === needle);
  if (existing) return existing;

  const entry = {
    id: randomUUID(),
    content: text,
    category: MEMORY_CATEGORIES.has(category) ? category : 'fact',
    timestamp: new Date().toISOString(),
    source,
  };
  memories.push(entry);
  await writeMemories(memories);
  return entry;
}
82
+
83
/**
 * Search memories by a free-text query.
 * The query is split on whitespace; each memory is ranked by how many terms
 * appear in its content/category/source (case-insensitive substring match).
 * Ties break on recency (newest first).
 * @param {string} query
 * @returns {Promise<Array<object>>} matching memories, best first
 */
export async function searchMemories(query) {
  const normalized = String(query || '').trim().toLowerCase();
  if (!normalized) return [];

  const terms = normalized.split(/\s+/).filter(Boolean);
  const memories = await loadMemories();

  const scored = [];
  for (const memory of memories) {
    const haystack = `${memory.content} ${memory.category} ${memory.source}`.toLowerCase();
    let score = 0;
    for (const term of terms) {
      if (haystack.includes(term)) score += 1;
    }
    if (score > 0) scored.push({ memory, score });
  }

  scored.sort((a, b) =>
    b.score !== a.score
      ? b.score - a.score
      : new Date(b.memory.timestamp).getTime() - new Date(a.memory.timestamp).getTime()
  );
  return scored.map((item) => item.memory);
}
108
+
109
/**
 * Return the most recent N memories, newest first.
 * @param {number} [n=10]
 * @returns {Promise<Array<object>>}
 */
export async function getRecentMemories(n = 10) {
  const memories = await loadMemories();
  // Sort a copy: loadMemories' array is ours, but keep the habit of not
  // mutating inputs to sort.
  const byNewest = memories.slice();
  byNewest.sort(
    (a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime()
  );
  return byNewest.slice(0, n);
}
120
+
121
/**
 * Remove memories older than maxAge milliseconds.
 * Non-positive or non-finite ages are a no-op returning 0.
 * Entries with unparseable timestamps (NaN) compare false and are pruned.
 * @param {number} maxAge - age threshold in milliseconds
 * @returns {Promise<number>} how many entries were removed
 */
export async function pruneMemories(maxAge) {
  if (!Number.isFinite(maxAge) || maxAge <= 0) return 0;

  const cutoff = Date.now() - maxAge;
  const memories = await loadMemories();
  const kept = [];
  for (const memory of memories) {
    if (new Date(memory.timestamp).getTime() >= cutoff) kept.push(memory);
  }
  await writeMemories(kept);
  return memories.length - kept.length;
}
135
+
136
/**
 * Remove all persistent memories.
 * @returns {Promise<void>}
 */
export async function clearMemories() {
  // writeMemories() already calls ensureMemoryStore(), so the separate
  // ensure call the original made here was redundant.
  await writeMemories([]);
}
144
+
145
/**
 * Export all memories to a JSON file and return its path.
 * @param {string} filePath - destination path (overwritten if present)
 * @returns {Promise<string>} the same filePath, for chaining
 */
export async function exportMemories(filePath) {
  const memories = await loadMemories();
  const payload = `${JSON.stringify(memories, null, 2)}\n`;
  await writeFile(filePath, payload, 'utf8');
  return filePath;
}
155
+
156
/**
 * Heuristically extract memory-worthy statements from a message.
 * The text is split into lines/sentences; the first pattern in
 * MEMORY_PATTERNS that matches a segment decides its category. Saves run
 * sequentially so saveMemory's duplicate check sees each prior write.
 * @param {string} content
 * @param {string} [source='user']
 * @returns {Promise<Array<object>>} the entries that were actually saved
 */
export async function extractMemories(content, source = 'user') {
  const text = String(content || '').trim();
  if (!text) return [];

  const segments = text
    .split(/[\n\r]+|(?<=[.!?])\s+/)
    .map((segment) => segment.trim())
    .filter(Boolean);

  const saved = [];
  for (const segment of segments) {
    const match = MEMORY_PATTERNS.find((pattern) => pattern.regex.test(segment));
    if (!match) continue;
    const entry = await saveMemory(segment, match.category, source);
    if (entry) saved.push(entry);
  }
  return saved;
}
184
+
185
/**
 * Rolling in-session conversation memory.
 *
 * Keeps the most recent turns verbatim and, once the turn count exceeds
 * `maxTurns`, folds the oldest turns into a compact natural-language
 * summary via the provided LLM (with a lossy string fallback when the
 * LLM call fails).
 */
export class SessionMemory {
  /**
   * @param {{maxTurns?: number}} [opts] - maxTurns defaults to 20.
   */
  constructor(opts = {}) {
    this.maxTurns = opts.maxTurns || 20;
    this.messages = [];
    this.summary = '';
  }

  /**
   * Record one conversation turn; blank/whitespace-only content is ignored.
   * @param {'user'|'assistant'|'system'} role
   * @param {string} content
   * @returns {void}
   */
  addTurn(role, content) {
    const text = String(content || '').trim();
    if (!text) return;
    this.messages.push({ role, content: text });
  }

  /**
   * Snapshot of the retained turns (callers may mutate the copy freely).
   * @returns {Array<{role: string, content: string}>}
   */
  getContext() {
    return this.messages.slice();
  }

  /**
   * Current rolling summary ('' until something has been compacted).
   * @returns {string}
   */
  getSummary() {
    return this.summary;
  }

  /**
   * Drop all retained turns and the summary.
   * @returns {void}
   */
  clear() {
    this.messages = [];
    this.summary = '';
  }

  /**
   * Fold the oldest turns into the summary once `maxTurns` is exceeded.
   * @param {{complete: (prompt: string, opts?: object) => Promise<{content: string}>}} llm
   * @returns {Promise<void>}
   */
  async compact(llm) {
    if (this.messages.length <= this.maxTurns) return;

    // Take at least the overflow, but no less than half a window, so we
    // don't re-summarize after every single new turn.
    const overflow = this.messages.length - this.maxTurns;
    const take = Math.max(overflow, Math.ceil(this.maxTurns / 2));
    const removed = this.messages.splice(0, take);

    const transcript = removed
      .map((turn) => `${turn.role.toUpperCase()}: ${turn.content}`)
      .join('\n');

    const promptParts = [
      'Summarize this conversation context for future replies.',
      'Preserve preferences, decisions, constraints, open tasks, and factual details.',
      'Keep it under 180 words.',
    ];
    if (this.summary) promptParts.push(`Existing summary:\n${this.summary}`);
    promptParts.push(`Conversation to compact:\n${transcript}`);

    try {
      const reply = await llm.complete(promptParts.join('\n\n'), {
        ephemeral: true,
        skipContext: true,
        skipMemoryExtraction: true,
      });
      // Keep the previous summary if the model returned nothing usable.
      const next = String(reply.content || '').trim();
      if (next) this.summary = next;
    } catch {
      // LLM unavailable: keep a crude pipe-joined tail of what was dropped,
      // capped at the last 1200 characters.
      const tail = removed
        .slice(-4)
        .map((turn) => `${turn.role}: ${turn.content}`)
        .join(' | ');
      this.summary = [this.summary, tail].filter(Boolean).join(' | ').slice(-1200);
    }
  }
}
274
+
275
// Expose the on-disk storage locations alongside the memory API.
export { MEMORY_DIR, MEMORY_FILE };