wispy-cli 0.6.1 → 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/core/config.mjs +104 -0
- package/core/engine.mjs +532 -0
- package/core/index.mjs +12 -0
- package/core/mcp.mjs +8 -0
- package/core/providers.mjs +410 -0
- package/core/session.mjs +196 -0
- package/core/tools.mjs +526 -0
- package/lib/channels/index.mjs +45 -246
- package/lib/wispy-repl.mjs +332 -2447
- package/lib/wispy-tui.mjs +105 -588
- package/package.json +2 -1
|
@@ -0,0 +1,410 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* core/providers.mjs — Provider abstraction for Wispy
|
|
3
|
+
*
|
|
4
|
+
* Class ProviderRegistry:
|
|
5
|
+
* - detect() → available providers list
|
|
6
|
+
* - get(name) → Provider
|
|
7
|
+
* - getDefault() → Provider (first available)
|
|
8
|
+
* - chat(messages, tools, opts?) → response
|
|
9
|
+
* - setModel(model) → void
|
|
10
|
+
*
|
|
11
|
+
* Each provider: { name, label, models, chat(messages, tools, opts) }
|
|
12
|
+
*/
|
|
13
|
+
|
|
14
|
+
import { PROVIDERS, detectProvider } from "./config.mjs";
|
|
15
|
+
|
|
16
|
+
// Chat-completions endpoints for every provider that speaks the OpenAI wire
// format. Ollama's base URL can be overridden via OLLAMA_HOST (defaults to the
// local daemon); the others are fixed public API hosts.
const OPENAI_COMPAT_ENDPOINTS = {
  openai: "https://api.openai.com/v1/chat/completions",
  openrouter: "https://openrouter.ai/api/v1/chat/completions",
  groq: "https://api.groq.com/openai/v1/chat/completions",
  deepseek: "https://api.deepseek.com/v1/chat/completions",
  ollama: `${process.env.OLLAMA_HOST ?? "http://localhost:11434"}/v1/chat/completions`,
};
|
|
23
|
+
|
|
24
|
+
/**
 * Routes chat requests to the configured LLM provider (Gemini, Anthropic, or
 * any OpenAI-compatible endpoint) and keeps a rough per-session token tally.
 */
export class ProviderRegistry {
  constructor() {
    this._detected = null;   // raw detectProvider() result, kept for reference
    this._provider = null;   // active provider id, e.g. "google" | "anthropic" | "openai"
    this._apiKey = null;
    this._model = null;
    this._sessionTokens = { input: 0, output: 0 }; // heuristic counts, see _estimateTokens
  }

  /**
   * Initialize by detecting an available provider from the environment.
   * @param {{provider?: string, key?: string, model?: string}} overrides - explicit
   *   values that take precedence over whatever detectProvider() finds.
   * @returns {Promise<{provider, key, model}|null>} null when nothing is configured
   *   and no provider override was given.
   */
  async init(overrides = {}) {
    const detected = await detectProvider();
    if (!detected && !overrides.provider) {
      return null;
    }
    this._provider = overrides.provider ?? detected?.provider;
    this._apiKey = overrides.key ?? detected?.key;
    this._model = overrides.model ?? detected?.model;
    this._detected = detected;
    return { provider: this._provider, key: this._apiKey, model: this._model };
  }

  get provider() { return this._provider; }
  get apiKey() { return this._apiKey; }
  get model() { return this._model; }
  get sessionTokens() { return this._sessionTokens; }

  /** Switch the active model and export it to the environment for child processes. */
  setModel(model) {
    this._model = model;
    process.env.WISPY_MODEL = model;
  }

  /**
   * List providers that look usable from the current environment: any provider
   * with one of its env keys set, or a local provider when OLLAMA_HOST is set.
   * @returns {{id, label, defaultModel}[]}
   */
  detect() {
    return Object.entries(PROVIDERS)
      .filter(([id, p]) => {
        for (const k of p.envKeys) { if (process.env[k]) return true; }
        return p.local && process.env.OLLAMA_HOST;
      })
      .map(([id, p]) => ({ id, label: p.label, defaultModel: p.defaultModel }));
  }

  /** Provider config by name, or null when unknown. */
  get(name) {
    return PROVIDERS[name] ?? null;
  }

  /** Config of the currently active provider, or null when none is set. */
  getDefault() {
    return PROVIDERS[this._provider] ?? null;
  }

  // Crude heuristic used for session cost display: ~4 characters per token.
  _estimateTokens(text) {
    return Math.ceil((text?.length ?? 0) / 4);
  }

  /**
   * Main chat entrypoint. Routes to the correct provider implementation.
   * @param {Array<{role, content, toolCalls?}>} messages - conversation history
   * @param {Array<{name, description, parameters}>} tools - tool definitions
   * @param {{onChunk?: (chunk: string) => void, model?: string}} opts
   * @returns {Promise<{type: "text", text: string} | {type: "tool_calls", calls: Array}>}
   * @throws {Error} on any non-2xx provider response.
   */
  async chat(messages, tools = [], opts = {}) {
    const model = opts.model ?? this._model;
    if (this._provider === "google") {
      return this._chatGemini(messages, tools, opts, model);
    }
    if (this._provider === "anthropic") {
      return this._chatAnthropic(messages, tools, opts, model);
    }
    // Everything else goes through the OpenAI-compatible wire format.
    return this._chatOpenAICompat(messages, tools, opts, model);
  }

  /**
   * Gemini REST backend. Streams via SSE unless the history contains tool
   * results, in which case a single non-streaming generateContent call is made.
   */
  async _chatGemini(messages, tools, opts, model) {
    const systemInstruction = messages.find(m => m.role === "system")?.content ?? "";
    const contents = [];

    // Translate the internal message shape to Gemini "contents".
    for (const m of messages) {
      if (m.role === "system") continue; // sent separately as system_instruction
      if (m.role === "tool_result") {
        contents.push({
          role: "user",
          parts: [{ functionResponse: { name: m.toolName, response: m.result } }],
        });
      } else if (m.role === "assistant" && m.toolCalls) {
        contents.push({
          role: "model",
          parts: m.toolCalls.map(tc => ({
            functionCall: { name: tc.name, args: tc.args },
          })),
        });
      } else {
        contents.push({
          role: m.role === "assistant" ? "model" : "user",
          parts: [{ text: m.content }],
        });
      }
    }

    const inputText = contents.map(c => c.parts?.map(p => p.text ?? JSON.stringify(p)).join("")).join("");
    this._sessionTokens.input += this._estimateTokens(systemInstruction + inputText);

    const geminiTools = tools.length > 0 ? [{
      functionDeclarations: tools.map(t => ({
        name: t.name,
        description: t.description,
        parameters: t.parameters,
      })),
    }] : [];

    // Tool-result turns use the non-streaming endpoint; plain turns stream SSE.
    const hasToolResults = messages.some(m => m.role === "tool_result");
    const useStreaming = !hasToolResults;
    const endpoint = useStreaming ? "streamGenerateContent" : "generateContent";
    const url = `https://generativelanguage.googleapis.com/v1beta/models/${model}:${endpoint}?${useStreaming ? "alt=sse&" : ""}key=${this._apiKey}`;

    const body = {
      // undefined is dropped by JSON.stringify when there is no system prompt
      system_instruction: systemInstruction ? { parts: [{ text: systemInstruction }] } : undefined,
      contents,
      generationConfig: { temperature: 0.7, maxOutputTokens: 4096 },
    };
    if (geminiTools.length > 0) body.tools = geminiTools;

    const response = await fetch(url, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(body),
    });

    if (!response.ok) {
      const err = await response.text();
      throw new Error(`Gemini API error ${response.status}: ${err.slice(0, 300)}`);
    }

    if (useStreaming) {
      const reader = response.body.getReader();
      const decoder = new TextDecoder();
      let fullText = "";
      let sseBuffer = ""; // holds a partial SSE line between reads

      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        sseBuffer += decoder.decode(value, { stream: true });
        const sseLines = sseBuffer.split("\n");
        sseBuffer = sseLines.pop() ?? "";

        for (const line of sseLines) {
          if (!line.startsWith("data: ")) continue;
          const ld = line.slice(6).trim();
          if (!ld || ld === "[DONE]") continue;
          try {
            const parsed = JSON.parse(ld);
            const streamParts = parsed.candidates?.[0]?.content?.parts ?? [];
            const streamFC = streamParts.filter(p => p.functionCall);
            if (streamFC.length > 0) {
              // Function calls end the turn immediately; remaining stream is dropped.
              this._sessionTokens.output += this._estimateTokens(JSON.stringify(streamFC));
              return { type: "tool_calls", calls: streamFC.map(p => ({ name: p.functionCall.name, args: p.functionCall.args })) };
            }
            const t = streamParts.map(p => p.text ?? "").join("");
            if (t) { fullText += t; opts.onChunk?.(t); }
          } catch { /* skip malformed SSE chunks */ }
        }
      }
      this._sessionTokens.output += this._estimateTokens(fullText);
      return { type: "text", text: fullText };
    }

    // Non-streaming path (tool-result turns).
    const data = await response.json();
    const candidate = data.candidates?.[0];
    if (!candidate) throw new Error("No response from Gemini");

    const parts = candidate.content?.parts ?? [];
    const functionCalls = parts.filter(p => p.functionCall);
    if (functionCalls.length > 0) {
      this._sessionTokens.output += this._estimateTokens(JSON.stringify(functionCalls));
      return { type: "tool_calls", calls: functionCalls.map(p => ({ name: p.functionCall.name, args: p.functionCall.args })) };
    }

    const text = parts.map(p => p.text ?? "").join("");
    this._sessionTokens.output += this._estimateTokens(text);
    opts.onChunk?.(text);
    return { type: "text", text };
  }

  /** Anthropic Messages backend. Always streams; accumulates tool_use blocks. */
  async _chatAnthropic(messages, tools, opts, model) {
    const systemPrompt = messages.find(m => m.role === "system")?.content ?? "";
    const anthropicMessages = [];

    // Translate the internal message shape to Anthropic content blocks.
    for (const m of messages) {
      if (m.role === "system") continue; // sent as the top-level "system" field
      if (m.role === "tool_result") {
        anthropicMessages.push({
          role: "user",
          content: [{ type: "tool_result", tool_use_id: m.toolUseId ?? m.toolName, content: JSON.stringify(m.result) }],
        });
      } else if (m.role === "assistant" && m.toolCalls) {
        anthropicMessages.push({
          role: "assistant",
          content: m.toolCalls.map(tc => ({
            type: "tool_use", id: tc.id ?? tc.name, name: tc.name, input: tc.args,
          })),
        });
      } else {
        anthropicMessages.push({
          role: m.role === "assistant" ? "assistant" : "user",
          content: m.content,
        });
      }
    }

    const inputText = anthropicMessages.map(m => typeof m.content === "string" ? m.content : JSON.stringify(m.content)).join("");
    this._sessionTokens.input += this._estimateTokens(systemPrompt + inputText);

    const anthropicTools = tools.map(t => ({
      name: t.name,
      description: t.description,
      input_schema: t.parameters,
    }));

    const response = await fetch("https://api.anthropic.com/v1/messages", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "x-api-key": this._apiKey,
        "anthropic-version": "2023-06-01",
      },
      body: JSON.stringify({
        model,
        max_tokens: 4096,
        system: systemPrompt,
        messages: anthropicMessages,
        tools: anthropicTools,
        stream: true,
      }),
    });

    if (!response.ok) {
      const err = await response.text();
      throw new Error(`Anthropic API error ${response.status}: ${err.slice(0, 300)}`);
    }

    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    let buffer = "";
    let fullText = "";
    const toolCalls = [];
    let currentToolCall = null;   // tool_use block currently being assembled
    let currentToolInput = "";    // its JSON input, delivered as partial deltas

    while (true) {
      const { done, value } = await reader.read();
      if (done) break;

      buffer += decoder.decode(value, { stream: true });
      const lines = buffer.split("\n");
      buffer = lines.pop() ?? "";

      for (const line of lines) {
        if (!line.startsWith("data: ")) continue;
        const data = line.slice(6).trim();
        if (!data) continue;

        try {
          const event = JSON.parse(data);
          if (event.type === "content_block_start") {
            if (event.content_block?.type === "tool_use") {
              currentToolCall = { id: event.content_block.id, name: event.content_block.name, args: {} };
              currentToolInput = "";
            }
          } else if (event.type === "content_block_delta") {
            if (event.delta?.type === "text_delta") {
              fullText += event.delta.text;
              opts.onChunk?.(event.delta.text);
            } else if (event.delta?.type === "input_json_delta") {
              currentToolInput += event.delta.partial_json ?? "";
            }
          } else if (event.type === "content_block_stop") {
            if (currentToolCall) {
              // Tolerate malformed/empty accumulated JSON; fall back to {}.
              try { currentToolCall.args = JSON.parse(currentToolInput); } catch { currentToolCall.args = {}; }
              toolCalls.push(currentToolCall);
              currentToolCall = null;
              currentToolInput = "";
            }
          }
        } catch { /* skip malformed SSE chunks */ }
      }
    }

    this._sessionTokens.output += this._estimateTokens(fullText + JSON.stringify(toolCalls));

    if (toolCalls.length > 0) {
      return { type: "tool_calls", calls: toolCalls };
    }
    return { type: "text", text: fullText };
  }

  /** OpenAI-compatible backend (OpenAI/OpenRouter/Groq/DeepSeek/Ollama). Non-streaming. */
  async _chatOpenAICompat(messages, tools, opts, model) {
    const openaiMessages = messages.map(m => {
      if (m.role === "tool_result") {
        return { role: "tool", tool_call_id: m.toolCallId ?? m.toolName, content: JSON.stringify(m.result) };
      }
      if (m.role === "assistant" && m.toolCalls) {
        return {
          role: "assistant",
          content: null,
          tool_calls: m.toolCalls.map((tc, i) => ({
            id: tc.id ?? `call_${i}`,
            type: "function",
            function: { name: tc.name, arguments: JSON.stringify(tc.args) },
          })),
        };
      }
      return { role: m.role, content: m.content };
    });

    const inputText = openaiMessages.map(m => m.content ?? "").join("");
    this._sessionTokens.input += this._estimateTokens(inputText);

    const endpoint = OPENAI_COMPAT_ENDPOINTS[this._provider] ?? OPENAI_COMPAT_ENDPOINTS.openai;
    const headers = { "Content-Type": "application/json" };
    if (this._apiKey) headers["Authorization"] = `Bearer ${this._apiKey}`;
    if (this._provider === "openrouter") headers["HTTP-Referer"] = "https://wispy.dev";

    // Ollama's OpenAI shim does not accept the tools field here.
    const supportsTools = !["ollama"].includes(this._provider);
    const body = { model, messages: openaiMessages, temperature: 0.7, max_tokens: 4096 };
    if (supportsTools && tools.length > 0) {
      body.tools = tools.map(t => ({
        type: "function",
        function: { name: t.name, description: t.description, parameters: t.parameters },
      }));
    }

    const response = await fetch(endpoint, {
      method: "POST",
      headers,
      body: JSON.stringify(body),
    });

    if (!response.ok) {
      const err = await response.text();
      throw new Error(`OpenAI API error ${response.status}: ${err.slice(0, 300)}`);
    }

    const data = await response.json();
    const choice = data.choices?.[0];
    if (!choice) throw new Error("No response from OpenAI");

    if (choice.message?.tool_calls?.length > 0) {
      const calls = choice.message.tool_calls.map(tc => {
        // FIX: providers occasionally emit malformed JSON in function.arguments;
        // degrade to {} (matching the Anthropic path) instead of throwing mid-turn.
        let args = {};
        try { args = JSON.parse(tc.function.arguments); } catch { /* keep {} */ }
        return { id: tc.id, name: tc.function.name, args };
      });
      this._sessionTokens.output += this._estimateTokens(JSON.stringify(calls));
      return { type: "tool_calls", calls };
    }

    const text = choice.message?.content ?? "";
    this._sessionTokens.output += this._estimateTokens(text);
    opts.onChunk?.(text);
    return { type: "text", text };
  }

  /**
   * Human-readable session usage summary, e.g. "2000 tokens (~$0.0125)".
   * Prices are USD per million tokens; unknown models fall back to a generic rate.
   */
  formatCost() {
    const MODEL_PRICING = {
      "gemini-2.5-flash": { input: 0.15, output: 0.60 },
      "gemini-2.5-pro": { input: 1.25, output: 10.0 },
      "gemini-2.0-flash": { input: 0.10, output: 0.40 },
      "claude-sonnet-4-20250514": { input: 3.0, output: 15.0 },
      "claude-opus-4-6": { input: 15.0, output: 75.0 },
      "claude-haiku-3.5": { input: 0.80, output: 4.0 },
      "gpt-4o": { input: 2.50, output: 10.0 },
      "gpt-4o-mini": { input: 0.15, output: 0.60 },
      "gpt-4.1": { input: 2.0, output: 8.0 },
      "gpt-4.1-mini": { input: 0.40, output: 1.60 },
      "gpt-4.1-nano": { input: 0.10, output: 0.40 },
      "o4-mini": { input: 1.10, output: 4.40 },
      "llama-3.3-70b-versatile": { input: 0.59, output: 0.79 },
      "deepseek-chat": { input: 0.27, output: 1.10 },
      "llama3.2": { input: 0, output: 0 },
    };
    const pricing = MODEL_PRICING[this._model] ?? { input: 1.0, output: 3.0 };
    const cost = (this._sessionTokens.input * pricing.input + this._sessionTokens.output * pricing.output) / 1_000_000;
    const total = this._sessionTokens.input + this._sessionTokens.output;
    return `${total} tokens (~$${cost.toFixed(4)})`;
  }
}
|
package/core/session.mjs
ADDED
|
@@ -0,0 +1,196 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* core/session.mjs — Session management for Wispy
|
|
3
|
+
*
|
|
4
|
+
* Class SessionManager:
|
|
5
|
+
* - create({ workstream?, channel?, chatId? }) → Session
|
|
6
|
+
* - get(id) → Session
|
|
7
|
+
* - list(filter?) → Session[]
|
|
8
|
+
* - addMessage(id, message) → void
|
|
9
|
+
* - clear(id) → void
|
|
10
|
+
* - save(id) → void
|
|
11
|
+
* - load(id) → Session
|
|
12
|
+
* - getOrCreate(key) → Session (for channel adapters: key = "telegram:chatId")
|
|
13
|
+
*
|
|
14
|
+
* File-based persistence: ~/.wispy/sessions/{id}.json
|
|
15
|
+
*/
|
|
16
|
+
|
|
17
|
+
import os from "node:os";
|
|
18
|
+
import path from "node:path";
|
|
19
|
+
import { readFile, writeFile, mkdir, readdir } from "node:fs/promises";
|
|
20
|
+
import { SESSIONS_DIR } from "./config.mjs";
|
|
21
|
+
|
|
22
|
+
/**
 * A single conversation: identity, optional channel binding, message history,
 * and created/updated timestamps. Serializable via toJSON for disk persistence.
 */
export class Session {
  /**
   * @param {object} data
   * @param {string} data.id - unique session id
   * @param {string} [data.workstream="default"]
   * @param {string|null} [data.channel] - e.g. "telegram" for channel-bound sessions
   * @param {string|null} [data.chatId] - channel-specific chat identifier
   * @param {Array} [data.messages=[]]
   * @param {string|null} [data.createdAt] - ISO timestamp; defaults to now
   * @param {string|null} [data.updatedAt] - ISO timestamp; defaults to now
   */
  constructor({ id, workstream = "default", channel = null, chatId = null, messages = [], createdAt = null, updatedAt = null }) {
    this.id = id;
    this.workstream = workstream;
    this.channel = channel;
    this.chatId = chatId;
    this.messages = messages;
    this.createdAt = createdAt ?? new Date().toISOString();
    // FIX: honor a persisted updatedAt when rehydrating from disk instead of
    // clobbering it with "now" on every load; fresh sessions still default to now.
    this.updatedAt = updatedAt ?? new Date().toISOString();
  }

  /** Plain-object snapshot used for JSON persistence. */
  toJSON() {
    return {
      id: this.id,
      workstream: this.workstream,
      channel: this.channel,
      chatId: this.chatId,
      messages: this.messages,
      createdAt: this.createdAt,
      updatedAt: this.updatedAt,
    };
  }
}
|
|
45
|
+
|
|
46
|
+
/**
 * Tracks sessions in memory and persists them as JSON files under SESSIONS_DIR.
 * Channel adapters address sessions by a composite "channel:chatId" key.
 */
export class SessionManager {
  constructor() {
    this._sessions = new Map(); // id → Session (in-memory cache)
    this._keyMap = new Map();   // "channel:chatId" composite key → id
  }

  /** Shared guard: return the cached session for id or throw. */
  _require(id) {
    const found = this._sessions.get(id);
    if (!found) throw new Error(`Session not found: ${id}`);
    return found;
  }

  /** Create a new session, indexing it by composite key when channel-bound. */
  create({ workstream = "default", channel = null, chatId = null } = {}) {
    const id = `${Date.now().toString(36)}-${Math.random().toString(36).slice(2, 6)}`;
    const created = new Session({ id, workstream, channel, chatId });
    this._sessions.set(id, created);
    if (channel && chatId) this._keyMap.set(`${channel}:${chatId}`, id);
    return created;
  }

  /** Cached session by id; null when not in memory. */
  get(id) {
    return this._sessions.get(id) ?? null;
  }

  /** Cached session by id, falling back to a disk load. */
  async getOrLoad(id) {
    const cached = this._sessions.get(id);
    if (cached !== undefined) return cached;
    return this.load(id);
  }

  /** All cached sessions, optionally narrowed by workstream and/or channel. */
  list(filter = null) {
    const sessions = [...this._sessions.values()];
    if (!filter) return sessions;
    return sessions.filter(
      (s) =>
        !(filter.workstream && s.workstream !== filter.workstream) &&
        !(filter.channel && s.channel !== filter.channel)
    );
  }

  /** Append a message, bump updatedAt, and cap history at the last 50 entries. */
  addMessage(id, message) {
    const target = this._require(id);
    target.messages.push(message);
    target.updatedAt = new Date().toISOString();
    if (target.messages.length > 50) {
      target.messages = target.messages.slice(-50);
    }
  }

  /** Wipe a session's message history. */
  clear(id) {
    const target = this._require(id);
    target.messages = [];
    target.updatedAt = new Date().toISOString();
  }

  /** Persist a session as pretty-printed JSON under SESSIONS_DIR. */
  async save(id) {
    const target = this._require(id);
    await mkdir(SESSIONS_DIR, { recursive: true });
    const filePath = path.join(SESSIONS_DIR, `${id}.json`);
    await writeFile(filePath, JSON.stringify(target.toJSON(), null, 2) + "\n", "utf8");
  }

  /** Rehydrate a session from disk into the cache; null on any read/parse failure. */
  async load(id) {
    try {
      const raw = await readFile(path.join(SESSIONS_DIR, `${id}.json`), "utf8");
      const restored = new Session(JSON.parse(raw));
      this._sessions.set(id, restored);
      if (restored.channel && restored.chatId) {
        this._keyMap.set(`${restored.channel}:${restored.chatId}`, id);
      }
      return restored;
    } catch {
      return null;
    }
  }

  /**
   * Resolve a session for a composite key like "telegram:chatId": memory first,
   * then a disk scan, then a freshly created session as a last resort.
   */
  async getOrCreate(key, opts = {}) {
    // Fast path: the key is already indexed and the session is cached.
    const knownId = this._keyMap.get(key);
    if (knownId !== undefined && this._sessions.has(knownId)) {
      return this._sessions.get(knownId);
    }

    // Slow path: scan persisted sessions for one with a matching composite key.
    try {
      const files = await readdir(SESSIONS_DIR);
      for (const file of files) {
        if (!file.endsWith(".json")) continue;
        const candidate = await this.load(file.replace(".json", ""));
        if (!candidate) continue;
        if (candidate.channel && candidate.chatId &&
            `${candidate.channel}:${candidate.chatId}` === key) {
          return candidate;
        }
      }
    } catch { /* sessions dir might not exist yet */ }

    // Nothing found — mint a new session from the "channel:chatId" key parts.
    const sep = key.indexOf(":");
    const channel = sep === -1 ? null : key.slice(0, sep);
    const chatId = sep === -1 ? key : key.slice(sep + 1);
    const fresh = this.create({ channel, chatId, ...opts });
    this._keyMap.set(key, fresh.id);
    return fresh;
  }

  /** Message list for a cached session; [] when the id is unknown. */
  getMessages(id) {
    const target = this._sessions.get(id);
    return target ? target.messages : [];
  }
}
|
|
194
|
+
|
|
195
|
+
// Default global instance shared by importers of this module, so every caller
// sees the same in-memory session cache and key index.
export const sessionManager = new SessionManager();
|