@makolabs/ripple 0.4.1-0 → 0.5.0
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- package/README.md +165 -205
- package/dist/adapters/ai/OpenAIAdapter.d.ts +115 -0
- package/dist/adapters/ai/OpenAIAdapter.js +568 -0
- package/dist/adapters/ai/index.d.ts +3 -0
- package/dist/adapters/ai/index.js +3 -0
- package/dist/adapters/ai/types.d.ts +108 -0
- package/dist/adapters/ai/types.js +31 -0
- package/dist/adapters/storage/BaseAdapter.js +31 -31
- package/dist/ai/AIChatInterface.svelte +435 -0
- package/dist/ai/AIChatInterface.svelte.d.ts +18 -0
- package/dist/ai/ChatInput.svelte +211 -0
- package/dist/ai/ChatInput.svelte.d.ts +18 -0
- package/dist/ai/CodeRenderer.svelte +174 -0
- package/dist/ai/CodeRenderer.svelte.d.ts +8 -0
- package/dist/ai/ComposeDropdown.svelte +171 -0
- package/dist/ai/ComposeDropdown.svelte.d.ts +9 -0
- package/dist/ai/MermaidRenderer.svelte +89 -0
- package/dist/ai/MermaidRenderer.svelte.d.ts +7 -0
- package/dist/ai/MessageBox.svelte +403 -0
- package/dist/ai/MessageBox.svelte.d.ts +12 -0
- package/dist/ai/ThinkingDisplay.svelte +275 -0
- package/dist/ai/ThinkingDisplay.svelte.d.ts +9 -0
- package/dist/ai/ai-chat-interface.d.ts +161 -0
- package/dist/ai/ai-chat-interface.js +63 -0
- package/dist/ai/content-detector.d.ts +41 -0
- package/dist/ai/content-detector.js +153 -0
- package/dist/config/ai.d.ts +13 -0
- package/dist/config/ai.js +43 -0
- package/dist/elements/accordion/accordion.js +1 -1
- package/dist/elements/badge/Badge.svelte +14 -3
- package/dist/elements/dropdown/Dropdown.svelte +2 -2
- package/dist/elements/dropdown/Select.svelte +1 -1
- package/dist/elements/progress/Progress.svelte +7 -10
- package/dist/file-browser/FileBrowser.svelte +1 -1
- package/dist/forms/DateRange.svelte +18 -16
- package/dist/forms/NumberInput.svelte +1 -1
- package/dist/forms/RadioInputs.svelte +1 -1
- package/dist/forms/RadioPill.svelte +1 -1
- package/dist/forms/Tags.svelte +2 -2
- package/dist/helper/date.d.ts +1 -0
- package/dist/helper/date.js +6 -0
- package/dist/index.d.ts +65 -1
- package/dist/index.js +11 -0
- package/dist/layout/activity-list/ActivityList.svelte +94 -0
- package/dist/layout/activity-list/ActivityList.svelte.d.ts +4 -0
- package/dist/layout/activity-list/activity-list.d.ts +152 -0
- package/dist/layout/activity-list/activity-list.js +59 -0
- package/dist/layout/card/Card.svelte +1 -5
- package/dist/layout/card/metric-card.d.ts +18 -18
- package/dist/layout/table/Cells.svelte +1 -7
- package/dist/layout/table/Cells.svelte.d.ts +1 -1
- package/dist/modal/Modal.svelte +4 -2
- package/dist/modal/Modal.svelte.d.ts +1 -1
- package/dist/modal/modal.d.ts +19 -18
- package/dist/modal/modal.js +7 -6
- package/dist/sonner/sonner.svelte +1 -7
- package/dist/types/markdown.d.ts +14 -0
- package/dist/utils/Portal.svelte +1 -1
- package/package.json +128 -121
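
The headline change in 0.5.0 is a new AI layer: an OpenAI adapter under `dist/adapters/ai` plus a set of chat UI components under `dist/ai`. The two largest additions are reproduced below. For orientation, a consumer constructs the adapter with an API key and hands it messages; here is a minimal sketch, assuming the adapter and its context type are re-exported from the package root (the changes to `dist/index.d.ts` suggest this, but the exact export names are not shown in this diff):

```ts
// Hypothetical usage of the new adapter; the import path and env-var name are assumptions.
import { OpenAIAdapter } from '@makolabs/ripple';

const adapter = new OpenAIAdapter({
    // The adapter calls the OpenAI API from the browser, so this key is exposed
    // client-side; do not ship a production secret this way.
    apiKey: import.meta.env.VITE_OPENAI_API_KEY
});

// context.thinkingMode opts in to the reasoning summary (see sendMessage below).
const reply = await adapter.sendMessage('Hello!', { thinkingMode: true });
console.log(reply.content);
```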
package/dist/adapters/ai/OpenAIAdapter.js
@@ -0,0 +1,568 @@
+import { browser } from '$app/environment';
+import { AIConfigurationError, AINetworkError, AIRateLimitError } from './types.js';
+/**
+ * OpenAI adapter with simple sliding window memory
+ * Handles communication with OpenAI's GPT models with AI-generated summaries
+ */
+export class OpenAIAdapter {
+    config;
+    messages = [];
+    summary = '';
+    userProfile = {};
+    maxMessages = 45;
+    constructor(config) {
+        // Set defaults
+        this.config = {
+            model: 'gpt-5', // make this configurable
+            baseUrl: 'https://api.openai.com/v1', // make this configurable
+            systemPrompt: this.getDefaultSystemPrompt(),
+            temperature: 0.7,
+            maxTokens: 4000,
+            maintainHistory: true,
+            maxHistoryLength: 20,
+            ...config
+        };
+        if (!this.config.apiKey) {
+            throw new AIConfigurationError('OpenAI API key is required', this.getName());
+        }
+        // Load from storage
+        this.loadFromStorage();
+    }
+    getName() {
+        return 'OpenAI';
+    }
+    isConfigured() {
+        return !!this.config.apiKey && browser;
+    }
+    getDefaultSystemPrompt() {
+        return `You are a helpful AI assistant. Be concise and accurate.`;
+    }
+    getCodeRules() {
+        return `
+CODE RULES:
+- ALL code must use \`\`\`language blocks
+- Always specify language: python, javascript, bash, etc.
+- Never write code without fences`;
+    }
+    getMermaidRules() {
+        return `
+MERMAID RULES:
+- Use \`\`\`mermaid for all diagrams
+- FLOWCHARTS: Use 'graph TB' or 'graph LR' (NOT 'flowchart TD')
+- Node labels: Keep simple, avoid special chars like (){}[]<>
+- NO multi-line text or <br/> tags in node labels
+- Use underscores instead of spaces in node IDs
+- Node shapes: [text] rectangles, {text} diamonds, (text) rounded, ((text)) circles
+- Connections: --> for arrows, --- for lines
+- Styling: Use 'style NodeID fill:#color' format
+- Keep syntax minimal and parser-friendly
+- Example:
+\`\`\`mermaid
+graph TB
+A[Start] --> B[Process]
+B --> C{Decision}
+C -->|Yes| D[End]
+\`\`\``;
+    }
+    buildConditionalSystemPrompt(userMessage) {
+        let prompt = this.getDefaultSystemPrompt();
+        // Check if user message contains code-related keywords
+        const codeKeywords = /\b(code|function|class|variable|script|program|algorithm|syntax|debug|compile|execute|run|install|import|export|api|method|loop|condition|array|object|string|number|boolean|return|print|console|log|error|exception|try|catch|if|else|for|while|switch|case|break|continue|def|var|let|const|async|await|promise|callback|json|xml|html|css|javascript|python|java|cpp|csharp|php|ruby|go|rust|swift|kotlin|typescript|bash|shell|sql|yaml|dockerfile|markdown)\b/i;
+        // Check if user message contains mermaid/diagram keywords
+        const mermaidKeywords = /\b(mermaid|diagram|flowchart|chart|graph|flow|sequence|class|state|entity|relationship|gantt|pie|journey|gitgraph|mindmap|visualization|visualize|draw|show|create.*diagram|generate.*chart|make.*flow|workflow|process|architecture|schema|database|user.*journey|state.*machine|git.*flow|branch|timeline|roadmap)\b/i;
+        if (codeKeywords.test(userMessage)) {
+            prompt += this.getCodeRules();
+        }
+        if (mermaidKeywords.test(userMessage)) {
+            prompt += this.getMermaidRules();
+        }
+        // Add footer only if rules were added
+        if (codeKeywords.test(userMessage) || mermaidKeywords.test(userMessage)) {
+            prompt += `\n\nThese formatting rules are required for proper UI rendering.`;
+        }
+        return prompt;
+    }
+    async sendMessage(message, context) {
+        if (!this.isConfigured()) {
+            throw new AIConfigurationError('OpenAI adapter not properly configured', this.getName());
+        }
+        try {
+            const thinkingMode = context?.thinkingMode;
+            // Build context from summary + recent messages
+            const memoryContext = this.buildMemoryContext();
+            const enhancedSystemPrompt = this.buildEnhancedSystemPrompt(memoryContext, message);
+            // Build messages array for OpenAI
+            const messages = [
+                {
+                    role: 'system',
+                    content: enhancedSystemPrompt
+                },
+                {
+                    role: 'user',
+                    content: message
+                }
+            ];
+            // Prepare request body - reasoning models have different parameter requirements
+            const requestBody = {
+                model: this.config.model,
+                input: messages,
+                max_output_tokens: this.config.maxTokens,
+                ...(thinkingMode && { reasoning: {
+                        effort: 'medium',
+                        summary: 'auto'
+                    } })
+            };
+            // Call OpenAI API directly
+            const response = await fetch(`${this.config.baseUrl}/responses`, {
+                method: 'POST',
+                headers: {
+                    'Content-Type': 'application/json',
+                    Authorization: `Bearer ${this.config.apiKey}`
+                },
+                body: JSON.stringify(requestBody)
+            });
+            if (!response.ok) {
+                if (response.status === 429) {
+                    throw new AIRateLimitError('Rate limit exceeded', this.getName());
+                }
+                const errorData = await response.json().catch(() => ({}));
+                throw new AINetworkError(`OpenAI API error: ${response.status} - ${errorData.error?.message || 'Unknown error'}`, this.getName());
+            }
+            const openAIResponse = await response.json();
+            if (openAIResponse.error) {
+                throw new AINetworkError(`OpenAI API error: ${openAIResponse.error.message}`, this.getName());
+            }
+            if (!openAIResponse.output || openAIResponse.output.length === 0) {
+                throw new AINetworkError('No response from OpenAI', this.getName());
+            }
+            // Find reasoning and message content from output array
+            let aiResponseContent = '';
+            let reasoningContent = '';
+            for (const outputItem of openAIResponse.output) {
+                if (outputItem.type === 'reasoning' && outputItem.summary) {
+                    // Extract reasoning summary text
+                    reasoningContent = outputItem.summary
+                        .filter(s => s.type === 'summary_text')
+                        .map(s => s.text)
+                        .join('');
+                }
+                else if (outputItem.type === 'message' && outputItem.content) {
+                    // Extract message content text
+                    aiResponseContent = outputItem.content
+                        .filter(c => c.type === 'output_text')
+                        .map(c => c.text)
+                        .join('');
+                }
+            }
+            // Create chat response
+            const chatResponse = {
+                type: 'chat',
+                content: aiResponseContent,
+                thinkingContent: reasoningContent,
+                isThinkingComplete: !!reasoningContent
+            };
+            if (!chatResponse.type || !chatResponse.content) {
+                throw new AINetworkError('Invalid response format', this.getName());
+            }
+            // Add messages to memory with sliding window logic
+            if (this.config.maintainHistory) {
+                this.addToMemory(message, aiResponseContent, reasoningContent);
+            }
+            return chatResponse;
+        }
+        catch (error) {
+            if (error instanceof AIConfigurationError ||
+                error instanceof AINetworkError ||
+                error instanceof AIRateLimitError) {
+                throw error;
+            }
+            console.error('OpenAI adapter error:', error);
+            throw new AINetworkError(`Unexpected error: ${error instanceof Error ? error.message : 'Unknown error'}`, this.getName());
+        }
+    }
+    async sendMessageStream(message, onStream, context) {
+        if (!this.isConfigured()) {
+            throw new AIConfigurationError('OpenAI adapter not properly configured', this.getName());
+        }
+        try {
+            const thinkingMode = context?.thinkingMode;
+            // Build context from summary + recent messages
+            const memoryContext = this.buildMemoryContext();
+            const enhancedSystemPrompt = this.buildEnhancedSystemPrompt(memoryContext, message);
+            // Build messages array for OpenAI
+            const messages = [
+                {
+                    role: 'system',
+                    content: enhancedSystemPrompt
+                },
+                {
+                    role: 'user',
+                    content: message
+                }
+            ];
+            // Generate unique message ID for this streaming response
+            const messageId = this.generateId();
+            let fullContent = '';
+            let fullReasoning = '';
+            // Prepare streaming request body - reasoning models have different parameter requirements
+            const requestBody = {
+                model: this.config.model,
+                input: messages,
+                stream: true,
+                max_output_tokens: this.config.maxTokens,
+                ...(thinkingMode && { reasoning: {
+                        effort: 'medium',
+                        summary: 'auto'
+                    } })
+            };
+            // Call OpenAI API with streaming
+            const response = await fetch(`${this.config.baseUrl}/responses`, {
+                method: 'POST',
+                headers: {
+                    'Content-Type': 'application/json',
+                    Authorization: `Bearer ${this.config.apiKey}`
+                },
+                body: JSON.stringify(requestBody)
+            });
+            if (!response.ok) {
+                if (response.status === 429) {
+                    throw new AIRateLimitError('Rate limit exceeded', this.getName());
+                }
+                const errorData = await response.json().catch(() => ({}));
+                throw new AINetworkError(`OpenAI API error: ${response.status} - ${errorData.error?.message || 'Unknown error'}`, this.getName());
+            }
+            if (!response.body) {
+                throw new AINetworkError('No response body from OpenAI', this.getName());
+            }
+            // Process the stream
+            const reader = response.body.getReader();
+            const decoder = new TextDecoder();
+            try {
+                while (true) {
+                    const { done, value } = await reader.read();
+                    if (done)
+                        break;
+                    const chunk = decoder.decode(value, { stream: true });
+                    const lines = chunk.split('\n');
+                    for (const line of lines) {
+                        if (line.startsWith('event: ') || line.startsWith('data: ')) {
+                            // Skip event type lines, process data lines
+                            if (line.startsWith('data: ')) {
+                                const data = line.slice(6);
+                                try {
+                                    const parsed = JSON.parse(data);
+                                    // Handle reasoning summary text deltas
+                                    if (parsed.type === 'response.reasoning_summary_text.delta') {
+                                        if (parsed.delta) {
+                                            fullReasoning += parsed.delta;
+                                            // Send reasoning update
+                                            onStream({
+                                                type: 'chat',
+                                                content: fullContent,
+                                                messageId,
+                                                thinkingContent: fullReasoning,
+                                                isThinkingComplete: false,
+                                                isStreaming: true,
+                                                isStreamEnd: false
+                                            });
+                                        }
+                                    }
+                                    // Handle output text deltas (main content)
+                                    else if (parsed.type === 'response.output_text.delta') {
+                                        if (parsed.delta) {
+                                            fullContent += parsed.delta;
+                                            // Send content update
+                                            onStream({
+                                                type: 'chat',
+                                                content: fullContent,
+                                                messageId,
+                                                thinkingContent: fullReasoning || undefined,
+                                                isThinkingComplete: false,
+                                                isStreaming: true,
+                                                isStreamEnd: false
+                                            });
+                                        }
+                                    }
+                                    // Handle completion
+                                    else if (parsed.type === 'response.completed') {
+                                        // Send final streaming response
+                                        onStream({
+                                            type: 'chat',
+                                            content: fullContent,
+                                            messageId,
+                                            thinkingContent: fullReasoning || undefined,
+                                            isThinkingComplete: !!fullReasoning,
+                                            isStreaming: true,
+                                            isStreamEnd: true
+                                        });
+                                        break;
+                                    }
+                                }
+                                catch (e) {
+                                    // Skip invalid JSON chunks
+                                    continue;
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+            finally {
+                reader.releaseLock();
+            }
+            // Create final response
+            const chatResponse = {
+                type: 'chat',
+                content: fullContent,
+                messageId,
+                thinkingContent: fullReasoning || undefined,
+                isThinkingComplete: !!fullReasoning,
+                isStreaming: false,
+                isStreamEnd: true
+            };
+            // Add messages to memory with sliding window logic
+            if (this.config.maintainHistory) {
+                await this.addToMemory(message, fullContent, fullReasoning);
+            }
+            return chatResponse;
+        }
+        catch (error) {
+            if (error instanceof AIConfigurationError ||
+                error instanceof AINetworkError ||
+                error instanceof AIRateLimitError) {
+                throw error;
+            }
+            console.error('OpenAI streaming adapter error:', error);
+            throw new AINetworkError(`Unexpected streaming error: ${error instanceof Error ? error.message : 'Unknown error'}`, this.getName());
+        }
+    }
+    getHistory() {
+        return this.messages;
+    }
+    clearHistory() {
+        this.messages = [];
+        this.summary = '';
+        this.userProfile = {};
+        this.saveToStorage();
+    }
+    setSystemPrompt(prompt) {
+        this.config.systemPrompt = prompt;
+    }
+    getSystemPrompt() {
+        return this.config.systemPrompt;
+    }
+    /**
+     * Update configuration
+     */
+    updateConfig(newConfig) {
+        this.config = { ...this.config, ...newConfig };
+    }
+    /**
+     * Get current configuration (without API key for security)
+     */
+    getConfig() {
+        const { apiKey, ...configWithoutKey } = this.config;
+        return configWithoutKey;
+    }
+    /**
+     * Add message to memory with sliding window + AI summary logic
+     */
+    async addToMemory(userMessage, aiResponse, reasoning) {
+        // Add messages
+        this.messages.push({
+            id: this.generateId(),
+            type: 'chat',
+            content: `User: ${userMessage}`,
+            timestamp: new Date()
+        });
+        this.messages.push({
+            id: this.generateId(),
+            type: 'chat',
+            content: `${aiResponse}`,
+            timestamp: new Date(),
+            ...(reasoning && {
+                thinkingContent: reasoning,
+                isThinkingComplete: true
+            })
+        });
+        // Extract user name if available
+        this.extractUserInfo(userMessage);
+        // Check if we need to summarize (at maxMessages - 1)
+        if (this.messages.length >= this.maxMessages - 1) {
+            await this.createSummaryAndTruncate();
+        }
+        this.saveToStorage();
+    }
+    /**
+     * Create AI summary and truncate old messages
+     */
+    async createSummaryAndTruncate() {
+        try {
+            // Build conversation text
+            const conversationText = this.messages.map((msg) => msg.content).join('\n');
+            // Create summary prompt
+            let summaryPrompt = 'Summarize this conversation preserving key details like names, preferences, established facts, and context:\n\n';
+            if (this.summary) {
+                summaryPrompt += `Previous summary: ${this.summary}\n\nRecent conversation:\n`;
+            }
+            summaryPrompt += conversationText + '\n\nConcise summary:';
+            // Generate summary using AI
+            const newSummary = await this.generateSummaryWithAI(summaryPrompt);
+            if (newSummary) {
+                this.summary = newSummary;
+                // Keep only last 2 messages
+                this.messages = this.messages.slice(-2);
+            }
+        }
+        catch (error) {
+            console.warn('Failed to create summary:', error);
+        }
+    }
+    /**
+     * Generate summary using OpenAI
+     */
+    async generateSummaryWithAI(prompt) {
+        try {
+            const response = await fetch(`${this.config.baseUrl}/responses`, {
+                method: 'POST',
+                headers: {
+                    'Content-Type': 'application/json',
+                    Authorization: `Bearer ${this.config.apiKey}`
+                },
+                body: JSON.stringify({
+                    model: this.config.model,
+                    input: [
+                        {
+                            role: 'system',
+                            content: 'You are a helpful assistant that creates concise conversation summaries.'
+                        },
+                        { role: 'user', content: prompt }
+                    ],
+                    temperature: 0.3,
+                    max_tokens: 200
+                })
+            });
+            if (response.ok) {
+                const data = await response.json();
+                // Extract content from the new response format
+                for (const outputItem of data.output || []) {
+                    if (outputItem.type === 'message' && outputItem.content) {
+                        const content = outputItem.content
+                            .filter(c => c.type === 'output_text')
+                            .map(c => c.text)
+                            .join('');
+                        return content.trim() || null;
+                    }
+                }
+                return null;
+            }
+        }
+        catch (error) {
+            console.error('AI summary generation failed:', error);
+        }
+        return null;
+    }
+    /**
+     * Build memory context from summary + recent messages
+     */
+    buildMemoryContext() {
+        let context = '';
+        // Add user info
+        if (this.userProfile.name) {
+            context += `User: ${this.userProfile.name}\n`;
+        }
+        // Add summary
+        if (this.summary) {
+            context += `\nPrevious conversation summary:\n${this.summary}\n`;
+        }
+        // Add recent messages
+        if (this.messages.length > 0) {
+            context += '\nRecent conversation:\n';
+            this.messages.forEach((msg) => {
+                context += `${msg.content}\n`;
+            });
+        }
+        return context.trim();
+    }
+    /**
+     * Extract user information
+     */
+    extractUserInfo(message) {
+        const nameMatch = message.match(/(?:my name is|i'm|i am|call me)\s+([a-zA-Z0-9_]+)/i);
+        if (nameMatch && nameMatch[1]) {
+            this.userProfile.name = nameMatch[1];
+        }
+    }
+    /**
+     * Save to localStorage
+     */
+    saveToStorage() {
+        if (typeof localStorage === 'undefined')
+            return;
+        try {
+            const data = {
+                messages: this.messages,
+                summary: this.summary,
+                userProfile: this.userProfile
+            };
+            localStorage.setItem(`openai-memory-${this.config.apiKey.slice(-8)}`, JSON.stringify(data));
+        }
+        catch (error) {
+            console.warn('Failed to save memory:', error);
+        }
+    }
+    /**
+     * Load from localStorage
+     */
+    loadFromStorage() {
+        if (typeof localStorage === 'undefined')
+            return;
+        try {
+            const stored = localStorage.getItem(`openai-memory-${this.config.apiKey.slice(-8)}`);
+            if (stored) {
+                const data = JSON.parse(stored);
+                this.messages = (data.messages || []).map((msg) => ({
+                    ...msg,
+                    timestamp: new Date(msg.timestamp),
+                    // Ensure thinking content is marked as complete when loaded from storage
+                    ...(msg.thinkingContent && { isThinkingComplete: true })
+                }));
+                this.summary = data.summary || '';
+                this.userProfile = data.userProfile || {};
+            }
+        }
+        catch (error) {
+            console.warn('Failed to load memory:', error);
+        }
+    }
+    generateId() {
+        return `msg_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
+    }
+    /**
+     * Build enhanced system prompt with memory context
+     */
+    buildEnhancedSystemPrompt(memoryContext, userMessage) {
+        let enhancedPrompt = this.buildConditionalSystemPrompt(userMessage);
+        if (memoryContext.trim()) {
+            enhancedPrompt += '\n\n--- CONVERSATION CONTEXT ---\n' + memoryContext;
+            enhancedPrompt +=
+                '\n\nIMPORTANT: Use the context above to maintain consistency. If you have an established identity, age, or preferences, continue using them. Reference previous conversations when relevant.';
+        }
+        return enhancedPrompt;
+    }
+    /**
+     * Get memory statistics
+     */
+    getMemoryStats() {
+        return {
+            totalMessages: this.messages.length,
+            hasSummary: this.summary.length > 0
+        };
+    }
+    /**
+     * Get the context that will be sent to AI
+     */
+    getContextForAI() {
+        return this.buildMemoryContext();
+    }
+}
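
The streaming path re-invokes the `onStream` callback with the accumulated text on every delta, so a consumer only needs to render the latest chunk it receives. A sketch of that consumption, using the adapter from the earlier example (the callback fields come from the chunks emitted above; the rendering side is left abstract):

```ts
// Illustrative consumer of sendMessageStream; `adapter` is the instance
// constructed in the earlier sketch.
let rendered = '';
const finalResponse = await adapter.sendMessageStream(
    'Explain the sliding-window memory',
    (chunk) => {
        rendered = chunk.content; // full text so far, not just the delta
        if (chunk.thinkingContent) {
            // reasoning summary streams separately when thinkingMode is on
        }
        if (chunk.isStreamEnd) {
            // re-enable the input, persist the message, etc.
        }
    },
    { thinkingMode: true }
);
```

Note that conversation memory persists to localStorage under a key derived from the last eight characters of the API key, so histories are scoped per key and survive reloads until `clearHistory()` is called.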
package/dist/adapters/ai/types.d.ts
@@ -0,0 +1,108 @@
+import type { ChatResponse, ChatMessage, QuickAction, StreamingCallback } from '../../index.js';
+/**
+ * Base interface for AI adapters
+ */
+export interface AIAdapter {
+    /**
+     * Name of the AI provider
+     */
+    getName(): string;
+    /**
+     * Check if the adapter is properly configured
+     */
+    isConfigured(): boolean;
+    /**
+     * Send a message to the AI and get a response
+     */
+    sendMessage(message: string, context?: AIContext): Promise<ChatResponse>;
+    /**
+     * Send a message with streaming support (optional)
+     */
+    sendMessageStream?(message: string, onStream: StreamingCallback, context?: AIContext): Promise<ChatResponse>;
+    /**
+     * Get conversation history if supported
+     */
+    getHistory?(): ChatMessage[];
+    /**
+     * Clear conversation history if supported
+     */
+    clearHistory?(): void;
+    /**
+     * Set system prompt if supported
+     */
+    setSystemPrompt?(prompt: string): void;
+    /**
+     * Get current system prompt if supported
+     */
+    getSystemPrompt?(): string;
+    /**
+     * Get quick actions for this adapter
+     */
+    getQuickActions?(): QuickAction[];
+    /**
+     * Set quick actions
+     */
+    setQuickActions?(actions: QuickAction[]): void;
+    /**
+     * Get memory manager if supported
+     */
+    getMemoryManager?(): MemoryManager | null;
+    /**
+     * Get memory statistics if supported
+     */
+    getMemoryStats?(): any;
+    /**
+     * Get essential context if supported
+     */
+    getEssentialContext?(): any;
+    /**
+     * Get context prompt if supported
+     */
+    getContextPrompt?(): string;
+}
+/**
+ * Context information passed to AI adapters
+ */
+export interface AIContext {
+    /**
+     * Current page or route
+     */
+    page?: string;
+    /**
+     * User information
+     */
+    user?: {
+        id?: string;
+        role?: string;
+        preferences?: Record<string, unknown>;
+    };
+    /**
+     * Application state
+     */
+    appState?: Record<string, unknown>;
+    /**
+     * Whether thinking/reasoning mode is enabled
+     */
+    thinkingMode?: boolean;
+    /**
+     * Additional metadata
+     */
+    metadata?: Record<string, unknown>;
+}
+/**
+ * Error types for AI adapters
+ */
+export declare class AIAdapterError extends Error {
+    code: string;
+    adapter: string;
+    constructor(message: string, code: string, adapter: string);
+}
+export declare class AIConfigurationError extends AIAdapterError {
+    constructor(message: string, adapter: string);
+}
+export declare class AINetworkError extends AIAdapterError {
+    constructor(message: string, adapter: string);
+}
+export declare class AIRateLimitError extends AIAdapterError {
+    constructor(message: string, adapter: string);
+}
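
Because only `getName`, `isConfigured`, and `sendMessage` are required, a custom or test adapter stays small. A sketch, assuming these types are re-exported from the package root (the new three-line `dist/adapters/ai/index.d.ts` suggests re-exports, but the exact paths are not shown in this diff); note also that `getMemoryManager` references a `MemoryManager` type this file never imports, so strict consumers may hit a missing-name error on that member:

```ts
// Hypothetical test double; import path is an assumption.
import type { AIAdapter, AIContext } from '@makolabs/ripple';

// Implements only the required members of AIAdapter; the optional ones
// (streaming, history, memory) are simply omitted. The response shape
// mirrors what OpenAIAdapter returns ({ type: 'chat', content }).
const echoAdapter: AIAdapter = {
    getName: () => 'Echo',
    isConfigured: () => true,
    async sendMessage(message: string, _context?: AIContext) {
        return { type: 'chat', content: `You said: ${message}` };
    }
};
```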