lemonade-sdk 9.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lemonade/__init__.py +5 -0
- lemonade/api.py +180 -0
- lemonade/cache.py +92 -0
- lemonade/cli.py +173 -0
- lemonade/common/__init__.py +0 -0
- lemonade/common/build.py +176 -0
- lemonade/common/cli_helpers.py +139 -0
- lemonade/common/exceptions.py +98 -0
- lemonade/common/filesystem.py +368 -0
- lemonade/common/inference_engines.py +408 -0
- lemonade/common/network.py +93 -0
- lemonade/common/printing.py +110 -0
- lemonade/common/status.py +471 -0
- lemonade/common/system_info.py +1411 -0
- lemonade/common/test_helpers.py +28 -0
- lemonade/profilers/__init__.py +1 -0
- lemonade/profilers/agt_power.py +437 -0
- lemonade/profilers/hwinfo_power.py +429 -0
- lemonade/profilers/memory_tracker.py +259 -0
- lemonade/profilers/profiler.py +58 -0
- lemonade/sequence.py +363 -0
- lemonade/state.py +159 -0
- lemonade/tools/__init__.py +1 -0
- lemonade/tools/accuracy.py +432 -0
- lemonade/tools/adapter.py +114 -0
- lemonade/tools/bench.py +302 -0
- lemonade/tools/flm/__init__.py +1 -0
- lemonade/tools/flm/utils.py +305 -0
- lemonade/tools/huggingface/bench.py +187 -0
- lemonade/tools/huggingface/load.py +235 -0
- lemonade/tools/huggingface/utils.py +359 -0
- lemonade/tools/humaneval.py +264 -0
- lemonade/tools/llamacpp/bench.py +255 -0
- lemonade/tools/llamacpp/load.py +222 -0
- lemonade/tools/llamacpp/utils.py +1260 -0
- lemonade/tools/management_tools.py +319 -0
- lemonade/tools/mmlu.py +319 -0
- lemonade/tools/oga/__init__.py +0 -0
- lemonade/tools/oga/bench.py +120 -0
- lemonade/tools/oga/load.py +804 -0
- lemonade/tools/oga/migration.py +403 -0
- lemonade/tools/oga/utils.py +462 -0
- lemonade/tools/perplexity.py +147 -0
- lemonade/tools/prompt.py +263 -0
- lemonade/tools/report/__init__.py +0 -0
- lemonade/tools/report/llm_report.py +203 -0
- lemonade/tools/report/table.py +899 -0
- lemonade/tools/server/__init__.py +0 -0
- lemonade/tools/server/flm.py +133 -0
- lemonade/tools/server/llamacpp.py +320 -0
- lemonade/tools/server/serve.py +2123 -0
- lemonade/tools/server/static/favicon.ico +0 -0
- lemonade/tools/server/static/index.html +279 -0
- lemonade/tools/server/static/js/chat.js +1059 -0
- lemonade/tools/server/static/js/model-settings.js +183 -0
- lemonade/tools/server/static/js/models.js +1395 -0
- lemonade/tools/server/static/js/shared.js +556 -0
- lemonade/tools/server/static/logs.html +191 -0
- lemonade/tools/server/static/styles.css +2654 -0
- lemonade/tools/server/static/webapp.html +321 -0
- lemonade/tools/server/tool_calls.py +153 -0
- lemonade/tools/server/tray.py +664 -0
- lemonade/tools/server/utils/macos_tray.py +226 -0
- lemonade/tools/server/utils/port.py +77 -0
- lemonade/tools/server/utils/thread.py +85 -0
- lemonade/tools/server/utils/windows_tray.py +408 -0
- lemonade/tools/server/webapp.py +34 -0
- lemonade/tools/server/wrapped_server.py +559 -0
- lemonade/tools/tool.py +374 -0
- lemonade/version.py +1 -0
- lemonade_install/__init__.py +1 -0
- lemonade_install/install.py +239 -0
- lemonade_sdk-9.1.1.dist-info/METADATA +276 -0
- lemonade_sdk-9.1.1.dist-info/RECORD +84 -0
- lemonade_sdk-9.1.1.dist-info/WHEEL +5 -0
- lemonade_sdk-9.1.1.dist-info/entry_points.txt +5 -0
- lemonade_sdk-9.1.1.dist-info/licenses/LICENSE +201 -0
- lemonade_sdk-9.1.1.dist-info/licenses/NOTICE.md +47 -0
- lemonade_sdk-9.1.1.dist-info/top_level.txt +3 -0
- lemonade_server/cli.py +805 -0
- lemonade_server/model_manager.py +758 -0
- lemonade_server/pydantic_models.py +159 -0
- lemonade_server/server_models.json +643 -0
- lemonade_server/settings.py +39 -0
|
@@ -0,0 +1,1059 @@
|
|
|
1
|
+
// Chat logic and functionality

// Conversation history sent to the completion endpoint.
let messages = [];
// Image File objects staged for the next user message.
let attachedFiles = [];
// The welcome/system bubble currently shown (null once real chat starts).
let systemMessageElement = null;
// AbortController for the in-flight streaming request; null when idle.
let abortController = null;

// Milliseconds between "Thinking..." label animation frames.
const THINKING_ANIM_INTERVAL_MS = 550;
// Toggle this to false if you prefer plain dots only.
const THINKING_USE_LEMON = true;
const THINKING_FRAMES = THINKING_USE_LEMON
    ? ['Thinking.','Thinking..','Thinking...','Thinking 🍋']
    : ['Thinking.','Thinking..','Thinking...'];

// Get DOM elements (cached once on DOMContentLoaded)
let chatHistory, chatInput, attachmentBtn, fileAttachment, attachmentsPreviewContainer, attachmentsPreviewRow, modelSelect, toggleBtn;
// Track if a stream is currently active (separate from abortController existing briefly before validation)
let isStreaming = false;
// When the user scrolls up in the chat history, disable automatic scrolling until they scroll back to the bottom.
let autoscrollEnabled = true;
// How close (px) to the bottom still counts as "at the bottom" for autoscroll.
const AUTOSCROLL_TOLERANCE_PX = 10;
|
|
21
|
+
|
|
22
|
+
// Initialize chat functionality when DOM is loaded
document.addEventListener('DOMContentLoaded', function() {
    // Cache all frequently used DOM elements once.
    chatHistory = document.getElementById('chat-history');
    chatInput = document.getElementById('chat-input');
    toggleBtn = document.getElementById('toggle-btn');
    attachmentBtn = document.getElementById('attachment-btn');
    fileAttachment = document.getElementById('file-attachment');
    attachmentsPreviewContainer = document.getElementById('attachments-preview-container');
    attachmentsPreviewRow = document.getElementById('attachments-preview-row');
    modelSelect = document.getElementById('model-select');

    // Set up event listeners
    setupChatEventListeners();

    // Pause autoscroll when user scrolls up in the chat history. If they scroll back to bottom, resume.
    if (chatHistory) {
        chatHistory.addEventListener('scroll', function () {
            try {
                // Within AUTOSCROLL_TOLERANCE_PX of the bottom counts as "at bottom".
                const atBottom = chatHistory.scrollTop + chatHistory.clientHeight >= chatHistory.scrollHeight - AUTOSCROLL_TOLERANCE_PX;
                if (atBottom) {
                    if (!autoscrollEnabled) {
                        autoscrollEnabled = true;
                        chatHistory.classList.remove('autoscroll-paused');
                    }
                } else {
                    if (autoscrollEnabled) {
                        autoscrollEnabled = false;
                        chatHistory.classList.add('autoscroll-paused');
                    }
                }
            } catch (_) {}
        });
    }

    // Initialize model dropdown (will be populated when models.js calls updateModelStatusIndicator)
    initializeModelDropdown();

    // Update attachment button state periodically (also refreshes dropdown,
    // Send/Stop button, and system message — see updateAttachmentButtonState).
    updateAttachmentButtonState();
    setInterval(updateAttachmentButtonState, 1000);

    // Display initial system message
    displaySystemMessage();
});
|
|
66
|
+
|
|
67
|
+
/**
 * Wire up all chat-related UI event handlers: the Send/Stop toggle button,
 * keyboard shortcuts and paste handling on the input box, the image
 * attachment picker, and the model dropdown. Called once on DOMContentLoaded,
 * after the element references have been cached.
 */
function setupChatEventListeners() {
    // Send/stop toggle: an in-flight request (abortController set) means "Stop".
    toggleBtn.onclick = function () {
        if (abortController) {
            abortCurrentRequest();
        } else {
            sendMessage();
        }
    };

    // Enter sends, Escape clears attachments; paste may carry images.
    chatInput?.addEventListener('keydown', handleChatInputKeydown);
    chatInput?.addEventListener('paste', handleChatInputPaste);

    // Attachment button opens the hidden file input; the 'change' handler
    // validates vision capability and file types.
    if (attachmentBtn && fileAttachment) {
        attachmentBtn.addEventListener('click', () => fileAttachment.click());
        fileAttachment.addEventListener('change', handleFileSelection);
    }

    // Load a different model when the dropdown selection changes.
    modelSelect?.addEventListener('change', handleModelSelectChange);
}
|
|
97
|
+
|
|
98
|
+
// Initialize model dropdown with available models.
// Re-invoked by models.js (via window.initializeModelDropdown) whenever the
// installed-model set or server status changes.
function initializeModelDropdown() {
    const allModels = window.SERVER_MODELS || {};

    // Keep the "server offline" sentinel selection when the status indicator
    // reports offline; otherwise rebuild the list from the placeholder option.
    // Bug fix: guard against the indicator element being absent — the
    // unguarded `indicator.classList` deref would throw and abort the whole
    // dropdown rebuild.
    const indicator = document.getElementById('model-status-indicator');
    if ((indicator && indicator.classList.contains('offline')) || modelSelect.value === 'server-offline') {
        modelSelect.value = 'server-offline';
    } else {
        modelSelect.innerHTML = '<option value="">Click to select a model ▼</option>';
    }
    // Add only installed models to dropdown
    Object.keys(allModels).forEach(modelId => {
        // Only add if the model is installed
        if (window.installedModels && window.installedModels.has(modelId)) {
            const option = document.createElement('option');
            option.value = modelId;
            option.textContent = modelId;
            modelSelect.appendChild(option);
        }
    });

    // Set current selection based on loaded model
    updateModelSelectValue();
}
|
|
123
|
+
|
|
124
|
+
// Make dropdown initialization accessible globally so models.js can refresh it
|
|
125
|
+
window.initializeModelDropdown = initializeModelDropdown;
|
|
126
|
+
|
|
127
|
+
// Update model select value to match currently loaded model.
// Leaves the current selection untouched when neither a loaded model nor the
// offline sentinel applies.
function updateModelSelectValue() {
    // Bug fix: guard against the status-indicator element being absent — the
    // unguarded `indicator.classList` deref would throw. Also drops the
    // original's dead trailing `else { return; }`.
    const indicator = document.getElementById('model-status-indicator');
    const isLoading = !!(indicator && indicator.classList.contains('loading'));
    const isOffline = !!(indicator && indicator.classList.contains('offline'));

    if (currentLoadedModel && isLoading) {
        // A load is in progress: select the hidden "loading" sentinel option.
        modelSelect.value = 'loading-model';
    } else if (currentLoadedModel) {
        modelSelect.value = currentLoadedModel;
    } else if (isOffline && modelSelect.value === 'server-offline') {
        // Keep the offline sentinel selection (self-assignment, intentional).
        modelSelect.value = 'server-offline';
    }
}
|
|
140
|
+
|
|
141
|
+
// Make updateModelSelectValue accessible globally (models.js uses it to keep
// the dropdown in sync with the currently loaded model).
window.updateModelSelectValue = updateModelSelectValue;
|
|
143
|
+
|
|
144
|
+
// Handle model selection change.
// Kicks off a load of the newly selected model via loadModelStandardized
// (defined in models.js) and keeps the dropdown/send-button UI in sync
// through the load lifecycle callbacks.
async function handleModelSelectChange() {
    const selectedModel = modelSelect.value;

    if (!selectedModel) return; // "Click to select a model ▼" selected
    if (selectedModel === currentLoadedModel) return; // Same model already loaded

    // Use the standardized load function
    await loadModelStandardized(selectedModel, {
        onLoadingStart: (modelId) => {
            // Update dropdown to show loading state with model name
            const loadingOption = document.createElement('option');
            const select = document.getElementById('model-select');
            select.innerHTML = '';

            if (loadingOption) {
                loadingOption.value = 'loading-model';
                loadingOption.textContent = `Loading ${modelId}...`;
                // Hidden so it is not user-selectable; only shown as the current value.
                loadingOption.hidden = true;
                select.appendChild(loadingOption);
            }
            // Gray out send button during loading
            updateAttachmentButtonState();
        },
        onLoadingEnd: (modelId, success) => {
            // Reset the default option text
            const defaultOption = modelSelect.querySelector('option[value=""]');
            if (defaultOption) defaultOption.textContent = 'Click to select a model ▼';
            // Update button state after loading completes
            updateAttachmentButtonState();
        },
        onSuccess: () => {
            updateAttachmentButtonState();
        },
        onError: () => {
            // Revert the dropdown to whatever model is actually loaded.
            updateModelSelectValue();
            updateAttachmentButtonState();
        }
    });
}
|
|
184
|
+
|
|
185
|
+
// Update attachment button state based on current model.
// Also doubles as the periodic UI refresh (called every second from the
// DOMContentLoaded handler): syncs the dropdown selection, the Send/Stop
// toggle, the attachment button styling, and the welcome system message.
function updateAttachmentButtonState() {
    // Update model dropdown selection
    updateModelSelectValue();

    // Update send button state based on model loading
    if (toggleBtn) {
        const loading = !!(modelSelect && modelSelect.disabled);
        if (isStreaming) {
            // While streaming, the toggle acts as a Stop button and stays enabled.
            toggleBtn.disabled = false;
            toggleBtn.textContent = 'Stop';
        } else {
            // Gray out send button if no model is loaded or if loading
            toggleBtn.disabled = loading || !currentLoadedModel;
            toggleBtn.textContent = 'Send';
        }
    }

    if (!currentLoadedModel) {
        attachmentBtn.style.opacity = '0.5';
        attachmentBtn.style.cursor = 'not-allowed';
        attachmentBtn.title = 'Load a model first';
    } else {
        // Attachments are only meaningful for vision-capable models.
        const isVision = isVisionModel(currentLoadedModel);

        if (isVision) {
            attachmentBtn.style.opacity = '1';
            attachmentBtn.style.cursor = 'pointer';
            attachmentBtn.title = 'Attach images';
        } else {
            attachmentBtn.style.opacity = '0.5';
            attachmentBtn.style.cursor = 'not-allowed';
            attachmentBtn.title = 'Image attachments not supported by this model';
        }
    }

    // Update system message when model state changes
    displaySystemMessage();
}

// Make updateAttachmentButtonState accessible globally
window.updateAttachmentButtonState = updateAttachmentButtonState;

// Make displaySystemMessage accessible globally
window.displaySystemMessage = displaySystemMessage;
|
|
230
|
+
|
|
231
|
+
/**
 * Re-validate any attached images against the currently loaded model.
 * If images are attached but the model lacks vision support, offer to drop
 * them; then refresh the attachment/send UI either way.
 */
function checkCurrentModel() {
    const hasIncompatibleAttachments =
        attachedFiles.length > 0 &&
        currentLoadedModel &&
        !isVisionModel(currentLoadedModel);

    if (hasIncompatibleAttachments &&
        confirm(`The current model "${currentLoadedModel}" does not support images. Would you like to remove the attached images?`)) {
        clearAttachments();
    }

    updateAttachmentButtonState();
}
|
|
240
|
+
|
|
241
|
+
// Handle file selection.
// 'change' handler for the hidden <input type="file">: validates that a
// vision-capable model is loaded, keeps only image/* files, and stages them
// in attachedFiles for the next message (replacing any previous selection).
function handleFileSelection() {
    if (fileAttachment.files.length > 0) {
        // Check if current model supports vision
        if (!currentLoadedModel) {
            alert('Please load a model first before attaching images.');
            fileAttachment.value = '';
            return;
        }
        if (!isVisionModel(currentLoadedModel)) {
            alert(`The current model "${currentLoadedModel}" does not support image inputs. Please load a model with "Vision" capabilities.`);
            fileAttachment.value = '';
            return;
        }

        // Filter only image files
        const imageFiles = Array.from(fileAttachment.files).filter(file => {
            if (!file.type.startsWith('image/')) {
                console.warn(`Skipping non-image file: ${file.name} (${file.type})`);
                return false;
            }
            return true;
        });

        if (imageFiles.length === 0) {
            alert('Please select only image files (PNG, JPG, GIF, etc.)');
            fileAttachment.value = '';
            return;
        }

        // Some (but not all) files were skipped — tell the user how many.
        if (imageFiles.length !== fileAttachment.files.length) {
            alert(`${fileAttachment.files.length - imageFiles.length} non-image file(s) were skipped. Only image files are supported.`);
        }

        // NOTE: a new selection replaces (does not append to) prior attachments.
        attachedFiles = imageFiles;
        updateInputPlaceholder();
        updateAttachmentPreviewVisibility();
        updateAttachmentPreviews();
    }
}
|
|
281
|
+
|
|
282
|
+
/**
 * Keyboard shortcuts for the chat input:
 *   Escape -> clear attached images (only when some are attached)
 *   Enter  -> send the message (Shift+Enter inserts a newline as usual)
 * Sending is suppressed while no model is loaded or a load is in progress.
 * @param {KeyboardEvent} e
 */
function handleChatInputKeydown(e) {
    const clearingAttachments = e.key === 'Escape' && attachedFiles.length > 0;

    if (clearingAttachments) {
        e.preventDefault();
        clearAttachments();
        return;
    }

    if (e.key === 'Enter' && !e.shiftKey) {
        e.preventDefault();
        // Only send if we have a loaded model; otherwise the Send button is
        // grayed out and the keystroke is simply swallowed.
        const readyToSend = currentLoadedModel && modelSelect.value !== '' && !modelSelect.disabled;
        if (readyToSend) {
            sendMessage();
        }
    }
}
|
|
296
|
+
|
|
297
|
+
// Handle paste events for images.
// Pasted image/* clipboard items are staged in attachedFiles (after a vision
// capability check); text-only pastes fall through to the browser's default
// handling so cursor position and undo history are preserved.
async function handleChatInputPaste(e) {
    const clipboardData = e.clipboardData || window.clipboardData;
    const items = clipboardData.items;
    let hasImage = false;
    let pastedText = '';

    // Check for text content first
    for (let item of items) {
        if (item.type === 'text/plain') {
            pastedText = clipboardData.getData('text/plain');
        }
    }

    // Check for images
    for (let item of items) {
        if (item.type.indexOf('image') !== -1) {
            hasImage = true;
            const file = item.getAsFile();
            if (file && file.type.startsWith('image/')) {
                // Check if current model supports vision before adding image.
                // Bug fix: validate against currentLoadedModel (as
                // handleFileSelection does) instead of modelSelect.value,
                // which can be '' or the 'loading-model' sentinel and would
                // yield a wrong result plus a misleading alert message.
                const currentModel = currentLoadedModel;
                if (!isVisionModel(currentModel)) {
                    alert(`The selected model "${currentModel}" does not support image inputs. Please select a model with "Vision" capabilities to paste images.`);
                    // Don't prevent default if we're not handling the paste
                    return;
                }
                // Add to attachedFiles array only if it's an image and model supports vision
                attachedFiles.push(file);
            } else if (file) {
                console.warn(`Skipping non-image pasted file: ${file.name || 'unknown'} (${file.type})`);
            }
        }
    }

    // If we have images, prevent default and handle specially
    if (hasImage && attachedFiles.length > 0) {
        e.preventDefault();

        // If there's also text, insert it at cursor position
        if (pastedText) {
            insertTextAtCursor(chatInput, pastedText);
        }

        // Update placeholder to show attached images
        updateInputPlaceholder();
        updateAttachmentPreviewVisibility();
        updateAttachmentPreviews();
    }
    // If no images, let the browser handle the paste normally (preserves cursor position and undo)
}
|
|
348
|
+
|
|
349
|
+
/**
 * Insert `text` into a text input/textarea at the current selection,
 * replacing any selected range, then place the caret right after the
 * inserted text and focus the element.
 * @param {HTMLTextAreaElement|HTMLInputElement} textElement - target field
 * @param {string} text - text to insert
 */
function insertTextAtCursor(textElement, text) {
    const { selectionStart, selectionEnd, value } = textElement;

    // Splice the new text over the selected range.
    textElement.value = value.slice(0, selectionStart) + text + value.slice(selectionEnd);

    // Caret lands immediately after the insertion; focus so it is visible.
    const caret = selectionStart + text.length;
    textElement.setSelectionRange(caret, caret);
    textElement.focus();
}
|
|
366
|
+
|
|
367
|
+
/**
 * Remove every attached image, reset the native file input, and refresh all
 * attachment-related UI (placeholder text, preview strip visibility, previews).
 */
function clearAttachments() {
    attachedFiles = [];
    fileAttachment.value = '';
    // Refresh every piece of UI derived from the attachment list.
    [updateInputPlaceholder, updateAttachmentPreviewVisibility, updateAttachmentPreviews]
        .forEach((refresh) => refresh());
}
|
|
374
|
+
|
|
375
|
+
/**
 * Show or hide the attachment preview strip depending on whether any images
 * are currently attached (CSS keys off the 'has-attachments' class).
 */
function updateAttachmentPreviewVisibility() {
    const hasAttachments = attachedFiles.length > 0;
    attachmentsPreviewContainer.classList.toggle('has-attachments', hasAttachments);
}
|
|
382
|
+
|
|
383
|
+
// Rebuild the attachment preview strip: one thumbnail + filename + remove
// button per attached image. Called whenever attachedFiles changes.
function updateAttachmentPreviews() {
    // Clear existing previews
    attachmentsPreviewRow.innerHTML = '';

    if (attachedFiles.length === 0) return;

    attachedFiles.forEach((file, index) => {
        // Skip non-image files (extra safety check)
        if (!file.type.startsWith('image/')) {
            console.warn(`Skipping non-image file in preview: ${file.name} (${file.type})`);
            return;
        }

        const previewDiv = document.createElement('div');
        previewDiv.className = 'attachment-preview';

        // Create thumbnail
        const thumbnail = document.createElement('img');
        thumbnail.className = 'attachment-thumbnail';
        thumbnail.alt = file.name;

        // Create filename display (pasted images may have no name; synthesize one)
        const filename = document.createElement('div');
        filename.className = 'attachment-filename';
        filename.textContent = file.name || `pasted-image-${index + 1}`;
        filename.title = file.name || `pasted-image-${index + 1}`;

        // Create remove button
        const removeBtn = document.createElement('button');
        removeBtn.className = 'attachment-remove-btn';
        removeBtn.innerHTML = '✕';
        removeBtn.title = 'Remove this image';
        removeBtn.onclick = () => removeAttachment(index);

        // Generate thumbnail for image (async; src is filled in when the read completes)
        const reader = new FileReader();
        reader.onload = (e) => { thumbnail.src = e.target.result; };
        reader.readAsDataURL(file);

        previewDiv.appendChild(thumbnail);
        previewDiv.appendChild(filename);
        previewDiv.appendChild(removeBtn);
        attachmentsPreviewRow.appendChild(previewDiv);
    });
}
|
|
428
|
+
|
|
429
|
+
/**
 * Remove a single attached image by index and refresh the attachment UI.
 * Also resets the native file input once the last attachment is gone, so
 * re-selecting the same file fires a fresh 'change' event (mirrors
 * clearAttachments()).
 * @param {number} index - position in attachedFiles to remove
 */
function removeAttachment(index) {
    attachedFiles.splice(index, 1);
    // Bug fix: without this, removing every preview one-by-one left stale
    // files in the <input>, and picking the same image again would not fire
    // 'change' (browsers skip the event when the input's value is unchanged).
    if (attachedFiles.length === 0 && fileAttachment) {
        fileAttachment.value = '';
    }
    updateInputPlaceholder();
    updateAttachmentPreviewVisibility();
    updateAttachmentPreviews();
}
|
|
435
|
+
|
|
436
|
+
/**
 * Reflect the number of attached images in the chat input's placeholder text.
 */
function updateInputPlaceholder() {
    const count = attachedFiles.length;
    chatInput.placeholder = count > 0
        ? `Type your message... (${count} image${count > 1 ? 's' : ''} attached)`
        : 'Type your message...';
}
|
|
444
|
+
|
|
445
|
+
/**
 * Read a File/Blob and resolve with its base64 payload (data-URL prefix
 * stripped). Rejects with the FileReader error on failure.
 * @param {File} file
 * @returns {Promise<string>} base64-encoded file contents
 */
function fileToBase64(file) {
    return new Promise((resolve, reject) => {
        const reader = new FileReader();
        reader.onload = () => {
            // reader.result is a data URL ("data:<mime>;base64,<payload>");
            // keep only the payload after the comma.
            resolve(reader.result.split(',')[1]);
        };
        reader.onerror = (error) => reject(error);
        reader.readAsDataURL(file);
    });
}
|
|
454
|
+
|
|
455
|
+
/**
 * Incrementally (re)renders reasoning + answer without blowing away the header so user
 * collapsing/expanding persists while tokens stream.
 *
 * The bubble grows a persistent ".think-tokens-container" (clickable header +
 * reasoning content) plus an optional ".main-response" div for the answer.
 * The user's expand/collapse choice is remembered in
 * bubbleElement.dataset.thinkExpanded ('true'/'false').
 *
 * @param {HTMLElement} bubbleElement - chat bubble to render into
 * @param {string} text - full message text so far (may be mid-stream)
 * @param {boolean} [isMarkdown=false] - when false, text is set verbatim as textContent
 */
function updateMessageContent(bubbleElement, text, isMarkdown = false) {
    if (!isMarkdown) {
        bubbleElement.textContent = text;
        return;
    }

    const { main, thought, isThinking } = parseReasoningBlocks(text);

    // Pure normal markdown (no reasoning)
    if (!thought.trim()) {
        // If structure existed before, replace fully (safe—no toggle needed)
        bubbleElement.innerHTML = renderMarkdown(main);
        delete bubbleElement.dataset.thinkExpanded;
        return;
    }

    // Determine current expanded state (user preference) or default
    let expanded;
    if (bubbleElement.dataset.thinkExpanded === 'true') expanded = true;
    else if (bubbleElement.dataset.thinkExpanded === 'false') expanded = false;
    else expanded = !!isThinking; // default: open while still streaming until user intervenes

    // Create structure once
    let container = bubbleElement.querySelector('.think-tokens-container');
    let thoughtContent, headerChevron, headerLabel, mainDiv;

    if (!container) {
        bubbleElement.innerHTML = ''; // first time constructing reasoning UI

        container = document.createElement('div');
        container.className = 'think-tokens-container' + (expanded ? '' : ' collapsed');

        // Clickable header toggles the reasoning section open/closed.
        const header = document.createElement('div');
        header.className = 'think-tokens-header';
        header.onclick = function () { toggleThinkTokens(header); };

        headerChevron = document.createElement('span');
        headerChevron.className = 'think-tokens-chevron';
        headerChevron.textContent = expanded ? '▼' : '▶';

        headerLabel = document.createElement('span');
        headerLabel.className = 'think-tokens-label';
        header.appendChild(headerChevron);
        header.appendChild(headerLabel);

        thoughtContent = document.createElement('div');
        thoughtContent.className = 'think-tokens-content';
        thoughtContent.style.display = expanded ? 'block' : 'none';

        container.appendChild(header);
        container.appendChild(thoughtContent);
        bubbleElement.appendChild(container);

        if (main.trim()) {
            mainDiv = document.createElement('div');
            mainDiv.className = 'main-response';
            bubbleElement.appendChild(mainDiv);
        }
    } else {
        // Structure exists from a previous streaming update: reuse it so the
        // user's toggle state and the header element survive.
        thoughtContent = container.querySelector('.think-tokens-content');
        headerChevron = container.querySelector('.think-tokens-chevron');
        headerLabel = container.querySelector('.think-tokens-label');
        mainDiv = bubbleElement.querySelector('.main-response');
    }

    // Update label & chevron (don't override user-expanded state)
    headerChevron.textContent = expanded ? '▼' : '▶';
    // Animation-aware label handling
    if (isThinking) {
        // If not already animating, seed an initial frame then start animation
        if (bubbleElement.dataset.thinkAnimActive !== '1') {
            headerLabel.textContent = THINKING_FRAMES[0];
            startThinkingAnimation(container);
        }
    } else {
        // Stop any animation and set final label
        if (bubbleElement.dataset.thinkAnimActive === '1') {
            stopThinkingAnimation(container);
        } else {
            headerLabel.textContent = 'Thought Process';
        }
    }

    // Update reasoning content (can re-run markdown safely)
    thoughtContent.innerHTML = renderMarkdown(thought);

    // Update main answer section
    if (main.trim()) {
        if (!mainDiv) {
            mainDiv = document.createElement('div');
            mainDiv.className = 'main-response';
            bubbleElement.appendChild(mainDiv);
        }
        mainDiv.innerHTML = renderMarkdown(main);
    } else if (mainDiv) {
        mainDiv.remove();
    }

    // Persist preference
    bubbleElement.dataset.thinkExpanded = expanded ? 'true' : 'false';
}
|
|
560
|
+
|
|
561
|
+
/**
 * Scroll the chat history to the bottom, respecting the user's position:
 * if they scrolled up (autoscroll paused) and `force` is false, do nothing.
 * The scroll itself is deferred a tick so freshly appended DOM nodes are
 * laid out first.
 * @param {boolean} [force=false] - scroll even when autoscroll is paused
 */
function scrollChatToBottom(force = false) {
    if (!chatHistory) return;
    if (!force && !autoscrollEnabled) return;

    // Small timeout to allow DOM insertion/layout to finish in streaming cases.
    setTimeout(() => {
        try {
            chatHistory.scrollTop = chatHistory.scrollHeight;
        } catch (_) {}
    }, 0);
}
|
|
572
|
+
|
|
573
|
+
// Append a chat bubble for `role` (e.g. 'user'/'assistant') containing `text`.
// Returns the bubble element so streaming code can keep updating it in place.
function appendMessage(role, text, isMarkdown = false) {
    const div = document.createElement('div');
    div.className = 'chat-message ' + role;
    // Add a bubble for iMessage style
    const bubble = document.createElement('div');
    bubble.className = 'chat-bubble ' + role;

    // Check if isMarkdown is true, regardless of role
    if (isMarkdown) {
        // Build structure via incremental updater (ensures later token updates won't wipe user toggle)
        updateMessageContent(bubble, text, true);
    } else {
        bubble.textContent = text;
    }

    div.appendChild(bubble);
    chatHistory.appendChild(div);
    scrollChatToBottom();
    return bubble;
}
|
|
593
|
+
|
|
594
|
+
// Display system message based on current state.
// Shows a contextual welcome bubble (no models installed vs. none loaded)
// until the first real chat message exists; re-invoked on every periodic UI
// refresh via updateAttachmentButtonState.
function displaySystemMessage() {
    // Remove existing system message if it exists
    if (systemMessageElement) {
        systemMessageElement.remove();
        systemMessageElement = null;
    }

    // Don't show system message if there are already user/LLM messages
    if (messages.length > 0) return;

    let messageText = '';

    // Check if any models are installed
    const hasInstalledModels = window.installedModels && window.installedModels.size > 0;

    if (!hasInstalledModels) {
        // No models installed - show first message
        messageText = `Welcome to Lemonade! To get started:
1. Head over to the Model Management tab.
2. Use the 📥Download button to download a model.
3. Use the 🚀Load button to load the model.
4. Come back to this tab, and you are ready to chat with the model.`;
    } else if (!currentLoadedModel) {
        // Models available but none loaded - show second message
        messageText = 'Welcome to Lemonade! Choose a model from the dropdown menu below to load it and start chatting.';
    }

    if (messageText) {
        const div = document.createElement('div');
        div.className = 'chat-message system';
        div.setAttribute('data-system-message', 'true');

        const bubble = document.createElement('div');
        bubble.className = 'chat-bubble system';
        bubble.textContent = messageText;

        div.appendChild(bubble);
        chatHistory.appendChild(div);
        scrollChatToBottom();

        // Remember the node so the next refresh can remove/replace it.
        systemMessageElement = div;
    }
}
|
|
638
|
+
|
|
639
|
+
/**
 * Cancel the in-flight chat completion: abort the client-side fetch stream,
 * best-effort ask the server to halt generation, and restore the UI to its
 * idle (non-streaming) state. No-op when nothing is streaming.
 */
function abortCurrentRequest() {
    if (!abortController) return;

    // Kill the client-side stream immediately.
    abortController.abort();

    // Also signal the server to halt generation promptly (helps slow CPU
    // backends). Fire-and-forget; never block the UI on this.
    try {
        fetch(getServerBaseUrl() + '/api/v1/halt', { method: 'GET', keepalive: true }).catch(() => {});
    } catch (_) {}

    abortController = null;
    isStreaming = false;
    updateAttachmentButtonState();
    console.log('Streaming request aborted by user.');
}
|
|
655
|
+
|
|
656
|
+
|
|
657
|
+
// ---------- Reasoning Parsing (Harmony + <think>) ----------
|
|
658
|
+
|
|
659
|
+
function parseReasoningBlocks(raw) {
    // Split raw model output into the visible answer ("main") and the
    // reasoning text ("thought"). isThinking is true when the text ends
    // inside an unclosed reasoning block (i.e. still streaming reasoning).
    if (raw == null) return { main: '', thought: '', isThinking: false };

    // Openers: <think> plus the Harmony analysis-channel variants
    // (<|channel|>analysis followed by <|channel|>, <|message|> or <|assistant|>).
    const OPENER = /<think>|<\|channel\|>analysis<\|(channel|message|assistant)\|>/;
    // Closers: </think> or the Harmony <|end|> marker.
    const CLOSER = /<\/think>|<\|end\|>/;

    let rest = String(raw);
    let visible = '';
    let reasoning = '';
    let insideReasoning = false;

    for (;;) {
        const openMatch = rest.match(OPENER);
        if (!openMatch) {
            // No further opener: the tail belongs to whichever section is active.
            if (insideReasoning) {
                reasoning += rest;
            } else {
                visible += rest;
            }
            break;
        }

        // Everything before the opener goes to the currently-active section.
        const before = rest.slice(0, openMatch.index);
        if (insideReasoning) {
            reasoning += before;
        } else {
            visible += before;
        }

        // Skip past the opener token itself and switch into reasoning mode.
        rest = rest.slice(openMatch.index + openMatch[0].length);
        insideReasoning = true;

        const closeMatch = rest.match(CLOSER);
        if (!closeMatch) {
            // Opener without a closer yet: reasoning is still streaming.
            reasoning += rest;
            break;
        }

        // Capture the reasoning segment, drop the closer token, and loop
        // to look for additional reasoning blocks.
        reasoning += rest.slice(0, closeMatch.index);
        rest = rest.slice(closeMatch.index + closeMatch[0].length);
        insideReasoning = false;
    }

    return { main: visible, thought: reasoning, isThinking: insideReasoning };
}
|
|
707
|
+
|
|
708
|
+
function renderMarkdownWithThinkTokens(text, preservedExpanded) {
    // Render assistant output as markdown, wrapping any reasoning ("think")
    // text in a collapsible "Thought Process" container.
    //
    // Params:
    //   text             - raw streamed model text (may contain <think>/Harmony tags)
    //   preservedExpanded- optional boolean: the user's prior expand/collapse choice
    // Returns: HTML string.
    const { main, thought, isThinking } = parseReasoningBlocks(text);

    // No reasoning present: render the answer as plain markdown.
    if (!thought.trim()) {
        return renderMarkdown(main);
    }

    // If we have a preserved user preference, honor it. Otherwise default:
    // open while streaming (original behavior) else collapsed.
    const expanded = (typeof preservedExpanded === 'boolean')
        ? preservedExpanded
        : !!isThinking;

    const chevron = expanded ? '▼' : '▶';
    // Fix: the previous expression ended in a redundant ternary whose branches
    // were both 'Thought Process'; only the expanded-and-streaming case differs.
    const label = (expanded && isThinking) ? 'Thinking...' : 'Thought Process';

    let html = `
    <div class="think-tokens-container${expanded ? '' : ' collapsed'}">
        <div class="think-tokens-header" onclick="toggleThinkTokens(this)">
            <span class="think-tokens-chevron">${chevron}</span>
            <span class="think-tokens-label">${label}</span>
        </div>
        <div class="think-tokens-content" style="display:${expanded ? 'block' : 'none'};">
            ${renderMarkdown(thought)}
        </div>
    </div>
    `;
    if (main.trim()) {
        html += `<div class="main-response">${renderMarkdown(main)}</div>`;
    }
    return html;
}
|
|
740
|
+
|
|
741
|
+
function extractAssistantReasoning(fullText) {
    // Build the assistant message object persisted in the history:
    // trimmed visible answer, the raw streamed text, and — only when
    // present — the trimmed reasoning under reasoning_content.
    const parsed = parseReasoningBlocks(fullText);
    const message = {
        content: (parsed.main || '').trim(),
        raw: fullText,
    };
    const reasoning = parsed.thought ? parsed.thought.trim() : '';
    if (reasoning) {
        message.reasoning_content = reasoning;
    }
    return message;
}
|
|
747
|
+
|
|
748
|
+
// -----------------------------------------------------------
|
|
749
|
+
|
|
750
|
+
function toggleThinkTokens(header) {
    // Flip a reasoning block between collapsed and expanded, remembering
    // the user's choice on the enclosing chat bubble (dataset.thinkExpanded).
    const container = header.parentElement;
    const content = container.querySelector('.think-tokens-content');
    const chevron = header.querySelector('.think-tokens-chevron');
    const bubble = header.closest('.chat-bubble');

    // Currently expanded (no 'collapsed' class) means this click collapses it.
    const willCollapse = !container.classList.contains('collapsed');

    content.style.display = willCollapse ? 'none' : 'block';
    chevron.textContent = willCollapse ? '▶' : '▼';
    container.classList.toggle('collapsed', willCollapse);
    if (bubble) bubble.dataset.thinkExpanded = willCollapse ? 'false' : 'true';
}
|
|
771
|
+
|
|
772
|
+
function startThinkingAnimation(container) {
    // Animate the "Thinking" label by cycling THINKING_FRAMES on a timer.
    // The active flag and timer id live on the bubble's dataset so
    // stopThinkingAnimation can halt the loop from elsewhere.
    const bubble = container.closest('.chat-bubble');
    if (!bubble) return;
    if (bubble.dataset.thinkAnimActive === '1') return; // already animating

    const labelEl = container.querySelector('.think-tokens-label');
    if (!labelEl) return;

    bubble.dataset.thinkAnimActive = '1';

    let frame = 0;
    const tick = () => {
        // If streaming ended mid-cycle, stop.
        if (bubble.dataset.thinkAnimActive !== '1') return;
        labelEl.textContent = THINKING_FRAMES[frame % THINKING_FRAMES.length];
        frame++;
        // Remember the timer id so stopThinkingAnimation can clear it.
        bubble.dataset.thinkAnimId = String(setTimeout(tick, THINKING_ANIM_INTERVAL_MS));
    };
    tick();
}
|
|
788
|
+
|
|
789
|
+
function stopThinkingAnimation(container, finalLabel = 'Thought Process') {
    // Halt the label animation started by startThinkingAnimation, cancel
    // any pending timer, and restore a static label on the reasoning header.
    const bubble = container.closest('.chat-bubble');
    if (!bubble) return;

    bubble.dataset.thinkAnimActive = '0';

    const pendingId = bubble.dataset.thinkAnimId;
    if (pendingId) {
        clearTimeout(Number(pendingId));
        delete bubble.dataset.thinkAnimId;
    }

    const labelEl = container.querySelector('.think-tokens-label');
    if (labelEl) labelEl.textContent = finalLabel;
}
|
|
801
|
+
|
|
802
|
+
// Send the current chat input (or an explicit `existingTextIfAny` string,
// e.g. for regeneration) to the chat-completions endpoint, streaming the
// assistant's reply into a new bubble. Handles vision attachments, the
// Send/Stop button state, and reassembling reasoning ("think") tokens from
// three possible stream schemas into <think>...</think> markers that the
// renderer understands.
//
// Mutates file-level state: abortController, isStreaming, messages,
// attachedFiles, systemMessageElement, and several UI elements.
async function sendMessage(existingTextIfAny) {
    // Explicit argument wins; otherwise read and trim the input box.
    const text = (existingTextIfAny !== undefined ? existingTextIfAny : chatInput.value.trim());

    // Prepare abort controller for this request
    abortController = new AbortController();
    // UI state: set button to Stop
    if (toggleBtn) {
        toggleBtn.disabled = false;
        toggleBtn.textContent = 'Stop';
    }
    if (!text && attachedFiles.length === 0) {
        // Nothing to send; revert button state and clear abort handle
        abortController = null;
        updateAttachmentButtonState();
        return;
    }

    isStreaming = true;

    // Remove system message when user starts chatting
    if (systemMessageElement) {
        systemMessageElement.remove();
        systemMessageElement = null;
    }

    // Check if a model is loaded
    if (!currentLoadedModel) {
        alert('Please load a model first before sending messages.');
        abortController = null;
        isStreaming = false;
        updateAttachmentButtonState();
        return;
    }

    // Check if trying to send images to non-vision model
    // NOTE(review): unlike the guard above, this early return does NOT reset
    // abortController/isStreaming or call updateAttachmentButtonState(), so
    // the button appears stuck on "Stop" — looks like a bug; confirm and
    // mirror the cleanup done in the model-loaded guard.
    if (attachedFiles.length > 0 && !isVisionModel(currentLoadedModel)) {
        alert(`Cannot send images to model "${currentLoadedModel}" as it does not support vision. Please load a model with "Vision" capabilities or remove the attached images.`);
        return;
    }

    // Build the OpenAI-style message content array (text part + image parts).
    let messageContent = [];

    // Add text if present
    if (text) {
        messageContent.push({ type: "text", text: text });
    }

    // Add images if present, as base64 data URLs. A failed conversion is
    // logged and skipped rather than aborting the whole send.
    if (attachedFiles.length > 0) {
        for (const file of attachedFiles) {
            if (file.type.startsWith('image/')) {
                try {
                    const base64 = await fileToBase64(file);
                    messageContent.push({
                        type: "image_url",
                        image_url: { url: `data:${file.type};base64,${base64}` }
                    });
                } catch (error) {
                    console.error('Error converting image to base64:', error);
                }
            }
        }
    }

    // Display user message (show text and file names)
    let displayText = text;
    if (attachedFiles.length > 0) {
        const fileNames = attachedFiles.map(f => f.name || 'pasted-image').join(', ');
        displayText = displayText ? `${displayText}\n[Images: ${fileNames}]` : `[Images: ${fileNames}]`;
    }

    appendMessage('user', displayText, true);

    // Add to messages array. A lone text part is flattened to a plain string
    // (classic OpenAI shape); otherwise the content array is kept.
    const userMessage = {
        role: 'user',
        content: messageContent.length === 1 && messageContent[0].type === "text"
            ? messageContent[0].text
            : messageContent
    };
    messages.push(userMessage);

    // Clear input and attachments
    chatInput.value = '';
    attachedFiles = [];
    fileAttachment.value = '';
    updateInputPlaceholder(); // Reset placeholder
    updateAttachmentPreviewVisibility(); // Hide preview container
    updateAttachmentPreviews(); // Clear previews
    // Keep the Send/Stop button enabled during streaming so user can abort.

    // Streaming OpenAI completions. llmText accumulates the full raw reply
    // (including synthesized <think> markers) for rendering and history.
    let llmText = '';
    const llmBubble = appendMessage('llm', '...');
    try {
        // Use the correct endpoint for chat completions with model settings
        const modelSettings = getCurrentModelSettings ? getCurrentModelSettings() : {};
        console.log('Applying model settings to API request:', modelSettings);

        const payload = {
            model: currentLoadedModel,
            messages: messages,
            stream: true,
            ...modelSettings // Apply current model settings
        };

        const resp = await httpRequest(getServerBaseUrl() + '/api/v1/chat/completions', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify(payload),
            // Abort signal lets abortCurrentRequest() cancel this stream.
            signal: abortController ? abortController.signal : undefined
        });
        if (!resp.body) throw new Error('No stream');
        const reader = resp.body.getReader();
        let decoder = new TextDecoder();
        llmBubble.textContent = '';

        // Whether the loaded model is tagged as a reasoning model in the
        // server's model metadata (used to decide when to auto-close <think>).
        const reasoningEnabled = (() => {
            try {
                const meta = window.SERVER_MODELS?.[currentLoadedModel];
                return Array.isArray(meta?.labels) && meta.labels.includes('reasoning');
            } catch (_) { return false; }
        })();

        // State for stitching reasoning deltas into <think>...</think> markers.
        let thinkOpened = false;
        let thinkClosed = false;
        let reasoningSchemaActive = false; // true if we saw delta.reasoning object
        // NOTE(review): reasoningSchemaActive is written but never read below.
        let receivedAnyReasoning = false; // true once any reasoning (schema or reasoning_content) arrived

        while (true) {
            const { done, value } = await reader.read();
            if (done) break;
            const chunk = decoder.decode(value);
            if (!chunk.trim()) continue;

            // Handle Server-Sent Events format
            // NOTE(review): assumes each read() chunk contains whole SSE lines;
            // a "data: " record split across chunks would fail JSON.parse and
            // be silently dropped — confirm whether the server guarantees
            // line-aligned chunks.
            const lines = chunk.split('\n');
            for (const rawLine of lines) {
                if (!rawLine.startsWith('data: ')) continue;
                const jsonStr = rawLine.slice(6).trim();
                if (!jsonStr || jsonStr === '[DONE]') continue;

                let deltaObj;
                try { deltaObj = JSON.parse(jsonStr); } catch { continue; }
                const choiceDelta = deltaObj?.choices?.[0]?.delta;
                if (!choiceDelta) continue;

                // 1. OpenAI reasoning object (preferred schema)
                if (choiceDelta.reasoning && !thinkClosed) {
                    reasoningSchemaActive = true;
                    const r = choiceDelta.reasoning;
                    if (!thinkOpened) {
                        llmText += '<think>';
                        thinkOpened = true;
                    }
                    if (Array.isArray(r.content)) {
                        for (const seg of r.content) {
                            if (seg?.type === 'output_text' && seg.text) {
                                llmText += unescapeJsonString(seg.text);
                                receivedAnyReasoning = true;
                            }
                        }
                    }
                    if (r.done && !thinkClosed) {
                        llmText += '</think>';
                        thinkClosed = true;
                    }
                }

                // 2. llama.cpp style: reasoning_content (string fragments)
                if (choiceDelta.reasoning_content && !thinkClosed) {
                    if (!thinkOpened) {
                        llmText += '<think>';
                        thinkOpened = true;
                    }
                    llmText += unescapeJsonString(choiceDelta.reasoning_content);
                    receivedAnyReasoning = true;
                    // We DO NOT close yet; we’ll close when first normal content arrives.
                }

                // 3. Plain content tokens
                if (choiceDelta.content) {
                    let c = unescapeJsonString(choiceDelta.content);

                    // If we are inside reasoning (opened, not closed) and this is the first visible answer token,
                    // close the reasoning block before appending (unless model already emitted </think> itself).
                    if (thinkOpened && !thinkClosed) {
                        if (c.startsWith('</think>')) {
                            // Model closed it explicitly; strip that tag and mark closed
                            c = c.replace(/^<\/think>\s*/, '');
                            thinkClosed = true;
                        } else {
                            // Close ourselves (covers reasoning_content path & schema early content anomaly)
                            if (receivedAnyReasoning || reasoningEnabled) {
                                llmText += '</think>';
                                thinkClosed = true;
                            }
                        }
                    }

                    // If content stream itself begins a new reasoning section (rare), handle gracefully
                    if (!thinkOpened && /<think>/.test(c)) {
                        thinkOpened = true;
                        const parts = c.split(/<think>/);
                        // parts[0] is anything before accidental <think>, treat as normal visible content
                        llmText += parts[0];
                        // Everything after opener treated as reasoning until a closing tag or we decide to close
                        llmText += '<think>' + parts.slice(1).join('<think>');
                        receivedAnyReasoning = true;
                        updateMessageContent(llmBubble, llmText, true);
                        scrollChatToBottom();
                        continue;
                    }

                    llmText += c;
                }

                // Re-render the bubble after every delta so streaming is live.
                updateMessageContent(llmBubble, llmText, true);
                scrollChatToBottom();
            }
        }

        // Final safety close (e.g., model stopped mid-reasoning)
        if (thinkOpened && !thinkClosed) {
            llmText += '</think>';
        }

        // Persist the assistant turn (content / raw / optional reasoning_content).
        const assistantMsg = extractAssistantReasoning(llmText);
        messages.push({ role: 'assistant', ...assistantMsg });

    } catch (e) {
        // If the request was aborted by the user, just clean up UI without error banner
        if (e.name === 'AbortError') {
            console.log('Chat request aborted by user.');
        } else {
            let detail = e.message;
            // Retry non-streaming purely to extract a structured error detail
            // from the server; best-effort, failures are ignored.
            try {
                const errPayload = { model: currentLoadedModel, messages: messages, stream: false };
                const errResp = await httpJson(getServerBaseUrl() + '/api/v1/chat/completions', {
                    method: 'POST',
                    headers: { 'Content-Type': 'application/json' },
                    body: JSON.stringify(errPayload)
                });
                if (errResp && errResp.detail) detail = errResp.detail;
            } catch (_) {}
            // NOTE(review): this inner AbortError check is redundant — the
            // outer else branch already excludes AbortError.
            if (e && e.name !== 'AbortError') {
                llmBubble.textContent = '[Error: ' + detail + ']';
                showErrorBanner(`Chat error: ${detail}`);
            }
        }
    }
    // Reset UI state after streaming finishes
    abortController = null;
    isStreaming = false;
    updateAttachmentButtonState();
    updateMessageContent(llmBubble, llmText, true);
}
|