llms-py 2.0.35__py3-none-any.whl → 3.0.0b1__py3-none-any.whl

This diff compares the contents of two publicly released package versions as they appear in their public registries. It is provided for informational purposes only.
llms/ui/ChatPrompt.mjs CHANGED
@@ -1,7 +1,8 @@
  import { ref, nextTick, inject, unref } from 'vue'
  import { useRouter } from 'vue-router'
  import { lastRightPart } from '@servicestack/client'
- import { deepClone, fileToDataUri, fileToBase64, addCopyButtons, toModelInfo, tokenCost } from './utils.mjs'
+ import { deepClone, fileToDataUri, fileToBase64, addCopyButtons, toModelInfo, tokenCost, uploadFile } from './utils.mjs'
+ import { toRaw } from 'vue'

  const imageExts = 'png,webp,jpg,jpeg,gif,bmp,svg,tiff,ico'.split(',')
  const audioExts = 'mp3,wav,ogg,flac,m4a,opus,webm'.split(',')
@@ -17,12 +18,15 @@ export function useChatPrompt() {
  const hasFile = () => attachedFiles.value.length > 0
  // const hasText = () => !hasImage() && !hasAudio() && !hasFile()

+ const editingMessageId = ref(null)
+
  function reset() {
  // Ensure initial state is ready to accept input
  isGenerating.value = false
  attachedFiles.value = []
  messageText.value = ''
  abortController.value = null
+ editingMessageId.value = null
  }

  function cancel() {
@@ -41,6 +45,7 @@ export function useChatPrompt() {
  errorStatus,
  isGenerating,
  abortController,
+ editingMessageId,
  get generating() {
  return isGenerating.value
  },
@@ -54,7 +59,7 @@ export function useChatPrompt() {
  }

  export default {
- template:`
+ template: `
  <div class="mx-auto max-w-3xl">
  <SettingsDialog :isOpen="showSettings" @close="showSettings = false" />
  <div class="flex space-x-2">
@@ -139,8 +144,8 @@ export default {
  `,
  props: {
  model: {
- type: String,
- default: ''
+ type: Object,
+ default: null
  },
  systemPrompt: {
  type: String,
@@ -160,7 +165,8 @@ export default {
  errorStatus,
  hasImage,
  hasAudio,
- hasFile
+ hasFile,
+ editingMessageId
  } = chatPrompt
  const threads = inject('threads')
  const {
@@ -176,9 +182,41 @@ export default {
  const triggerFilePicker = () => {
  if (fileInput.value) fileInput.value.click()
  }
- const onFilesSelected = (e) => {
+ const onFilesSelected = async (e) => {
  const files = Array.from(e.target?.files || [])
- if (files.length) attachedFiles.value.push(...files)
+ if (files.length) {
+ // Upload files immediately
+ const uploadedFiles = await Promise.all(files.map(async f => {
+ try {
+ const response = await uploadFile(f)
+ const metadata = {
+ url: response.url,
+ name: f.name,
+ size: response.size,
+ type: f.type,
+ width: response.width,
+ height: response.height,
+ threadId: currentThread.value?.id,
+ created: Date.now()
+ }
+
+ return {
+ ...metadata,
+ file: f // Keep original file for preview/fallback if needed
+ }
+ } catch (error) {
+ console.error('File upload failed:', error)
+ errorStatus.value = {
+ errorCode: 'Upload Failed',
+ message: `Failed to upload ${f.name}: ${error.message}`
+ }
+ return null
+ }
+ }))
+
+ attachedFiles.value.push(...uploadedFiles.filter(f => f))
+ }
+
  // allow re-selecting the same file
  if (fileInput.value) fileInput.value.value = ''

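Note on the new upload flow: each entry pushed into `attachedFiles` is now an upload record rather than a raw `File`. A minimal sketch of its shape, mirroring the `metadata` object built above — the field names come from the diff, but the example values and the exact format of the `uploadFile()` response are assumptions:

```js
// Illustrative attachedFiles entry after uploadFile() resolves (values are made up)
const exampleAttachment = {
    url: '/files/photo.png',   // returned by uploadFile (path format assumed)
    name: 'photo.png',         // from the original File
    size: 48213,               // from the upload response
    type: 'image/png',         // MIME type of the original File
    width: 1024,               // populated by the upload response (e.g. for images)
    height: 768,
    threadId: 'thread_123',    // current thread at upload time, may be undefined
    created: 1735689600000,    // Date.now() timestamp
    file: null,                // original File kept for preview/fallback
}
```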
@@ -253,7 +291,9 @@ export default {

  if (files.length > 0) {
  e.preventDefault()
- addFilesAndSetMessage(files)
+ // Reuse the same logic as onFilesSelected for consistency
+ const event = { target: { files: files } }
+ await onFilesSelected(event)
  }
  }

@@ -272,14 +312,16 @@ export default {
  isDragging.value = false
  }

- const onDrop = (e) => {
+ const onDrop = async (e) => {
  e.preventDefault()
  e.stopPropagation()
  isDragging.value = false

  const files = Array.from(e.dataTransfer?.files || [])
  if (files.length > 0) {
- addFilesAndSetMessage(files)
+ // Reuse the same logic as onFilesSelected for consistency
+ const event = { target: { files: files } }
+ await onFilesSelected(event)
  }
  }

@@ -310,18 +352,28 @@ export default {
  // Clear any existing error message
  errorStatus.value = null

- let message = messageText.value.trim()
- if (attachedFiles.value.length) {
- const names = attachedFiles.value.map(f => f.name).join(', ')
- const mediaType = imageExts.some(ext => names.includes(ext))
- ? '🖼️'
- : audioExts.some(ext => names.includes(ext))
- ? '🔉'
- : '📎'
- message += `\n\n[${mediaType} ${names}]`
- }
+ // 1. Construct Structured Content (Text + Attachments)
+ let text = messageText.value.trim()
+ let content = []
+
+
  messageText.value = ''

+ // Add Text Block
+ content.push({ type: 'text', text: text })
+
+ // Add Attachment Blocks
+ for (const f of attachedFiles.value) {
+ const ext = lastRightPart(f.name, '.')
+ if (imageExts.includes(ext)) {
+ content.push({ type: 'image_url', image_url: { url: f.url } })
+ } else if (audioExts.includes(ext)) {
+ content.push({ type: 'input_audio', input_audio: { data: f.url, format: ext } })
+ } else {
+ content.push({ type: 'file', file: { file_data: f.url, filename: f.name } })
+ }
+ }
+
  // Create AbortController for this request
  const controller = new AbortController()
  chatPrompt.abortController.value = controller
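To illustrate what this block now produces: a prompt with one image and one PDF attachment would yield a `content` array shaped like the sketch below. The part shapes follow the `content.push` calls above; the text, URLs, and filenames are made up:

```js
// Sketch of the structured content built for "Describe these" + photo.png + report.pdf
const content = [
    { type: 'text', text: 'Describe these' },
    { type: 'image_url', image_url: { url: '/files/photo.png' } },
    { type: 'file', file: { file_data: '/files/report.pdf', filename: 'report.pdf' } },
]
// An mp3 attachment would instead add:
// { type: 'input_audio', input_audio: { data: '/files/clip.mp3', format: 'mp3' } }
```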
@@ -339,7 +391,7 @@ export default {
  threadId = currentThread.value.id
  // Update the existing thread's model and systemPrompt to match current selection
  await threads.updateThread(threadId, {
- model: props.model.id,
+ model: props.model.name,
  info: toModelInfo(props.model),
  systemPrompt: props.systemPrompt
  })
@@ -347,120 +399,84 @@ export default {

  // Get the thread to check for duplicates
  let thread = await threads.getThread(threadId)
- const lastMessage = thread.messages[thread.messages.length - 1]
- const isDuplicate = lastMessage && lastMessage.role === 'user' && lastMessage.content === message
-
- // Add user message only if it's not a duplicate
- if (!isDuplicate) {
- await threads.addMessageToThread(threadId, {
- role: 'user',
- content: message
- })
- // Reload thread after adding message
+
+ // Handle Editing / Redo Logic
+ if (editingMessageId.value) {
+ // Check if message still exists
+ const messageExists = thread.messages.find(m => m.id === editingMessageId.value)
+ if (messageExists) {
+ // Update the message content
+ await threads.updateMessageInThread(threadId, editingMessageId.value, { content: content })
+ // Redo from this message (clears subsequent)
+ await threads.redoMessageFromThread(threadId, editingMessageId.value)
+
+ // Clear editing state
+ editingMessageId.value = null
+ } else {
+ // Fallback if message was deleted
+ editingMessageId.value = null
+ }
+ // Refresh thread state
  thread = await threads.getThread(threadId)
+ } else {
+ // Regular Send Logic
+ const lastMessage = thread.messages[thread.messages.length - 1]
+
+ // Check duplicate based on text content extracted from potential array
+ const getLastText = (msgContent) => {
+ if (typeof msgContent === 'string') return msgContent
+ if (Array.isArray(msgContent)) return msgContent.find(c => c.type === 'text')?.text || ''
+ return ''
+ }
+ const newText = text // content[0].text
+ const lastText = lastMessage && lastMessage.role === 'user' ? getLastText(lastMessage.content) : null
+
+ const isDuplicate = lastText === newText
+
+ // Add user message only if it's not a duplicate
+ // Note: We are saving the FULL STRUCTURED CONTENT array here
+ if (!isDuplicate) {
+ await threads.addMessageToThread(threadId, {
+ role: 'user',
+ content: content
+ })
+ // Reload thread after adding message
+ thread = await threads.getThread(threadId)
+ }
  }

  isGenerating.value = true
- const messages = [...thread.messages]
+
+ // Construct API Request from History
+ const chatRequest = {
+ model: props.model.name,
+ messages: [],
+ metadata: {}
+ }

  // Add system prompt if present
  if (props.systemPrompt?.trim()) {
- messages.unshift({
+ chatRequest.messages.push({
  role: 'system',
- content: [
- { type: 'text', text: props.systemPrompt }
- ]
+ content: props.systemPrompt // assuming system prompt is just string
  })
  }

- const chatRequest = createChatRequest()
- chatRequest.model = props.model.id
+ // Add History
+ thread.messages.forEach(m => {
+ chatRequest.messages.push({
+ role: m.role,
+ content: m.content
+ })
+ })

  // Apply user settings
  applySettings(chatRequest)
-
- console.debug('chatRequest', chatRequest, hasImage(), hasAudio(), attachedFiles.value.length, attachedFiles.value)
-
- function setContentText(chatRequest, text) {
- // Replace text message
- const textImage = chatRequest.messages.find(m =>
- m.role === 'user' && Array.isArray(m.content) && m.content.some(c => c.type === 'text'))
- for (const c of textImage.content) {
- if (c.type === 'text') {
- c.text = text
- }
- }
- }
-
- if (hasImage()) {
- const imageMessage = chatRequest.messages.find(m =>
- m.role === 'user' && Array.isArray(m.content) && m.content.some(c => c.type === 'image_url'))
- console.debug('hasImage', chatRequest, imageMessage)
- if (imageMessage) {
- const imgs = []
- let imagePart = deepClone(imageMessage.content.find(c => c.type === 'image_url'))
- for (const f of attachedFiles.value) {
- if (imageExts.includes(lastRightPart(f.name, '.'))) {
- imagePart.image_url.url = await fileToDataUri(f)
- }
- imgs.push(imagePart)
- }
- imageMessage.content = imageMessage.content.filter(c => c.type !== 'image_url')
- imageMessage.content = [...imgs, ...imageMessage.content]
- setContentText(chatRequest, message)
- }
-
- } else if (hasAudio()) {
- console.debug('hasAudio', chatRequest)
- const audioMessage = chatRequest.messages.find(m =>
- m.role === 'user' && Array.isArray(m.content) && m.content.some(c => c.type === 'input_audio'))
- if (audioMessage) {
- const audios = []
- let audioPart = deepClone(audioMessage.content.find(c => c.type === 'input_audio'))
- for (const f of attachedFiles.value) {
- if (audioExts.includes(lastRightPart(f.name, '.'))) {
- audioPart.input_audio.data = await fileToBase64(f)
- }
- audios.push(audioPart)
- }
- audioMessage.content = audioMessage.content.filter(c => c.type !== 'input_audio')
- audioMessage.content = [...audios, ...audioMessage.content]
- setContentText(chatRequest, message)
- }
- } else if (attachedFiles.value.length) {
- console.debug('hasFile', chatRequest)
- const fileMessage = chatRequest.messages.find(m =>
- m.role === 'user' && Array.isArray(m.content) && m.content.some(c => c.type === 'file'))
- if (fileMessage) {
- const files = []
- let filePart = deepClone(fileMessage.content.find(c => c.type === 'file'))
- for (const f of attachedFiles.value) {
- filePart.file.file_data = await fileToDataUri(f)
- filePart.file.filename = f.name
- files.push(filePart)
- }
- fileMessage.content = fileMessage.content.filter(c => c.type !== 'file')
- fileMessage.content = [...files, ...fileMessage.content]
- setContentText(chatRequest, message)
- }
-
- } else {
- console.debug('hasText', chatRequest)
- // Chat template message needs to be empty
- chatRequest.messages = []
- messages.forEach(m => chatRequest.messages.push({
- role: m.role,
- content: typeof m.content === 'string'
- ? [{ type: 'text', text: m.content }]
- : m.content
- }))
- }
-
- chatRequest.metadata ??= {}
  chatRequest.metadata.threadId = threadId

- // Send to API
  console.debug('chatRequest', chatRequest)
+
+ // Send to API
  const startTime = Date.now()
  const response = await ai.post('/v1/chat/completions', {
  body: JSON.stringify(chatRequest),
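Putting the pieces together, the request posted to `/v1/chat/completions` is now built directly from the thread history rather than from a `createChatRequest()` template. A hedged sketch of the resulting body for a thread with one prior exchange — the model name and message texts are illustrative, and `applySettings` may add sampling options not shown here:

```js
// Illustrative chatRequest after the history loop and applySettings()
const chatRequest = {
    model: 'gpt-4o-mini',   // props.model.name (example value)
    messages: [
        // pushed first, only when props.systemPrompt is set
        { role: 'system', content: 'You are a helpful assistant.' },
        // then every stored thread message, content passed through as-is
        { role: 'user', content: [{ type: 'text', text: 'Hello' }] },
        { role: 'assistant', content: 'Hi! How can I help?' },
        { role: 'user', content: [
            { type: 'text', text: 'Describe this image' },
            { type: 'image_url', image_url: { url: '/files/photo.png' } },
        ] },
    ],
    metadata: { threadId: 'thread_123' },   // threadId attached before sending
}
```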
@@ -513,8 +529,8 @@ export default {
  errorCode: 'Error',
  }
  errorStatus.value.message = result.error
- }
-
+ }
+
  if (!errorStatus.value) {
  // Add assistant response (save entire message including reasoning)
  const assistantMessage = result.choices?.[0]?.message
@@ -522,13 +538,13 @@ export default {
  const usage = result.usage
  if (usage) {
  if (result.metadata?.pricing) {
- const [ input, output ] = result.metadata.pricing.split('/')
+ const [input, output] = result.metadata.pricing.split('/')
  usage.duration = result.metadata.duration ?? (Date.now() - startTime)
  usage.input = input
  usage.output = output
  usage.tokens = usage.completion_tokens
  usage.price = usage.output
- usage.cost = tokenCost(usage.prompt_tokens * parseFloat(input) + usage.completion_tokens * parseFloat(output))
+ usage.cost = tokenCost(usage.prompt_tokens / 1_000_000 * parseFloat(input) + usage.completion_tokens / 1_000_000 * parseFloat(output))
  }
  await threads.logRequest(threadId, props.model, chatRequest, result)
  }
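The cost fix above assumes the `pricing` metadata is quoted per million tokens (e.g. `'3/15'` for $3 input / $15 output per 1M tokens). A worked example of the corrected arithmetic, leaving aside whatever rounding or formatting `tokenCost()` in utils.mjs applies — the pricing string and token counts below are made up:

```js
// Worked example of the corrected per-million-token cost calculation
const [input, output] = '3/15'.split('/')   // assumed: $ per 1M prompt / completion tokens
const prompt_tokens = 1200
const completion_tokens = 800

const cost = prompt_tokens / 1_000_000 * parseFloat(input)
           + completion_tokens / 1_000_000 * parseFloat(output)
// 1200/1e6 * 3 + 800/1e6 * 15 = 0.0036 + 0.012 = 0.0156 dollars
// The previous code omitted the /1_000_000 divisor, overstating cost by a factor of one million.
console.log(cost.toFixed(4)) // "0.0156"
```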