llms-py 3.0.0b6-py3-none-any.whl → 3.0.0b7-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (55)
  1. llms/__pycache__/main.cpython-314.pyc +0 -0
  2. llms/{ui/modules/analytics.mjs → extensions/analytics/ui/index.mjs} +4 -2
  3. llms/extensions/core_tools/__init__.py +358 -0
  4. llms/extensions/core_tools/__pycache__/__init__.cpython-314.pyc +0 -0
  5. llms/extensions/gallery/__init__.py +61 -0
  6. llms/extensions/gallery/__pycache__/__init__.cpython-314.pyc +0 -0
  7. llms/extensions/gallery/__pycache__/db.cpython-314.pyc +0 -0
  8. llms/extensions/gallery/db.py +298 -0
  9. llms/extensions/gallery/ui/index.mjs +480 -0
  10. llms/extensions/providers/__init__.py +18 -0
  11. llms/extensions/providers/__pycache__/__init__.cpython-314.pyc +0 -0
  12. llms/{providers → extensions/providers}/__pycache__/anthropic.cpython-314.pyc +0 -0
  13. llms/extensions/providers/__pycache__/chutes.cpython-314.pyc +0 -0
  14. llms/extensions/providers/__pycache__/google.cpython-314.pyc +0 -0
  15. llms/{providers → extensions/providers}/__pycache__/nvidia.cpython-314.pyc +0 -0
  16. llms/{providers → extensions/providers}/__pycache__/openai.cpython-314.pyc +0 -0
  17. llms/extensions/providers/__pycache__/openrouter.cpython-314.pyc +0 -0
  18. llms/{providers → extensions/providers}/anthropic.py +1 -4
  19. llms/{providers → extensions/providers}/chutes.py +21 -18
  20. llms/{providers → extensions/providers}/google.py +99 -27
  21. llms/{providers → extensions/providers}/nvidia.py +6 -8
  22. llms/{providers → extensions/providers}/openai.py +3 -6
  23. llms/{providers → extensions/providers}/openrouter.py +12 -10
  24. llms/extensions/system_prompts/__init__.py +45 -0
  25. llms/extensions/system_prompts/__pycache__/__init__.cpython-314.pyc +0 -0
  26. llms/extensions/system_prompts/ui/index.mjs +284 -0
  27. llms/extensions/system_prompts/ui/prompts.json +1067 -0
  28. llms/{ui/modules/tools.mjs → extensions/tools/ui/index.mjs} +4 -2
  29. llms/llms.json +17 -1
  30. llms/main.py +381 -170
  31. llms/providers-extra.json +0 -32
  32. llms/ui/App.mjs +17 -18
  33. llms/ui/ai.mjs +10 -3
  34. llms/ui/app.css +1553 -24
  35. llms/ui/ctx.mjs +70 -12
  36. llms/ui/index.mjs +13 -8
  37. llms/ui/modules/chat/ChatBody.mjs +11 -248
  38. llms/ui/modules/chat/HomeTools.mjs +254 -0
  39. llms/ui/modules/chat/SettingsDialog.mjs +1 -1
  40. llms/ui/modules/chat/index.mjs +278 -174
  41. llms/ui/modules/layout.mjs +2 -26
  42. llms/ui/modules/model-selector.mjs +1 -1
  43. llms/ui/modules/threads/index.mjs +5 -11
  44. llms/ui/modules/threads/threadStore.mjs +56 -2
  45. llms/ui/utils.mjs +21 -3
  46. {llms_py-3.0.0b6.dist-info → llms_py-3.0.0b7.dist-info}/METADATA +1 -1
  47. llms_py-3.0.0b7.dist-info/RECORD +80 -0
  48. llms/providers/__pycache__/chutes.cpython-314.pyc +0 -0
  49. llms/providers/__pycache__/google.cpython-314.pyc +0 -0
  50. llms/providers/__pycache__/openrouter.cpython-314.pyc +0 -0
  51. llms_py-3.0.0b6.dist-info/RECORD +0 -66
  52. {llms_py-3.0.0b6.dist-info → llms_py-3.0.0b7.dist-info}/WHEEL +0 -0
  53. {llms_py-3.0.0b6.dist-info → llms_py-3.0.0b7.dist-info}/entry_points.txt +0 -0
  54. {llms_py-3.0.0b6.dist-info → llms_py-3.0.0b7.dist-info}/licenses/LICENSE +0 -0
  55. {llms_py-3.0.0b6.dist-info → llms_py-3.0.0b7.dist-info}/top_level.txt +0 -0
llms/ui/modules/chat/index.mjs

@@ -1,9 +1,10 @@
 
  import { ref, computed, watch, nextTick, inject } from 'vue'
  import { useRouter } from 'vue-router'
- import { $$, createElement, lastRightPart } from "@servicestack/client"
+ import { $$, createElement, lastRightPart, ApiResult, createErrorStatus } from "@servicestack/client"
  import SettingsDialog, { useSettings } from './SettingsDialog.mjs'
  import ChatBody from './ChatBody.mjs'
+ import HomeTools from './HomeTools.mjs'
  import { AppContext } from '../../ctx.mjs'
 
  const imageExts = 'png,webp,jpg,jpeg,gif,bmp,svg,tiff,ico'.split(',')

@@ -55,6 +56,11 @@ const imageAspectRatios = {
  '1344×768': '16:9',
  '1536×672': '21:9',
  }
+ // Reverse lookup
+ const imageRatioSizes = Object.entries(imageAspectRatios).reduce((acc, [key, value]) => {
+ acc[value] = key
+ return acc
+ }, {})
 
  const svg = {
  clipboard: `<svg class="w-6 h-6" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><g fill="none"><path d="M8 5H6a2 2 0 0 0-2 2v12a2 2 0 0 0 2 2h10a2 2 0 0 0 2-2v-1M8 5a2 2 0 0 0 2 2h2a2 2 0 0 0 2-2M8 5a2 2 0 0 1 2-2h2a2 2 0 0 1 2 2m0 0h2a2 2 0 0 1 2 2v3m2 4H10m0 0l3-3m-3 3l3 3" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"></path></g></svg>`,

@@ -149,7 +155,15 @@ export function useChatPrompt(ctx) {
 
  function getSelectedModel() {
  const candidates = [ctx.state.selectedModel, ctx.state.config.defaults.text.model]
- return candidates.map(name => name && getModel(name)).find(x => !!x)
+ const ret = candidates.map(name => name && getModel(name)).find(x => !!x)
+ if (!ret) {
+ // Try to find a model in the latest threads
+ for (const thread in ctx.threads.threads) {
+ const model = thread.model && getModel(thread.model)
+ if (model) return model
+ }
+ }
+ return ret
  }
 
  function setSelectedModel(model) {

@@ -165,7 +179,234 @@ export function useChatPrompt(ctx) {
  return getModel(model)?.provider
  }
 
+ const canGenerateImage = model => {
+ return model?.modalities?.output?.includes('image')
+ }
+ const canGenerateAudio = model => {
+ return model?.modalities?.output?.includes('audio')
+ }
+
+ function applySettings(request) {
+ settings.applySettings(request)
+ }
+
+ function createContent({ text, files }) {
+ let content = []
+
+ // Add Text Block
+ if (text) {
+ content.push({ type: 'text', text })
+ }
+
+ // Add Attachment Blocks
+ if (Array.isArray(files)) {
+ for (const f of files) {
+ const ext = lastRightPart(f.name, '.')
+ if (imageExts.includes(ext)) {
+ content.push({ type: 'image_url', image_url: { url: f.url } })
+ } else if (audioExts.includes(ext)) {
+ content.push({ type: 'input_audio', input_audio: { data: f.url, format: ext } })
+ } else {
+ content.push({ type: 'file', file: { file_data: f.url, filename: f.name } })
+ }
+ }
+ }
+ return content
+ }
+
+ function createRequest({ model, text, files, systemPrompt, aspectRatio }) {
+ // Construct API Request from History
+ const request = {
+ model: model.name,
+ messages: [],
+ metadata: {}
+ }
+
+ // Apply user settings
+ applySettings(request)
+
+ if (systemPrompt) {
+ request.messages = request.messages.filter(m => m.role !== 'system')
+ request.messages.unshift({
+ role: 'system',
+ content: systemPrompt
+ })
+ }
+
+ if (canGenerateImage(model)) {
+ request.image_config = {
+ aspect_ratio: aspectRatio || imageAspectRatios[ctx.state.selectedAspectRatio] || '1:1'
+ }
+ request.modalities = ["image", "text"]
+ }
+ else if (canGenerateAudio(model)) {
+ request.modalities = ["audio", "text"]
+ }
+
+ if (text) {
+ const content = createContent({ text, files })
+ request.messages.push({
+ role: 'user',
+ content
+ })
+ }
+
+ return request
+ }
+
+ async function completion({ request, model, thread, controller, store }) {
+ try {
+ let error
+ if (!model) {
+ if (request.model) {
+ model = getModel(request.model)
+ } else {
+ model = getModel(request.model) ?? getSelectedModel()
+ }
+ }
+
+ if (!model) {
+ return new ApiResult({
+ error: createErrorStatus(`Model ${request.model || ''} not found`, 'NotFound')
+ })
+ }
+
+ if (!request.messages) request.messages = []
+ if (!request.metadata) request.metadata = {}
+
+ if (store && !thread) {
+ const title = getTextContent(request) || 'New Chat'
+ thread = await ctx.threads.startNewThread({ title, model })
+ }
+
+ const threadId = thread?.id || ctx.threads.generateThreadId()
+
+ const ctxRequest = {
+ request,
+ thread,
+ }
+ ctx.chatRequestFilters.forEach(f => f(ctxRequest))
+
+ console.debug('completion.request', request)
+
+ // Send to API
+ const startTime = Date.now()
+ const res = await ctx.post('/v1/chat/completions', {
+ body: JSON.stringify(request),
+ signal: controller?.signal
+ })
+
+ let response = null
+ if (!res.ok) {
+ error = createErrorStatus('', `HTTP ${res.status} ${res.statusText}`)
+ let errorBody = null
+ try {
+ errorBody = await res.text()
+ if (errorBody) {
+ // Try to parse as JSON for better formatting
+ try {
+ const errorJson = JSON.parse(errorBody)
+ const status = errorJson?.responseStatus
+ if (status) {
+ error.errorCode += ` ${status.errorCode}`
+ error.message = status.message
+ error.stackTrace = status.stackTrace
+ } else {
+ error.stackTrace = JSON.stringify(errorJson, null, 2)
+ }
+ } catch (e) {
+ }
+ }
+ } catch (e) {
+ // If we can't read the response body, just use the status
+ }
+ } else {
+ try {
+ response = await res.json()
+ const ctxResponse = {
+ response,
+ thread,
+ }
+ ctx.chatResponseFilters.forEach(f => f(ctxResponse))
+ console.debug('completion.response', JSON.stringify(response, null, 2))
+ } catch (e) {
+ error = createErrorStatus(e.message)
+ }
+ }
+
+ if (response?.error) {
+ error ??= createErrorStatus()
+ error.message = response.error
+ }
+
+ if (error) {
+ ctx.chatErrorFilters.forEach(f => f(error))
+ return new ApiResult({ error })
+ }
+
+ if (!error) {
+ // Add tool history messages if any
+ if (response.tool_history && Array.isArray(response.tool_history)) {
+ for (const msg of response.tool_history) {
+ if (msg.role === 'assistant') {
+ msg.model = model.name // tag with model
+ }
+ if (store) {
+ await ctx.threads.addMessageToThread(threadId, msg)
+ }
+ }
+ }
+
+ // Add assistant response (save entire message including reasoning)
+ const assistantMessage = response.choices?.[0]?.message
+
+ const usage = response.usage
+ if (usage) {
+ if (response.metadata?.pricing) {
+ const [input, output] = response.metadata.pricing.split('/')
+ usage.duration = response.metadata.duration ?? (Date.now() - startTime)
+ usage.input = input
+ usage.output = output
+ usage.tokens = usage.completion_tokens
+ usage.price = usage.output
+ usage.cost = ctx.fmt.tokenCost(usage.prompt_tokens / 1_000_000 * parseFloat(input) + usage.completion_tokens / 1_000_000 * parseFloat(output))
+ }
+ await ctx.threads.logRequest(threadId, model, request, response)
+ }
+ if (store) {
+ assistantMessage.model = model.name
+ await ctx.threads.addMessageToThread(threadId, assistantMessage, usage)
+ }
+
+ nextTick(addCopyButtons)
+
+ return new ApiResult({ response })
+ }
+ } catch (e) {
+ console.log('completion.error', e)
+ return new ApiResult({ error: createErrorStatus(e.message, 'ChatFailed') })
+ }
+ }
+ function getTextContent(chat) {
+ const textMessage = chat.messages.find(m =>
+ m.role === 'user' && Array.isArray(m.content) && m.content.some(c => c.type === 'text'))
+ return textMessage?.content.find(c => c.type === 'text')?.text || ''
+ }
+ function getAnswer(response) {
+ const textMessage = response.choices?.[0]?.message
+ return textMessage?.content || ''
+ }
+ function selectAspectRatio(ratio) {
+ const selectedAspectRatio = imageRatioSizes[ratio] || '1024×1024'
+ console.log(`selectAspectRatio(${ratio})`, selectedAspectRatio)
+ ctx.setState({ selectedAspectRatio })
+ }
+
  return {
+ completion,
+ createContent,
+ createRequest,
+ applySettings,
  messageText,
  attachedFiles,
  errorStatus,

@@ -187,6 +428,11 @@ export function useChatPrompt(ctx) {
  getSelectedModel,
  setSelectedModel,
  getProviderForModel,
+ canGenerateImage,
+ canGenerateAudio,
+ getTextContent,
+ getAnswer,
+ selectAspectRatio,
  }
  }
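The createContent() helper added above maps the prompt text and each attachment to an OpenAI-style content block, keyed on file extension. A minimal sketch of the shape it produces; the file names and data URLs are made-up placeholders, and 'mp3' is assumed to appear in audioExts:

```js
// Hypothetical inputs: f.url would normally be a data: URL captured by the file picker.
const content = ctx.chat.createContent({
    text: 'Describe this photo',
    files: [
        { name: 'photo.png',  url: 'data:image/png;base64,...' },      // -> image_url block
        { name: 'memo.mp3',   url: 'data:audio/mpeg;base64,...' },     // -> input_audio block
        { name: 'report.pdf', url: 'data:application/pdf;base64,...' } // -> file block (fallback)
    ]
})
// content ≈ [
//   { type: 'text', text: 'Describe this photo' },
//   { type: 'image_url', image_url: { url: 'data:image/png;base64,...' } },
//   { type: 'input_audio', input_audio: { data: 'data:audio/mpeg;base64,...', format: 'mp3' } },
//   { type: 'file', file: { file_data: 'data:application/pdf;base64,...', filename: 'report.pdf' } }
// ]
```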
 
@@ -269,7 +515,7 @@ const ChatPrompt = {
  </div>
 
  <!-- Image Aspect Ratio Selector -->
- <div v-if="canGenerateImages" class="min-w-[120px]">
+ <div v-if="$chat.canGenerateImage(model)" class="min-w-[120px]">
  <select name="aspect_ratio" v-model="$state.selectedAspectRatio"
  class="block w-full rounded-md border border-gray-300 dark:border-gray-600 bg-white dark:bg-gray-800 text-xs text-gray-700 dark:text-gray-300 pl-2 pr-6 py-1 focus:ring-blue-500 focus:border-blue-500">
  <option v-for="(ratio, size) in imageAspectRatios" :key="size" :value="size">

@@ -294,7 +540,6 @@ const ChatPrompt = {
  setup(props) {
  const ctx = inject('ctx')
  const config = ctx.state.config
- const ai = ctx.ai
  const router = useRouter()
  const chatPrompt = ctx.chat
  const {

@@ -306,6 +551,7 @@ const ChatPrompt = {
  hasAudio,
  hasFile,
  editingMessageId,
+ getTextContent,
  } = chatPrompt
  const threads = ctx.threads
  const {

@@ -315,11 +561,6 @@ const ChatPrompt = {
  const fileInput = ref(null)
  const refMessage = ref(null)
  const showSettings = ref(false)
- const { applySettings } = ctx.chat.settings
-
- const canGenerateImages = computed(() => {
- return props.model?.modalities?.output?.includes('image')
- })
 
  // File attachments (+) handlers
  const triggerFilePicker = () => {

@@ -450,18 +691,6 @@ const ChatPrompt = {
  }
  }
 
- function getTextContent(chat) {
- const textMessage = chat.messages.find(m =>
- m.role === 'user' && Array.isArray(m.content) && m.content.some(c => c.type === 'text'))
- return textMessage?.content.find(c => c.type === 'text')?.text || ''
- }
-
- function toModelInfo(model) {
- if (!model) return undefined
- const { id, name, provider, cost, modalities } = model
- return ctx.utils.deepClone({ id, name, provider, cost, modalities })
- }
-
  // Send message
  const sendMessage = async () => {
  if (!messageText.value.trim() && !hasImage() && !hasAudio() && !hasFile()) return

@@ -472,55 +701,35 @@
 
  // 1. Construct Structured Content (Text + Attachments)
  let text = messageText.value.trim()
- let content = []
 
 
  messageText.value = ''
-
- // Add Text Block
- content.push({ type: 'text', text: text })
-
- // Add Attachment Blocks
- for (const f of attachedFiles.value) {
- const ext = lastRightPart(f.name, '.')
- if (imageExts.includes(ext)) {
- content.push({ type: 'image_url', image_url: { url: f.url } })
- } else if (audioExts.includes(ext)) {
- content.push({ type: 'input_audio', input_audio: { data: f.url, format: ext } })
- } else {
- content.push({ type: 'file', file: { file_data: f.url, filename: f.name } })
- }
- }
+ let content = ctx.chat.createContent({ text, files: attachedFiles.value })
 
  // Create AbortController for this request
  const controller = new AbortController()
  chatPrompt.abortController.value = controller
  const model = props.model.name
 
- try {
- let threadId
+ let thread
 
+ try {
  // Create thread if none exists
  if (!currentThread.value) {
- const newThread = await threads.createThread({
- title: 'New Chat',
- model,
- info: toModelInfo(props.model),
- })
- threadId = newThread.id
- // Navigate to the new thread URL
- router.push(`${ai.base}/c/${newThread.id}`)
+ thread = await ctx.threads.startNewThread({ model: props.model })
  } else {
- threadId = currentThread.value.id
+ let threadId = currentThread.value.id
  // Update the existing thread's model to match current selection
  await threads.updateThread(threadId, {
  model,
- info: toModelInfo(props.model),
+ info: ctx.utils.toModelInfo(props.model),
  })
+
+ // Get the thread to check for duplicates
+ thread = await threads.getThread(threadId)
  }
 
- // Get the thread to check for duplicates
- let thread = await threads.getThread(threadId)
+ let threadId = thread.id
 
  // Handle Editing / Redo Logic
  if (editingMessageId.value) {

@@ -528,7 +737,7 @@ const ChatPrompt = {
  const messageExists = thread.messages.find(m => m.id === editingMessageId.value)
  if (messageExists) {
  // Update the message content
- await threads.updateMessageInThread(threadId, editingMessageId.value, { content: content })
+ await threads.updateMessageInThread(threadId, editingMessageId.value, { content })
  // Redo from this message (clears subsequent)
  await threads.redoMessageFromThread(threadId, editingMessageId.value)
 

@@ -570,138 +779,25 @@ const ChatPrompt = {
 
  isGenerating.value = true
 
- // Construct API Request from History
- const request = {
- model,
- messages: [],
- metadata: {}
- }
+ const request = ctx.chat.createRequest({ model: props.model })
 
  // Add History
- thread.messages.forEach(m => {
+ thread?.messages.forEach(m => {
  request.messages.push({
  role: m.role,
  content: m.content
  })
  })
+ request.metadata.threadId = thread.id
 
- // Apply user settings
- applySettings(request)
-
- if (canGenerateImages.value) {
- request.image_config = {
- aspect_ratio: imageAspectRatios[ctx.state.selectedAspectRatio] || '1:1'
- }
- request.modalities = ["image", "text"]
- }
-
- request.metadata.threadId = threadId
-
- const ctxRequest = {
- request,
- thread,
- }
- ctx.chatRequestFilters.forEach(f => f(ctxRequest))
-
- console.debug('chatRequest', request)
-
- // Send to API
- const startTime = Date.now()
- const res = await ai.post('/v1/chat/completions', {
- body: JSON.stringify(request),
- signal: controller.signal
- })
-
- let response = null
- if (!res.ok) {
- errorStatus.value = {
- errorCode: `HTTP ${res.status} ${res.statusText}`,
- message: null,
- stackTrace: null
- }
- let errorBody = null
- try {
- errorBody = await res.text()
- if (errorBody) {
- // Try to parse as JSON for better formatting
- try {
- const errorJson = JSON.parse(errorBody)
- const status = errorJson?.responseStatus
- if (status) {
- errorStatus.value.errorCode += ` ${status.errorCode}`
- errorStatus.value.message = status.message
- errorStatus.value.stackTrace = status.stackTrace
- } else {
- errorStatus.value.stackTrace = JSON.stringify(errorJson, null, 2)
- }
- } catch (e) {
- }
- }
- } catch (e) {
- // If we can't read the response body, just use the status
- }
- } else {
- try {
- response = await res.json()
- const ctxResponse = {
- response,
- thread,
- }
- ctx.chatResponseFilters.forEach(f => f(ctxResponse))
- console.debug('chatResponse', JSON.stringify(response, null, 2))
- } catch (e) {
- errorStatus.value = {
- errorCode: 'Error',
- message: e.message,
- stackTrace: null
- }
- }
- }
-
- if (response?.error) {
- errorStatus.value ??= {
- errorCode: 'Error',
- }
- errorStatus.value.message = response.error
- }
-
- if (!errorStatus.value) {
- // Add tool history messages if any
- if (response.tool_history && Array.isArray(response.tool_history)) {
- for (const msg of response.tool_history) {
- if (msg.role === 'assistant') {
- msg.model = props.model.name // tag with model
- }
- await threads.addMessageToThread(threadId, msg)
- }
- }
-
- // Add assistant response (save entire message including reasoning)
- const assistantMessage = response.choices?.[0]?.message
-
- const usage = response.usage
- if (usage) {
- if (response.metadata?.pricing) {
- const [input, output] = response.metadata.pricing.split('/')
- usage.duration = response.metadata.duration ?? (Date.now() - startTime)
- usage.input = input
- usage.output = output
- usage.tokens = usage.completion_tokens
- usage.price = usage.output
- usage.cost = ctx.fmt.tokenCost(usage.prompt_tokens / 1_000_000 * parseFloat(input) + usage.completion_tokens / 1_000_000 * parseFloat(output))
- }
- await threads.logRequest(threadId, props.model, request, response)
- }
- assistantMessage.model = props.model.name
- await threads.addMessageToThread(threadId, assistantMessage, usage)
-
- nextTick(addCopyButtons)
-
+ const api = await ctx.chat.completion({ request, thread, controller, store: true })
+ if (api.response) {
+ // success
  attachedFiles.value = []
- // Error will be cleared when user sends next message (no auto-timeout)
  } else {
- ctx.chatErrorFilters.forEach(f => f(errorStatus.value))
+ errorStatus.value = api.error
  }
+
 
  } catch (error) {
  // Check if the error is due to abort
  if (error.name === 'AbortError') {

@@ -734,6 +830,14 @@ const ChatPrompt = {
  ctx.setPrefs({ aspectRatio: newValue })
  })
 
+ watch(() => ctx.layout.path, newValue => {
+ if (newValue === '/' || newValue.startsWith('/c/')) {
+ nextTick(() => {
+ refMessage.value?.focus()
+ })
+ }
+ })
+
  return {
  isGenerating,
  attachedFiles,

@@ -753,7 +857,6 @@ const ChatPrompt = {
  cancelRequest,
  addNewLine,
  imageAspectRatios,
- canGenerateImages,
  }
  }
  }

@@ -766,6 +869,7 @@ export default {
  SettingsDialog,
  ChatPrompt,
  ChatBody,
+ HomeTools,
  Home,
  })
  ctx.setGlobals({
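Taken together, the hunks above move the chat transport out of the ChatPrompt component and into useChatPrompt(ctx), so other modules (such as the new HomeTools) can drive a completion programmatically. A minimal sketch under that assumption; the question string is a placeholder, and error handling is reduced to a console log:

```js
// Assumes `ctx` is the injected app context and ctx.chat is the object returned
// by useChatPrompt(ctx); store: false skips persisting the exchange to a thread.
async function askOnce(ctx, question) {
    const model = ctx.chat.getSelectedModel()
    if (!model) return null                                  // no model configured yet
    const request = ctx.chat.createRequest({ model, text: question })
    const api = await ctx.chat.completion({ request, model, store: false })
    if (api.error) {
        console.error(api.error.errorCode, api.error.message)
        return null
    }
    return ctx.chat.getAnswer(api.response)                  // text of choices[0].message
}
```

When the response carries response.metadata.pricing (an input/output price pair per 1M tokens split on '/', such as "0.30/2.50"), completion() derives usage.cost from it: for example, 1,200 prompt and 450 completion tokens give 1200/1e6 × 0.30 + 450/1e6 × 2.50 ≈ 0.0015.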
llms/ui/modules/layout.mjs

@@ -1,44 +1,21 @@
  import { computed, inject, ref, onMounted, onUnmounted } from "vue"
  import { toJsonObject } from "../utils.mjs"
 
- const ToggleSidebarButton = {
- template: `
- <button type="button"
- @click="$emit('toggle-sidebar')"
- class="group relative text-gray-500 dark:text-gray-400 hover:text-blue-600 dark:hover:text-blue-400 focus:outline-none transition-colors"
- title="Collapse sidebar"
- >
- <div class="relative size-5">
- <!-- Default sidebar icon -->
- <svg class="absolute inset-0 group-hover:hidden" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
- <rect x="3" y="3" width="18" height="18" rx="2" ry="2"></rect>
- <line x1="9" y1="3" x2="9" y2="21"></line>
- </svg>
- <!-- Hover state: |← icon -->
- <svg class="absolute inset-0 hidden group-hover:block" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path fill="currentColor" d="m10.071 4.929l1.414 1.414L6.828 11H16v2H6.828l4.657 4.657l-1.414 1.414L3 12zM18.001 19V5h2v14z"/></svg>
- </div>
- </button>
- `,
- emits: ['toggle-sidebar'],
- }
-
  const Brand = {
  template: `
  <div class="flex-shrink-0 p-2 border-b border-gray-200 dark:border-gray-700 select-none">
  <div class="flex items-center justify-between">
  <div class="flex items-center space-x-2">
  <button type="button"
- @click="$router.push('/')"
+ @click="$ctx.to('/')"
  class="text-lg font-semibold text-gray-900 dark:text-gray-200 hover:text-blue-600 dark:hover:text-blue-400 focus:outline-none transition-colors"
- title="Go back home"
- >
+ title="Go back home">
  {{ $state.title }}
  </button>
  </div>
  </div>
  </div>
  `,
- emits: ['home', 'toggle-sidebar'],
  }
 
  const Welcome = {

@@ -256,7 +233,6 @@ const SignIn = {
  export default {
  install(ctx) {
  ctx.components({
- ToggleSidebarButton,
  Brand,
  Welcome,
  Avatar,
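layout.mjs registers its components through the ctx.components() hook inside an install(ctx) export, and the new extension entrypoints listed above (llms/extensions/*/ui/index.mjs) presumably plug in the same way. A sketch of that convention for a hypothetical extension; the ExampleBadge component, its template, and the module path are invented for illustration:

```js
// Hypothetical llms/extensions/example/ui/index.mjs following the install(ctx) convention above.
const ExampleBadge = {
    props: { label: String },
    template: `<span class="text-xs text-gray-500 dark:text-gray-400">{{ label }}</span>`,
}

export default {
    install(ctx) {
        // registers the component globally, like Brand / Welcome / Avatar above
        ctx.components({ ExampleBadge })
    }
}
```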
@@ -148,7 +148,7 @@ const ProviderStatus = {
  const onToggle = async (provider, enable) => {
  pending.value = { ...pending.value, [provider]: true }
  try {
- const res = await ai.post(`/providers/${encodeURIComponent(provider)}`, {
+ const res = await ctx.post(`/providers/${encodeURIComponent(provider)}`, {
  body: JSON.stringify(enable ? { enable: true } : { disable: true })
  })
  if (!res.ok) throw new Error(`HTTP ${res.status} ${res.statusText}`)
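This +1/−1 hunk (apparently the ProviderStatus toggle in llms/ui/modules/model-selector.mjs, the remaining +1/−1 UI file in the list above) now routes the call through ctx.post rather than ai.post. A sketch of the same enable/disable request issued outside the component; 'openrouter' is a placeholder provider id, and ctx.post is assumed to add the base path and any auth headers exactly as in the component code:

```js
// Toggle a provider on or off via the /providers/{name} endpoint shown above.
async function setProviderEnabled(ctx, provider, enable) {
    const res = await ctx.post(`/providers/${encodeURIComponent(provider)}`, {
        body: JSON.stringify(enable ? { enable: true } : { disable: true })
    })
    if (!res.ok) throw new Error(`HTTP ${res.status} ${res.statusText}`)
    return res
}

// e.g. await setProviderEnabled(ctx, 'openrouter', false)
```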