@test328932/test328933 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,813 @@
1
+ import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
2
+ import { CreateMLCEngine, deleteModelAllInfoInCache } from '@mlc-ai/web-llm'
3
+
4
// Model pre-selected in the drawer's model picker on first render.
const DEFAULT_MODEL_ID = 'Qwen3-1.7B-q4f16_1-MLC'
// Prebuilt WebLLM model ids the user can choose from, roughly smallest-to-largest.
const AVAILABLE_MODELS = [
  'SmolLM2-135M-Instruct-q0f16-MLC',
  'SmolLM2-360M-Instruct-q4f16_1-MLC',
  'Qwen3-0.6B-q4f16_1-MLC',
  'Qwen3-1.7B-q4f16_1-MLC',
  'Qwen3-4B-q4f16_1-MLC',
]
// Character budget for source text sent to the local model (see trimSource).
const MAX_SOURCE_CHARS = 8000
// id of the <style> element injected once by installDrawerStyles().
const STYLE_TAG_ID = 'test328932-llm-drawer-style'
14
+
15
/**
 * Cap very large source text so it fits the local model's context budget.
 * Inputs up to MAX_SOURCE_CHARS characters are returned untouched; anything
 * longer is collapsed to its first 5200 and last 2800 characters, joined by
 * an inline truncation marker.
 *
 * @param {string} source - Raw source-file text.
 * @returns {string} The original text, or a head+marker+tail digest of it.
 */
function trimSource(source) {
  const fitsBudget = source.length <= MAX_SOURCE_CHARS
  if (fitsBudget) return source

  // Keep more of the head than the tail: the top of a file (imports,
  // constants, main definitions) usually explains the most.
  const headLength = 5200
  const tailLength = 2800
  const head = source.slice(0, headLength)
  const tail = source.slice(-tailLength)
  return `${head}\n/* ... source truncated for local inference ... */\n${tail}`
}
21
+
22
/**
 * Normalize an unknown thrown value into a human-readable message.
 * Error instances contribute their own `.message`; any other thrown value
 * collapses to a generic fallback string.
 *
 * @param {unknown} error - Whatever was caught.
 * @returns {string} Display-ready error text.
 */
function getErrorMessage(error) {
  const isRealError = error instanceof Error
  return isRealError ? error.message : 'Unexpected LLM error'
}
26
+
27
/**
 * Heuristic: does an error message look like a WebGPU availability problem?
 * Used below to put the panel into the 'unsupported' state instead of the
 * generic 'error' state.
 *
 * @param {string} message - Error text produced by getErrorMessage.
 * @returns {boolean} True when the message mentions GPU/adapter terms.
 */
function looksLikeWebGPUError(message) {
  const gpuHints = /webgpu|adapter|gpu/i
  return gpuHints.test(message)
}
30
+
31
/**
 * Inject the drawer's stylesheet into <head> exactly once.
 * No-ops outside the browser (SSR / tests, where `document` is undefined)
 * and when a tag with STYLE_TAG_ID already exists.
 *
 * @returns {undefined}
 */
function installDrawerStyles() {
  // Both guards collapse into one early return; short-circuit keeps the
  // DOM lookup from running when `document` is missing.
  if (typeof document === 'undefined' || document.getElementById(STYLE_TAG_ID) !== null) {
    return
  }

  const styleTag = document.createElement('style')
  styleTag.id = STYLE_TAG_ID
  styleTag.textContent = `
    .llm-drawer-toggle {
      display: none;
    }

    .llm-drawer {
      display: flex;
      flex-direction: column;
      border: 1px solid #243b53;
      border-radius: 8px;
      background: #0d2137;
      overflow: hidden;
      margin-top: 12px;
    }

    .llm-drawer[data-open="false"] {
      display: none;
    }

    .llm-drawer-header {
      display: flex;
      align-items: center;
      justify-content: space-between;
      gap: 8px;
      padding: 8px 10px;
      border-bottom: 1px solid #243b53;
      background: #132f4c;
    }

    .llm-drawer-title {
      margin: 0;
      font-size: 0.85rem;
      font-weight: 700;
      color: #f0f4f8;
    }

    .llm-drawer-close {
      width: 26px;
      height: 26px;
      border-radius: 6px;
      border: 1px solid #486581;
      background: transparent;
      color: #90cdf4;
      font-weight: 700;
      cursor: pointer;
    }

    .llm-drawer-content {
      flex: 1;
      overflow: auto;
      padding: 10px;
      display: flex;
      flex-direction: column;
      gap: 10px;
      font-size: 12px;
      color: #f0f4f8;
    }

    .llm-drawer-section-title {
      margin: 0;
      font-size: 0.75rem;
      font-weight: 700;
      text-transform: uppercase;
      letter-spacing: 0.04em;
      color: #90cdf4;
    }

    .llm-drawer-controls {
      display: grid;
      grid-template-columns: 1fr;
      gap: 8px;
    }

    .llm-drawer-select {
      width: 100%;
      border: 1px solid #486581;
      border-radius: 6px;
      background: #1a3a5c;
      color: #f0f4f8;
      padding: 6px;
      font-size: 11px;
    }

    .llm-drawer-row {
      display: flex;
      gap: 8px;
      align-items: center;
    }

    .llm-drawer-btn {
      border: 1px solid #486581;
      border-radius: 6px;
      padding: 5px 8px;
      background: transparent;
      color: #90cdf4;
      font-size: 11px;
      cursor: pointer;
    }

    .llm-drawer-btn:disabled {
      opacity: 0.6;
      cursor: not-allowed;
    }

    .llm-drawer-btn-primary {
      border-color: #2d6a4f;
      background: #2d6a4f;
      color: #fff;
      font-weight: 600;
    }

    .llm-drawer-status {
      margin: 0;
      font-size: 11px;
      color: #b0c4de;
      line-height: 1.4;
    }

    .llm-drawer-muted {
      margin: 0;
      color: #6b8aad;
      font-size: 11px;
    }

    .llm-drawer-model-list {
      list-style: none;
      margin: 0;
      padding: 0;
      display: flex;
      flex-direction: column;
      gap: 6px;
    }

    .llm-drawer-model-item {
      display: grid;
      grid-template-columns: 1fr auto auto;
      gap: 4px;
      align-items: center;
      padding: 5px;
      border: 1px solid #243b53;
      border-radius: 6px;
      background: #1a3a5c;
    }

    .llm-drawer-model-name {
      margin: 0;
      font-size: 10px;
      line-height: 1.3;
      overflow: hidden;
      text-overflow: ellipsis;
      white-space: nowrap;
      color: #e2e8f0;
      font-family: ui-monospace, SFMono-Regular, Menlo, Consolas, monospace;
    }

    .llm-drawer-progress {
      list-style: none;
      margin: 0;
      padding: 0;
      display: flex;
      flex-direction: column;
      gap: 6px;
    }

    .llm-drawer-progress-item {
      display: flex;
      justify-content: space-between;
      gap: 6px;
      padding: 5px 6px;
      border: 1px solid #243b53;
      border-radius: 6px;
      background: #1a3a5c;
      font-size: 10px;
      color: #e2e8f0;
      font-family: ui-monospace, SFMono-Regular, Menlo, Consolas, monospace;
    }

    .llm-drawer-error {
      margin: 0;
      color: #e63946;
      font-size: 11px;
    }

    .llm-drawer-output {
      width: 100%;
      min-height: 100px;
      border: 1px solid #486581;
      border-radius: 6px;
      background: #1a3a5c;
      color: #e2e8f0;
      padding: 6px;
      font-size: 10px;
      line-height: 1.45;
      resize: vertical;
      font-family: ui-monospace, SFMono-Regular, Menlo, Consolas, monospace;
      box-sizing: border-box;
    }

    .llm-drawer-details {
      border: 1px solid #243b53;
      border-radius: 6px;
      background: #132f4c;
      padding: 6px 8px;
    }

    .llm-drawer-summary {
      cursor: pointer;
      font-size: 11px;
      font-weight: 600;
      color: #90cdf4;
    }

    .llm-drawer-source-explorer {
      display: flex;
      flex-direction: column;
      gap: 8px;
    }

    .llm-drawer-tree-panel {
      max-height: 180px;
      overflow: auto;
      border: 1px solid #243b53;
      border-radius: 6px;
      padding: 6px;
      background: #1a3a5c;
    }

    .llm-drawer-tree-list {
      list-style: none;
      padding-left: 8px;
      margin: 0;
      display: flex;
      flex-direction: column;
      gap: 6px;
    }

    .llm-drawer-tree-list li {
      display: block;
      padding: 0;
      border: 0;
      background: transparent;
    }

    .llm-drawer-tree-list details > summary {
      cursor: pointer;
      color: #b0c4de;
      font-weight: 600;
      margin-bottom: 4px;
      font-size: 11px;
    }

    .llm-drawer-tree-file-btn {
      width: 100%;
      text-align: left;
      background: transparent;
      color: #90cdf4;
      padding: 3px 5px;
      border-radius: 4px;
      border: none;
      font-size: 11px;
      cursor: pointer;
    }

    .llm-drawer-tree-file-btn:hover {
      background: #243b53;
    }

    .llm-drawer-tree-file-btn.active {
      background: #243b53;
      color: #48bb78;
    }

    @media (max-width: 760px) {
      .llm-drawer {
        width: 100%;
      }
    }
  `

  document.head.appendChild(styleTag)
}
318
+
319
/**
 * Dev-tool drawer that explains project source files with an in-browser LLM.
 *
 * Data flow: the panel fetches file listings and contents from three
 * dev-server endpoints (`/__source/tree`, `/__source/file`,
 * `/__source/compressed` — presumably served by a companion Vite plugin;
 * TODO confirm), and runs inference locally through @mlc-ai/web-llm, which
 * requires WebGPU support in the browser.
 *
 * @param {{ sourcePath?: string }} props - `sourcePath` is the file sent to
 *   the model when the Explain button is clicked (defaults to 'App.tsx').
 */
export default function LlmExplainPanel({ sourcePath = 'App.tsx' }) {
  // --- Drawer / LLM state ---
  const [isOpen, setIsOpen] = useState(true)
  const [activeModelId, setActiveModelId] = useState(DEFAULT_MODEL_ID)
  // One of: 'idle' | 'loading' | 'generating' | 'ready' | 'error' | 'unsupported'
  // (all values assigned via setLlmState below).
  const [llmState, setLlmState] = useState('idle')
  const [progressItems, setProgressItems] = useState([])
  const [explanationText, setExplanationText] = useState('')
  const [llmError, setLlmError] = useState('')
  const [debugMessages, setDebugMessages] = useState([])
  // NOTE: these count streamed chunks, not model tokens — see the stream
  // loop in explain() and the "chunks/s" label in statusText.
  const [tokenCount, setTokenCount] = useState(0)
  const [tokensPerSec, setTokensPerSec] = useState(0)
  const [cachedModels, setCachedModels] = useState([])
  const [compressedSource, setCompressedSource] = useState('')
  const [compressedCharCount, setCompressedCharCount] = useState(0)

  // --- Source-explorer state ---
  const [tree, setTree] = useState([])
  const [treeError, setTreeError] = useState('')
  const [isTreeLoading, setIsTreeLoading] = useState(false)
  const [openedFile, setOpenedFile] = useState(null)
  const [sourceError, setSourceError] = useState('')
  const [activePath, setActivePath] = useState('')

  // Refs (not state) so the streaming loop can read the latest values
  // without re-rendering: the WebLLM engine instance, the id of the
  // in-flight request (used to detect stale requests), and the timestamp
  // of the first streamed chunk (for the chunks/s rate).
  const engineRef = useRef(null)
  const currentRequestIdRef = useRef(null)
  const firstChunkAtRef = useRef(null)

  // Inject the drawer stylesheet once on mount.
  useEffect(() => {
    installDrawerStyles()
  }, [])

  // Load the source tree once on mount.
  useEffect(() => {
    const loadTree = async () => {
      setIsTreeLoading(true)
      setTreeError('')
      try {
        const response = await fetch('/__source/tree')
        if (!response.ok) throw new Error(`Tree request failed (${response.status})`)
        const payload = await response.json()
        if (!Array.isArray(payload)) throw new Error('Unexpected tree response')
        setTree(payload)
      } catch (error) {
        setTreeError(error instanceof Error ? error.message : 'Failed to load source tree')
      } finally {
        setIsTreeLoading(false)
      }
    }
    loadTree()
  }, [])

  // Fetch one file's contents for the Source Explorer textarea.
  // Expects a `{ path, content }` JSON payload from the dev endpoint.
  const openSourceFile = useCallback(async (filePath) => {
    setSourceError('')
    setActivePath(filePath)
    try {
      const response = await fetch(`/__source/file?path=${encodeURIComponent(filePath)}`)
      if (!response.ok) throw new Error(`File request failed (${response.status})`)
      const payload = await response.json()
      if (!payload || typeof payload.path !== 'string' || typeof payload.content !== 'string') {
        throw new Error('Unexpected file response')
      }
      setOpenedFile(payload)
    } catch (error) {
      setSourceError(error instanceof Error ? error.message : 'Failed to load file')
    }
  }, [])

  // Recursively render the file tree: directories as open <details>,
  // files as buttons that load their source on click. Nodes are expected
  // to carry { path, name, type, children? } — TODO confirm against the
  // tree endpoint's schema.
  const renderSourceTree = useCallback((nodes) => {
    return (
      <ul className="llm-drawer-tree-list">
        {nodes.map(node => (
          <li key={node.path}>
            {node.type === 'dir' ? (
              <details open>
                <summary>{node.name}</summary>
                {node.children && node.children.length > 0
                  ? renderSourceTree(node.children)
                  : <p style={{ color: '#888', fontSize: '11px', marginLeft: 8 }}>Empty</p>}
              </details>
            ) : (
              <button
                className={`llm-drawer-tree-file-btn${activePath === node.path ? ' active' : ''}`}
                onClick={() => openSourceFile(node.path)}
              >
                {node.name}
              </button>
            )}
          </li>
        ))}
      </ul>
    )
  }, [activePath, openSourceFile])

  // Text shown in the explorer textarea: "<path>:\n<content>\n------".
  const selectedSourceText = openedFile
    ? `${openedFile.path}:\n${openedFile.content}\n------`
    : ''

  // Prepend a timestamped line to the debug log, capped at 250 entries
  // (newest first).
  const pushDebugMessage = useCallback((message) => {
    const timestamp = new Date().toLocaleTimeString()
    setDebugMessages((prev) => [`[${timestamp}] ${message}`, ...prev].slice(0, 250))
  }, [])

  // Scan the Cache Storage entries WebLLM writes under 'webllm/config' and
  // list the distinct model ids found in cached URLs (path segments ending
  // in '-MLC'). Any failure — or no Cache API at all — yields an empty list.
  const refreshCacheList = useCallback(async () => {
    if (!('caches' in self)) {
      setCachedModels([])
      return
    }

    try {
      const cache = await caches.open('webllm/config')
      const requests = await cache.keys()
      const models = new Set()

      for (const req of requests) {
        const parts = new URL(req.url).pathname.split('/')
        const modelPart = parts.find((part) => part.endsWith('-MLC'))
        if (modelPart) models.add(modelPart)
      }

      setCachedModels(Array.from(models))
    } catch {
      setCachedModels([])
    }
  }, [])

  // Best-effort fetch of the dev server's pre-compressed source bundle;
  // failures are deliberately silent (this is a dev-only panel).
  const fetchCompressedSource = useCallback(async () => {
    try {
      const res = await fetch('/__source/compressed')
      if (!res.ok) return
      const data = await res.json()
      setCompressedSource(data.content ?? '')
      setCompressedCharCount(data.charCount ?? 0)
    } catch {
      // Ignore network or parse errors in dev panel.
    }
  }, [])

  // Initial data load, plus re-fetch of the compressed bundle whenever the
  // dev server pushes a 'source-compressed:update' HMR event (Vite-style
  // import.meta.hot API; no-op in production builds where hot is undefined).
  useEffect(() => {
    fetchCompressedSource()
    refreshCacheList()

    if (!import.meta.hot) return

    const onSourceUpdate = () => {
      fetchCompressedSource()
    }

    import.meta.hot.on('source-compressed:update', onSourceUpdate)
    return () => {
      import.meta.hot?.off?.('source-compressed:update', onSourceUpdate)
    }
  }, [fetchCompressedSource, refreshCacheList])

  // On unmount, drop the engine reference and invalidate any in-flight
  // request id so the streaming loop in explain() stops updating state.
  useEffect(() => {
    return () => {
      engineRef.current = null
      currentRequestIdRef.current = null
    }
  }, [])

  // Lazily create (and memoize in engineRef) a WebLLM engine for the active
  // model. Throws early when WebGPU is unavailable; download progress is
  // mirrored into the debug log and the progress list.
  const ensureEngine = useCallback(async () => {
    if (engineRef.current) return engineRef.current

    if (typeof navigator === 'undefined' || !('gpu' in navigator)) {
      throw new Error('WebGPU is required but not available in this browser.')
    }

    pushDebugMessage(`Creating WebLLM engine (${activeModelId})...`)
    setLlmState('loading')

    const engine = await CreateMLCEngine(activeModelId, {
      initProgressCallback: (progress) => {
        const pct = Math.round((progress?.progress ?? 0) * 100)
        const text = progress?.text ?? 'model-files'
        pushDebugMessage(`Progress: ${text} ${pct}%`)
        // Single-row progress list: latest report replaces the previous one.
        setProgressItems([{ file: text, progress: pct }])
      },
    })

    engineRef.current = engine
    setProgressItems([])
    pushDebugMessage('Engine ready.')

    return engine
  }, [activeModelId, pushDebugMessage])

  // Select a different model: discard the memoized engine and any in-flight
  // request id so the next explain() rebuilds the engine for the new model.
  const switchModel = useCallback((modelId) => {
    engineRef.current = null
    currentRequestIdRef.current = null
    setActiveModelId(modelId)
    setLlmState('idle')
    pushDebugMessage(`Switched to model: ${modelId}`)
  }, [pushDebugMessage])

  // Remove a model's cached files via WebLLM's cache helper, then refresh
  // the cached-model list. The engine ref is dropped first in case the
  // deleted model is the one currently loaded.
  const deleteCachedModel = useCallback(async (modelId) => {
    engineRef.current = null
    currentRequestIdRef.current = null

    pushDebugMessage(`Deleting model: ${modelId}`)
    try {
      await deleteModelAllInfoInCache(modelId)
      pushDebugMessage(`Deleted: ${modelId}`)
    } catch (error) {
      pushDebugMessage(`Error deleting ${modelId}: ${getErrorMessage(error)}`)
    }

    setLlmState('idle')
    refreshCacheList()
  }, [pushDebugMessage, refreshCacheList])

  // Main action: fetch `sourcePath` from the dev server, trim it to the
  // model's budget, and stream an explanation into the output textarea.
  // A per-call request id guards every async step: if the id in
  // currentRequestIdRef no longer matches (model switched, unmounted, or a
  // newer request started), this call stops touching state.
  const explain = useCallback(async () => {
    if (currentRequestIdRef.current) {
      setLlmError('An explanation request is already running. Please wait.')
      pushDebugMessage('Request blocked: another request is in-flight.')
      return
    }

    // crypto.randomUUID may be missing on older/insecure contexts; fall
    // back to a timestamp+random id (uniqueness only matters per session).
    const requestId = typeof crypto !== 'undefined' && crypto.randomUUID
      ? crypto.randomUUID()
      : `${Date.now()}-${Math.random()}`

    currentRequestIdRef.current = requestId
    firstChunkAtRef.current = null
    setLlmError('')
    setExplanationText('')
    setTokenCount(0)
    setTokensPerSec(0)

    try {
      const engine = await ensureEngine()
      if (currentRequestIdRef.current !== requestId) return

      refreshCacheList()
      setLlmState('generating')

      pushDebugMessage(`Fetching source file: ${sourcePath}`)
      const response = await fetch(`/__source/file?path=${encodeURIComponent(sourcePath)}`)
      if (!response.ok) {
        throw new Error(`Source request failed (${response.status})`)
      }

      const payload = await response.json()
      if (!payload || typeof payload.content !== 'string') {
        throw new Error('Unexpected source response')
      }

      const sourceCode = trimSource(payload.content)
      if (sourceCode.length !== payload.content.length) {
        pushDebugMessage(`Source trimmed: ${payload.content.length} -> ${sourceCode.length} chars.`)
      }

      pushDebugMessage('Starting streamed generation...')
      // temperature 0 for deterministic output; max_tokens caps run time.
      const chunks = await engine.chat.completions.create({
        messages: [
          {
            role: 'user',
            content:
              `Explain this code in 5 to 10 sentences. ` +
              `Describe what the app does, the main user interactions, and data flow.\n\n${sourceCode}`,
          },
        ],
        stream: true,
        max_tokens: 768,
        temperature: 0,
      })

      let count = 0
      for await (const chunk of chunks) {
        // Bail out silently if this request was superseded mid-stream.
        if (currentRequestIdRef.current !== requestId) break

        const delta = chunk?.choices?.[0]?.delta?.content
        if (!delta) continue

        count += 1
        if (!firstChunkAtRef.current) {
          firstChunkAtRef.current = performance.now()
        }

        setTokenCount(count)
        // Rate is computed from the first chunk's arrival, so the model's
        // initial latency is excluded; skip the first ~100ms to avoid
        // divide-by-near-zero spikes.
        const elapsed = (performance.now() - firstChunkAtRef.current) / 1000
        if (elapsed > 0.1) {
          setTokensPerSec(Math.round((count / elapsed) * 10) / 10)
        }

        setExplanationText((prev) => prev + delta)
      }

      // Only the still-current request may mark completion and release
      // the in-flight lock.
      if (currentRequestIdRef.current === requestId) {
        pushDebugMessage(`Generation complete. Chunks: ${count}.`)
        setLlmState('ready')
        currentRequestIdRef.current = null
      }
    } catch (error) {
      const message = getErrorMessage(error)
      pushDebugMessage(`Generation error: ${message}`)
      setLlmError(message)
      // GPU-looking failures get the dedicated 'unsupported' state so the
      // Explain button is disabled rather than retried.
      setLlmState(looksLikeWebGPUError(message) ? 'unsupported' : 'error')
      currentRequestIdRef.current = null
    }
  }, [ensureEngine, pushDebugMessage, refreshCacheList, sourcePath])

  // Model download or generation in progress — disables most controls.
  const isBusy = llmState === 'loading' || llmState === 'generating'

  // Label for the primary Explain button, derived from llmState.
  const buttonLabel = useMemo(() => {
    if (llmState === 'loading') return 'Loading model...'
    if (llmState === 'generating') return 'Explaining...'
    if (llmState === 'unsupported') return 'Unsupported'
    return `Explain ${sourcePath}`
  }, [llmState, sourcePath])

  // Human-readable status line under the controls.
  const statusText = useMemo(() => {
    if (llmState === 'loading') return 'Downloading model files...'
    if (llmState === 'generating') {
      if (tokensPerSec > 0) return `Generating... ${tokenCount} chunks (${tokensPerSec} chunks/s)`
      return 'Generating explanation...'
    }
    if (llmState === 'ready') return 'Model ready.'
    if (llmState === 'unsupported') return 'WebGPU is required for this model.'
    if (llmState === 'error') return 'Could not generate explanation.'
    return 'Open drawer and click Explain.'
  }, [llmState, tokenCount, tokensPerSec])

  // Layout: a toggle button (hidden by default via CSS) plus the drawer
  // itself, whose visibility is driven by the data-open attribute. The
  // drawer stacks: model controls, cached-model list, download progress,
  // error line, streamed output, and three collapsible sections
  // (compressed source, source explorer, debug log).
  return (
    <>
      <button
        type="button"
        className="llm-drawer-toggle"
        onClick={() => setIsOpen((prev) => !prev)}
        title={isOpen ? 'Hide LLM drawer' : 'Show LLM drawer'}
      >
        {isOpen ? '‹' : '›'}
      </button>

      <aside className="llm-drawer" data-open={isOpen ? 'true' : 'false'}>
        <div className="llm-drawer-header">
          <p className="llm-drawer-title">LLM + Source</p>
          <button
            type="button"
            className="llm-drawer-close"
            onClick={() => setIsOpen(false)}
            title="Close drawer"
          >
            ×
          </button>
        </div>

        <div className="llm-drawer-content">
          <p className="llm-drawer-section-title">Explain</p>
          <div className="llm-drawer-controls">
            <select
              value={activeModelId}
              onChange={(event) => switchModel(event.target.value)}
              disabled={isBusy}
              className="llm-drawer-select"
            >
              {AVAILABLE_MODELS.map((id) => (
                <option key={id} value={id}>{id}</option>
              ))}
            </select>

            <div className="llm-drawer-row">
              <button
                type="button"
                className="llm-drawer-btn llm-drawer-btn-primary"
                onClick={explain}
                disabled={isBusy || llmState === 'unsupported'}
              >
                {buttonLabel}
              </button>
              <button
                type="button"
                className="llm-drawer-btn"
                onClick={refreshCacheList}
                disabled={isBusy}
              >
                Refresh cache
              </button>
            </div>

            <p className="llm-drawer-status">{statusText}</p>
          </div>

          <p className="llm-drawer-section-title">Downloaded Models</p>
          {cachedModels.length === 0 ? (
            <p className="llm-drawer-muted">No cached models found yet.</p>
          ) : (
            <ul className="llm-drawer-model-list">
              {cachedModels.map((name) => (
                <li key={name} className="llm-drawer-model-item">
                  <p className="llm-drawer-model-name">
                    {name}
                    {name === activeModelId ? ' (active)' : ''}
                  </p>
                  {name !== activeModelId && (
                    <button
                      type="button"
                      className="llm-drawer-btn"
                      onClick={() => switchModel(name)}
                      disabled={isBusy}
                    >
                      Use
                    </button>
                  )}
                  <button
                    type="button"
                    className="llm-drawer-btn"
                    onClick={() => deleteCachedModel(name)}
                    disabled={isBusy}
                  >
                    Remove
                  </button>
                </li>
              ))}
            </ul>
          )}

          {progressItems.length > 0 && (
            <ul className="llm-drawer-progress">
              {progressItems.map((item, index) => (
                <li key={`${item.file}-${index}`} className="llm-drawer-progress-item">
                  <span>{item.file}</span>
                  <span>{Math.round(item.progress)}%</span>
                </li>
              ))}
            </ul>
          )}

          {llmError && <p className="llm-drawer-error">{llmError}</p>}

          <textarea
            className="llm-drawer-output"
            readOnly
            value={explanationText}
            placeholder="Explanation will stream here after you click Explain."
          />

          <details className="llm-drawer-details" open>
            <summary className="llm-drawer-summary">
              Compressed Source ({compressedCharCount} chars)
            </summary>
            <textarea
              className="llm-drawer-output"
              readOnly
              value={compressedSource}
              placeholder="Compressed source will appear here when files change."
              style={{ minHeight: 220, marginTop: 8 }}
            />
          </details>

          <details className="llm-drawer-details" open>
            <summary className="llm-drawer-summary">Source Explorer</summary>
            <div className="llm-drawer-source-explorer" style={{ marginTop: 8 }}>
              {isTreeLoading && <p className="llm-drawer-muted">Loading source tree...</p>}
              {treeError && <p className="llm-drawer-error">{treeError}</p>}
              {!isTreeLoading && !treeError && (
                <>
                  <div className="llm-drawer-tree-panel">
                    {tree.length > 0
                      ? renderSourceTree(tree)
                      : <p className="llm-drawer-muted">No source files found.</p>}
                  </div>
                  {sourceError && <p className="llm-drawer-error">{sourceError}</p>}
                  <textarea
                    className="llm-drawer-output"
                    readOnly
                    value={selectedSourceText}
                    placeholder="Click a file above to view its source."
                    style={{ minHeight: 180 }}
                  />
                </>
              )}
            </div>
          </details>

          <details className="llm-drawer-details" open>
            <summary className="llm-drawer-summary">Explain Debug Log</summary>
            <div className="llm-drawer-row" style={{ marginTop: 8 }}>
              <button
                className="llm-drawer-btn"
                onClick={() => setDebugMessages([])}
                type="button"
              >
                Clear Log
              </button>
            </div>
            <textarea
              className="llm-drawer-output"
              readOnly
              value={debugMessages.join('\n')}
              placeholder="Debug messages will appear here."
              style={{ minHeight: 150, marginTop: 8 }}
            />
          </details>
        </div>
      </aside>
    </>
  )
}