chat-console 0.2.0__py3-none-any.whl → 0.2.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- app/__init__.py +1 -1
- app/api/ollama.py +741 -1
- app/main.py +192 -69
- app/ui/model_browser.py +1146 -0
- app/utils.py +23 -22
- {chat_console-0.2.0.dist-info → chat_console-0.2.5.dist-info}/METADATA +1 -1
- {chat_console-0.2.0.dist-info → chat_console-0.2.5.dist-info}/RECORD +11 -10
- {chat_console-0.2.0.dist-info → chat_console-0.2.5.dist-info}/WHEEL +0 -0
- {chat_console-0.2.0.dist-info → chat_console-0.2.5.dist-info}/entry_points.txt +0 -0
- {chat_console-0.2.0.dist-info → chat_console-0.2.5.dist-info}/licenses/LICENSE +0 -0
- {chat_console-0.2.0.dist-info → chat_console-0.2.5.dist-info}/top_level.txt +0 -0
app/ui/model_browser.py
ADDED
@@ -0,0 +1,1146 @@
import asyncio
import logging
from typing import Dict, List, Any, Optional
from textual.app import ComposeResult
from textual.containers import Container, Horizontal, Vertical, ScrollableContainer
from textual.widgets import Button, Input, Label, Static, DataTable, LoadingIndicator, ProgressBar
from textual.widget import Widget
from textual.message import Message
from textual.reactive import reactive

from ..api.ollama import OllamaClient
from ..config import CONFIG

# Set up logging
logger = logging.getLogger(__name__)

class ModelBrowser(Container):
    """Widget for browsing and downloading Ollama models"""

    DEFAULT_CSS = """
    ModelBrowser {
        width: 100%;
        height: 100%;
        background: $surface;
        padding: 1;
    }

    #browser-header {
        width: 100%;
        height: 3;
        layout: horizontal;
        margin-bottom: 1;
    }

    #browser-title {
        width: 1fr;
        height: 3;
        content-align: center middle;
        text-align: center;
        color: $text;
        background: $primary-darken-2;
    }

    #close-button {
        width: 10;
        height: 3;
        margin-left: 1;
    }

    #search-container {
        width: 100%;
        height: 3;
        layout: horizontal;
        margin-bottom: 1;
    }

    #model-search {
        width: 1fr;
        height: 3;
    }

    #search-button {
        width: 10;
        height: 3;
        margin-left: 1;
    }

    #refresh-button {
        width: 10;
        height: 3;
        margin-left: 1;
    }

    #tabs-container {
        width: 100%;
        height: 3;
        layout: horizontal;
        margin-bottom: 1;
    }

    .tab-button {
        height: 3;
        min-width: 15;
        background: $primary-darken-3;
    }

    .tab-button.active {
        background: $primary;
    }

    #models-container {
        width: 100%;
        height: 1fr;
    }

    #local-models, #available-models {
        width: 100%;
        height: 100%;
        display: none;
    }

    #local-models.active, #available-models.active {
        display: block;
    }

    DataTable {
        width: 100%;
        height: 1fr;
        min-height: 10;
    }

    #model-actions {
        width: 100%;
        height: auto;
        margin-top: 1;
    }

    #model-details {
        width: 100%;
        height: auto;
        display: none;
        border: solid $primary;
        padding: 1;
        margin-top: 1;
    }

    #model-details.visible {
        display: block;
    }

    #progress-area {
        width: 100%;
        height: auto;
        display: none;
        margin-top: 1;
        border: solid $primary;
        padding: 1;
    }

    #progress-area.visible {
        display: block;
    }

    #progress-bar {
        width: 100%;
        height: 1;
    }

    #progress-label {
        width: 100%;
        height: 1;
        content-align: center middle;
        text-align: center;
    }

    #status-label {
        width: 100%;
        height: 2;
        content-align: center middle;
        text-align: center;
    }

    #action-buttons {
        layout: horizontal;
        width: 100%;
        height: auto;
        align: center middle;
    }

    #action-buttons Button {
        margin: 0 1;
    }

    LoadingIndicator {
        width: 100%;
        height: 1fr;
    }
    """

    # Reactive variables to track state
    selected_model_id = reactive("")
    current_tab = reactive("local")  # "local" or "available"
    is_loading = reactive(False)
    is_pulling = reactive(False)
    pull_progress = reactive(0.0)
    pull_status = reactive("")

    def __init__(
        self,
        name: Optional[str] = None,
        id: Optional[str] = None
    ):
        super().__init__(name=name, id=id)
        self.ollama_client = OllamaClient()
        self.local_models = []
        self.available_models = []

    def compose(self) -> ComposeResult:
        """Set up the model browser"""
        # Title and close button
        with Container(id="browser-header"):
            yield Static("Ollama Model Browser", id="browser-title")
            yield Button("Close", id="close-button", variant="error")

        # Search bar
        with Container(id="search-container"):
            yield Input(placeholder="Search models...", id="model-search")
            yield Button("Search", id="search-button")
            yield Button("Refresh", id="refresh-button")

        # Tabs
        with Container(id="tabs-container"):
            yield Button("Local Models", id="local-tab", classes="tab-button active")
            yield Button("Available Models", id="available-tab", classes="tab-button")

        # Models container (will hold both tabs)
        with Container(id="models-container"):
            # Local models tab
            with ScrollableContainer(id="local-models", classes="active"):
                yield DataTable(id="local-models-table")
                with Container(id="model-actions"):
                    with Horizontal(id="action-buttons"):
                        yield Button("Run Model", id="run-button", variant="success")
                        yield Button("Delete Model", id="delete-button", variant="error")
                        yield Button("View Details", id="details-button", variant="default")

            # Available models tab
            with ScrollableContainer(id="available-models"):
                yield DataTable(id="available-models-table")
                with Container(id="model-actions"):
                    with Horizontal(id="action-buttons"):
                        yield Button("Pull Model", id="pull-available-button", variant="primary")
                        yield Button("View Details", id="details-available-button", variant="default")

        # Model details area (hidden by default)
        with ScrollableContainer(id="model-details"):
            yield Static("No model selected", id="details-content")

        # Progress area for model downloads (hidden by default)
        with Container(id="progress-area"):
            yield Static("Downloading model...", id="status-label")
            yield ProgressBar(id="progress-bar", total=100)
            yield Static("0%", id="progress-label")

    async def on_mount(self) -> None:
        """Initialize model tables after mount"""
        # Set up local models table
        local_table = self.query_one("#local-models-table", DataTable)
        local_table.add_columns("Model", "Size", "Family", "Modified")
        local_table.cursor_type = "row"

        # Set up available models table
        available_table = self.query_one("#available-models-table", DataTable)
        available_table.add_columns("Model", "Size", "Family", "Description")
        available_table.cursor_type = "row"

        # Show notification about model loading
        self.notify("Initializing model browser, this might take a moment on first run...",
                    severity="information", timeout=5)

        # Load models
        await self.load_local_models()

        # Start loading available models in the background
        asyncio.create_task(self.preload_available_models())

        # Focus search input
        self.query_one("#model-search").focus()

    async def preload_available_models(self) -> None:
        """Preload available models in the background"""
        # Load the available models list in the background to make it faster when
        # the user switches to the Available Models tab
        try:
            # This will trigger cache creation if needed, making tab switching faster
            models = await self.ollama_client.list_available_models_from_registry()
            if models:
                logger.info(f"Preloaded {len(models)} available models")
        except Exception as e:
            logger.error(f"Error preloading available models: {str(e)}")

    async def load_local_models(self) -> None:
        """Load locally installed Ollama models"""
        self.is_loading = True

        try:
            self.local_models = await self.ollama_client.get_available_models()

            # Clear and populate table
            local_table = self.query_one("#local-models-table", DataTable)
            local_table.clear()

            for model in self.local_models:
                # Try to get additional details
                try:
                    details = await self.ollama_client.get_model_details(model["id"])

                    # Extract parameter size info (in billions)
                    size = "Unknown"

                    # First try to get parameter size from modelfile if available
                    if "modelfile" in details and details["modelfile"] is not None:
                        modelfile = details["modelfile"]
                        if "parameter_size" in modelfile and modelfile["parameter_size"]:
                            size = str(modelfile["parameter_size"])
                            # Make sure it ends with B for billions if it doesn't already
                            if not size.upper().endswith("B"):
                                size += "B"

                    # If not found in modelfile, try to extract from name
                    if size == "Unknown":
                        name = model["name"].lower()
                        if "70b" in name:
                            size = "70B"
                        elif "405b" in name or "400b" in name:
                            size = "405B"
                        elif "34b" in name or "35b" in name:
                            size = "34B"
                        elif "27b" in name or "28b" in name:
                            size = "27B"
                        elif "13b" in name or "14b" in name:
                            size = "13B"
                        elif "8b" in name:
                            size = "8B"
                        elif "7b" in name:
                            size = "7B"
                        elif "6b" in name:
                            size = "6B"
                        elif "3b" in name:
                            size = "3B"
                        elif "2b" in name:
                            size = "2B"
                        elif "1b" in name:
                            size = "1B"
                        elif "mini" in name:
                            size = "3B"
                        elif "small" in name:
                            size = "7B"
                        elif "medium" in name:
                            size = "13B"
                        elif "large" in name:
                            size = "34B"

                        # Special handling for base models with no size indicator
                        if size == "Unknown":
                            # Remove tag part if present to get base model
                            base_name = name.split(":")[0]

                            # Check if we have default parameter sizes for known models
                            model_defaults = {
                                "llama3": "8B",
                                "llama2": "7B",
                                "mistral": "7B",
                                "gemma": "7B",
                                "gemma2": "9B",
                                "phi": "3B",
                                "phi2": "3B",
                                "phi3": "3B",
                                "orca-mini": "7B",
                                "llava": "7B",
                                "codellama": "7B",
                                "neural-chat": "7B",
                                "wizard-math": "7B",
                                "yi": "6B",
                                "deepseek": "7B",
                                "deepseek-coder": "7B",
                                "qwen": "7B",
                                "falcon": "7B",
                                "stable-code": "3B"
                            }

                            # Try to find a match in default sizes
                            for model_name, default_size in model_defaults.items():
                                if model_name in base_name:
                                    size = default_size
                                    break

                    # Extract family info - check multiple possible locations
                    family = "Unknown"
                    if "modelfile" in details and details["modelfile"] is not None:
                        # First check for family field
                        if "family" in details["modelfile"] and details["modelfile"]["family"]:
                            family = details["modelfile"]["family"]
                        # Try to infer from model name if not available
                        else:
                            name = model["name"].lower()
                            if "llama" in name:
                                family = "Llama"
                            elif "mistral" in name:
                                family = "Mistral"
                            elif "phi" in name:
                                family = "Phi"
                            elif "gemma" in name:
                                family = "Gemma"
                            elif "yi" in name:
                                family = "Yi"
                            elif "orca" in name:
                                family = "Orca"
                            elif "wizard" in name:
                                family = "Wizard"
                            elif "neural" in name:
                                family = "Neural Chat"
                            elif "qwen" in name:
                                family = "Qwen"
                            elif "deepseek" in name:
                                family = "DeepSeek"
                            elif "falcon" in name:
                                family = "Falcon"
                            elif "stable" in name:
                                family = "Stable"
                            elif "codellama" in name:
                                family = "CodeLlama"
                            elif "llava" in name:
                                family = "LLaVA"

                    # Extract modified date
                    modified = details.get("modified_at", "Unknown")
                    if modified == "Unknown" and "created_at" in details:
                        modified = details["created_at"]

                except Exception as detail_error:
                    self.notify(f"Error getting details for {model['name']}: {str(detail_error)}", severity="warning")
                    size = "Unknown"
                    family = "Unknown"
                    modified = "Unknown"

                local_table.add_row(model["name"], size, family, modified)

            self.notify(f"Loaded {len(self.local_models)} local models", severity="information")

        except Exception as e:
            self.notify(f"Error loading local models: {str(e)}", severity="error")
        finally:
            self.is_loading = False

    async def load_available_models(self) -> None:
        """Load available models from Ollama registry"""
        self.is_loading = True

        try:
            # Get search query if any
            search_input = self.query_one("#model-search", Input)
            query = search_input.value.strip()

            # Debug to track model loading
            logger.info(f"Loading available models, query: '{query}'")

            # Load models from registry - don't apply the query here, get ALL models
            try:
                # First try the API-based registry
                self.available_models = await self.ollama_client.list_available_models_from_registry("")
                logger.info(f"Got {len(self.available_models)} models from registry")

                # If no models found, use the curated list
                if not self.available_models:
                    self.available_models = await self.ollama_client.get_registry_models("")
                    logger.info(f"Got {len(self.available_models)} models from curated list")
            except Exception as e:
                logger.error(f"Error from registry API: {str(e)}")
                # Fallback to curated list
                self.available_models = await self.ollama_client.get_registry_models("")
                logger.info(f"Fallback: Got {len(self.available_models)} models from curated list")

            # Clear and populate table
            available_table = self.query_one("#available-models-table", DataTable)
            available_table.clear()

            # Get number of models loaded (but don't notify to avoid notification spam)
            model_count = len(self.available_models)
            logger.info(f"Found {model_count} models to display")

            # Filter models by search query if provided
            filtered_models = self.available_models
            if query:
                query = query.lower()
                filtered_models = []
                for model in self.available_models:
                    # Check if query matches name, description or family
                    name = str(model.get("name", "")).lower()
                    desc = str(model.get("description", "")).lower()
                    family = str(model.get("model_family", "")).lower()

                    # Also check variants if available
                    variants_match = False
                    if "variants" in model and model["variants"]:
                        variants_text = " ".join([str(v).lower() for v in model["variants"]])
                        if query in variants_text:
                            variants_match = True

                    if query in name or query in desc or query in family or variants_match:
                        filtered_models.append(model)

                logger.info(f"Filtered to {len(filtered_models)} models matching '{query}'")

            # Add all filtered models to the table - no pagination limit
            for model in filtered_models:
                name = model.get("name", "Unknown")

                # Extract parameter size info (in billions)
                size = "Unknown"

                # Check if parameter_size is available in the model metadata
                if "parameter_size" in model and model["parameter_size"]:
                    size = str(model["parameter_size"])
                    # Make sure it ends with B for billions if it doesn't already
                    if not size.upper().endswith("B"):
                        size += "B"
                # Check if we can extract from variants
                elif "variants" in model and model["variants"]:
                    for variant in model["variants"]:
                        if any(char.isdigit() for char in str(variant)):
                            # This looks like a size variant (e.g., "7b", "70b")
                            variant_str = str(variant).lower()
                            if variant_str.endswith('b'):
                                size = str(variant).upper()
                            else:
                                size = f"{variant}B"
                            break
                else:
                    # Extract from name if not available
                    model_name = str(name).lower()
                    if "70b" in model_name:
                        size = "70B"
                    elif "405b" in model_name or "400b" in model_name:
                        size = "405B"
                    elif "34b" in model_name or "35b" in model_name:
                        size = "34B"
                    elif "27b" in model_name or "28b" in model_name:
                        size = "27B"
                    elif "13b" in model_name or "14b" in model_name:
                        size = "13B"
                    elif "8b" in model_name:
                        size = "8B"
                    elif "7b" in model_name:
                        size = "7B"
                    elif "6b" in model_name:
                        size = "6B"
                    elif "3b" in model_name:
                        size = "3B"
                    elif "2b" in model_name:
                        size = "2B"
                    elif "1b" in model_name:
                        size = "1B"
                    elif "mini" in model_name:
                        size = "3B"
                    elif "small" in model_name:
                        size = "7B"
                    elif "medium" in model_name:
                        size = "13B"
                    elif "large" in model_name:
                        size = "34B"

                    # Special handling for base models with no size indicator
                    if size == "Unknown":
                        # Remove tag part if present to get base model
                        base_name = model_name.split(":")[0]

                        # Check if we have default parameter sizes for known models
                        model_defaults = {
                            "llama3": "8B",
                            "llama2": "7B",
                            "mistral": "7B",
                            "gemma": "7B",
                            "gemma2": "9B",
                            "phi": "3B",
                            "phi2": "3B",
                            "phi3": "3B",
                            "phi4": "7B",
                            "orca-mini": "7B",
                            "llava": "7B",
                            "codellama": "7B",
                            "neural-chat": "7B",
                            "wizard-math": "7B",
                            "yi": "6B",
                            "deepseek": "7B",
                            "deepseek-coder": "7B",
                            "qwen": "7B",
                            "falcon": "7B",
                            "stable-code": "3B"
                        }

                        # Try to find a match in default sizes
                        for model_prefix, default_size in model_defaults.items():
                            if model_prefix in base_name:
                                size = default_size
                                break

                family = model.get("model_family", "Unknown")
                description = model.get("description", "No description available")

                # Keep this for debugging
                # logger.info(f"Adding model to table: {name} - {size} - {family}")

                available_table.add_row(name, size, family, description)

            actual_displayed = available_table.row_count
            logger.info(f"Loaded {actual_displayed} available models")

        except Exception as e:
            logger.error(f"Error loading available models: {str(e)}")
            self.notify(f"Error loading available models: {str(e)}", severity="error")
        finally:
            self.is_loading = False

    def _format_size(self, size_bytes: int) -> str:
        """Format size in bytes to human-readable format"""
        if size_bytes == 0:
            return "Unknown"

        suffixes = ["B", "KB", "MB", "GB", "TB"]
        i = 0
        while size_bytes >= 1024 and i < len(suffixes) - 1:
            size_bytes /= 1024
            i += 1

        return f"{size_bytes:.2f} {suffixes[i]}"

    def on_button_pressed(self, event: Button.Pressed) -> None:
        """Handle button presses"""
        button_id = event.button.id

        if button_id == "close-button":
            # Close the model browser by popping the screen
            if hasattr(self.app, "pop_screen"):
                self.app.pop_screen()
            return
        elif button_id == "local-tab":
            self._switch_tab("local")
        elif button_id == "available-tab":
            self._switch_tab("available")
            # Load available models if they haven't been loaded yet
            if not self.available_models:
                self.app.call_later(self.load_available_models)
        elif button_id == "search-button":
            # Search in the current tab
            if self.current_tab == "local":
                self.app.call_later(self.load_local_models)
            else:
                self.app.call_later(self.load_available_models)
        elif button_id == "refresh-button":
            # Refresh current tab
            if self.current_tab == "local":
                self.app.call_later(self.load_local_models)
            else:
                self.app.call_later(self.load_available_models)
        elif button_id == "run-button":
            # Set model in the main app
            self.app.call_later(self._run_selected_model)
        elif button_id == "pull-available-button":
            # Start model pull
            self.app.call_later(self._pull_selected_model)
        elif button_id == "delete-button":
            # Delete selected model
            self.app.call_later(self._delete_selected_model)
        elif button_id in ["details-button", "details-available-button"]:
            # Show model details
            self.app.call_later(self._show_model_details)

    def _switch_tab(self, tab: str) -> None:
        """Switch between local and available tabs"""
        self.current_tab = tab

        # Update tab buttons
        local_tab = self.query_one("#local-tab", Button)
        available_tab = self.query_one("#available-tab", Button)

        if tab == "local":
            local_tab.add_class("active")
            available_tab.remove_class("active")
        else:
            local_tab.remove_class("active")
            available_tab.add_class("active")

        # Update containers
        local_container = self.query_one("#local-models", ScrollableContainer)
        available_container = self.query_one("#available-models", ScrollableContainer)

        if tab == "local":
            local_container.add_class("active")
            available_container.remove_class("active")
        else:
            local_container.remove_class("active")
            available_container.add_class("active")

    async def _run_selected_model(self) -> None:
        """Set the selected model as the active model in the main app"""
        # Get selected model based on current tab
        model_id = self._get_selected_model_id()

        if not model_id:
            self.notify("No model selected", severity="warning")
            return

        try:
            # Set the model in the app
            if hasattr(self.app, "selected_model"):
                self.app.selected_model = model_id
                self.app.update_app_info()  # Update app info to show new model
                self.notify(f"Model set to: {model_id}", severity="success")
                self.app.pop_screen()  # Close the model browser screen
            else:
                self.notify("Cannot set model: app interface not available", severity="error")
        except Exception as e:
            self.notify(f"Error setting model: {str(e)}", severity="error")

    async def _pull_selected_model(self) -> None:
        """Pull the selected model from Ollama registry"""
        # Get selected model based on current tab
        model_id = self._get_selected_model_id()

        if not model_id:
            self.notify("No model selected", severity="warning")
            return

        # Show confirmation dialog - use a simple notification instead of modal
        msg = f"Downloading model '{model_id}'. This may take several minutes depending on model size."
        self.notify(msg, severity="information", timeout=5)

        # No confirmation needed now, since we're just proceeding with notification

        if self.is_pulling:
            self.notify("Already pulling a model", severity="warning")
            return

        self.is_pulling = True
        self.pull_progress = 0.0
        self.pull_status = f"Starting download of {model_id}..."

        # Show progress area
        progress_area = self.query_one("#progress-area")
        progress_area.add_class("visible")

        # Update progress UI
        progress_bar = self.query_one("#progress-bar", ProgressBar)
        progress_bar.update(progress=0)
        status_label = self.query_one("#status-label", Static)
        status_label.update(f"Downloading {model_id}...")
        progress_label = self.query_one("#progress-label", Static)
        progress_label.update("0%")

        try:
            # Start pulling model with progress updates
            async for progress_data in self.ollama_client.pull_model(model_id):
                # Update progress
                if "status" in progress_data:
                    self.pull_status = progress_data["status"]
                    status_label.update(self.pull_status)

                if "completed" in progress_data and "total" in progress_data:
                    completed = progress_data["completed"]
                    total = progress_data["total"]
                    if total > 0:
                        percentage = (completed / total) * 100
                        self.pull_progress = percentage
                        progress_bar.update(progress=int(percentage))
                        progress_label.update(f"{percentage:.1f}%")

            # Download complete
            self.pull_status = f"Download of {model_id} complete!"
            status_label.update(self.pull_status)
            progress_bar.update(progress=100)
            progress_label.update("100%")

            self.notify(f"Model {model_id} downloaded successfully", severity="success")

            # Refresh local models
            await self.load_local_models()

        except Exception as e:
            self.notify(f"Error pulling model: {str(e)}", severity="error")
            status_label.update(f"Error: {str(e)}")
        finally:
            self.is_pulling = False
            # Hide progress area after a delay
            async def hide_progress():
                # Use asyncio.sleep instead of app.sleep
                import asyncio
                await asyncio.sleep(3)
                progress_area.remove_class("visible")
            self.app.call_later(hide_progress)

    async def _delete_selected_model(self) -> None:
        """Delete the selected model from local storage"""
        # Only works on local tab
        if self.current_tab != "local":
            self.notify("Can only delete local models", severity="warning")
            return

        model_id = self._get_selected_model_id()

        if not model_id:
            self.notify("No model selected", severity="warning")
            return

        # Confirm deletion
        if not await self.app.run_modal("confirm_dialog", f"Are you sure you want to delete {model_id}?"):
            return

        try:
            await self.ollama_client.delete_model(model_id)
            self.notify(f"Model {model_id} deleted successfully", severity="success")

            # Refresh local models
            await self.load_local_models()

        except Exception as e:
            self.notify(f"Error deleting model: {str(e)}", severity="error")

    async def _show_model_details(self) -> None:
        """Show details for the selected model"""
        model_id = self._get_selected_model_id()

        if not model_id:
            # Try to select the first model in the table
            if self.current_tab == "local" and self.local_models:
                model_id = self.local_models[0]["id"]
            elif self.current_tab == "available" and self.available_models:
                model_id = self.available_models[0]["name"]

        # If we still don't have a model ID, show warning and return
        if not model_id:
            self.notify("No model selected", severity="warning")
            return

        # Get model details container
        details_container = self.query_one("#model-details")
        details_content = self.query_one("#details-content", Static)

        # Check if we're in "available" tab or "local" tab
        if self.current_tab == "available":
            # For available models, use cached info instead of making API calls
            try:
                # Find the model in our available_models list
                model_info = None
                for model in self.available_models:
                    if model.get("name") == model_id:
                        model_info = model
                        break

                if not model_info:
                    details_content.update(f"No details found for model: {model_id}")
                    details_container.add_class("visible")
                    return

                # Format the details from the cached info
                formatted_details = f"Model: {model_id}\n"

                # Add parameters info
                param_size = model_info.get("parameter_size", "Unknown")
                if param_size and not str(param_size).upper().endswith("B"):
                    param_size = f"{param_size}B"
                formatted_details += f"Parameters: {param_size}\n"

                # Add family info
                family = model_info.get("model_family", "Unknown")
                formatted_details += f"Family: {family}\n"

                # Add description
                description = model_info.get("description", "No description available.")
                formatted_details += f"\nDescription:\n{description}\n"

                # Add variants if available
                if "variants" in model_info and model_info["variants"]:
                    formatted_details += f"\nVariants: {', '.join(model_info['variants'])}\n"

                # Add stats if available
                if "stats" in model_info and model_info["stats"]:
                    stats = model_info["stats"]
                    formatted_details += f"\nStats:\n"
                    if "pulls" in stats:
                        formatted_details += f"Pulls: {stats['pulls']}\n"
                    if "tags" in stats:
                        formatted_details += f"Tags: {stats['tags']}\n"
                    if "last_updated" in stats:
                        formatted_details += f"Last Updated: {stats['last_updated']}\n"

                # Update and show details
                details_content.update(formatted_details)
                details_container.add_class("visible")
            except Exception as e:
                logger.error(f"Error showing available model details: {str(e)}")
                details_content.update(f"Error loading details: {str(e)}")
                details_container.add_class("visible")
        else:
            # For local models, we still need to get details from API
            try:
                # Get model details from Ollama
                details = await self.ollama_client.get_model_details(model_id)

                # Check for error in response
                if "error" in details:
                    error_msg = f"Error: {details['error']}"
                    details_content.update(error_msg)
                    details_container.add_class("visible")
                    return

                formatted_details = f"Model: {model_id}\n"

                # Extract parameter size info
                param_size = "Unknown"

                # First try to get parameter size from modelfile if available
                if "modelfile" in details and details["modelfile"] is not None:
                    modelfile = details["modelfile"]
                    if "parameter_size" in modelfile and modelfile["parameter_size"]:
                        param_size = str(modelfile["parameter_size"])
                        # Make sure it ends with B for billions if it doesn't already
                        if not param_size.upper().endswith("B"):
                            param_size += "B"

                # If not found in modelfile, try to extract from name
                if param_size == "Unknown":
                    model_name = str(model_id).lower()
                    if "70b" in model_name:
                        param_size = "70B"
                    elif "405b" in model_name or "400b" in model_name:
                        param_size = "405B"
                    elif "34b" in model_name or "35b" in model_name:
                        param_size = "34B"
                    elif "27b" in model_name or "28b" in model_name:
                        param_size = "27B"
                    elif "13b" in model_name or "14b" in model_name:
                        param_size = "13B"
                    elif "8b" in model_name:
                        param_size = "8B"
                    elif "7b" in model_name:
                        param_size = "7B"
                    elif "6b" in model_name:
                        param_size = "6B"
                    elif "3b" in model_name:
                        param_size = "3B"
                    elif "2b" in model_name:
                        param_size = "2B"
                    elif "1b" in model_name:
                        param_size = "1B"
                    elif "mini" in model_name:
                        param_size = "3B"
                    elif "small" in model_name:
                        param_size = "7B"
                    elif "medium" in model_name:
                        param_size = "13B"
                    elif "large" in model_name:
                        param_size = "34B"

                    # Special handling for base models with no size indicator
                    if param_size == "Unknown":
                        # Remove tag part if present to get base model
                        base_name = model_name.split(":")[0]

                        # Check if we have default parameter sizes for known models
                        model_defaults = {
                            "llama3": "8B",
                            "llama2": "7B",
                            "mistral": "7B",
                            "gemma": "7B",
                            "gemma2": "9B",
                            "phi": "3B",
                            "phi2": "3B",
                            "phi3": "3B",
                            "phi4": "7B",
                            "orca-mini": "7B",
                            "llava": "7B",
                            "codellama": "7B",
                            "neural-chat": "7B",
                            "wizard-math": "7B",
                            "yi": "6B",
                            "deepseek": "7B",
                            "deepseek-coder": "7B",
                            "qwen": "7B",
                            "falcon": "7B",
                            "stable-code": "3B"
                        }

                        # Try to find a match in default sizes
                        for model_name, default_size in model_defaults.items():
                            if model_name in base_name:
                                param_size = default_size
                                break

                # Show both parameter size and disk size
                formatted_details += f"Parameters: {param_size}\n"
                formatted_details += f"Disk Size: {self._format_size(details.get('size', 0))}\n"

                # Extract family info - check multiple possible locations
                family = "Unknown"
                template = "Unknown"
                license_info = "Unknown"
                system_prompt = ""

if "modelfile" in details and details["modelfile"] is not None:
|
991
|
+
modelfile = details["modelfile"]
|
992
|
+
|
993
|
+
# Ensure modelfile is a dictionary before accessing keys
|
994
|
+
if isinstance(modelfile, dict):
|
995
|
+
# Extract family/parameter size
|
996
|
+
if "parameter_size" in modelfile:
|
997
|
+
family = modelfile.get("parameter_size")
|
998
|
+
elif "family" in modelfile:
|
999
|
+
family = modelfile.get("family")
|
1000
|
+
else:
|
1001
|
+
# Try to infer from model name if not explicitly set
|
1002
|
+
try:
|
1003
|
+
name = str(model_id).lower() if model_id is not None else ""
|
1004
|
+
if "llama" in name:
|
1005
|
+
family = "Llama"
|
1006
|
+
elif "mistral" in name:
|
1007
|
+
family = "Mistral"
|
1008
|
+
elif "phi" in name:
|
1009
|
+
family = "Phi"
|
1010
|
+
elif "gemma" in name:
|
1011
|
+
family = "Gemma"
|
1012
|
+
else:
|
1013
|
+
family = "Unknown"
|
1014
|
+
except (TypeError, ValueError) as e:
|
1015
|
+
logger.error(f"Error inferring model family: {str(e)}")
|
1016
|
+
family = "Unknown"
|
1017
|
+
|
1018
|
+
# Get template
|
1019
|
+
template = modelfile.get("template", "Unknown")
|
1020
|
+
|
1021
|
+
# Get license
|
1022
|
+
license_info = modelfile.get("license", "Unknown")
|
1023
|
+
|
1024
|
+
# Get system prompt if available
|
1025
|
+
if "system" in modelfile:
|
1026
|
+
system_prompt = modelfile.get("system", "") # Use get for safety
|
1027
|
+
else:
|
1028
|
+
# If modelfile is not a dict (e.g., a string), set defaults
|
1029
|
+
logger.warning(f"Modelfile for {model_id} is not a dictionary. Type: {type(modelfile)}")
|
1030
|
+
# Keep existing defaults or try to infer family from name again
|
1031
|
+
if family == "Unknown":
|
1032
|
+
try:
|
1033
|
+
name = str(model_id).lower() if model_id is not None else ""
|
1034
|
+
if "llama" in name: family = "Llama"
|
1035
|
+
elif "mistral" in name: family = "Mistral"
|
1036
|
+
elif "phi" in name: family = "Phi"
|
1037
|
+
elif "gemma" in name: family = "Gemma"
|
1038
|
+
except (TypeError, ValueError): pass # Ignore errors here
|
1039
|
+
# template, license_info, system_prompt remain "Unknown" or empty
|
1040
|
+
|
1041
|
+
formatted_details += f"Family: {family}\n"
|
1042
|
+
formatted_details += f"Template: {template}\n"
|
1043
|
+
formatted_details += f"License: {license_info}\n"
|
1044
|
+
|
1045
|
+
# Add timestamps if available
|
1046
|
+
if "modified_at" in details and details["modified_at"]:
|
1047
|
+
formatted_details += f"Modified: {details['modified_at']}\n"
|
1048
|
+
elif "created_at" in details and details["created_at"]:
|
1049
|
+
formatted_details += f"Created: {details['created_at']}\n"
|
1050
|
+
|
1051
|
+
# Add system prompt if available
|
1052
|
+
if system_prompt:
|
1053
|
+
formatted_details += f"\nSystem Prompt:\n{system_prompt}\n"
|
1054
|
+
|
1055
|
+
# Update and show details
|
1056
|
+
details_content.update(formatted_details)
|
1057
|
+
details_container.add_class("visible")
|
1058
|
+
|
1059
|
+
except Exception as e:
|
1060
|
+
self.notify(f"Error getting model details: {str(e)}", severity="error")
|
1061
|
+
details_content.update(f"Error loading details: {str(e)}")
|
1062
|
+
details_container.add_class("visible")
|
1063
|
+
|
1064
|
+
    def _get_selected_model_id(self) -> str:
        """Get the ID of the currently selected model"""
        try:
            if self.current_tab == "local":
                table = self.query_one("#local-models-table", DataTable)
                if table.cursor_row is not None:
                    row = table.get_row_at(table.cursor_row)
                    # Get model ID from local models list
                    try:
                        if row and len(row) > 0:
                            row_name = str(row[0]) if row[0] is not None else ""
                            for model in self.local_models:
                                if model["name"] == row_name:
                                    return model["id"]
                    except (IndexError, TypeError) as e:
                        logger.error(f"Error processing row data: {str(e)}")
            else:
                table = self.query_one("#available-models-table", DataTable)
                if table.cursor_row is not None:
                    try:
                        row = table.get_row_at(table.cursor_row)
                        # Return the model name as ID
                        if row and len(row) > 0:
                            return str(row[0]) if row[0] is not None else ""
                    except Exception as e:
                        logger.error(f"Error getting row at cursor: {str(e)}")

                # If we couldn't get a valid row, check if there are any rows and select the first one
                if table.row_count > 0:
                    try:
                        # Select the first row and get its ID
                        table.cursor_row = 0
                        row = table.get_row_at(0)
                        if row and len(row) > 0:
                            return str(row[0]) if row[0] is not None else ""
                    except Exception as e:
                        logger.error(f"Error selecting first row: {str(e)}")
        except Exception as e:
            logger.error(f"Error in _get_selected_model_id: {str(e)}")

        return ""

    def on_data_table_row_selected(self, event: DataTable.RowSelected) -> None:
        """Handle row selection in data tables"""
        # Set selected model ID based on the selected row
        if event.data_table.id == "local-models-table":
            row = event.data_table.get_row_at(event.cursor_row)
            # Find the model ID from the display name
            try:
                if row and len(row) > 0:
                    row_name = str(row[0]) if row[0] is not None else ""
                    for model in self.local_models:
                        if model["name"] == row_name:
                            self.selected_model_id = model["id"]
                            break
            except (IndexError, TypeError) as e:
                logger.error(f"Error processing row data: {str(e)}")
        elif event.data_table.id == "available-models-table":
            row = event.data_table.get_row_at(event.cursor_row)
            # Model name is used as ID
            try:
                if row and len(row) > 0:
                    self.selected_model_id = str(row[0]) if row[0] is not None else ""
                else:
                    self.selected_model_id = ""
            except (IndexError, TypeError) as e:
                logger.error(f"Error getting model ID from row: {str(e)}")
                self.selected_model_id = ""

    def on_input_submitted(self, event: Input.Submitted) -> None:
        """Handle input submission (Enter key in search input)"""
        if event.input.id == "model-search":
            # Trigger search
            if self.current_tab == "local":
                self.app.call_later(self.load_local_models)
            else:
                self.app.call_later(self.load_available_models)

    def on_input_changed(self, event: Input.Changed) -> None:
        """Handle input changes for live search"""
        if event.input.id == "model-search" and self.current_tab == "available":
            # Auto-search as user types in the available models tab
            self.app.call_later(self.load_available_models)