pysfi 0.1.12__py3-none-any.whl → 0.1.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pysfi-0.1.12.dist-info → pysfi-0.1.13.dist-info}/METADATA +1 -1
- {pysfi-0.1.12.dist-info → pysfi-0.1.13.dist-info}/RECORD +35 -27
- {pysfi-0.1.12.dist-info → pysfi-0.1.13.dist-info}/entry_points.txt +2 -0
- sfi/__init__.py +5 -3
- sfi/alarmclock/__init__.py +3 -0
- sfi/alarmclock/alarmclock.py +23 -40
- sfi/bumpversion/__init__.py +5 -3
- sfi/cleanbuild/__init__.py +3 -0
- sfi/cli.py +12 -2
- sfi/condasetup/__init__.py +1 -0
- sfi/docdiff/__init__.py +1 -0
- sfi/docdiff/docdiff.py +1 -1
- sfi/docscan/__init__.py +3 -3
- sfi/docscan/docscan_gui.py +150 -46
- sfi/img2pdf/__init__.py +0 -0
- sfi/img2pdf/img2pdf.py +453 -0
- sfi/llmclient/llmclient.py +31 -8
- sfi/llmquantize/llmquantize.py +39 -11
- sfi/llmserver/__init__.py +1 -0
- sfi/llmserver/llmserver.py +63 -13
- sfi/makepython/makepython.py +507 -124
- sfi/pyarchive/__init__.py +1 -0
- sfi/pyarchive/pyarchive.py +908 -278
- sfi/pyembedinstall/pyembedinstall.py +88 -89
- sfi/pylibpack/pylibpack.py +571 -465
- sfi/pyloadergen/pyloadergen.py +372 -218
- sfi/pypack/pypack.py +494 -965
- sfi/pyprojectparse/pyprojectparse.py +328 -28
- sfi/pysourcepack/__init__.py +1 -0
- sfi/pysourcepack/pysourcepack.py +210 -131
- sfi/quizbase/quizbase_gui.py +2 -2
- sfi/taskkill/taskkill.py +168 -59
- sfi/which/which.py +11 -3
- sfi/workflowengine/workflowengine.py +225 -122
- {pysfi-0.1.12.dist-info → pysfi-0.1.13.dist-info}/WHEEL +0 -0
sfi/llmserver/llmserver.py
CHANGED
@@ -6,8 +6,8 @@ import logging
 import os
 import pathlib
 import sys
+from dataclasses import dataclass
 from pathlib import Path
-from types import SimpleNamespace
 from typing import ClassVar
 
 from PySide2.QtCore import QProcess, QTextStream, QUrl
@@ -36,36 +36,61 @@ from PySide2.QtWidgets import (
     QWidget,
 )
 
-CONFIG_FILE = Path.home() / ".
+CONFIG_FILE = Path.home() / ".pysfi" / "llmserver.json"
 logging.basicConfig(level="INFO", format="%(message)s")
 logger = logging.getLogger(__name__)
 
 
-
+@dataclass
+class LLMServerConfig:
     """Llama local model server configuration."""
 
     TITLE: str = "Llama Local Model Server"
     WIN_SIZE: ClassVar[list[int]] = [800, 800]
     WIN_POS: ClassVar[list[int]] = [200, 200]
     MODEL_PATH: str = ""
-
     URL: str = "http://127.0.0.1"
     LISTEN_PORT: int = 8080
     LISTEN_PORT_RNG: ClassVar[list[int]] = [1024, 65535]
     THREAD_COUNT_RNG: ClassVar[list[int]] = [1, 24]
     THREAD_COUNT: int = 4
 
-
+    _loaded_from_file: bool = False
+
+    def __post_init__(self) -> None:
         if CONFIG_FILE.exists():
             logger.info("Loading configuration from %s", CONFIG_FILE)
-
+            try:
+                config_data = json.loads(CONFIG_FILE.read_text())
+                # Update instance attributes with loaded values
+                for key, value in config_data.items():
+                    if hasattr(self, key):
+                        setattr(self, key, value)
+                self._loaded_from_file = True
+            except (json.JSONDecodeError, TypeError, AttributeError) as e:
+                logger.error(f"Error loading config from {CONFIG_FILE}: {e}")
         else:
             logger.info("Using default configuration")
 
     def save(self) -> None:
         """Save configuration."""
-
-
+        try:
+            CONFIG_FILE.parent.mkdir(parents=True, exist_ok=True)
+            # Convert dataclass to dict for JSON serialization
+            config_dict = {}
+            for attr_name in dir(self):
+                if not attr_name.startswith("_"):
+                    try:
+                        attr_value = getattr(self, attr_name)
+                        if not callable(attr_value):
+                            config_dict[attr_name] = attr_value
+                    except AttributeError:
+                        continue
+            CONFIG_FILE.write_text(json.dumps(config_dict, indent=4))
+            logger.info(f"Configuration saved to {CONFIG_FILE}")
+        except Exception as e:
+            logger.error(f"Failed to save configuration: {e}")
+            raise
 
 
 conf = LLMServerConfig()
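For reference, here is a minimal, standalone sketch of the load/save round-trip this hunk introduces. DemoConfig, its reduced field set, and the temp-file path are illustrative stand-ins, not code from the package:

import json
import tempfile
from dataclasses import dataclass
from pathlib import Path

# Hypothetical location for the demo; the real module uses ~/.pysfi/llmserver.json.
CONFIG_FILE = Path(tempfile.gettempdir()) / "llmserver-demo.json"


@dataclass
class DemoConfig:
    MODEL_PATH: str = ""
    LISTEN_PORT: int = 8080
    THREAD_COUNT: int = 4

    def __post_init__(self) -> None:
        # Overlay any values found in the JSON file onto the defaults.
        if CONFIG_FILE.exists():
            for key, value in json.loads(CONFIG_FILE.read_text()).items():
                if hasattr(self, key):
                    setattr(self, key, value)

    def save(self) -> None:
        # Persist every public, non-callable attribute as JSON.
        CONFIG_FILE.parent.mkdir(parents=True, exist_ok=True)
        data = {
            name: getattr(self, name)
            for name in dir(self)
            if not name.startswith("_") and not callable(getattr(self, name))
        }
        CONFIG_FILE.write_text(json.dumps(data, indent=4))


conf = DemoConfig()
conf.MODEL_PATH = "/models/example.gguf"
conf.save()
print(DemoConfig())  # the reloaded instance picks MODEL_PATH up from the JSON file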
@@ -92,11 +117,8 @@ class LlamaServerGUI(QMainWindow):
         self.init_ui()
         self.setup_process()
 
-
-
-            self.model_path_input.setText(str(model_path))
-        else:
-            self.model_path_input.setPlaceholderText("Choose model file...")
+        # Apply loaded configuration to UI controls
+        self.apply_config_to_ui()
 
     def init_ui(self) -> None:
         """Initialize user interface."""
@@ -191,6 +213,19 @@ class LlamaServerGUI(QMainWindow):
         self.process.readyReadStandardError.connect(self.handle_stderr) # type: ignore
         self.process.finished.connect(self.on_process_finished) # type: ignore
 
+    def apply_config_to_ui(self) -> None:
+        """Apply loaded configuration to UI controls."""
+        # Set model path
+        model_path = conf.MODEL_PATH
+        if model_path:
+            self.model_path_input.setText(str(model_path))
+        else:
+            self.model_path_input.setPlaceholderText("Choose model file...")
+
+        # Set port and thread values
+        self.port_spin.setValue(conf.LISTEN_PORT)
+        self.threads_spin.setValue(conf.THREAD_COUNT)
+
     def on_config_changed(self) -> None:
         """Configuration changed."""
         conf.MODEL_PATH = self.model_path_input.text().strip()
@@ -325,6 +360,21 @@ class LlamaServerGUI(QMainWindow):
         conf.WIN_SIZE = [geometry.width(), geometry.height()]
         return super().resizeEvent(event)
 
+    def closeEvent(self, event) -> None: # noqa: N802
+        """Handle window close event to ensure configuration is saved."""
+        try:
+            # Save current configuration
+            conf.save()
+            logger.info("Configuration saved successfully on exit")
+        except Exception as e:
+            logger.error(f"Failed to save configuration on exit: {e}")
+        finally:
+            # Also stop server if running
+            if hasattr(self, "process") and self.process.state() == QProcess.Running:
+                self.process.terminate()
+                self.process.waitForFinished(2000) # Wait up to 2 seconds
+            event.accept()
+
 
 def main() -> None:
     """Main entry point."""
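The new closeEvent also stops the model server process with a short grace period. A minimal sketch of that QProcess shutdown pattern, separated from the GUI (the program name and arguments are hypothetical placeholders, not the package's actual command line):

import sys

from PySide2.QtCore import QCoreApplication, QProcess

app = QCoreApplication(sys.argv)

proc = QProcess()
proc.start("llama-server", ["--port", "8080"])  # hypothetical command line
proc.waitForStarted(1000)

# Same idea as the new closeEvent: ask the process to stop, give it about
# two seconds to exit, then force-kill it if it is still running.
if proc.state() == QProcess.Running:
    proc.terminate()
    if not proc.waitForFinished(2000):
        proc.kill()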