openspeechapi 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- openspeech/__init__.py +75 -0
- openspeech/__main__.py +5 -0
- openspeech/cli.py +413 -0
- openspeech/client/__init__.py +4 -0
- openspeech/client/client.py +145 -0
- openspeech/config.py +212 -0
- openspeech/core/__init__.py +0 -0
- openspeech/core/base.py +75 -0
- openspeech/core/enums.py +39 -0
- openspeech/core/models.py +61 -0
- openspeech/core/registry.py +37 -0
- openspeech/core/settings.py +8 -0
- openspeech/demo.py +675 -0
- openspeech/dispatch/__init__.py +0 -0
- openspeech/dispatch/context.py +34 -0
- openspeech/dispatch/dispatcher.py +661 -0
- openspeech/dispatch/executors/__init__.py +0 -0
- openspeech/dispatch/executors/base.py +34 -0
- openspeech/dispatch/executors/in_process.py +66 -0
- openspeech/dispatch/executors/remote.py +64 -0
- openspeech/dispatch/executors/subprocess_exec.py +446 -0
- openspeech/dispatch/fanout.py +95 -0
- openspeech/dispatch/filters.py +73 -0
- openspeech/dispatch/lifecycle.py +178 -0
- openspeech/dispatch/watcher.py +82 -0
- openspeech/engine_catalog.py +236 -0
- openspeech/engine_registry.yaml +347 -0
- openspeech/exceptions.py +51 -0
- openspeech/factory.py +325 -0
- openspeech/local_engines/__init__.py +12 -0
- openspeech/local_engines/aim_resolver.py +91 -0
- openspeech/local_engines/backends/__init__.py +1 -0
- openspeech/local_engines/backends/docker_backend.py +490 -0
- openspeech/local_engines/backends/native_backend.py +902 -0
- openspeech/local_engines/base.py +30 -0
- openspeech/local_engines/engines/__init__.py +1 -0
- openspeech/local_engines/engines/faster_whisper.py +36 -0
- openspeech/local_engines/engines/fish_speech.py +33 -0
- openspeech/local_engines/engines/sherpa_onnx.py +56 -0
- openspeech/local_engines/engines/whisper.py +41 -0
- openspeech/local_engines/engines/whisperlivekit.py +60 -0
- openspeech/local_engines/manager.py +208 -0
- openspeech/local_engines/models.py +50 -0
- openspeech/local_engines/progress.py +69 -0
- openspeech/local_engines/registry.py +19 -0
- openspeech/local_engines/task_store.py +52 -0
- openspeech/local_engines/tasks.py +71 -0
- openspeech/logging_config.py +607 -0
- openspeech/observe/__init__.py +0 -0
- openspeech/observe/base.py +79 -0
- openspeech/observe/debug.py +44 -0
- openspeech/observe/latency.py +19 -0
- openspeech/observe/metrics.py +47 -0
- openspeech/observe/tracing.py +44 -0
- openspeech/observe/usage.py +27 -0
- openspeech/providers/__init__.py +0 -0
- openspeech/providers/_template.py +101 -0
- openspeech/providers/stt/__init__.py +0 -0
- openspeech/providers/stt/alibaba.py +86 -0
- openspeech/providers/stt/assemblyai.py +135 -0
- openspeech/providers/stt/azure_speech.py +99 -0
- openspeech/providers/stt/baidu.py +135 -0
- openspeech/providers/stt/deepgram.py +311 -0
- openspeech/providers/stt/elevenlabs.py +385 -0
- openspeech/providers/stt/faster_whisper.py +211 -0
- openspeech/providers/stt/google_cloud.py +106 -0
- openspeech/providers/stt/iflytek.py +427 -0
- openspeech/providers/stt/macos_speech.py +226 -0
- openspeech/providers/stt/openai.py +84 -0
- openspeech/providers/stt/sherpa_onnx.py +353 -0
- openspeech/providers/stt/tencent.py +212 -0
- openspeech/providers/stt/volcengine.py +107 -0
- openspeech/providers/stt/whisper.py +153 -0
- openspeech/providers/stt/whisperlivekit.py +530 -0
- openspeech/providers/stt/windows_speech.py +249 -0
- openspeech/providers/tts/__init__.py +0 -0
- openspeech/providers/tts/alibaba.py +95 -0
- openspeech/providers/tts/azure_speech.py +123 -0
- openspeech/providers/tts/baidu.py +143 -0
- openspeech/providers/tts/coqui.py +64 -0
- openspeech/providers/tts/cosyvoice.py +90 -0
- openspeech/providers/tts/deepgram.py +174 -0
- openspeech/providers/tts/elevenlabs.py +311 -0
- openspeech/providers/tts/fish_speech.py +158 -0
- openspeech/providers/tts/google_cloud.py +107 -0
- openspeech/providers/tts/iflytek.py +209 -0
- openspeech/providers/tts/macos_say.py +251 -0
- openspeech/providers/tts/minimax.py +122 -0
- openspeech/providers/tts/openai.py +104 -0
- openspeech/providers/tts/piper.py +104 -0
- openspeech/providers/tts/tencent.py +189 -0
- openspeech/providers/tts/volcengine.py +117 -0
- openspeech/providers/tts/windows_sapi.py +234 -0
- openspeech/server/__init__.py +1 -0
- openspeech/server/app.py +72 -0
- openspeech/server/auth.py +42 -0
- openspeech/server/middleware.py +75 -0
- openspeech/server/routes/__init__.py +1 -0
- openspeech/server/routes/management.py +848 -0
- openspeech/server/routes/stt.py +121 -0
- openspeech/server/routes/tts.py +159 -0
- openspeech/server/routes/webui.py +29 -0
- openspeech/server/webui/app.js +2649 -0
- openspeech/server/webui/index.html +216 -0
- openspeech/server/webui/styles.css +617 -0
- openspeech/server/ws/__init__.py +1 -0
- openspeech/server/ws/stt_stream.py +263 -0
- openspeech/server/ws/tts_stream.py +207 -0
- openspeech/telemetry/__init__.py +21 -0
- openspeech/telemetry/perf.py +307 -0
- openspeech/utils/__init__.py +5 -0
- openspeech/utils/audio_converter.py +406 -0
- openspeech/utils/audio_playback.py +156 -0
- openspeech/vendor_registry.yaml +74 -0
- openspeechapi-0.1.0.dist-info/METADATA +101 -0
- openspeechapi-0.1.0.dist-info/RECORD +118 -0
- openspeechapi-0.1.0.dist-info/WHEEL +4 -0
- openspeechapi-0.1.0.dist-info/entry_points.txt +3 -0
|
@@ -0,0 +1,902 @@
|
|
|
1
|
+
"""Native runtime backend for local engine lifecycle."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
import os
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
import re
|
|
7
|
+
import shlex
|
|
8
|
+
import shutil
|
|
9
|
+
import signal
|
|
10
|
+
import subprocess
|
|
11
|
+
import time
|
|
12
|
+
import urllib.error
|
|
13
|
+
import urllib.parse
|
|
14
|
+
import urllib.request
|
|
15
|
+
|
|
16
|
+
from openspeech.local_engines.aim_resolver import resolve_aim_model_paths
|
|
17
|
+
from openspeech.local_engines.base import RuntimeBackend
|
|
18
|
+
from openspeech.local_engines.models import EngineSpec, EngineStatus, RuntimeConfig
|
|
19
|
+
|
|
20
|
+
# Default root directory under which native engine services are installed
# when RuntimeConfig.install_dir is not set.
DEFAULT_NATIVE_ROOT = "~/AI/services"
# Matches ANSI CSI escape sequences (ESC [ ... final-byte) so log lines can
# be stripped of terminal color/control codes before display.
_ANSI_RE = re.compile(r"\x1B\[[0-?]*[ -/]*[@-~]")
# Parses tqdm-style progress lines, e.g.
#   "model.bin:  42%|####      | 1.2G/2.9G [01:02<02:03, 12.3MB/s]"
# capturing optional label, percent, done/total amounts, elapsed, ETA, speed.
_TQDM_HINT_RE = re.compile(
    r"(?:(?P<label>[^:\n]{1,80}):\s*)?"
    r"(?P<pct>\d{1,3})%\|[^|]*\|\s*"
    r"(?P<done>[0-9.]+[A-Za-z]*)/(?P<total>[0-9.]+[A-Za-z]*)\s*"
    r"\[(?P<elapsed>\d{1,2}:\d{2}(?::\d{2})?)<(?P<eta>\d{1,2}:\d{2}(?::\d{2})?),\s*(?P<speed>[^\]]+)\]"
)
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class NativeBackend(RuntimeBackend):
    """Native (host-process) runtime backend for local engine lifecycle.

    Manages per-engine directories (source checkout, virtualenv, run files)
    under a configurable native root and drives install/start via scripts
    and subprocesses.
    """

    # Identifier used to select this backend among runtime implementations.
    runtime_name = "native"
|
|
32
|
+
|
|
33
|
+
def _effective_options(self, spec: EngineSpec, cfg: RuntimeConfig) -> dict:
|
|
34
|
+
opts = dict(spec.options)
|
|
35
|
+
opts.update(cfg.options)
|
|
36
|
+
return opts
|
|
37
|
+
|
|
38
|
+
def _native_root(self, cfg: RuntimeConfig) -> Path:
|
|
39
|
+
root = (cfg.install_dir or "").strip() or DEFAULT_NATIVE_ROOT
|
|
40
|
+
return Path(root).expanduser().resolve()
|
|
41
|
+
|
|
42
|
+
@staticmethod
|
|
43
|
+
def _project_root() -> Path:
|
|
44
|
+
# .../openspeech/local_engines/backends/native_backend.py -> project root
|
|
45
|
+
return Path(__file__).resolve().parents[3]
|
|
46
|
+
|
|
47
|
+
def _service_dir(self, spec: EngineSpec, cfg: RuntimeConfig) -> Path:
|
|
48
|
+
return self._native_root(cfg) / spec.name
|
|
49
|
+
|
|
50
|
+
def _repo_dir(self, spec: EngineSpec, cfg: RuntimeConfig) -> Path:
|
|
51
|
+
return self._service_dir(spec, cfg) / "src"
|
|
52
|
+
|
|
53
|
+
def _venv_dir(self, spec: EngineSpec, cfg: RuntimeConfig) -> Path:
|
|
54
|
+
return self._service_dir(spec, cfg) / ".venv"
|
|
55
|
+
|
|
56
|
+
def _run_dir(self, spec: EngineSpec, cfg: RuntimeConfig) -> Path:
|
|
57
|
+
return self._service_dir(spec, cfg) / "run"
|
|
58
|
+
|
|
59
|
+
def _model_only_ready_file(self, spec: EngineSpec, cfg: RuntimeConfig) -> Path:
|
|
60
|
+
return self._run_dir(spec, cfg) / "model-only.ready"
|
|
61
|
+
|
|
62
|
+
def _log_file(self, spec: EngineSpec, cfg: RuntimeConfig) -> Path:
|
|
63
|
+
return self._run_dir(spec, cfg) / "native.log"
|
|
64
|
+
|
|
65
|
+
def _pid_file(self, spec: EngineSpec, cfg: RuntimeConfig) -> Path:
|
|
66
|
+
return self._run_dir(spec, cfg) / "native.pid"
|
|
67
|
+
|
|
68
|
+
def _ensure_binary(self, name: str) -> None:
|
|
69
|
+
if shutil.which(name) is None:
|
|
70
|
+
raise RuntimeError(f"Required command not found in PATH: {name}")
|
|
71
|
+
|
|
72
|
+
def _resolve_script_path(self, script_value: str) -> Path:
|
|
73
|
+
raw = (script_value or "").strip()
|
|
74
|
+
if not raw:
|
|
75
|
+
raise RuntimeError("script path is empty")
|
|
76
|
+
p = Path(raw).expanduser()
|
|
77
|
+
if p.is_absolute():
|
|
78
|
+
return p
|
|
79
|
+
return (self._project_root() / p).resolve()
|
|
80
|
+
|
|
81
|
+
def _resolve_hf_bin(self, opts: dict) -> str:
|
|
82
|
+
preferred = str(opts.get("native_hf_bin", "")).strip()
|
|
83
|
+
if preferred:
|
|
84
|
+
path = shutil.which(preferred) if "/" not in preferred else preferred
|
|
85
|
+
if path:
|
|
86
|
+
return path
|
|
87
|
+
for name in ("hf", "huggingface-cli"):
|
|
88
|
+
path = shutil.which(name)
|
|
89
|
+
if path:
|
|
90
|
+
return path
|
|
91
|
+
raise RuntimeError(
|
|
92
|
+
"Hugging Face CLI not found. Install `hf` or `huggingface-cli` to download model weights."
|
|
93
|
+
)
|
|
94
|
+
|
|
95
|
+
def _model_dir(self, spec: EngineSpec, cfg: RuntimeConfig, opts: dict) -> Path:
|
|
96
|
+
rel = str(opts.get("native_model_dir", "checkpoints/s2-pro")).strip()
|
|
97
|
+
p = Path(rel)
|
|
98
|
+
if p.is_absolute():
|
|
99
|
+
return p
|
|
100
|
+
if self._is_model_only(opts):
|
|
101
|
+
return self._service_dir(spec, cfg) / p
|
|
102
|
+
return self._repo_dir(spec, cfg) / p
|
|
103
|
+
|
|
104
|
+
@staticmethod
|
|
105
|
+
def _is_model_only(opts: dict) -> bool:
|
|
106
|
+
return bool(opts.get("native_model_only", False))
|
|
107
|
+
|
|
108
|
+
@staticmethod
|
|
109
|
+
def _stringify_paths(value: object) -> str:
|
|
110
|
+
if isinstance(value, str):
|
|
111
|
+
return value.strip()
|
|
112
|
+
if isinstance(value, (list, tuple)):
|
|
113
|
+
items = [str(x).strip() for x in value if str(x).strip()]
|
|
114
|
+
return ":".join(items)
|
|
115
|
+
return ""
|
|
116
|
+
|
|
117
|
+
def _existing_model_paths(self, opts: dict) -> str:
|
|
118
|
+
configured = opts.get("native_existing_model_paths", "")
|
|
119
|
+
merged: list[str] = []
|
|
120
|
+
seen: set[str] = set()
|
|
121
|
+
|
|
122
|
+
aim_enabled = bool(opts.get("native_use_aim", False))
|
|
123
|
+
if aim_enabled:
|
|
124
|
+
aim_model_ids = opts.get("native_aim_model_ids", [])
|
|
125
|
+
if isinstance(aim_model_ids, str):
|
|
126
|
+
aim_model_ids = [x.strip() for x in aim_model_ids.split(",") if x.strip()]
|
|
127
|
+
if not isinstance(aim_model_ids, list):
|
|
128
|
+
aim_model_ids = []
|
|
129
|
+
aim_kind = str(opts.get("native_aim_model_kind", "any")).strip() or "any"
|
|
130
|
+
aim_paths = resolve_aim_model_paths(
|
|
131
|
+
model_ids=[str(x) for x in aim_model_ids],
|
|
132
|
+
provision_engine=str(opts.get("native_aim_provision_engine", "whisper")).strip() or "whisper",
|
|
133
|
+
kind=aim_kind,
|
|
134
|
+
)
|
|
135
|
+
for p in aim_paths:
|
|
136
|
+
if p not in seen:
|
|
137
|
+
seen.add(p)
|
|
138
|
+
merged.append(p)
|
|
139
|
+
|
|
140
|
+
if isinstance(configured, str):
|
|
141
|
+
configured_list = [x.strip() for x in configured.split(":") if x.strip()]
|
|
142
|
+
elif isinstance(configured, (list, tuple)):
|
|
143
|
+
configured_list = [str(x).strip() for x in configured if str(x).strip()]
|
|
144
|
+
else:
|
|
145
|
+
configured_list = []
|
|
146
|
+
for p in configured_list:
|
|
147
|
+
if p not in seen:
|
|
148
|
+
seen.add(p)
|
|
149
|
+
merged.append(p)
|
|
150
|
+
return ":".join(merged)
|
|
151
|
+
|
|
152
|
+
@staticmethod
|
|
153
|
+
def _aim_model_ids(opts: dict) -> list[str]:
|
|
154
|
+
ids = opts.get("native_aim_model_ids", [])
|
|
155
|
+
if isinstance(ids, str):
|
|
156
|
+
return [x.strip() for x in ids.split(",") if x.strip()]
|
|
157
|
+
if isinstance(ids, list):
|
|
158
|
+
return [str(x).strip() for x in ids if str(x).strip()]
|
|
159
|
+
return []
|
|
160
|
+
|
|
161
|
+
@staticmethod
|
|
162
|
+
def _aim_download_specs(opts: dict) -> list[dict]:
|
|
163
|
+
raw = opts.get("native_aim_downloads", [])
|
|
164
|
+
if isinstance(raw, dict):
|
|
165
|
+
raw = [raw]
|
|
166
|
+
if not isinstance(raw, list):
|
|
167
|
+
return []
|
|
168
|
+
specs: list[dict] = []
|
|
169
|
+
for item in raw:
|
|
170
|
+
if isinstance(item, str):
|
|
171
|
+
specs.append({"source": item.strip()})
|
|
172
|
+
continue
|
|
173
|
+
if not isinstance(item, dict):
|
|
174
|
+
continue
|
|
175
|
+
source = str(item.get("source", "")).strip()
|
|
176
|
+
model_id = str(item.get("model_id", "")).strip()
|
|
177
|
+
category = str(item.get("category", "")).strip()
|
|
178
|
+
if not source:
|
|
179
|
+
continue
|
|
180
|
+
spec = {"source": source}
|
|
181
|
+
if model_id:
|
|
182
|
+
spec["model_id"] = model_id
|
|
183
|
+
if category:
|
|
184
|
+
spec["category"] = category
|
|
185
|
+
specs.append(spec)
|
|
186
|
+
return specs
|
|
187
|
+
|
|
188
|
+
@staticmethod
|
|
189
|
+
def _pick_aim_download_spec(model_id: str, specs: list[dict]) -> dict | None:
|
|
190
|
+
for s in specs:
|
|
191
|
+
sid = str(s.get("model_id", "")).strip()
|
|
192
|
+
if sid and sid == model_id:
|
|
193
|
+
return s
|
|
194
|
+
for s in specs:
|
|
195
|
+
if "model_id" not in s:
|
|
196
|
+
return s
|
|
197
|
+
return None
|
|
198
|
+
|
|
199
|
+
def _aim_download(self, source: str, model_id: str, category: str = "") -> tuple[bool, str]:
|
|
200
|
+
cmd = ["aim", "download", source, "--name", model_id]
|
|
201
|
+
if category:
|
|
202
|
+
cmd.extend(["--category", category])
|
|
203
|
+
proc = subprocess.run( # noqa: S603
|
|
204
|
+
cmd,
|
|
205
|
+
capture_output=True,
|
|
206
|
+
text=True,
|
|
207
|
+
check=False,
|
|
208
|
+
)
|
|
209
|
+
out = (proc.stdout or "").strip()
|
|
210
|
+
err = (proc.stderr or "").strip()
|
|
211
|
+
detail = "\n".join(x for x in [out, err] if x).strip()
|
|
212
|
+
if proc.returncode != 0:
|
|
213
|
+
return False, detail or f"aim download failed (exit={proc.returncode})"
|
|
214
|
+
return True, detail or "ok"
|
|
215
|
+
|
|
216
|
+
    def _ensure_aim_models(self, opts: dict, report) -> None:
        """Ensure configured AIM model ids are resolvable, downloading if needed.

        No-op unless `native_use_aim` is set and model ids are configured.
        All failures are surfaced through `report` (progress callback) and
        fall back to the legacy download flow instead of raising.
        """
        if not bool(opts.get("native_use_aim", False)):
            return
        model_ids = self._aim_model_ids(opts)
        if not model_ids:
            return
        if shutil.which("aim") is None:
            report("aim_download", "AIM is not installed; fallback to legacy model download flow.", 16)
            return

        provision_engine = str(opts.get("native_aim_provision_engine", "whisper")).strip() or "whisper"
        kind = str(opts.get("native_aim_model_kind", "any")).strip() or "any"
        specs = self._aim_download_specs(opts)
        report("aim_download", "Checking model availability via AIM...", 14)

        for mid in model_ids:
            # Skip models AIM can already resolve locally.
            existing = resolve_aim_model_paths(
                model_ids=[mid],
                provision_engine=provision_engine,
                kind=kind,
            )
            if existing:
                report("aim_download", f"AIM model ready: {mid}", 16)
                continue

            spec = self._pick_aim_download_spec(mid, specs)
            if not spec:
                report("aim_download", f"AIM model missing for {mid}; no AIM source configured, fallback.", 16)
                continue
            source = str(spec.get("source", "")).strip()
            category = str(spec.get("category", "")).strip()
            report("aim_download", f"Downloading {mid} via AIM: {source}", 20)
            ok, detail = self._aim_download(source=source, model_id=mid, category=category)
            if not ok:
                report("aim_download", f"AIM download failed for {mid}; fallback to legacy flow. {detail}", 22)
                continue
            # Re-resolve to confirm the download actually produced the model.
            existing = resolve_aim_model_paths(
                model_ids=[mid],
                provision_engine=provision_engine,
                kind=kind,
            )
            if existing:
                report("aim_download", f"AIM download completed: {mid}", 24)
            else:
                report("aim_download", f"AIM download finished but model still unresolved: {mid}. {detail}", 24)
|
|
261
|
+
|
|
262
|
+
def _model_assets_present(self, spec: EngineSpec, cfg: RuntimeConfig, opts: dict) -> bool:
|
|
263
|
+
model_dir = self._model_dir(spec, cfg, opts)
|
|
264
|
+
marker = str(opts.get("native_model_marker", "")).strip()
|
|
265
|
+
if marker:
|
|
266
|
+
p = Path(marker)
|
|
267
|
+
if not p.is_absolute():
|
|
268
|
+
p = model_dir / marker
|
|
269
|
+
return p.exists()
|
|
270
|
+
if not model_dir.exists():
|
|
271
|
+
return False
|
|
272
|
+
return any(model_dir.iterdir())
|
|
273
|
+
|
|
274
|
+
@staticmethod
|
|
275
|
+
def _model_ready(model_dir: Path) -> bool:
|
|
276
|
+
return (model_dir / "codec.pth").exists() and (model_dir / "model.safetensors.index.json").exists()
|
|
277
|
+
|
|
278
|
+
    def _download_model_weights(self, spec: EngineSpec, cfg: RuntimeConfig, opts: dict, report) -> None:
        """Download model weights via the Hugging Face CLI unless already present.

        Raises:
            RuntimeError: when the CLI is unavailable (via `_resolve_hf_bin`)
                or the download does not produce the files `_model_ready`
                expects.
        """
        model_repo = str(opts.get("native_model_repo", "fishaudio/s2-pro")).strip()
        model_dir = self._model_dir(spec, cfg, opts)
        model_dir.parent.mkdir(parents=True, exist_ok=True)
        if self._model_ready(model_dir):
            report("download_model", f"Model weights already present: {model_dir}", 50)
            return

        hf_bin = self._resolve_hf_bin(opts)
        report("download_model", f"Downloading model weights from {model_repo} ...", 50)
        cmd = [hf_bin, "download", model_repo, "--local-dir", str(model_dir)]
        # `or 4` guards against an explicit None/0 in the options.
        max_workers = int(opts.get("native_hf_max_workers", 4) or 4)
        if max_workers > 0:
            cmd.extend(["--max-workers", str(max_workers)])
        token = str(opts.get("native_hf_token", "")).strip()
        if token:
            cmd.extend(["--token", token])
        self._run(cmd, cwd=self._repo_dir(spec, cfg))

        if not self._model_ready(model_dir):
            raise RuntimeError(
                f"Model download did not produce expected files under {model_dir}."
            )
        report("download_model", f"Model weights downloaded to {model_dir}", 55)
|
|
302
|
+
|
|
303
|
+
def _python_version(self, python_bin: str) -> tuple[int, int, int]:
|
|
304
|
+
proc = subprocess.run(
|
|
305
|
+
[
|
|
306
|
+
python_bin,
|
|
307
|
+
"-c",
|
|
308
|
+
"import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}')",
|
|
309
|
+
],
|
|
310
|
+
capture_output=True,
|
|
311
|
+
text=True,
|
|
312
|
+
check=False,
|
|
313
|
+
)
|
|
314
|
+
if proc.returncode != 0:
|
|
315
|
+
raise RuntimeError(f"Unable to inspect Python version for: {python_bin}")
|
|
316
|
+
raw = (proc.stdout or "").strip()
|
|
317
|
+
parts = raw.split(".")
|
|
318
|
+
if len(parts) < 2:
|
|
319
|
+
raise RuntimeError(f"Invalid Python version output from {python_bin}: {raw}")
|
|
320
|
+
major = int(parts[0])
|
|
321
|
+
minor = int(parts[1])
|
|
322
|
+
micro = int(parts[2]) if len(parts) > 2 and parts[2].isdigit() else 0
|
|
323
|
+
return major, minor, micro
|
|
324
|
+
|
|
325
|
+
def _resolve_python_bin(self, opts: dict) -> str:
|
|
326
|
+
preferred = str(opts.get("native_python_bin", "")).strip()
|
|
327
|
+
candidates: list[str] = []
|
|
328
|
+
if preferred:
|
|
329
|
+
candidates.append(preferred)
|
|
330
|
+
candidates.extend(["python3.12", "python3.11", "python3.10", "python3.13", "python3"])
|
|
331
|
+
|
|
332
|
+
checked: list[str] = []
|
|
333
|
+
for name in candidates:
|
|
334
|
+
path = shutil.which(name) if "/" not in name else name
|
|
335
|
+
if not path:
|
|
336
|
+
continue
|
|
337
|
+
if path in checked:
|
|
338
|
+
continue
|
|
339
|
+
checked.append(path)
|
|
340
|
+
try:
|
|
341
|
+
major, minor, _ = self._python_version(path)
|
|
342
|
+
except Exception:
|
|
343
|
+
continue
|
|
344
|
+
if major == 3 and minor >= 10:
|
|
345
|
+
return path
|
|
346
|
+
tried = ", ".join(candidates)
|
|
347
|
+
raise RuntimeError(
|
|
348
|
+
"No compatible Python interpreter found for native install. "
|
|
349
|
+
f"Need Python >=3.10; tried: {tried}"
|
|
350
|
+
)
|
|
351
|
+
|
|
352
|
+
def _venv_python(self, spec: EngineSpec, cfg: RuntimeConfig) -> Path:
|
|
353
|
+
return self._venv_dir(spec, cfg) / "bin" / "python"
|
|
354
|
+
|
|
355
|
+
def _ensure_venv_compatible(self, spec: EngineSpec, cfg: RuntimeConfig, python_bin: str, report) -> None:
|
|
356
|
+
venv_dir = self._venv_dir(spec, cfg)
|
|
357
|
+
venv_python = self._venv_python(spec, cfg)
|
|
358
|
+
if not venv_python.exists():
|
|
359
|
+
report("create_venv", f"Creating virtualenv at {venv_dir} ...", 42)
|
|
360
|
+
self._run([python_bin, "-m", "venv", str(venv_dir)])
|
|
361
|
+
return
|
|
362
|
+
major, minor, _ = self._python_version(str(venv_python))
|
|
363
|
+
if major == 3 and minor >= 10:
|
|
364
|
+
report("create_venv", f"Virtualenv already exists: {venv_dir}", 42)
|
|
365
|
+
return
|
|
366
|
+
report("create_venv", f"Recreating incompatible virtualenv at {venv_dir} ...", 42)
|
|
367
|
+
shutil.rmtree(venv_dir, ignore_errors=True)
|
|
368
|
+
self._run([python_bin, "-m", "venv", str(venv_dir)])
|
|
369
|
+
|
|
370
|
+
def _run(self, cmd: list[str], cwd: Path | None = None) -> subprocess.CompletedProcess:
|
|
371
|
+
proc = subprocess.run(cmd, cwd=cwd, capture_output=True, text=True, check=False)
|
|
372
|
+
if proc.returncode != 0:
|
|
373
|
+
out = (proc.stdout or "").strip()
|
|
374
|
+
err = (proc.stderr or "").strip()
|
|
375
|
+
detail = "\n".join(x for x in [out, err] if x)
|
|
376
|
+
if detail:
|
|
377
|
+
raise RuntimeError(f"Command failed: {' '.join(cmd)}\n{detail}")
|
|
378
|
+
raise RuntimeError(f"Command failed: {' '.join(cmd)} (exit={proc.returncode})")
|
|
379
|
+
return proc
|
|
380
|
+
|
|
381
|
+
def _run_script_with_progress(
|
|
382
|
+
self,
|
|
383
|
+
script_path: Path,
|
|
384
|
+
*,
|
|
385
|
+
cwd: Path,
|
|
386
|
+
env: dict[str, str],
|
|
387
|
+
report,
|
|
388
|
+
) -> None:
|
|
389
|
+
if not script_path.exists():
|
|
390
|
+
raise RuntimeError(f"Install script not found: {script_path}")
|
|
391
|
+
|
|
392
|
+
proc = subprocess.Popen( # noqa: S603
|
|
393
|
+
["/bin/bash", str(script_path)],
|
|
394
|
+
cwd=cwd,
|
|
395
|
+
env=env,
|
|
396
|
+
stdout=subprocess.PIPE,
|
|
397
|
+
stderr=subprocess.STDOUT,
|
|
398
|
+
text=True,
|
|
399
|
+
bufsize=1,
|
|
400
|
+
)
|
|
401
|
+
if proc.stdout is None:
|
|
402
|
+
raise RuntimeError("Failed to capture install script output")
|
|
403
|
+
|
|
404
|
+
phase_re = re.compile(r"^OPENSPEECH_PHASE:([^:]+):([0-9]+(?:\.[0-9]+)?):(.*)$")
|
|
405
|
+
tail: list[str] = []
|
|
406
|
+
for line in proc.stdout:
|
|
407
|
+
s = line.rstrip("\n")
|
|
408
|
+
if not s:
|
|
409
|
+
continue
|
|
410
|
+
tail.append(s)
|
|
411
|
+
if len(tail) > 60:
|
|
412
|
+
tail.pop(0)
|
|
413
|
+
m = phase_re.match(s)
|
|
414
|
+
if m:
|
|
415
|
+
phase = m.group(1).strip()
|
|
416
|
+
progress = float(m.group(2))
|
|
417
|
+
message = m.group(3).strip()
|
|
418
|
+
report(phase, message, progress)
|
|
419
|
+
|
|
420
|
+
returncode = proc.wait()
|
|
421
|
+
if returncode != 0:
|
|
422
|
+
detail = "\n".join(tail)
|
|
423
|
+
raise RuntimeError(
|
|
424
|
+
f"Command failed: /bin/bash {script_path}\n{detail}"
|
|
425
|
+
)
|
|
426
|
+
|
|
427
|
+
    def _run_install_script(
        self,
        spec: EngineSpec,
        cfg: RuntimeConfig,
        opts: dict,
        report,
        *,
        action: str,
    ) -> bool:
        """Run the configured install script, exporting context via env vars.

        Returns False (no-op) when `native_install_script` is not set;
        otherwise runs the script from the project root with an
        ``OPENSPEECH_*`` environment describing directories, repo/model
        sources and flags, then reports completion and returns True.

        Raises:
            RuntimeError: propagated from path/interpreter resolution or a
                failing script (see `_run_script_with_progress`).
        """
        install_script = str(opts.get("native_install_script", "")).strip()
        if not install_script:
            return False
        script_path = self._resolve_script_path(install_script)
        python_bin = self._resolve_python_bin(opts)
        root = self._native_root(cfg)
        service_dir = self._service_dir(spec, cfg)
        repo_dir = self._repo_dir(spec, cfg)
        hf_bin = ""
        try:
            hf_bin = self._resolve_hf_bin(opts)
        except Exception:
            # HF CLI is optional here; the script decides whether it needs it.
            hf_bin = ""
        model_dir = self._model_dir(spec, cfg, opts)
        # Script inherits the full environment plus the OPENSPEECH_* contract.
        env = dict(os.environ)
        env.update(
            {
                "OPENSPEECH_ACTION": action,
                "OPENSPEECH_ENGINE": spec.name,
                "OPENSPEECH_NATIVE_ROOT": str(root),
                "OPENSPEECH_SERVICE_DIR": str(service_dir),
                "OPENSPEECH_REPO_DIR": str(repo_dir),
                "OPENSPEECH_VENV_DIR": str(self._venv_dir(spec, cfg)),
                "OPENSPEECH_REPO_URL": str(
                    opts.get("native_repo_url", "https://github.com/fishaudio/fish-speech.git")
                ),
                "OPENSPEECH_REPO_REF": str(opts.get("native_repo_ref", "main")),
                "OPENSPEECH_PYTHON_BIN": python_bin,
                "OPENSPEECH_HF_BIN": hf_bin,
                "OPENSPEECH_HF_MAX_WORKERS": str(int(opts.get("native_hf_max_workers", 4) or 4)),
                "OPENSPEECH_MODEL_REPO": str(opts.get("native_model_repo", "fishaudio/s2-pro")),
                "OPENSPEECH_MODEL_DIR": str(model_dir),
                "OPENSPEECH_INSTALL_TARGET": str(opts.get("native_install_target", ".[cpu]")),
                "OPENSPEECH_EXISTING_MODEL_PATHS": self._existing_model_paths(opts),
                "OPENSPEECH_SIMULATE_DOWNLOAD": str(
                    int(bool(opts.get("native_simulate_download", True)))
                ),
            }
        )
        # The token is only exported when present so scripts can test for it.
        hf_token = str(opts.get("native_hf_token", "")).strip()
        if hf_token:
            env["OPENSPEECH_HF_TOKEN"] = hf_token
        report("run_install_script", f"Running install script: {script_path}", 18)
        self._run_script_with_progress(
            script_path,
            cwd=self._project_root(),
            env=env,
            report=report,
        )
        report("done", f"Native runtime installed under {service_dir}", 100)
        return True
|
|
487
|
+
|
|
488
|
+
@staticmethod
|
|
489
|
+
def _is_pid_running(pid: int) -> bool:
|
|
490
|
+
if pid <= 0:
|
|
491
|
+
return False
|
|
492
|
+
try:
|
|
493
|
+
os.kill(pid, 0)
|
|
494
|
+
return True
|
|
495
|
+
except OSError:
|
|
496
|
+
return False
|
|
497
|
+
|
|
498
|
+
def _read_pid(self, pid_path: Path) -> int | None:
|
|
499
|
+
if not pid_path.exists():
|
|
500
|
+
return None
|
|
501
|
+
try:
|
|
502
|
+
return int(pid_path.read_text(encoding="utf-8").strip())
|
|
503
|
+
except Exception:
|
|
504
|
+
return None
|
|
505
|
+
|
|
506
|
+
@staticmethod
|
|
507
|
+
def _tail_file(path: Path, lines: int) -> str:
|
|
508
|
+
if lines <= 0 or not path.exists():
|
|
509
|
+
return ""
|
|
510
|
+
with path.open("r", encoding="utf-8", errors="ignore") as f:
|
|
511
|
+
all_lines = f.readlines()
|
|
512
|
+
return "".join(all_lines[-lines:]).strip()
|
|
513
|
+
|
|
514
|
+
@staticmethod
|
|
515
|
+
def _sanitize_log_line(line: str, max_len: int = 220) -> str:
|
|
516
|
+
clean = _ANSI_RE.sub("", line).strip()
|
|
517
|
+
if len(clean) <= max_len:
|
|
518
|
+
return clean
|
|
519
|
+
return clean[: max_len - 3] + "..."
|
|
520
|
+
|
|
521
|
+
def _read_log_hint(self, log_path: Path, cursor: int) -> tuple[int, str | None]:
|
|
522
|
+
if not log_path.exists():
|
|
523
|
+
return cursor, None
|
|
524
|
+
with log_path.open("r", encoding="utf-8", errors="ignore") as f:
|
|
525
|
+
f.seek(cursor)
|
|
526
|
+
chunk = f.read()
|
|
527
|
+
new_cursor = f.tell()
|
|
528
|
+
if not chunk:
|
|
529
|
+
return new_cursor, None
|
|
530
|
+
lines = [
|
|
531
|
+
self._sanitize_log_line(x)
|
|
532
|
+
for x in chunk.replace("\r", "\n").split("\n")
|
|
533
|
+
if x.strip()
|
|
534
|
+
]
|
|
535
|
+
if not lines:
|
|
536
|
+
return new_cursor, None
|
|
537
|
+
return new_cursor, lines[-1]
|
|
538
|
+
|
|
539
|
+
@staticmethod
|
|
540
|
+
def _parse_time_to_seconds(raw: str) -> int | None:
|
|
541
|
+
txt = (raw or "").strip()
|
|
542
|
+
if not txt:
|
|
543
|
+
return None
|
|
544
|
+
parts = txt.split(":")
|
|
545
|
+
if len(parts) not in {2, 3}:
|
|
546
|
+
return None
|
|
547
|
+
try:
|
|
548
|
+
nums = [int(x) for x in parts]
|
|
549
|
+
except ValueError:
|
|
550
|
+
return None
|
|
551
|
+
if len(nums) == 2:
|
|
552
|
+
mm, ss = nums
|
|
553
|
+
return max(0, mm * 60 + ss)
|
|
554
|
+
hh, mm, ss = nums
|
|
555
|
+
return max(0, hh * 3600 + mm * 60 + ss)
|
|
556
|
+
|
|
557
|
+
def _summarize_log_hint(self, hint: str) -> tuple[str, float | None, int | None]:
|
|
558
|
+
m = _TQDM_HINT_RE.search(hint)
|
|
559
|
+
if not m:
|
|
560
|
+
return hint, None, None
|
|
561
|
+
pct = max(0.0, min(100.0, float(m.group("pct"))))
|
|
562
|
+
done = m.group("done")
|
|
563
|
+
total = m.group("total")
|
|
564
|
+
speed = m.group("speed").strip()
|
|
565
|
+
eta_txt = m.group("eta")
|
|
566
|
+
label = (m.group("label") or "").strip()
|
|
567
|
+
if label:
|
|
568
|
+
msg = f"{label}: {pct:.1f}% ({done}/{total}), {speed}, ETA {eta_txt}"
|
|
569
|
+
else:
|
|
570
|
+
msg = f"download: {pct:.1f}% ({done}/{total}), {speed}, ETA {eta_txt}"
|
|
571
|
+
return msg, pct, self._parse_time_to_seconds(eta_txt)
|
|
572
|
+
|
|
573
|
+
@staticmethod
|
|
574
|
+
def _is_fatal_log_hint(hint: str) -> bool:
|
|
575
|
+
text = (hint or "").strip().lower()
|
|
576
|
+
if not text:
|
|
577
|
+
return False
|
|
578
|
+
fatal_markers = (
|
|
579
|
+
"traceback (most recent call last)",
|
|
580
|
+
"runtimeerror:",
|
|
581
|
+
"module not found",
|
|
582
|
+
"no module named",
|
|
583
|
+
"failed to bind",
|
|
584
|
+
"address already in use",
|
|
585
|
+
"permission denied",
|
|
586
|
+
)
|
|
587
|
+
return any(m in text for m in fatal_markers)
|
|
588
|
+
|
|
589
|
+
@staticmethod
|
|
590
|
+
def _parse_listen_from_api_url(api_url: str) -> tuple[str, int]:
|
|
591
|
+
u = urllib.parse.urlparse(api_url)
|
|
592
|
+
host = u.hostname or "127.0.0.1"
|
|
593
|
+
if u.port is not None:
|
|
594
|
+
return host, int(u.port)
|
|
595
|
+
if u.scheme == "https":
|
|
596
|
+
return host, 443
|
|
597
|
+
return host, 8080
|
|
598
|
+
|
|
599
|
+
@staticmethod
|
|
600
|
+
def _check_http_alive(url: str) -> bool:
|
|
601
|
+
try:
|
|
602
|
+
with urllib.request.urlopen(url, timeout=2) as resp:
|
|
603
|
+
return resp.status < 500
|
|
604
|
+
except urllib.error.HTTPError as exc:
|
|
605
|
+
return exc.code < 500
|
|
606
|
+
except Exception:
|
|
607
|
+
return False
|
|
608
|
+
|
|
609
|
+
def _start_cmd(self, spec: EngineSpec, cfg: RuntimeConfig, opts: dict) -> list[str]:
    """Build the argv used to launch the native API server process.

    Honors an optional ``native_start_cmd`` override (string or list) with
    ``{placeholder}``-style substitution; otherwise assembles the default
    ``tools/api_server.py`` invocation from the engine options.

    Raises RuntimeError when the virtualenv is missing or the override has
    an unsupported type.
    """
    venv_python = self._venv_python(spec, cfg)
    if not venv_python.exists():
        raise RuntimeError(
            "Python virtualenv is not ready. Run install first for native runtime."
        )
    host, port = self._parse_listen_from_api_url(cfg.api_url)

    custom = opts.get("native_start_cmd")
    if custom:
        if isinstance(custom, str):
            tokens = shlex.split(custom)
        elif isinstance(custom, (list, tuple)):
            tokens = [str(item) for item in custom]
        else:
            raise RuntimeError("native_start_cmd must be a string or list")
        # Placeholders available for substitution inside the override.
        variables = {
            "venv_python": str(venv_python),
            "api_host": host,
            "api_port": str(port),
            "api_url": cfg.api_url,
            "project_root": str(self._project_root()),
            "service_dir": str(self._service_dir(spec, cfg)),
            "repo_dir": str(self._repo_dir(spec, cfg)),
            "model_dir": str(self._model_dir(spec, cfg, opts)),
        }
        rendered: list[str] = []
        for token in tokens:
            try:
                rendered.append(token.format(**variables))
            except Exception:
                # Best effort: leave tokens with unknown or malformed
                # placeholders untouched rather than failing startup.
                rendered.append(token)
        return rendered

    cmd = [
        str(venv_python),
        "tools/api_server.py",
        "--listen",
        f"{host}:{port}",
        "--mode",
        "tts",
        "--device",
        str(opts.get("native_device", "mps")),
        "--llama-checkpoint-path",
        str(opts.get("native_llama_checkpoint_path", "checkpoints/s2-pro")),
        "--decoder-checkpoint-path",
        str(opts.get("native_decoder_checkpoint_path", "checkpoints/s2-pro/codec.pth")),
        "--decoder-config-name",
        str(opts.get("native_decoder_config_name", "modded_dac_vq")),
    ]
    if opts.get("native_half", False):
        cmd.append("--half")
    if opts.get("native_compile", False):
        cmd.append("--compile")
    api_key = str(opts.get("native_api_key", "")).strip()
    if api_key:
        cmd.extend(["--api-key", api_key])
    return cmd
|
|
665
|
+
|
|
666
|
+
def install(self, spec: EngineSpec, cfg: RuntimeConfig, report) -> None:
    """Install the native runtime for *spec*.

    Steps: verify prerequisites, ensure AIM-resolved models, prepare
    directories, optionally delegate to an engine-provided install script,
    then clone the engine repository, create/verify the virtualenv,
    download model weights, and pip-install dependencies.

    `report(step, message, percent)` is invoked at each milestone so the
    caller can surface progress.
    """
    opts = self._effective_options(spec, cfg)
    report("check_runtime", "Checking native runtime prerequisites...", 5)
    if not self._is_model_only(opts):
        # git is only required when we must clone the engine repository.
        # NOTE(review): original indentation was lost in extraction — the
        # guard is assumed to cover only this git check; confirm upstream.
        self._ensure_binary("git")
    python_bin = self._resolve_python_bin(opts)
    report("check_runtime", f"Using Python: {python_bin}", 8)
    self._ensure_aim_models(opts, report)

    root = self._native_root(cfg)
    service_dir = self._service_dir(spec, cfg)
    repo_dir = self._repo_dir(spec, cfg)
    report("prepare_dirs", f"Preparing native root: {root}", 12)
    service_dir.mkdir(parents=True, exist_ok=True)

    # An engine-provided install script, when present and successful,
    # fully replaces the generic clone + venv + pip flow below.
    if self._run_install_script(spec, cfg, opts, report, action="install"):
        return

    repo_url = str(opts.get("native_repo_url", "https://github.com/fishaudio/fish-speech.git"))
    repo_ref = str(opts.get("native_repo_ref", "main"))
    if (repo_dir / ".git").exists():
        report("clone_repo", f"Repository already exists: {repo_dir}", 25)
    else:
        report("clone_repo", f"Cloning repository from {repo_url} ...", 25)
        # Shallow clone of the requested branch/tag only.
        self._run(
            ["git", "clone", "--depth", "1", "--branch", repo_ref, repo_url, str(repo_dir)]
        )

    self._ensure_venv_compatible(spec, cfg, python_bin, report)
    venv_python = self._venv_python(spec, cfg)

    self._download_model_weights(spec, cfg, opts, report)

    report("install_deps", "Installing Python dependencies (this may take a while)...", 58)
    self._run([str(venv_python), "-m", "pip", "install", "--upgrade", "pip"])
    # Editable install of the engine package; extras default to the CPU set.
    install_target = str(opts.get("native_install_target", ".[cpu]"))
    self._run([str(venv_python), "-m", "pip", "install", "-e", install_target], cwd=repo_dir)
    report("done", f"Native runtime installed under {service_dir}", 100)
|
|
704
|
+
|
|
705
|
+
def update(self, spec: EngineSpec, cfg: RuntimeConfig, report) -> None:
    """Update the native runtime: pull the latest engine sources and
    refresh Python dependencies in the managed virtualenv.

    Model-only engines with an engine-provided update script are handled
    entirely by that script. Raises RuntimeError when the repository has
    never been cloned (install must run first).

    `report(step, message, percent)` is invoked at each milestone.
    """
    opts = self._effective_options(spec, cfg)
    self._ensure_aim_models(opts, report)
    if self._is_model_only(opts) and self._run_install_script(spec, cfg, opts, report, action="update"):
        return
    python_bin = self._resolve_python_bin(opts)
    report("check_runtime", "Checking native runtime prerequisites...", 10)
    self._ensure_binary("git")
    repo_dir = self._repo_dir(spec, cfg)
    if not (repo_dir / ".git").exists():
        raise RuntimeError(
            f"Native source not found at {repo_dir}. Run install first."
        )
    report("update_repo", "Updating native source repository...", 30)
    self._run(["git", "-C", str(repo_dir), "fetch", "--all", "--tags"])
    # Fast-forward only: never create merge commits in a managed checkout.
    self._run(["git", "-C", str(repo_dir), "pull", "--ff-only"])

    report("update_deps", "Refreshing Python dependencies...", 70)
    self._ensure_venv_compatible(spec, cfg, python_bin, report)
    venv_python = self._venv_python(spec, cfg)
    self._run([str(venv_python), "-m", "pip", "install", "--upgrade", "pip"])
    # Fix: honor the configured install target exactly as install() does,
    # instead of always reinstalling the hard-coded ".[cpu]" extras.
    install_target = str(opts.get("native_install_target", ".[cpu]"))
    self._run([str(venv_python), "-m", "pip", "install", "-e", install_target], cwd=repo_dir)
    report("done", "Native runtime update completed.", 100)
|
|
728
|
+
|
|
729
|
+
def start(self, spec: EngineSpec, cfg: RuntimeConfig, report) -> None:
    """Start the native engine and block until it is healthy or times out.

    Model-only engines are "started" by verifying assets and writing a
    ready marker. Otherwise the API server process is spawned in its own
    session with output appended to the log file, and this method polls
    the HTTP endpoint until it responds, the process dies, a fatal log
    line appears, or ``cfg.timeout_s`` elapses (RuntimeError in the last
    three cases).

    `report(step, message, percent, ...)` is invoked at each milestone.
    """
    opts = self._effective_options(spec, cfg)
    if self._is_model_only(opts):
        run_dir = self._run_dir(spec, cfg)
        run_dir.mkdir(parents=True, exist_ok=True)
        ready = self._model_only_ready_file(spec, cfg)
        report("check_assets", "Checking local model assets...", 25)
        if not self._model_assets_present(spec, cfg, opts):
            raise RuntimeError(
                "Model assets not found. Run install first or configure existing model paths."
            )
        # Ready marker records the start time as a Unix timestamp.
        ready.write_text(str(int(time.time())), encoding="utf-8")
        report("ready", "Model assets are ready (model-only engine).", 100)
        return
    report("check_runtime", "Checking native runtime prerequisites...", 8)
    repo_dir = self._repo_dir(spec, cfg)
    if not repo_dir.exists():
        raise RuntimeError(
            f"Native source not found at {repo_dir}. Run install first."
        )

    run_dir = self._run_dir(spec, cfg)
    run_dir.mkdir(parents=True, exist_ok=True)
    pid_file = self._pid_file(spec, cfg)
    existing_pid = self._read_pid(pid_file)
    # Idempotent start: a live pid means there is nothing to do.
    if existing_pid is not None and self._is_pid_running(existing_pid):
        report("ready", f"Native service already running (pid={existing_pid}).", 100)
        return

    log_path = self._log_file(spec, cfg)
    cmd = self._start_cmd(spec, cfg, opts)
    report("start_process", "Starting native API process...", 40)
    with log_path.open("a", encoding="utf-8") as log_f:
        # Record the exact command line for later debugging.
        log_f.write(
            f"\n[{time.strftime('%Y-%m-%d %H:%M:%S')}] starting: {' '.join(cmd)}\n"
        )
        log_f.flush()
        # start_new_session=True detaches the child into its own process
        # group so stop() can signal the whole group via killpg.
        proc = subprocess.Popen(  # noqa: S603
            cmd,
            cwd=repo_dir,
            stdout=log_f,
            stderr=subprocess.STDOUT,
            start_new_session=True,
            text=True,
        )
    pid_file.write_text(str(proc.pid), encoding="utf-8")

    report("wait_health", f"Waiting for native API health: {cfg.api_url}", 65)
    deadline = time.monotonic() + max(cfg.timeout_s, 1.0)
    next_heartbeat = 0.0  # monotonic timestamp of the next progress report
    log_cursor = 0  # byte offset into the log already scanned for hints
    last_log_hint: str | None = None
    while time.monotonic() < deadline:
        # Abort immediately if the child died; surface the log tail.
        pid = self._read_pid(pid_file)
        if pid is None or not self._is_pid_running(pid):
            tail = self._tail_file(log_path, 40)
            raise RuntimeError(
                "Native process exited unexpectedly.\n"
                + (tail or "No logs available.")
            )
        now = time.monotonic()
        if now >= next_heartbeat:
            # Base progress climbs linearly from 65 to 95 over the timeout.
            total = max(cfg.timeout_s, 1.0)
            elapsed = total - max(0.0, deadline - now)
            ratio = min(1.0, max(0.0, elapsed / total))
            progress = 65.0 + ratio * 30.0
            log_cursor, hint = self._read_log_hint(log_path, log_cursor)
            if hint:
                last_log_hint = hint
            if last_log_hint:
                hint_text, hint_pct, hint_eta = self._summarize_log_hint(last_log_hint)
                if hint_pct is not None:
                    # A download-progress percentage can only move the bar
                    # forward, never backwards.
                    progress = max(progress, 65.0 + hint_pct * 0.30)
                if self._is_fatal_log_hint(hint_text):
                    tail = self._tail_file(log_path, 80)
                    raise RuntimeError(
                        "Native process reported a fatal error during startup.\n"
                        + (tail or hint_text)
                    )
                report(
                    "wait_health",
                    f"Waiting for native API at {cfg.api_url} ... {hint_text}",
                    progress,
                    eta_seconds=hint_eta,
                )
            else:
                report(
                    "wait_health",
                    f"Waiting for native API at {cfg.api_url} ...",
                    progress,
                )
            # Heartbeat at most once per second.
            next_heartbeat = now + 1.0
        if self._check_http_alive(cfg.api_url):
            report("ready", f"Native API is healthy: {cfg.api_url}", 100)
            return
        time.sleep(1.0)

    raise RuntimeError(
        f"Timed out waiting for native API at {cfg.api_url}. "
        f"Check logs in {log_path}."
    )
|
|
830
|
+
|
|
831
|
+
def stop(self, spec: EngineSpec, cfg: RuntimeConfig, report) -> None:
    """Stop the native service.

    Sends SIGTERM to the process group, waits up to ten seconds for a
    clean exit, escalates to SIGKILL if needed, then clears the pid file.
    Model-only engines simply drop their ready marker.
    """
    opts = self._effective_options(spec, cfg)
    if self._is_model_only(opts):
        # No process exists for model-only engines; removing the marker
        # is all that "stopping" means.
        self._model_only_ready_file(spec, cfg).unlink(missing_ok=True)
        report("done", "Model-only engine marked as stopped.", 100)
        return
    report("stop_process", "Stopping native API process...", 50)
    pid_file = self._pid_file(spec, cfg)
    pid = self._read_pid(pid_file)
    if pid is None:
        report("done", "Native service is not running.", 100)
        return
    if not self._is_pid_running(pid):
        # Stale pid file left behind by a crashed process.
        pid_file.unlink(missing_ok=True)
        report("done", "Native service was already stopped.", 100)
        return
    # The server was launched with start_new_session=True, so its pid is
    # also its process-group id; prefer signalling the whole group and
    # fall back to the single process if killpg is unavailable/fails.
    try:
        os.killpg(pid, signal.SIGTERM)
    except Exception:
        os.kill(pid, signal.SIGTERM)

    grace_deadline = time.monotonic() + 10.0
    while time.monotonic() < grace_deadline and self._is_pid_running(pid):
        time.sleep(0.2)
    if self._is_pid_running(pid):
        # Graceful shutdown failed; force-kill the group (or process).
        try:
            os.killpg(pid, signal.SIGKILL)
        except Exception:
            os.kill(pid, signal.SIGKILL)
    pid_file.unlink(missing_ok=True)
    report("done", "Native service stopped.", 100)
|
|
864
|
+
|
|
865
|
+
def status(self, spec: EngineSpec, cfg: RuntimeConfig) -> EngineStatus:
    """Report the engine's current runtime state without mutating anything.

    Model-only engines are "running" when their ready marker exists and
    "healthy" when their assets are present. Process-backed engines are
    "running" when the recorded pid is alive and "healthy" when the HTTP
    endpoint also responds.
    """
    opts = self._effective_options(spec, cfg)
    if self._is_model_only(opts):
        is_ready = self._model_only_ready_file(spec, cfg).exists()
        return EngineStatus(
            engine=spec.name,
            runtime=self.runtime_name,
            running=is_ready,
            healthy=self._model_assets_present(spec, cfg, opts),
            detail="ready(model-only)" if is_ready else "stopped(model-only)",
            metadata={
                "root_dir": str(self._native_root(cfg)),
                "service_dir": str(self._service_dir(spec, cfg)),
                "model_dir": str(self._model_dir(spec, cfg, opts)),
                "ready_file": str(self._model_only_ready_file(spec, cfg)),
            },
        )
    pid = self._read_pid(self._pid_file(spec, cfg))
    is_running = pid is not None and self._is_pid_running(pid)
    # Skip the HTTP probe entirely when the process is not even alive.
    return EngineStatus(
        engine=spec.name,
        runtime=self.runtime_name,
        running=is_running,
        healthy=is_running and self._check_http_alive(cfg.api_url),
        detail="running" if is_running else "stopped",
        metadata={
            "pid": pid,
            "root_dir": str(self._native_root(cfg)),
            "service_dir": str(self._service_dir(spec, cfg)),
            "log_file": str(self._log_file(spec, cfg)),
        },
    )
|
|
900
|
+
|
|
901
|
+
def logs(self, spec: EngineSpec, cfg: RuntimeConfig, lines: int = 100) -> str:
    """Return the last *lines* lines of the engine's native log file."""
    log_path = self._log_file(spec, cfg)
    return self._tail_file(log_path, lines)
|