development-engine-vector 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dev/__init__.py +3 -0
- dev/__main__.py +5 -0
- dev/cli/__init__.py +0 -0
- dev/cli/cli.py +674 -0
- dev/kernel/__init__.py +0 -0
- dev/kernel/config.py +46 -0
- dev/kernel/db.py +129 -0
- dev/kernel/paths.py +85 -0
- dev/kernel/pmf_kernel.py +432 -0
- dev/kernel/selfcheck.py +137 -0
- dev/ui/__init__.py +0 -0
- dev/ui/actions.py +47 -0
- dev/ui/db/__init__.py +26 -0
- dev/ui/db/base.py +75 -0
- dev/ui/db/checks.py +16 -0
- dev/ui/db/events.py +18 -0
- dev/ui/db/health.py +34 -0
- dev/ui/db/identity.py +12 -0
- dev/ui/db/manifest.py +35 -0
- dev/ui/db/overview.py +47 -0
- dev/ui/db/runs.py +47 -0
- dev/ui/routes.py +114 -0
- dev/ui/static/__init__.py +0 -0
- dev/ui/static/web.css +722 -0
- dev/ui/static/web.js +528 -0
- dev/ui/templates.py +231 -0
- dev/ui/web.py +28 -0
- dev/utils.py +93 -0
- dev/vcs/__init__.py +0 -0
- dev/vcs/git.py +107 -0
- dev/vcs/github.py +77 -0
- dev/workflow/__init__.py +0 -0
- dev/workflow/cda.py +143 -0
- dev/workflow/changelog.py +102 -0
- dev/workflow/preflight.py +307 -0
- dev/workflow/release.py +217 -0
- dev/workflow/versioning.py +87 -0
- development_engine_vector-0.3.0.dist-info/METADATA +252 -0
- development_engine_vector-0.3.0.dist-info/RECORD +41 -0
- development_engine_vector-0.3.0.dist-info/WHEEL +4 -0
- development_engine_vector-0.3.0.dist-info/entry_points.txt +2 -0
dev/kernel/config.py
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
from pathlib import Path
|
|
2
|
+
|
|
3
|
+
# Config filenames searched in the project directory, in priority order
# (hidden dotfile first).
DEFAULT_CONFIG_NAMES = [".dev-cli.toml", "dev-cli.toml"]
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def load_toml_file(path):
    """Parse a TOML file and return its contents as a dict.

    Uses the stdlib ``tomllib`` (Python 3.11+) and falls back to the
    ``tomli`` backport on older interpreters.

    Args:
        path: path-like object or string pointing at the TOML file.

    Raises:
        tomllib.TOMLDecodeError: if the file is not valid TOML.
        OSError: if the file cannot be read.
    """
    try:
        import tomllib  # type: ignore[import-not-found]
    except ModuleNotFoundError:
        import tomli as tomllib  # type: ignore[import-not-found,no-redef]

    # TOML is UTF-8 by specification; be explicit so the platform's
    # default encoding can never corrupt the parse.
    return tomllib.loads(Path(path).read_text(encoding="utf-8"))
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class DevConfig:
    """Load dev-cli project configuration from a TOML file.

    Resolution order for the config file:
      1. an explicit ``config_path`` argument (ignored if it does not exist),
      2. the first of ``DEFAULT_CONFIG_NAMES`` found in ``project_dir``.

    When no config file is found, ``data`` is an empty dict and every
    lookup returns its default.
    """

    def __init__(self, project_dir=".", config_path=None):
        # project_dir anchors the search for the default config names.
        self.project_dir = Path(project_dir).resolve()
        self.config_path = self._resolve_config_path(config_path)
        self.data = self._load() if self.config_path else {}

    def _resolve_config_path(self, config_path):
        """Return the absolute config file path, or None if none exists."""
        if config_path:
            # An explicit path wins, but only when it actually exists.
            candidate = Path(config_path)
            return candidate.resolve() if candidate.exists() else None

        for name in DEFAULT_CONFIG_NAMES:
            candidate = self.project_dir / name
            if candidate.exists():
                return candidate.resolve()

        return None

    def _load(self):
        """Parse the resolved config file, wrapping any parse error."""
        try:
            return load_toml_file(self.config_path)
        except Exception as exc:
            # Chain the original exception so the underlying TOML/IO
            # failure stays visible in the traceback.
            raise ValueError(f"Invalid dev-cli config {self.config_path}: {exc}") from exc

    def get(self, section, key, default=None):
        """Return data[section][key], falling back to default when absent."""
        return self.data.get(section, {}).get(key, default)

    def get_bool(self, section, key, default=None):
        """Like get(), coerced to bool; a missing value stays None."""
        value = self.get(section, key, default)
        return bool(value) if value is not None else None
|
dev/kernel/db.py
ADDED
|
@@ -0,0 +1,129 @@
|
|
|
1
|
+
"""dev.kernel.db — persistent run history for the Development Engine Vector.
|
|
2
|
+
|
|
3
|
+
Schema:
|
|
4
|
+
runs — every workflow action: preflight, build, publish, release, check
|
|
5
|
+
projects — registry of known projects with last known version and release time
|
|
6
|
+
|
|
7
|
+
All writes use WAL mode. Reads are safe to do concurrently.
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
import json
|
|
11
|
+
import sqlite3
|
|
12
|
+
from datetime import datetime, timezone
|
|
13
|
+
from typing import Optional
|
|
14
|
+
|
|
15
|
+
from dev.kernel.paths import DATA_DIR, DB_PATH
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
_SCHEMA = """
|
|
19
|
+
CREATE TABLE IF NOT EXISTS runs (
|
|
20
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
21
|
+
project TEXT NOT NULL,
|
|
22
|
+
action TEXT NOT NULL,
|
|
23
|
+
version TEXT,
|
|
24
|
+
outcome TEXT NOT NULL,
|
|
25
|
+
started_at TEXT NOT NULL,
|
|
26
|
+
finished_at TEXT,
|
|
27
|
+
details TEXT
|
|
28
|
+
);
|
|
29
|
+
|
|
30
|
+
CREATE TABLE IF NOT EXISTS projects (
|
|
31
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
32
|
+
name TEXT NOT NULL UNIQUE,
|
|
33
|
+
path TEXT NOT NULL,
|
|
34
|
+
last_version TEXT,
|
|
35
|
+
last_release_at TEXT,
|
|
36
|
+
registered_at TEXT NOT NULL
|
|
37
|
+
);
|
|
38
|
+
"""
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def _now() -> str:
|
|
42
|
+
return datetime.now(timezone.utc).isoformat()
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def connect() -> sqlite3.Connection:
    """Open (or create) dev.db with WAL mode and schema applied."""
    # The data directory may not exist yet on a fresh install.
    DATA_DIR.mkdir(parents=True, exist_ok=True)
    connection = sqlite3.connect(DB_PATH, timeout=10)
    for pragma in ("PRAGMA journal_mode=WAL", "PRAGMA foreign_keys=ON"):
        connection.execute(pragma)
    connection.executescript(_SCHEMA)
    connection.commit()
    return connection
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
def record_run(project: str, action: str, outcome: str,
               version: Optional[str] = None, details: Optional[dict] = None) -> int:
    """Insert a run record. Returns the new run id.

    Args:
        project: project name the run belongs to.
        action: workflow action (preflight, build, publish, release, check).
        outcome: initial outcome label for the run.
        version: version string, if known at insert time.
        details: extra payload stored as JSON text (falsy -> NULL).

    Returns:
        The id of the inserted row, or -1 if SQLite reported none.
    """
    conn = connect()
    try:
        cur = conn.execute(
            "INSERT INTO runs (project, action, version, outcome, started_at, details)"
            " VALUES (?, ?, ?, ?, ?, ?)",
            (project, action, version, outcome, _now(),
             json.dumps(details) if details else None),
        )
        conn.commit()
        run_id = cur.lastrowid
    finally:
        # Release the connection even if the INSERT raises.
        conn.close()
    return run_id if run_id is not None else -1
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def finish_run(run_id: int, outcome: str, details: Optional[dict] = None) -> None:
    """Update an existing run record with final outcome and finish time.

    Args:
        run_id: id previously returned by record_run().
        outcome: final outcome label.
        details: replacement payload stored as JSON text (falsy -> NULL).
    """
    conn = connect()
    try:
        conn.execute(
            "UPDATE runs SET outcome=?, finished_at=?, details=? WHERE id=?",
            (outcome, _now(), json.dumps(details) if details else None, run_id),
        )
        conn.commit()
    finally:
        # Release the connection even if the UPDATE raises.
        conn.close()
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def upsert_project(name: str, path: str, version: Optional[str] = None,
                   release_at: Optional[str] = None) -> None:
    """Register or update a project in the local registry.

    On conflict the path is always replaced, while last_version and
    last_release_at are only overwritten when a non-NULL value is given
    (COALESCE keeps the stored value otherwise).
    """
    conn = connect()
    try:
        conn.execute(
            "INSERT INTO projects (name, path, last_version, last_release_at, registered_at)"
            " VALUES (?, ?, ?, ?, ?)"
            " ON CONFLICT(name) DO UPDATE SET"
            " path=excluded.path,"
            " last_version=COALESCE(excluded.last_version, last_version),"
            " last_release_at=COALESCE(excluded.last_release_at, last_release_at)",
            (name, path, version, release_at, _now()),
        )
        conn.commit()
    finally:
        # Release the connection even if the upsert raises.
        conn.close()
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
def get_recent_runs(project: Optional[str] = None, limit: int = 20) -> list:
    """Return recent runs as dicts, newest first.

    Args:
        project: when given, only runs for that project are returned.
        limit: maximum number of rows.
    """
    # Build the query once instead of duplicating the column list.
    query = ("SELECT id, project, action, version, outcome, started_at, finished_at"
             " FROM runs")
    if project:
        query += " WHERE project=? ORDER BY id DESC LIMIT ?"
        params = (project, limit)
    else:
        query += " ORDER BY id DESC LIMIT ?"
        params = (limit,)

    conn = connect()
    try:
        rows = conn.execute(query, params).fetchall()
    finally:
        # Release the connection even if the SELECT raises.
        conn.close()

    cols = ["id", "project", "action", "version", "outcome", "started_at", "finished_at"]
    return [dict(zip(cols, r)) for r in rows]
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
def get_projects() -> list:
    """Return all registered projects as dicts, ordered by name."""
    conn = connect()
    try:
        rows = conn.execute(
            "SELECT id, name, path, last_version, last_release_at, registered_at"
            " FROM projects ORDER BY name"
        ).fetchall()
    finally:
        # Release the connection even if the SELECT raises.
        conn.close()
    cols = ["id", "name", "path", "last_version", "last_release_at", "registered_at"]
    return [dict(zip(cols, r)) for r in rows]
|
dev/kernel/paths.py
ADDED
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
"""dev.kernel.paths — canonical path resolution for the Development Engine Vector.
|
|
2
|
+
|
|
3
|
+
DEV_HOME is the single root for all runtime state (DB, logs, config).
|
|
4
|
+
Resolved at import time via:
|
|
5
|
+
|
|
6
|
+
1. DEV_HOME environment variable (absolute path)
|
|
7
|
+
2. ~/Library/goCosmix/tools/dev/ (macOS default)
|
|
8
|
+
3. ~/.gocosmix/tools/dev/ (fallback on non-macOS)
|
|
9
|
+
|
|
10
|
+
All goCosmix apps share the ~/Library/goCosmix/ namespace:
|
|
11
|
+
|
|
12
|
+
~/Library/goCosmix/
|
|
13
|
+
├── apps/
|
|
14
|
+
│ ├── code-data-ark/ ← long-running services
|
|
15
|
+
│ └── ...
|
|
16
|
+
├── tools/
|
|
17
|
+
│ ├── dev/ ← DEV_HOME
|
|
18
|
+
│ └── ...
|
|
19
|
+
└── system/ ← shared goCosmix infrastructure
|
|
20
|
+
"""
|
|
21
|
+
|
|
22
|
+
import os
|
|
23
|
+
from pathlib import Path
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
# ── home resolution ──────────────────────────────────────────────────────────
|
|
27
|
+
|
|
28
|
+
def _default_dev_home() -> Path:
|
|
29
|
+
"""Platform-appropriate default for DEV_HOME."""
|
|
30
|
+
library = Path.home() / "Library"
|
|
31
|
+
if library.exists(): # macOS
|
|
32
|
+
return library / "goCosmix" / "tools" / "dev"
|
|
33
|
+
return Path.home() / ".gocosmix" / "tools" / "dev"
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def get_dev_home() -> Path:
    """Return the DEV home directory, creating it if needed.

    The DEV_HOME environment variable overrides the platform default.
    """
    override = os.environ.get("DEV_HOME")
    home = Path(override).expanduser().resolve() if override else _default_dev_home()
    home.mkdir(parents=True, exist_ok=True)
    return home
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
# ── goCosmix namespace (shared across all goCosmix apps) ────────────────────
|
|
48
|
+
|
|
49
|
+
def get_gocosmix_home() -> Path:
    """Return ~/Library/goCosmix (macOS) or ~/.gocosmix (other)."""
    # Same ~/Library heuristic as _default_dev_home.
    library = Path.home() / "Library"
    return library / "goCosmix" if library.exists() else Path.home() / ".gocosmix"
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
# Shared goCosmix namespace roots (see module docstring layout diagram).
GOCOSMIX_HOME = get_gocosmix_home()
GOCOSMIX_APPS = GOCOSMIX_HOME / "apps"      # long-running services
GOCOSMIX_TOOLS = GOCOSMIX_HOME / "tools"    # CLI tools (contains DEV_HOME)
GOCOSMIX_SYSTEM = GOCOSMIX_HOME / "system"  # shared goCosmix infrastructure
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
# ── canonical paths (computed once at import) ────────────────────────────────
|
|
64
|
+
|
|
65
|
+
DEV_HOME = get_dev_home()          # root for all dev-cli runtime state
DATA_DIR = DEV_HOME / "data"       # holds the sqlite database
RUN_DIR = DEV_HOME / "run"         # pid files
LOG_DIR = DEV_HOME / "logs"
CONFIG_DIR = DEV_HOME / "config"
PMF_DIR = DEV_HOME / "pmf"         # PMF kernel state and logs

DB_PATH = DATA_DIR / "dev.db"
UI_PID_FILE = RUN_DIR / "ui.pid"
UI_LOG_FILE = LOG_DIR / "ui.log"
PMF_LOG_DIR = PMF_DIR / "logs"
RUNTIME_FILE = PMF_DIR / "runtime.json"  # persisted PMF service state

# UI port in the tools namespace (see pmf_kernel: 9000 is reserved for
# the future tool orchestrator).
DEFAULT_PORT = 9001
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
def ensure_dirs() -> None:
    """Create all runtime directories. Safe to call multiple times."""
    required = (
        GOCOSMIX_HOME, GOCOSMIX_APPS, GOCOSMIX_TOOLS, GOCOSMIX_SYSTEM,
        DATA_DIR, RUN_DIR, LOG_DIR, CONFIG_DIR, PMF_DIR, PMF_LOG_DIR,
    )
    for directory in required:
        directory.mkdir(parents=True, exist_ok=True)
|
dev/kernel/pmf_kernel.py
ADDED
|
@@ -0,0 +1,432 @@
|
|
|
1
|
+
"""dev.kernel.pmf_kernel — embedded PMF service manager for the Development Engine Vector.
|
|
2
|
+
|
|
3
|
+
Manages the web UI as a background process with PID tracking and launchd integration.
|
|
4
|
+
Port 9001 — tools namespace (9000 reserved for the future tool orchestrator).
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import json
|
|
8
|
+
import os
|
|
9
|
+
import shutil
|
|
10
|
+
import signal
|
|
11
|
+
import socket
|
|
12
|
+
import subprocess
|
|
13
|
+
import sys
|
|
14
|
+
import time
|
|
15
|
+
import webbrowser
|
|
16
|
+
from dataclasses import dataclass
|
|
17
|
+
from pathlib import Path
|
|
18
|
+
from typing import Dict, List, Optional
|
|
19
|
+
|
|
20
|
+
from dev.kernel.paths import (
|
|
21
|
+
LOG_DIR, RUNTIME_FILE,
|
|
22
|
+
UI_PID_FILE, DEV_HOME,
|
|
23
|
+
ensure_dirs,
|
|
24
|
+
DEFAULT_PORT,
|
|
25
|
+
)
|
|
26
|
+
|
|
27
|
+
# Loopback only — the UI is never bound to an external interface.
DEFAULT_HOST = "127.0.0.1"

# ── launchd integration ──────────────────────────────────────────────────────

# LaunchAgent label; also the plist filename stem under ~/Library/LaunchAgents.
PLIST_LABEL = "com.gocosmix.dev"
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def plist_path() -> Path:
    """Location of the per-user LaunchAgent plist for this tool."""
    agents_dir = Path.home() / "Library" / "LaunchAgents"
    return agents_dir / f"{PLIST_LABEL}.plist"
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def generate_plist(dev_bin: str, dev_home: Path) -> str:
    """Render the LaunchAgent plist XML that runs `dev pmf up` at login.

    Args:
        dev_bin: absolute path of the `dev` executable to invoke.
        dev_home: DEV_HOME directory; exported into the agent's
            environment and used for the combined stdout/stderr log path.

    Returns:
        The complete plist document as a string.
    """
    # stdout and stderr share one launchd log file.
    log = dev_home / "logs" / "launchd.log"
    return f"""<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>{PLIST_LABEL}</string>
<key>ProgramArguments</key>
<array>
<string>{dev_bin}</string>
<string>pmf</string>
<string>up</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>KeepAlive</key>
<false/>
<key>StandardOutPath</key>
<string>{log}</string>
<key>StandardErrorPath</key>
<string>{log}</string>
<key>EnvironmentVariables</key>
<dict>
<key>DEV_HOME</key>
<string>{dev_home}</string>
<key>PATH</key>
<string>{os.path.dirname(dev_bin)}:/usr/local/bin:/usr/bin:/bin</string>
</dict>
</dict>
</plist>
"""
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def install_launchd(dev_home: Path) -> Path:
    """Write the LaunchAgent plist and load it with launchctl.

    Returns the plist path; raises PMFKernelError when the `dev` binary
    cannot be located or launchctl refuses to load the agent.
    """
    dev_bin = shutil.which("dev")
    if not dev_bin:
        raise PMFKernelError("dev binary not found on PATH — cannot generate plist")

    target = plist_path()
    target.parent.mkdir(parents=True, exist_ok=True)
    target.write_text(generate_plist(dev_bin, dev_home))

    # Unload any previous agent first so a re-install picks up the fresh
    # plist; this step's failure is ignored (agent may not be loaded).
    subprocess.run(["launchctl", "unload", str(target)], capture_output=True)
    load = subprocess.run(
        ["launchctl", "load", str(target)], capture_output=True, text=True
    )
    if load.returncode != 0:
        raise PMFKernelError(f"launchctl load failed: {load.stderr.strip()}")
    return target
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def uninstall_launchd() -> None:
    """Unload and remove the LaunchAgent plist."""
    target = plist_path()
    if not target.exists():
        return
    # Best-effort unload; the file is removed regardless of the result.
    subprocess.run(["launchctl", "unload", str(target)], capture_output=True)
    target.unlink(missing_ok=True)
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def open_browser_when_ready(
    url: str,
    host: str = DEFAULT_HOST,
    port: int = DEFAULT_PORT,
    timeout: float = 12.0,
) -> None:
    """Poll host:port in a daemon thread and open browser when ready.

    Returns immediately; the poll gives up silently after `timeout`
    seconds without a successful connection.
    """
    import threading

    def _poll() -> None:
        waited = 0.0
        while waited < timeout:
            try:
                with socket.create_connection((host, port), timeout=0.5):
                    pass
            except OSError:
                time.sleep(0.25)
                waited += 0.25
                continue
            webbrowser.open(url)
            return

    threading.Thread(target=_poll, daemon=True).start()
|
|
119
|
+
|
|
120
|
+
|
|
121
|
+
def wait_for_port_and_open_browser(
    url: str,
    host: str = DEFAULT_HOST,
    port: int = DEFAULT_PORT,
    timeout: float = 8.0,
) -> bool:
    """Block until host:port accepts connections, then open browser.

    Args:
        url: URL handed to the system browser once the port is live.
        host: address polled with short TCP connects.
        port: port polled with short TCP connects.
        timeout: overall wall-clock budget in seconds.

    Returns:
        True if the port came up and the browser was opened, else False.
    """
    # Use a monotonic deadline: the old hand-counted `elapsed` ignored
    # time spent inside each connect attempt (up to 0.5 s apiece), so the
    # real wait could run far past `timeout`.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        try:
            with socket.create_connection((host, port), timeout=0.5):
                webbrowser.open(url)
                return True
        except OSError:
            time.sleep(0.25)
    return False
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
# Create all runtime directories eagerly at import time so every code
# path below can assume they exist.
ensure_dirs()
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
def _now_ts() -> int:
|
|
144
|
+
return int(time.time() * 1000)
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
def _now_iso() -> str:
|
|
148
|
+
return time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime())
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
@dataclass
class ServiceSpec:
    """Static description of a managed service and how to launch it."""

    service_id: str
    label: str
    service_type: str
    description: str
    command: Optional[List[str]] = None
    cwd: Optional[Path] = None
    env: Optional[Dict[str, str]] = None
    pid_file: Optional[Path] = None
    log_file: Optional[Path] = None
    allowed_actions: Optional[List[str]] = None

    def build_command(self, options: Optional[Dict[str, str]] = None) -> List[str]:
        """Return the argv used to start this service."""
        if self.service_id == "ui":
            opts = options or {}
            host = opts.get("host", DEFAULT_HOST)
            port = opts.get("port", DEFAULT_PORT)
            # Launch the UI via `python -c` so no console-script shim
            # needs to exist on PATH.
            snippet = (
                "import dev.ui.web as w; "
                f"w.start_server(host={json.dumps(host)}, port={port})"
            )
            return [sys.executable, "-c", snippet]
        if self.command is not None:
            return list(self.command)
        raise RuntimeError(f"No command configured for service: {self.service_id}")
|
|
179
|
+
|
|
180
|
+
|
|
181
|
+
# Registry of services the kernel can manage; currently only the web UI.
SERVICE_SPECS: Dict[str, ServiceSpec] = {
    "ui": ServiceSpec(
        service_id="ui",
        label="Web UI",
        service_type="daemon",
        description="Embedded dev dashboard — runs, health, manifest, identity.",
        cwd=DEV_HOME,
        pid_file=UI_PID_FILE,
        log_file=LOG_DIR / "ui.log",
        allowed_actions=["start", "stop", "restart", "status"],
    ),
}
|
|
193
|
+
|
|
194
|
+
|
|
195
|
+
def _default_state() -> Dict:
    """Fresh runtime-state dict: every known service marked stopped."""
    blank = {
        "status": "stopped",
        "pid": None,
        "exit_code": None,
        "started_at": None,
        "updated_at": None,
        "last_error": None,
    }
    return {"services": {sid: {"service_id": sid, **blank}
                         for sid in SERVICE_SPECS}}
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
class PMFKernelError(Exception):
    """Raised for any service-management failure in the PMF kernel."""
    pass
|
|
209
|
+
|
|
210
|
+
|
|
211
|
+
class PMFKernel:
    """Process manager for the services declared in SERVICE_SPECS.

    State is persisted as JSON in RUNTIME_FILE so status survives across
    CLI invocations. Liveness is always re-verified against the OS
    (signal 0 probe) rather than trusted from the stored state, and the
    service's pid file is consulted as a second source of truth.
    """

    def __init__(self) -> None:
        self.state_path = RUNTIME_FILE
        self.state = self._load_state()

    def _load_state(self) -> Dict:
        """Read persisted state; rebuild a fresh default on any error."""
        if self.state_path.exists():
            try:
                return json.loads(self.state_path.read_text())
            except Exception:
                # Corrupt/unreadable state file — fall through and rebuild.
                pass
        state = _default_state()
        self._save_state(state)
        return state

    def _save_state(self, state: Optional[Dict] = None) -> None:
        """Persist the given (or current) state dict to RUNTIME_FILE."""
        if state is None:
            state = self.state
        self.state_path.write_text(json.dumps(state, indent=2))

    def _is_alive(self, pid: int) -> bool:
        """True if a process with this pid exists (kill with signal 0)."""
        try:
            os.kill(pid, 0)
            return True
        except OSError:
            return False

    def _refresh(self, service_id: str) -> Dict:
        """Reconcile the stored state with actual process liveness.

        Order matters: (1) a tracked live pid wins; (2) otherwise a live
        pid found in the service's pid file is adopted (service started
        out-of-band); (3) otherwise a dead tracked pid is cleared.
        """
        spec = SERVICE_SPECS[service_id]
        state = self.state["services"][service_id]

        pid = state.get("pid")
        if pid and self._is_alive(pid):
            # Tracked pid is alive: normalize any stale status label.
            if state["status"] not in ("running", "starting"):
                state["status"] = "running"
                state["updated_at"] = _now_iso()
            return state

        if spec.pid_file and spec.pid_file.exists():
            try:
                file_pid = int(spec.pid_file.read_text().strip())
                if self._is_alive(file_pid):
                    state["pid"] = file_pid
                    state["status"] = "running"
                    state["updated_at"] = _now_iso()
                    self._save_state()
                    return state
            except Exception:
                # Garbage pid file — ignore it and fall through.
                pass

        if pid and not self._is_alive(pid):
            # Tracked pid is gone: mark the service stopped.
            state["status"] = "stopped"
            state["pid"] = None
            self._save_state()

        return state

    def _spec(self, service_id: str) -> ServiceSpec:
        """Look up a ServiceSpec, raising PMFKernelError when unknown."""
        spec = SERVICE_SPECS.get(service_id)
        if not spec:
            raise PMFKernelError(f"Unknown service: {service_id}")
        return spec

    def services(self) -> List[Dict]:
        """Return a refreshed status summary for every known service."""
        results = []
        for sid in SERVICE_SPECS:
            spec = SERVICE_SPECS[sid]
            state = self._refresh(sid)
            results.append({
                "service_id": sid,
                "label": spec.label,
                "description": spec.description,
                "service_type": spec.service_type,
                "status": state["status"],
                "pid": state.get("pid"),
                "exit_code": state.get("exit_code"),
                "started_at": state.get("started_at"),
                "updated_at": state.get("updated_at"),
                "log_file": str(spec.log_file) if spec.log_file else None,
                "allowed_actions": spec.allowed_actions,
            })
        return results

    def service_status(self, service_id: str) -> Dict:
        """Return the refreshed status dict for one service.

        Raises:
            PMFKernelError: if service_id is unknown.
        """
        spec = self._spec(service_id)
        state = self._refresh(service_id)
        return {
            "service_id": service_id,
            "label": spec.label,
            "description": spec.description,
            "service_type": spec.service_type,
            "status": state["status"],
            "pid": state.get("pid"),
            "exit_code": state.get("exit_code"),
            "started_at": state.get("started_at"),
            "updated_at": state.get("updated_at"),
            "log_file": str(spec.log_file) if spec.log_file else None,
            "allowed_actions": spec.allowed_actions,
        }

    def start_service(self, service_id: str, options: Optional[Dict[str, str]] = None) -> Dict:
        """Launch a service in the background and return its status.

        Raises:
            PMFKernelError: if the service is unknown or already running.
        """
        spec = self._spec(service_id)
        state = self.state["services"][service_id]

        # Refuse to double-start: a live pid in the pid file means the
        # service is already up; a stale/garbage pid file is removed.
        if spec.pid_file and spec.pid_file.exists():
            try:
                existing_pid = int(spec.pid_file.read_text().strip())
                if self._is_alive(existing_pid):
                    raise PMFKernelError(f"{spec.label} is already running (pid={existing_pid})")
                spec.pid_file.unlink(missing_ok=True)
            except ValueError:
                spec.pid_file.unlink(missing_ok=True)

        command = spec.build_command(options or {})
        log_file = spec.log_file or LOG_DIR / f"{service_id}.log"

        with open(log_file, "a") as fh:
            proc = subprocess.Popen(
                command,
                cwd=str(spec.cwd or DEV_HOME),
                env={**os.environ, **(spec.env or {})},
                stdout=fh,
                stderr=fh,
                # New session/process group so the service outlives the CLI.
                preexec_fn=os.setsid,
            )

        state["pid"] = proc.pid
        state["status"] = "starting"
        state["started_at"] = _now_iso()
        state["exit_code"] = None
        state["last_error"] = None
        state["updated_at"] = _now_iso()
        self._save_state()

        # Wait briefly for pid file
        if spec.pid_file:
            # Poll up to ~3s for the service to write its own pid file
            # (the authoritative pid if the launcher forks/re-execs).
            for _ in range(12):
                if spec.pid_file.exists():
                    try:
                        pid = int(spec.pid_file.read_text().strip())
                        if self._is_alive(pid):
                            state["pid"] = pid
                            state["status"] = "running"
                            state["updated_at"] = _now_iso()
                            self._save_state()
                            return self.service_status(service_id)
                    except Exception:
                        pass
                time.sleep(0.25)
            # Write pid ourselves if server didn't
            if self._is_alive(proc.pid):
                try:
                    spec.pid_file.write_text(str(proc.pid))
                    state["pid"] = proc.pid
                except Exception:
                    pass

        state["status"] = "running"
        self._save_state()
        return self.service_status(service_id)

    def stop_service(self, service_id: str) -> Dict:
        """Stop a running service via SIGTERM and return its status.

        The pid file is preferred over the tracked pid when both exist.

        Raises:
            PMFKernelError: if no pid is known or the signal fails.
        """
        spec = self._spec(service_id)
        state = self.state["services"][service_id]

        pid = None
        if spec.pid_file and spec.pid_file.exists():
            try:
                pid = int(spec.pid_file.read_text().strip())
            except Exception:
                pass
        if pid is None:
            pid = state.get("pid")
        if pid is None:
            raise PMFKernelError(f"No running PID found for {spec.label}")

        if not self._is_alive(pid):
            # Already gone — just reconcile the bookkeeping.
            state["status"] = "stopped"
            state["pid"] = None
            self._save_state()
            return self.service_status(service_id)

        try:
            os.kill(pid, signal.SIGTERM)
            time.sleep(0.5)
        except OSError as exc:
            raise PMFKernelError(f"Failed to stop {spec.label}: {exc}")

        if spec.pid_file and spec.pid_file.exists():
            spec.pid_file.unlink(missing_ok=True)

        state["status"] = "stopped"
        state["pid"] = None
        state["updated_at"] = _now_iso()
        self._save_state()
        return self.service_status(service_id)

    def restart_service(self, service_id: str, options: Optional[Dict[str, str]] = None) -> Dict:
        """Stop (ignoring not-running errors) then start the service."""
        try:
            self.stop_service(service_id)
        except PMFKernelError:
            # Service wasn't running — restart degrades to a plain start.
            pass
        time.sleep(0.5)
        return self.start_service(service_id, options=options or {})

    def tail_log(self, service_id: str, lines: int = 200) -> str:
        """Return the last `lines` lines of the service log.

        Reads the file backwards in 4 KiB blocks until enough newlines
        have been collected (capped at 8 KiB per requested line), so
        large logs are never read in full.
        """
        spec = self._spec(service_id)
        if not spec.log_file or not spec.log_file.exists():
            return ""
        with open(spec.log_file, "rb") as fh:
            fh.seek(0, os.SEEK_END)
            pos = fh.tell()
            chunk = bytearray()
            while pos > 0 and len(chunk) < 8192 * lines:
                step = min(4096, pos)
                pos -= step
                fh.seek(pos)
                # Prepend the block so bytes stay in file order.
                chunk[:0] = fh.read(step)
                if chunk.count(b"\n") > lines:
                    break
        text = chunk.decode("utf-8", errors="replace")
        return "\n".join(text.strip().splitlines()[-lines:])
|