procler 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- procler/__init__.py +3 -0
- procler/__main__.py +6 -0
- procler/api/__init__.py +5 -0
- procler/api/app.py +261 -0
- procler/api/deps.py +21 -0
- procler/api/routes/__init__.py +5 -0
- procler/api/routes/config.py +290 -0
- procler/api/routes/groups.py +62 -0
- procler/api/routes/logs.py +43 -0
- procler/api/routes/processes.py +185 -0
- procler/api/routes/recipes.py +69 -0
- procler/api/routes/snippets.py +134 -0
- procler/api/routes/ws.py +459 -0
- procler/cli.py +1478 -0
- procler/config/__init__.py +65 -0
- procler/config/changelog.py +148 -0
- procler/config/loader.py +256 -0
- procler/config/schema.py +315 -0
- procler/core/__init__.py +54 -0
- procler/core/context_base.py +117 -0
- procler/core/context_docker.py +384 -0
- procler/core/context_local.py +287 -0
- procler/core/daemon_detector.py +325 -0
- procler/core/events.py +74 -0
- procler/core/groups.py +419 -0
- procler/core/health.py +280 -0
- procler/core/log_tailer.py +262 -0
- procler/core/process_manager.py +1277 -0
- procler/core/recipes.py +330 -0
- procler/core/snippets.py +231 -0
- procler/core/variable_substitution.py +65 -0
- procler/db.py +96 -0
- procler/logging.py +41 -0
- procler/models.py +130 -0
- procler/py.typed +0 -0
- procler/settings.py +29 -0
- procler/static/assets/AboutView-BwZnsfpW.js +4 -0
- procler/static/assets/AboutView-UHbxWXcS.css +1 -0
- procler/static/assets/Code-HTS-H1S6.js +74 -0
- procler/static/assets/ConfigView-CGJcmp9G.css +1 -0
- procler/static/assets/ConfigView-aVtbRDf8.js +1 -0
- procler/static/assets/DashboardView-C5jw9Nsd.css +1 -0
- procler/static/assets/DashboardView-Dab7Cu9v.js +1 -0
- procler/static/assets/DataTable-z39TOAa4.js +746 -0
- procler/static/assets/DescriptionsItem-B2E8YbqJ.js +74 -0
- procler/static/assets/Divider-Dk-6aD2Y.js +42 -0
- procler/static/assets/Empty-MuygEHZM.js +24 -0
- procler/static/assets/Grid-CZ9QVKAT.js +1 -0
- procler/static/assets/GroupsView-BALG7i1X.js +1 -0
- procler/static/assets/GroupsView-gXAI1CVC.css +1 -0
- procler/static/assets/Input-e0xaxoWE.js +259 -0
- procler/static/assets/PhArrowsClockwise.vue-DqDg31az.js +1 -0
- procler/static/assets/PhCheckCircle.vue-Fwj9sh9m.js +1 -0
- procler/static/assets/PhEye.vue-JcPHciC2.js +1 -0
- procler/static/assets/PhPlay.vue-CZm7Gy3u.js +1 -0
- procler/static/assets/PhPlus.vue-yTWqKlSh.js +1 -0
- procler/static/assets/PhStop.vue-DxsqwIki.js +1 -0
- procler/static/assets/PhTrash.vue-DcqQbN1_.js +125 -0
- procler/static/assets/PhXCircle.vue-BXWmrabV.js +1 -0
- procler/static/assets/ProcessDetailView-DDbtIWq9.css +1 -0
- procler/static/assets/ProcessDetailView-DPtdNV-q.js +1 -0
- procler/static/assets/ProcessesView-B3a6Umur.js +1 -0
- procler/static/assets/ProcessesView-goLmghbJ.css +1 -0
- procler/static/assets/RecipesView-D2VxdneD.js +166 -0
- procler/static/assets/RecipesView-DXnFDCK4.css +1 -0
- procler/static/assets/Select-BBR17AHq.js +317 -0
- procler/static/assets/SnippetsView-B3a9q3AI.css +1 -0
- procler/static/assets/SnippetsView-DBCB2yGq.js +1 -0
- procler/static/assets/Spin-BXTjvFUk.js +90 -0
- procler/static/assets/Tag-Bh_qV63A.js +71 -0
- procler/static/assets/changelog-KkTT4H9-.js +1 -0
- procler/static/assets/groups-Zu-_v8ey.js +1 -0
- procler/static/assets/index-BsN-YMXq.css +1 -0
- procler/static/assets/index-BzW1XhyH.js +1282 -0
- procler/static/assets/procler-DOrSB1Vj.js +1 -0
- procler/static/assets/recipes-1w5SseGb.js +1 -0
- procler/static/index.html +17 -0
- procler/static/procler.png +0 -0
- procler-0.2.0.dist-info/METADATA +545 -0
- procler-0.2.0.dist-info/RECORD +83 -0
- procler-0.2.0.dist-info/WHEEL +4 -0
- procler-0.2.0.dist-info/entry_points.txt +2 -0
- procler-0.2.0.dist-info/licenses/LICENSE +21 -0
procler/__init__.py
ADDED
procler/__main__.py
ADDED
procler/api/__init__.py
ADDED
procler/api/app.py
ADDED
|
@@ -0,0 +1,261 @@
|
|
|
1
|
+
"""FastAPI application factory."""
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import os
|
|
5
|
+
import signal
|
|
6
|
+
from contextlib import asynccontextmanager
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
|
|
9
|
+
from fastapi import FastAPI, Request
|
|
10
|
+
from fastapi.middleware.cors import CORSMiddleware
|
|
11
|
+
from fastapi.responses import FileResponse, JSONResponse
|
|
12
|
+
from fastapi.staticfiles import StaticFiles
|
|
13
|
+
|
|
14
|
+
from .. import __version__
|
|
15
|
+
from ..logging import logger
|
|
16
|
+
|
|
17
|
+
# Static files directory (where Vue build output goes)
|
|
18
|
+
STATIC_DIR = Path(__file__).parent.parent / "static"
|
|
19
|
+
|
|
20
|
+
# Background task for log rotation
|
|
21
|
+
_log_rotation_task: asyncio.Task | None = None
|
|
22
|
+
_shutdown_event = asyncio.Event()
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
async def _log_rotation_loop():
    """Periodically trim per-process log storage until shutdown is requested.

    Interval and retention limit come from the PROCLER_LOG_ROTATION_INTERVAL
    (seconds, default 3600) and PROCLER_MAX_LOGS_PER_PROCESS (default 10000)
    environment variables.
    """
    from ..core import get_process_manager

    interval = int(os.environ.get("PROCLER_LOG_ROTATION_INTERVAL", 3600))  # 1 hour default
    limit = int(os.environ.get("PROCLER_MAX_LOGS_PER_PROCESS", 10000))

    logger.info(f"Log rotation started (interval={interval}s, max_logs={limit})")

    while True:
        if _shutdown_event.is_set():
            break
        try:
            await asyncio.sleep(interval)
            # Re-check after the sleep: shutdown may have been requested meanwhile.
            if _shutdown_event.is_set():
                break

            # cleanup_all_logs is blocking, so run it off the event loop.
            trimmed = await asyncio.to_thread(get_process_manager().cleanup_all_logs, limit)
            if trimmed:
                logger.info(f"Rotated logs for {len(trimmed)} processes")
        except asyncio.CancelledError:
            break
        except Exception as e:
            # Keep the loop alive on unexpected errors; just record them.
            logger.error(f"Log rotation error: {e}")
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
async def _recover_processes():
    """Check for orphaned processes on startup and update their status.

    Processes recorded as RUNNING or STARTING may have died while the server
    was down.  Each recorded PID is probed with signal 0; dead entries (and
    entries with no PID at all) are marked STOPPED in the database.
    """
    from ..db import init_database
    from ..models import Process, ProcessStatus

    init_database()

    # Find processes marked as running
    all_procs = Process.query().all()
    running = [p for p in all_procs if p.status in [ProcessStatus.RUNNING.value, ProcessStatus.STARTING.value]]

    if not running:
        return

    logger.info(f"Checking {len(running)} processes marked as running...")

    for proc in running:
        if proc.pid:
            # Check if PID is still running (signal 0 = probe, sends nothing)
            try:
                os.kill(proc.pid, 0)
                logger.debug(f"Process '{proc.name}' (PID {proc.pid}) is still running")
            except PermissionError:
                # EPERM means the PID exists but belongs to another user, so the
                # process IS alive.  Previously this fell into the broad OSError
                # catch and a live process was wrongly marked stopped.
                logger.debug(f"Process '{proc.name}' (PID {proc.pid}) is still running")
            except OSError:
                # Covers ProcessLookupError (ESRCH): process is dead, update status
                logger.warning(f"Process '{proc.name}' (PID {proc.pid}) is dead, marking as stopped")
                proc.status = ProcessStatus.STOPPED.value
                proc.pid = None
                proc.save()
        else:
            # No PID but marked as running - mark as stopped
            logger.warning(f"Process '{proc.name}' has no PID but marked running, fixing")
            proc.status = ProcessStatus.STOPPED.value
            proc.save()
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
async def _graceful_shutdown():
    """Stop every running process before the server exits."""
    from ..core import get_process_manager

    # Already shutting down — avoid re-entering and looping.
    if _shutdown_event.is_set():
        return

    logger.info("Graceful shutdown initiated...")
    _shutdown_event.set()

    manager = get_process_manager()
    status = await manager.status()
    if not status["success"]:
        return

    to_stop = [p for p in status["data"]["processes"] if p["status"] == "running"]
    if not to_stop:
        return

    logger.info(f"Stopping {len(to_stop)} running processes...")
    for proc in to_stop:
        name = proc["name"]
        try:
            await manager.stop(name, timeout=5.0)
            logger.debug(f"Stopped '{name}'")
        except Exception as e:
            # Best effort: keep stopping the rest even if one fails.
            logger.error(f"Failed to stop '{name}': {e}")
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan handler for startup and shutdown.

    Startup: reconciles stale process records, launches the log-rotation
    background task, and installs SIGTERM/SIGINT handlers (POSIX only).
    Shutdown: cancels the rotation task, then stops all managed processes.
    """
    global _log_rotation_task

    # Startup
    logger.info(f"Procler v{__version__} starting...")

    # Recover orphaned processes
    await _recover_processes()

    # Start log rotation background task
    _log_rotation_task = asyncio.create_task(_log_rotation_loop())

    # Register signal handlers for graceful shutdown
    # Only trigger shutdown once even if signal received multiple times
    shutdown_triggered = False

    def handle_shutdown_signal():
        nonlocal shutdown_triggered
        if not shutdown_triggered:
            shutdown_triggered = True
            _shutdown_event.set()

    # Fix: get_event_loop() is deprecated inside a running coroutine on
    # modern Python; get_running_loop() is the correct, warning-free call.
    loop = asyncio.get_running_loop()
    for sig in (signal.SIGTERM, signal.SIGINT):
        try:
            loop.add_signal_handler(sig, handle_shutdown_signal)
        except NotImplementedError:
            # Windows doesn't support add_signal_handler
            pass

    logger.info("Procler ready")

    yield

    # Shutdown
    logger.info("Procler shutting down...")

    # Cancel log rotation task
    if _log_rotation_task:
        _log_rotation_task.cancel()
        try:
            await _log_rotation_task
        except asyncio.CancelledError:
            pass

    # Graceful shutdown of processes
    await _graceful_shutdown()

    logger.info("Procler stopped")
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
def create_app() -> FastAPI:
    """Create and configure the FastAPI application.

    Wires up: a structured-JSON global exception handler, CORS, the API
    routers under /api/*, a health endpoint, and — when a built frontend
    exists under STATIC_DIR — static-asset mounting plus an SPA catch-all.
    """
    app = FastAPI(
        title="Procler",
        description="LLM-first process manager for developers",
        version=__version__,
        docs_url="/api/docs",
        redoc_url="/api/redoc",
        openapi_url="/api/openapi.json",
        lifespan=lifespan,
    )

    # Global exception handler for unexpected errors
    @app.exception_handler(Exception)
    async def global_exception_handler(request: Request, exc: Exception):
        """Handle unexpected exceptions with structured JSON response."""
        # Log the error for debugging
        logger.exception(f"Unexpected error handling {request.method} {request.url.path}")

        return JSONResponse(
            status_code=500,
            content={
                "success": False,
                "error": "Internal server error",
                "error_code": "internal_error",
                # Exception details are only exposed when PROCLER_DEBUG is set.
                "detail": str(exc) if os.environ.get("PROCLER_DEBUG") else None,
            },
        )

    # Configure CORS
    # In development, Vite runs on :5173 and proxies /api to backend
    # In production, everything is served from the same origin
    cors_origins = os.environ.get("PROCLER_CORS_ORIGINS", "").split(",")
    cors_origins = [o.strip() for o in cors_origins if o.strip()]

    # Default development origins
    if not cors_origins:
        cors_origins = [
            "http://localhost:5173",  # Vite dev server
            "http://127.0.0.1:5173",
            "http://localhost:8000",  # Same origin
            "http://127.0.0.1:8000",
        ]

    app.add_middleware(
        CORSMiddleware,
        allow_origins=cors_origins,
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )

    # Include API routers
    # NOTE(review): imported inside the factory rather than at module top —
    # presumably to avoid circular imports with the route modules; confirm.
    from .routes import config, groups, logs, processes, recipes, snippets, ws

    app.include_router(processes.router, prefix="/api/processes", tags=["processes"])
    app.include_router(groups.router, prefix="/api/groups", tags=["groups"])
    app.include_router(recipes.router, prefix="/api/recipes", tags=["recipes"])
    app.include_router(config.router, prefix="/api/config", tags=["config"])
    app.include_router(logs.router, prefix="/api/logs", tags=["logs"])
    app.include_router(snippets.router, prefix="/api/snippets", tags=["snippets"])
    app.include_router(ws.router, prefix="/api", tags=["websocket"])

    @app.get("/api/health")
    async def health_check():
        """Health check endpoint."""
        return {"status": "healthy", "version": __version__}

    # Serve static files if they exist (production mode)
    if STATIC_DIR.exists() and (STATIC_DIR / "index.html").exists():
        # Mount static assets (js, css, etc.)
        app.mount("/assets", StaticFiles(directory=STATIC_DIR / "assets"), name="assets")

        # Serve index.html for SPA routing (catch-all for non-API routes)
        @app.get("/{full_path:path}")
        async def serve_spa(request: Request, full_path: str):
            """Serve the SPA for all non-API routes."""
            # Don't serve index.html for API routes
            if full_path.startswith("api/"):
                return JSONResponse(
                    {"success": False, "error": "Not found", "error_code": "not_found"},
                    status_code=404,
                )

            # Check if it's a static file request
            static_file = STATIC_DIR / full_path
            if static_file.exists() and static_file.is_file():
                return FileResponse(static_file)

            # Return index.html for SPA routing
            return FileResponse(STATIC_DIR / "index.html")

    return app
|
|
258
|
+
|
|
259
|
+
|
|
260
|
+
# Create default app instance (module-level so an ASGI server can reference it)
app = create_app()
|
procler/api/deps.py
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
"""Dependency injection for FastAPI routes."""
|
|
2
|
+
|
|
3
|
+
from ..core import ProcessManager, SnippetManager, get_process_manager, get_snippet_manager
|
|
4
|
+
from ..db import init_database
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def get_db():
    """Ensure the database is initialized and hand it back."""
    db = init_database()
    return db
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def get_manager() -> ProcessManager:
    """Return the shared ProcessManager, initializing the database first."""
    init_database()
    manager = get_process_manager()
    return manager
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def get_snippets() -> SnippetManager:
    """Return the shared SnippetManager, initializing the database first."""
    init_database()
    snippets = get_snippet_manager()
    return snippets
|
|
@@ -0,0 +1,290 @@
|
|
|
1
|
+
"""Config management API routes."""
|
|
2
|
+
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
from fastapi import APIRouter
|
|
6
|
+
from pydantic import BaseModel
|
|
7
|
+
|
|
8
|
+
from ...config import (
|
|
9
|
+
find_config_dir,
|
|
10
|
+
get_changelog_path,
|
|
11
|
+
get_config,
|
|
12
|
+
get_config_file_path,
|
|
13
|
+
read_changelog,
|
|
14
|
+
reload_config,
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
router = APIRouter()
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class ConfigResponse(BaseModel):
    """Standard response wrapper for the config endpoints."""

    # True when the request was handled without error.
    success: bool
    # Endpoint payload, populated on success.
    data: dict[str, Any] | None = None
    # Human-readable error message, populated on failure.
    error: str | None = None
    # Machine-readable error identifier (e.g. "config_error").
    error_code: str | None = None
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
@router.get("")
async def get_config_info() -> ConfigResponse:
    """Get current config status and overview."""
    try:
        config = get_config()
        config_path = get_config_file_path()
        changelog_path = get_changelog_path()

        # Assemble the overview payload before wrapping it in the response.
        overview = {
            "config_dir": str(find_config_dir()),
            "config_file": str(config_path),
            "config_exists": config_path.exists(),
            "changelog_file": str(changelog_path),
            "changelog_exists": changelog_path.exists(),
            "version": config.version,
            "vars": config.vars,
            "stats": {
                "processes": len(config.processes),
                "groups": len(config.groups),
                "recipes": len(config.recipes),
                "snippets": len(config.snippets),
                "vars": len(config.vars),
            },
        }
        return ConfigResponse(success=True, data=overview)
    except Exception as e:
        return ConfigResponse(
            success=False,
            error=str(e),
            error_code="config_error",
        )
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
@router.get("/processes")
async def list_config_processes() -> ConfigResponse:
    """List all processes defined in config (not runtime DB)."""
    try:
        config = get_config()
    except Exception as e:
        # Consistency fix: the sibling config endpoints return a structured
        # ConfigResponse on config-load failure; previously this one let the
        # exception escape to the generic 500 handler instead.
        return ConfigResponse(
            success=False,
            error=str(e),
            error_code="config_error",
        )

    processes = [
        {
            "name": name,
            "command": proc.command,
            "context": proc.context.value,
            "container": proc.container,
            "cwd": proc.cwd,
            "description": proc.description,
            "tags": proc.tags,
        }
        for name, proc in config.processes.items()
    ]

    return ConfigResponse(
        success=True,
        data={
            "processes": processes,
            "count": len(processes),
        },
    )
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
@router.post("/reload")
async def reload_config_endpoint() -> ConfigResponse:
    """Re-read the config file from disk and report what was loaded."""
    try:
        config = reload_config()
        stats = {
            "processes": len(config.processes),
            "groups": len(config.groups),
            "recipes": len(config.recipes),
            "snippets": len(config.snippets),
        }
        payload = {
            "reloaded": True,
            "version": config.version,
            "stats": stats,
        }
        return ConfigResponse(success=True, data=payload)
    except Exception as e:
        return ConfigResponse(
            success=False,
            error=str(e),
            error_code="reload_error",
        )
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
@router.get("/changelog")
async def get_changelog(tail: int = 50, format: str = "parsed") -> ConfigResponse:
    """
    Get recent changelog entries.

    Args:
        tail: Number of entries to return (default 50). Values <= 0 now yield
            an empty result instead of the entire changelog.
        format: "parsed" for structured JSON entries, "raw" for text lines
    """
    changelog_path = get_changelog_path()

    if not changelog_path.exists():
        return ConfigResponse(
            success=True,
            data={
                "entries": [],
                "count": 0,
                "format": format,
            },
        )

    try:
        if format == "parsed":
            # Return structured JSON entries - LLM-friendly
            all_entries = read_changelog()
            # Fix: items[-tail:] with tail <= 0 returned the WHOLE list
            # ([-0:] == [0:]); guard explicitly.  The old `len(x) > tail`
            # ternary was redundant — [-tail:] already handles short lists.
            entries = all_entries[-tail:] if tail > 0 else []
            return ConfigResponse(
                success=True,
                data={
                    "entries": entries,
                    "count": len(entries),
                    "total": len(all_entries),
                    "format": "parsed",
                },
            )
        else:
            # Return raw lines
            lines = changelog_path.read_text().strip().splitlines()
            recent_lines = lines[-tail:] if tail > 0 else []
            entries = [line for line in recent_lines if line.strip()]
            return ConfigResponse(
                success=True,
                data={
                    "entries": entries,
                    "count": len(entries),
                    "total": len(lines),
                    "format": "raw",
                },
            )
    except Exception as e:
        return ConfigResponse(
            success=False,
            error=str(e),
            error_code="changelog_error",
        )
|
|
174
|
+
|
|
175
|
+
|
|
176
|
+
@router.get("/explain")
async def explain_config() -> ConfigResponse:
    """
    Get a plain-language explanation of what the current config defines.

    This is designed for LLM consumption - it describes what the config
    will do in natural language.  Returns a one-line summary plus one
    section per config category (processes, groups, recipes, snippets),
    each with a title, an explanation sentence, and per-item descriptions.
    """
    config = get_config()
    config_path = get_config_file_path()

    # No config on disk: return a helpful hint instead of an empty structure.
    if not config_path.exists():
        return ConfigResponse(
            success=True,
            data={
                "summary": "No config file found. Run 'procler config init' to create one.",
                "sections": [],
            },
        )

    sections = []

    # Explain processes
    if config.processes:
        proc_explanations = []
        for name, proc in config.processes.items():
            ctx = "locally" if proc.context.value == "local" else f"in Docker container '{proc.container}'"
            desc = f"'{name}': runs `{proc.command}` {ctx}"
            if proc.cwd:
                desc += f" (working dir: {proc.cwd})"
            if proc.description:
                desc += f" - {proc.description}"
            proc_explanations.append(desc)
        sections.append(
            {
                "type": "processes",
                "title": f"{len(config.processes)} Process Definitions",
                "explanation": "These processes can be started, stopped, and monitored individually.",
                "items": proc_explanations,
            }
        )

    # Explain groups
    if config.groups:
        group_explanations = []
        for name, group in config.groups.items():
            stop_order = group.get_stop_order()
            # If the stop order is just the start order reversed, say so
            # instead of spelling the sequence out.
            is_reversed = stop_order == list(reversed(group.processes))
            stop_desc = "reversed order" if is_reversed else f"custom order: {' → '.join(stop_order)}"
            desc = f"'{name}': starts [{' → '.join(group.processes)}], stops in {stop_desc}"
            if group.description:
                desc += f" - {group.description}"
            group_explanations.append(desc)
        sections.append(
            {
                "type": "groups",
                "title": f"{len(config.groups)} Process Groups",
                "explanation": "Groups start processes in order and stop them in reverse (or custom) order.",
                "items": group_explanations,
            }
        )

    # Explain recipes
    if config.recipes:
        recipe_explanations = []
        for name, recipe in config.recipes.items():
            steps = recipe.get_steps()
            step_summary = f"{len(steps)} steps"
            error_handling = "stops on error" if recipe.on_error.value == "stop" else "continues on error"
            desc = f"'{name}': {step_summary}, {error_handling}"
            if recipe.description:
                desc += f" - {recipe.description}"
            recipe_explanations.append(desc)
        sections.append(
            {
                "type": "recipes",
                "title": f"{len(config.recipes)} Recipes",
                "explanation": "Recipes are multi-step operations that automate common workflows.",
                "items": recipe_explanations,
            }
        )

    # Explain snippets
    if config.snippets:
        snippet_explanations = []
        for name, snippet in config.snippets.items():
            ctx = "locally" if snippet.context.value == "local" else f"in Docker '{snippet.container}'"
            desc = f"'{name}': `{snippet.command}` ({ctx})"
            if snippet.description:
                desc += f" - {snippet.description}"
            snippet_explanations.append(desc)
        sections.append(
            {
                "type": "snippets",
                "title": f"{len(config.snippets)} Snippets",
                "explanation": "Snippets are reusable commands you can run quickly.",
                "items": snippet_explanations,
            }
        )

    # Build summary
    total = len(config.processes) + len(config.groups) + len(config.recipes) + len(config.snippets)
    summary = (
        f"Config defines {total} items: {len(config.processes)} processes, "
        f"{len(config.groups)} groups, {len(config.recipes)} recipes, {len(config.snippets)} snippets."
    )

    return ConfigResponse(
        success=True,
        data={
            "summary": summary,
            "sections": sections,
            "config_file": str(config_path),
        },
    )
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
"""Group management API routes."""
|
|
2
|
+
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
from fastapi import APIRouter
|
|
6
|
+
from pydantic import BaseModel
|
|
7
|
+
|
|
8
|
+
from ...core.groups import get_group_manager
|
|
9
|
+
|
|
10
|
+
router = APIRouter()
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class GroupResponse(BaseModel):
    """Standard response wrapper for the group endpoints."""

    # True when the group operation was handled without error.
    success: bool
    # Operation payload, populated on success.
    data: dict[str, Any] | None = None
    # Human-readable error message, populated on failure.
    error: str | None = None
    # Machine-readable error identifier.
    error_code: str | None = None
    # Optional hint telling the caller how to resolve the error.
    suggestion: str | None = None
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@router.get("")
async def list_groups() -> GroupResponse:
    """List all defined groups."""
    result = get_group_manager().list_groups()
    return GroupResponse(**result)
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
@router.get("/{name}")
async def get_group(name: str) -> GroupResponse:
    """Get a specific group by name."""
    manager = get_group_manager()
    result = manager.get_group(name)
    # Dead-branch fix: the old `if not result["success"]` branch returned
    # exactly the same expression as the fall-through, so one return suffices —
    # the manager's result dict already carries success/error fields.
    return GroupResponse(**result)
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
@router.get("/{name}/status")
async def get_group_status(name: str) -> GroupResponse:
    """Report the status of every process belonging to a group."""
    result = await get_group_manager().status_group(name)
    return GroupResponse(**result)
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
@router.post("/{name}/start")
async def start_group(name: str) -> GroupResponse:
    """Start each process in the group, honouring its start order."""
    result = await get_group_manager().start_group(name)
    return GroupResponse(**result)
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
@router.post("/{name}/stop")
async def stop_group(name: str) -> GroupResponse:
    """Stop each process in the group, honouring its stop order."""
    result = await get_group_manager().stop_group(name)
    return GroupResponse(**result)
|