procler 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- procler/__init__.py +3 -0
- procler/__main__.py +6 -0
- procler/api/__init__.py +5 -0
- procler/api/app.py +261 -0
- procler/api/deps.py +21 -0
- procler/api/routes/__init__.py +5 -0
- procler/api/routes/config.py +290 -0
- procler/api/routes/groups.py +62 -0
- procler/api/routes/logs.py +43 -0
- procler/api/routes/processes.py +185 -0
- procler/api/routes/recipes.py +69 -0
- procler/api/routes/snippets.py +134 -0
- procler/api/routes/ws.py +459 -0
- procler/cli.py +1478 -0
- procler/config/__init__.py +65 -0
- procler/config/changelog.py +148 -0
- procler/config/loader.py +256 -0
- procler/config/schema.py +315 -0
- procler/core/__init__.py +54 -0
- procler/core/context_base.py +117 -0
- procler/core/context_docker.py +384 -0
- procler/core/context_local.py +287 -0
- procler/core/daemon_detector.py +325 -0
- procler/core/events.py +74 -0
- procler/core/groups.py +419 -0
- procler/core/health.py +280 -0
- procler/core/log_tailer.py +262 -0
- procler/core/process_manager.py +1277 -0
- procler/core/recipes.py +330 -0
- procler/core/snippets.py +231 -0
- procler/core/variable_substitution.py +65 -0
- procler/db.py +96 -0
- procler/logging.py +41 -0
- procler/models.py +130 -0
- procler/py.typed +0 -0
- procler/settings.py +29 -0
- procler/static/assets/AboutView-BwZnsfpW.js +4 -0
- procler/static/assets/AboutView-UHbxWXcS.css +1 -0
- procler/static/assets/Code-HTS-H1S6.js +74 -0
- procler/static/assets/ConfigView-CGJcmp9G.css +1 -0
- procler/static/assets/ConfigView-aVtbRDf8.js +1 -0
- procler/static/assets/DashboardView-C5jw9Nsd.css +1 -0
- procler/static/assets/DashboardView-Dab7Cu9v.js +1 -0
- procler/static/assets/DataTable-z39TOAa4.js +746 -0
- procler/static/assets/DescriptionsItem-B2E8YbqJ.js +74 -0
- procler/static/assets/Divider-Dk-6aD2Y.js +42 -0
- procler/static/assets/Empty-MuygEHZM.js +24 -0
- procler/static/assets/Grid-CZ9QVKAT.js +1 -0
- procler/static/assets/GroupsView-BALG7i1X.js +1 -0
- procler/static/assets/GroupsView-gXAI1CVC.css +1 -0
- procler/static/assets/Input-e0xaxoWE.js +259 -0
- procler/static/assets/PhArrowsClockwise.vue-DqDg31az.js +1 -0
- procler/static/assets/PhCheckCircle.vue-Fwj9sh9m.js +1 -0
- procler/static/assets/PhEye.vue-JcPHciC2.js +1 -0
- procler/static/assets/PhPlay.vue-CZm7Gy3u.js +1 -0
- procler/static/assets/PhPlus.vue-yTWqKlSh.js +1 -0
- procler/static/assets/PhStop.vue-DxsqwIki.js +1 -0
- procler/static/assets/PhTrash.vue-DcqQbN1_.js +125 -0
- procler/static/assets/PhXCircle.vue-BXWmrabV.js +1 -0
- procler/static/assets/ProcessDetailView-DDbtIWq9.css +1 -0
- procler/static/assets/ProcessDetailView-DPtdNV-q.js +1 -0
- procler/static/assets/ProcessesView-B3a6Umur.js +1 -0
- procler/static/assets/ProcessesView-goLmghbJ.css +1 -0
- procler/static/assets/RecipesView-D2VxdneD.js +166 -0
- procler/static/assets/RecipesView-DXnFDCK4.css +1 -0
- procler/static/assets/Select-BBR17AHq.js +317 -0
- procler/static/assets/SnippetsView-B3a9q3AI.css +1 -0
- procler/static/assets/SnippetsView-DBCB2yGq.js +1 -0
- procler/static/assets/Spin-BXTjvFUk.js +90 -0
- procler/static/assets/Tag-Bh_qV63A.js +71 -0
- procler/static/assets/changelog-KkTT4H9-.js +1 -0
- procler/static/assets/groups-Zu-_v8ey.js +1 -0
- procler/static/assets/index-BsN-YMXq.css +1 -0
- procler/static/assets/index-BzW1XhyH.js +1282 -0
- procler/static/assets/procler-DOrSB1Vj.js +1 -0
- procler/static/assets/recipes-1w5SseGb.js +1 -0
- procler/static/index.html +17 -0
- procler/static/procler.png +0 -0
- procler-0.2.0.dist-info/METADATA +545 -0
- procler-0.2.0.dist-info/RECORD +83 -0
- procler-0.2.0.dist-info/WHEEL +4 -0
- procler-0.2.0.dist-info/entry_points.txt +2 -0
- procler-0.2.0.dist-info/licenses/LICENSE +21 -0
procler/config/schema.py
ADDED
@@ -0,0 +1,315 @@
"""Configuration schema for procler using Pydantic."""

from __future__ import annotations

from enum import Enum

from pydantic import BaseModel, Field, field_validator, model_validator


class ContextType(str, Enum):
    """Execution context type."""

    LOCAL = "local"
    DOCKER = "docker"


class OnErrorAction(str, Enum):
    """What to do when a recipe step fails."""

    STOP = "stop"
    CONTINUE = "continue"


class HealthCheckDef(BaseModel):
    """Health check definition for a process."""

    test: str  # Command to run, e.g., "curl -f http://localhost:8000/health"
    interval: str = "10s"  # Time between checks
    timeout: str = "5s"  # How long to wait for check to complete
    retries: int = 3  # Number of consecutive failures before unhealthy
    start_period: str = "0s"  # Grace period before checks start

    def get_interval_seconds(self) -> float:
        """Parse interval to seconds."""
        return self._parse_duration(self.interval)

    def get_timeout_seconds(self) -> float:
        """Parse timeout to seconds."""
        return self._parse_duration(self.timeout)

    def get_start_period_seconds(self) -> float:
        """Parse start_period to seconds."""
        return self._parse_duration(self.start_period)

    def _parse_duration(self, duration: str) -> float:
        """Parse duration string to seconds."""
        d = duration.strip().lower()
        if d.endswith("ms"):
            return float(d[:-2]) / 1000
        elif d.endswith("s"):
            return float(d[:-1])
        elif d.endswith("m"):
            return float(d[:-1]) * 60
        else:
            return float(d)


class DependencyCondition(str, Enum):
    """Conditions for process dependencies."""

    STARTED = "started"  # Just needs to be running
    HEALTHY = "healthy"  # Must pass health check


class DependencyDef(BaseModel):
    """Dependency definition for a process."""

    name: str  # Process name
    condition: DependencyCondition = DependencyCondition.STARTED


class ProcessDef(BaseModel):
    """Process definition from config file."""

    command: str
    context: ContextType = ContextType.LOCAL
    container: str | None = None
    cwd: str | None = None
    tags: list[str] = Field(default_factory=list)
    description: str | None = None
    healthcheck: HealthCheckDef | None = None
    depends_on: list[str | DependencyDef] = Field(default_factory=list)

    # Daemon mode configuration
    daemon_mode: bool = False
    daemon_match_pattern: str | None = None
    daemon_pidfile: str | None = None
    daemon_container: str | None = None  # Container to detect daemon in (for docker exec commands)
    adopt_existing: bool = False

    def get_dependencies(self) -> list[DependencyDef]:
        """Get normalized dependency list."""
        deps = []
        for dep in self.depends_on:
            if isinstance(dep, str):
                deps.append(DependencyDef(name=dep))
            else:
                deps.append(dep)
        return deps

    @field_validator("container")
    @classmethod
    def docker_requires_container(cls, v, info):
        """Validate that docker context has container name."""
        # Note: This runs per-field, full validation in model_validator
        return v

    @model_validator(mode="after")
    def validate_daemon_mode(self):
        """Validate daemon mode configuration."""
        if self.daemon_mode:
            if not self.daemon_match_pattern and not self.daemon_pidfile:
                raise ValueError("daemon_mode requires either daemon_match_pattern or daemon_pidfile")
        if self.adopt_existing and not self.daemon_mode:
            raise ValueError("adopt_existing requires daemon_mode=true")
        return self


class GroupDef(BaseModel):
    """Process group definition."""

    processes: list[str]
    description: str | None = None
    stop_order: list[str] | None = None  # If None, reverse of processes

    def get_stop_order(self) -> list[str]:
        """Get the stop order (explicit or reversed start order)."""
        if self.stop_order:
            return self.stop_order
        return list(reversed(self.processes))


class RecipeStepStart(BaseModel):
    """Start a process."""

    start: str


class RecipeStepStop(BaseModel):
    """Stop a process."""

    stop: str
    ignore_error: bool = False


class RecipeStepRestart(BaseModel):
    """Restart a process."""

    restart: str


class RecipeStepGroupStart(BaseModel):
    """Start a process group."""

    group_start: str


class RecipeStepGroupStop(BaseModel):
    """Stop a process group."""

    group_stop: str


class RecipeStepWait(BaseModel):
    """Wait for a duration."""

    wait: str  # e.g., "2s", "500ms", "1m"

    def get_seconds(self) -> float:
        """Parse duration string to seconds."""
        duration = self.wait.strip().lower()
        if duration.endswith("ms"):
            return float(duration[:-2]) / 1000
        elif duration.endswith("s"):
            return float(duration[:-1])
        elif duration.endswith("m"):
            return float(duration[:-1]) * 60
        else:
            # Assume seconds if no unit
            return float(duration)


class RecipeStepExec(BaseModel):
    """Execute an arbitrary command."""

    exec: str
    context: ContextType = ContextType.LOCAL
    container: str | None = None
    cwd: str | None = None
    timeout: str = "60s"  # Default 60 second timeout
    ignore_error: bool = False

    def get_timeout_seconds(self) -> float:
        """Parse timeout string to seconds."""
        timeout = self.timeout.strip().lower()
        if timeout.endswith("ms"):
            return float(timeout[:-2]) / 1000
        elif timeout.endswith("s"):
            return float(timeout[:-1])
        elif timeout.endswith("m"):
            return float(timeout[:-1]) * 60
        else:
            return float(timeout)


# Union type for all recipe steps
RecipeStep = (
    RecipeStepStart
    | RecipeStepStop
    | RecipeStepRestart
    | RecipeStepGroupStart
    | RecipeStepGroupStop
    | RecipeStepWait
    | RecipeStepExec
)


def parse_recipe_step(step_data: dict) -> RecipeStep:
    """Parse a recipe step from dict to the appropriate type."""
    if "start" in step_data:
        return RecipeStepStart(**step_data)
    elif "stop" in step_data:
        return RecipeStepStop(**step_data)
    elif "restart" in step_data:
        return RecipeStepRestart(**step_data)
    elif "group_start" in step_data:
        return RecipeStepGroupStart(**step_data)
    elif "group_stop" in step_data:
        return RecipeStepGroupStop(**step_data)
    elif "wait" in step_data:
        return RecipeStepWait(**step_data)
    elif "exec" in step_data:
        return RecipeStepExec(**step_data)
    else:
        raise ValueError(f"Unknown recipe step type: {step_data}")


class RecipeDef(BaseModel):
    """Recipe definition - multi-step operation."""

    description: str | None = None
    steps: list[dict]  # Raw dicts, parsed lazily
    on_error: OnErrorAction = OnErrorAction.STOP

    def get_steps(self) -> list[RecipeStep]:
        """Parse and return typed steps."""
        return [parse_recipe_step(s) for s in self.steps]


class SnippetDef(BaseModel):
    """Snippet definition - simple reusable command."""

    command: str
    description: str | None = None
    context: ContextType = ContextType.LOCAL
    container: str | None = None
    tags: list[str] = Field(default_factory=list)


class ProclerConfig(BaseModel):
    """Root configuration object."""

    version: int = 1
    vars: dict[str, str] = Field(default_factory=dict)  # Variable substitution
    processes: dict[str, ProcessDef] = Field(default_factory=dict)
    groups: dict[str, GroupDef] = Field(default_factory=dict)
    recipes: dict[str, RecipeDef] = Field(default_factory=dict)
    snippets: dict[str, SnippetDef] = Field(default_factory=dict)

    def validate_references(self) -> list[str]:
        """Validate that all references exist. Returns list of errors."""
        errors = []

        # Check group process references
        for group_name, group in self.groups.items():
            for proc in group.processes:
                if proc not in self.processes:
                    errors.append(f"Group '{group_name}' references unknown process '{proc}'")
            if group.stop_order:
                for proc in group.stop_order:
                    if proc not in self.processes:
                        errors.append(f"Group '{group_name}' stop_order references unknown process '{proc}'")

        # Check recipe references
        for recipe_name, recipe in self.recipes.items():
            for i, step_data in enumerate(recipe.steps):
                if "start" in step_data and step_data["start"] not in self.processes:
                    errors.append(
                        f"Recipe '{recipe_name}' step {i+1} references unknown process '{step_data['start']}'"
                    )
                if "stop" in step_data and step_data["stop"] not in self.processes:
                    errors.append(f"Recipe '{recipe_name}' step {i+1} references unknown process '{step_data['stop']}'")
                if "restart" in step_data and step_data["restart"] not in self.processes:
                    errors.append(
                        f"Recipe '{recipe_name}' step {i+1} references unknown process '{step_data['restart']}'"
                    )
                if "group_start" in step_data and step_data["group_start"] not in self.groups:
                    errors.append(
                        f"Recipe '{recipe_name}' step {i+1} references unknown group '{step_data['group_start']}'"
                    )
                if "group_stop" in step_data and step_data["group_stop"] not in self.groups:
                    errors.append(
                        f"Recipe '{recipe_name}' step {i+1} references unknown group '{step_data['group_stop']}'"
                    )

        # Check docker contexts have containers
        for name, proc in self.processes.items():
            if proc.context == ContextType.DOCKER and not proc.container:
                errors.append(f"Process '{name}' has docker context but no container specified")

        for name, snippet in self.snippets.items():
            if snippet.context == ContextType.DOCKER and not snippet.container:
                errors.append(f"Snippet '{name}' has docker context but no container specified")

        return errors
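The models above are plain Pydantic, so they can be exercised without the rest of the package (actual file loading lives in procler/config/loader.py). A hypothetical sketch; the process names, commands, and values below are invented for illustration and only show how ProclerConfig, parse_recipe_step, and validate_references fit together:

# Hypothetical configuration exercising the schema above; not shipped with the package.
from procler.config.schema import ProclerConfig, RecipeStepWait

config = ProclerConfig(
    version=1,
    processes={
        "api": {
            "command": "uvicorn app:app --port 8000",
            "healthcheck": {"test": "curl -f http://localhost:8000/health", "interval": "5s"},
        },
        "worker": {
            "command": "python worker.py",
            "depends_on": [{"name": "api", "condition": "healthy"}],
        },
    },
    groups={"backend": {"processes": ["api", "worker"]}},
    recipes={
        "redeploy": {
            "steps": [{"group_stop": "backend"}, {"wait": "2s"}, {"group_start": "backend"}],
        },
    },
)

steps = config.recipes["redeploy"].get_steps()            # raw dicts parsed into typed steps
assert isinstance(steps[1], RecipeStepWait) and steps[1].get_seconds() == 2.0
assert config.validate_references() == []                 # every group/recipe reference resolves
assert config.processes["api"].healthcheck.get_interval_seconds() == 5.0

Nested dicts are coerced into ProcessDef, GroupDef, and RecipeDef by Pydantic, while recipe steps stay as raw dicts until get_steps() dispatches them through parse_recipe_step.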
procler/core/__init__.py
ADDED
@@ -0,0 +1,54 @@
"""Core business logic for Procler."""

from .context_base import ExecResult, ExecutionContext, ProcessHandle
from .context_docker import DockerContext, get_docker_context, is_docker_available
from .context_local import LocalContext, get_local_context
from .events import (
    EVENT_LOG_ENTRY,
    EVENT_STATUS_CHANGE,
    EventBus,
    get_event_bus,
    reset_event_bus,
)
from .groups import GroupManager, get_group_manager, reset_group_manager
from .health import (
    HealthChecker,
    HealthState,
    HealthStatus,
    get_health_checker,
    reset_health_checker,
)
from .process_manager import ProcessManager, get_process_manager
from .recipes import RecipeExecutor, get_recipe_executor, reset_recipe_executor
from .snippets import SnippetManager, get_snippet_manager

__all__ = [
    "ExecResult",
    "ExecutionContext",
    "ProcessHandle",
    "DockerContext",
    "get_docker_context",
    "is_docker_available",
    "LocalContext",
    "get_local_context",
    "EVENT_LOG_ENTRY",
    "EVENT_STATUS_CHANGE",
    "EventBus",
    "get_event_bus",
    "reset_event_bus",
    "ProcessManager",
    "get_process_manager",
    "SnippetManager",
    "get_snippet_manager",
    "GroupManager",
    "get_group_manager",
    "reset_group_manager",
    "RecipeExecutor",
    "get_recipe_executor",
    "reset_recipe_executor",
    "HealthChecker",
    "HealthStatus",
    "HealthState",
    "get_health_checker",
    "reset_health_checker",
]
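Because this __init__.py flattens the sub-modules into one namespace, callers can import the managers and singleton accessors from procler.core directly; the deeper module paths remain available. A two-line illustration (both forms resolve to the same objects):

from procler.core import ProcessManager, get_event_bus, get_process_manager
from procler.core.process_manager import ProcessManager as ProcessManagerDirect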
procler/core/context_base.py
ADDED
@@ -0,0 +1,117 @@
"""Abstract base class for execution contexts."""

from abc import ABC, abstractmethod
from collections.abc import AsyncIterator, Callable
from dataclasses import dataclass


@dataclass
class ProcessHandle:
    """Handle to a running process."""

    pid: int
    context_type: str


@dataclass
class ExecResult:
    """Result of executing a command."""

    exit_code: int
    stdout: str
    stderr: str


class ExecutionContext(ABC):
    """Abstract base for process execution contexts (local, docker, etc.)."""

    @property
    @abstractmethod
    def context_type(self) -> str:
        """Return the context type identifier (e.g., 'local', 'docker')."""
        ...

    @abstractmethod
    async def start_process(
        self,
        command: str,
        cwd: str | None = None,
        env: dict[str, str] | None = None,
        on_stdout: Callable[[str], None] | None = None,
        on_stderr: Callable[[str], None] | None = None,
        on_exit: Callable[[int], None] | None = None,
    ) -> ProcessHandle:
        """
        Start a long-running process.

        Args:
            command: The command to execute
            cwd: Working directory (optional)
            env: Environment variables to add (optional)
            on_stdout: Callback for stdout lines
            on_stderr: Callback for stderr lines
            on_exit: Callback when process exits with exit code

        Returns:
            ProcessHandle with the PID
        """
        ...

    @abstractmethod
    async def stop_process(self, handle: ProcessHandle, timeout: float = 10.0) -> int:
        """
        Stop a running process.

        Args:
            handle: The process handle from start_process
            timeout: Seconds to wait for graceful shutdown before SIGKILL

        Returns:
            Exit code of the process
        """
        ...

    @abstractmethod
    async def is_running(self, handle: ProcessHandle) -> bool:
        """Check if a process is still running."""
        ...

    @abstractmethod
    async def exec_command(
        self,
        command: str,
        cwd: str | None = None,
        env: dict[str, str] | None = None,
        timeout: float | None = None,
    ) -> ExecResult:
        """
        Execute a one-shot command and wait for completion.

        Args:
            command: The command to execute
            cwd: Working directory (optional)
            env: Environment variables (optional)
            timeout: Maximum execution time in seconds

        Returns:
            ExecResult with exit code, stdout, and stderr
        """
        ...

    @abstractmethod
    async def stream_logs(
        self,
        handle: ProcessHandle,
        follow: bool = True,
    ) -> AsyncIterator[tuple[str, str]]:
        """
        Stream logs from a running process.

        Args:
            handle: The process handle
            follow: If True, keep streaming as new logs arrive

        Yields:
            Tuples of (stream_type, line) where stream_type is 'stdout' or 'stderr'
        """
        ...