procler-0.2.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. procler/__init__.py +3 -0
  2. procler/__main__.py +6 -0
  3. procler/api/__init__.py +5 -0
  4. procler/api/app.py +261 -0
  5. procler/api/deps.py +21 -0
  6. procler/api/routes/__init__.py +5 -0
  7. procler/api/routes/config.py +290 -0
  8. procler/api/routes/groups.py +62 -0
  9. procler/api/routes/logs.py +43 -0
  10. procler/api/routes/processes.py +185 -0
  11. procler/api/routes/recipes.py +69 -0
  12. procler/api/routes/snippets.py +134 -0
  13. procler/api/routes/ws.py +459 -0
  14. procler/cli.py +1478 -0
  15. procler/config/__init__.py +65 -0
  16. procler/config/changelog.py +148 -0
  17. procler/config/loader.py +256 -0
  18. procler/config/schema.py +315 -0
  19. procler/core/__init__.py +54 -0
  20. procler/core/context_base.py +117 -0
  21. procler/core/context_docker.py +384 -0
  22. procler/core/context_local.py +287 -0
  23. procler/core/daemon_detector.py +325 -0
  24. procler/core/events.py +74 -0
  25. procler/core/groups.py +419 -0
  26. procler/core/health.py +280 -0
  27. procler/core/log_tailer.py +262 -0
  28. procler/core/process_manager.py +1277 -0
  29. procler/core/recipes.py +330 -0
  30. procler/core/snippets.py +231 -0
  31. procler/core/variable_substitution.py +65 -0
  32. procler/db.py +96 -0
  33. procler/logging.py +41 -0
  34. procler/models.py +130 -0
  35. procler/py.typed +0 -0
  36. procler/settings.py +29 -0
  37. procler/static/assets/AboutView-BwZnsfpW.js +4 -0
  38. procler/static/assets/AboutView-UHbxWXcS.css +1 -0
  39. procler/static/assets/Code-HTS-H1S6.js +74 -0
  40. procler/static/assets/ConfigView-CGJcmp9G.css +1 -0
  41. procler/static/assets/ConfigView-aVtbRDf8.js +1 -0
  42. procler/static/assets/DashboardView-C5jw9Nsd.css +1 -0
  43. procler/static/assets/DashboardView-Dab7Cu9v.js +1 -0
  44. procler/static/assets/DataTable-z39TOAa4.js +746 -0
  45. procler/static/assets/DescriptionsItem-B2E8YbqJ.js +74 -0
  46. procler/static/assets/Divider-Dk-6aD2Y.js +42 -0
  47. procler/static/assets/Empty-MuygEHZM.js +24 -0
  48. procler/static/assets/Grid-CZ9QVKAT.js +1 -0
  49. procler/static/assets/GroupsView-BALG7i1X.js +1 -0
  50. procler/static/assets/GroupsView-gXAI1CVC.css +1 -0
  51. procler/static/assets/Input-e0xaxoWE.js +259 -0
  52. procler/static/assets/PhArrowsClockwise.vue-DqDg31az.js +1 -0
  53. procler/static/assets/PhCheckCircle.vue-Fwj9sh9m.js +1 -0
  54. procler/static/assets/PhEye.vue-JcPHciC2.js +1 -0
  55. procler/static/assets/PhPlay.vue-CZm7Gy3u.js +1 -0
  56. procler/static/assets/PhPlus.vue-yTWqKlSh.js +1 -0
  57. procler/static/assets/PhStop.vue-DxsqwIki.js +1 -0
  58. procler/static/assets/PhTrash.vue-DcqQbN1_.js +125 -0
  59. procler/static/assets/PhXCircle.vue-BXWmrabV.js +1 -0
  60. procler/static/assets/ProcessDetailView-DDbtIWq9.css +1 -0
  61. procler/static/assets/ProcessDetailView-DPtdNV-q.js +1 -0
  62. procler/static/assets/ProcessesView-B3a6Umur.js +1 -0
  63. procler/static/assets/ProcessesView-goLmghbJ.css +1 -0
  64. procler/static/assets/RecipesView-D2VxdneD.js +166 -0
  65. procler/static/assets/RecipesView-DXnFDCK4.css +1 -0
  66. procler/static/assets/Select-BBR17AHq.js +317 -0
  67. procler/static/assets/SnippetsView-B3a9q3AI.css +1 -0
  68. procler/static/assets/SnippetsView-DBCB2yGq.js +1 -0
  69. procler/static/assets/Spin-BXTjvFUk.js +90 -0
  70. procler/static/assets/Tag-Bh_qV63A.js +71 -0
  71. procler/static/assets/changelog-KkTT4H9-.js +1 -0
  72. procler/static/assets/groups-Zu-_v8ey.js +1 -0
  73. procler/static/assets/index-BsN-YMXq.css +1 -0
  74. procler/static/assets/index-BzW1XhyH.js +1282 -0
  75. procler/static/assets/procler-DOrSB1Vj.js +1 -0
  76. procler/static/assets/recipes-1w5SseGb.js +1 -0
  77. procler/static/index.html +17 -0
  78. procler/static/procler.png +0 -0
  79. procler-0.2.0.dist-info/METADATA +545 -0
  80. procler-0.2.0.dist-info/RECORD +83 -0
  81. procler-0.2.0.dist-info/WHEEL +4 -0
  82. procler-0.2.0.dist-info/entry_points.txt +2 -0
  83. procler-0.2.0.dist-info/licenses/LICENSE +21 -0
procler/cli.py ADDED
@@ -0,0 +1,1478 @@
1
+ """CLI interface for Procler."""
2
+
3
+ import json
4
+ import sys
5
+ from datetime import datetime
6
+ from typing import Any
7
+
8
+ import click
9
+
10
+ from . import __version__
11
+
12
+ # JSON output utilities
13
+
14
+
15
+ def output_json(data: dict[str, Any]) -> None:
16
+ """Output JSON to stdout."""
17
+ click.echo(json.dumps(data, indent=2, default=str))
18
+
19
+
20
+ def success_response(data: dict[str, Any] | None = None) -> dict[str, Any]:
21
+ """Create a success response envelope."""
22
+ response = {"success": True}
23
+ if data is not None:
24
+ response["data"] = data
25
+ return response
26
+
27
+
28
+ def error_response(
29
+ error: str,
30
+ error_code: str | None = None,
31
+ suggestion: str | None = None,
32
+ **extra: Any,
33
+ ) -> dict[str, Any]:
34
+ """Create an error response envelope."""
35
+ response = {"success": False, "error": error}
36
+ if error_code:
37
+ response["error_code"] = error_code
38
+ if suggestion:
39
+ response["suggestion"] = suggestion
40
+ response.update(extra)
41
+ return response
42
+
43
+
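The envelope helpers above define the JSON contract every command speaks. A minimal sketch of the values they produce (using the two functions just defined; the suggestion text mirrors wording used later in this module):

```python
# Illustrative sketch only, not shipped in the wheel.
success_response({"processes": []})
# {'success': True, 'data': {'processes': []}}

error_response(
    "Process 'api' not found",
    error_code="process_not_found",
    suggestion="Run 'procler list' to see available processes",
)
# {'success': False, 'error': "Process 'api' not found",
#  'error_code': 'process_not_found',
#  'suggestion': "Run 'procler list' to see available processes"}
```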
44
+ # CLI schema for capabilities command - LLM-friendly discovery
45
+
46
+ CLI_SCHEMA = {
47
+ "name": "procler",
48
+ "version": __version__,
49
+ "description": "LLM-first process manager for developers. All output is JSON.",
50
+ "output_format": (
51
+ "All commands return JSON with "
52
+ "{success: bool, data?: object, error?: string, error_code?: string, suggestion?: string}"
53
+ ),
54
+ "config_location": ".procler/ directory (per-project, version-controllable)",
55
+ "common_workflows": [
56
+ {
57
+ "name": "Start a dev environment",
58
+ "steps": ["procler group start backend", "procler status"],
59
+ "description": "Start all backend processes in order and verify status",
60
+ },
61
+ {
62
+ "name": "Graceful restart with migration",
63
+ "steps": ["procler recipe run deploy --dry-run", "procler recipe run deploy"],
64
+ "description": "Preview then execute a multi-step deployment recipe",
65
+ },
66
+ {
67
+ "name": "Debug a failing process",
68
+ "steps": ["procler status api", "procler logs api --tail 50", "procler restart api"],
69
+ "description": "Check status, view recent logs, then restart",
70
+ },
71
+ {
72
+ "name": "Initialize a new project",
73
+ "steps": [
74
+ "procler config init",
75
+ "# Edit .procler/config.yaml",
76
+ "procler config validate",
77
+ ],
78
+ "description": "Create config directory with template, then validate",
79
+ },
80
+ ],
81
+ "commands": {
82
+ "capabilities": {
83
+ "description": "Returns JSON schema of all commands (LLM discovery)",
84
+ "example": "procler capabilities",
85
+ "arguments": [],
86
+ "options": [],
87
+ },
88
+ "help-llm": {
89
+ "description": "Output comprehensive LLM-focused usage instructions in markdown format",
90
+ "example": "procler help-llm",
91
+ "arguments": [],
92
+ "options": [],
93
+ },
94
+ "status": {
95
+ "description": "Show process status (use with NAME to show single process details)",
96
+ "example": "procler status api",
97
+ "arguments": [
98
+ {
99
+ "name": "name",
100
+ "required": False,
101
+ "description": "Process name (optional, shows all if omitted)",
102
+ }
103
+ ],
104
+ "options": [],
105
+ },
106
+ "start": {
107
+ "description": "Start a process (idempotent - no-op if running)",
108
+ "example": "procler start api",
109
+ "arguments": [{"name": "name", "required": True, "description": "Process name"}],
110
+ "options": [],
111
+ },
112
+ "stop": {
113
+ "description": "Stop a process (idempotent - no-op if stopped)",
114
+ "example": "procler stop api",
115
+ "arguments": [{"name": "name", "required": True, "description": "Process name"}],
116
+ "options": [],
117
+ },
118
+ "restart": {
119
+ "description": "Restart a process (stop then start)",
120
+ "example": "procler restart api",
121
+ "arguments": [{"name": "name", "required": True, "description": "Process name"}],
122
+ "options": [],
123
+ },
124
+ "define": {
125
+ "description": "Define a new process in runtime DB (prefer config.yaml for persistence)",
126
+ "example": "procler define --name api --command 'uvicorn main:app' --cwd /app",
127
+ "arguments": [],
128
+ "options": [
129
+ {
130
+ "name": "--name",
131
+ "required": True,
132
+ "description": "Process name (CLI identifier)",
133
+ },
134
+ {"name": "--command", "required": True, "description": "Command to execute"},
135
+ {
136
+ "name": "--context",
137
+ "required": False,
138
+ "default": "local",
139
+ "choices": ["local", "docker"],
140
+ "description": "Execution context",
141
+ },
142
+ {
143
+ "name": "--container",
144
+ "required": False,
145
+ "description": "Docker container name (required if context=docker)",
146
+ },
147
+ {"name": "--cwd", "required": False, "description": "Working directory"},
148
+ {"name": "--display-name", "required": False, "description": "Human-friendly name"},
149
+ {"name": "--tags", "required": False, "description": "Comma-separated tags"},
150
+ {
151
+ "name": "--force",
152
+ "required": False,
153
+ "is_flag": True,
154
+ "description": "Overwrite existing process definition",
155
+ },
156
+ ],
157
+ },
158
+ "remove": {
159
+ "description": "Remove a process definition from runtime DB",
160
+ "example": "procler remove api",
161
+ "arguments": [{"name": "name", "required": True, "description": "Process name"}],
162
+ "options": [],
163
+ },
164
+ "list": {
165
+ "description": "List all process definitions",
166
+ "example": "procler list",
167
+ "arguments": [],
168
+ "options": [],
169
+ },
170
+ "logs": {
171
+ "description": "Get logs for a process",
172
+ "example": "procler logs api --tail 50 --since 5m",
173
+ "arguments": [{"name": "name", "required": True, "description": "Process name"}],
174
+ "options": [
175
+ {
176
+ "name": "--tail",
177
+ "required": False,
178
+ "default": 100,
179
+ "description": "Number of lines to return",
180
+ },
181
+ {
182
+ "name": "--since",
183
+ "required": False,
184
+ "description": "Time filter (e.g., '5m', '1h', ISO timestamp)",
185
+ },
186
+ {
187
+ "name": "--follow/-f",
188
+ "required": False,
189
+ "default": False,
190
+ "description": "Follow log output in real-time (outputs raw text, not JSON)",
191
+ },
192
+ ],
193
+ },
194
+ "exec": {
195
+ "description": "Execute an arbitrary command (one-off, not a managed process)",
196
+ "example": "procler exec 'ls -la' --context docker --container myapp",
197
+ "arguments": [{"name": "command", "required": True, "description": "Command to execute"}],
198
+ "options": [
199
+ {
200
+ "name": "--context",
201
+ "required": False,
202
+ "default": "local",
203
+ "choices": ["local", "docker"],
204
+ "description": "Execution context",
205
+ },
206
+ {
207
+ "name": "--container",
208
+ "required": False,
209
+ "description": "Docker container name",
210
+ },
211
+ {"name": "--cwd", "required": False, "description": "Working directory"},
212
+ ],
213
+ },
214
+ "snippet": {
215
+ "description": "Manage command snippets (reusable one-off commands)",
216
+ "subcommands": {
217
+ "list": {
218
+ "description": "List all snippets",
219
+ "example": "procler snippet list --tag docker",
220
+ "options": [{"name": "--tag", "required": False, "description": "Filter by tag"}],
221
+ },
222
+ "show": {
223
+ "description": "Show details of a specific snippet",
224
+ "example": "procler snippet show rebuild",
225
+ "arguments": [{"name": "name", "required": True, "description": "Snippet name"}],
226
+ },
227
+ "save": {
228
+ "description": "Save a new snippet",
229
+ "example": "procler snippet save --name rebuild --command 'docker compose build'",
230
+ "options": [
231
+ {"name": "--name", "required": True, "description": "Snippet name"},
232
+ {"name": "--command", "required": True, "description": "Command to save"},
233
+ {"name": "--description", "required": False, "description": "Description"},
234
+ {
235
+ "name": "--context",
236
+ "required": False,
237
+ "default": "local",
238
+ "description": "Execution context",
239
+ },
240
+ {
241
+ "name": "--container",
242
+ "required": False,
243
+ "description": "Docker container",
244
+ },
245
+ {
246
+ "name": "--tags",
247
+ "required": False,
248
+ "description": "Comma-separated tags",
249
+ },
250
+ ],
251
+ },
252
+ "run": {
253
+ "description": "Run a saved snippet",
254
+ "example": "procler snippet run rebuild",
255
+ "arguments": [{"name": "name", "required": True, "description": "Snippet name"}],
256
+ },
257
+ "remove": {
258
+ "description": "Remove a snippet",
259
+ "example": "procler snippet remove rebuild",
260
+ "arguments": [{"name": "name", "required": True, "description": "Snippet name"}],
261
+ },
262
+ },
263
+ },
264
+ "serve": {
265
+ "description": "Start the web server (API + optional Vue dashboard)",
266
+ "example": "procler serve --port 8000 --reload",
267
+ "arguments": [],
268
+ "options": [
269
+ {
270
+ "name": "--host",
271
+ "required": False,
272
+ "default": "127.0.0.1",
273
+ "description": "Host to bind",
274
+ },
275
+ {
276
+ "name": "--port",
277
+ "required": False,
278
+ "default": 8000,
279
+ "description": "Port to bind",
280
+ },
281
+ {
282
+ "name": "--reload",
283
+ "required": False,
284
+ "is_flag": True,
285
+ "description": "Enable hot reload",
286
+ },
287
+ ],
288
+ },
289
+ "group": {
290
+ "description": "Manage process groups (ordered start/stop from config.yaml)",
291
+ "subcommands": {
292
+ "list": {
293
+ "description": "List all groups defined in config",
294
+ "example": "procler group list",
295
+ },
296
+ "start": {
297
+ "description": "Start all processes in a group (in defined order)",
298
+ "example": "procler group start backend",
299
+ "arguments": [{"name": "name", "required": True, "description": "Group name"}],
300
+ },
301
+ "stop": {
302
+ "description": "Stop all processes in a group (in reverse/custom order)",
303
+ "example": "procler group stop backend",
304
+ "arguments": [{"name": "name", "required": True, "description": "Group name"}],
305
+ },
306
+ "status": {
307
+ "description": "Get status of all processes in a group",
308
+ "example": "procler group status backend",
309
+ "arguments": [{"name": "name", "required": True, "description": "Group name"}],
310
+ },
311
+ },
312
+ },
313
+ "recipe": {
314
+ "description": "Manage and run recipes (multi-step operations from config.yaml)",
315
+ "subcommands": {
316
+ "list": {
317
+ "description": "List all recipes defined in config",
318
+ "example": "procler recipe list",
319
+ },
320
+ "show": {
321
+ "description": "Show recipe details and steps",
322
+ "example": "procler recipe show deploy",
323
+ "arguments": [{"name": "name", "required": True, "description": "Recipe name"}],
324
+ },
325
+ "run": {
326
+ "description": "Execute a recipe (use --dry-run to preview)",
327
+ "example": "procler recipe run deploy --dry-run",
328
+ "arguments": [{"name": "name", "required": True, "description": "Recipe name"}],
329
+ "options": [
330
+ {
331
+ "name": "--dry-run",
332
+ "required": False,
333
+ "is_flag": True,
334
+ "description": "Show what would happen without executing",
335
+ },
336
+ {
337
+ "name": "--continue-on-error",
338
+ "required": False,
339
+ "is_flag": True,
340
+ "description": "Continue execution even if a step fails",
341
+ },
342
+ ],
343
+ },
344
+ },
345
+ },
346
+ "config": {
347
+ "description": "Manage configuration (.procler/ directory)",
348
+ "subcommands": {
349
+ "init": {
350
+ "description": "Initialize .procler/ config directory with template",
351
+ "example": "procler config init",
352
+ },
353
+ "validate": {
354
+ "description": "Validate config.yaml syntax and all references",
355
+ "example": "procler config validate",
356
+ },
357
+ "path": {
358
+ "description": "Show the resolved config directory path",
359
+ "example": "procler config path",
360
+ },
361
+ "explain": {
362
+ "description": "Explain what the config defines in plain language (LLM-friendly)",
363
+ "example": "procler config explain",
364
+ },
365
+ },
366
+ },
367
+ },
368
+ }
369
+
370
+
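Because `capabilities` (defined further down) wraps CLI_SCHEMA in the standard envelope, a client such as an LLM agent can discover the command surface without parsing `--help` text. A rough sketch, assuming a `procler` executable is on PATH:

```python
# Illustrative sketch only, not shipped in the wheel.
import json
import subprocess

raw = subprocess.run(
    ["procler", "capabilities"], capture_output=True, text=True, check=True
).stdout
payload = json.loads(raw)
if payload["success"]:
    schema = payload["data"]
    print(schema["version"], sorted(schema["commands"]))  # available command names
    for step in schema["common_workflows"][0]["steps"]:   # first suggested workflow
        print("suggested:", step)
```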
371
+ def _get_process_by_name(name: str):
372
+ """Get a process by name using sqler query."""
373
+ from sqler.query import SQLerField as F
374
+
375
+ from .db import init_database
376
+ from .models import Process
377
+
378
+ init_database()
379
+ results = Process.query().filter(F("name") == name).all()
380
+ return results[0] if results else None
381
+
382
+
383
+ def _process_to_dict(process) -> dict[str, Any]:
384
+ """Convert a Process model to a dict for JSON output."""
385
+ return {
386
+ "id": process._id,
387
+ "name": process.name,
388
+ "display_name": process.display_name,
389
+ "command": process.command,
390
+ "context_type": process.context_type,
391
+ "status": process.status,
392
+ "pid": process.pid,
393
+ "uptime_seconds": process.uptime_seconds,
394
+ }
395
+
396
+
397
+ @click.group()
398
+ @click.version_option(version=__version__, prog_name="procler")
399
+ def cli() -> None:
400
+ """Procler - LLM-first process manager for developers.
401
+
402
+ All commands output JSON for easy parsing by scripts and LLMs.
403
+ """
404
+ pass
405
+
406
+
407
+ @cli.command()
408
+ def capabilities() -> None:
409
+ """Returns JSON schema of all commands."""
410
+ output_json(success_response(CLI_SCHEMA))
411
+
412
+
413
+ @cli.command("help-llm")
414
+ def help_llm() -> None:
415
+ """Output comprehensive LLM-focused usage instructions."""
416
+ instructions = """# Procler - LLM-First Process Manager
417
+
418
+ ## Overview
419
+ Procler is a process manager designed for LLM integration. All CLI commands return
420
+ structured JSON with a consistent response format:
421
+ - `success`: boolean indicating operation result
422
+ - `data`: payload on success
423
+ - `error`: error message on failure
424
+ - `error_code`: machine-readable error identifier
425
+ - `suggestion`: actionable fix for errors
426
+
427
+ ## Quick Start
428
+
429
+ ### 1. Discover Commands
430
+ ```bash
431
+ procler capabilities # JSON schema of all commands
432
+ procler --help # Human-readable help
433
+ ```
434
+
435
+ ### 2. Initialize Config
436
+ ```bash
437
+ procler config init # Creates .procler/ directory
438
+ procler config validate # Validates config.yaml
439
+ procler config explain # Plain-language explanation
440
+ ```
441
+
442
+ ### 3. Process Management
443
+ ```bash
444
+ procler define --name api --command 'uvicorn main:app' --cwd /app
445
+ procler start api # Idempotent - safe to retry
446
+ procler stop api # Idempotent - safe to retry
447
+ procler restart api # Stop then start
448
+ procler restart api --clear-logs # Clear logs on restart
449
+ procler status api # Get process status with Linux state
450
+ procler logs api --tail 50
451
+ ```
452
+
453
+ ### 4. Groups (Ordered Start/Stop)
454
+ ```bash
455
+ procler group list
456
+ procler group start backend # Starts in order, waits for dependencies
457
+ procler group stop backend # Stops in reverse order
458
+ procler group status backend
459
+ ```
460
+
461
+ ### 5. Recipes (Multi-Step Operations)
462
+ ```bash
463
+ procler recipe list
464
+ procler recipe show deploy
465
+ procler recipe run deploy --dry-run # Preview
466
+ procler recipe run deploy # Execute
467
+ ```
468
+
469
+ ### 6. Snippets (Reusable Commands)
470
+ ```bash
471
+ procler snippet list
472
+ procler snippet save --name rebuild --command 'docker compose build'
473
+ procler snippet run rebuild
474
+ ```
475
+
476
+ ## Response Format
477
+
478
+ ### Success Response
479
+ ```json
480
+ {
481
+ "success": true,
482
+ "data": { ... }
483
+ }
484
+ ```
485
+
486
+ ### Error Response
487
+ ```json
488
+ {
489
+ "success": false,
490
+ "error": "Process 'api' not found",
491
+ "error_code": "process_not_found",
492
+ "suggestion": "Run 'procler list' to see available processes"
493
+ }
494
+ ```
495
+
496
+ ## Process Status Fields
497
+ - `status`: "running" | "stopped" | "failed"
498
+ - `pid`: Process ID (null if stopped)
499
+ - `uptime_seconds`: Time since start
500
+ - `linux_state`: Linux kernel state (R, S, D, Z, T)
501
+ - D state = "uninterruptible sleep" - process cannot be killed
502
+ - Z state = "zombie" - terminated but not reaped
503
+ - `warning`: Alert for problematic states
504
+
505
+ ## Config File (.procler/config.yaml)
506
+
507
+ ```yaml
508
+ version: 1
509
+
510
+ processes:
511
+ api:
512
+ command: uvicorn main:app --reload
513
+ context: local # or docker
514
+ container: my-container # if docker
515
+ cwd: /path/to/project
516
+ tags: [backend, api]
517
+ healthcheck:
518
+ test: "curl -f http://localhost:8000/health"
519
+ interval: 10s
520
+ timeout: 5s
521
+ retries: 3
522
+ start_period: 30s
523
+ depends_on:
524
+ - redis # Wait for started
525
+ - name: database
526
+ condition: healthy # Wait for health check
527
+
528
+ groups:
529
+ backend:
530
+ processes: [redis, database, api]
531
+ stop_order: [api, database, redis] # Optional
532
+
533
+ recipes:
534
+ deploy:
535
+ on_error: stop # or continue
536
+ steps:
537
+ - stop: api
538
+ - exec: "alembic upgrade head"
539
+ - start: api
540
+
541
+ snippets:
542
+ rebuild:
543
+ command: docker compose build
544
+ tags: [docker]
545
+ ```
546
+
547
+ ## Common Workflows
548
+
549
+ ### Start Development Environment
550
+ ```bash
551
+ procler group start backend && procler status
552
+ ```
553
+
554
+ ### Debug Failing Process
555
+ ```bash
556
+ procler status api
557
+ procler logs api --tail 100
558
+ procler restart api --clear-logs
559
+ ```
560
+
561
+ ### Graceful Deployment
562
+ ```bash
563
+ procler recipe run deploy --dry-run
564
+ procler recipe run deploy
565
+ ```
566
+
567
+ ## Web Server
568
+
569
+ ```bash
570
+ procler serve --port 8000 --reload # Development
571
+ procler serve --host 0.0.0.0 # Production
572
+ ```
573
+
574
+ REST API: http://localhost:8000/api
575
+ WebSocket: ws://localhost:8000/api/ws
576
+ OpenAPI: http://localhost:8000/api/docs
577
+
578
+ ## Environment Variables
579
+
580
+ | Variable | Default | Description |
581
+ |----------|---------|-------------|
582
+ | PROCLER_LOG_LEVEL | INFO | Log level (DEBUG, INFO, WARNING, ERROR) |
583
+ | PROCLER_LOG_FILE | - | Log file path (auto-rotates) |
584
+ | PROCLER_CONFIG_DIR | .procler/ | Config directory |
585
+ | PROCLER_DEBUG | - | Enable detailed error messages |
586
+
587
+ ## Exit Codes
588
+ - 0: Success
589
+ - 1: Operation failed (see error in JSON output)
590
+ """
591
+ # Output as JSON with the instructions as a field
592
+ output_json(
593
+ success_response(
594
+ {
595
+ "format": "markdown",
596
+ "instructions": instructions.strip(),
597
+ "tip": "Parse 'instructions' field for LLM consumption or pipe to less/cat for human reading",
598
+ }
599
+ )
600
+ )
601
+
602
+
603
+ @cli.command()
604
+ @click.argument("name", required=False)
605
+ def status(name: str | None) -> None:
606
+ """Show process status (use with NAME to show single process details)."""
607
+ import asyncio
608
+
609
+ from .core import get_process_manager
610
+
611
+ manager = get_process_manager()
612
+ result = asyncio.run(manager.status(name))
613
+
614
+ output_json(result)
615
+ if not result["success"]:
616
+ sys.exit(1)
617
+
618
+
619
+ @cli.command("list")
620
+ @click.option("--resolve", is_flag=True, help="Show commands with variables substituted")
621
+ def list_processes(resolve: bool) -> None:
622
+ """List all process definitions."""
623
+ from .core.variable_substitution import substitute_vars_from_config
624
+ from .db import init_database
625
+ from .models import Process
626
+
627
+ init_database()
628
+ processes = Process.query().all()
629
+
630
+ process_data = []
631
+ for process in processes:
632
+ command = process.command
633
+ daemon_container = getattr(process, "daemon_container", None)
634
+
635
+ if resolve:
636
+ command = substitute_vars_from_config(command)
637
+ if daemon_container:
638
+ daemon_container = substitute_vars_from_config(daemon_container)
639
+
640
+ process_data.append(
641
+ {
642
+ "id": process._id,
643
+ "name": process.name,
644
+ "display_name": process.display_name,
645
+ "command": command,
646
+ "context_type": process.context_type,
647
+ "container_name": process.container_name,
648
+ "cwd": process.cwd,
649
+ "tags": process.tags or [],
650
+ "daemon_mode": getattr(process, "daemon_mode", False) or None,
651
+ "daemon_match_pattern": getattr(process, "daemon_match_pattern", None),
652
+ "daemon_container": daemon_container,
653
+ }
654
+ )
655
+
656
+ output_json(success_response({"processes": process_data}))
657
+
658
+
659
+ @cli.command()
660
+ @click.option("--name", required=True, help="Process name (CLI identifier)")
661
+ @click.option("--command", "cmd", required=True, help="Command to execute")
662
+ @click.option(
663
+ "--context",
664
+ type=click.Choice(["local", "docker"]),
665
+ default="local",
666
+ help="Execution context",
667
+ )
668
+ @click.option("--container", help="Docker container name (required if context=docker)")
669
+ @click.option("--cwd", help="Working directory")
670
+ @click.option("--display-name", help="Human-friendly name")
671
+ @click.option("--tags", help="Comma-separated tags")
672
+ @click.option("--daemon-mode", is_flag=True, help="Enable daemon mode (process forks to background)")
673
+ @click.option("--daemon-pattern", help="Process name pattern to match daemon (e.g., 'msgd')")
674
+ @click.option("--daemon-pidfile", help="Path to daemon pidfile")
675
+ @click.option("--daemon-container", help="Container name for daemon detection (use with docker exec commands)")
676
+ @click.option("--adopt-existing", is_flag=True, help="Adopt existing daemon if running")
677
+ @click.option("--force", is_flag=True, help="Overwrite existing process definition")
678
+ def define(
679
+ name: str,
680
+ cmd: str,
681
+ context: str,
682
+ container: str | None,
683
+ cwd: str | None,
684
+ display_name: str | None,
685
+ tags: str | None,
686
+ daemon_mode: bool,
687
+ daemon_pattern: str | None,
688
+ daemon_pidfile: str | None,
689
+ daemon_container: str | None,
690
+ adopt_existing: bool,
691
+ force: bool,
692
+ ) -> None:
693
+ """Define a new process."""
694
+ from .db import init_database
695
+ from .models import Process
696
+
697
+ if context == "docker" and not container:
698
+ output_json(
699
+ error_response(
700
+ "Container name required for docker context",
701
+ error_code="missing_container",
702
+ suggestion="Use --container <name> to specify the Docker container",
703
+ )
704
+ )
705
+ sys.exit(1)
706
+
707
+ init_database()
708
+
709
+ # Check if process already exists
710
+ existing = _get_process_by_name(name)
711
+ if existing:
712
+ if not force:
713
+ output_json(
714
+ error_response(
715
+ f"Process '{name}' already exists",
716
+ error_code="process_exists",
717
+ suggestion=f"Use --force to overwrite, or 'procler remove {name}' first",
718
+ )
719
+ )
720
+ sys.exit(1)
721
+ # Force mode: delete existing and continue
722
+ existing.delete()
723
+
724
+ tag_list = [t.strip() for t in tags.split(",")] if tags else None
725
+
726
+ # Validate daemon mode options
727
+ if daemon_mode and not daemon_pattern and not daemon_pidfile:
728
+ output_json(
729
+ error_response(
730
+ "daemon_mode requires either --daemon-pattern or --daemon-pidfile",
731
+ error_code="invalid_daemon_config",
732
+ suggestion="Use --daemon-pattern <pattern> to specify how to find the daemon",
733
+ )
734
+ )
735
+ sys.exit(1)
736
+
737
+ if adopt_existing and not daemon_mode:
738
+ output_json(
739
+ error_response(
740
+ "--adopt-existing requires --daemon-mode",
741
+ error_code="invalid_daemon_config",
742
+ suggestion="Add --daemon-mode to enable daemon mode",
743
+ )
744
+ )
745
+ sys.exit(1)
746
+
747
+ process = Process(
748
+ name=name,
749
+ command=cmd,
750
+ context_type=context,
751
+ display_name=display_name,
752
+ container_name=container,
753
+ cwd=cwd,
754
+ tags=tag_list,
755
+ daemon_mode=daemon_mode,
756
+ daemon_match_pattern=daemon_pattern,
757
+ daemon_pidfile=daemon_pidfile,
758
+ daemon_container=daemon_container,
759
+ adopt_existing=adopt_existing,
760
+ created_at=datetime.now().isoformat(),
761
+ updated_at=datetime.now().isoformat(),
762
+ )
763
+ process.save()
764
+
765
+ process_info = {
766
+ "id": process._id,
767
+ "name": process.name,
768
+ "command": process.command,
769
+ "context_type": process.context_type,
770
+ }
771
+ if daemon_mode:
772
+ process_info["daemon_mode"] = True
773
+ process_info["daemon_match_pattern"] = daemon_pattern
774
+ process_info["daemon_pidfile"] = daemon_pidfile
775
+ process_info["adopt_existing"] = adopt_existing
776
+
777
+ output_json(success_response({"action": "created", "process": process_info}))
778
+
779
+
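Invocations consistent with the validation above (docker context requires `--container`; daemon mode requires a pattern or pidfile). These lines are illustrative only; `msgd` is simply the placeholder already used in the `--daemon-pattern` help text:

```bash
# Illustrative only, not shipped in the wheel.
procler define --name api --command 'uvicorn main:app' --cwd /app
procler define --name worker --command 'celery -A app worker' \
    --context docker --container myapp
procler define --name msgd --command './msgd --start' \
    --daemon-mode --daemon-pattern msgd --adopt-existing
```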
780
+ @cli.command()
781
+ @click.argument("name")
782
+ def remove(name: str) -> None:
783
+ """Remove a process definition."""
784
+ from .db import init_database
785
+
786
+ init_database()
787
+
788
+ process = _get_process_by_name(name)
789
+ if not process:
790
+ output_json(
791
+ error_response(
792
+ f"Process '{name}' not found",
793
+ error_code="process_not_found",
794
+ suggestion="Run 'procler list' to see available processes",
795
+ )
796
+ )
797
+ sys.exit(1)
798
+
799
+ process.delete()
800
+ output_json(success_response({"action": "removed", "name": name}))
801
+
802
+
803
+ @cli.command()
804
+ @click.argument("name")
805
+ def start(name: str) -> None:
806
+ """Start a process (idempotent - no-op if running)."""
807
+ import asyncio
808
+
809
+ from .core import get_process_manager
810
+
811
+ manager = get_process_manager()
812
+ result = asyncio.run(manager.start(name))
813
+
814
+ output_json(result)
815
+ if not result["success"]:
816
+ sys.exit(1)
817
+
818
+
819
+ @cli.command()
820
+ @click.argument("name")
821
+ def stop(name: str) -> None:
822
+ """Stop a process (idempotent - no-op if stopped)."""
823
+ import asyncio
824
+
825
+ from .core import get_process_manager
826
+
827
+ manager = get_process_manager()
828
+ result = asyncio.run(manager.stop(name))
829
+
830
+ output_json(result)
831
+ if not result["success"]:
832
+ sys.exit(1)
833
+
834
+
835
+ @cli.command()
836
+ @click.argument("name")
837
+ @click.option("--clear-logs", is_flag=True, help="Delete old logs before restarting")
838
+ def restart(name: str, clear_logs: bool) -> None:
839
+ """Restart a process (stop then start)."""
840
+ import asyncio
841
+
842
+ from .core import get_process_manager
843
+
844
+ manager = get_process_manager()
845
+ result = asyncio.run(manager.restart(name, clear_logs=clear_logs))
846
+
847
+ output_json(result)
848
+ if not result["success"]:
849
+ sys.exit(1)
850
+
851
+
852
+ @cli.command()
853
+ @click.argument("name")
854
+ @click.option("--tail", default=100, help="Number of lines to return")
855
+ @click.option("--since", help="Time filter (e.g., '5m', '1h', ISO timestamp)")
856
+ @click.option("-f", "--follow", is_flag=True, help="Follow log output (stream new lines)")
857
+ def logs(name: str, tail: int, since: str | None, follow: bool) -> None:
858
+ """Get logs for a process."""
859
+ import asyncio
860
+ import time
861
+
862
+ from .core import get_process_manager
863
+
864
+ manager = get_process_manager()
865
+
866
+ if not follow:
867
+ # Standard mode: get logs and exit
868
+ result = asyncio.run(manager.logs(name, tail=tail, since=since))
869
+ output_json(result)
870
+ if not result["success"]:
871
+ sys.exit(1)
872
+ return
873
+
874
+ # Follow mode: continuously poll for new logs
875
+ # First, get initial logs
876
+ result = asyncio.run(manager.logs(name, tail=tail, since=since))
877
+ if not result["success"]:
878
+ output_json(result)
879
+ sys.exit(1)
880
+
881
+ # Print initial logs (raw, not JSON for readability)
882
+ last_timestamp = None
883
+ for entry in result.get("data", {}).get("logs", []):
884
+ ts = entry.get("timestamp", "")
885
+ stream = entry.get("stream", "stdout")
886
+ line = entry.get("line", "")
887
+ prefix = f"[{ts}] " if ts else ""
888
+ stream_prefix = "[stderr] " if stream == "stderr" else ""
889
+ click.echo(f"{prefix}{stream_prefix}{line}")
890
+ last_timestamp = ts
891
+
892
+ # Poll for new logs
893
+ try:
894
+ while True:
895
+ time.sleep(1) # Poll every second
896
+ # Get logs since last timestamp
897
+ since_filter = last_timestamp if last_timestamp else None
898
+ result = asyncio.run(manager.logs(name, tail=1000, since=since_filter))
899
+ if result["success"]:
900
+ entries = result.get("data", {}).get("logs", [])
901
+ for entry in entries:
902
+ ts = entry.get("timestamp", "")
903
+ # Skip if same or older than last seen
904
+ if last_timestamp and ts and ts <= last_timestamp:
905
+ continue
906
+ stream = entry.get("stream", "stdout")
907
+ line = entry.get("line", "")
908
+ prefix = f"[{ts}] " if ts else ""
909
+ stream_prefix = "[stderr] " if stream == "stderr" else ""
910
+ click.echo(f"{prefix}{stream_prefix}{line}")
911
+ if ts:
912
+ last_timestamp = ts
913
+ except KeyboardInterrupt:
914
+ click.echo("\nStopped following logs.")
915
+
916
+
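Note that `--follow` switches from the JSON envelope to raw text, as the loop above shows. A hedged illustration (timestamps and messages are invented; the `[timestamp]` and `[stderr]` prefixes match the formatting above):

```bash
# Illustrative only, not shipped in the wheel.
procler logs api --tail 50 --since 5m   # JSON envelope with data.logs[]
procler logs api -f
# [2024-01-01T12:00:00] Uvicorn running on http://127.0.0.1:8000
# [2024-01-01T12:00:01] [stderr] WARNING: connection retry
```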
917
+ @cli.command("exec")
918
+ @click.argument("command")
919
+ @click.option(
920
+ "--context",
921
+ type=click.Choice(["local", "docker"]),
922
+ default="local",
923
+ help="Execution context",
924
+ )
925
+ @click.option("--container", help="Docker container name")
926
+ @click.option("--cwd", help="Working directory")
927
+ @click.option("--timeout", default=60.0, help="Maximum execution time in seconds")
928
+ def exec_cmd(
929
+ command: str,
930
+ context: str,
931
+ container: str | None,
932
+ cwd: str | None,
933
+ timeout: float,
934
+ ) -> None:
935
+ """Execute an arbitrary command."""
936
+ import asyncio
937
+
938
+ from .core import get_process_manager
939
+
940
+ if context == "docker" and not container:
941
+ output_json(
942
+ error_response(
943
+ "Container name required for docker context",
944
+ error_code="missing_container",
945
+ suggestion="Use --container <name> to specify the Docker container",
946
+ )
947
+ )
948
+ sys.exit(1)
949
+
950
+ manager = get_process_manager()
951
+ result = asyncio.run(
952
+ manager.exec_command(
953
+ command=command,
954
+ context_type=context,
955
+ container_name=container,
956
+ cwd=cwd,
957
+ timeout=timeout,
958
+ )
959
+ )
960
+
961
+ output_json(result)
962
+ if not result["success"]:
963
+ sys.exit(1)
964
+
965
+
966
+ # Snippet subcommands
967
+ @cli.group()
968
+ def snippet() -> None:
969
+ """Manage command snippets."""
970
+ pass
971
+
972
+
973
+ @snippet.command("list")
974
+ @click.option("--tag", help="Filter by tag")
975
+ def snippet_list(tag: str | None) -> None:
976
+ """List all snippets."""
977
+ from .core import get_snippet_manager
978
+
979
+ manager = get_snippet_manager()
980
+ result = manager.list_snippets(tag=tag)
981
+
982
+ output_json(result)
983
+
984
+
985
+ @snippet.command("show")
986
+ @click.argument("name")
987
+ def snippet_show(name: str) -> None:
988
+ """Show details of a specific snippet."""
989
+ from .core import get_snippet_manager
990
+
991
+ manager = get_snippet_manager()
992
+ snippet = manager._get_snippet_by_name(name)
993
+
994
+ if not snippet:
995
+ output_json(
996
+ error_response(
997
+ f"Snippet '{name}' not found",
998
+ error_code="snippet_not_found",
999
+ suggestion="Run 'procler snippet list' to see available snippets",
1000
+ )
1001
+ )
1002
+ sys.exit(1)
1003
+
1004
+ output_json(success_response({"snippet": manager._snippet_to_dict(snippet)}))
1005
+
1006
+
1007
+ @snippet.command("save")
1008
+ @click.option("--name", required=True, help="Snippet name")
1009
+ @click.option("--command", "cmd", required=True, help="Command to save")
1010
+ @click.option("--description", help="Description of what the snippet does")
1011
+ @click.option(
1012
+ "--context",
1013
+ type=click.Choice(["local", "docker"]),
1014
+ default="local",
1015
+ help="Execution context",
1016
+ )
1017
+ @click.option("--container", help="Docker container name (required if context=docker)")
1018
+ @click.option("--tags", help="Comma-separated tags")
1019
+ def snippet_save(
1020
+ name: str,
1021
+ cmd: str,
1022
+ description: str | None,
1023
+ context: str,
1024
+ container: str | None,
1025
+ tags: str | None,
1026
+ ) -> None:
1027
+ """Save a new snippet."""
1028
+ from .core import get_snippet_manager
1029
+
1030
+ if context == "docker" and not container:
1031
+ output_json(
1032
+ error_response(
1033
+ "Container name required for docker context",
1034
+ error_code="missing_container",
1035
+ suggestion="Use --container <name> to specify the Docker container",
1036
+ )
1037
+ )
1038
+ sys.exit(1)
1039
+
1040
+ tag_list = [t.strip() for t in tags.split(",")] if tags else None
1041
+
1042
+ manager = get_snippet_manager()
1043
+ result = manager.save_snippet(
1044
+ name=name,
1045
+ command=cmd,
1046
+ description=description,
1047
+ context_type=context,
1048
+ container_name=container,
1049
+ tags=tag_list,
1050
+ )
1051
+
1052
+ output_json(result)
1053
+ if not result["success"]:
1054
+ sys.exit(1)
1055
+
1056
+
1057
+ @snippet.command("run")
1058
+ @click.argument("name")
1059
+ def snippet_run(name: str) -> None:
1060
+ """Run a saved snippet."""
1061
+ import asyncio
1062
+
1063
+ from .core import get_snippet_manager
1064
+
1065
+ manager = get_snippet_manager()
1066
+ result = asyncio.run(manager.run_snippet(name))
1067
+
1068
+ output_json(result)
1069
+ if not result["success"]:
1070
+ sys.exit(1)
1071
+
1072
+
1073
+ @snippet.command("remove")
1074
+ @click.argument("name")
1075
+ def snippet_remove(name: str) -> None:
1076
+ """Remove a snippet."""
1077
+ from .core import get_snippet_manager
1078
+
1079
+ manager = get_snippet_manager()
1080
+ result = manager.remove_snippet(name)
1081
+
1082
+ output_json(result)
1083
+ if not result["success"]:
1084
+ sys.exit(1)
1085
+
1086
+
1087
+ @cli.command()
1088
+ @click.option("--host", default="127.0.0.1", help="Host to bind")
1089
+ @click.option("--port", default=8000, help="Port to bind")
1090
+ @click.option("--reload", is_flag=True, help="Enable hot reload")
1091
+ def serve(host: str, port: int, reload: bool) -> None:
1092
+ """Start the web server."""
1093
+ import uvicorn
1094
+
1095
+ from .db import init_database
1096
+
1097
+ # Initialize database before starting server
1098
+ init_database()
1099
+
1100
+ uvicorn.run(
1101
+ "procler.api:app",
1102
+ host=host,
1103
+ port=port,
1104
+ reload=reload,
1105
+ )
1106
+
1107
+
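The app is handed to uvicorn as the import string `procler.api:app` rather than as an object, which is what allows `--reload` to re-import it in a fresh worker process. Typical invocations (these mirror the examples in the help-llm text above):

```bash
procler serve --port 8000 --reload         # development, hot reload
procler serve --host 0.0.0.0 --port 8000   # listen on all interfaces
```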
1108
+ # Group subcommands
1109
+ @cli.group()
1110
+ def group() -> None:
1111
+ """Manage process groups (defined in config.yaml)."""
1112
+ pass
1113
+
1114
+
1115
+ @group.command("list")
1116
+ def group_list() -> None:
1117
+ """List all defined groups."""
1118
+ from .core import get_group_manager
1119
+
1120
+ manager = get_group_manager()
1121
+ result = manager.list_groups()
1122
+ output_json(result)
1123
+
1124
+
1125
+ @group.command("start")
1126
+ @click.argument("name")
1127
+ def group_start(name: str) -> None:
1128
+ """Start all processes in a group (in order)."""
1129
+ import asyncio
1130
+
1131
+ from .core import get_group_manager
1132
+
1133
+ manager = get_group_manager()
1134
+ result = asyncio.run(manager.start_group(name))
1135
+
1136
+ output_json(result)
1137
+ if not result["success"]:
1138
+ sys.exit(1)
1139
+
1140
+
1141
+ @group.command("stop")
1142
+ @click.argument("name")
1143
+ def group_stop(name: str) -> None:
1144
+ """Stop all processes in a group (in reverse order)."""
1145
+ import asyncio
1146
+
1147
+ from .core import get_group_manager
1148
+
1149
+ manager = get_group_manager()
1150
+ result = asyncio.run(manager.stop_group(name))
1151
+
1152
+ output_json(result)
1153
+ if not result["success"]:
1154
+ sys.exit(1)
1155
+
1156
+
1157
+ @group.command("status")
1158
+ @click.argument("name")
1159
+ def group_status(name: str) -> None:
1160
+ """Get status of all processes in a group."""
1161
+ import asyncio
1162
+
1163
+ from .core import get_group_manager
1164
+
1165
+ manager = get_group_manager()
1166
+ result = asyncio.run(manager.status_group(name))
1167
+
1168
+ output_json(result)
1169
+ if not result["success"]:
1170
+ sys.exit(1)
1171
+
1172
+
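Group commands return the same envelope as everything else, so they compose with ordinary shell tooling. A small illustration (assumes `jq` is installed; it is not required by procler itself):

```bash
procler group start backend                   # exits 1 when the result is not successful
procler group status backend | jq '.success'
```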
1173
+ # Recipe subcommands
1174
+ @cli.group()
1175
+ def recipe() -> None:
1176
+ """Manage and run recipes (multi-step operations)."""
1177
+ pass
1178
+
1179
+
1180
+ @recipe.command("list")
1181
+ def recipe_list() -> None:
1182
+ """List all defined recipes."""
1183
+ from .core import get_recipe_executor
1184
+
1185
+ executor = get_recipe_executor()
1186
+ result = executor.list_recipes()
1187
+ output_json(result)
1188
+
1189
+
1190
+ @recipe.command("show")
1191
+ @click.argument("name")
1192
+ def recipe_show(name: str) -> None:
1193
+ """Show recipe details and steps."""
1194
+ from .core import get_recipe_executor
1195
+
1196
+ executor = get_recipe_executor()
1197
+ result = executor.get_recipe(name)
1198
+
1199
+ output_json(result)
1200
+ if not result["success"]:
1201
+ sys.exit(1)
1202
+
1203
+
1204
+ @recipe.command("run")
1205
+ @click.argument("name")
1206
+ @click.option("--dry-run", is_flag=True, help="Show what would happen without executing")
1207
+ @click.option("--continue-on-error", is_flag=True, help="Continue execution even if a step fails")
1208
+ def recipe_run(name: str, dry_run: bool, continue_on_error: bool) -> None:
1209
+ """Execute a recipe."""
1210
+ import asyncio
1211
+
1212
+ from .core import get_recipe_executor
1213
+
1214
+ executor = get_recipe_executor()
1215
+
1216
+ # Only pass continue_on_error if explicitly set
1217
+ kwargs = {"dry_run": dry_run}
1218
+ if continue_on_error:
1219
+ kwargs["continue_on_error"] = True
1220
+
1221
+ result = asyncio.run(executor.run_recipe(name, **kwargs))
1222
+
1223
+ output_json(result)
1224
+ if not result["success"]:
1225
+ sys.exit(1)
1226
+
1227
+
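One consequence of the kwargs handling above: `--continue-on-error` is only forwarded when given, so a plain run presumably falls back to the recipe's own `on_error` setting from config.yaml, while the flag overrides it for a single invocation:

```bash
procler recipe run deploy --dry-run             # preview, nothing executes
procler recipe run deploy                       # recipe's on_error applies
procler recipe run deploy --continue-on-error   # keep going past failed steps
```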
1228
+ # Config subcommands
1229
+ @cli.group()
1230
+ def config() -> None:
1231
+ """Manage configuration (config.yaml)."""
1232
+ pass
1233
+
1234
+
1235
+ @config.command("init")
1236
+ @click.option("--force", is_flag=True, help="Overwrite existing config")
1237
+ def config_init(force: bool) -> None:
1238
+ """Initialize .procler/ config directory with template."""
1239
+ from .config import find_config_dir, generate_template_config
1240
+
1241
+ config_dir = find_config_dir()
1242
+ config_file = config_dir / "config.yaml"
1243
+
1244
+ # Create directory if needed
1245
+ config_dir.mkdir(parents=True, exist_ok=True)
1246
+
1247
+ if config_file.exists() and not force:
1248
+ output_json(
1249
+ error_response(
1250
+ f"Config file already exists: {config_file}",
1251
+ error_code="config_exists",
1252
+ suggestion="Use --force to overwrite",
1253
+ )
1254
+ )
1255
+ sys.exit(1)
1256
+
1257
+ # Write template
1258
+ template = generate_template_config()
1259
+ config_file.write_text(template)
1260
+
1261
+ # Create .gitignore for state.db
1262
+ gitignore = config_dir / ".gitignore"
1263
+ if not gitignore.exists():
1264
+ gitignore.write_text("# Runtime state - not version controlled\nstate.db\n")
1265
+
1266
+ output_json(
1267
+ success_response(
1268
+ {
1269
+ "action": "initialized",
1270
+ "config_dir": str(config_dir),
1271
+ "config_file": str(config_file),
1272
+ "files_created": ["config.yaml", ".gitignore"],
1273
+ }
1274
+ )
1275
+ )
1276
+
1277
+
1278
+ @config.command("validate")
1279
+ def config_validate() -> None:
1280
+ """Validate config.yaml syntax and references."""
1281
+ from .config import get_config_file_path, reload_config
1282
+
1283
+ config_path = get_config_file_path()
1284
+
1285
+ if not config_path.exists():
1286
+ output_json(
1287
+ error_response(
1288
+ f"Config file not found: {config_path}",
1289
+ error_code="config_not_found",
1290
+ suggestion="Run 'procler config init' to create one",
1291
+ )
1292
+ )
1293
+ sys.exit(1)
1294
+
1295
+ try:
1296
+ # Force reload to catch parse errors
1297
+ cfg = reload_config()
1298
+ errors = cfg.validate_references()
1299
+
1300
+ if errors:
1301
+ output_json(
1302
+ error_response(
1303
+ "Config validation failed",
1304
+ error_code="validation_failed",
1305
+ errors=errors,
1306
+ )
1307
+ )
1308
+ sys.exit(1)
1309
+
1310
+ output_json(
1311
+ success_response(
1312
+ {
1313
+ "valid": True,
1314
+ "config_file": str(config_path),
1315
+ "summary": {
1316
+ "processes": len(cfg.processes),
1317
+ "groups": len(cfg.groups),
1318
+ "recipes": len(cfg.recipes),
1319
+ "snippets": len(cfg.snippets),
1320
+ },
1321
+ }
1322
+ )
1323
+ )
1324
+
1325
+ except Exception as e:
1326
+ output_json(
1327
+ error_response(
1328
+ f"Failed to parse config: {e}",
1329
+ error_code="parse_error",
1330
+ )
1331
+ )
1332
+ sys.exit(1)
1333
+
1334
+
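The validate command always prints an envelope before exiting, and failures are distinguished by `error_code` (`config_not_found`, `validation_failed` with an `errors` list, or `parse_error`). A sketch of driving it programmatically, assuming `procler` is on PATH:

```python
# Illustrative sketch only, not shipped in the wheel.
import json
import subprocess

proc = subprocess.run(
    ["procler", "config", "validate"], capture_output=True, text=True
)
result = json.loads(proc.stdout)
if result["success"]:
    print("config OK:", result["data"]["summary"])
elif result.get("error_code") == "config_not_found":
    subprocess.run(["procler", "config", "init"], check=True)
else:
    for err in result.get("errors", []):   # present on validation_failed
        print("config error:", err)
```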
1335
+ @config.command("path")
1336
+ def config_path() -> None:
1337
+ """Show the config directory path."""
1338
+ from .config import find_config_dir, get_changelog_path, get_config_file_path, get_state_db_path
1339
+
1340
+ config_dir = find_config_dir()
1341
+
1342
+ output_json(
1343
+ success_response(
1344
+ {
1345
+ "config_dir": str(config_dir),
1346
+ "config_file": str(get_config_file_path()),
1347
+ "changelog": str(get_changelog_path()),
1348
+ "state_db": str(get_state_db_path()),
1349
+ "exists": {
1350
+ "config_dir": config_dir.exists(),
1351
+ "config_file": get_config_file_path().exists(),
1352
+ "changelog": get_changelog_path().exists(),
1353
+ "state_db": get_state_db_path().exists(),
1354
+ },
1355
+ }
1356
+ )
1357
+ )
1358
+
1359
+
1360
+ @config.command("explain")
1361
+ def config_explain() -> None:
1362
+ """Explain what the config defines in plain language (LLM-friendly)."""
1363
+ from .config import get_config, get_config_file_path
1364
+
1365
+ cfg = get_config()
1366
+ config_path = get_config_file_path()
1367
+
1368
+ if not config_path.exists():
1369
+ output_json(
1370
+ success_response(
1371
+ {
1372
+ "summary": "No config file found. Run 'procler config init' to create one.",
1373
+ "sections": [],
1374
+ }
1375
+ )
1376
+ )
1377
+ return
1378
+
1379
+ sections = []
1380
+
1381
+ # Explain processes
1382
+ if cfg.processes:
1383
+ proc_explanations = []
1384
+ for name, proc in cfg.processes.items():
1385
+ ctx = "locally" if proc.context.value == "local" else f"in Docker container '{proc.container}'"
1386
+ desc = f"'{name}': runs `{proc.command}` {ctx}"
1387
+ if proc.cwd:
1388
+ desc += f" (working dir: {proc.cwd})"
1389
+ if proc.description:
1390
+ desc += f" - {proc.description}"
1391
+ proc_explanations.append(desc)
1392
+ sections.append(
1393
+ {
1394
+ "type": "processes",
1395
+ "title": f"{len(cfg.processes)} Process Definitions",
1396
+ "explanation": "These processes can be started, stopped, and monitored individually.",
1397
+ "items": proc_explanations,
1398
+ }
1399
+ )
1400
+
1401
+ # Explain groups
1402
+ if cfg.groups:
1403
+ group_explanations = []
1404
+ for name, group in cfg.groups.items():
1405
+ stop_order = group.get_stop_order()
1406
+ is_reversed = stop_order == list(reversed(group.processes))
1407
+ stop_desc = "reversed order" if is_reversed else f"custom order: {' -> '.join(stop_order)}"
1408
+ desc = f"'{name}': starts [{' -> '.join(group.processes)}], stops in {stop_desc}"
1409
+ if group.description:
1410
+ desc += f" - {group.description}"
1411
+ group_explanations.append(desc)
1412
+ sections.append(
1413
+ {
1414
+ "type": "groups",
1415
+ "title": f"{len(cfg.groups)} Process Groups",
1416
+ "explanation": "Groups start processes in order and stop them in reverse (or custom) order.",
1417
+ "items": group_explanations,
1418
+ }
1419
+ )
1420
+
1421
+ # Explain recipes
1422
+ if cfg.recipes:
1423
+ recipe_explanations = []
1424
+ for name, recipe in cfg.recipes.items():
1425
+ steps = recipe.get_steps()
1426
+ step_summary = f"{len(steps)} steps"
1427
+ error_handling = "stops on error" if recipe.on_error.value == "stop" else "continues on error"
1428
+ desc = f"'{name}': {step_summary}, {error_handling}"
1429
+ if recipe.description:
1430
+ desc += f" - {recipe.description}"
1431
+ recipe_explanations.append(desc)
1432
+ sections.append(
1433
+ {
1434
+ "type": "recipes",
1435
+ "title": f"{len(cfg.recipes)} Recipes",
1436
+ "explanation": "Recipes are multi-step operations that automate common workflows.",
1437
+ "items": recipe_explanations,
1438
+ }
1439
+ )
1440
+
1441
+ # Explain snippets
1442
+ if cfg.snippets:
1443
+ snippet_explanations = []
1444
+ for name, snippet in cfg.snippets.items():
1445
+ ctx = "locally" if snippet.context.value == "local" else f"in Docker '{snippet.container}'"
1446
+ desc = f"'{name}': `{snippet.command}` ({ctx})"
1447
+ if snippet.description:
1448
+ desc += f" - {snippet.description}"
1449
+ snippet_explanations.append(desc)
1450
+ sections.append(
1451
+ {
1452
+ "type": "snippets",
1453
+ "title": f"{len(cfg.snippets)} Snippets",
1454
+ "explanation": "Snippets are reusable commands you can run quickly.",
1455
+ "items": snippet_explanations,
1456
+ }
1457
+ )
1458
+
1459
+ # Build summary
1460
+ total = len(cfg.processes) + len(cfg.groups) + len(cfg.recipes) + len(cfg.snippets)
1461
+ summary = (
1462
+ f"Config defines {total} items: {len(cfg.processes)} processes, "
1463
+ f"{len(cfg.groups)} groups, {len(cfg.recipes)} recipes, {len(cfg.snippets)} snippets."
1464
+ )
1465
+
1466
+ output_json(
1467
+ success_response(
1468
+ {
1469
+ "summary": summary,
1470
+ "sections": sections,
1471
+ "config_file": str(config_path),
1472
+ }
1473
+ )
1474
+ )
1475
+
1476
+
1477
+ if __name__ == "__main__":
1478
+ cli()
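The Click group above is what both entry points ultimately call: the installed `procler` console script, and (presumably, via the small `procler/__main__.py` listed in this wheel) `python -m procler`. Either way, the commands are the same:

```bash
procler --help             # human-readable help
procler capabilities       # machine-readable JSON schema
python -m procler status   # presumably equivalent, dispatched through __main__.py
```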