aptdata 0.0.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aptdata/__init__.py +3 -0
- aptdata/cli/__init__.py +5 -0
- aptdata/cli/app.py +247 -0
- aptdata/cli/commands/__init__.py +9 -0
- aptdata/cli/commands/config_cmd.py +128 -0
- aptdata/cli/commands/mesh_cmd.py +435 -0
- aptdata/cli/commands/plugin_cmd.py +107 -0
- aptdata/cli/commands/system_cmd.py +90 -0
- aptdata/cli/commands/telemetry_cmd.py +57 -0
- aptdata/cli/completions.py +56 -0
- aptdata/cli/interactive.py +269 -0
- aptdata/cli/rendering/__init__.py +31 -0
- aptdata/cli/rendering/console.py +119 -0
- aptdata/cli/rendering/logger.py +26 -0
- aptdata/cli/rendering/panels.py +87 -0
- aptdata/cli/rendering/tables.py +81 -0
- aptdata/cli/scaffold.py +1089 -0
- aptdata/config/__init__.py +13 -0
- aptdata/config/parser.py +136 -0
- aptdata/config/schema.py +27 -0
- aptdata/config/secrets.py +60 -0
- aptdata/core/__init__.py +46 -0
- aptdata/core/context.py +31 -0
- aptdata/core/dataset.py +39 -0
- aptdata/core/lineage.py +213 -0
- aptdata/core/state.py +27 -0
- aptdata/core/system.py +317 -0
- aptdata/core/workflow.py +372 -0
- aptdata/mcp/__init__.py +5 -0
- aptdata/mcp/server.py +198 -0
- aptdata/plugins/__init__.py +77 -0
- aptdata/plugins/ai/__init__.py +6 -0
- aptdata/plugins/ai/chunking.py +66 -0
- aptdata/plugins/ai/embeddings.py +56 -0
- aptdata/plugins/base.py +57 -0
- aptdata/plugins/dataset.py +62 -0
- aptdata/plugins/governance/__init__.py +32 -0
- aptdata/plugins/governance/catalog.py +115 -0
- aptdata/plugins/governance/classification.py +44 -0
- aptdata/plugins/governance/lineage_store.py +49 -0
- aptdata/plugins/governance/rules.py +180 -0
- aptdata/plugins/local_fs.py +241 -0
- aptdata/plugins/manager.py +142 -0
- aptdata/plugins/postgres.py +113 -0
- aptdata/plugins/quality/__init__.py +39 -0
- aptdata/plugins/quality/contract.py +128 -0
- aptdata/plugins/quality/expectations.py +310 -0
- aptdata/plugins/quality/report.py +94 -0
- aptdata/plugins/quality/validator.py +139 -0
- aptdata/plugins/rest.py +135 -0
- aptdata/plugins/transform/__init__.py +14 -0
- aptdata/plugins/transform/pandas.py +129 -0
- aptdata/plugins/transform/spark.py +134 -0
- aptdata/plugins/vector/__init__.py +6 -0
- aptdata/plugins/vector/base.py +19 -0
- aptdata/plugins/vector/qdrant.py +41 -0
- aptdata/telemetry/__init__.py +5 -0
- aptdata/telemetry/instrumentation.py +164 -0
- aptdata/tui/__init__.py +5 -0
- aptdata/tui/monitor.py +279 -0
- aptdata-0.0.2.dist-info/METADATA +330 -0
- aptdata-0.0.2.dist-info/RECORD +65 -0
- aptdata-0.0.2.dist-info/WHEEL +4 -0
- aptdata-0.0.2.dist-info/entry_points.txt +3 -0
- aptdata-0.0.2.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,435 @@
|
|
|
1
|
+
"""CLI sub-commands for mesh component orchestration."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import subprocess
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
|
|
9
|
+
import typer
|
|
10
|
+
|
|
11
|
+
from aptdata.cli.rendering.console import SmartConsole
|
|
12
|
+
|
|
13
|
+
mesh_app = typer.Typer(
|
|
14
|
+
name="mesh", help="Orchestrate mesh components (job-wheel, docker-compose-app, …)."
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
_MESH_FILE = "mesh.yaml"
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def _load_mesh(path: Path) -> dict:
    """Parse *path* as YAML, returning ``{}`` for an empty document.

    Prefers PyYAML when available; otherwise falls back to a naive
    line-oriented parser that understands only flat ``key: value`` scalars
    (no nesting, no lists, no multi-line values). Install PyYAML
    (``pip install pyyaml``) for full YAML support.
    """
    try:
        import yaml  # type: ignore[import]
    except ImportError:
        import re

        # Fallback parser: one top-level scalar per line; optional quotes
        # around the value are stripped.
        scalar = re.compile(r'^(\w[\w\-]*):\s*"?([^"]*)"?\s*$')
        parsed: dict = {}
        for raw in path.read_text(encoding="utf-8").splitlines():
            stripped = raw.strip()
            if not stripped or stripped.startswith("#"):
                continue
            hit = scalar.match(stripped)
            if hit:
                parsed[hit.group(1)] = hit.group(2).strip()
        return parsed

    return yaml.safe_load(path.read_text(encoding="utf-8")) or {}
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def _find_mesh_yaml(directory: Path) -> Path | None:  # noqa: UP007
    """Return *directory*/mesh.yaml when it exists, otherwise ``None``."""
    mesh_path = directory / _MESH_FILE
    if mesh_path.exists():
        return mesh_path
    return None
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
@mesh_app.command("list")
def mesh_list(
    directory: Path = typer.Option(
        Path("."),
        "--dir",
        "-d",
        help="Directory to scan for mesh.yaml files.",
        exists=False,
        file_okay=False,
        dir_okay=True,
        resolve_path=True,
    ),
    json_mode: bool = typer.Option(False, "--json", help="Emit JSON lines."),
) -> None:
    """List mesh components found under *directory*.

    Recursively searches for mesh.yaml files and displays component metadata.

    Examples
    --------
    aptdata mesh list
    aptdata mesh list --dir ./projects
    """
    console = SmartConsole(json_mode=json_mode)
    root = directory.resolve()
    components: list[dict] = []

    # Collect metadata from every mesh.yaml below root; a file that fails to
    # parse still yields an entry (with an "error" key) so it is not hidden.
    for mesh_file in sorted(root.rglob(_MESH_FILE)):
        try:
            data = _load_mesh(mesh_file)
            components.append(
                {
                    # Fall back to the containing directory name when the
                    # file does not declare a "component" key.
                    "component": data.get("component", mesh_file.parent.name),
                    "type": data.get("type", "unknown"),
                    "version": data.get("version", ""),
                    "path": str(mesh_file.parent),
                }
            )
        except Exception as exc:  # noqa: BLE001
            components.append(
                {
                    "component": mesh_file.parent.name,
                    "type": "unknown",
                    "version": "",
                    "path": str(mesh_file.parent),
                    "error": str(exc),
                }
            )

    if json_mode:
        # Machine-readable output: one JSON document with all components.
        print(
            json.dumps({"components": components, "count": len(components)}), flush=True
        )
    else:
        if not components:
            console.warning(f"No mesh.yaml files found under '{root}'.")
            return

        from rich.table import Table  # noqa: PLC0415

        table = Table(title="Mesh Components", show_header=True)
        table.add_column("Component", style="cyan")
        table.add_column("Type", style="magenta")
        table.add_column("Version")
        table.add_column("Path", style="dim")
        for c in components:
            # NOTE(review): the "error" key (when present) is not shown in
            # the table — only in --json output.
            table.add_row(c["component"], c["type"], c["version"], c["path"])
        console.render(table)
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
@mesh_app.command("run")
def mesh_run(
    component: str = typer.Argument(
        ..., help="Component name or path containing mesh.yaml."
    ),
    directory: Path = typer.Option(
        Path("."),
        "--dir",
        "-d",
        help="Base directory to search for the component.",
        exists=False,
        resolve_path=True,
    ),
    dry_run: bool = typer.Option(
        False, "--dry-run", help="Show what would run without executing."
    ),
    json_mode: bool = typer.Option(False, "--json", help="Emit JSON lines."),
) -> None:
    """Run a mesh component by name.

    Locates the component's mesh.yaml, detects its type, and executes it.

    Supported types
    ---------------
    * ``job-wheel`` — runs the installed wheel entry-point
    * ``docker-compose-app`` — runs ``docker compose up`` in the component dir

    Examples
    --------
    aptdata mesh run my_job
    aptdata mesh run my_app --dry-run
    """
    console = SmartConsole(json_mode=json_mode)
    root = directory.resolve()

    # Find mesh.yaml for the given component (by name or direct path)
    mesh_file: Path | None = None  # noqa: UP007
    component_path = root / component
    if component_path.is_dir():
        # Fast path: the argument is a directory that directly holds mesh.yaml.
        mesh_file = _find_mesh_yaml(component_path)
    if mesh_file is None:
        # Slow path: scan every mesh.yaml under root for a matching
        # "component" key; unparsable files are skipped silently.
        for candidate in root.rglob(_MESH_FILE):
            try:
                data = _load_mesh(candidate)
                if data.get("component") == component:
                    mesh_file = candidate
                    break
            except Exception:  # noqa: BLE001
                continue

    if mesh_file is None:
        msg = f"Component '{component}' not found. No mesh.yaml located under '{root}'."
        if json_mode:
            print(
                json.dumps(
                    {"event": "mesh.error", "component": component, "error": msg}
                ),
                flush=True,
            )
        else:
            console.error(msg)
        raise typer.Exit(1)

    config = _load_mesh(mesh_file)
    comp_type = config.get("type", "unknown")
    comp_dir = mesh_file.parent

    # Announce the run before executing (JSON event or human-readable line).
    if json_mode:
        print(
            json.dumps(
                {
                    "event": "mesh.run.started",
                    "component": component,
                    "type": comp_type,
                    "dry_run": dry_run,
                }
            ),
            flush=True,
        )
    else:
        console.info(f"Running component [bold cyan]{component}[/] (type: {comp_type})")

    if dry_run:
        # Only report what would be executed; never touch subprocess.
        _show_dry_run(component, comp_type, comp_dir, config, json_mode, console)
        return

    try:
        if comp_type == "job-wheel":
            _run_job_wheel(component, comp_dir, config)
        elif comp_type == "docker-compose-app":
            _run_docker_compose(component, comp_dir, config)
        else:
            raise ValueError(f"Unsupported component type '{comp_type}'.")
    except Exception as exc:  # noqa: BLE001
        if json_mode:
            print(
                json.dumps(
                    {
                        "event": "mesh.run.error",
                        "component": component,
                        "error": str(exc),
                    }
                ),
                flush=True,
            )
        else:
            console.error(f"Failed to run '{component}': {exc}")
        raise typer.Exit(1) from exc

    if json_mode:
        print(
            json.dumps(
                {
                    "event": "mesh.run.completed",
                    "component": component,
                    "type": comp_type,
                }
            ),
            flush=True,
        )
    else:
        console.success(f"Component [bold cyan]{component}[/] finished successfully.")
|
|
242
|
+
|
|
243
|
+
|
|
244
|
+
@mesh_app.command("build")
def mesh_build(
    component: str = typer.Argument(
        ..., help="Component name or path containing mesh.yaml."
    ),
    directory: Path = typer.Option(
        Path("."),
        "--dir",
        "-d",
        help="Base directory to search for the component.",
        exists=False,
        resolve_path=True,
    ),
    json_mode: bool = typer.Option(False, "--json", help="Emit JSON lines."),
) -> None:
    """Build a mesh component.

    Supported types
    ---------------
    * ``job-wheel`` — builds the Python wheel (``pip wheel .``)
    * ``docker-compose-app`` — builds Docker images (``docker compose build``)

    Examples
    --------
    aptdata mesh build my_job
    aptdata mesh build my_app --json
    """
    console = SmartConsole(json_mode=json_mode)
    root = directory.resolve()

    # Locate the component's mesh.yaml: first as a direct sub-directory of
    # root, then by scanning for a matching "component" key.
    mesh_file: Path | None = None  # noqa: UP007
    component_path = root / component
    if component_path.is_dir():
        mesh_file = _find_mesh_yaml(component_path)
    if mesh_file is None:
        for candidate in root.rglob(_MESH_FILE):
            try:
                data = _load_mesh(candidate)
                if data.get("component") == component:
                    mesh_file = candidate
                    break
            except Exception:  # noqa: BLE001
                # Unparsable mesh.yaml files are skipped during the scan.
                continue

    if mesh_file is None:
        msg = f"Component '{component}' not found. No mesh.yaml located under '{root}'."
        if json_mode:
            print(
                json.dumps(
                    {"event": "mesh.error", "component": component, "error": msg}
                ),
                flush=True,
            )
        else:
            console.error(msg)
        raise typer.Exit(1)

    config = _load_mesh(mesh_file)
    comp_type = config.get("type", "unknown")
    comp_dir = mesh_file.parent

    # Announce the build (JSON event or human-readable line).
    if json_mode:
        print(
            json.dumps(
                {
                    "event": "mesh.build.started",
                    "component": component,
                    "type": comp_type,
                }
            ),
            flush=True,
        )
    else:
        console.info(
            f"Building component [bold cyan]{component}[/] (type: {comp_type})"
        )

    try:
        if comp_type == "job-wheel":
            _build_job_wheel(component, comp_dir)
        elif comp_type == "docker-compose-app":
            _build_docker_compose(component, comp_dir)
        else:
            raise ValueError(f"Unsupported component type '{comp_type}'.")
    except Exception as exc:  # noqa: BLE001
        if json_mode:
            print(
                json.dumps(
                    {
                        "event": "mesh.build.error",
                        "component": component,
                        "error": str(exc),
                    }
                ),
                flush=True,
            )
        else:
            console.error(f"Failed to build '{component}': {exc}")
        raise typer.Exit(1) from exc

    if json_mode:
        print(
            json.dumps(
                {
                    "event": "mesh.build.completed",
                    "component": component,
                    "type": comp_type,
                }
            ),
            flush=True,
        )
    else:
        console.success(f"Component [bold cyan]{component}[/] built successfully.")
|
|
357
|
+
|
|
358
|
+
|
|
359
|
+
# ---------------------------------------------------------------------------
|
|
360
|
+
# Internal helpers
|
|
361
|
+
# ---------------------------------------------------------------------------
|
|
362
|
+
|
|
363
|
+
|
|
364
|
+
def _show_dry_run(
    component: str,
    comp_type: str,
    comp_dir: Path,
    config: dict,
    json_mode: bool,
    console: SmartConsole,
) -> None:
    """Report the command ``mesh run`` would execute, without executing it.

    The command is constructed exactly as :func:`_run_job_wheel` /
    :func:`_run_docker_compose` would, so dry-run output matches reality:
    the job-wheel default entry point is ``<component>-job``, and a
    malformed (non-mapping) ``run`` section falls back to defaults instead
    of crashing.
    """
    if comp_type == "job-wheel":
        # Mirror _run_job_wheel: guard against a non-dict `run` section and
        # use the same `<component>-job` default entry point.
        run_cfg = config.get("run", {})
        if isinstance(run_cfg, dict):
            entrypoint = run_cfg.get("entrypoint", f"{component}-job")
            args = run_cfg.get("args", [])
        else:
            entrypoint = f"{component}-job"
            args = []
        cmd = [entrypoint, *args]
    elif comp_type == "docker-compose-app":
        cmd = ["docker", "compose", "up"]
    else:
        cmd = ["<unknown>"]

    if json_mode:
        print(
            json.dumps(
                {
                    "event": "mesh.run.dry_run",
                    "component": component,
                    "type": comp_type,
                    "command": cmd,
                }
            ),
            flush=True,
        )
    else:
        console.info(f"[dry-run] Would execute: {' '.join(cmd)} in {comp_dir}")
|
|
394
|
+
|
|
395
|
+
|
|
396
|
+
def _run_job_wheel(component: str, comp_dir: Path, config: dict) -> None:
    """Execute a job-wheel component via its console entry point.

    The entry point and arguments come from the ``run`` section of
    mesh.yaml; a missing or non-mapping section falls back to the
    ``<component>-job`` default with no arguments. Raises ``RuntimeError``
    on a non-zero exit status.
    """
    entrypoint = f"{component}-job"
    args: list = []
    run_cfg = config.get("run", {})
    if isinstance(run_cfg, dict):
        entrypoint = run_cfg.get("entrypoint", entrypoint)
        args = run_cfg.get("args", [])
    proc = subprocess.run([entrypoint, *args], cwd=comp_dir, check=False)  # noqa: S603
    if proc.returncode != 0:
        raise RuntimeError(f"Job '{component}' exited with code {proc.returncode}.")
|
|
408
|
+
|
|
409
|
+
|
|
410
|
+
def _run_docker_compose(component: str, comp_dir: Path, config: dict) -> None:
    """Bring up a docker-compose-app component in its own directory.

    Raises ``RuntimeError`` when ``docker compose up`` exits non-zero.
    """
    proc = subprocess.run(["docker", "compose", "up"], cwd=comp_dir, check=False)  # noqa: S603
    if proc.returncode != 0:
        raise RuntimeError(
            f"Docker Compose app '{component}' exited with code {proc.returncode}."
        )
|
|
417
|
+
|
|
418
|
+
|
|
419
|
+
def _build_job_wheel(component: str, comp_dir: Path) -> None:
    """Build the component's wheel into ``dist/`` via ``pip wheel``.

    Raises ``RuntimeError`` when the build exits non-zero.
    """
    proc = subprocess.run(  # noqa: S603
        ["pip", "wheel", ".", "-w", "dist/", "--no-deps"],
        cwd=comp_dir,
        check=False,
    )
    if proc.returncode != 0:
        raise RuntimeError(
            f"Wheel build for '{component}' failed with code {proc.returncode}."
        )
|
|
426
|
+
|
|
427
|
+
|
|
428
|
+
def _build_docker_compose(component: str, comp_dir: Path) -> None:
    """Build the component's Docker images via ``docker compose build``.

    Raises ``RuntimeError`` when the build exits non-zero.
    """
    proc = subprocess.run(["docker", "compose", "build"], cwd=comp_dir, check=False)  # noqa: S603
    if proc.returncode != 0:
        raise RuntimeError(
            f"Docker Compose build for '{component}' failed"
            f" with code {proc.returncode}."
        )
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
"""CLI sub-commands for plugin management."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
|
|
7
|
+
import typer
|
|
8
|
+
|
|
9
|
+
from aptdata.cli.rendering.console import SmartConsole
|
|
10
|
+
from aptdata.cli.rendering.tables import plugin_schema_table, plugins_table
|
|
11
|
+
|
|
12
|
+
plugin_app = typer.Typer(name="plugin", help="Manage and inspect plugins.")
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
@plugin_app.command("list")
def plugin_list(
    json_mode: bool = typer.Option(False, "--json", help="Emit JSON lines."),
) -> None:
    """List all registered reader and writer plugins."""
    from aptdata.plugins import plugin_manager  # noqa: PLC0415

    console = SmartConsole(json_mode=json_mode)
    plugins = plugin_manager.list_plugins()

    if json_mode:
        # Machine-readable: dump the raw registry mapping as one JSON line.
        print(json.dumps(plugins), flush=True)
        return
    if plugins.get("readers") or plugins.get("writers"):
        console.render(plugins_table(plugins))
    else:
        console.warning("No plugins registered.")
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
@plugin_app.command("inspect")
def plugin_inspect(
    name: str = typer.Argument(..., help="Plugin name."),
    json_mode: bool = typer.Option(False, "--json", help="Emit JSON lines."),
) -> None:
    """Show constructor schema for a plugin."""
    from aptdata.plugins import plugin_manager  # noqa: PLC0415

    console = SmartConsole(json_mode=json_mode)

    # An unknown plugin name surfaces as KeyError from the manager.
    try:
        schema = plugin_manager.get_plugin_schema(name)
    except KeyError as exc:
        console.error(str(exc))
        raise typer.Exit(1) from exc

    if not json_mode:
        console.render(plugin_schema_table(schema))
    else:
        # default=str so non-JSON-native values (types, defaults) still dump.
        print(json.dumps(schema, default=str), flush=True)
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
@plugin_app.command("preview")
def plugin_preview(
    reader: str = typer.Argument(..., help="Reader plugin name."),
    limit: int = typer.Option(5, "--limit", "-n", help="Number of records to preview."),
) -> None:
    """Execute a reader plugin and preview the first N records."""
    from aptdata.plugins import plugin_manager  # noqa: PLC0415

    console = SmartConsole(json_mode=False)

    try:
        with console.spinner(f"Reading from '{reader}'..."):
            rows = plugin_manager.preview_dataset(reader)
        rows = rows[:limit]
        if rows:
            from rich.table import Table  # noqa: PLC0415

            # Columns come from the first record's keys; every value is
            # stringified for display.
            preview = Table(title=f"Preview: {reader} (first {len(rows)} records)")
            for column in rows[0].keys():
                preview.add_column(str(column))
            for record in rows:
                preview.add_row(*(str(value) for value in record.values()))
            console.render(preview)
        else:
            console.warning("No records returned.")
    except KeyError as exc:
        # Unknown reader name.
        console.error(str(exc))
        raise typer.Exit(1) from exc
    except Exception as exc:  # noqa: BLE001
        console.error(f"Preview failed: {exc}")
        raise typer.Exit(1) from exc
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
@plugin_app.command("load")
def plugin_load(
    module_path: str = typer.Argument(..., help="Dotted Python module path to load."),
) -> None:
    """Dynamically import a plugin module."""
    from aptdata.plugins import plugin_manager  # noqa: PLC0415

    console = SmartConsole(json_mode=False)

    try:
        with console.spinner(f"Loading '{module_path}'..."):
            loaded = plugin_manager.load_module(module_path)
        console.success(f"Module '{loaded.__name__}' loaded successfully.")
    except ModuleNotFoundError as exc:
        # Most specific failure first: the dotted path does not resolve.
        console.error(f"Module not found: {exc}")
        raise typer.Exit(1) from exc
    except Exception as exc:  # noqa: BLE001
        console.error(f"Load failed: {exc}")
        raise typer.Exit(1) from exc
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
"""CLI sub-commands for system registry management."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
|
|
7
|
+
import typer
|
|
8
|
+
|
|
9
|
+
from aptdata.cli.rendering.console import SmartConsole
|
|
10
|
+
from aptdata.cli.rendering.panels import system_detail_panel
|
|
11
|
+
from aptdata.cli.rendering.tables import systems_table
|
|
12
|
+
|
|
13
|
+
system_app = typer.Typer(name="system", help="Inspect and validate registered systems.")
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
@system_app.command("list")
def system_list(
    json_mode: bool = typer.Option(False, "--json", help="Emit JSON lines."),
) -> None:
    """List all registered systems."""
    from aptdata.plugins import registry  # noqa: PLC0415

    console = SmartConsole(json_mode=json_mode)
    names = registry.list_systems()

    if json_mode:
        print(json.dumps({"systems": names, "count": len(names)}), flush=True)
        return
    if names:
        console.render(systems_table(names))
    else:
        console.warning("No systems registered.")
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
@system_app.command("info")
def system_info(
    name: str = typer.Argument(..., help="System name."),
    json_mode: bool = typer.Option(False, "--json", help="Emit JSON lines."),
) -> None:
    """Show detailed info about a registered system."""
    from aptdata.plugins import registry  # noqa: PLC0415

    console = SmartConsole(json_mode=json_mode)
    system_cls = registry.get(name)

    if system_cls is None:
        console.error(f"System '{name}' not found in registry.")
        raise typer.Exit(1)

    if not json_mode:
        console.render(system_detail_panel(name, system_cls))
        return

    payload = {
        "name": name,
        "class": system_cls.__name__,
        "module": getattr(system_cls, "__module__", "unknown"),
        "doc": (system_cls.__doc__ or "").strip(),
    }
    print(json.dumps(payload), flush=True)
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
@system_app.command("validate")
def system_validate(
    name: str = typer.Argument(..., help="System name."),
) -> None:
    """Instantiate a system and compile all its flows without executing."""
    from aptdata.plugins import registry  # noqa: PLC0415

    console = SmartConsole(json_mode=False)
    system_cls = registry.get(name)

    if system_cls is None:
        console.error(f"System '{name}' not found in registry.")
        raise typer.Exit(1)

    try:
        with console.spinner(f"Validating system '{name}'..."):
            instance = system_cls(system_id=name)
            # Prefer the private `_flows` attribute; fall back to a public
            # `flows` attribute, then to an empty list.
            flows = getattr(instance, "_flows", None) or getattr(instance, "flows", [])
            for flow in flows:
                if hasattr(flow, "compile"):
                    flow.compile()
        console.success(f"System '{name}' validated successfully.")
    except Exception as exc:  # noqa: BLE001
        console.error(f"Validation failed: {exc}")
        raise typer.Exit(1) from exc
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
"""CLI sub-commands for telemetry management."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
|
|
7
|
+
import typer
|
|
8
|
+
|
|
9
|
+
from aptdata.cli.rendering.console import SmartConsole
|
|
10
|
+
from aptdata.cli.rendering.tables import telemetry_status_table
|
|
11
|
+
|
|
12
|
+
telemetry_app = typer.Typer(
|
|
13
|
+
name="telemetry", help="Inspect OpenTelemetry telemetry status."
|
|
14
|
+
)
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def _get_telemetry_status() -> dict:
    """Return a dict describing the current OTel tracer provider."""
    from opentelemetry import trace  # noqa: PLC0415

    provider_name = type(trace.get_tracer_provider()).__name__
    return {
        # A ProxyTracerProvider means no real provider was ever installed.
        "configured": provider_name != "ProxyTracerProvider",
        "provider": provider_name,
        "service": "aptdata",
    }
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
@telemetry_app.command("status")
def telemetry_status(
    json_mode: bool = typer.Option(False, "--json", help="Emit JSON lines."),
) -> None:
    """Show OpenTelemetry configuration status."""
    console = SmartConsole(json_mode=json_mode)
    status = _get_telemetry_status()

    if not json_mode:
        console.render(telemetry_status_table(status))
    else:
        print(json.dumps(status), flush=True)
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
@telemetry_app.command("export")
def telemetry_export(
    fmt: str = typer.Option("json", "--format", "-f", help="Export format (json)."),
) -> None:
    """Export collected telemetry spans/metrics.

    Currently only the ``json`` format is supported; any other value exits
    with status 1. The format name is matched case-insensitively so
    ``--format JSON`` works as expected.
    """
    console = SmartConsole(json_mode=False)
    status = _get_telemetry_status()

    # Normalise the user-supplied format so "JSON" / " json " are accepted.
    if fmt.strip().lower() == "json":
        console.print(json.dumps({"telemetry": status}, indent=2, default=str))
    else:
        console.error(f"Unsupported format: {fmt}")
        raise typer.Exit(1)
|