cloudcap-cli 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
cloudcap/__init__.py ADDED
@@ -0,0 +1,3 @@
1
+ """CloudCap terminal client (HTTP API)."""
2
+
3
+ __version__ = "0.1.1"
cloudcap/__main__.py ADDED
@@ -0,0 +1,4 @@
1
+ from cloudcap.cli import main
2
+
3
+ if __name__ == "__main__":
4
+ raise SystemExit(main())
cloudcap/cli.py ADDED
@@ -0,0 +1,1090 @@
1
+ """
2
+ CloudCap CLI — terminal-first workflow against the HTTP API.
3
+
4
+ Config: ~/.cloudcap/config.json (mode 0600): url, token, org_id, environment_id.
5
+ Override with CLOUDCAP_URL, CLOUDCAP_TOKEN, CLOUDCAP_ORG_ID, CLOUDCAP_ENV_ID.
6
+ """
7
+ from __future__ import annotations
8
+
9
+ import argparse
10
+ import json
11
+ import os
12
+ import sys
13
+ from pathlib import Path
14
+ from typing import Any
15
+ from uuid import UUID
16
+
17
+ import httpx
18
+
19
+ from cloudcap import __version__
20
+
21
+ CONFIG_DIR = Path.home() / ".cloudcap"
22
+ CONFIG_PATH = CONFIG_DIR / "config.json"
23
+
24
+
25
+ def _wants_json(args: argparse.Namespace) -> bool:
26
+ return getattr(args, "cli_output", "text") == "json"
27
+
28
+
29
+ def _dump_json(obj: Any, *, compact: bool) -> str:
30
+ if compact:
31
+ return json.dumps(obj, separators=(",", ":"), default=str)
32
+ return json.dumps(obj, indent=2, default=str)
33
+
34
+
35
def _stdout_json(obj: Any) -> None:
    """Emit *obj* as one compact JSON line on stdout (scripting/CI friendly)."""
    line = _dump_json(obj, compact=True)
    print(line)
37
+
38
+
39
def _load_config() -> dict[str, Any]:
    """Load ~/.cloudcap/config.json, returning {} for any unreadable state.

    Best-effort by design: a missing file, an unreadable file, invalid JSON,
    or a JSON document that is not an object all yield an empty config so the
    CLI can fall back to environment variables instead of crashing.
    """
    if not CONFIG_PATH.is_file():
        return {}
    try:
        data = json.loads(CONFIG_PATH.read_text(encoding="utf-8"))
    except (OSError, json.JSONDecodeError):
        # OSError: file vanished or became unreadable between is_file() and
        # read_text() (race, permissions). Treat the same as a corrupt file.
        return {}
    # Callers immediately call .get() on the result; guard against e.g. a
    # top-level JSON array or string having been written to the file.
    return data if isinstance(data, dict) else {}
46
+
47
+
48
def _save_config(cfg: dict[str, Any]) -> None:
    """Persist *cfg* to ~/.cloudcap/config.json and tighten perms to 0600."""
    CONFIG_DIR.mkdir(parents=True, exist_ok=True)
    serialized = json.dumps(cfg, indent=2)
    CONFIG_PATH.write_text(serialized, encoding="utf-8")
    try:
        os.chmod(CONFIG_PATH, 0o600)
    except OSError:
        # chmod can fail on exotic filesystems; the write itself succeeded,
        # so keep going rather than aborting the command.
        pass
55
+
56
+
57
def _client() -> tuple[httpx.Client, dict[str, Any]]:
    """Build an authenticated API client from config/env; return (client, cfg).

    Config file values win over environment variables; the base URL falls
    back to the local development server.
    """
    cfg = _load_config()
    raw_base = cfg.get("url") or os.environ.get("CLOUDCAP_URL") or "http://localhost:8000/v1"
    base = raw_base.rstrip("/")
    token = cfg.get("token") or os.environ.get("CLOUDCAP_TOKEN") or ""
    headers = {"Authorization": f"Bearer {token}"} if token else {}
    client = httpx.Client(base_url=base, headers=headers, timeout=120.0)
    return client, cfg
65
+
66
+
67
def cmd_login(args: argparse.Namespace) -> int:
    """Authenticate and persist credentials to ~/.cloudcap/config.json.

    Three mutually exclusive modes, checked in this order:
      --employee  POST {base}/login/employee with org_slug + email
      --demo      POST {base}/demo/session/start
      --token     store the bearer token, then verify via GET /auth/me
    Returns 0 on success, 1 on failure.
    """
    base = (args.url or os.environ.get("CLOUDCAP_URL") or "http://localhost:8000/v1").rstrip("/")
    want_json = _wants_json(args)
    if getattr(args, "employee", False):
        slug = (args.org_slug or "").strip()
        email = (args.email or "").strip()
        if not slug or not email:
            print("Employee login requires --org-slug and --email", file=sys.stderr)
            return 1
        r = httpx.post(
            f"{base}/login/employee",
            json={"org_slug": slug, "email": email},
            timeout=30.0,
        )
        if r.status_code != 200:
            # Prefer the API's structured "detail"; fall back to the raw body.
            try:
                detail = r.json().get("detail")
            except Exception:
                detail = r.text
            print(f"Employee login failed ({r.status_code}): {detail}", file=sys.stderr)
            return 1
        data = r.json()
        token = data.get("token")
        org_id = data.get("org_id")
        if not token:
            print("Login returned no token", file=sys.stderr)
            return 1
        cfg = _load_config()
        cfg.update({"url": base, "token": token, "org_id": org_id})
        _save_config(cfg)
        if want_json:
            _stdout_json(
                {
                    "ok": True,
                    "mode": "employee",
                    "config_path": str(CONFIG_PATH),
                    "org_id": str(org_id) if org_id else None,
                    "email": email,
                }
            )
        else:
            print(f"Logged in as {email} (employee session). org_id={org_id}")
            print(f"Config written to {CONFIG_PATH}")
        return 0
    if args.demo:
        # Demo mode: any HTTP error raises (raise_for_status), unlike the
        # employee branch above which reports and returns 1.
        r = httpx.post(f"{base}/demo/session/start", json={}, timeout=30.0)
        r.raise_for_status()
        data = r.json()
        token = data.get("token")
        org_id = data.get("org_id")
        if not token:
            print("Demo session returned no token", file=sys.stderr)
            return 1
        cfg = _load_config()
        cfg.update({"url": base, "token": token, "org_id": org_id})
        _save_config(cfg)
        if want_json:
            _stdout_json(
                {
                    "ok": True,
                    "mode": "demo",
                    "config_path": str(CONFIG_PATH),
                    "org_id": str(org_id) if org_id else None,
                }
            )
        else:
            print(f"Logged in (demo). org_id={org_id}")
            print(f"Config written to {CONFIG_PATH}")
        return 0
    if not args.token:
        print("Provide --demo or --token", file=sys.stderr)
        return 1
    # Token mode. NOTE(review): the config is saved BEFORE verification, so
    # an invalid token still ends up on disk — confirm this is intentional.
    cfg = _load_config()
    cfg.update({"url": base, "token": args.token.strip()})
    if args.org_id:
        cfg["org_id"] = args.org_id.strip()
    _save_config(cfg)
    # One-shot probe client (the long-lived _client() isn't used here because
    # the fresh token must be sent regardless of previously saved config).
    probe = httpx.Client(base_url=base, headers={"Authorization": f"Bearer {args.token.strip()}"}, timeout=30.0)
    try:
        check = probe.get("/auth/me")
    finally:
        probe.close()
    if check.status_code == 401:
        print("Login failed: invalid or expired CloudCap credentials.", file=sys.stderr)
        return 1
    if check.status_code != 200:
        if check.status_code >= 500:
            print("Login failed: server error while verifying token.", file=sys.stderr)
        else:
            try:
                body = check.json()
                detail = body.get("detail")
                if isinstance(detail, str):
                    print(f"Login failed: {detail}", file=sys.stderr)
                else:
                    print("Login failed: invalid or rejected credentials.", file=sys.stderr)
            except Exception:
                print("Login failed: invalid or rejected credentials.", file=sys.stderr)
        return 1
    if want_json:
        _stdout_json({"ok": True, "mode": "token", "config_path": str(CONFIG_PATH)})
    else:
        print(f"Token verified. Config: {CONFIG_PATH}")
    return 0
171
+
172
+
173
def cmd_whoami(args: argparse.Namespace) -> int:
    """GET /auth/me and print the caller's identity.

    When an org id is known (from the response, the config file, or
    CLOUDCAP_ORG_ID), a best-effort AWS readiness probe is attached to the
    output under "operator_aws_readiness". Returns 0 on success, 1 on error.
    """
    client, cfg = _client()
    r = client.get("/auth/me")
    if r.status_code != 200:
        if r.status_code == 401:
            print("Invalid or expired CloudCap credentials.", file=sys.stderr)
        else:
            print(r.text, file=sys.stderr)
        return 1
    me = r.json()
    if isinstance(me, dict):
        org_raw = me.get("org_id")
        if not org_raw:
            # Fall back to the locally-configured org when the API omits one.
            org_raw = cfg.get("org_id") or os.environ.get("CLOUDCAP_ORG_ID")
        if org_raw:
            # Readiness enrichment is best-effort: any probe failure is
            # swallowed and whoami still prints the identity.
            try:
                ro = client.get(
                    "/onboarding/operator-aws-readiness",
                    params={"organization_id": str(org_raw).strip()},
                )
                if ro.status_code == 200:
                    data = ro.json()
                    if isinstance(data, dict):
                        me["operator_aws_readiness"] = {
                            "ready": data.get("ready"),
                            "reasons": data.get("reasons") or [],
                        }
            except (OSError, ValueError, TypeError):
                pass
    print(_dump_json(me, compact=_wants_json(args)))
    return 0
204
+
205
+
206
def cmd_savings_list(args: argparse.Namespace) -> int:
    """GET /savings/events and print the list; 0 on success, 1 on HTTP error."""
    client, _cfg = _client()
    resp = client.get("/savings/events")
    if resp.status_code != 200:
        _print_api_error(resp)
        return 1
    print(_dump_json(resp.json(), compact=_wants_json(args)))
    return 0
214
+
215
+
216
def cmd_savings_show(args: argparse.Namespace) -> int:
    """Fetch and print a single savings event by id."""
    client, _cfg = _client()
    event_id = str(args.event_id).strip()
    resp = client.get(f"/savings/events/{event_id}")
    if resp.status_code != 200:
        _print_api_error(resp)
        return 1
    print(_dump_json(resp.json(), compact=_wants_json(args)))
    return 0
225
+
226
+
227
def cmd_savings_breakdown(args: argparse.Namespace) -> int:
    """Print a savings event together with up to 500 of its samples."""
    client, _cfg = _client()
    event_id = str(args.event_id).strip()
    event_resp = client.get(f"/savings/events/{event_id}")
    if event_resp.status_code != 200:
        _print_api_error(event_resp)
        return 1
    samples_resp = client.get(f"/savings/events/{event_id}/samples", params={"limit": 500})
    if samples_resp.status_code != 200:
        _print_api_error(samples_resp)
        return 1
    combined = {"event": event_resp.json(), "samples": samples_resp.json()}
    print(_dump_json(combined, compact=_wants_json(args)))
    return 0
241
+
242
+
243
def cmd_savings_download(args: argparse.Namespace) -> int:
    """Bundle a savings event with its proofs and samples into one JSON doc.

    Writes to --output-file when given, otherwise prints to stdout. All three
    GETs are issued up-front; every failing one is reported before exiting.
    """
    client, _cfg = _client()
    event_id = str(args.event_id).strip()
    responses = {
        "event": client.get(f"/savings/events/{event_id}"),
        "proofs": client.get(f"/savings/events/{event_id}/proofs"),
        "samples": client.get(f"/savings/events/{event_id}/samples", params={"limit": 2000}),
    }
    any_failed = False
    for label, resp in responses.items():
        if resp.status_code != 200:
            any_failed = True
            print(f"{label}:", file=sys.stderr)
            _print_api_error(resp)
    if any_failed:
        return 1
    bundle = {label: resp.json() for label, resp in responses.items()}
    text = _dump_json(bundle, compact=False)
    out_file = (getattr(args, "output_file", None) or "").strip()
    if out_file:
        Path(out_file).write_text(text, encoding="utf-8")
        if _wants_json(args):
            _stdout_json({"ok": True, "path": str(Path(out_file).resolve())})
        else:
            print(f"Wrote {out_file}")
    else:
        print(_dump_json(bundle, compact=_wants_json(args)))
    return 0
267
+
268
+
269
def cmd_pipeline_status(args: argparse.Namespace) -> int:
    """Report execution readiness for a pipeline run plus terraform preflight.

    Combines GET /pipeline/runs/{id}/detail (execution_readiness section),
    GET /system/terraform-runtime, and — best-effort — the operator_execution
    section of GET /health/system-status/detailed.
    Returns 0 on success, 1 on any blocking HTTP error.
    """
    # Accept the run id either positionally or via --run-id.
    run_id = (getattr(args, "run_id", None) or args.run_id_opt or "").strip()
    if not run_id:
        print("Provide RUN_ID or --run-id", file=sys.stderr)
        return 1
    client, _ = _client()
    rd = client.get(f"/pipeline/runs/{run_id}/detail")
    if rd.status_code == 401:
        print("Invalid or expired CloudCap credentials.", file=sys.stderr)
        return 1
    if rd.status_code != 200:
        _print_api_error(rd)
        return 1
    detail = rd.json()
    er = detail.get("execution_readiness") or {}
    # Terraform runtime preflight is always reported: an HTTP error is folded
    # into the payload instead of aborting the command.
    tr = client.get("/system/terraform-runtime")
    if tr.status_code == 200:
        tf_body: dict[str, Any] = tr.json() if isinstance(tr.json(), dict) else {}
    else:
        tf_body = {"http_error": tr.status_code, "body": (tr.text or "")[:500]}

    as_json = _wants_json(args) or getattr(args, "emit_json", False)
    if as_json:
        out: dict[str, Any] = {
            "pipeline_run_id": run_id,
            "execution_readiness": er,
            "terraform_runtime": tf_body,
        }
        # Best-effort system status; silently skipped on failure.
        ss = client.get("/health/system-status/detailed")
        if ss.status_code == 200 and isinstance(ss.json(), dict):
            out["system_status_operator_execution"] = (ss.json() or {}).get("operator_execution")
        print(_dump_json(out, compact=_wants_json(args)))
        return 0

    # Human-readable report.
    print(f"Pipeline run {run_id}")
    print(f" can_apply_backend_terraform: {er.get('can_apply_backend_terraform')}")
    print(f" backend_apply_globally_configured: {er.get('backend_apply_globally_configured')}")
    print(f" pipeline_execution_mode: {er.get('pipeline_execution_mode')}")
    print(
        " plan_binary / workspace / bundle_consistent: "
        f"{er.get('terraform_plan_binary_present')} / {er.get('terraform_workspace_zip_present')} / {er.get('terraform_deploy_bundle_consistent')}"
    )
    blockers = er.get("blockers") or []
    if blockers:
        print(" Blockers:")
        for b in blockers:
            code = b.get("code")
            msg = b.get("message")
            print(f" - [{code}] {msg}")
            rem = b.get("remediation")
            if rem:
                print(f" → {rem}")
    else:
        print(" Blockers: (none)")

    print("Terraform runtime (GET …/system/terraform-runtime):")
    print(
        f" terraform_available: {tf_body.get('terraform_available')} "
        f"terraform_version_ok: {tf_body.get('terraform_version_ok')} "
        f"workdir_writable: {tf_body.get('workdir_writable')}"
    )
    if tf_body.get("terraform_version_line"):
        print(f" version: {tf_body.get('terraform_version_line')}")

    # Best-effort system status; silently skipped on failure.
    ss = client.get("/health/system-status/detailed")
    if ss.status_code == 200 and isinstance(ss.json(), dict):
        oe = (ss.json() or {}).get("operator_execution") or {}
        if oe:
            print("System operator_execution (GET …/health/system-status/detailed):")
            print(f" backend_apply_globally_configured: {oe.get('backend_apply_globally_configured')}")
            print(f" pipeline_execution_mode: {oe.get('pipeline_execution_mode')}")
    return 0
341
+
342
+
343
def cmd_context_set(args: argparse.Namespace) -> int:
    """Persist org/environment context into the config file and echo it."""
    cfg = _load_config()
    for key, value in (("org_id", args.org_id), ("environment_id", args.environment_id)):
        if value:
            cfg[key] = value.strip()
    _save_config(cfg)
    snapshot = {key: cfg.get(key) for key in ("url", "org_id", "environment_id")}
    print(_dump_json(snapshot, compact=_wants_json(args)))
    return 0
353
+
354
+
355
def cmd_context_show(args: argparse.Namespace) -> int:
    """Print the saved CLI context (the entire config file contents)."""
    saved = _load_config()
    print(_dump_json(saved, compact=_wants_json(args)))
    return 0
359
+
360
+
361
+ def _require_org(cfg: dict[str, Any]) -> UUID:
362
+ raw = cfg.get("org_id") or os.environ.get("CLOUDCAP_ORG_ID")
363
+ if not raw:
364
+ print("Set org via cloudcap context set --org-id ... or CLOUDCAP_ORG_ID", file=sys.stderr)
365
+ raise SystemExit(1)
366
+ return UUID(str(raw))
367
+
368
+
369
def _resolve_org_id(client: httpx.Client, cfg: dict[str, Any]) -> UUID:
    """Prefer the org_id reported by /auth/me; fall back to config/env."""
    try:
        resp = client.get("/auth/me")
        if resp.status_code == 200:
            payload = resp.json()
            raw = payload.get("org_id")
            if raw:
                return UUID(str(raw))
    except (ValueError, TypeError):
        # Malformed body or an unparsable UUID: fall through to local config.
        pass
    return _require_org(cfg)
380
+
381
+
382
def _print_api_error(response: httpx.Response) -> None:
    """Render an API error on stderr, understanding FastAPI-style `detail`.

    Prints the status line first, then either a string detail, a validation
    error list (with dotted `loc` prefixes), or the raw response body.
    """
    print(f"HTTP {response.status_code}", file=sys.stderr)
    try:
        body = response.json()
        detail = body.get("detail")
        if isinstance(detail, str):
            print(detail, file=sys.stderr)
            return
        if isinstance(detail, list):
            for entry in detail:
                if isinstance(entry, dict) and entry.get("msg"):
                    location = entry.get("loc") or ()
                    prefix = f"{' '.join(str(x) for x in location)}: " if location else ""
                    print(f"{prefix}{entry['msg']}", file=sys.stderr)
                else:
                    print(entry, file=sys.stderr)
            return
    except Exception:
        pass
    # Non-JSON body, or a detail shape we don't recognize: dump raw text.
    print(response.text, file=sys.stderr)
402
+
403
+
404
def _check_operator_aws_readiness(client: httpx.Client, org_id: UUID) -> None:
    """Exit(1) unless AWS operator onboarding is complete for *org_id*."""
    resp = client.get(
        "/onboarding/operator-aws-readiness",
        params={"organization_id": str(org_id)},
    )
    if resp.status_code != 200:
        print(resp.text, file=sys.stderr)
        raise SystemExit(1)
    payload = resp.json()
    if not isinstance(payload, dict):
        print(f"Unexpected operator-aws-readiness response (expected object): {payload!r}", file=sys.stderr)
        raise SystemExit(1)
    if payload.get("ready"):
        return
    # Not ready: explain, list the server-supplied reasons, and abort.
    print(
        "AWS operator onboarding is not complete for this organization. "
        "In the web app, add a cloud account with encrypted operator credentials and region, then retry.",
        file=sys.stderr,
    )
    for reason in payload.get("reasons") or []:
        print(f" - {reason}", file=sys.stderr)
    raise SystemExit(1)
425
+
426
+
427
+ def _evaluate_hints(out: dict[str, Any]) -> list[str]:
428
+ hints: list[str] = []
429
+ dep = out.get("deploy_eligible")
430
+ ev_only = out.get("terraform_evaluate_only")
431
+ bcons = out.get("terraform_deploy_bundle_consistent")
432
+ if dep:
433
+ hints.append(
434
+ "Run is deploy-capable: use cloudcap deploy after approval (if required)."
435
+ )
436
+ elif ev_only:
437
+ hints.append(
438
+ "Evaluate-only run: add --plan-binary and --workspace-zip for deploy-capable run (see docs/pipeline-cli.md)."
439
+ )
440
+ elif bcons is False:
441
+ hints.append(
442
+ "Deploy bundle incomplete: workspace zip must include .terraform.lock.hcl next to main.tf, or fix artifact consistency."
443
+ )
444
+ return hints
445
+
446
+
447
def cmd_evaluate(args: argparse.Namespace) -> int:
    """Create a pipeline run, upload plan artifacts, and trigger evaluation.

    Flow: POST /pipeline/runs → POST …/artifacts (plan JSON or binary,
    plus optional extra binary plan and workspace zip) → POST …/evaluate.
    Requires AWS operator onboarding to be complete for the organization.
    Returns 0 on success, 1 on any failure.
    """
    want_json = _wants_json(args)
    client, cfg = _client()
    org_id = _resolve_org_id(client, cfg)
    _check_operator_aws_readiness(client, org_id)  # raises SystemExit(1) if not ready
    plan_path = Path(args.plan)
    if not plan_path.is_file():
        print(f"Not found: {plan_path}", file=sys.stderr)
        return 1

    # Environment / cloud account: flags win over saved config / env var.
    env_id = args.environment_id or cfg.get("environment_id") or os.environ.get("CLOUDCAP_ENV_ID")
    cloud_account_id = args.cloud_account_id or cfg.get("cloud_account_id")

    body: dict[str, Any] = {
        "organization_id": str(org_id),
        "source": "cli",
    }
    if env_id:
        body["environment_id"] = str(env_id).strip()
    if cloud_account_id:
        body["cloud_account_id"] = str(cloud_account_id).strip()

    r = client.post("/pipeline/runs", json=body)
    if r.status_code != 201:
        print(r.text, file=sys.stderr)
        return 1
    run = r.json()
    run_id = run["id"]
    log: list[str] = []  # NOTE(review): never appended to — looks unused
    if not want_json:
        print(f"Run {run_id} created")

    # Artifact type is inferred from the file extension (.json → plan JSON).
    mime = "application/json" if plan_path.suffix.lower() == ".json" else "application/octet-stream"
    art_type = "terraform_plan_json" if "json" in plan_path.suffix.lower() else "terraform_plan_binary"
    files = {"file": (plan_path.name, plan_path.read_bytes(), mime)}
    data = {"artifact_type": art_type, "metadata_json": "{}"}
    r2 = client.post(f"/pipeline/runs/{run_id}/artifacts", data=data, files=files)
    if r2.status_code != 200:
        print(r2.text, file=sys.stderr)
        return 1
    if not want_json:
        print("Artifact uploaded")

    eval_body: dict[str, Any] = {"region": args.region, "requested_by": "cloudcap-cli"}
    if args.environment_id:
        eval_body["environment_id"] = str(args.environment_id).strip()
    if getattr(args, "workload_id", None):
        eval_body["workload_id"] = str(args.workload_id).strip()
    # Optional binary plan makes the run deploy-capable (see _evaluate_hints).
    plan_binary = getattr(args, "plan_binary", None)
    if plan_binary:
        pb = Path(plan_binary)
        if not pb.is_file():
            print(f"Not found: {pb}", file=sys.stderr)
            return 1
        files_b = {"file": (pb.name, pb.read_bytes(), "application/octet-stream")}
        data_b = {"artifact_type": "terraform_plan_binary", "metadata_json": "{}"}
        r2b = client.post(f"/pipeline/runs/{run_id}/artifacts", data=data_b, files=files_b)
        if r2b.status_code != 200:
            _print_api_error(r2b)
            return 1
        if not want_json:
            print("Binary plan artifact uploaded")

    # Optional workspace zip (main.tf + .terraform.lock.hcl) for backend apply.
    workspace_zip = getattr(args, "workspace_zip", None)
    if workspace_zip:
        wz = Path(workspace_zip)
        if not wz.is_file():
            print(f"Not found: {wz}", file=sys.stderr)
            return 1
        files_w = {"file": (wz.name, wz.read_bytes(), "application/zip")}
        data_w = {"artifact_type": "terraform_workspace_zip", "metadata_json": "{}"}
        rw = client.post(f"/pipeline/runs/{run_id}/artifacts", data=data_w, files=files_w)
        if rw.status_code != 200:
            _print_api_error(rw)
            return 1
        if not want_json:
            # NOTE(review): this progress notice goes to stderr, unlike the
            # other upload notices above — confirm intentional.
            print("Terraform workspace zip uploaded (main.tf + .terraform.lock.hcl)", file=sys.stderr)

    r3 = client.post(f"/pipeline/runs/{run_id}/evaluate", json=eval_body)
    if r3.status_code != 200:
        _print_api_error(r3)
        return 1
    out = r3.json()
    hints = _evaluate_hints(out) if isinstance(out, dict) else []

    if want_json:
        payload: dict[str, Any] = {
            "pipeline_run_id": run_id,
            "evaluation": out,
            "hints": hints,
        }
        _stdout_json(payload)
    else:
        print(json.dumps(out, indent=2))
        print("", file=sys.stderr)
        for h in hints:
            print(h, file=sys.stderr)
    return 0
545
+
546
+
547
def cmd_deploy(args: argparse.Namespace) -> int:
    """Apply a deploy-capable pipeline run (POST …/apply), with preflight help.

    Optionally uploads a missing binary plan (--plan-binary) and workspace zip
    (--workspace-zip) first, then applies and summarizes the resulting
    terraform execution from the detail endpoint.
    Returns 0 on success, 1 on failure, 2 on usage error (missing run id).
    """
    want_json = _wants_json(args)
    client, cfg = _client()
    org_id = _resolve_org_id(client, cfg)
    _check_operator_aws_readiness(client, org_id)  # raises SystemExit(1) if not ready
    run_id = (args.run_id_opt or args.run_id_arg or "").strip()
    if not run_id:
        print(
            "cloudcap deploy: error: RUN_ID (positional) or --run-id is required",
            file=sys.stderr,
        )
        return 2

    # Advisory messages collected along the way; emitted at the end on stderr
    # (text mode) or inside the JSON payload.
    messages: list[str] = []

    r0 = client.get(f"/pipeline/runs/{run_id}")
    if r0.status_code != 200:
        _print_api_error(r0)
        return 1
    run_data = r0.json()

    plan_binary = getattr(args, "plan_binary", None)
    if plan_binary:
        pb = Path(plan_binary)
        if not pb.is_file():
            print(f"Not found: {pb}", file=sys.stderr)
            return 1
        # Upload only when the run does not already carry a plan binary.
        if not run_data.get("terraform_plan_binary_present"):
            files_b = {"file": (pb.name, pb.read_bytes(), "application/octet-stream")}
            data_b = {"artifact_type": "terraform_plan_binary", "metadata_json": "{}"}
            r_up = client.post(f"/pipeline/runs/{run_id}/artifacts", data=data_b, files=files_b)
            if r_up.status_code != 200:
                _print_api_error(r_up)
                return 1
            messages.append("Binary plan artifact uploaded")
        else:
            messages.append("Run already has a Terraform plan binary; skipped --plan-binary upload.")

    workspace_zip = getattr(args, "workspace_zip", None)
    if workspace_zip:
        wz = Path(workspace_zip)
        if not wz.is_file():
            print(f"Not found: {wz}", file=sys.stderr)
            return 1
        # Re-fetch run state: the plan-binary upload above may have changed it.
        r0w = client.get(f"/pipeline/runs/{run_id}")
        if r0w.status_code != 200:
            _print_api_error(r0w)
            return 1
        wd = r0w.json()
        if not wd.get("terraform_workspace_zip_present"):
            files_w = {"file": (wz.name, wz.read_bytes(), "application/zip")}
            data_w = {"artifact_type": "terraform_workspace_zip", "metadata_json": "{}"}
            r_upw = client.post(f"/pipeline/runs/{run_id}/artifacts", data=data_w, files=files_w)
            if r_upw.status_code != 200:
                _print_api_error(r_upw)
                return 1
            messages.append("Terraform workspace zip uploaded")
        else:
            messages.append("Run already has terraform_workspace_zip; skipped --workspace-zip upload.")

    # Preflight advisory based on latest run state. Best-effort: a non-200
    # here is silently ignored and the apply below is still attempted.
    r0b = client.get(f"/pipeline/runs/{run_id}")
    if r0b.status_code == 200:
        rd = r0b.json()
        if not rd.get("terraform_plan_binary_present"):
            messages.append(
                "This run has no binary Terraform plan — upload with cloudcap deploy --plan-binary if needed."
            )
        elif not rd.get("deploy_eligible"):
            if rd.get("terraform_plan_binary_present") and not rd.get("terraform_workspace_zip_present"):
                messages.append("Terraform workspace zip required for backend apply — use --workspace-zip.")
            else:
                messages.append(
                    "Deploy not eligible yet (approval, policy, or execution settings). See GET /pipeline/runs/{id}."
                )

    r = client.post(f"/pipeline/runs/{run_id}/apply")
    if r.status_code != 200:
        _print_api_error(r)
        if r.status_code >= 500:
            print(
                "If this persists, check API logs (e.g. docker compose logs backend --tail=80) for the traceback.",
                file=sys.stderr,
            )
        return 1
    apply_body = r.json()
    exit_code = 0
    detail_json: Any = None
    # Summarize the terraform execution from the detail endpoint.
    rd = client.get(f"/pipeline/runs/{run_id}/detail")
    if rd.status_code != 200:
        messages.append(f"Could not GET /pipeline/runs/{run_id}/detail (HTTP {rd.status_code}).")
    else:
        detail_json = rd.json()
        exs = detail_json.get("executions") or []
        ui = detail_json.get("execution_ui_status")
        if exs:
            last = exs[-1]  # most recent execution row
            st = last.get("status")
            messages.append(f"Terraform execution: status={st} ui={ui}")
            fr = (last.get("failure_reason") or "").strip()
            fc = (last.get("metadata_json") or {}).get("failure_code")
            if st in ("failed", "blocked") and fr:
                if fc:
                    messages.append(f"Terraform failure code: {fc}")
                # Clip very long failure output; full text stays on the API.
                clipped = fr if len(fr) <= 8000 else fr[:8000] + "\n… (truncated; see GET …/detail)"
                messages.append(f"Terraform failure: {clipped}")
            if st in ("failed", "blocked"):
                exit_code = 1
        else:
            # No execution rows usually means the base URL is wrong.
            base = str(client.base_url).rstrip("/")
            messages.append(
                "Warning: no terraform execution rows for this run. "
                f"CLOUDCAP_URL should include /v1 (this CLI uses {base})."
            )

    if want_json:
        _stdout_json(
            {
                "pipeline_run_id": run_id,
                "apply_response": apply_body,
                "detail": detail_json,
                "messages": messages,
                "exit_code": exit_code,
            }
        )
    else:
        print(json.dumps(apply_body, indent=2))
        for m in messages:
            print(m, file=sys.stderr)
    return exit_code
676
+
677
+
678
def cmd_optimize_list(args: argparse.Namespace) -> int:
    """List optimization recommendations for a pipeline run.

    Text mode prints one tab-separated line per recommendation; --output json
    emits the raw API rows. Returns 0 on success (including an empty list),
    1 on HTTP error or missing run.
    """
    client, cfg = _client()
    rid = args.run_id.strip()
    params: dict[str, Any] = {"context_type": "pipeline", "context_id": rid}
    if cfg.get("org_id"):
        params["org_id"] = str(cfg["org_id"]).strip()
    r = client.get("/optimization/techniques", params=params)
    if r.status_code == 404:
        print("Run not found or no access.", file=sys.stderr)
        return 1
    if r.status_code != 200:
        print(r.text, file=sys.stderr)
        return 1
    rows = r.json()
    if not rows:
        if _wants_json(args):
            _stdout_json([])
        else:
            print("No recommendations for this run.")
        return 0
    if _wants_json(args):
        _stdout_json(rows)
        return 0
    for i, t in enumerate(rows, start=1):
        if not isinstance(t, dict):
            continue
        # Bug fix: t.get("title", "") returns None (not the default) when the
        # key is present with an explicit null, and None[:60] raises TypeError.
        title = str(t.get("title") or "")
        print(
            f"{i}.\t{t.get('id')}\t{t.get('technique_id')}\t"
            f"${float(t.get('estimated_monthly_savings') or 0):,.2f}/mo\t"
            f"estimate={t.get('estimate_type') or 'n/a'}\t"
            f"approval={'yes' if t.get('requires_approval') else 'no'}\t"
            f"{title[:60]}"
        )
    return 0
712
+
713
+
714
def cmd_optimize_show(args: argparse.Namespace) -> int:
    """Show one recommendation for a run, addressed by 1-based index or UUID."""
    client, _cfg = _client()
    run_id = args.run_id.strip()
    ref = args.recommendation.strip()
    params: dict[str, Any] = {"context_type": "pipeline", "context_id": run_id}
    listing = client.get("/optimization/techniques", params=params)
    if listing.status_code != 200:
        print(listing.text, file=sys.stderr)
        return 1
    rows = [row for row in listing.json() if isinstance(row, dict)]
    chosen: dict[str, Any] | None = None
    if ref.isdigit():
        index = int(ref) - 1
        if 0 <= index < len(rows):
            chosen = rows[index]
    else:
        chosen = next((row for row in rows if str(row.get("id")) == ref), None)
    if not chosen:
        print("Recommendation not found (use list index or UUID).", file=sys.stderr)
        return 1
    detail = client.get(f"/optimization/techniques/{chosen['id']}")
    if detail.status_code != 200:
        print(detail.text, file=sys.stderr)
        return 1
    print(_dump_json(detail.json(), compact=_wants_json(args)))
    return 0
743
+
744
+
745
+ def _parse_recommendation_refs(ref: str, rows: list[dict[str, Any]]) -> tuple[dict[str, Any] | None, list[str]]:
746
+ parts = [p.strip() for p in ref.split(",") if p.strip()]
747
+ if not parts:
748
+ return None, []
749
+ picks: list[dict[str, Any]] = []
750
+ for p in parts:
751
+ row: dict[str, Any] | None = None
752
+ if p.isdigit():
753
+ idx = int(p) - 1
754
+ if 0 <= idx < len(rows):
755
+ row = rows[idx]
756
+ else:
757
+ for x in rows:
758
+ if str(x.get("id")) == p:
759
+ row = x
760
+ break
761
+ if row is None:
762
+ return None, []
763
+ picks.append(row)
764
+ primary = picks[0]
765
+ merge_ids = [str(x["id"]) for x in picks[1:]]
766
+ return primary, merge_ids
767
+
768
+
769
def cmd_optimize_apply(args: argparse.Namespace) -> int:
    """Apply (or approve) a recommendation; comma-refs merge extra candidates.

    Exit codes: 0 success, 1 not found / HTTP error / approval required,
    2 validation error (HTTP 400).
    """
    want_json = _wants_json(args)
    client, _cfg = _client()
    run_id = args.run_id.strip()
    ref = args.recommendation.strip()
    params: dict[str, Any] = {"context_type": "pipeline", "context_id": run_id}
    listing = client.get("/optimization/techniques", params=params)
    if listing.status_code != 200:
        print(listing.text, file=sys.stderr)
        return 1
    rows = [row for row in listing.json() if isinstance(row, dict)]
    primary, merge_ids = _parse_recommendation_refs(ref, rows)
    if not primary:
        print("Recommendation not found.", file=sys.stderr)
        return 1
    tech_id = primary["id"]
    # Recommendations flagged requires_approval go through /approve instead.
    action = "approve" if primary.get("requires_approval") else "apply"
    body: dict[str, Any] = {"approved_by": args.approved_by or "cli"}
    if merge_ids:
        body["merge_candidate_ids"] = merge_ids
    resp = client.post(f"/optimization/techniques/{tech_id}/{action}", json=body)
    if resp.status_code == 400:
        _print_api_error(resp)
        return 2
    if resp.status_code == 428:
        print("This recommendation requires approval via API approve endpoint.", file=sys.stderr)
        return 1
    if resp.status_code not in (200, 201):
        print(resp.text, file=sys.stderr)
        return 1
    data = resp.json()
    child = data.get("reevaluation_pipeline_run_id") if isinstance(data, dict) else None
    if want_json:
        out = dict(data) if isinstance(data, dict) else {"result": data}
        if child:
            out["follow_up_pipeline_run_id"] = child
        _stdout_json(out)
    else:
        print(json.dumps(data, indent=2))
        if child:
            print(f"\nFollow-up pipeline run: {child}", file=sys.stderr)
    return 0
812
+
813
+
814
def cmd_optimize_download(args: argparse.Namespace) -> int:
    """Export one recommendation's candidate JSON into --output-dir."""
    client, _cfg = _client()
    run_id = args.run_id.strip()
    ref = args.recommendation.strip()
    out_dir = Path(args.output_dir).expanduser().resolve()
    out_dir.mkdir(parents=True, exist_ok=True)
    params: dict[str, Any] = {"context_type": "pipeline", "context_id": run_id}
    listing = client.get("/optimization/techniques", params=params)
    if listing.status_code != 200:
        print(listing.text, file=sys.stderr)
        return 1
    rows = [row for row in listing.json() if isinstance(row, dict)]
    chosen: dict[str, Any] | None = None
    if ref.isdigit():
        index = int(ref) - 1
        if 0 <= index < len(rows):
            chosen = rows[index]
    else:
        chosen = next((row for row in rows if str(row.get("id")) == ref), None)
    if not chosen:
        print("Recommendation not found.", file=sys.stderr)
        return 1
    export = client.get(f"/pipeline/runs/{run_id}/candidates/{chosen['id']}/export")
    if export.status_code != 200:
        print(export.text, file=sys.stderr)
        return 1
    target = out_dir / f"candidate-{chosen['id']}-export.json"
    target.write_text(json.dumps(export.json(), indent=2), encoding="utf-8")
    if _wants_json(args):
        _stdout_json({"ok": True, "path": str(target)})
    else:
        print(f"Wrote {target}")
    return 0
850
+
851
+
852
def cmd_optimize_download_bundle(args: argparse.Namespace) -> int:
    """Download an optimization bundle zip into --output-dir."""
    client, _cfg = _client()
    bundle_id = args.bundle_id.strip()
    out_dir = Path(args.output_dir).expanduser().resolve()
    out_dir.mkdir(parents=True, exist_ok=True)
    resp = client.get(f"/optimization/bundles/{bundle_id}/zip")
    if resp.status_code != 200:
        print(resp.text, file=sys.stderr)
        return 1
    target = out_dir / f"optimization-bundle-{bundle_id}.zip"
    target.write_bytes(resp.content)
    if _wants_json(args):
        _stdout_json({"ok": True, "path": str(target)})
    else:
        print(f"Wrote {target}")
    return 0
868
+
869
+
870
def cmd_aws_test(args: argparse.Namespace) -> int:
    """Run a connectivity test for a cloud account's operator credentials."""
    client, _cfg = _client()
    account_id = args.account_id.strip()
    resp = client.post(f"/cloud/accounts/{account_id}/test-operator-connection")
    if resp.status_code != 200:
        print(resp.text, file=sys.stderr)
        return 1
    print(_dump_json(resp.json(), compact=_wants_json(args)))
    return 0
879
+
880
+
881
def _register_login(sub) -> None:
    # `cloudcap login`: store a bearer token, or start a demo/employee session.
    p_login = sub.add_parser("login", help="Save API token (or start demo / employee session)")
    p_login.add_argument("--url", default=None, help="API base including /v1")
    p_login.add_argument(
        "--demo",
        action="store_true",
        help="POST /v1/demo/session/start (requires ENABLE_DEMO_SCENARIOS=true and fixture orgs; see README)",
    )
    p_login.add_argument(
        "--employee",
        action="store_true",
        help="POST /login/employee (org_slug + email; local/demo only; requires organization_members row)",
    )
    p_login.add_argument("--org-slug", default=None, help="With --employee: organization slug")
    p_login.add_argument("--email", default=None, help="With --employee: member email")
    p_login.add_argument("--token", default=None, help="Bearer token to store")
    p_login.add_argument("--org-id", default=None, help="Organization UUID (with --token)")
    p_login.set_defaults(func=cmd_login)


def _register_whoami(sub) -> None:
    # `cloudcap whoami`: identity probe against /auth/me.
    p_who = sub.add_parser("whoami", help="GET /auth/me")
    p_who.set_defaults(func=cmd_whoami)


def _register_pipeline(sub) -> None:
    # `cloudcap pipeline status`: run readiness + terraform-runtime preflight.
    p_pipe = sub.add_parser("pipeline", help="Pipeline run probes")
    pipe_sub = p_pipe.add_subparsers(dest="pipe_cmd", required=True)
    p_pstat = pipe_sub.add_parser(
        "status",
        help="Execution readiness for a run + terraform-runtime preflight",
    )
    # Run id is accepted positionally or via --run-id; the handler reconciles both.
    p_pstat.add_argument("run_id", nargs="?", metavar="RUN_ID", help="Pipeline run UUID")
    p_pstat.add_argument("--run-id", dest="run_id_opt", default=None, help="Pipeline run UUID (alternative to positional)")
    p_pstat.add_argument(
        "--json",
        action="store_true",
        dest="emit_json",
        help="Same as --output json (deprecated; prefer global --output json)",
    )
    p_pstat.set_defaults(func=cmd_pipeline_status)


def _register_context(sub) -> None:
    # `cloudcap context show|set`: default org/environment stored in config.
    p_cs = sub.add_parser("context", help="Show or set default org/environment")
    cs_sub = p_cs.add_subparsers(dest="ctx_cmd", required=True)
    p_show = cs_sub.add_parser("show")
    p_show.set_defaults(func=cmd_context_show)
    p_set = cs_sub.add_parser("set")
    p_set.add_argument("--org-id", default=None)
    p_set.add_argument("--environment-id", default=None)
    p_set.set_defaults(func=cmd_context_set)


def _register_evaluate(sub) -> None:
    # `cloudcap evaluate`: create a run, upload artifact(s), evaluate policy.
    p_ev = sub.add_parser(
        "evaluate",
        help="Create run, upload plan artifact(s), run policy evaluation",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=(
            "Evaluate-only: pass plan.json (or a single .tfplan) alone for policy/cost evaluation.\n"
            "Deploy-capable (backend terraform apply): pass plan.json, then --plan-binary pointing to the .tfplan "
            "from the same terraform root, and --workspace-zip containing main.tf and .terraform.lock.hcl "
            "from that root. Both --plan-binary and --workspace-zip are required together for deploy-capable runs.\n"
            "See docs/pipeline-cli.md and docs/pipeline-deploy-approval.md."
        ),
    )
    p_ev.add_argument("plan", help="Path to plan.json or .tfplan (first artifact)")
    p_ev.add_argument("--region", default="us-east-1")
    p_ev.add_argument(
        "--environment-id",
        default=None,
        help="Default environment for the pipeline run and evaluation scope (optional)",
    )
    p_ev.add_argument(
        "--workload-id",
        default=None,
        help="Workload for policy/predeploy scope; default workload is used if omitted",
    )
    p_ev.add_argument("--cloud-account-id", default=None)
    p_ev.add_argument(
        "--plan-binary",
        default=None,
        metavar="PATH",
        help="Binary tfplan from terraform plan -out= (pair with --workspace-zip for deploy-capable)",
    )
    p_ev.add_argument(
        "--workspace-zip",
        default=None,
        metavar="PATH",
        help="Zip of terraform root: main.tf, .terraform.lock.hcl, and other .tf files (pair with --plan-binary)",
    )
    p_ev.set_defaults(func=cmd_evaluate)


def _register_deploy(sub) -> None:
    # `cloudcap deploy`: trigger backend terraform apply for a run.
    p_dep = sub.add_parser("deploy", help="POST /pipeline/runs/{id}/apply (backend terraform)")
    p_dep.add_argument(
        "run_id_arg",
        nargs="?",
        metavar="RUN_ID",
        help="Pipeline run UUID (optional if --run-id is set)",
    )
    p_dep.add_argument("--run-id", dest="run_id_opt", default=None, help="Pipeline run UUID")
    p_dep.add_argument(
        "--plan-binary",
        default=None,
        metavar="PATH",
        help="If the run has no binary plan yet, upload this .tfplan then apply",
    )
    p_dep.add_argument(
        "--workspace-zip",
        default=None,
        metavar="PATH",
        help="If the run has no workspace zip yet, upload this zip (main.tf + lock file) then apply",
    )
    p_dep.set_defaults(func=cmd_deploy)


def _register_optimize(sub) -> None:
    # `cloudcap optimize list|show|apply|download|download-bundle`.
    p_opt = sub.add_parser("optimize", help="List, apply, and download pipeline optimizations")
    opt_sub = p_opt.add_subparsers(dest="opt_cmd", required=True)

    p_opt_list = opt_sub.add_parser("list", help="List recommendations for a pipeline run")
    p_opt_list.add_argument("--run-id", required=True, help="Pipeline run UUID")
    p_opt_list.set_defaults(func=cmd_optimize_list)

    p_opt_show = opt_sub.add_parser("show", help="Show one recommendation by index (1-based) or UUID")
    p_opt_show.add_argument("--run-id", required=True)
    p_opt_show.add_argument(
        "--recommendation",
        required=True,
        help="Index from list or optimization candidate UUID",
    )
    p_opt_show.set_defaults(func=cmd_optimize_show)

    p_opt_apply = opt_sub.add_parser("apply", help="Apply recommendation (creates optimized run + bundle)")
    p_opt_apply.add_argument("--run-id", required=True)
    p_opt_apply.add_argument(
        "--recommendation",
        required=True,
        help="Index or UUID; comma-separated for merged apply (e.g. 1,3 or uuid,uuid)",
    )
    p_opt_apply.add_argument("--approved-by", default="cli", dest="approved_by")
    p_opt_apply.set_defaults(func=cmd_optimize_apply)

    p_opt_dl = opt_sub.add_parser(
        "download",
        help="Download candidate export JSON (original vs revised delta)",
    )
    p_opt_dl.add_argument("--run-id", required=True)
    p_opt_dl.add_argument("--recommendation", required=True)
    p_opt_dl.add_argument("--output-dir", required=True)
    p_opt_dl.set_defaults(func=cmd_optimize_download)

    p_opt_bdl = opt_sub.add_parser(
        "download-bundle",
        help="Download ZIP of optimization bundle artifacts",
    )
    p_opt_bdl.add_argument("--bundle-id", required=True)
    p_opt_bdl.add_argument("--output-dir", required=True)
    p_opt_bdl.set_defaults(func=cmd_optimize_download_bundle)


def _register_savings(sub) -> None:
    # `cloudcap savings list|show|breakdown|download`.
    p_sav = sub.add_parser("savings", help="Savings events, samples, and reconciliation")
    sav_sub = p_sav.add_subparsers(dest="sav_cmd", required=True)
    p_sav_list = sav_sub.add_parser("list", help="GET /savings/events")
    p_sav_list.set_defaults(func=cmd_savings_list)
    p_sav_show = sav_sub.add_parser("show", help="GET /savings/events/{id}")
    p_sav_show.add_argument("--event-id", required=True)
    p_sav_show.set_defaults(func=cmd_savings_show)
    p_sav_br = sav_sub.add_parser("breakdown", help="Event detail + sample table JSON")
    p_sav_br.add_argument("--event-id", required=True)
    p_sav_br.set_defaults(func=cmd_savings_breakdown)
    p_sav_dl = sav_sub.add_parser("download", help="Bundle event + proofs + samples (stdout or --output-file)")
    p_sav_dl.add_argument("--event-id", required=True)
    p_sav_dl.add_argument(
        "--output-file",
        default="",
        dest="output_file",
        help="Write JSON to this file instead of stdout",
    )
    p_sav_dl.set_defaults(func=cmd_savings_download)


def _register_aws(sub) -> None:
    # `cloudcap aws test-connection`: live STS probe for stored credentials.
    p_aws = sub.add_parser("aws", help="AWS helpers")
    aws_sub = p_aws.add_subparsers(dest="aws_cmd", required=True)
    p_test = aws_sub.add_parser("test-connection", help="Live STS for stored operator credentials")
    p_test.add_argument("--account-id", required=True)
    p_test.set_defaults(func=cmd_aws_test)


def _build_parser() -> argparse.ArgumentParser:
    """Assemble the top-level parser with global flags and all subcommands."""
    parser = argparse.ArgumentParser(
        prog="cloudcap",
        description="CloudCap terminal client — call the HTTP API from your shell.",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        epilog=(
            "Environment variables (override ~/.cloudcap/config.json): "
            "CLOUDCAP_URL (API base, include /v1), CLOUDCAP_TOKEN, CLOUDCAP_ORG_ID, CLOUDCAP_ENV_ID."
        ),
    )
    parser.add_argument(
        "--version",
        action="version",
        version=f"%(prog)s {__version__}",
    )
    parser.add_argument(
        "--output",
        choices=("text", "json"),
        default="text",
        dest="cli_output",
        help="json: single JSON object on stdout for scripting/CI; errors still on stderr",
    )
    sub = parser.add_subparsers(dest="command", required=True)
    # One registrar per top-level command, in help-listing order.
    _register_login(sub)
    _register_whoami(sub)
    _register_pipeline(sub)
    _register_context(sub)
    _register_evaluate(sub)
    _register_deploy(sub)
    _register_optimize(sub)
    _register_savings(sub)
    _register_aws(sub)
    return parser


def main(argv: list[str] | None = None) -> int:
    """CLI entry point: parse argv and dispatch to the chosen subcommand.

    Args:
        argv: Argument list without the program name; None means sys.argv[1:].

    Returns:
        The handler's exit code (0 success, 1 failure).
    """
    parser = _build_parser()
    args = parser.parse_args(argv)
    # Defensive fallback: every subparser sets `func` via set_defaults, but a
    # future subcommand that forgets gets help text instead of an AttributeError.
    fn = getattr(args, "func", None)
    if fn is None:
        parser.print_help()
        return 1
    return fn(args)
1087
+
1088
+
1089
# Support `python -m cloudcap.cli` in addition to the console-script entry point.
if __name__ == "__main__":
    sys.exit(main())
@@ -0,0 +1,66 @@
1
+ Metadata-Version: 2.4
2
+ Name: cloudcap-cli
3
+ Version: 0.1.1
4
+ Summary: CloudCap CLI — terminal client for the CloudCap HTTP API
5
+ Author: CloudCap
6
+ License: Proprietary
7
+ Keywords: cloudcap,cli,terraform,cloud
8
+ Classifier: Development Status :: 4 - Beta
9
+ Classifier: Environment :: Console
10
+ Classifier: Intended Audience :: Developers
11
+ Classifier: Programming Language :: Python :: 3
12
+ Classifier: Programming Language :: Python :: 3.11
13
+ Classifier: Programming Language :: Python :: 3.12
14
+ Classifier: Programming Language :: Python :: 3.13
15
+ Classifier: Topic :: Utilities
16
+ Requires-Python: >=3.11
17
+ Description-Content-Type: text/markdown
18
+ Requires-Dist: httpx>=0.27.0
19
+ Provides-Extra: dev
20
+ Requires-Dist: pytest>=8.2.0; extra == "dev"
21
+ Requires-Dist: build>=1.2.0; extra == "dev"
22
+
23
+ # cloudcap-cli (PyPI)
24
+
25
+ Terminal client for the CloudCap HTTP API: evaluate Terraform plans, deploy, optimize, and manage context from your shell.
26
+
27
+ ## Install
28
+
29
+ ```bash
30
+ pipx install cloudcap-cli
31
+ # or
32
+ pip install cloudcap-cli
33
+ ```
34
+
35
+ Requires **Python 3.11+**. Only dependency: **httpx**.
36
+
37
+ ## Quick start
38
+
39
+ ```bash
40
+ export CLOUDCAP_URL="https://your-api.example.com/v1"
41
+ cloudcap login --token "YOUR_BEARER_OR_PAT"
42
+ cloudcap whoami
43
+ cloudcap evaluate plan.json --region us-east-1
44
+ ```
45
+
46
+ Machine-readable output:
47
+
48
+ ```bash
49
+ cloudcap --output json whoami
50
+ cloudcap --output json pipeline status RUN_ID
51
+ ```
52
+
53
+ Full documentation: `docs/cli-install.md` in the CloudCap repository (relative links do not resolve on PyPI).
54
+
55
+ ## Configuration
56
+
57
+ - File: `~/.cloudcap/config.json` (created by `cloudcap login`)
58
+ - Env overrides: `CLOUDCAP_URL`, `CLOUDCAP_TOKEN`, `CLOUDCAP_ORG_ID`, `CLOUDCAP_ENV_ID`
59
+
60
+ ## Docker (CI)
61
+
62
+ See `Dockerfile` in this directory; build with a wheel or pin `pip install cloudcap-cli==VERSION`.
63
+
64
+ ## Server package
65
+
66
+ The API server is published separately as **`cloudcap-backend`**. Install the CLI with `pip install cloudcap-cli` without pulling server dependencies. The command on PATH remains **`cloudcap`**.
@@ -0,0 +1,8 @@
1
+ cloudcap/__init__.py,sha256=SxV54EJhZYEQPgu-gpKOsow8Aec92_ZjHvFjSMGPygU,66
2
+ cloudcap/__main__.py,sha256=od4Kh2VAfhyaEMP2JyBYk39XzvP_oSshuOyErv29R1k,87
3
+ cloudcap/cli.py,sha256=tyzkBftsKaHHJU8BMmO5CT5mHXfjlxMavbbdGwbNMwE,41179
4
+ cloudcap_cli-0.1.1.dist-info/METADATA,sha256=ogYXTO3InqpzgIRfbVwmxJSZYmCewjygDi6xZpnLzcQ,1933
5
+ cloudcap_cli-0.1.1.dist-info/WHEEL,sha256=aeYiig01lYGDzBgS8HxWXOg3uV61G9ijOsup-k9o1sk,91
6
+ cloudcap_cli-0.1.1.dist-info/entry_points.txt,sha256=Izqnma61C2Vgej0KX_yxMSnqM-o2nyA6aQ0DEcUCM_8,47
7
+ cloudcap_cli-0.1.1.dist-info/top_level.txt,sha256=yFx9-Mtm09ABXokvV8GQHwMfXWAw46FUQAYYqx-elC4,9
8
+ cloudcap_cli-0.1.1.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (82.0.1)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ cloudcap = cloudcap.cli:main
@@ -0,0 +1 @@
1
+ cloudcap