npmctl 0.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- npmctl/__init__.py +3 -0
- npmctl/__main__.py +5 -0
- npmctl/adoption.py +5 -0
- npmctl/apply.py +238 -0
- npmctl/cli.py +407 -0
- npmctl/client/__init__.py +5 -0
- npmctl/client/access_lists.py +10 -0
- npmctl/client/auth.py +5 -0
- npmctl/client/base.py +275 -0
- npmctl/client/certificates.py +10 -0
- npmctl/client/contracts.py +30 -0
- npmctl/client/proxy_hosts.py +10 -0
- npmctl/config.py +40 -0
- npmctl/diagnostics.py +41 -0
- npmctl/errors.py +31 -0
- npmctl/loader.py +281 -0
- npmctl/logging.py +13 -0
- npmctl/metadata.py +78 -0
- npmctl/migrations/__init__.py +5 -0
- npmctl/migrations/base.py +18 -0
- npmctl/migrations/registry.py +86 -0
- npmctl/migrations/v1.py +7 -0
- npmctl/models.py +750 -0
- npmctl/operational.py +245 -0
- npmctl/output.py +50 -0
- npmctl/planner.py +459 -0
- npmctl/plugins.py +95 -0
- npmctl/py.typed +0 -0
- npmctl/schema.py +170 -0
- npmctl/validation.py +5 -0
- npmctl-0.3.1.dist-info/METADATA +208 -0
- npmctl-0.3.1.dist-info/RECORD +34 -0
- npmctl-0.3.1.dist-info/WHEEL +4 -0
- npmctl-0.3.1.dist-info/entry_points.txt +3 -0
npmctl/__init__.py
ADDED
npmctl/__main__.py
ADDED
npmctl/adoption.py
ADDED
npmctl/apply.py
ADDED
|
@@ -0,0 +1,238 @@
|
|
|
1
|
+
"""Apply owner-scoped plans to NPM."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from dataclasses import dataclass, field
|
|
6
|
+
from typing import Any
|
|
7
|
+
|
|
8
|
+
from npmctl.client import NpmClient
|
|
9
|
+
from npmctl.errors import ApiError, ConflictError, ValidationError
|
|
10
|
+
from npmctl.metadata import merge_managed_meta
|
|
11
|
+
from npmctl.models import (
|
|
12
|
+
DesiredAccessList,
|
|
13
|
+
DesiredCertificate,
|
|
14
|
+
DesiredGenericResource,
|
|
15
|
+
DesiredProxyHost,
|
|
16
|
+
ExistingResource,
|
|
17
|
+
PlanAction,
|
|
18
|
+
ResourceKind,
|
|
19
|
+
)
|
|
20
|
+
from npmctl.planner import Plan, PlanOperation
|
|
21
|
+
from npmctl.schema import Capabilities
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
@dataclass(slots=True)
|
|
25
|
+
class ApplyResult:
|
|
26
|
+
"""Result of applying a plan."""
|
|
27
|
+
|
|
28
|
+
applied: bool
|
|
29
|
+
mutations: list[dict[str, Any]] = field(default_factory=list)
|
|
30
|
+
|
|
31
|
+
def to_dict(self) -> dict[str, Any]:
|
|
32
|
+
return {"applied": self.applied, "mutations": list(self.mutations)}
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class ApplyEngine:
    """Executes a validated plan in dependency order.

    Mutations are issued through ``client``; cross-resource references
    (a proxy host pointing at a certificate or access list) are resolved
    via ``created_by_resource_id``, which maps a desired resource_id to
    the remote resource that now backs it.
    """

    def __init__(self, *, client: NpmClient, capabilities: Capabilities) -> None:
        self.client = client
        self.capabilities = capabilities
        # resource_id -> remote resource; seeded from the plan in apply()
        # and extended as _create() makes new resources.
        self.created_by_resource_id: dict[str, ExistingResource] = {}

    def apply(self, plan: Plan) -> ApplyResult:
        """Apply the plan. Conflicts prevent all mutations."""

        if plan.conflicts:
            raise ConflictError("refusing to apply plan with conflicts")
        # Pre-seed the reference map with every operation that already has a
        # remote counterpart, so references to unchanged (NOOP/UPDATE/ADOPT)
        # resources resolve without a create happening first.
        for operation in plan.operations:
            if operation.desired is not None and operation.existing is not None:
                self.created_by_resource_id.setdefault(operation.desired.identity.resource_id, operation.existing)
        result = ApplyResult(applied=True)
        # Dependency order: referenced kinds (certificates, access lists)
        # are created/updated before the hosts that point at them.
        for operation in _ordered_operations(plan.operations):
            if operation.action == PlanAction.NOOP:
                continue
            mutation = self._apply_operation(operation)
            result.mutations.append(mutation)
        return result

    def _apply_operation(self, operation: PlanOperation) -> dict[str, Any]:
        """Dispatch one operation to its action handler.

        Raises:
            ValidationError: for any action with no handler here.
        """
        if operation.action == PlanAction.CREATE:
            return self._create(operation)
        if operation.action == PlanAction.UPDATE:
            return self._update(operation)
        if operation.action == PlanAction.ADOPT:
            return self._adopt(operation)
        if operation.action == PlanAction.DELETE:
            return self._delete(operation)
        raise ValidationError(f"unsupported apply operation {operation.action}")

    def _create(self, operation: PlanOperation) -> dict[str, Any]:
        """Create the desired resource remotely and record it for later refs."""
        desired = _require_desired(operation)
        payload = self._payload_for(desired)
        created = self.client.create_resource(desired.kind, payload)
        # Later operations may reference this resource by its resource_id.
        self.created_by_resource_id[desired.identity.resource_id] = created
        return {
            "action": "create",
            "kind": desired.kind.value,
            "resource_id": desired.identity.resource_id,
            "id": created.id,
        }

    def _update(self, operation: PlanOperation) -> dict[str, Any]:
        """Push the desired payload over the existing remote resource."""
        desired = _require_desired(operation)
        existing = _require_existing(operation)
        payload = self._merge_existing_with_desired(existing, desired)
        cap = self.capabilities.for_kind(desired.kind)
        # HTTP verb comes from the detected endpoint capabilities; PUT default.
        updated = self.client.update_resource(desired.kind, existing.id, payload, method=cap.update_method or "put")
        return {
            "action": "update",
            "kind": desired.kind.value,
            "resource_id": desired.identity.resource_id,
            "id": updated.id,
        }

    def _adopt(self, operation: PlanOperation) -> dict[str, Any]:
        """Take ownership of an existing resource by writing managed metadata.

        The remote resource's own fields are preserved; only ``meta`` is
        merged with the desired managed metadata.
        """
        desired = _require_desired(operation)
        existing = _require_existing(operation)
        payload = _updateable_existing_payload(existing)
        payload["meta"] = merge_managed_meta(payload.get("meta"), desired.meta)
        cap = self.capabilities.for_kind(desired.kind)
        updated = self.client.update_resource(desired.kind, existing.id, payload, method=cap.update_method or "put")
        return {
            "action": "adopt",
            "kind": desired.kind.value,
            "resource_id": desired.identity.resource_id,
            "id": updated.id,
        }

    def _delete(self, operation: PlanOperation) -> dict[str, Any]:
        """Delete the existing remote resource.

        Raises:
            ApiError: when the client reports the delete as unsuccessful.
        """
        existing = _require_existing(operation)
        deleted = self.client.delete_resource(existing.kind, existing.id)
        if not deleted:
            raise ApiError(f"delete failed for {existing.kind.value} id={existing.id}")
        # Unmanaged resources may have no identity; report null resource_id.
        resource_id = existing.identity.resource_id if existing.identity else None
        return {"action": "delete", "kind": existing.kind.value, "resource_id": resource_id, "id": existing.id}

    def _merge_existing_with_desired(
        self,
        existing: ExistingResource,
        desired: DesiredProxyHost | DesiredCertificate | DesiredAccessList | DesiredGenericResource,
    ) -> dict[str, Any]:
        """Desired payload plus ``meta`` merged from the remote resource."""
        payload = self._payload_for(desired)
        payload["meta"] = merge_managed_meta(existing.raw.get("meta"), desired.meta)
        return payload

    def _payload_for(
        self, desired: DesiredProxyHost | DesiredCertificate | DesiredAccessList | DesiredGenericResource
    ) -> dict[str, Any]:
        """Render the API payload; proxy hosts get their refs resolved to ids."""
        if isinstance(desired, DesiredProxyHost):
            certificate_id = self._resolve_reference(desired.certificate_ref, ResourceKind.CERTIFICATE)
            access_list_id = self._resolve_reference(desired.access_list_ref, ResourceKind.ACCESS_LIST)
            return desired.to_payload(certificate_id=certificate_id, access_list_id=access_list_id)
        return desired.to_payload()

    def _resolve_reference(self, ref: str | None, kind: ResourceKind) -> int | None:
        """Resolve a desired-state reference to a remote numeric id.

        Raises:
            ValidationError: when the reference is unknown, or resolves to a
                resource of a different kind than expected.
        """
        if ref is None:
            return None
        created = self.created_by_resource_id.get(ref)
        if created is not None:
            if created.kind != kind:
                raise ValidationError(f"reference {ref!r} resolved to {created.kind.value}, expected {kind.value}")
            return created.id
        raise ValidationError(f"unresolved {kind.value} reference: {ref}")
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def _ordered_operations(operations: tuple[PlanOperation, ...]) -> list[PlanOperation]:
    """Sequence operations so dependencies exist before dependents.

    Creates/updates/adopts run first, with referenced kinds (certificates,
    access lists) before the hosts that use them; deletes run afterwards in
    the reverse dependency order. NOOP operations are dropped entirely.
    """
    upsert_rank = {
        ResourceKind.CERTIFICATE: 0,
        ResourceKind.ACCESS_LIST: 1,
        ResourceKind.REDIRECTION_HOST: 2,
        ResourceKind.DEAD_HOST: 2,
        ResourceKind.STREAM: 2,
        ResourceKind.USER: 2,
        ResourceKind.SETTING: 2,
        ResourceKind.PROXY_HOST: 3,
    }
    removal_rank = {
        ResourceKind.PROXY_HOST: 0,
        ResourceKind.REDIRECTION_HOST: 1,
        ResourceKind.DEAD_HOST: 1,
        ResourceKind.STREAM: 1,
        ResourceKind.USER: 1,
        ResourceKind.SETTING: 1,
        ResourceKind.ACCESS_LIST: 2,
        ResourceKind.CERTIFICATE: 3,
    }
    upserts: list[PlanOperation] = []
    removals: list[PlanOperation] = []
    for candidate in operations:
        if candidate.action == PlanAction.DELETE:
            removals.append(candidate)
        elif candidate.action in {PlanAction.CREATE, PlanAction.UPDATE, PlanAction.ADOPT}:
            upserts.append(candidate)
    # list.sort is stable, so original order is kept within each rank.
    upserts.sort(key=lambda candidate: upsert_rank[candidate.kind])
    removals.sort(key=lambda candidate: removal_rank[candidate.kind])
    return upserts + removals
|
|
174
|
+
|
|
175
|
+
|
|
176
|
+
def _require_desired(
    operation: PlanOperation,
) -> DesiredProxyHost | DesiredCertificate | DesiredAccessList | DesiredGenericResource:
    """Return the operation's desired resource.

    Raises:
        ValidationError: when the operation carries no desired resource.
    """
    desired = operation.desired
    if desired is None:
        raise ValidationError(f"operation {operation.action} requires desired resource")
    return desired
|
|
182
|
+
|
|
183
|
+
|
|
184
|
+
def _require_existing(operation: PlanOperation) -> ExistingResource:
    """Return the operation's existing remote resource.

    Raises:
        ValidationError: when the operation carries no existing resource.
    """
    existing = operation.existing
    if existing is None:
        raise ValidationError(f"operation {operation.action} requires existing resource")
    return existing
|
|
188
|
+
|
|
189
|
+
|
|
190
|
+
def _updateable_existing_payload(existing: ExistingResource) -> dict[str, Any]:
    """Build an update payload from an existing resource's raw API data.

    Only the fields NPM accepts on update for the resource's kind are
    copied. For proxy hosts, null values are normalised to the API's
    defaults so the payload round-trips cleanly on a PUT.

    Fix: the loop/comprehension variable was named ``field``, shadowing
    ``dataclasses.field`` imported at module level; renamed to ``name``.

    Raises:
        KeyError: when ``existing.kind`` has no update allow-list here.
    """
    allowed = {
        ResourceKind.PROXY_HOST: (
            "domain_names",
            "forward_scheme",
            "forward_host",
            "forward_port",
            "certificate_id",
            "ssl_forced",
            "hsts_enabled",
            "hsts_subdomains",
            "http2_support",
            "block_exploits",
            "caching_enabled",
            "allow_websocket_upgrade",
            "access_list_id",
            "advanced_config",
            "enabled",
            "locations",
            "meta",
        ),
        ResourceKind.ACCESS_LIST: ("name", "satisfy_any", "pass_auth", "items", "clients", "meta"),
        ResourceKind.CERTIFICATE: ("provider", "nice_name", "domain_names", "meta"),
        ResourceKind.REDIRECTION_HOST: ("domain_names", "forward_domain_name", "meta"),
        ResourceKind.DEAD_HOST: ("domain_names", "meta"),
        ResourceKind.STREAM: ("incoming_port", "forward_host", "forward_port", "protocol", "meta"),
        ResourceKind.USER: ("name", "email", "roles", "is_disabled", "meta"),
        ResourceKind.SETTING: ("name", "value", "meta"),
    }[existing.kind]
    payload = {name: existing.raw[name] for name in allowed if name in existing.raw}
    if existing.kind == ResourceKind.PROXY_HOST:
        # NPM may return null for unset columns; substitute write-safe defaults.
        defaults = {
            "access_list_id": 0,
            "certificate_id": 0,
            "ssl_forced": 0,
            "hsts_enabled": 0,
            "hsts_subdomains": 0,
            "http2_support": 0,
            "block_exploits": 0,
            "caching_enabled": 0,
            "allow_websocket_upgrade": 0,
            "advanced_config": "",
            "enabled": 1,
            "locations": [],
        }
        for name, default in defaults.items():
            if payload.get(name) is None:
                payload[name] = default
    return payload
|
npmctl/cli.py
ADDED
|
@@ -0,0 +1,407 @@
|
|
|
1
|
+
"""Command-line interface for npmctl."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import argparse
|
|
6
|
+
import json
|
|
7
|
+
import os
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
from typing import Any, Sequence
|
|
10
|
+
|
|
11
|
+
from npmctl import __version__
|
|
12
|
+
from npmctl.apply import ApplyEngine
|
|
13
|
+
from npmctl.client import NpmClient
|
|
14
|
+
from npmctl.config import apply_config, load_config
|
|
15
|
+
from npmctl.diagnostics import doctor_report, environment_report
|
|
16
|
+
from npmctl.errors import ApiError, CapabilityError, ConflictError, MigrationError, NpmctlError, ValidationError
|
|
17
|
+
from npmctl.loader import load_desired_state
|
|
18
|
+
from npmctl.migrations import migrate_path
|
|
19
|
+
from npmctl.operational import (
|
|
20
|
+
compliance_artifacts,
|
|
21
|
+
drift_report,
|
|
22
|
+
rollback_plan,
|
|
23
|
+
transaction_report,
|
|
24
|
+
validate_compliance_gate,
|
|
25
|
+
validate_plan_output,
|
|
26
|
+
write_json,
|
|
27
|
+
write_state_backup,
|
|
28
|
+
)
|
|
29
|
+
from npmctl.output import format_plan_text, write_error, write_output
|
|
30
|
+
from npmctl.plugins import PluginRegistry
|
|
31
|
+
from npmctl.planner import PlannerOptions, compute_plan
|
|
32
|
+
from npmctl.schema import Capabilities, load_openapi_schema
|
|
33
|
+
|
|
34
|
+
# Process exit codes returned by main(); a stable contract for scripting.
EXIT_OK = 0
EXIT_CONFLICT = 1  # plan has conflicts, or drift was detected
EXIT_USAGE_OR_VALIDATION = 2  # bad usage, validation or migration failure
EXIT_API = 3  # an NPM API request failed
EXIT_CAPABILITY = 4  # a required NPM endpoint capability is missing
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def build_parser() -> argparse.ArgumentParser:
    """Build the CLI parser.

    Global connection flags default from the NPM_* environment variables.
    Subcommands mark themselves with ``needs_api`` /
    ``needs_api_optional_schema`` defaults so _dispatch knows when to
    construct an API client.

    Fix: NPM_TIMEOUT_S is now read from the environment once instead of
    twice (the old double ``os.getenv`` could disagree between the test
    and the conversion).
    """

    parser = argparse.ArgumentParser(
        prog="npmctl",
        description="Owner-scoped plan/apply/adopt controller for Nginx Proxy Manager resources.",
    )
    parser.add_argument("--version", action="version", version=f"npmctl {__version__}")
    parser.add_argument("--config", help="TOML config file with [npmctl] settings")
    parser.add_argument("--base-url", default=os.getenv("NPM_BASE_URL"), help="NPM API URL, e.g. http://host:81/api")
    parser.add_argument("--identity", default=os.getenv("NPM_IDENTITY"), help="NPM login identity")
    parser.add_argument("--secret", default=os.getenv("NPM_SECRET"), help="NPM login secret")
    # Read once; a malformed value still raises ValueError here, unchanged.
    timeout_env = os.getenv("NPM_TIMEOUT_S")
    parser.add_argument(
        "--timeout",
        default=float(timeout_env) if timeout_env else None,
        type=float,
        help="HTTP timeout seconds",
    )
    parser.add_argument("--output", choices=("text", "json"), default="text", help="Output format")

    sub = parser.add_subparsers(dest="command", required=True)

    # Local-only commands: no API client required.
    validate = sub.add_parser("validate", help="Validate desired state")
    validate.add_argument("desired_state")

    migrate = sub.add_parser("migrate", help="Migrate desired-state schema")
    migrate.add_argument("path")
    migrate.add_argument("--write", action="store_true", help="Write migrated files")
    migrate.add_argument("--check", action="store_true", help="Fail if migration is needed")

    health = sub.add_parser("health", help="Call NPM API health endpoint")
    health.set_defaults(needs_api=True)

    doctor = sub.add_parser("doctor", help="Diagnose config, API reachability, and capabilities")
    doctor.set_defaults(needs_api=False)

    env = sub.add_parser("env", help="Show redacted npmctl environment diagnostics")
    env.set_defaults(needs_api=False)

    version = sub.add_parser("version", help="Show machine-readable version metadata")
    version.add_argument("--json", action="store_true", help="Emit JSON version metadata")

    completion = sub.add_parser("completion", help="Generate shell completion script")
    completion.add_argument("shell", choices=("bash", "powershell", "zsh"))

    # Schema commands: capabilities/check can work from a local schema file.
    schema = sub.add_parser("schema", help="OpenAPI schema commands")
    schema_sub = schema.add_subparsers(dest="schema_command", required=True)
    fetch = schema_sub.add_parser("fetch", help="Fetch /schema from NPM")
    fetch.add_argument("--write", help="Write schema JSON to path")
    fetch.set_defaults(needs_api=True)
    caps = schema_sub.add_parser("capabilities", help="Show detected endpoint capabilities")
    caps.add_argument("--schema", help="Schema JSON path; fetches from NPM when omitted")
    caps.set_defaults(needs_api_optional_schema=True)
    check = schema_sub.add_parser("check", help="Validate required endpoint capabilities")
    check.add_argument("--schema", help="Schema JSON path; fetches from NPM when omitted")
    check.set_defaults(needs_api_optional_schema=True)

    # Reconcile commands share the desired_state positional and owner flags.
    plan = sub.add_parser("plan", help="Compute owner-scoped CRUD plan")
    _add_reconcile_args(plan)
    plan.add_argument("--validate-output", action="store_true", help="Validate plan output against npmctl schema")
    plan.set_defaults(needs_api=True)

    apply = sub.add_parser("apply", help="Apply a clean owner-scoped CRUD plan")
    _add_reconcile_args(apply)
    apply.add_argument("--dry-run", action="store_true", help="Plan but do not mutate NPM")
    apply.add_argument("--backup-dir", help="Write remote state backup before apply")
    apply.add_argument("--report", help="Write structured apply transaction report")
    apply.add_argument("--rollback-plan", help="Write best-effort rollback plan")
    apply.add_argument("--audit-log", dest="audit_log_path", help="Write local audit log JSON for this apply")
    apply.add_argument("--validate-output", action="store_true", help="Validate plan output against npmctl schema")
    apply.set_defaults(needs_api=True)

    adopt = sub.add_parser("adopt", help="Adopt unmanaged matching resources by writing metadata")
    _add_reconcile_args(adopt)
    adopt.add_argument("--allow-field-drift", action="store_true", help="Allow adopting resources whose fields differ")
    adopt.add_argument("--force", action="store_true", help="Alias for --allow-field-drift with explicit intent")
    adopt.add_argument("--validate-output", action="store_true", help="Validate plan output against npmctl schema")
    adopt.set_defaults(needs_api=True, adopt=True)

    drift = sub.add_parser("drift", help="Report remote drift without applying mutations")
    _add_reconcile_args(drift)
    drift.set_defaults(needs_api=True)

    audit = sub.add_parser("audit-log", help="Read NPM audit log entries")
    audit.add_argument("--since", help="Optional since filter passed to NPM")
    audit.set_defaults(needs_api=True)

    compliance = sub.add_parser("compliance", help="Compliance artifact commands")
    compliance_sub = compliance.add_subparsers(dest="compliance_command", required=True)
    artifacts = compliance_sub.add_parser(
        "artifacts", help="Generate SBOM, provenance, scan, and release-gate artifacts"
    )
    artifacts.add_argument("--output-dir", required=True)
    artifacts.add_argument("--source-dir", default=".")
    artifacts.add_argument("--dist-dir")
    gate = compliance_sub.add_parser("gate", help="Validate generated compliance artifacts")
    gate.add_argument("--artifact-dir", required=True)

    plugins = sub.add_parser("plugins", help="Inspect runtime plugin discovery")
    plugins_sub = plugins.add_subparsers(dest="plugins_command", required=True)
    plugins_sub.add_parser("list", help="List discovered plugin providers")

    return parser
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def _add_reconcile_args(parser: argparse.ArgumentParser) -> None:
|
|
147
|
+
parser.add_argument("desired_state")
|
|
148
|
+
parser.add_argument("--owner", help="Limit operation to one owner scope")
|
|
149
|
+
parser.add_argument("--no-updates", action="store_true", help="Conflict on owned drift instead of updating")
|
|
150
|
+
parser.add_argument("--prune-owned", action="store_true", help="Delete owned resources absent from desired state")
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
def main(argv: Sequence[str] | None = None) -> int:
    """CLI entry point: parse arguments, dispatch, map errors to exit codes.

    Returns one of the EXIT_* constants. Error details are written with
    write_error in the selected output format; API/capability messages are
    redacted because they may embed the CLI identity/secret.
    """
    parser = build_parser()
    args = parser.parse_args(argv)
    try:
        # Config-file values are layered onto the parsed args before dispatch.
        apply_config(args, load_config(getattr(args, "config", None)))
        return _dispatch(args, parser)
    except ValidationError as exc:
        write_error(args.output, "validation_error", str(exc))
        return EXIT_USAGE_OR_VALIDATION
    except MigrationError as exc:
        write_error(args.output, "migration_error", str(exc))
        return EXIT_USAGE_OR_VALIDATION
    except ConflictError as exc:
        write_error(args.output, "conflict_error", str(exc))
        return EXIT_CONFLICT
    except CapabilityError as exc:
        # Redact credentials that could appear in request/URL details.
        write_error(args.output, "capability_error", _redact_cli_message(str(exc), args))
        return EXIT_CAPABILITY
    except ApiError as exc:
        write_error(args.output, "api_error", _redact_cli_message(str(exc), args))
        return EXIT_API
    except NpmctlError as exc:
        # Catch-all for the package error hierarchy; keep this clause last.
        write_error(args.output, "npmctl_error", str(exc))
        return EXIT_USAGE_OR_VALIDATION
|
|
177
|
+
|
|
178
|
+
|
|
179
|
+
def _dispatch(args: argparse.Namespace, parser: argparse.ArgumentParser) -> int:
    """Execute the parsed command and return an EXIT_* code.

    Purely local commands (validate, env, version, completion, compliance,
    plugins, migrate) are handled first; an NPM client is only constructed
    afterwards, for commands flagged with ``needs_api`` (or
    ``needs_api_optional_schema`` without a local --schema file).
    """
    if args.command == "validate":
        desired = load_desired_state(args.desired_state)
        payload = _desired_summary(desired)
        write_output(args.output, payload, _format_validate_text(payload))
        return EXIT_OK

    if args.command == "env":
        # environment_report is expected to redact sensitive values.
        payload = {"ok": True, "environment": environment_report(dict(os.environ))}
        write_output(args.output, payload, json.dumps(payload, indent=2, sort_keys=True))
        return EXIT_OK

    if args.command == "version":
        payload = {"package": "npmctl", "version": __version__, "schema_version": 1, "api_profile": "npm-2.10.4"}
        # --json or a global --output json both force the JSON rendering.
        text = json.dumps(payload, indent=2, sort_keys=True) if args.json or args.output == "json" else __version__
        write_output("json" if args.json else args.output, payload, text)
        return EXIT_OK

    if args.command == "completion":
        payload = {"ok": True, "shell": args.shell}
        write_output(args.output, payload, _completion_script(args.shell))
        return EXIT_OK

    if args.command == "compliance":
        if args.compliance_command == "gate":
            payload = validate_compliance_gate(args.artifact_dir)
            write_output(args.output, payload, json.dumps(payload, indent=2, sort_keys=True))
            return EXIT_OK if payload["ok"] else EXIT_USAGE_OR_VALIDATION
        paths = compliance_artifacts(
            args.output_dir,
            package_name="npmctl",
            version=__version__,
            source_dir=args.source_dir,
            dist_dir=args.dist_dir,
        )
        payload = {"ok": True, "artifacts": [str(path) for path in paths]}
        write_output(args.output, payload, json.dumps(payload, indent=2, sort_keys=True))
        return EXIT_OK

    if args.command == "plugins":
        registry = PluginRegistry.discover()
        payload = {"ok": True, "plugins": registry.to_dict()}
        write_output(args.output, payload, json.dumps(payload, indent=2, sort_keys=True))
        return EXIT_OK

    if args.command == "migrate":
        if args.write and args.check:
            raise ValidationError("migrate --write and --check cannot be combined")
        results = migrate_path(args.path, write=args.write)
        changed = [result for result in results if result.changed]
        payload = {
            # --check turns pending migrations into a failure.
            "ok": not bool(changed and args.check),
            "changed": len(changed),
            "files": [str(result.path) for result in changed],
            "written": bool(args.write),
        }
        write_output(args.output, payload, f"migrations needed: {len(changed)}\nwritten: {str(args.write).lower()}")
        return EXIT_USAGE_OR_VALIDATION if changed and args.check else EXIT_OK

    # From here on the command may need an API client; build it only when
    # required (schema capabilities/check skip it when a local file is given).
    client: NpmClient | None = None
    if getattr(args, "needs_api", False) or (
        getattr(args, "needs_api_optional_schema", False) and not getattr(args, "schema", None)
    ):
        _require_api_args(args, parser)
        client = NpmClient(
            base_url=args.base_url, identity=args.identity, secret=args.secret, timeout_s=args.timeout or 15.0
        )

    if args.command == "doctor":
        # doctor tolerates missing credentials: API checks are best-effort.
        health = None
        capabilities = None
        if args.base_url and args.identity and args.secret:
            client = NpmClient(
                base_url=args.base_url,
                identity=args.identity,
                secret=args.secret,
                timeout_s=args.timeout or 15.0,
            )
            health = client.health()
            capabilities = client.capabilities().to_dict()
        payload = doctor_report(args=args, health=health, capabilities=capabilities)
        write_output(args.output, payload, json.dumps(payload, indent=2, sort_keys=True))
        return EXIT_OK if payload["ok"] else EXIT_USAGE_OR_VALIDATION

    if args.command == "health":
        assert client is not None
        payload = client.health()
        write_output(args.output, payload, json.dumps(payload, indent=2, sort_keys=True))
        return EXIT_OK

    if args.command == "schema":
        return _schema_command(args, client)

    if args.command == "audit-log":
        assert client is not None
        payload = {"ok": True, "entries": client.audit_log(since=args.since)}
        write_output(args.output, payload, json.dumps(payload, indent=2, sort_keys=True))
        return EXIT_OK

    if args.command in {"plan", "apply", "adopt", "drift"}:
        assert client is not None
        desired = load_desired_state(args.desired_state)
        capabilities = client.capabilities()
        # Only list resource kinds the remote API actually exposes.
        existing = client.existing_state(
            include_certificates=capabilities.certificates.list,
            include_access_lists=capabilities.access_lists.list,
        )
        options = PlannerOptions(
            owner=args.owner,
            allow_updates=not args.no_updates,
            prune_owned=args.prune_owned,
            adopt=args.command == "adopt",
            # --force is an alias for --allow-field-drift; either disables
            # strict adoption and permits field drift.
            strict_adopt=not (getattr(args, "allow_field_drift", False) or getattr(args, "force", False)),
            allow_field_drift=getattr(args, "allow_field_drift", False) or getattr(args, "force", False),
        )
        plan = compute_plan(desired=desired, existing=existing, capabilities=capabilities, options=options)
        if args.command == "drift":
            payload = drift_report(plan)
            write_output(args.output, payload, json.dumps(payload, indent=2, sort_keys=True))
            return EXIT_OK if payload["ok"] else EXIT_CONFLICT
        plan_payload = plan.to_dict()
        if getattr(args, "validate_output", False):
            try:
                validate_plan_output(plan_payload)
            except ValueError as exc:
                raise ValidationError(str(exc)) from exc
        # plan (and apply --dry-run) stop before any mutation.
        if args.command == "plan" or getattr(args, "dry_run", False):
            write_output(args.output, plan_payload, format_plan_text(plan))
            return EXIT_OK if plan.ok else EXIT_CONFLICT
        if not plan.ok:
            write_output(args.output, plan_payload, format_plan_text(plan))
            return EXIT_CONFLICT
        # Backup of remote state happens before the first mutation.
        if getattr(args, "backup_dir", None):
            write_state_backup(args.backup_dir, existing)
        result = ApplyEngine(client=client, capabilities=capabilities).apply(plan)
        payload = transaction_report(plan, result)
        if getattr(args, "report", None):
            write_json(args.report, payload)
        if getattr(args, "rollback_plan", None):
            write_json(args.rollback_plan, rollback_plan(plan))
        if getattr(args, "audit_log_path", None):
            write_json(args.audit_log_path, {"ok": True, "operation": "apply", "summary": payload["summary"]})
        text = format_plan_text(plan) + f"\napplied: true\nmutations: {len(result.mutations)}"
        write_output(args.output, payload, text)
        return EXIT_OK

    parser.error(f"unsupported command: {args.command}")  # pragma: no cover - argparse exits
    return EXIT_USAGE_OR_VALIDATION  # pragma: no cover
|
|
327
|
+
|
|
328
|
+
|
|
329
|
+
def _schema_command(args: argparse.Namespace, client: NpmClient | None) -> int:
    """Handle the ``schema fetch|capabilities|check`` subcommands."""
    if args.schema_command == "fetch":
        assert client is not None
        payload = client.openapi_schema()
        rendered = json.dumps(payload, indent=2, sort_keys=True)
        if args.write:
            Path(args.write).write_text(rendered + "\n", encoding="utf-8")
        write_output(args.output, payload, rendered)
        return EXIT_OK
    # capabilities / check: prefer a local schema file, else fetch remotely.
    schema_doc = load_openapi_schema(args.schema) if args.schema else client.openapi_schema()  # type: ignore[union-attr]
    capabilities = Capabilities.from_openapi(schema_doc)
    payload = capabilities.to_dict()
    if args.schema_command == "check":
        proxy = capabilities.proxy_hosts
        payload["ok"] = all((proxy.list, proxy.create, proxy.update, proxy.delete))
        write_output(args.output, payload, json.dumps(payload, indent=2, sort_keys=True))
        return EXIT_OK if payload["ok"] else EXIT_CAPABILITY
    write_output(args.output, payload, json.dumps(payload, indent=2, sort_keys=True))
    return EXIT_OK
|
|
352
|
+
|
|
353
|
+
|
|
354
|
+
def _require_api_args(args: argparse.Namespace, parser: argparse.ArgumentParser) -> None:
|
|
355
|
+
if not (args.base_url and args.identity and args.secret):
|
|
356
|
+
parser.error("--base-url, --identity, and --secret are required, or set NPM_BASE_URL/NPM_IDENTITY/NPM_SECRET")
|
|
357
|
+
|
|
358
|
+
|
|
359
|
+
def _desired_summary(desired: Any) -> dict[str, Any]:
|
|
360
|
+
return {
|
|
361
|
+
"ok": True,
|
|
362
|
+
"schemaVersion": desired.schema_version,
|
|
363
|
+
"proxy_hosts": len(desired.proxy_hosts),
|
|
364
|
+
"certificates": len(desired.certificates),
|
|
365
|
+
"access_lists": len(desired.access_lists),
|
|
366
|
+
"redirection_hosts": len(desired.redirection_hosts),
|
|
367
|
+
"dead_hosts": len(desired.dead_hosts),
|
|
368
|
+
"streams": len(desired.streams),
|
|
369
|
+
"users": len(desired.users),
|
|
370
|
+
"settings": len(desired.settings),
|
|
371
|
+
"source_files": list(desired.source_files),
|
|
372
|
+
}
|
|
373
|
+
|
|
374
|
+
|
|
375
|
+
def _format_validate_text(payload: dict[str, Any]) -> str:
|
|
376
|
+
return (
|
|
377
|
+
"desired state valid\n"
|
|
378
|
+
f"proxy hosts: {payload['proxy_hosts']}\n"
|
|
379
|
+
f"certificates: {payload['certificates']}\n"
|
|
380
|
+
f"access lists: {payload['access_lists']}\n"
|
|
381
|
+
f"redirection hosts: {payload['redirection_hosts']}\n"
|
|
382
|
+
f"dead hosts: {payload['dead_hosts']}\n"
|
|
383
|
+
f"streams: {payload['streams']}\n"
|
|
384
|
+
f"users: {payload['users']}\n"
|
|
385
|
+
f"settings: {payload['settings']}"
|
|
386
|
+
)
|
|
387
|
+
|
|
388
|
+
|
|
389
|
+
def _redact_cli_message(message: str, args: argparse.Namespace) -> str:
|
|
390
|
+
redacted = message
|
|
391
|
+
for value in (getattr(args, "identity", None), getattr(args, "secret", None)):
|
|
392
|
+
if value:
|
|
393
|
+
redacted = redacted.replace(str(value), "***")
|
|
394
|
+
return redacted
|
|
395
|
+
|
|
396
|
+
|
|
397
|
+
def _completion_script(shell: str) -> str:
|
|
398
|
+
commands = "validate migrate health doctor env version completion schema plan apply adopt drift audit-log compliance plugins"
|
|
399
|
+
if shell == "powershell":
|
|
400
|
+
return f"Register-ArgumentCompleter -Native -CommandName npmctl -ScriptBlock {{ param($wordToComplete) '{commands}'.Split(' ') | Where-Object {{ $_ -like \"$wordToComplete*\" }} }}\n"
|
|
401
|
+
if shell == "zsh":
|
|
402
|
+
return f"#compdef npmctl\n_arguments '1:command:({commands})'\n"
|
|
403
|
+
return f'complete -W "{commands}" npmctl\n'
|
|
404
|
+
|
|
405
|
+
|
|
406
|
+
# Script entry: lets `python -m npmctl.cli` run the CLI; the process exit
# code is whatever main() returns.
if __name__ == "__main__":  # pragma: no cover
    raise SystemExit(main())
|