scc-cli 1.4.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of scc-cli might be problematic. Click here for more details.
- scc_cli/__init__.py +15 -0
- scc_cli/audit/__init__.py +37 -0
- scc_cli/audit/parser.py +191 -0
- scc_cli/audit/reader.py +180 -0
- scc_cli/auth.py +145 -0
- scc_cli/claude_adapter.py +485 -0
- scc_cli/cli.py +259 -0
- scc_cli/cli_admin.py +706 -0
- scc_cli/cli_audit.py +245 -0
- scc_cli/cli_common.py +166 -0
- scc_cli/cli_config.py +527 -0
- scc_cli/cli_exceptions.py +705 -0
- scc_cli/cli_helpers.py +244 -0
- scc_cli/cli_init.py +272 -0
- scc_cli/cli_launch.py +1454 -0
- scc_cli/cli_org.py +1428 -0
- scc_cli/cli_support.py +322 -0
- scc_cli/cli_team.py +892 -0
- scc_cli/cli_worktree.py +865 -0
- scc_cli/config.py +583 -0
- scc_cli/console.py +562 -0
- scc_cli/constants.py +79 -0
- scc_cli/contexts.py +377 -0
- scc_cli/deprecation.py +54 -0
- scc_cli/deps.py +189 -0
- scc_cli/docker/__init__.py +127 -0
- scc_cli/docker/core.py +466 -0
- scc_cli/docker/credentials.py +726 -0
- scc_cli/docker/launch.py +604 -0
- scc_cli/doctor/__init__.py +99 -0
- scc_cli/doctor/checks.py +1074 -0
- scc_cli/doctor/render.py +346 -0
- scc_cli/doctor/types.py +66 -0
- scc_cli/errors.py +288 -0
- scc_cli/evaluation/__init__.py +27 -0
- scc_cli/evaluation/apply_exceptions.py +207 -0
- scc_cli/evaluation/evaluate.py +97 -0
- scc_cli/evaluation/models.py +80 -0
- scc_cli/exit_codes.py +55 -0
- scc_cli/git.py +1521 -0
- scc_cli/json_command.py +166 -0
- scc_cli/json_output.py +96 -0
- scc_cli/kinds.py +62 -0
- scc_cli/marketplace/__init__.py +123 -0
- scc_cli/marketplace/adapter.py +74 -0
- scc_cli/marketplace/compute.py +377 -0
- scc_cli/marketplace/constants.py +87 -0
- scc_cli/marketplace/managed.py +135 -0
- scc_cli/marketplace/materialize.py +723 -0
- scc_cli/marketplace/normalize.py +548 -0
- scc_cli/marketplace/render.py +257 -0
- scc_cli/marketplace/resolve.py +459 -0
- scc_cli/marketplace/schema.py +506 -0
- scc_cli/marketplace/sync.py +260 -0
- scc_cli/marketplace/team_cache.py +195 -0
- scc_cli/marketplace/team_fetch.py +688 -0
- scc_cli/marketplace/trust.py +244 -0
- scc_cli/models/__init__.py +41 -0
- scc_cli/models/exceptions.py +273 -0
- scc_cli/models/plugin_audit.py +434 -0
- scc_cli/org_templates.py +269 -0
- scc_cli/output_mode.py +167 -0
- scc_cli/panels.py +113 -0
- scc_cli/platform.py +350 -0
- scc_cli/profiles.py +960 -0
- scc_cli/remote.py +443 -0
- scc_cli/schemas/__init__.py +1 -0
- scc_cli/schemas/org-v1.schema.json +456 -0
- scc_cli/schemas/team-config.v1.schema.json +163 -0
- scc_cli/sessions.py +425 -0
- scc_cli/setup.py +588 -0
- scc_cli/source_resolver.py +470 -0
- scc_cli/stats.py +378 -0
- scc_cli/stores/__init__.py +13 -0
- scc_cli/stores/exception_store.py +251 -0
- scc_cli/subprocess_utils.py +88 -0
- scc_cli/teams.py +382 -0
- scc_cli/templates/__init__.py +2 -0
- scc_cli/templates/org/__init__.py +0 -0
- scc_cli/templates/org/minimal.json +19 -0
- scc_cli/templates/org/reference.json +74 -0
- scc_cli/templates/org/strict.json +38 -0
- scc_cli/templates/org/teams.json +42 -0
- scc_cli/templates/statusline.sh +75 -0
- scc_cli/theme.py +348 -0
- scc_cli/ui/__init__.py +124 -0
- scc_cli/ui/branding.py +68 -0
- scc_cli/ui/chrome.py +395 -0
- scc_cli/ui/dashboard/__init__.py +62 -0
- scc_cli/ui/dashboard/_dashboard.py +677 -0
- scc_cli/ui/dashboard/loaders.py +395 -0
- scc_cli/ui/dashboard/models.py +184 -0
- scc_cli/ui/dashboard/orchestrator.py +390 -0
- scc_cli/ui/formatters.py +443 -0
- scc_cli/ui/gate.py +350 -0
- scc_cli/ui/help.py +157 -0
- scc_cli/ui/keys.py +538 -0
- scc_cli/ui/list_screen.py +431 -0
- scc_cli/ui/picker.py +700 -0
- scc_cli/ui/prompts.py +200 -0
- scc_cli/ui/wizard.py +675 -0
- scc_cli/update.py +680 -0
- scc_cli/utils/__init__.py +39 -0
- scc_cli/utils/fixit.py +264 -0
- scc_cli/utils/fuzzy.py +124 -0
- scc_cli/utils/locks.py +101 -0
- scc_cli/utils/ttl.py +376 -0
- scc_cli/validate.py +455 -0
- scc_cli-1.4.1.dist-info/METADATA +369 -0
- scc_cli-1.4.1.dist-info/RECORD +113 -0
- scc_cli-1.4.1.dist-info/WHEEL +4 -0
- scc_cli-1.4.1.dist-info/entry_points.txt +2 -0
- scc_cli-1.4.1.dist-info/licenses/LICENSE +21 -0
scc_cli/doctor/checks.py
ADDED
|
@@ -0,0 +1,1074 @@
|
|
|
1
|
+
"""Health check functions for the doctor module.
|
|
2
|
+
|
|
3
|
+
This module contains all check functions organized by category:
|
|
4
|
+
- JSON validation helpers
|
|
5
|
+
- Environment checks (Git, Docker, WSL2, Workspace)
|
|
6
|
+
- Configuration checks
|
|
7
|
+
- Organization & Marketplace checks
|
|
8
|
+
- Cache & State checks
|
|
9
|
+
|
|
10
|
+
All check functions return CheckResult or CheckResult | None.
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
from __future__ import annotations
|
|
14
|
+
|
|
15
|
+
import json
|
|
16
|
+
import os
|
|
17
|
+
from datetime import datetime, timezone
|
|
18
|
+
from pathlib import Path
|
|
19
|
+
from typing import Any, cast
|
|
20
|
+
|
|
21
|
+
from ..theme import Indicators
|
|
22
|
+
from .types import CheckResult, JsonValidationResult
|
|
23
|
+
|
|
24
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
25
|
+
# JSON Validation Helpers
|
|
26
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def validate_json_file(file_path: Path) -> JsonValidationResult:
    """
    Validate a JSON file and extract detailed error information.

    A missing file is treated as valid (nothing to validate), matching the
    "no config means defaults" convention used by the callers in this module.

    Args:
        file_path: Path to the JSON file to validate

    Returns:
        JsonValidationResult with validation status and error details
    """
    if not file_path.exists():
        return JsonValidationResult(valid=True, file_path=file_path)

    try:
        content = file_path.read_text(encoding="utf-8")
    except (OSError, UnicodeDecodeError) as e:
        # UnicodeDecodeError is a ValueError, not an OSError: previously a
        # mis-encoded (non-UTF-8) file crashed the doctor run instead of
        # being reported as an unreadable file.
        return JsonValidationResult(
            valid=False,
            error_message=f"Cannot read file: {e}",
            file_path=file_path,
        )

    try:
        json.loads(content)
        return JsonValidationResult(valid=True, file_path=file_path)
    except json.JSONDecodeError as e:
        # Build a visual code frame pinpointing the syntax error.
        code_frame = format_code_frame(content, e.lineno, e.colno, file_path)
        return JsonValidationResult(
            valid=False,
            error_message=e.msg,
            line=e.lineno,
            column=e.colno,
            file_path=file_path,
            code_frame=code_frame,
        )
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def format_code_frame(
    content: str,
    error_line: int,
    error_col: int,
    file_path: Path,
    context_lines: int = 2,
) -> str:
    """
    Render a Rich-markup code frame pointing at an error location.

    Produces output shaped like:
        10 │ "selected_profile": "dev-team",
        11 │ "preferences": {
      → 12 │ "auto_update": true
           │              ^
        13 │ "show_tips": false
        14 │ }

    Args:
        content: The file content
        error_line: Line number where error occurred (1-indexed)
        error_col: Column number where error occurred (1-indexed)
        file_path: Path to the file (for display)
        context_lines: Number of lines to show before/after error

    Returns:
        Formatted code frame string with Rich markup
    """
    source_lines = content.splitlines()
    line_count = len(source_lines)

    # Window of lines around the error, clamped to the file.
    first = max(1, error_line - context_lines)
    last = min(line_count, error_line + context_lines)
    width = len(str(last))

    # Header identifies the file, followed by a blank spacer line.
    out: list[str] = [f"[dim]File: {file_path}[/dim]", ""]

    for number in range(first, last + 1):
        text = source_lines[number - 1] if number <= line_count else ""

        # Truncate long lines to prevent secret leakage (keep first 80 chars)
        if len(text) > 80:
            text = text[:77] + "..."

        escaped = _escape_rich(text)
        if number == error_line:
            # Highlighted error line, prefixed with an arrow indicator.
            out.append(
                f"[bold red]{Indicators.get('ARROW')} {number:>{width}} │[/bold red] "
                f"[white]{escaped}[/white]"
            )
            # Caret row pointing at the offending column.
            pad = " " * (width + 4 + max(0, error_col - 1))
            out.append(f"[bold red]{pad}^[/bold red]")
        else:
            # Dimmed surrounding context line.
            out.append(
                f"[dim]  {number:>{width}} │[/dim] "
                f"[dim]{escaped}[/dim]"
            )

    return "\n".join(out)
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
def _escape_rich(text: str) -> str:
|
|
136
|
+
"""Escape Rich markup characters in text."""
|
|
137
|
+
return text.replace("[", "\\[").replace("]", "\\]")
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
def get_json_error_hints(error_message: str) -> list[str]:
    """
    Get helpful hints based on common JSON error messages.

    Args:
        error_message: The JSON decode error message

    Returns:
        List of helpful hints for fixing the error
    """
    msg = error_message.lower()

    # Ordered rules: the first matching predicate supplies the hints.
    rules: list[tuple[bool, list[str]]] = [
        ("expecting" in msg and "," in msg, ["Missing comma between values"]),
        (
            "expecting property name" in msg,
            [
                "Trailing comma after last item (not allowed in JSON)",
                "Missing closing brace or bracket",
            ],
        ),
        (
            "expecting value" in msg,
            [
                "Missing value after colon or comma",
                "Empty array or object element",
            ],
        ),
        ("expecting ':'" in msg, ["Missing colon after property name"]),
        (
            "unterminated string" in msg or "invalid \\escape" in msg,
            ["Unclosed string quote or invalid escape sequence"],
        ),
        ("extra data" in msg, ["Multiple root objects (JSON must have single root)"]),
    ]

    for matched, hints in rules:
        if matched:
            return hints

    # Generic fallback when no known pattern matched.
    return ["Check JSON syntax near the indicated line"]
|
|
172
|
+
|
|
173
|
+
|
|
174
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
175
|
+
# Environment Checks (Git, Docker, WSL2, Workspace)
|
|
176
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
177
|
+
|
|
178
|
+
|
|
179
|
+
def check_git() -> CheckResult:
    """Check if Git is installed and accessible."""
    from .. import git as git_module

    # Success path first: report the detected version.
    if git_module.check_git_installed():
        return CheckResult(
            name="Git",
            passed=True,
            message="Git is installed and accessible",
            version=git_module.get_git_version(),
        )

    return CheckResult(
        name="Git",
        passed=False,
        message="Git is not installed or not in PATH",
        fix_hint="Install Git from https://git-scm.com/downloads",
        fix_url="https://git-scm.com/downloads",
        severity="error",
    )
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
def check_docker() -> CheckResult:
    """Check if Docker is installed and running."""
    from .. import docker as docker_module

    version = docker_module.get_docker_version()

    # No version at all means Docker is absent or its daemon is unreachable.
    if version is None:
        return CheckResult(
            name="Docker",
            passed=False,
            message="Docker is not installed or not running",
            fix_hint="Install Docker Desktop from https://docker.com/products/docker-desktop",
            fix_url="https://docker.com/products/docker-desktop",
            severity="error",
        )

    # Compare parsed version tuples against the minimum supported release.
    installed = docker_module._parse_version(version)
    minimum = docker_module._parse_version(docker_module.MIN_DOCKER_VERSION)

    if installed >= minimum:
        return CheckResult(
            name="Docker",
            passed=True,
            message="Docker is installed and meets version requirements",
            version=version,
        )

    return CheckResult(
        name="Docker",
        passed=False,
        message=f"Docker version {'.'.join(map(str, installed))} is below minimum {docker_module.MIN_DOCKER_VERSION}",
        version=version,
        fix_hint="Update Docker Desktop to the latest version",
        fix_url="https://docker.com/products/docker-desktop",
        severity="error",
    )
|
|
239
|
+
|
|
240
|
+
|
|
241
|
+
def check_docker_sandbox() -> CheckResult:
    """Check if Docker sandbox feature is available."""
    from .. import docker as docker_module

    # Positive branch first for the common healthy case.
    if docker_module.check_docker_sandbox():
        return CheckResult(
            name="Docker Sandbox",
            passed=True,
            message="Docker sandbox feature is available",
        )

    return CheckResult(
        name="Docker Sandbox",
        passed=False,
        message="Docker sandbox feature is not available",
        fix_hint=f"Requires Docker Desktop {docker_module.MIN_DOCKER_VERSION}+ with sandbox feature enabled",
        fix_url="https://docs.docker.com/desktop/features/sandbox/",
        severity="error",
    )
|
|
260
|
+
|
|
261
|
+
|
|
262
|
+
def check_docker_running() -> CheckResult:
    """Check if Docker daemon is running."""
    import subprocess

    # `docker info` only succeeds when the daemon answers; the timeout
    # keeps the doctor run from hanging on a wedged daemon.
    try:
        completed = subprocess.run(
            ["docker", "info"],
            capture_output=True,
            timeout=10,
        )
    except (subprocess.TimeoutExpired, FileNotFoundError):
        return CheckResult(
            name="Docker Daemon",
            passed=False,
            message="Could not connect to Docker daemon",
            fix_hint="Ensure Docker Desktop is running",
            severity="error",
        )

    if completed.returncode != 0:
        return CheckResult(
            name="Docker Daemon",
            passed=False,
            message="Docker daemon is not running",
            fix_hint="Start Docker Desktop or run 'sudo systemctl start docker'",
            severity="error",
        )

    return CheckResult(
        name="Docker Daemon",
        passed=True,
        message="Docker daemon is running",
    )
|
|
294
|
+
|
|
295
|
+
|
|
296
|
+
def check_wsl2() -> tuple[CheckResult, bool]:
    """Check WSL2 environment and return (result, is_wsl2)."""
    from .. import platform as platform_module

    inside_wsl2 = platform_module.is_wsl2()

    # Informational either way — being outside WSL2 is not a failure.
    message = (
        "Running in WSL2 (recommended for Windows)"
        if inside_wsl2
        else "Not running in WSL2"
    )
    result = CheckResult(
        name="WSL2 Environment",
        passed=True,
        message=message,
        severity="info",
    )
    return result, inside_wsl2
|
|
322
|
+
|
|
323
|
+
|
|
324
|
+
def check_workspace_path(workspace: Path | None = None) -> CheckResult:
    """Check if workspace path is optimal (not on Windows mount in WSL2)."""
    from .. import platform as platform_module

    # Nothing to evaluate without a workspace.
    if workspace is None:
        return CheckResult(
            name="Workspace Path",
            passed=True,
            message="No workspace specified",
            severity="info",
        )

    # /mnt/c-style paths inside WSL2 suffer a large filesystem penalty.
    slow_mount = platform_module.is_wsl2() and platform_module.is_windows_mount_path(
        workspace
    )
    if slow_mount:
        return CheckResult(
            name="Workspace Path",
            passed=False,
            message=f"Workspace is on Windows filesystem: {workspace}",
            fix_hint="Move project to ~/projects inside WSL for better performance",
            severity="warning",
        )

    return CheckResult(
        name="Workspace Path",
        passed=True,
        message=f"Workspace path is optimal: {workspace}",
    )
|
|
350
|
+
|
|
351
|
+
|
|
352
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
353
|
+
# Configuration Checks
|
|
354
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
355
|
+
|
|
356
|
+
|
|
357
|
+
def check_user_config_valid() -> CheckResult:
    """Check if user configuration file is valid JSON.

    Validates ~/.config/scc/config.json for JSON syntax errors
    and provides helpful error messages with code frames.

    Returns:
        CheckResult with user config validation status.
    """
    from .. import config

    config_file = config.CONFIG_FILE

    # Absent config is fine — the tool falls back to defaults.
    if not config_file.exists():
        return CheckResult(
            name="User Config",
            passed=True,
            message="No user config file (using defaults)",
            severity="info",
        )

    validation = validate_json_file(config_file)

    if validation.valid:
        return CheckResult(
            name="User Config",
            passed=True,
            message=f"User config is valid JSON: {config_file}",
        )

    # Location-qualified one-line summary of the syntax error.
    summary = f"Invalid JSON in {config_file.name}"
    if validation.line is not None:
        summary += f" at line {validation.line}"
    if validation.column is not None:
        summary += f", column {validation.column}"

    # Multi-line fix hint: raw error, likely causes, then how to edit.
    hint_lines = [f"Error: {validation.error_message}", "Hints:"]
    hint_lines.extend(
        f"  • {hint}" for hint in get_json_error_hints(validation.error_message or "")
    )
    hint_lines.append(f"Edit with: $EDITOR {config_file}")

    return CheckResult(
        name="User Config",
        passed=False,
        message=summary,
        fix_hint="\n".join(hint_lines),
        severity="error",
        code_frame=validation.code_frame,
    )
|
|
410
|
+
|
|
411
|
+
|
|
412
|
+
def check_config_directory() -> CheckResult:
    """Check if configuration directory exists and is writable.

    Creates the directory (with parents) when missing, then probes
    writability with a throwaway marker file.

    Returns:
        CheckResult describing directory existence/writability.
    """
    from .. import config

    config_dir = config.CONFIG_DIR

    if not config_dir.exists():
        try:
            config_dir.mkdir(parents=True, exist_ok=True)
            return CheckResult(
                name="Config Directory",
                passed=True,
                message=f"Created config directory: {config_dir}",
            )
        except OSError:
            # Broadened from PermissionError: mkdir can also raise
            # FileExistsError (a regular file occupies the path) or other
            # OS errors, which previously crashed the doctor run instead
            # of producing a failed check.
            return CheckResult(
                name="Config Directory",
                passed=False,
                message=f"Cannot create config directory: {config_dir}",
                fix_hint="Check permissions on parent directory",
                severity="error",
            )

    # Probe writability with a temporary marker file.
    test_file = config_dir / ".write_test"
    try:
        test_file.touch()
        test_file.unlink()
        return CheckResult(
            name="Config Directory",
            passed=True,
            message=f"Config directory is writable: {config_dir}",
        )
    except OSError:
        # PermissionError is a subclass of OSError, so one clause covers
        # both (the original redundantly listed them separately).
        return CheckResult(
            name="Config Directory",
            passed=False,
            message=f"Config directory is not writable: {config_dir}",
            fix_hint=f"Check permissions: chmod 755 {config_dir}",
            severity="error",
        )
|
|
453
|
+
|
|
454
|
+
|
|
455
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
456
|
+
# Organization & Marketplace Checks
|
|
457
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
458
|
+
|
|
459
|
+
|
|
460
|
+
def load_cached_org_config() -> dict[str, Any] | None:
    """Load cached organization config from cache directory.

    Returns:
        Cached org config dict if valid, None otherwise.
    """
    from .. import config

    cache_file = config.CACHE_DIR / "org_config.json"

    # EAFP: a missing file surfaces as FileNotFoundError (an OSError),
    # so one read attempt covers both "absent" and "unreadable".
    try:
        raw = cache_file.read_text()
    except OSError:
        return None

    try:
        return cast(dict[str, Any], json.loads(raw))
    except json.JSONDecodeError:
        return None
|
|
478
|
+
|
|
479
|
+
|
|
480
|
+
def check_org_config_reachable() -> CheckResult | None:
    """Check if organization config URL is reachable.

    Returns:
        CheckResult if org config is configured, None for standalone mode.
    """
    from .. import config
    from ..remote import fetch_org_config

    user_config = config.load_user_config()

    # Standalone installs have no org config to reach.
    if user_config.get("standalone"):
        return None

    # Nothing to check without a configured source URL.
    org_source = user_config.get("organization_source")
    if not org_source:
        return None
    url = org_source.get("url")
    if not url:
        return None

    try:
        org_config, _etag, status = fetch_org_config(
            url, auth=org_source.get("auth"), etag=None
        )
    except Exception as e:
        return CheckResult(
            name="Org Config",
            passed=False,
            message=f"Failed to fetch org config: {e}",
            fix_hint="Check network connection and URL",
            severity="error",
        )

    # Auth-specific HTTP failures get targeted messages and hints.
    auth_failures = {
        401: (
            f"Authentication required (401) for {url}",
            "Configure auth with: scc setup",
        ),
        403: (
            f"Access denied (403) for {url}",
            "Check your access permissions",
        ),
    }
    if status in auth_failures:
        message, hint = auth_failures[status]
        return CheckResult(
            name="Org Config",
            passed=False,
            message=message,
            fix_hint=hint,
            severity="error",
        )

    # Any other non-success (or an empty payload) is a generic fetch failure.
    if status != 200 or org_config is None:
        return CheckResult(
            name="Org Config",
            passed=False,
            message=f"Failed to fetch org config (status: {status})",
            fix_hint="Check URL and network connection",
            severity="error",
        )

    org_name = org_config.get("organization", {}).get("name", "Unknown")
    return CheckResult(
        name="Org Config",
        passed=True,
        message=f"Connected to: {org_name}",
    )
|
|
551
|
+
|
|
552
|
+
|
|
553
|
+
def check_marketplace_auth_available() -> CheckResult | None:
    """Check if marketplace authentication token is available.

    Resolves the selected profile's marketplace from the cached org
    config, then verifies the marketplace's declared auth spec
    (e.g. ``env:VAR`` or a command spec) yields a token.

    Returns:
        CheckResult if marketplace is configured, None otherwise
        (no org config, no selected profile, or profile unknown).
    """
    from .. import config
    from ..remote import resolve_auth

    user_config = config.load_user_config()
    org_config = load_cached_org_config()

    # Skip if no org config (nothing cached yet, or standalone install)
    if org_config is None:
        return None

    # Skip if no profile selected
    profile_name = user_config.get("selected_profile")
    if not profile_name:
        return None

    # Find the profile; a stale selection pointing at a removed profile
    # is silently skipped rather than reported here.
    profiles = org_config.get("profiles", {})
    profile = profiles.get(profile_name)
    if not profile:
        return None

    # Find the marketplace (dict-based schema: name -> marketplace entry)
    marketplace_name = profile.get("marketplace")
    marketplaces = org_config.get("marketplaces", {})
    marketplace = marketplaces.get(marketplace_name)

    if marketplace is None:
        return CheckResult(
            name="Marketplace Auth",
            passed=False,
            message=f"Marketplace '{marketplace_name}' not found in org config",
            severity="error",
        )

    # Check auth requirement; absent auth means a public marketplace.
    auth_spec = marketplace.get("auth")

    if auth_spec is None:
        return CheckResult(
            name="Marketplace Auth",
            passed=True,
            message="Public marketplace (no auth needed)",
        )

    # Try to resolve auth. Only the spec NAME is ever echoed in messages,
    # never the resolved token value.
    try:
        token = resolve_auth(auth_spec)
        if token:
            return CheckResult(
                name="Marketplace Auth",
                passed=True,
                message=f"{auth_spec} is set",
            )
        else:
            # Provide helpful hint based on auth type:
            # "env:VAR" specs suggest exporting the variable; anything
            # else is treated as a command spec to re-run manually.
            if auth_spec.startswith("env:"):
                var_name = auth_spec.split(":", 1)[1]
                hint = f"Set with: export {var_name}=your-token"
            else:
                cmd = auth_spec.split(":", 1)[1] if ":" in auth_spec else auth_spec
                hint = f"Run manually to debug: {cmd}"

            return CheckResult(
                name="Marketplace Auth",
                passed=False,
                message=f"{auth_spec} not set or invalid",
                fix_hint=hint,
                severity="error",
            )
    except Exception as e:
        # resolve_auth may shell out or read the environment; any failure
        # is reported as a check failure rather than crashing doctor.
        return CheckResult(
            name="Marketplace Auth",
            passed=False,
            message=f"Auth resolution failed: {e}",
            severity="error",
        )
|
|
635
|
+
|
|
636
|
+
|
|
637
|
+
def check_credential_injection() -> CheckResult | None:
    """Check what credentials will be injected into Docker container.

    Shows env var NAMES only, never values. Prevents confusion about
    whether tokens are being passed to the container.

    Mirrors the profile/marketplace resolution used by
    check_marketplace_auth_available, but reports the injection plan
    instead of token availability.

    Returns:
        CheckResult showing injection status, None if no profile.
    """
    from .. import config

    user_config = config.load_user_config()
    org_config = load_cached_org_config()

    # Skip if no org config
    if org_config is None:
        return None

    # Skip if no profile selected
    profile_name = user_config.get("selected_profile")
    if not profile_name:
        return None

    # Find the profile
    profiles = org_config.get("profiles", {})
    profile = profiles.get(profile_name)
    if not profile:
        return None

    # Find the marketplace (dict-based schema: name -> marketplace entry)
    marketplace_name = profile.get("marketplace")
    marketplaces = org_config.get("marketplaces", {})
    marketplace = marketplaces.get(marketplace_name)

    # Unknown marketplace is reported by the auth check; skip here.
    if marketplace is None:
        return None

    # Check auth requirement; absent auth means nothing to inject.
    auth_spec = marketplace.get("auth")

    if auth_spec is None:
        return CheckResult(
            name="Container Injection",
            passed=True,
            message="No credentials needed (public marketplace)",
        )

    # Determine what env vars will be injected (names only).
    env_vars = []

    if auth_spec.startswith("env:"):
        var_name = auth_spec.split(":", 1)[1]
        env_vars.append(var_name)

        # Add standard vars based on marketplace type, so containers see
        # the conventional token name even when the spec uses a custom one.
        marketplace_type = marketplace.get("type")
        if marketplace_type == "gitlab" and var_name != "GITLAB_TOKEN":
            env_vars.append("GITLAB_TOKEN")
        elif marketplace_type == "github" and var_name != "GITHUB_TOKEN":
            env_vars.append("GITHUB_TOKEN")

    if env_vars:
        env_list = ", ".join(env_vars)
        return CheckResult(
            name="Container Injection",
            passed=True,
            message=f"Will inject [{env_list}] into Docker env",
        )
    else:
        # Non-env specs (command-based auth) resolve at launch time, so
        # there is no fixed env var list to display.
        return CheckResult(
            name="Container Injection",
            passed=True,
            message="Command-based auth (resolved at runtime)",
        )
|
|
711
|
+
|
|
712
|
+
|
|
713
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
714
|
+
# Cache & State Checks
|
|
715
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
716
|
+
|
|
717
|
+
|
|
718
|
+
def check_cache_readable() -> CheckResult:
    """Check if organization config cache is readable and valid.

    Uses enhanced error display with code frames for JSON syntax errors.

    Returns:
        CheckResult with cache status.
    """
    from .. import config

    cache_file = config.CACHE_DIR / "org_config.json"

    # No cache yet is healthy: it is populated lazily on first use.
    if not cache_file.exists():
        return CheckResult(
            name="Org Cache",
            passed=True,
            message="No cache file (will fetch on first use)",
            severity="info",
        )

    # Use the new validation helper for enhanced error display
    result = validate_json_file(cache_file)

    if result.valid:
        # NOTE(review): the file is read and parsed a second time here
        # (validate_json_file does not return the parsed payload), so a
        # concurrent rewrite between the two reads could still raise —
        # hence the except clause below.
        try:
            content = cache_file.read_text()
            org_config = json.loads(content)

            # Calculate fingerprint: short content hash so users can
            # compare cache state across machines/runs.
            import hashlib

            fingerprint = hashlib.sha256(content.encode()).hexdigest()[:12]

            org_name = org_config.get("organization", {}).get("name", "Unknown")
            return CheckResult(
                name="Org Cache",
                passed=True,
                message=f"Cache valid: {org_name} (fingerprint: {fingerprint})",
            )
        except (json.JSONDecodeError, OSError) as e:
            return CheckResult(
                name="Org Cache",
                passed=False,
                message=f"Cannot read cache file: {e}",
                fix_hint="Run 'scc setup' to refresh organization config",
                severity="error",
            )

    # Invalid JSON - build detailed error message with the error location.
    error_msg = "Cache file is corrupted (invalid JSON)"
    if result.line is not None:
        error_msg += f" at line {result.line}"
    if result.column is not None:
        error_msg += f", column {result.column}"

    # Get helpful hints for the specific decode error.
    hints = get_json_error_hints(result.error_message or "")
    fix_hint = f"Error: {result.error_message}\n"
    fix_hint += "Hints:\n"
    for hint in hints:
        fix_hint += f"  • {hint}\n"
    fix_hint += "Fix: Run 'scc setup' to refresh organization config"

    return CheckResult(
        name="Org Cache",
        passed=False,
        message=error_msg,
        fix_hint=fix_hint,
        severity="error",
        code_frame=result.code_frame,
    )
|
|
789
|
+
|
|
790
|
+
|
|
791
|
+
def check_cache_ttl_status() -> CheckResult | None:
    """Check if cache is within TTL (time-to-live).

    Returns:
        CheckResult with TTL status, None if no cache metadata.
    """
    from .. import config

    meta_file = config.CACHE_DIR / "cache_meta.json"

    if not meta_file.exists():
        return None

    try:
        meta = json.loads(meta_file.read_text())
    except (json.JSONDecodeError, OSError):
        return CheckResult(
            name="Cache TTL",
            passed=False,
            message="Cache metadata is corrupted",
            fix_hint="Run 'scc setup' to refresh organization config",
            severity="warning",
        )

    expires_at_str = meta.get("org_config", {}).get("expires_at")

    if not expires_at_str:
        return CheckResult(
            name="Cache TTL",
            passed=True,
            message="No expiration set in cache",
            severity="info",
        )

    try:
        # Parse ISO format datetime; fromisoformat needs "Z" spelled as an offset.
        expires_at = datetime.fromisoformat(expires_at_str.replace("Z", "+00:00"))
        # Fix: a naive (tz-less) timestamp previously raised TypeError when
        # compared with the aware `now` below and was misreported as an
        # invalid expiration date. Interpret naive timestamps as UTC instead.
        if expires_at.tzinfo is None:
            expires_at = expires_at.replace(tzinfo=timezone.utc)
        now = datetime.now(timezone.utc)

        if now < expires_at:
            hours = (expires_at - now).total_seconds() / 3600
            return CheckResult(
                name="Cache TTL",
                passed=True,
                message=f"Cache valid for {hours:.1f} more hours",
            )
        hours = (now - expires_at).total_seconds() / 3600
        return CheckResult(
            name="Cache TTL",
            passed=False,
            message=f"Cache expired {hours:.1f} hours ago",
            fix_hint="Run 'scc setup' to refresh organization config",
            severity="warning",
        )
    except (ValueError, TypeError):
        return CheckResult(
            name="Cache TTL",
            passed=False,
            message="Invalid expiration date in cache metadata",
            fix_hint="Run 'scc setup' to refresh organization config",
            severity="warning",
        )
|
|
858
|
+
|
|
859
|
+
|
|
860
|
+
def check_migration_status() -> CheckResult:
    """Report whether legacy configuration has been migrated.

    Returns:
        CheckResult with migration status.
    """
    from .. import config

    legacy_dir = config.LEGACY_CONFIG_DIR
    has_legacy = legacy_dir.exists()
    has_new = config.CONFIG_DIR.exists()

    if has_legacy:
        if has_new:
            # Migration already happened, but the old directory lingers.
            return CheckResult(
                name="Migration",
                passed=False,
                message=f"Legacy config still exists at {legacy_dir}",
                fix_hint="You may delete the old directory manually",
                severity="warning",
            )
        # Legacy only: migration has not run yet.
        return CheckResult(
            name="Migration",
            passed=False,
            message="Config migration needed",
            fix_hint="Run any scc command to trigger automatic migration",
            severity="warning",
        )

    # Fresh install, or migration completed and cleaned up.
    return CheckResult(
        name="Migration",
        passed=True,
        message="No legacy configuration found",
    )
|
|
897
|
+
|
|
898
|
+
|
|
899
|
+
def check_exception_stores() -> CheckResult:
    """Check if exception stores are readable and valid.

    Validates both user and repo exception stores:
    - JSON parse errors
    - Schema version compatibility
    - Backup files from corruption recovery

    Returns:
        CheckResult with exception store status.
    """
    from ..stores.exception_store import RepoStore, UserStore

    issues: list[str] = []
    warnings: list[str] = []

    def _inspect_store(label: str, store) -> None:
        # Shared validation for one store: unreadable files become issues,
        # newer-schema files and leftover corruption backups become warnings.
        # (Previously duplicated inline for the user and repo stores.)
        store_path = store.path

        if store_path.exists():
            try:
                store_file = store.read()
                if store_file.schema_version > 1:
                    warnings.append(f"{label} store uses newer schema v{store_file.schema_version}")
            except Exception as e:
                issues.append(f"{label} store corrupt: {e}")

        # Backup files indicate past corruption recovery.
        backups = list(store_path.parent.glob(f"{store_path.name}.bak-*"))
        if backups:
            warnings.append(f"Found {len(backups)} {label.lower()} store backup(s)")

    # Check user store
    _inspect_store("User", UserStore())

    # Check repo store (if in a git repo)
    try:
        _inspect_store("Repo", RepoStore(Path.cwd()))
    except Exception:
        # Not in a repo or repo store not accessible - that's fine
        pass

    # Build result
    if issues:
        return CheckResult(
            name="Exception Stores",
            passed=False,
            message="; ".join(issues),
            fix_hint="Run 'scc exceptions reset --user --yes' to reset corrupt stores",
            severity="error",
        )

    if warnings:
        return CheckResult(
            name="Exception Stores",
            passed=True,
            message="; ".join(warnings),
            fix_hint="Consider upgrading SCC or running 'scc exceptions cleanup'",
            severity="warning",
        )

    return CheckResult(
        name="Exception Stores",
        passed=True,
        message="Exception stores OK",
    )
|
|
981
|
+
|
|
982
|
+
|
|
983
|
+
def check_proxy_environment() -> CheckResult:
    """Report proxy-related environment variables.

    This is an informational check that detects common proxy configurations.
    It never fails - just provides visibility into the environment.

    Returns:
        CheckResult with proxy environment info (always passes, severity=info).
    """
    candidate_names = (
        "HTTP_PROXY",
        "http_proxy",
        "HTTPS_PROXY",
        "https_proxy",
        "NO_PROXY",
        "no_proxy",
    )

    # Keep only the variables that are set to a non-empty value.
    active = [name for name in candidate_names if os.environ.get(name)]

    if active:
        message = f"Proxy configured: {', '.join(active)}"
    else:
        message = "No proxy environment variables detected"

    return CheckResult(
        name="Proxy Environment",
        passed=True,
        message=message,
        severity="info",
    )
|
|
1017
|
+
|
|
1018
|
+
|
|
1019
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
1020
|
+
# Check Orchestration
|
|
1021
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
1022
|
+
|
|
1023
|
+
|
|
1024
|
+
def run_all_checks() -> list[CheckResult]:
    """Run all health checks and return list of results.

    Includes both environment checks and organization/marketplace checks.

    Returns:
        List of all CheckResult objects (excluding None results).
    """
    # Environment checks (always produce a result)
    results: list[CheckResult] = [
        check_git(),
        check_docker(),
        check_docker_sandbox(),
        check_docker_running(),
    ]

    wsl2_result, _ = check_wsl2()
    results.append(wsl2_result)

    results.append(check_config_directory())

    # User config validation (JSON syntax check)
    results.append(check_user_config_valid())

    # Organization/marketplace checks may return None when not applicable.
    for optional_check in (
        check_org_config_reachable,
        check_marketplace_auth_available,
        check_credential_injection,
    ):
        outcome = optional_check()
        if outcome is not None:
            results.append(outcome)

    # Cache checks
    results.append(check_cache_readable())

    ttl_outcome = check_cache_ttl_status()
    if ttl_outcome is not None:
        results.append(ttl_outcome)

    # Migration check
    results.append(check_migration_status())

    # Exception stores check
    results.append(check_exception_stores())

    return results
|