scc-cli 1.5.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of scc-cli might be problematic. Click here for more details.
- scc_cli/__init__.py +15 -0
- scc_cli/audit/__init__.py +37 -0
- scc_cli/audit/parser.py +191 -0
- scc_cli/audit/reader.py +180 -0
- scc_cli/auth.py +145 -0
- scc_cli/claude_adapter.py +485 -0
- scc_cli/cli.py +311 -0
- scc_cli/cli_common.py +190 -0
- scc_cli/cli_helpers.py +244 -0
- scc_cli/commands/__init__.py +20 -0
- scc_cli/commands/admin.py +708 -0
- scc_cli/commands/audit.py +246 -0
- scc_cli/commands/config.py +528 -0
- scc_cli/commands/exceptions.py +696 -0
- scc_cli/commands/init.py +272 -0
- scc_cli/commands/launch/__init__.py +73 -0
- scc_cli/commands/launch/app.py +1247 -0
- scc_cli/commands/launch/render.py +309 -0
- scc_cli/commands/launch/sandbox.py +135 -0
- scc_cli/commands/launch/workspace.py +339 -0
- scc_cli/commands/org/__init__.py +49 -0
- scc_cli/commands/org/_builders.py +264 -0
- scc_cli/commands/org/app.py +41 -0
- scc_cli/commands/org/import_cmd.py +267 -0
- scc_cli/commands/org/init_cmd.py +269 -0
- scc_cli/commands/org/schema_cmd.py +76 -0
- scc_cli/commands/org/status_cmd.py +157 -0
- scc_cli/commands/org/update_cmd.py +330 -0
- scc_cli/commands/org/validate_cmd.py +138 -0
- scc_cli/commands/support.py +323 -0
- scc_cli/commands/team.py +910 -0
- scc_cli/commands/worktree/__init__.py +72 -0
- scc_cli/commands/worktree/_helpers.py +57 -0
- scc_cli/commands/worktree/app.py +170 -0
- scc_cli/commands/worktree/container_commands.py +385 -0
- scc_cli/commands/worktree/context_commands.py +61 -0
- scc_cli/commands/worktree/session_commands.py +128 -0
- scc_cli/commands/worktree/worktree_commands.py +734 -0
- scc_cli/config.py +647 -0
- scc_cli/confirm.py +20 -0
- scc_cli/console.py +562 -0
- scc_cli/contexts.py +394 -0
- scc_cli/core/__init__.py +68 -0
- scc_cli/core/constants.py +101 -0
- scc_cli/core/errors.py +297 -0
- scc_cli/core/exit_codes.py +91 -0
- scc_cli/core/workspace.py +57 -0
- scc_cli/deprecation.py +54 -0
- scc_cli/deps.py +189 -0
- scc_cli/docker/__init__.py +127 -0
- scc_cli/docker/core.py +467 -0
- scc_cli/docker/credentials.py +726 -0
- scc_cli/docker/launch.py +595 -0
- scc_cli/doctor/__init__.py +105 -0
- scc_cli/doctor/checks/__init__.py +166 -0
- scc_cli/doctor/checks/cache.py +314 -0
- scc_cli/doctor/checks/config.py +107 -0
- scc_cli/doctor/checks/environment.py +182 -0
- scc_cli/doctor/checks/json_helpers.py +157 -0
- scc_cli/doctor/checks/organization.py +264 -0
- scc_cli/doctor/checks/worktree.py +278 -0
- scc_cli/doctor/render.py +365 -0
- scc_cli/doctor/types.py +66 -0
- scc_cli/evaluation/__init__.py +27 -0
- scc_cli/evaluation/apply_exceptions.py +207 -0
- scc_cli/evaluation/evaluate.py +97 -0
- scc_cli/evaluation/models.py +80 -0
- scc_cli/git.py +84 -0
- scc_cli/json_command.py +166 -0
- scc_cli/json_output.py +159 -0
- scc_cli/kinds.py +65 -0
- scc_cli/marketplace/__init__.py +123 -0
- scc_cli/marketplace/adapter.py +74 -0
- scc_cli/marketplace/compute.py +377 -0
- scc_cli/marketplace/constants.py +87 -0
- scc_cli/marketplace/managed.py +135 -0
- scc_cli/marketplace/materialize.py +846 -0
- scc_cli/marketplace/normalize.py +548 -0
- scc_cli/marketplace/render.py +281 -0
- scc_cli/marketplace/resolve.py +459 -0
- scc_cli/marketplace/schema.py +506 -0
- scc_cli/marketplace/sync.py +279 -0
- scc_cli/marketplace/team_cache.py +195 -0
- scc_cli/marketplace/team_fetch.py +689 -0
- scc_cli/marketplace/trust.py +244 -0
- scc_cli/models/__init__.py +41 -0
- scc_cli/models/exceptions.py +273 -0
- scc_cli/models/plugin_audit.py +434 -0
- scc_cli/org_templates.py +269 -0
- scc_cli/output_mode.py +167 -0
- scc_cli/panels.py +113 -0
- scc_cli/platform.py +350 -0
- scc_cli/profiles.py +960 -0
- scc_cli/remote.py +443 -0
- scc_cli/schemas/__init__.py +1 -0
- scc_cli/schemas/org-v1.schema.json +456 -0
- scc_cli/schemas/team-config.v1.schema.json +163 -0
- scc_cli/services/__init__.py +1 -0
- scc_cli/services/git/__init__.py +79 -0
- scc_cli/services/git/branch.py +151 -0
- scc_cli/services/git/core.py +216 -0
- scc_cli/services/git/hooks.py +108 -0
- scc_cli/services/git/worktree.py +444 -0
- scc_cli/services/workspace/__init__.py +36 -0
- scc_cli/services/workspace/resolver.py +223 -0
- scc_cli/services/workspace/suspicious.py +200 -0
- scc_cli/sessions.py +425 -0
- scc_cli/setup.py +589 -0
- scc_cli/source_resolver.py +470 -0
- scc_cli/stats.py +378 -0
- scc_cli/stores/__init__.py +13 -0
- scc_cli/stores/exception_store.py +251 -0
- scc_cli/subprocess_utils.py +88 -0
- scc_cli/teams.py +383 -0
- scc_cli/templates/__init__.py +2 -0
- scc_cli/templates/org/__init__.py +0 -0
- scc_cli/templates/org/minimal.json +19 -0
- scc_cli/templates/org/reference.json +74 -0
- scc_cli/templates/org/strict.json +38 -0
- scc_cli/templates/org/teams.json +42 -0
- scc_cli/templates/statusline.sh +75 -0
- scc_cli/theme.py +348 -0
- scc_cli/ui/__init__.py +154 -0
- scc_cli/ui/branding.py +68 -0
- scc_cli/ui/chrome.py +401 -0
- scc_cli/ui/dashboard/__init__.py +62 -0
- scc_cli/ui/dashboard/_dashboard.py +794 -0
- scc_cli/ui/dashboard/loaders.py +452 -0
- scc_cli/ui/dashboard/models.py +185 -0
- scc_cli/ui/dashboard/orchestrator.py +735 -0
- scc_cli/ui/formatters.py +444 -0
- scc_cli/ui/gate.py +350 -0
- scc_cli/ui/git_interactive.py +869 -0
- scc_cli/ui/git_render.py +176 -0
- scc_cli/ui/help.py +157 -0
- scc_cli/ui/keys.py +615 -0
- scc_cli/ui/list_screen.py +437 -0
- scc_cli/ui/picker.py +763 -0
- scc_cli/ui/prompts.py +201 -0
- scc_cli/ui/quick_resume.py +116 -0
- scc_cli/ui/wizard.py +576 -0
- scc_cli/update.py +680 -0
- scc_cli/utils/__init__.py +39 -0
- scc_cli/utils/fixit.py +264 -0
- scc_cli/utils/fuzzy.py +124 -0
- scc_cli/utils/locks.py +114 -0
- scc_cli/utils/ttl.py +376 -0
- scc_cli/validate.py +455 -0
- scc_cli-1.5.3.dist-info/METADATA +401 -0
- scc_cli-1.5.3.dist-info/RECORD +153 -0
- scc_cli-1.5.3.dist-info/WHEEL +4 -0
- scc_cli-1.5.3.dist-info/entry_points.txt +2 -0
- scc_cli-1.5.3.dist-info/licenses/LICENSE +21 -0
scc_cli/stats.py
ADDED
|
@@ -0,0 +1,378 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Usage statistics tracking.
|
|
3
|
+
|
|
4
|
+
Phase 1: User-level only.
|
|
5
|
+
- Stats stored at ~/.cache/scc/usage.jsonl
|
|
6
|
+
- Users see only their own stats
|
|
7
|
+
- Manual aggregation via scc stats export
|
|
8
|
+
|
|
9
|
+
Handle:
|
|
10
|
+
- Session start/end recording
|
|
11
|
+
- Event JSONL file operations
|
|
12
|
+
- Stats aggregation and reporting
|
|
13
|
+
- Export functionality
|
|
14
|
+
"""
|
|
15
|
+
|
|
16
|
+
from __future__ import annotations
|
|
17
|
+
|
|
18
|
+
import getpass
|
|
19
|
+
import hashlib
|
|
20
|
+
import json
|
|
21
|
+
import uuid
|
|
22
|
+
from dataclasses import dataclass
|
|
23
|
+
from datetime import datetime, timedelta
|
|
24
|
+
from pathlib import Path
|
|
25
|
+
from typing import TYPE_CHECKING, Any
|
|
26
|
+
|
|
27
|
+
from scc_cli.config import CACHE_DIR
|
|
28
|
+
|
|
29
|
+
if TYPE_CHECKING:
|
|
30
|
+
pass
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
34
|
+
# Constants
|
|
35
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
36
|
+
|
|
37
|
+
USAGE_FILE = "usage.jsonl"
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
41
|
+
# Identity Pseudonymization
|
|
42
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def _get_machine_salt() -> str:
    """Build a per-machine salt used when hashing identifiers.

    Combines the hostname with the home directory path, so the salt is
    stable on a single machine but differs between machines. This keeps
    hashed identifiers from being correlated across hosts.
    """
    import socket

    parts = (socket.gethostname(), str(Path.home()))
    return ":".join(parts)
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def hash_identifier(identifier: str) -> str:
    """Pseudonymize an identifier with a salted one-way hash.

    Properties of the resulting hash:
    - Consistent: the same input always yields the same output on a machine
    - Irreversible: the original identifier cannot be recovered
    - Machine-specific: the salt differs per machine, so hashes differ too

    Args:
        identifier: The identifier to hash (e.g., username, email)

    Returns:
        A hex string hash of the identifier
    """
    salted = ":".join((_get_machine_salt(), identifier))
    digest = hashlib.sha256(salted.encode("utf-8"))
    return digest.hexdigest()[:32]
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def get_username() -> str:
    """Return the login name of the current user.

    Kept as a standalone wrapper so tests can mock it easily.
    """
    username = getpass.getuser()
    return username
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
87
|
+
# JSONL File Operations
|
|
88
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def _get_usage_file() -> Path:
    """Return the full path of the usage JSONL file inside the cache dir."""
    usage_path = CACHE_DIR / USAGE_FILE
    return usage_path
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
def _write_event(event: dict[str, Any]) -> None:
    """Append a single event record to the usage JSONL file.

    Args:
        event: Event payload to serialize as one JSON line
    """
    target = _get_usage_file()
    target.parent.mkdir(parents=True, exist_ok=True)

    line = json.dumps(event) + "\n"
    with open(target, "a", encoding="utf-8") as handle:
        handle.write(line)
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
def read_usage_events() -> list[dict[str, Any]]:
    """Load every event recorded in the usage JSONL file.

    Returns:
        A list of event dicts. An empty list is returned when the file is
        missing, unreadable, or empty. Lines that fail to parse as JSON
        are skipped without raising.
    """
    usage_file = _get_usage_file()

    if not usage_file.exists():
        return []

    parsed: list[dict[str, Any]] = []
    try:
        with open(usage_file, encoding="utf-8") as handle:
            for raw in handle:
                candidate = raw.strip()
                if not candidate:
                    continue
                try:
                    parsed.append(json.loads(candidate))
                except json.JSONDecodeError:
                    # Ignore malformed lines rather than failing the read.
                    continue
    except OSError:
        return []

    return parsed
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
140
|
+
# Session Recording
|
|
141
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
def record_session_start(
    session_id: str,
    project_name: str,
    team_name: str | None,
    expected_duration_hours: int,
    stats_config: dict[str, Any] | None = None,
) -> None:
    """Write a session_start event to the usage log.

    Args:
        session_id: Unique identifier for this session
        project_name: Name of the project/workspace
        team_name: Name of the team (optional)
        expected_duration_hours: Expected session duration from config
        stats_config: Stats configuration dict (optional). When its
            "enabled" flag is False, nothing is recorded.
    """
    # Respect the opt-out flag before doing any work.
    enabled = True
    identity_mode = "hash"
    if stats_config is not None:
        enabled = stats_config.get("enabled", True)
        identity_mode = stats_config.get("user_identity_mode", "hash")
    if not enabled:
        return

    event: dict[str, Any] = {
        "event_type": "session_start",
        "session_id": session_id,
        "timestamp": datetime.now().isoformat(),
        "project_name": project_name,
        "team_name": team_name,
        "expected_duration_hours": expected_duration_hours,
    }

    # "hash" mode attaches a pseudonymous user id; "none" omits it entirely.
    if identity_mode == "hash":
        event["user_id_hash"] = hash_identifier(get_username())

    _write_event(event)
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
def record_session_end(
    session_id: str,
    actual_duration_minutes: int,
    exit_status: str = "clean",
    stats_config: dict[str, Any] | None = None,
) -> None:
    """Write a session_end event to the usage log.

    Args:
        session_id: Unique identifier matching the session start
        actual_duration_minutes: Actual session duration in minutes
        exit_status: How the session ended ('clean', 'crash', 'interrupted')
        stats_config: Stats configuration dict (optional). When its
            "enabled" flag is False, nothing is recorded.
    """
    # Respect the opt-out flag before touching the log file.
    if stats_config is not None and not stats_config.get("enabled", True):
        return

    _write_event(
        {
            "event_type": "session_end",
            "session_id": session_id,
            "timestamp": datetime.now().isoformat(),
            "actual_duration_minutes": actual_duration_minutes,
            "exit_status": exit_status,
        }
    )
|
|
219
|
+
|
|
220
|
+
|
|
221
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
222
|
+
# Stats Report Dataclass
|
|
223
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
224
|
+
|
|
225
|
+
|
|
226
|
+
@dataclass
class StatsReport:
    """Aggregated usage statistics over a reporting period.

    Attributes:
        total_sessions: Number of sessions in the period
        total_duration_minutes: Sum of actual duration from completed sessions
        incomplete_sessions: Sessions without a session_end event
        by_project: Per-project breakdown {project: {sessions, duration_minutes}}
        period_start: Start of the reporting period
        period_end: End of the reporting period
    """

    total_sessions: int
    total_duration_minutes: int
    incomplete_sessions: int
    by_project: dict[str, dict[str, int]]
    period_start: datetime
    period_end: datetime

    def to_dict(self) -> dict[str, Any]:
        """Serialize the report into a JSON-compatible dict.

        Key insertion order is fixed so the exported JSON stays stable.
        """
        payload: dict[str, Any] = {
            "total_sessions": self.total_sessions,
            "total_duration_minutes": self.total_duration_minutes,
            "incomplete_sessions": self.incomplete_sessions,
            "by_project": self.by_project,
        }
        # Datetimes are not JSON-serializable; emit ISO-8601 strings.
        payload["period_start"] = self.period_start.isoformat()
        payload["period_end"] = self.period_end.isoformat()
        return payload
|
|
256
|
+
|
|
257
|
+
|
|
258
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
259
|
+
# Stats Aggregation
|
|
260
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
261
|
+
|
|
262
|
+
|
|
263
|
+
def get_stats(days: int | None = None) -> StatsReport:
    """Aggregate usage statistics from the recorded events.

    Args:
        days: Number of days to include (None for all time)

    Returns:
        StatsReport with aggregated statistics for the period.
    """
    events = read_usage_events()

    # Determine the reporting window. For "all time" the start is pinned to
    # datetime.min so the report still carries a concrete period.
    period_end = datetime.now()
    if days is not None:
        period_start = period_end - timedelta(days=days)
    else:
        period_start = datetime.min

    # Index start/end events by session id, dropping events outside the window.
    session_starts: dict[str, dict[str, Any]] = {}
    session_ends: dict[str, dict[str, Any]] = {}

    for event in events:
        event_time_str = event.get("timestamp")
        if event_time_str:
            try:
                event_time = datetime.fromisoformat(event_time_str)
                if days is not None and event_time < period_start:
                    continue
            except (ValueError, TypeError):
                # Unparseable timestamps are kept rather than discarded.
                pass

        event_type = event.get("event_type")
        session_id = event.get("session_id")

        if event_type == "session_start" and session_id:
            session_starts[session_id] = event
        elif event_type == "session_end" and session_id:
            session_ends[session_id] = event

    total_sessions = len(session_starts)
    incomplete_sessions = 0
    total_duration_minutes = 0
    by_project: dict[str, dict[str, int]] = {}

    for session_id, start_event in session_starts.items():
        project = start_event.get("project_name", "unknown")
        project_stats = by_project.setdefault(
            project, {"sessions": 0, "duration_minutes": 0}
        )
        project_stats["sessions"] += 1

        end_event = session_ends.get(session_id)
        if end_event is not None:
            # Only completed sessions contribute to duration totals.
            duration = end_event.get("actual_duration_minutes", 0)
            total_duration_minutes += duration
            project_stats["duration_minutes"] += duration
        else:
            incomplete_sessions += 1

    return StatsReport(
        total_sessions=total_sessions,
        total_duration_minutes=total_duration_minutes,
        incomplete_sessions=incomplete_sessions,
        by_project=by_project,
        # period_start is already datetime.min when days is None; the
        # original re-tested `days is not None` here redundantly.
        period_start=period_start,
        period_end=period_end,
    )
|
|
337
|
+
|
|
338
|
+
|
|
339
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
340
|
+
# Export Functions
|
|
341
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
342
|
+
|
|
343
|
+
|
|
344
|
+
def export_stats(days: int | None = None) -> str:
    """Serialize an aggregated stats report to pretty-printed JSON.

    Args:
        days: Number of days to include (None for all time)

    Returns:
        JSON string of StatsReport
    """
    return json.dumps(get_stats(days=days).to_dict(), indent=2)
|
|
355
|
+
|
|
356
|
+
|
|
357
|
+
def export_raw_events() -> str:
    """Serialize every recorded usage event as a pretty-printed JSON array.

    Returns:
        JSON string containing an array of all events
    """
    all_events = read_usage_events()
    return json.dumps(all_events, indent=2)
|
|
365
|
+
|
|
366
|
+
|
|
367
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
368
|
+
# Session ID Generation
|
|
369
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
370
|
+
|
|
371
|
+
|
|
372
|
+
def generate_session_id() -> str:
    """Create a fresh random session identifier.

    Returns:
        A UUID4 string used to correlate start/end events.
    """
    new_id = uuid.uuid4()
    return str(new_id)
|
|
@@ -0,0 +1,251 @@
|
|
|
1
|
+
"""Provide exception store implementations for SCC Phase 2.1.
|
|
2
|
+
|
|
3
|
+
Define storage backends for time-bounded exceptions:
|
|
4
|
+
- UserStore: Personal exceptions in ~/.config/scc/exceptions.json
|
|
5
|
+
- RepoStore: Shared repo exceptions in .scc/exceptions.json
|
|
6
|
+
|
|
7
|
+
Both stores implement the ExceptionStore protocol and handle:
|
|
8
|
+
- Reading/writing exception files with proper JSON formatting
|
|
9
|
+
- Pruning expired exceptions
|
|
10
|
+
- Backup-on-corrupt recovery
|
|
11
|
+
- Forward compatibility warnings for newer schema versions
|
|
12
|
+
"""
|
|
13
|
+
|
|
14
|
+
from __future__ import annotations
|
|
15
|
+
|
|
16
|
+
import json
|
|
17
|
+
from datetime import datetime, timezone
|
|
18
|
+
from pathlib import Path
|
|
19
|
+
from typing import Protocol
|
|
20
|
+
|
|
21
|
+
import scc_cli.config
|
|
22
|
+
from scc_cli.console import err_line
|
|
23
|
+
from scc_cli.models.exceptions import ExceptionFile
|
|
24
|
+
|
|
25
|
+
# Current schema version supported by this implementation
|
|
26
|
+
CURRENT_SCHEMA_VERSION = 1
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def _get_config_dir() -> Path:
    """Look up CONFIG_DIR at call time so monkeypatched values in tests
    take effect (a module-level copy would be frozen at import)."""
    config_dir = scc_cli.config.CONFIG_DIR
    return config_dir
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class ExceptionStore(Protocol):
    """Structural interface shared by user- and repo-scoped exception stores.

    Any store implementing these methods behaves interchangeably across
    the two scopes.
    """

    @property
    def path(self) -> Path:
        """Location of the backing exceptions file."""
        ...

    def read(self) -> ExceptionFile:
        """Load the stored exceptions.

        Returns:
            The parsed ExceptionFile. An empty one is returned when the
            file is absent, when it is corrupt (after backing it up), or
            when it was written by a newer schema (warn and ignore —
            fail-open for local stores).
        """
        ...

    def write(self, file: ExceptionFile) -> None:
        """Persist the given exceptions.

        Parent directories are created as needed; serialization is
        deterministic JSON (sorted keys, 2-space indent).
        """
        ...

    def prune_expired(self) -> int:
        """Drop expired exceptions from storage.

        Returns:
            How many exceptions were removed.
        """
        ...

    def backup(self) -> Path | None:
        """Copy the current file aside.

        Returns:
            Path to the backup, or None when no file exists.
        """
        ...

    def reset(self) -> None:
        """Delete the backing exceptions file entirely."""
        ...
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
class UserStore:
    """Store personal exceptions at ~/.config/scc/exceptions.json.

    Entries here are machine-local and never shared with the team.
    """

    @property
    def path(self) -> Path:
        """Return the path to the exceptions file."""
        return _get_config_dir() / "exceptions.json"

    def read(self) -> ExceptionFile:
        """Read exceptions from the user store."""
        target = self.path
        if not target.exists():
            return ExceptionFile()

        try:
            data = json.loads(target.read_text())
        except json.JSONDecodeError:
            # Unparseable file: preserve a copy, then start fresh.
            self._backup_corrupt()
            return ExceptionFile()

        # A newer writer may have produced a schema we can't interpret;
        # fail open (ignore local overrides) rather than crash.
        schema_version = data.get("schema_version", 1)
        if schema_version > CURRENT_SCHEMA_VERSION:
            err_line(
                f"⚠️ {self.path} was created by newer SCC (schema v{schema_version}).\n"
                f"   Local overrides ignored until you upgrade. Run: pip install --upgrade scc"
            )
            return ExceptionFile()

        return ExceptionFile.from_dict(data)

    def write(self, file: ExceptionFile) -> None:
        """Write exceptions to the user store."""
        target = self.path
        target.parent.mkdir(parents=True, exist_ok=True)
        target.write_text(file.to_json())

    def prune_expired(self) -> int:
        """Drop expired exceptions; return how many were removed."""
        current = self.read()
        before = len(current.exceptions)

        current.exceptions = [
            exc for exc in current.exceptions if not exc.is_expired()
        ]

        removed = before - len(current.exceptions)
        if removed > 0:
            self.write(current)

        return removed

    def backup(self) -> Path | None:
        """Copy the user store aside; return the backup path or None."""
        if not self.path.exists():
            return None

        stamp = datetime.now(timezone.utc).strftime("%Y%m%d%H%M%S")
        destination = self.path.parent / f"{self.path.name}.bak-{stamp}"
        destination.write_text(self.path.read_text())
        return destination

    def reset(self) -> None:
        """Delete the user store file if present."""
        if self.path.exists():
            self.path.unlink()

    def _backup_corrupt(self) -> None:
        """Preserve a corrupt file under a dated name and warn the user."""
        stamp = datetime.now(timezone.utc).strftime("%Y%m%d")
        destination = self.path.parent / f"{self.path.name}.bak-{stamp}"
        destination.write_text(self.path.read_text())
        err_line(
            f"⚠️ Local exceptions file corrupted. Backed up to {destination}.\n"
            f"   Run `scc doctor` for details."
        )
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
class RepoStore:
    """Store shared exceptions at <repo>/.scc/exceptions.json.

    The file may be committed (team-shared) or gitignored (personal
    repo-local workarounds).
    """

    def __init__(self, repo_root: Path) -> None:
        self._repo_root = repo_root
        self._path = repo_root / ".scc" / "exceptions.json"

    @property
    def path(self) -> Path:
        """Return the path to the exceptions file."""
        return self._path

    def read(self) -> ExceptionFile:
        """Read exceptions from the repo store."""
        if not self._path.exists():
            return ExceptionFile()

        try:
            data = json.loads(self._path.read_text())
        except json.JSONDecodeError:
            # Unparseable file: preserve a copy, then start fresh.
            self._backup_corrupt()
            return ExceptionFile()

        # A newer writer may have produced a schema we can't interpret;
        # fail open (ignore local overrides) rather than crash.
        schema_version = data.get("schema_version", 1)
        if schema_version > CURRENT_SCHEMA_VERSION:
            err_line(
                f"⚠️ {self._path} was created by newer SCC (schema v{schema_version}).\n"
                f"   Local overrides ignored until you upgrade. Run: pip install --upgrade scc"
            )
            return ExceptionFile()

        return ExceptionFile.from_dict(data)

    def write(self, file: ExceptionFile) -> None:
        """Write exceptions to the repo store."""
        self._path.parent.mkdir(parents=True, exist_ok=True)
        self._path.write_text(file.to_json())

    def prune_expired(self) -> int:
        """Drop expired exceptions; return how many were removed."""
        current = self.read()
        before = len(current.exceptions)

        current.exceptions = [
            exc for exc in current.exceptions if not exc.is_expired()
        ]

        removed = before - len(current.exceptions)
        if removed > 0:
            self.write(current)

        return removed

    def backup(self) -> Path | None:
        """Copy the repo store aside; return the backup path or None."""
        if not self._path.exists():
            return None

        stamp = datetime.now(timezone.utc).strftime("%Y%m%d%H%M%S")
        destination = self._path.with_suffix(f".json.bak-{stamp}")
        destination.write_text(self._path.read_text())
        return destination

    def reset(self) -> None:
        """Delete the repo store file if present."""
        if self._path.exists():
            self._path.unlink()

    def _backup_corrupt(self) -> None:
        """Preserve a corrupt file under a dated name and warn the user."""
        stamp = datetime.now(timezone.utc).strftime("%Y%m%d")
        destination = self._path.with_suffix(f".json.bak-{stamp}")
        destination.write_text(self._path.read_text())
        err_line(
            f"⚠️ Repo exceptions file corrupted. Backed up to {destination}.\n"
            f"   Run `scc doctor` for details."
        )
|