reporails-cli 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- reporails_cli/.env.example +1 -0
- reporails_cli/__init__.py +24 -0
- reporails_cli/bundled/.semgrepignore +51 -0
- reporails_cli/bundled/__init__.py +31 -0
- reporails_cli/bundled/capability-patterns.yml +54 -0
- reporails_cli/bundled/levels.yml +99 -0
- reporails_cli/core/__init__.py +35 -0
- reporails_cli/core/agents.py +147 -0
- reporails_cli/core/applicability.py +150 -0
- reporails_cli/core/bootstrap.py +147 -0
- reporails_cli/core/cache.py +352 -0
- reporails_cli/core/capability.py +245 -0
- reporails_cli/core/discover.py +362 -0
- reporails_cli/core/engine.py +177 -0
- reporails_cli/core/init.py +309 -0
- reporails_cli/core/levels.py +177 -0
- reporails_cli/core/models.py +329 -0
- reporails_cli/core/opengrep/__init__.py +34 -0
- reporails_cli/core/opengrep/runner.py +203 -0
- reporails_cli/core/opengrep/semgrepignore.py +39 -0
- reporails_cli/core/opengrep/templates.py +138 -0
- reporails_cli/core/registry.py +155 -0
- reporails_cli/core/sarif.py +181 -0
- reporails_cli/core/scorer.py +178 -0
- reporails_cli/core/semantic.py +193 -0
- reporails_cli/core/utils.py +139 -0
- reporails_cli/formatters/__init__.py +19 -0
- reporails_cli/formatters/json.py +137 -0
- reporails_cli/formatters/mcp.py +68 -0
- reporails_cli/formatters/text/__init__.py +32 -0
- reporails_cli/formatters/text/box.py +89 -0
- reporails_cli/formatters/text/chars.py +42 -0
- reporails_cli/formatters/text/compact.py +119 -0
- reporails_cli/formatters/text/components.py +117 -0
- reporails_cli/formatters/text/full.py +135 -0
- reporails_cli/formatters/text/rules.py +50 -0
- reporails_cli/formatters/text/violations.py +92 -0
- reporails_cli/interfaces/__init__.py +1 -0
- reporails_cli/interfaces/cli/__init__.py +7 -0
- reporails_cli/interfaces/cli/main.py +352 -0
- reporails_cli/interfaces/mcp/__init__.py +5 -0
- reporails_cli/interfaces/mcp/server.py +194 -0
- reporails_cli/interfaces/mcp/tools.py +136 -0
- reporails_cli/py.typed +0 -0
- reporails_cli/templates/__init__.py +65 -0
- reporails_cli/templates/cli_box.txt +10 -0
- reporails_cli/templates/cli_cta.txt +4 -0
- reporails_cli/templates/cli_delta.txt +1 -0
- reporails_cli/templates/cli_file_header.txt +1 -0
- reporails_cli/templates/cli_legend.txt +1 -0
- reporails_cli/templates/cli_pending.txt +3 -0
- reporails_cli/templates/cli_violation.txt +1 -0
- reporails_cli/templates/cli_working.txt +2 -0
- reporails_cli-0.0.1.dist-info/METADATA +108 -0
- reporails_cli-0.0.1.dist-info/RECORD +58 -0
- reporails_cli-0.0.1.dist-info/WHEEL +4 -0
- reporails_cli-0.0.1.dist-info/entry_points.txt +3 -0
- reporails_cli-0.0.1.dist-info/licenses/LICENSE +201 -0
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
"""Path helpers and config loading for reporails home directory."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import platform
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import TYPE_CHECKING
|
|
8
|
+
|
|
9
|
+
import yaml
|
|
10
|
+
|
|
11
|
+
if TYPE_CHECKING:
|
|
12
|
+
from reporails_cli.core.models import GlobalConfig
|
|
13
|
+
|
|
14
|
+
# Constants
|
|
15
|
+
REPORAILS_HOME = Path.home() / ".reporails"
|
|
16
|
+
FRAMEWORK_REPO = "reporails/reporails-rules"
|
|
17
|
+
FRAMEWORK_RELEASE_URL = f"https://github.com/{FRAMEWORK_REPO}/releases/download"
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def get_reporails_home() -> Path:
    """Return the reporails home directory, ``~/.reporails``."""
    return REPORAILS_HOME
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def get_opengrep_bin() -> Path:
    """Return the expected location of the OpenGrep binary.

    On Windows the binary carries an ``.exe`` suffix; elsewhere it is bare.
    """
    exe_name = "opengrep.exe" if platform.system().lower() == "windows" else "opengrep"
    return get_reporails_home() / "bin" / exe_name
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def get_rules_path() -> Path:
    """Resolve the rules directory.

    A ``./checks/`` directory containing ``.yml`` files in the current
    working directory takes precedence (development mode); otherwise the
    installed location under ``~/.reporails/rules/`` is used.
    """
    dev_checks = Path.cwd() / "checks"
    has_local_rules = dev_checks.exists() and any(dev_checks.rglob("*.yml"))
    return dev_checks if has_local_rules else get_reporails_home() / "rules"
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def get_core_rules_path() -> Path:
    """Return the core rules directory (~/.reporails/rules/core/)."""
    return get_rules_path().joinpath("core")
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def get_agent_rules_path(agent: str) -> Path:
    """Return an agent's rules directory (~/.reporails/rules/agents/{agent}/rules/)."""
    agent_root = get_rules_path() / "agents" / agent
    return agent_root / "rules"
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def get_agent_config_path(agent: str) -> Path:
    """Return an agent's config file (~/.reporails/rules/agents/{agent}/config.yml)."""
    agent_root = get_rules_path() / "agents" / agent
    return agent_root / "config.yml"
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def get_agent_vars(agent: str = "claude") -> dict[str, str | list[str]]:
    """Read template variables from an agent's config.yml.

    Args:
        agent: Agent identifier (default: claude)

    Returns:
        Mapping from variable names to str or list[str] values, taken from
        the ``vars`` section of the agent's config.yml; an empty dict when
        the file is missing or unreadable.
    """
    path = get_agent_config_path(agent)
    if not path.exists():
        return {}

    try:
        raw = yaml.safe_load(path.read_text(encoding="utf-8")) or {}
    except (yaml.YAMLError, OSError):
        return {}

    # Normalize every value to str (or list of str) for template substitution.
    return {
        key: [str(item) for item in value] if isinstance(value, list) else str(value)
        for key, value in raw.get("vars", {}).items()
    }
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def get_schemas_path() -> Path:
    """Return the rule-schema directory (~/.reporails/rules/schemas/)."""
    rules = get_rules_path()
    return rules / "schemas"
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
def get_version_file() -> Path:
    """Return the framework version marker file (~/.reporails/version)."""
    home = get_reporails_home()
    return home / "version"
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
def get_global_config_path() -> Path:
    """Return the global config file location (~/.reporails/config.yml)."""
    home = get_reporails_home()
    return home / "config.yml"
|
|
104
|
+
|
|
105
|
+
|
|
106
|
+
def get_global_config() -> GlobalConfig:
    """Load the global configuration from ~/.reporails/config.yml.

    Returns:
        Parsed GlobalConfig; a default instance when the file is absent
        or cannot be read/parsed.
    """
    from reporails_cli.core.models import GlobalConfig

    path = get_global_config_path()
    if not path.exists():
        return GlobalConfig()

    try:
        raw = yaml.safe_load(path.read_text(encoding="utf-8")) or {}
    except (yaml.YAMLError, OSError):
        return GlobalConfig()

    fw = raw.get("framework_path")
    return GlobalConfig(
        framework_path=Path(fw) if fw else None,
        auto_update_check=raw.get("auto_update_check", True),
    )
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
def get_installed_version() -> str | None:
    """Return the framework version recorded in ~/.reporails/version, or None.

    A missing file surfaces as FileNotFoundError, a subclass of OSError,
    so the EAFP read covers both the absent and the unreadable case.
    """
    version_file = get_version_file()
    try:
        return version_file.read_text(encoding="utf-8").strip()
    except OSError:
        return None
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
def is_initialized() -> bool:
    """Report whether reporails is initialized (binary and rules both present)."""
    required = (get_opengrep_bin(), get_rules_path())
    return all(p.exists() for p in required)
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
# Legacy alias for backward compatibility
def get_checks_path() -> Path:
    """Deprecated alias for get_rules_path(); kept for older callers."""
    return get_rules_path()
|
|
@@ -0,0 +1,352 @@
|
|
|
1
|
+
"""Caching system - project-local for operations, global for analytics.
|
|
2
|
+
|
|
3
|
+
Project-local (.reporails/):
|
|
4
|
+
- backbone.yml # Project structure (committed)
|
|
5
|
+
- .cache/file-map.json # Fast file lookup (gitignored)
|
|
6
|
+
- .cache/judgment-cache.json # Semantic judgment results (gitignored)
|
|
7
|
+
|
|
8
|
+
Global (~/.reporails/analytics/):
|
|
9
|
+
- projects/{hash}.json # Per-project analytics
|
|
10
|
+
- aggregated.json # Cross-project insights (future)
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
from __future__ import annotations
|
|
14
|
+
|
|
15
|
+
import hashlib
|
|
16
|
+
import json
|
|
17
|
+
import subprocess
|
|
18
|
+
from dataclasses import dataclass, field
|
|
19
|
+
from datetime import UTC, datetime
|
|
20
|
+
from pathlib import Path
|
|
21
|
+
from typing import Any
|
|
22
|
+
|
|
23
|
+
from reporails_cli.core.bootstrap import get_reporails_home
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def get_analytics_dir() -> Path:
    """Return the global per-project analytics directory."""
    home = get_reporails_home()
    return home / "analytics" / "projects"
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
# =============================================================================
|
|
32
|
+
# Project Identification
|
|
33
|
+
# =============================================================================
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def get_git_remote(target: Path) -> str | None:
    """Return the ``origin`` remote URL of *target*'s git repo, or None.

    Failures (git missing, not a repo, no origin remote) all yield None.
    """
    cmd = ["git", "remote", "get-url", "origin"]
    try:
        proc = subprocess.run(
            cmd,
            cwd=target,
            capture_output=True,
            text=True,
            check=False,
        )
    except (OSError, subprocess.SubprocessError):
        return None
    return proc.stdout.strip() if proc.returncode == 0 else None
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def get_project_id(target: Path) -> str:
    """Derive a stable 12-hex-char identifier for a project.

    The git remote URL is preferred so every clone of a repository maps to
    the same id; projects without a remote fall back to their absolute path.

    Args:
        target: Project root

    Returns:
        12-character hex hash
    """
    seed = get_git_remote(target) or str(target.resolve())
    return hashlib.sha256(seed.encode()).hexdigest()[:12]
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def get_project_name(target: Path) -> str:
    """Return the directory name of the resolved project root."""
    resolved = target.resolve()
    return resolved.name
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
# =============================================================================
|
|
79
|
+
# Project-Local Cache (.reporails/)
|
|
80
|
+
# =============================================================================
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
@dataclass
class ProjectCache:
    """Project-local cache manager.

    Owns the project's ``.reporails/`` directory: the committed
    ``backbone.yml`` plus the gitignored ``.cache/`` files
    (``file-map.json`` and ``judgment-cache.json``).
    """

    # Project root directory; all cache paths are derived from it.
    target: Path

    @property
    def reporails_dir(self) -> Path:
        """Get project's .reporails directory."""
        return self.target / ".reporails"

    @property
    def cache_dir(self) -> Path:
        """Get project's cache directory (.reporails/.cache/)."""
        return self.reporails_dir / ".cache"

    @property
    def file_map_path(self) -> Path:
        """Path to the cached file map (.reporails/.cache/file-map.json)."""
        return self.cache_dir / "file-map.json"

    @property
    def backbone_path(self) -> Path:
        """Path to the committed project backbone (.reporails/backbone.yml)."""
        return self.reporails_dir / "backbone.yml"

    @property
    def judgment_cache_path(self) -> Path:
        """Path to the judgment cache (.reporails/.cache/judgment-cache.json)."""
        return self.cache_dir / "judgment-cache.json"

    def ensure_dir(self) -> None:
        """Create cache directory if needed."""
        self.cache_dir.mkdir(parents=True, exist_ok=True)

    # File map operations
    def load_file_map(self) -> dict[str, Any] | None:
        """Load cached file map.

        Returns:
            Parsed JSON dict, or None when the file is missing or unreadable.
        """
        if not self.file_map_path.exists():
            return None
        try:
            result: dict[str, Any] = json.loads(self.file_map_path.read_text(encoding="utf-8"))
            return result
        except (json.JSONDecodeError, OSError):
            return None

    def save_file_map(self, files: list[Path]) -> None:
        """Save file map to cache.

        Args:
            files: Absolute paths under ``self.target``; stored relative to
                the project root so the cache survives a directory move.
        """
        self.ensure_dir()
        # NOTE(review): relative_to raises ValueError if a file is not under
        # target — callers are expected to pass project-local paths only.
        relative_paths = [str(f.relative_to(self.target)) for f in files]
        data = {
            "version": 1,  # cache schema version
            "target": str(self.target),
            "files": relative_paths,
            "count": len(files),
            "cached_at": datetime.now(UTC).isoformat(),
        }
        self.file_map_path.write_text(json.dumps(data, indent=2), encoding="utf-8")

    def get_cached_files(self) -> list[Path] | None:
        """Get files from cache if valid.

        Returns:
            Absolute paths rebuilt from the cached relative entries, or None
            when there is no cache or any cached file has since disappeared.
        """
        data = self.load_file_map()
        if data is None:
            return None

        files = [self.target / p for p in data.get("files", [])]
        # Validate: check if files still exist; any deletion invalidates the map.
        if all(f.exists() for f in files):
            return files
        return None

    # Judgment cache operations
    def load_judgment_cache(self) -> dict[str, Any]:
        """Load cached semantic judgments.

        Returns:
            Cache dict with at least ``version`` and ``judgments`` keys; a
            fresh empty structure when the file is missing or unreadable.
        """
        if not self.judgment_cache_path.exists():
            return {"version": 1, "judgments": {}}
        try:
            result: dict[str, Any] = json.loads(
                self.judgment_cache_path.read_text(encoding="utf-8")
            )
            return result
        except (json.JSONDecodeError, OSError):
            return {"version": 1, "judgments": {}}

    def save_judgment_cache(self, data: dict[str, Any]) -> None:
        """Save judgment cache, stamping schema version and update time."""
        self.ensure_dir()
        data["version"] = 1
        data["updated_at"] = datetime.now(UTC).isoformat()
        self.judgment_cache_path.write_text(json.dumps(data, indent=2), encoding="utf-8")

    def get_cached_judgment(self, file_path: str, content_hash: str) -> dict[str, Any] | None:
        """Get cached judgment for a file if hash matches.

        Args:
            file_path: Key into the judgments map (project-relative path).
            content_hash: Current content hash; a mismatch invalidates the entry.

        Returns:
            The cached ``results`` dict, or None on miss/stale entry.
        """
        cache = self.load_judgment_cache()
        judgments = cache.get("judgments", {})

        if file_path not in judgments:
            return None

        entry = judgments[file_path]
        if entry.get("content_hash") != content_hash:
            return None  # File changed, cache invalid

        results: dict[str, Any] | None = entry.get("results")
        return results

    def set_cached_judgment(
        self, file_path: str, content_hash: str, results: dict[str, Any]
    ) -> None:
        """Cache judgment results for a file.

        Performs a full load-modify-save round trip per call; fine for the
        current call volume, but batching would avoid repeated file I/O.
        """
        cache = self.load_judgment_cache()
        cache.setdefault("judgments", {})[file_path] = {
            "content_hash": content_hash,
            "evaluated_at": datetime.now(UTC).isoformat(),
            "results": results,
        }
        self.save_judgment_cache(cache)
|
|
197
|
+
|
|
198
|
+
|
|
199
|
+
# =============================================================================
|
|
200
|
+
# Global Analytics Cache (~/.reporails/analytics/)
|
|
201
|
+
# =============================================================================
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
@dataclass
class AnalyticsEntry:
    """Single analytics entry for a project scan."""

    # ISO-8601 UTC timestamp of when the scan was recorded
    timestamp: str
    # Validation score for the scan
    score: float
    # Capability level label at scan time
    level: str
    # Number of violations found
    violations_count: int
    # Number of rules evaluated
    rules_checked: int
    # Scan duration in milliseconds
    elapsed_ms: float
    # Number of instruction files scanned
    instruction_files: int
|
|
215
|
+
|
|
216
|
+
|
|
217
|
+
@dataclass
class ProjectAnalytics:
    """Analytics for a single project."""

    # Stable 12-hex-char project hash (see get_project_id)
    project_id: str
    # Human-readable project name (directory name)
    project_name: str
    # Absolute path of the project root at last save
    project_path: str
    # ISO-8601 UTC timestamp of the first recorded scan
    first_seen: str
    # ISO-8601 UTC timestamp of the most recent scan
    last_seen: str
    # Total number of scans ever recorded
    scan_count: int = 0
    # Most recent scan entries (capped at 100 by record_scan)
    history: list[AnalyticsEntry] = field(default_factory=list)
|
|
228
|
+
|
|
229
|
+
|
|
230
|
+
def get_project_analytics_path(project_id: str) -> Path:
    """Locate the analytics JSON file for the given project id."""
    filename = f"{project_id}.json"
    return get_analytics_dir() / filename
|
|
233
|
+
|
|
234
|
+
|
|
235
|
+
def load_project_analytics(project_id: str) -> ProjectAnalytics | None:
    """Load analytics for a project.

    Args:
        project_id: Stable project hash (see get_project_id).

    Returns:
        ProjectAnalytics, or None when the file is missing, unreadable, or
        does not match the expected schema.
    """
    path = get_project_analytics_path(project_id)
    if not path.exists():
        return None
    try:
        data = json.loads(path.read_text(encoding="utf-8"))
        return ProjectAnalytics(
            project_id=data["project_id"],
            project_name=data["project_name"],
            project_path=data["project_path"],
            first_seen=data["first_seen"],
            last_seen=data["last_seen"],
            scan_count=data.get("scan_count", 0),
            history=[AnalyticsEntry(**entry) for entry in data.get("history", [])],
        )
    # TypeError added: AnalyticsEntry(**entry) raises it for history entries
    # with missing or unexpected keys (older/newer schema on disk) — treat
    # that as a corrupt file and return None rather than crashing the scan.
    except (json.JSONDecodeError, KeyError, TypeError, OSError):
        return None
|
|
253
|
+
|
|
254
|
+
|
|
255
|
+
def save_project_analytics(analytics: ProjectAnalytics) -> None:
    """Persist project analytics as pretty-printed JSON.

    Creates the analytics directory on first use.
    """
    path = get_project_analytics_path(analytics.project_id)
    path.parent.mkdir(parents=True, exist_ok=True)

    payload = {
        "project_id": analytics.project_id,
        "project_name": analytics.project_name,
        "project_path": analytics.project_path,
        "first_seen": analytics.first_seen,
        "last_seen": analytics.last_seen,
        "scan_count": analytics.scan_count,
        # vars() yields dataclass fields in declaration order, which matches
        # the on-disk history-entry schema exactly.
        "history": [dict(vars(entry)) for entry in analytics.history],
    }
    path.write_text(json.dumps(payload, indent=2), encoding="utf-8")
|
|
281
|
+
|
|
282
|
+
|
|
283
|
+
def get_previous_scan(target: Path) -> AnalyticsEntry | None:
    """Return the most recent recorded scan for *target*, if any.

    Intended to be called before the current run is recorded, so the
    result can be used for before/after comparison.

    Args:
        target: Project root

    Returns:
        Last AnalyticsEntry or None if no previous scan
    """
    analytics = load_project_analytics(get_project_id(target))
    if analytics is not None and analytics.history:
        return analytics.history[-1]
    return None
|
|
297
|
+
|
|
298
|
+
|
|
299
|
+
def record_scan(
    target: Path,
    score: float,
    level: str,
    violations_count: int,
    rules_checked: int,
    elapsed_ms: float,
    instruction_files: int,
) -> None:
    """Record a scan in global analytics (quiet collection).

    Args:
        target: Project root
        score: Validation score
        level: Capability level
        violations_count: Number of violations
        rules_checked: Number of rules checked
        elapsed_ms: Scan duration
        instruction_files: Number of instruction files scanned
    """
    project_id = get_project_id(target)
    now = datetime.now(UTC).isoformat()

    # Load existing analytics, or start a fresh record for a new project.
    analytics = load_project_analytics(project_id) or ProjectAnalytics(
        project_id=project_id,
        project_name=get_project_name(target),
        project_path=str(target.resolve()),
        first_seen=now,
        last_seen=now,
    )

    analytics.last_seen = now
    analytics.scan_count += 1

    analytics.history.append(
        AnalyticsEntry(
            timestamp=now,
            score=score,
            level=level,
            violations_count=violations_count,
            rules_checked=rules_checked,
            elapsed_ms=elapsed_ms,
            instruction_files=instruction_files,
        )
    )
    # Cap history at the 100 most recent entries.
    del analytics.history[:-100]

    save_project_analytics(analytics)
|