tears-cli 0.1.0a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tears/__init__.py +1 -0
- tears/__main__.py +4 -0
- tears/checker.py +135 -0
- tears/cli.py +39 -0
- tears/config.py +184 -0
- tears/exclude.py +38 -0
- tears/graph/__init__.py +42 -0
- tears/graph/grimp_builder.py +126 -0
- tears/header.py +30 -0
- tears/hook.py +318 -0
- tears/rules.py +52 -0
- tears/scan.py +65 -0
- tears_cli-0.1.0a1.dist-info/METADATA +8 -0
- tears_cli-0.1.0a1.dist-info/RECORD +16 -0
- tears_cli-0.1.0a1.dist-info/WHEEL +4 -0
- tears_cli-0.1.0a1.dist-info/entry_points.txt +3 -0
tears/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# @tear: 3
|
tears/__main__.py
ADDED
tears/checker.py
ADDED
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
# @tear: 3
|
|
2
|
+
"""The pure checker: ImportGraph + TearsConfig -> list of FileReports.
|
|
3
|
+
|
|
4
|
+
This module knows nothing about the filesystem, parsing, or output formatting.
|
|
5
|
+
It composes the rule functions over the data the graph exposes.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
from dataclasses import dataclass, field
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
from typing import Literal
|
|
13
|
+
|
|
14
|
+
from tears.config import TearsConfig
|
|
15
|
+
from tears.graph import ImportGraph
|
|
16
|
+
from tears.rules import can_import, check_directory_requirement
|
|
17
|
+
|
|
18
|
+
# Severity classifies a single issue; Status is the per-file roll-up
# ("ok" is only possible at file level — an Issue is always a problem).
Severity = Literal["fail", "warn"]
Status = Literal["ok", "warn", "fail"]


@dataclass(frozen=True)
class Issue:
    """One problem found in one file (immutable value object)."""

    severity: Severity  # "fail" contributes to a non-zero exit code; "warn" does not
    message: str  # fully formatted, human-readable description
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
@dataclass(frozen=True)
class FileReport:
    """Check outcome for a single file: its declared tier plus any issues."""

    path: Path  # absolute path of the checked file
    tier: int | None  # tier from the @tear header, or None when missing/malformed
    # A tuple is immutable, so it can be a plain default — the
    # field(default_factory=lambda: ()) dance is unnecessary.
    issues: tuple[Issue, ...] = ()

    @property
    def status(self) -> Status:
        """Worst severity across issues: "fail" beats "warn" beats "ok"."""
        worst: Status = "ok"
        for issue in self.issues:
            if issue.severity == "fail":
                return "fail"
            if issue.severity == "warn":
                worst = "warn"
        return worst
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
@dataclass(frozen=True)
class CheckReport:
    """Aggregate result of checking every in-scope file in a repo."""

    files: tuple[FileReport, ...]  # one report per scanned file

    def _count(self, wanted: str) -> int:
        # Shared tally for the two count properties below.
        return sum(report.status == wanted for report in self.files)

    @property
    def exit_code(self) -> int:
        """1 when at least one file failed; warnings alone never fail the run."""
        return int(self._count("fail") > 0)

    @property
    def failure_count(self) -> int:
        """Number of files whose status is "fail"."""
        return self._count("fail")

    @property
    def warning_count(self) -> int:
        """Number of files whose status is "warn"."""
        return self._count("warn")
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def check(
    graph: ImportGraph,
    config: TearsConfig,
    *,
    repo_root: Path,
) -> CheckReport:
    """Run all v1 checks: missing headers, directory requirements, import tiers.

    Pure function over the already-built graph and validated config; the only
    path work done here is relativizing paths for display. One FileReport is
    produced per file, in deterministic (string-sorted) order.
    """
    resolved_rules = config.resolved_import_rules()
    # A missing header only fails the run when the config says "error".
    missing_severity: Severity = "fail" if config.missing_header == "error" else "warn"

    reports: list[FileReport] = []
    # Sorted by string path so report order is stable across runs/platforms.
    for file_path in sorted(graph.files(), key=str):
        tier = graph.tier_of(file_path)
        # Headerless files are treated as the worst (highest) tier.
        effective_tier = tier if tier is not None else config.max_tear
        issues: list[Issue] = []

        if tier is None:
            issues.append(
                Issue(
                    severity=missing_severity,
                    message=f"missing @tear header (treated as tear {config.max_tear})",
                )
            )

        # Directory rule: the file's effective tier must satisfy the most
        # specific matching directory requirement.
        rel_path = _relative_posix(file_path, repo_root)
        if not check_directory_requirement(rel_path, effective_tier, config.directory_requirements):
            required = _required_tier(rel_path, config.directory_requirements)
            issues.append(
                Issue(
                    severity="fail",
                    message=f"directory requires tear {required}, file is tear {effective_tier}",
                )
            )

        # Import rule: every direct import target must be allowed for this
        # file's effective tier; targets are sorted for stable message order.
        for target in sorted(graph.imports_of(file_path), key=str):
            target_tier = graph.tier_of(target)
            target_effective = target_tier if target_tier is not None else config.max_tear
            if can_import(effective_tier, target_effective, resolved_rules):
                continue
            target_rel = _relative_posix(target, repo_root)
            issues.append(
                Issue(
                    severity="fail",
                    message=(
                        f"imports {target_rel} (tear {target_effective}): "
                        f"tear {effective_tier} cannot import from tear {target_effective}"
                    ),
                )
            )

        reports.append(FileReport(path=file_path, tier=tier, issues=tuple(issues)))

    return CheckReport(files=tuple(reports))
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def _relative_posix(path: Path, root: Path) -> str:
|
|
116
|
+
try:
|
|
117
|
+
return path.resolve().relative_to(root.resolve()).as_posix()
|
|
118
|
+
except ValueError:
|
|
119
|
+
return path.as_posix()
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
def _required_tier(rel_path: str, requirements: dict[str, int]) -> int | None:
|
|
123
|
+
file_segments = tuple(p for p in rel_path.strip("/").split("/") if p)
|
|
124
|
+
longest_match: int | None = None
|
|
125
|
+
longest_len = -1
|
|
126
|
+
for dir_key, required_tier in requirements.items():
|
|
127
|
+
dir_segments = tuple(p for p in dir_key.strip("/").split("/") if p)
|
|
128
|
+
if len(dir_segments) > len(file_segments):
|
|
129
|
+
continue
|
|
130
|
+
if file_segments[: len(dir_segments)] != dir_segments:
|
|
131
|
+
continue
|
|
132
|
+
if len(dir_segments) > longest_len:
|
|
133
|
+
longest_len = len(dir_segments)
|
|
134
|
+
longest_match = required_tier
|
|
135
|
+
return longest_match
|
tears/cli.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
# @tear: 3
|
|
2
|
+
"""`tears` — bare CLI entry point. No subcommands in v1."""
|
|
3
|
+
|
|
4
|
+
from __future__ import annotations
|
|
5
|
+
|
|
6
|
+
import argparse
|
|
7
|
+
import sys
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
|
|
10
|
+
from tears.config import ConfigError
|
|
11
|
+
from tears.scan import run_scan
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def _build_parser() -> argparse.ArgumentParser:
    # Parser construction kept separate so main() reads as pure control flow.
    parser = argparse.ArgumentParser(
        prog="tears",
        description="Tiered Enforcement, Authorship Review System — scan a repo.",
    )
    parser.add_argument(
        "path",
        nargs="?",
        default=".",
        help="Path to scan (defaults to the current directory).",
    )
    return parser


def main(argv: list[str] | None = None) -> int:
    """Scan the repo rooted at the given path and return the exit code.

    0: clean (or warnings only); 1: failures found; 2: broken `.tears.toml`.
    """
    args = _build_parser().parse_args(argv)
    repo_root = Path(args.path).resolve()

    try:
        report, output = run_scan(repo_root)
    except ConfigError as exc:
        print(f"error: {exc}", file=sys.stderr)
        return 2

    sys.stdout.write(output)
    return report.exit_code


if __name__ == "__main__":
    raise SystemExit(main())
|
tears/config.py
ADDED
|
@@ -0,0 +1,184 @@
|
|
|
1
|
+
# @tear: 3
|
|
2
|
+
"""`.tears.toml` parsing and validation."""
|
|
3
|
+
|
|
4
|
+
from __future__ import annotations
|
|
5
|
+
|
|
6
|
+
import tomllib
|
|
7
|
+
from dataclasses import dataclass, field
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
from typing import Any, cast
|
|
10
|
+
|
|
11
|
+
CONFIG_FILENAME = ".tears.toml"
MISSING_HEADER_VALUES = ("warn", "error")


class ConfigError(ValueError):
    """Raised when `.tears.toml` is malformed or fails schema validation."""


@dataclass(frozen=True)
class TearsConfig:
    """Validated, resolved tears configuration.

    `directory_requirements` keys are normalized (trailing slashes stripped).
    `import_rules` is the raw, possibly-partial mapping; use
    `resolved_import_rules()` to get the full per-tier allow-set with defaults filled in.
    """

    max_tear: int = 3  # highest (worst) tier; tiers run 0..max_tear
    # `dict`/`list` are valid default factories directly — no lambda needed.
    directory_requirements: dict[str, int] = field(default_factory=dict)
    exclude: list[str] = field(default_factory=list)
    source_roots: list[str] = field(default_factory=lambda: ["."])
    import_rules: dict[int, int] | None = None  # importer tier -> max importable tier
    missing_header: str = "warn"  # one of MISSING_HEADER_VALUES

    def __post_init__(self) -> None:
        """Validate ranges and enums; raises ConfigError on any violation."""
        if self.max_tear < 1:
            raise ConfigError(f"max_tear must be at least 1, got {self.max_tear}")
        if self.missing_header not in MISSING_HEADER_VALUES:
            raise ConfigError(
                f"missing_header must be one of {MISSING_HEADER_VALUES}, "
                f"got {self.missing_header!r}"
            )
        for path, tier in self.directory_requirements.items():
            if not 0 <= tier <= self.max_tear:
                raise ConfigError(
                    f"directory_requirements[{path!r}] = {tier}: "
                    f"tear level {tier} exceeds max_tear {self.max_tear}"
                )
        if self.import_rules is not None:
            for importer, max_allowed in self.import_rules.items():
                if not 0 <= importer <= self.max_tear:
                    raise ConfigError(
                        f"import_rules key {importer}: "
                        f"tear level {importer} exceeds max_tear {self.max_tear}"
                    )
                if not 0 <= max_allowed <= self.max_tear:
                    raise ConfigError(
                        f"import_rules[{importer}] = {max_allowed}: "
                        f"max_allowed {max_allowed} exceeds max_tear {self.max_tear}"
                    )

    def resolved_import_rules(self) -> dict[int, frozenset[int]]:
        """Full matrix with defaults filled in for any unspecified tier.

        The default for tier N is "may import tiers 0..N"; an explicit entry
        replaces the upper bound with the configured cap.
        """
        explicit = self.import_rules or {}
        return {
            tier: frozenset(range(explicit.get(tier, tier) + 1))
            for tier in range(self.max_tear + 1)
        }
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def load_config(repo_root: Path) -> TearsConfig:
    """Load `.tears.toml` from `repo_root`. Missing file => defaults.

    Malformed TOML or a schema failure raises `ConfigError` with a clear message
    naming the file and the problem.
    """
    config_path = repo_root / CONFIG_FILENAME
    if not config_path.exists():
        return TearsConfig()

    try:
        # TOML is defined as UTF-8; read explicitly so the platform's locale
        # encoding can never corrupt the parse.
        raw = tomllib.loads(config_path.read_text(encoding="utf-8"))
    except tomllib.TOMLDecodeError as exc:
        raise ConfigError(f"{CONFIG_FILENAME}: malformed TOML: {exc}") from exc

    return _from_mapping(raw, source=CONFIG_FILENAME)
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def _from_mapping(raw: dict[str, Any], *, source: str) -> TearsConfig:
    """Validate the parsed TOML mapping and build a TearsConfig.

    `source` is the filename used in every error message. Keys absent from
    `raw` are simply omitted from `kwargs`, so dataclass defaults apply.
    Raises ConfigError on any type or schema violation.
    """
    kwargs: dict[str, Any] = {}

    if "max_tear" in raw:
        kwargs["max_tear"] = _require_int(raw["max_tear"], "max_tear", source)

    if "directory_requirements" in raw:
        dr_raw = _require_mapping(raw["directory_requirements"], "directory_requirements", source)
        normalized: dict[str, int] = {}
        for key, value in dr_raw.items():
            # bool is a subclass of int, so it must be rejected explicitly.
            if not isinstance(key, str) or not isinstance(value, int) or isinstance(value, bool):
                raise ConfigError(
                    f"{source}: directory_requirements entries must be str -> int, "
                    f"got {key!r} -> {value!r}"
                )
            # Strip trailing slashes so "src/" and "src" are the same key.
            normalized[key.rstrip("/")] = value
        kwargs["directory_requirements"] = normalized

    if "exclude" in raw:
        exclude_raw = _require_list(raw["exclude"], "exclude", source)
        exclude: list[str] = []
        for item in exclude_raw:
            if not isinstance(item, str):
                raise ConfigError(f"{source}: exclude entries must be strings, got {item!r}")
            exclude.append(item)
        kwargs["exclude"] = exclude

    if "imports" in raw:
        # `source_roots` lives nested under an [imports] table in the TOML.
        imports_raw = _require_mapping(raw["imports"], "imports", source)
        if "source_roots" in imports_raw:
            sr_raw = imports_raw["source_roots"]
            if not isinstance(sr_raw, list):
                raise ConfigError(
                    f"{source}: imports.source_roots must be a list, got {type(sr_raw).__name__}"
                )
            source_roots: list[str] = []
            for item in cast(list[Any], sr_raw):
                if not isinstance(item, str):
                    raise ConfigError(
                        f"{source}: imports.source_roots entries must be strings, got {item!r}"
                    )
                source_roots.append(item)
            kwargs["source_roots"] = source_roots

    if "import_rules" in raw:
        ir_raw = _require_mapping(raw["import_rules"], "import_rules", source)
        rules: dict[int, int] = {}
        for key, value in ir_raw.items():
            # TOML keys are always strings; convert to int.
            try:
                key_int = int(cast(str, key))
            except (ValueError, TypeError) as exc:
                raise ConfigError(
                    f"{source}: import_rules keys must be integer-valued strings, got {key!r}"
                ) from exc
            if not isinstance(value, int) or isinstance(value, bool):
                raise ConfigError(
                    f"{source}: import_rules[{key_int}] must be an int, "
                    f"got {type(value).__name__}"
                )
            rules[key_int] = value
        kwargs["import_rules"] = rules

    if "missing_header" in raw:
        kwargs["missing_header"] = _require_str(raw["missing_header"], "missing_header", source)

    try:
        return TearsConfig(**kwargs)
    except ConfigError as exc:
        # Re-wrap range/schema errors from __post_init__ so they name the file.
        raise ConfigError(f"{source}: {exc}") from None
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def _require_int(value: Any, key: str, source: str) -> int:
|
|
164
|
+
if not isinstance(value, int) or isinstance(value, bool):
|
|
165
|
+
raise ConfigError(f"{source}: {key} must be int, got {type(value).__name__}")
|
|
166
|
+
return value
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
def _require_str(value: Any, key: str, source: str) -> str:
|
|
170
|
+
if not isinstance(value, str):
|
|
171
|
+
raise ConfigError(f"{source}: {key} must be str, got {type(value).__name__}")
|
|
172
|
+
return value
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
def _require_mapping(value: Any, key: str, source: str) -> dict[Any, Any]:
|
|
176
|
+
if not isinstance(value, dict):
|
|
177
|
+
raise ConfigError(f"{source}: {key} must be a mapping, got {type(value).__name__}")
|
|
178
|
+
return cast(dict[Any, Any], value)
|
|
179
|
+
|
|
180
|
+
|
|
181
|
+
def _require_list(value: Any, key: str, source: str) -> list[Any]:
|
|
182
|
+
if not isinstance(value, list):
|
|
183
|
+
raise ConfigError(f"{source}: {key} must be a list, got {type(value).__name__}")
|
|
184
|
+
return cast(list[Any], value)
|
tears/exclude.py
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
# @tear: 2
|
|
2
|
+
"""Exclude-pattern matching shared by the graph builder and the Claude hook.
|
|
3
|
+
|
|
4
|
+
Patterns are fnmatch-style with `**` extended to match across path separators
|
|
5
|
+
(`**/foo.py` matches `a/b/c/foo.py`). Paths are matched relative to the repo root
|
|
6
|
+
in POSIX form.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from __future__ import annotations
|
|
10
|
+
|
|
11
|
+
import fnmatch
|
|
12
|
+
import re
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def is_excluded(file_path: Path, repo_root: Path, patterns: list[str]) -> bool:
|
|
17
|
+
"""True if `file_path` matches any of `patterns` relative to `repo_root`."""
|
|
18
|
+
if not patterns:
|
|
19
|
+
return False
|
|
20
|
+
try:
|
|
21
|
+
rel = file_path.relative_to(repo_root).as_posix()
|
|
22
|
+
except ValueError:
|
|
23
|
+
try:
|
|
24
|
+
rel = file_path.resolve().relative_to(repo_root.resolve()).as_posix()
|
|
25
|
+
except ValueError:
|
|
26
|
+
return False
|
|
27
|
+
return any(_match_glob(rel, p) for p in patterns)
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def _match_glob(path: str, pattern: str) -> bool:
|
|
31
|
+
return re.compile(_glob_to_regex(pattern)).fullmatch(path) is not None
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def _glob_to_regex(pattern: str) -> str:
|
|
35
|
+
placeholder = "\x00DOUBLESTAR\x00"
|
|
36
|
+
p = pattern.replace("**", placeholder)
|
|
37
|
+
p = fnmatch.translate(p).rstrip("\\Z")
|
|
38
|
+
return p.replace(re.escape(placeholder), ".*")
|
tears/graph/__init__.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
# @tear: 3
|
|
2
|
+
"""Import graph abstraction.
|
|
3
|
+
|
|
4
|
+
The checker depends on the `ImportGraph` Protocol; concrete builders (currently
|
|
5
|
+
`grimp_builder.GrimpImportGraph`) implement it. This lets us swap builders
|
|
6
|
+
without touching checker logic.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from __future__ import annotations
|
|
10
|
+
|
|
11
|
+
from collections.abc import Iterable
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from typing import Protocol
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class ImportGraph(Protocol):
    """Builders provide repo-wide tier and import data.

    Structural protocol: any object with these four methods satisfies it —
    builders do not need to inherit from this class.
    """

    def files(self) -> Iterable[Path]:
        """All in-scope Python files in the repo (excluded files omitted)."""
        ...

    def tier_of(self, file: Path) -> int | None:
        """Tier from the file's @tear header, or None if missing/malformed."""
        ...

    def imports_of(self, file: Path) -> Iterable[Path]:
        """Files this file directly imports — resolved to repo files.

        Unresolvable targets (stdlib, third-party, dynamic) and excluded targets
        are omitted.
        """
        ...

    def importers_of(self, file: Path) -> Iterable[Path]:
        """Files that directly import this file. Builders may raise
        NotImplementedError if reverse-dep queries aren't needed by the checker.
        """
        ...


__all__ = ["ImportGraph"]
|
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
# @tear: 3
|
|
2
|
+
"""grimp-backed `ImportGraph` implementation.
|
|
3
|
+
|
|
4
|
+
Builds the graph by:
|
|
5
|
+
1. Walking the configured `source_roots` to discover top-level Python packages.
|
|
6
|
+
2. Calling `grimp.build_graph(*pkgs)` to get all imports.
|
|
7
|
+
3. Mapping grimp's dotted module names back to absolute repo file paths.
|
|
8
|
+
4. Parsing each file's `@tear` header.
|
|
9
|
+
5. Applying the `exclude` patterns so excluded files are invisible to the checker.
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
from __future__ import annotations
|
|
13
|
+
|
|
14
|
+
import contextlib
|
|
15
|
+
import sys
|
|
16
|
+
from collections.abc import Iterable
|
|
17
|
+
from pathlib import Path
|
|
18
|
+
from typing import Any, cast
|
|
19
|
+
|
|
20
|
+
import grimp
|
|
21
|
+
|
|
22
|
+
from tears.config import TearsConfig
|
|
23
|
+
from tears.exclude import is_excluded
|
|
24
|
+
from tears.header import parse_tear_level
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class GrimpImportGraph:
    """`ImportGraph` over a real repo, backed by grimp."""

    def __init__(
        self,
        *,
        files: dict[Path, int | None],
        imports: dict[Path, frozenset[Path]],
        importers: dict[Path, frozenset[Path]],
    ) -> None:
        # Pre-computed lookup tables; never mutated after construction.
        self._tier_by_file = files
        self._forward = imports
        self._reverse = importers

    def files(self) -> Iterable[Path]:
        """Every in-scope file, as a view over the tier table's keys."""
        return self._tier_by_file.keys()

    def tier_of(self, file: Path) -> int | None:
        """Declared tier for `file`; None when unknown or headerless."""
        return self._tier_by_file.get(file)

    def imports_of(self, file: Path) -> Iterable[Path]:
        """Direct import targets of `file`; empty for unknown files."""
        return self._forward.get(file, frozenset())

    def importers_of(self, file: Path) -> Iterable[Path]:
        """Direct reverse dependencies of `file`; empty for unknown files."""
        return self._reverse.get(file, frozenset())
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def build_grimp_graph(repo_root: Path, config: TearsConfig) -> GrimpImportGraph:
    """Build the import graph for `repo_root` under `config`.

    Discovers top-level packages under each source root, asks grimp for the
    import graph, then maps dotted module names back to files and parses each
    file's @tear header. Excluded files are dropped entirely, so they are
    invisible to the checker both as sources and as import targets.
    """
    repo_root = repo_root.resolve()
    source_root_paths = [(repo_root / r).resolve() for r in config.source_roots]

    # A "package" here is any direct child directory of a source root that
    # contains an __init__.py; sorted for deterministic discovery order.
    packages: list[tuple[str, Path]] = []
    for sr in source_root_paths:
        if not sr.is_dir():
            continue
        for child in sorted(sr.iterdir()):
            if child.is_dir() and (child / "__init__.py").exists():
                packages.append((child.name, sr))

    if not packages:
        return GrimpImportGraph(files={}, imports={}, importers={})

    # grimp locates packages via the import system, so the source roots must
    # be importable; only roots we actually added are removed afterwards.
    sys_path_added: list[str] = []
    for sr in source_root_paths:
        sr_str = str(sr)
        if sr_str not in sys.path:
            sys.path.insert(0, sr_str)
            sys_path_added.append(sr_str)

    try:
        package_names = [name for name, _ in packages]
        graph = cast(Any, grimp.build_graph(*package_names))  # pyright: ignore[reportUnknownMemberType]
    finally:
        for sr_str in sys_path_added:
            with contextlib.suppress(ValueError):
                sys.path.remove(sr_str)

    package_roots = {name: sr / name for name, sr in packages}
    module_to_file = _build_module_index(package_roots)

    # Tier table: one entry per non-excluded file, None when the header is
    # missing or malformed.
    files: dict[Path, int | None] = {}
    for file_path in module_to_file.values():
        if is_excluded(file_path, repo_root, config.exclude):
            continue
        files[file_path] = parse_tear_level(file_path.read_text(), max_tear=config.max_tear)

    imports: dict[Path, set[Path]] = {f: set() for f in files}
    importers: dict[Path, set[Path]] = {f: set() for f in files}

    file_to_module = {f: m for m, f in module_to_file.items()}

    # Translate grimp's module-level edges into file-level edges, skipping
    # targets that resolved outside the repo or were excluded above.
    for file_path in files:
        module = file_to_module[file_path]
        for imported_module in graph.find_modules_directly_imported_by(module):
            target = module_to_file.get(imported_module)
            if target is None or target not in files:
                continue
            imports[file_path].add(target)
            importers[target].add(file_path)

    return GrimpImportGraph(
        files=files,
        imports={k: frozenset(v) for k, v in imports.items()},
        importers={k: frozenset(v) for k, v in importers.items()},
    )
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def _build_module_index(package_roots: dict[str, Path]) -> dict[str, Path]:
|
|
116
|
+
"""Map every reachable module name to its source file."""
|
|
117
|
+
index: dict[str, Path] = {}
|
|
118
|
+
for pkg_name, pkg_root in package_roots.items():
|
|
119
|
+
for py_file in pkg_root.rglob("*.py"):
|
|
120
|
+
rel = py_file.relative_to(pkg_root)
|
|
121
|
+
parts = rel.with_suffix("").parts
|
|
122
|
+
if parts[-1] == "__init__":
|
|
123
|
+
parts = parts[:-1]
|
|
124
|
+
module = ".".join((pkg_name, *parts)) if parts else pkg_name
|
|
125
|
+
index[module] = py_file.resolve()
|
|
126
|
+
return index
|
tears/header.py
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
# @tear: 3
|
|
2
|
+
"""Parse the `@tear` header from a Python source file."""
|
|
3
|
+
|
|
4
|
+
from __future__ import annotations
|
|
5
|
+
|
|
6
|
+
import re
|
|
7
|
+
|
|
8
|
+
# First non-whitespace token must be `#`, then `@tear: <digits>`; the negative
# lookahead rejects trailing word chars or a dot (so `1.5` never matches).
HEADER_RE = re.compile(r"^[ \t]*#[ \t]*@tear:[ \t]*(\d+)(?![\w.])")

# Only the first few lines are scanned for a header.
MAX_LINES = 5


def parse_tear_level(content: str, *, max_tear: int = 3) -> int | None:
    """Return the worst (highest) valid tier in the first MAX_LINES lines, else None.

    A valid header line starts (after optional blanks) with `#` followed by
    `@tear: <digits>`, where the digits parse to an integer in [0, max_tear].
    Out-of-range values are treated as malformed and ignored, as are
    non-integer forms like `1.5` and `-1`.
    """
    matches = (HEADER_RE.match(line) for line in content.splitlines()[:MAX_LINES])
    declared = [int(m.group(1)) for m in matches if m is not None]
    valid = [level for level in declared if 0 <= level <= max_tear]
    return max(valid) if valid else None
|
tears/hook.py
ADDED
|
@@ -0,0 +1,318 @@
|
|
|
1
|
+
# @tear: 3
|
|
2
|
+
"""Claude Code PostToolUse hook for tears.
|
|
3
|
+
|
|
4
|
+
Demotes the `@tear` header in every file Claude writes or edits to `max_tear`
|
|
5
|
+
(default 3). This is the enforcement backstop: a human reviewing the resulting
|
|
6
|
+
diff must consciously re-promote the tier to attest that they read the code.
|
|
7
|
+
If they leave the demotion in place, that's the attestation that they didn't.
|
|
8
|
+
|
|
9
|
+
Invocation:
|
|
10
|
+
- As a Claude Code hook: receives a JSON payload on stdin describing the tool
|
|
11
|
+
call (`{"tool_input": {"file_path": "..."}, ...}`). Register in
|
|
12
|
+
`.claude/settings.json` under `hooks.PostToolUse`.
|
|
13
|
+
- Manually: `python -m tears.hook FILE [FILE ...]`. Useful for testing and
|
|
14
|
+
bulk-demoting a list of files.
|
|
15
|
+
|
|
16
|
+
Scope:
|
|
17
|
+
- **Replacement is universal.** Any file with an existing `@tear: <digit>` header
|
|
18
|
+
in any line-comment style (`#`, `//`, `--`, `;`) or block-comment style
|
|
19
|
+
(`<!-- ... -->`, `/* ... */`) gets its digit rewritten to `max_tear`.
|
|
20
|
+
- **Insertion is type-specific.** A file *without* a header gets a new one
|
|
21
|
+
inserted if its extension or filename is in `COMMENT_STYLES` /
|
|
22
|
+
`FILENAME_STYLES` — covers most common dev files: Python, JS/TS, Go, Rust,
|
|
23
|
+
C/C++/C#, Java, Kotlin, Swift, Ruby, Shell, TOML, YAML, INI, SQL, Lua,
|
|
24
|
+
HTML/XML/Markdown/SVG, CSS/SCSS, Makefile, Dockerfile, .gitignore, .env, etc.
|
|
25
|
+
- **Multi-language scanning is still v2.** The hook covers many comment styles
|
|
26
|
+
cheaply; the *scanner* (`tears`) still only enforces import rules on `.py`.
|
|
27
|
+
See plan §1 for the asymmetric-scope rationale.
|
|
28
|
+
|
|
29
|
+
Behavior:
|
|
30
|
+
- **Matcher.** `.claude/settings.json` matches `Edit|Write|MultiEdit` only. Doesn't
|
|
31
|
+
catch `NotebookEdit` or any future file-touching tool — extend the matcher if
|
|
32
|
+
you need them.
|
|
33
|
+
- **One file per invocation.** `Edit`, `Write`, and `MultiEdit` each operate on a
|
|
34
|
+
single `tool_input.file_path` (MultiEdit applies multiple edits to one file).
|
|
35
|
+
The stdin parser returns a 1-element list. A future bulk-edit tool with a list
|
|
36
|
+
payload would need parser changes.
|
|
37
|
+
- **Silent on bad input.** Empty stdin, malformed JSON, missing fields, paths
|
|
38
|
+
that don't exist, and excluded paths all return 0 with no output. The hook
|
|
39
|
+
never breaks Claude Code's flow.
|
|
40
|
+
- **Broken `.tears.toml` is non-fatal.** Falls back to `TearsConfig()` defaults so
|
|
41
|
+
a malformed config can't stop Claude from editing files. The `tears` CLI
|
|
42
|
+
itself still hard-fails on a broken config — only the hook is lenient.
|
|
43
|
+
"""
|
|
44
|
+
|
|
45
|
+
from __future__ import annotations
|
|
46
|
+
|
|
47
|
+
import json
|
|
48
|
+
import re
|
|
49
|
+
import sys
|
|
50
|
+
from pathlib import Path
|
|
51
|
+
from typing import Any, cast
|
|
52
|
+
|
|
53
|
+
from tears.config import ConfigError, TearsConfig, load_config
|
|
54
|
+
from tears.exclude import is_excluded
|
|
55
|
+
|
|
56
|
+
# Match a line whose first non-whitespace token looks like a comment marker
# (one or more non-alphanumeric non-whitespace chars), followed by `@tear:` and
# digits. Captures the full prefix and the digits separately so we can rewrite
# the digit in place. The non-alphanumeric requirement keeps us from matching
# `@tear: 1` inside a string literal like `x = "@tear: 1"`.
LINE_HEADER_RE = re.compile(
    r"^([ \t]*[^A-Za-z0-9\s]+[ \t]*@tear:[ \t]*)(\d+)"
)
# A shebang must stay on line 1; insertion goes after it.
SHEBANG_RE = re.compile(r"^#!")
# PEP 263-style encoding declaration (`# coding: utf-8`, `# -*- coding=... -*-`).
ENCODING_RE = re.compile(r"coding[=:]\s*[-\w.]+")

# Only the first few lines are considered for replacement.
MAX_LINES = 5

# Extensions where we know how to *insert* a fresh header. Replacement works
# universally; only insertion needs the comment markers. Each value is
# (opener, closer) — `closer` is None for line comments (`#`, `//`, `--`, `;`)
# and a string for block comments (`<!-- ... -->`, `/* ... */`).
CommentStyle = tuple[str, str | None]

COMMENT_STYLES: dict[str, CommentStyle] = {
    # Hash line comment
    ".py": ("#", None),
    ".rb": ("#", None),
    ".pl": ("#", None),
    ".sh": ("#", None),
    ".bash": ("#", None),
    ".zsh": ("#", None),
    ".fish": ("#", None),
    ".toml": ("#", None),
    ".yml": ("#", None),
    ".yaml": ("#", None),
    ".r": ("#", None),
    ".ex": ("#", None),
    ".exs": ("#", None),
    # Double-slash line comment
    ".js": ("//", None),
    ".mjs": ("//", None),
    ".cjs": ("//", None),
    ".ts": ("//", None),
    ".tsx": ("//", None),
    ".jsx": ("//", None),
    ".go": ("//", None),
    ".rs": ("//", None),
    ".java": ("//", None),
    ".kt": ("//", None),
    ".swift": ("//", None),
    ".c": ("//", None),
    ".cpp": ("//", None),
    ".cc": ("//", None),
    ".cxx": ("//", None),
    ".h": ("//", None),
    ".hpp": ("//", None),
    ".cs": ("//", None),
    ".scala": ("//", None),
    ".dart": ("//", None),
    ".zig": ("//", None),
    # Double-dash line comment
    ".sql": ("--", None),
    ".lua": ("--", None),
    ".hs": ("--", None),
    ".elm": ("--", None),
    # Semicolon line comment
    ".ini": (";", None),
    ".cfg": (";", None),
    ".clj": (";", None),
    ".lisp": (";", None),
    # HTML / XML / Markdown block comment
    ".html": ("<!--", "-->"),
    ".htm": ("<!--", "-->"),
    ".xml": ("<!--", "-->"),
    ".md": ("<!--", "-->"),
    ".markdown": ("<!--", "-->"),
    ".svg": ("<!--", "-->"),
    # CSS block comment
    ".css": ("/*", "*/"),
    ".scss": ("/*", "*/"),
    ".less": ("/*", "*/"),
}

# Extensionless files keyed by name. Looked up only when `extension` is empty
# or unknown.
FILENAME_STYLES: dict[str, CommentStyle] = {
    "Makefile": ("#", None),
    "Dockerfile": ("#", None),
    "Rakefile": ("#", None),
    "Gemfile": ("#", None),
    ".gitignore": ("#", None),
    ".gitattributes": ("#", None),
    ".dockerignore": ("#", None),
    ".env": ("#", None),
    ".notears": ("#", None),
}
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
def apply_hook(
    content: str,
    *,
    max_tear: int = 3,
    extension: str = ".py",
    filename: str = "",
) -> str:
    """Rewrite (or insert) the `@tear` header in `content` to `max_tear`.

    Two phases:
    1. **Replacement (universal).** Look through the first `MAX_LINES` lines
       for an `@tear: <digit>` in a comment-like position and swap the digit
       for `max_tear`. Indentation, comment markers, trailing closers
       (`-->`, `*/`) and line endings are untouched.
    2. **Insertion (type-specific).** Only when nothing was replaced AND the
       comment syntax is known (looked up by `extension`, then `filename`)
       is a fresh header inserted. Insertion always skips a shebang, and
       also skips a PEP 263-style encoding line — such magic comments exist
       outside Python too, so the check is applied universally.
    """
    lines = content.splitlines(keepends=True)

    changed = False
    for idx in range(min(len(lines), MAX_LINES)):
        updated, hits = LINE_HEADER_RE.subn(rf"\g<1>{max_tear}", lines[idx], count=1)
        if hits:
            lines[idx] = updated
            changed = True

    if changed:
        return "".join(lines)

    style = _resolve_style(extension, filename)
    if style is None:
        # Unknown file type: we don't know how to write a comment here.
        return content

    position = 0
    if lines and SHEBANG_RE.match(lines[0]):
        position = 1
    if position < len(lines) and ENCODING_RE.search(lines[position]):
        position += 1

    lines.insert(position, _format_header(style, max_tear) + _detect_line_ending(lines))
    return "".join(lines)
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
def _resolve_style(extension: str, filename: str) -> CommentStyle | None:
    """Return the comment style for a file, or None if unknown.

    The (case-folded) extension wins; the exact filename is a fallback for
    extensionless files such as Makefile, Dockerfile, or .gitignore.
    """
    by_extension = COMMENT_STYLES.get(extension.lower())
    if by_extension is None:
        return FILENAME_STYLES.get(filename)
    return by_extension
|
|
204
|
+
|
|
205
|
+
|
|
206
|
+
def _format_header(style: CommentStyle, max_tear: int) -> str:
|
|
207
|
+
"""Render an `@tear: N` header in the appropriate comment style."""
|
|
208
|
+
opener, closer = style
|
|
209
|
+
if closer is None:
|
|
210
|
+
return f"{opener} @tear: {max_tear}"
|
|
211
|
+
return f"{opener} @tear: {max_tear} {closer}"
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
def process_file(
    path: Path,
    *,
    max_tear: int,
    exclude: list[str],
    repo_root: Path,
) -> bool:
    """Apply the hook to a single file. Returns True iff the file was modified.

    Excluded paths, missing files, and non-UTF-8 (e.g. binary) files are
    silently skipped. The decision about *what* to do with the file
    (replace / insert / no-op) lives in `apply_hook`.
    """
    if not path.is_file():
        return False
    if is_excluded(path, repo_root, exclude):
        return False
    # Open with newline="" so line endings round-trip unchanged: the default
    # universal-newline mode folds "\r\n" to "\n" on read — which makes
    # `_detect_line_ending` unable to ever see CRLF — and re-translates on
    # write, silently rewriting every line ending in the file. Pin UTF-8 so
    # the result does not depend on the host locale.
    try:
        with path.open(encoding="utf-8", newline="") as fh:
            content = fh.read()
    except UnicodeDecodeError:
        # Binary / non-UTF-8 file: not ours to annotate.
        return False
    new_content = apply_hook(
        content, max_tear=max_tear, extension=path.suffix, filename=path.name
    )
    if new_content == content:
        return False
    with path.open("w", encoding="utf-8", newline="") as fh:
        fh.write(new_content)
    return True
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
def main(argv: list[str] | None = None) -> int:
    """Entry point. Reads file paths from argv, or stdin JSON if none provided.

    Always returns 0: the hook must never block the calling tool, so a broken
    config falls back to defaults and per-file errors are skipped.
    """
    if argv is None:
        argv = sys.argv[1:]

    paths: list[Path] = [Path(arg) for arg in argv] if argv else _paths_from_stdin()
    if not paths:
        return 0

    repo_root = _find_repo_root(paths[0])
    try:
        config = load_config(repo_root)
    except ConfigError:
        # Broken config shouldn't break Claude Code. Fall back to defaults.
        config = TearsConfig()

    for path in paths:
        try:
            process_file(
                path,
                max_tear=config.max_tear,
                exclude=config.exclude,
                repo_root=repo_root,
            )
        # UnicodeDecodeError included: reading a binary file raises it, and
        # one undecodable file must not abort the hook for the rest.
        except (OSError, UnicodeDecodeError):
            continue
    return 0
|
|
267
|
+
|
|
268
|
+
|
|
269
|
+
def _detect_line_ending(lines: list[str]) -> str:
|
|
270
|
+
for line in lines:
|
|
271
|
+
if line.endswith("\r\n"):
|
|
272
|
+
return "\r\n"
|
|
273
|
+
if line.endswith("\n"):
|
|
274
|
+
return "\n"
|
|
275
|
+
return "\n"
|
|
276
|
+
|
|
277
|
+
|
|
278
|
+
def _paths_from_stdin() -> list[Path]:
    """Extract the edited file's path from a hook JSON payload on stdin.

    Expects `{"tool_input": {"file_path": "..."}}`; anything malformed —
    empty input, invalid JSON, wrong shapes — yields an empty list rather
    than an error.
    """
    text = sys.stdin.read().strip()
    if not text:
        return []
    try:
        payload: Any = json.loads(text)
    except json.JSONDecodeError:
        return []
    if not isinstance(payload, dict):
        return []
    tool_input = cast(dict[str, Any], payload).get("tool_input")
    if not isinstance(tool_input, dict):
        return []
    candidate = cast(dict[str, Any], tool_input).get("file_path")
    return [Path(candidate)] if isinstance(candidate, str) else []
|
|
295
|
+
|
|
296
|
+
|
|
297
|
+
def _find_repo_root(start: Path) -> Path:
|
|
298
|
+
"""Walk up from `start` for the repo root. Fall back to cwd.
|
|
299
|
+
|
|
300
|
+
`.git/` wins over `.tears.toml` because nested configs exist legitimately
|
|
301
|
+
(test fixtures, monorepo subprojects). The canonical repo marker is `.git/`.
|
|
302
|
+
Only fall back to `.tears.toml` for repos that haven't been git-init'd yet.
|
|
303
|
+
"""
|
|
304
|
+
here = start.resolve()
|
|
305
|
+
if here.is_file():
|
|
306
|
+
here = here.parent
|
|
307
|
+
ancestors = (here, *here.parents)
|
|
308
|
+
for ancestor in ancestors:
|
|
309
|
+
if (ancestor / ".git").exists():
|
|
310
|
+
return ancestor
|
|
311
|
+
for ancestor in ancestors:
|
|
312
|
+
if (ancestor / ".tears.toml").exists():
|
|
313
|
+
return ancestor
|
|
314
|
+
return Path.cwd()
|
|
315
|
+
|
|
316
|
+
|
|
317
|
+
# Support direct execution (`python -m tears.hook` / `python hook.py`);
# `SystemExit(main())` propagates main()'s return value as the exit code.
if __name__ == "__main__":
    raise SystemExit(main())
|
tears/rules.py
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
# @tear: 3
|
|
2
|
+
"""Pure rule functions: tier comparison and directory requirements.
|
|
3
|
+
|
|
4
|
+
These functions know nothing about files, imports, or graphs — they take primitive
|
|
5
|
+
inputs and return booleans. The checker composes them.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def can_import(
    importer_tier: int,
    target_tier: int,
    resolved_rules: dict[int, frozenset[int]],
) -> bool:
    """True iff tier `importer_tier` may import from tier `target_tier`.

    `resolved_rules` is the pre-computed full matrix from
    `TearsConfig.resolved_import_rules()`, so each edge check is a single
    set-membership test.
    """
    allowed = resolved_rules[importer_tier]
    return target_tier in allowed
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def check_directory_requirement(
    file_path: str,
    file_tier: int,
    requirements: dict[str, int],
) -> bool:
    """Does `file_tier` satisfy the longest-prefix-matching directory requirement?

    Prefix matching is path-segment aware: `src/auth` covers
    `src/auth/tokens.py` but NOT `src/authentic/foo.py`. Files under no
    restricted directory always pass.
    """
    file_parts = _segments(file_path)
    best: tuple[int, int] | None = None  # (match depth, required tier)
    for prefix, required in requirements.items():
        parts = _segments(prefix)
        depth = len(parts)
        if depth > len(file_parts) or file_parts[:depth] != parts:
            continue
        # Strictly deeper matches win; ties keep the earlier entry.
        if best is None or depth > best[0]:
            best = (depth, required)
    return True if best is None else file_tier <= best[1]
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def _segments(path: str) -> tuple[str, ...]:
|
|
52
|
+
return tuple(p for p in path.strip("/").split("/") if p)
|
tears/scan.py
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
# @tear: 3
|
|
2
|
+
"""Scan orchestration and output formatting.
|
|
3
|
+
|
|
4
|
+
Loads the config, builds the import graph via grimp, runs the checker, prints a
|
|
5
|
+
human-readable report. The exact output format here is pinned by snapshot tests
|
|
6
|
+
in `tests/scan/fixtures/`.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from __future__ import annotations
|
|
10
|
+
|
|
11
|
+
from io import StringIO
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
|
|
14
|
+
from tears.checker import CheckReport, FileReport, check
|
|
15
|
+
from tears.config import load_config
|
|
16
|
+
from tears.graph.grimp_builder import build_grimp_graph
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def run_scan(repo_root: Path) -> tuple[CheckReport, str]:
    """Scan `repo_root` end to end: load config, build the import graph,
    run the checker. Returns the report plus its formatted text."""
    cfg = load_config(repo_root)
    graph = build_grimp_graph(repo_root, cfg)
    report = check(graph, cfg, repo_root=repo_root)
    rendered = format_report(report, repo_root=repo_root)
    return report, rendered
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def format_report(report: CheckReport, *, repo_root: Path) -> str:
    """Render a `CheckReport` as human-readable text: per-file sections
    followed by the totals footer."""
    sections = [_format_file(fr, repo_root=repo_root) for fr in report.files]
    sections.append(_format_summary(report))
    return "".join(sections)
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def _format_file(fr: FileReport, *, repo_root: Path) -> str:
    """One status line per file (label, relative path, tear), then its issues.

    Files with issues get a trailing blank line to separate them visually.
    """
    status_label = {"ok": "OK   ", "warn": "WARN ", "fail": "FAIL "}[fr.status]
    tear_note = f" (tear {fr.tier})" if fr.tier is not None else ""
    head = f"{status_label} {_relative(fr.path, repo_root)}{tear_note}\n"
    if not fr.issues:
        return head
    detail = "".join(f"  - {issue.message}\n" for issue in fr.issues)
    return head + detail + "\n"
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def _format_summary(report: CheckReport) -> str:
    """Totals footer: '<n> files checked, <f> failures, <w> warnings'."""
    counts = (
        (len(report.files), "file", "files"),
        (report.failure_count, "failure", "failures"),
        (report.warning_count, "warning", "warnings"),
    )
    parts = [f"{n} {_plural(n, one, many)}" for n, one, many in counts]
    return f"{parts[0]} checked, {parts[1]}, {parts[2]}\n"
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def _plural(n: int, singular: str, plural: str) -> str:
|
|
58
|
+
return singular if n == 1 else plural
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def _relative(path: Path, root: Path) -> str:
|
|
62
|
+
try:
|
|
63
|
+
return path.resolve().relative_to(root.resolve()).as_posix()
|
|
64
|
+
except ValueError:
|
|
65
|
+
return path.as_posix()
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
Metadata-Version: 2.3
|
|
2
|
+
Name: tears-cli
|
|
3
|
+
Version: 0.1.0a1
|
|
4
|
+
Summary: Tiered Enforcement, Authorship Review System — vibe-code responsibly.
|
|
5
|
+
Author: Hillel Twersky
|
|
6
|
+
Author-email: Hillel Twersky <35217356+Thillel@users.noreply.github.com>
|
|
7
|
+
Requires-Dist: grimp>=3.4
|
|
8
|
+
Requires-Python: >=3.11
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
tears/__init__.py,sha256=jgPo5r8xynvu78EjGYFfiPXPxQ_xY5XhdEgYXwPh9lM,11
|
|
2
|
+
tears/__main__.py,sha256=6nzjgYLoIrtot_o2FvWuKb6j6jo_rMGczVmiSEL3vEE,64
|
|
3
|
+
tears/checker.py,sha256=fC1lPsEpCuY590a_CNE8J3SQMgnTC34rw5cUTQQZZLE,4409
|
|
4
|
+
tears/cli.py,sha256=rh_ozR4cbRpy4H4rRLScFnyi9ak_v5ZPDHT34jC1tOs,930
|
|
5
|
+
tears/config.py,sha256=uAgT1DKxc3WoOPEof7tEmdKCtbmi8r2o8sMpkl9g1Wc,7401
|
|
6
|
+
tears/exclude.py,sha256=nkqvDESnkQhcIEAHw6zET2mW6ikHkVYDp_F6XusmG2I,1197
|
|
7
|
+
tears/graph/__init__.py,sha256=RW5Mr-3bCmsCOchh-zLxdJvUMQE1br2oz2DLZ2Mdda0,1236
|
|
8
|
+
tears/graph/grimp_builder.py,sha256=o8G2tTiTv2wpYsd-meINzsIYG1IEGXQ7UjrOlMoRTAE,4338
|
|
9
|
+
tears/header.py,sha256=MGWFgAK-YbrEKnaD_BgT6KbFn4rfi-QMbXfAbXpgKN4,973
|
|
10
|
+
tears/hook.py,sha256=leFpCPnmQdH8wsqOkA5yJv-WcK81_zS5MyUQSQjOWnc,10770
|
|
11
|
+
tears/rules.py,sha256=3kwo91eepQYgBmbHNZEYdhXTttQcAPqqaG0eUdPe_fA,1693
|
|
12
|
+
tears/scan.py,sha256=TyHJgIqfFY7fiMxWxrPb38WG7B2Z8Dp2gCKVilKDa3o,2187
|
|
13
|
+
tears_cli-0.1.0a1.dist-info/WHEEL,sha256=fWriCkzqm-pffF5af4gJC9iI5FMFaJTuN9UxxxzOmdY,81
|
|
14
|
+
tears_cli-0.1.0a1.dist-info/entry_points.txt,sha256=LQ_6hCwT5_mYfi1Tu3ad5F6YrSYuGYhhyVb7qkgaHyQ,42
|
|
15
|
+
tears_cli-0.1.0a1.dist-info/METADATA,sha256=6PG7a09-6Ch4StEN2yRWdZJ2Krj6lOR8jO_cMTCjOqQ,282
|
|
16
|
+
tears_cli-0.1.0a1.dist-info/RECORD,,
|