tears-cli 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tears/__init__.py +1 -0
- tears/__main__.py +4 -0
- tears/checker.py +153 -0
- tears/cli.py +341 -0
- tears/config.py +235 -0
- tears/exclude.py +38 -0
- tears/graph/__init__.py +42 -0
- tears/graph/grimp_builder.py +145 -0
- tears/header.py +30 -0
- tears/hook.py +84 -0
- tears/mutate.py +124 -0
- tears/rules.py +52 -0
- tears/scan.py +83 -0
- tears/styles.py +91 -0
- tears_cli-0.1.0.dist-info/METADATA +29 -0
- tears_cli-0.1.0.dist-info/RECORD +18 -0
- tears_cli-0.1.0.dist-info/WHEEL +4 -0
- tears_cli-0.1.0.dist-info/entry_points.txt +3 -0
tears/config.py
ADDED
|
@@ -0,0 +1,235 @@
|
|
|
1
|
+
# @tear: 3
|
|
2
|
+
"""`.tears.toml` parsing and validation."""
|
|
3
|
+
|
|
4
|
+
from __future__ import annotations
|
|
5
|
+
|
|
6
|
+
import tomllib
|
|
7
|
+
from dataclasses import dataclass, field
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
from typing import Any, cast
|
|
10
|
+
|
|
11
|
+
CONFIG_FILENAME = ".tears.toml"
|
|
12
|
+
MISSING_HEADER_VALUES = ("warn", "error")
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def _path_segments(path: str) -> tuple[str, ...]:
    """Split a posix-style path into its non-empty segments.

    Leading/trailing slashes and empty segments (from doubled slashes) are
    dropped, so "/a//b/" yields ("a", "b").
    """
    segments = path.strip("/").split("/")
    return tuple(filter(None, segments))
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class ConfigError(ValueError):
    """Raised when `.tears.toml` is malformed or fails schema validation.

    Messages raised during loading are prefixed with the config filename
    (see `load_config` and `_from_mapping` in this module).
    """
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@dataclass(frozen=True)
|
|
24
|
+
class TearsConfig:
|
|
25
|
+
"""Validated, resolved tears configuration.
|
|
26
|
+
|
|
27
|
+
`directory_requirements` keys are normalized (trailing slashes stripped).
|
|
28
|
+
`import_rules` is the raw, possibly-partial mapping; use
|
|
29
|
+
`resolved_import_rules()` to get the full per-tier allow-set with defaults filled in.
|
|
30
|
+
"""
|
|
31
|
+
|
|
32
|
+
max_tear: int = 3
|
|
33
|
+
directory_requirements: dict[str, int] = field(default_factory=lambda: {})
|
|
34
|
+
exclude: list[str] = field(default_factory=lambda: [])
|
|
35
|
+
source_roots: list[str] = field(default_factory=lambda: ["."])
|
|
36
|
+
import_rules: dict[int, int] | None = None
|
|
37
|
+
missing_header: str = "warn"
|
|
38
|
+
default_tear: int | None = None
|
|
39
|
+
default_tears: dict[str, int] = field(default_factory=lambda: {})
|
|
40
|
+
|
|
41
|
+
def __post_init__(self) -> None:
|
|
42
|
+
if self.max_tear < 1:
|
|
43
|
+
raise ConfigError(f"max_tear must be at least 1, got {self.max_tear}")
|
|
44
|
+
if self.missing_header not in MISSING_HEADER_VALUES:
|
|
45
|
+
raise ConfigError(
|
|
46
|
+
f"missing_header must be one of {MISSING_HEADER_VALUES}, "
|
|
47
|
+
f"got {self.missing_header!r}"
|
|
48
|
+
)
|
|
49
|
+
for path, tier in self.directory_requirements.items():
|
|
50
|
+
if not 0 <= tier <= self.max_tear:
|
|
51
|
+
raise ConfigError(
|
|
52
|
+
f"directory_requirements[{path!r}] = {tier}: "
|
|
53
|
+
f"tear level {tier} exceeds max_tear {self.max_tear}"
|
|
54
|
+
)
|
|
55
|
+
if self.import_rules is not None:
|
|
56
|
+
for importer, max_allowed in self.import_rules.items():
|
|
57
|
+
if not 0 <= importer <= self.max_tear:
|
|
58
|
+
raise ConfigError(
|
|
59
|
+
f"import_rules key {importer}: "
|
|
60
|
+
f"tear level {importer} exceeds max_tear {self.max_tear}"
|
|
61
|
+
)
|
|
62
|
+
if not 0 <= max_allowed <= self.max_tear:
|
|
63
|
+
raise ConfigError(
|
|
64
|
+
f"import_rules[{importer}] = {max_allowed}: "
|
|
65
|
+
f"max_allowed {max_allowed} exceeds max_tear {self.max_tear}"
|
|
66
|
+
)
|
|
67
|
+
if self.default_tear is not None and not 0 <= self.default_tear <= self.max_tear:
|
|
68
|
+
raise ConfigError(f"default_tear {self.default_tear} exceeds max_tear {self.max_tear}")
|
|
69
|
+
for path, tear in self.default_tears.items():
|
|
70
|
+
if not 0 <= tear <= self.max_tear:
|
|
71
|
+
raise ConfigError(
|
|
72
|
+
f"default_tears[{path!r}] = {tear}: "
|
|
73
|
+
f"tear level {tear} exceeds max_tear {self.max_tear}"
|
|
74
|
+
)
|
|
75
|
+
|
|
76
|
+
def resolved_import_rules(self) -> dict[int, frozenset[int]]:
|
|
77
|
+
"""Full matrix with defaults filled in for any unspecified tier."""
|
|
78
|
+
resolved: dict[int, frozenset[int]] = {}
|
|
79
|
+
for tier in range(self.max_tear + 1):
|
|
80
|
+
if self.import_rules is not None and tier in self.import_rules:
|
|
81
|
+
resolved[tier] = frozenset(range(self.import_rules[tier] + 1))
|
|
82
|
+
else:
|
|
83
|
+
resolved[tier] = frozenset(range(tier + 1))
|
|
84
|
+
return resolved
|
|
85
|
+
|
|
86
|
+
def resolve_missing_tier(self, rel_path: str) -> tuple[int, bool]:
|
|
87
|
+
"""Return (effective_tier, was_defaulted) for a file with no @tear header.
|
|
88
|
+
|
|
89
|
+
Lookup order: longest-prefix match in default_tears → global default_tear
|
|
90
|
+
→ max_tear (not defaulted; caller should emit the missing-header warning).
|
|
91
|
+
"""
|
|
92
|
+
file_segs = _path_segments(rel_path)
|
|
93
|
+
longest_len = -1
|
|
94
|
+
matched: int | None = None
|
|
95
|
+
for dir_key, tier in self.default_tears.items():
|
|
96
|
+
dir_segs = _path_segments(dir_key)
|
|
97
|
+
if len(dir_segs) > len(file_segs):
|
|
98
|
+
continue
|
|
99
|
+
if file_segs[: len(dir_segs)] != dir_segs:
|
|
100
|
+
continue
|
|
101
|
+
if len(dir_segs) > longest_len:
|
|
102
|
+
longest_len = len(dir_segs)
|
|
103
|
+
matched = tier
|
|
104
|
+
if matched is not None:
|
|
105
|
+
return matched, True
|
|
106
|
+
if self.default_tear is not None:
|
|
107
|
+
return self.default_tear, True
|
|
108
|
+
return self.max_tear, False
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
def load_config(repo_root: Path) -> TearsConfig:
    """Load `.tears.toml` from `repo_root`. Missing file => defaults.

    Malformed TOML or a schema failure raises `ConfigError` with a clear message
    naming the file and the problem.
    """
    config_path = repo_root / CONFIG_FILENAME
    try:
        # TOML documents are UTF-8 by specification; a bare read_text() would
        # use the locale's default encoding and misdecode on some platforms.
        text = config_path.read_text(encoding="utf-8")
    except FileNotFoundError:
        # No config file at all is fine: fall back to defaults. EAFP here also
        # avoids the exists()/read race of the previous check-then-read form.
        return TearsConfig()

    try:
        raw = tomllib.loads(text)
    except tomllib.TOMLDecodeError as exc:
        raise ConfigError(f"{CONFIG_FILENAME}: malformed TOML: {exc}") from exc

    return _from_mapping(raw, source=CONFIG_FILENAME)
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
def _from_mapping(raw: dict[str, Any], *, source: str) -> TearsConfig:
    """Build a `TearsConfig` from a parsed TOML mapping.

    `source` is the filename used to prefix every error message. Only keys
    present in `raw` are forwarded; absent keys keep the dataclass defaults.
    Raises `ConfigError` on any type or schema violation.
    """
    kwargs: dict[str, Any] = {}

    if "max_tear" in raw:
        kwargs["max_tear"] = _require_int(raw["max_tear"], "max_tear", source)

    if "directory_requirements" in raw:
        dr_raw = _require_mapping(raw["directory_requirements"], "directory_requirements", source)
        normalized: dict[str, int] = {}
        for key, value in dr_raw.items():
            # bool is a subclass of int, so a TOML `true` must be rejected explicitly.
            if not isinstance(key, str) or not isinstance(value, int) or isinstance(value, bool):
                raise ConfigError(
                    f"{source}: directory_requirements entries must be str -> int, "
                    f"got {key!r} -> {value!r}"
                )
            # Trailing slashes are insignificant in directory keys.
            normalized[key.rstrip("/")] = value
        kwargs["directory_requirements"] = normalized

    if "exclude" in raw:
        exclude_raw = _require_list(raw["exclude"], "exclude", source)
        exclude: list[str] = []
        for item in exclude_raw:
            if not isinstance(item, str):
                raise ConfigError(f"{source}: exclude entries must be strings, got {item!r}")
            exclude.append(item)
        kwargs["exclude"] = exclude

    if "imports" in raw:
        # `source_roots` lives under an [imports] table in the TOML file.
        imports_raw = _require_mapping(raw["imports"], "imports", source)
        if "source_roots" in imports_raw:
            sr_raw = imports_raw["source_roots"]
            if not isinstance(sr_raw, list):
                raise ConfigError(
                    f"{source}: imports.source_roots must be a list, got {type(sr_raw).__name__}"
                )
            source_roots: list[str] = []
            for item in cast(list[Any], sr_raw):
                if not isinstance(item, str):
                    raise ConfigError(
                        f"{source}: imports.source_roots entries must be strings, got {item!r}"
                    )
                source_roots.append(item)
            kwargs["source_roots"] = source_roots

    if "import_rules" in raw:
        ir_raw = _require_mapping(raw["import_rules"], "import_rules", source)
        rules: dict[int, int] = {}
        for key, value in ir_raw.items():
            # TOML keys are always strings; convert to int.
            try:
                key_int = int(cast(str, key))
            except (ValueError, TypeError) as exc:
                raise ConfigError(
                    f"{source}: import_rules keys must be integer-valued strings, got {key!r}"
                ) from exc
            # Reject bool values here too (bool subclasses int).
            if not isinstance(value, int) or isinstance(value, bool):
                raise ConfigError(
                    f"{source}: import_rules[{key_int}] must be an int, got {type(value).__name__}"
                )
            rules[key_int] = value
        kwargs["import_rules"] = rules

    if "missing_header" in raw:
        kwargs["missing_header"] = _require_str(raw["missing_header"], "missing_header", source)

    if "default_tear" in raw:
        kwargs["default_tear"] = _require_int(raw["default_tear"], "default_tear", source)

    if "default_tears" in raw:
        dt_raw = _require_mapping(raw["default_tears"], "default_tears", source)
        default_tears: dict[str, int] = {}
        for key, value in dt_raw.items():
            if not isinstance(key, str) or not isinstance(value, int) or isinstance(value, bool):
                raise ConfigError(
                    f"{source}: default_tears entries must be str -> int, got {key!r} -> {value!r}"
                )
            default_tears[key.rstrip("/")] = value
        kwargs["default_tears"] = default_tears

    try:
        return TearsConfig(**kwargs)
    except ConfigError as exc:
        # Range validation happens in TearsConfig.__post_init__; re-raise with
        # the filename prefix and suppress the redundant chained traceback.
        raise ConfigError(f"{source}: {exc}") from None
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
def _require_int(value: Any, key: str, source: str) -> int:
    """Validate that `value` is an int (bools rejected) and return it."""
    # bool subclasses int, so check it first to keep TOML `true` out.
    if isinstance(value, bool) or not isinstance(value, int):
        raise ConfigError(f"{source}: {key} must be int, got {type(value).__name__}")
    return value
|
|
218
|
+
|
|
219
|
+
|
|
220
|
+
def _require_str(value: Any, key: str, source: str) -> str:
    """Validate that `value` is a str and return it."""
    if isinstance(value, str):
        return value
    raise ConfigError(f"{source}: {key} must be str, got {type(value).__name__}")
|
|
224
|
+
|
|
225
|
+
|
|
226
|
+
def _require_mapping(value: Any, key: str, source: str) -> dict[Any, Any]:
    """Validate that `value` is a dict (a TOML table) and return it."""
    if isinstance(value, dict):
        return cast(dict[Any, Any], value)
    raise ConfigError(f"{source}: {key} must be a mapping, got {type(value).__name__}")
|
|
230
|
+
|
|
231
|
+
|
|
232
|
+
def _require_list(value: Any, key: str, source: str) -> list[Any]:
    """Validate that `value` is a list (a TOML array) and return it."""
    if isinstance(value, list):
        return cast(list[Any], value)
    raise ConfigError(f"{source}: {key} must be a list, got {type(value).__name__}")
|
tears/exclude.py
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
# @tear: 2
|
|
2
|
+
"""Exclude-pattern matching shared by the graph builder and the Claude hook.
|
|
3
|
+
|
|
4
|
+
Patterns are fnmatch-style with `**` extended to match across path separators
|
|
5
|
+
(`**/foo.py` matches `a/b/c/foo.py`). Paths are matched relative to the repo root
|
|
6
|
+
in POSIX form.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from __future__ import annotations
|
|
10
|
+
|
|
11
|
+
import fnmatch
|
|
12
|
+
import re
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def is_excluded(file_path: Path, repo_root: Path, patterns: list[str]) -> bool:
    """Report whether `file_path` matches any exclude pattern under `repo_root`.

    Matching is done on the repo-relative POSIX path. A path that cannot be
    made relative to the root (even after resolving symlinks) is never excluded.
    """
    if not patterns:
        return False
    try:
        rel = file_path.relative_to(repo_root).as_posix()
    except ValueError:
        # Retry with both sides resolved, in case one of them is symlinked
        # or given in a non-canonical form.
        try:
            rel = file_path.resolve().relative_to(repo_root.resolve()).as_posix()
        except ValueError:
            return False
    for pattern in patterns:
        if _match_glob(rel, pattern):
            return True
    return False
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def _match_glob(path: str, pattern: str) -> bool:
    """True if `path` (repo-relative, POSIX form) matches `pattern` entirely."""
    return re.fullmatch(_glob_to_regex(pattern), path) is not None


def _glob_to_regex(pattern: str) -> str:
    """Translate an fnmatch pattern, with `**` matching across `/`, to a regex.

    `fnmatch.translate` has no cross-separator wildcard, so `**` is protected
    behind a NUL-delimited placeholder (which cannot occur in a real pattern),
    translated, then restored as `.*`.
    """
    placeholder = "\x00DOUBLESTAR\x00"
    protected = pattern.replace("**", placeholder)
    translated = fnmatch.translate(protected)
    # Drop translate's trailing `\Z` anchor; `re.fullmatch` anchors for us.
    # Use removesuffix, not rstrip("\\Z"): rstrip strips a *character set* and
    # could eat trailing `Z`/`\` characters that belong to the pattern itself.
    translated = translated.removesuffix("\\Z")
    return translated.replace(re.escape(placeholder), ".*")
|
tears/graph/__init__.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
# @tear: 3
|
|
2
|
+
"""Import graph abstraction.
|
|
3
|
+
|
|
4
|
+
The checker depends on the `ImportGraph` Protocol; concrete builders (currently
|
|
5
|
+
`grimp_builder.GrimpImportGraph`) implement it. This lets us swap builders
|
|
6
|
+
without touching checker logic.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from __future__ import annotations
|
|
10
|
+
|
|
11
|
+
from collections.abc import Iterable
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from typing import Protocol
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class ImportGraph(Protocol):
    """Builders provide repo-wide tier and import data.

    Structural (duck-typed) interface: any object with these four methods
    satisfies it — no inheritance required.
    """

    def files(self) -> Iterable[Path]:
        """All in-scope Python files in the repo (excluded files omitted)."""
        ...

    def tier_of(self, file: Path) -> int | None:
        """Tier from the file's @tear header, or None if missing/malformed."""
        ...

    def imports_of(self, file: Path) -> Iterable[Path]:
        """Files this file directly imports — resolved to repo files.

        Unresolvable targets (stdlib, third-party, dynamic) and excluded targets
        are omitted.
        """
        ...

    def importers_of(self, file: Path) -> Iterable[Path]:
        """Files that directly import this file. Builders may raise
        NotImplementedError if reverse-dep queries aren't needed by the checker.
        """
        ...
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
# Public API of the package: the checker depends only on this Protocol;
# concrete builders live in sibling modules (e.g. grimp_builder).
__all__ = ["ImportGraph"]
|
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
# @tear: 3
|
|
2
|
+
"""grimp-backed `ImportGraph` implementation.
|
|
3
|
+
|
|
4
|
+
Builds the graph by:
|
|
5
|
+
1. Walking the configured `source_roots` to discover top-level Python packages.
|
|
6
|
+
2. Calling `grimp.build_graph(*pkgs)` to get all imports.
|
|
7
|
+
3. Mapping grimp's dotted module names back to absolute repo file paths.
|
|
8
|
+
4. Parsing each file's `@tear` header.
|
|
9
|
+
5. Applying the `exclude` patterns so excluded files are invisible to the checker.
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
from __future__ import annotations
|
|
13
|
+
|
|
14
|
+
import contextlib
|
|
15
|
+
import subprocess
|
|
16
|
+
import sys
|
|
17
|
+
from collections.abc import Iterable
|
|
18
|
+
from pathlib import Path
|
|
19
|
+
from typing import Any, cast
|
|
20
|
+
|
|
21
|
+
import grimp
|
|
22
|
+
|
|
23
|
+
from tears.config import TearsConfig
|
|
24
|
+
from tears.exclude import is_excluded
|
|
25
|
+
from tears.header import parse_tear_level
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class GrimpImportGraph:
|
|
29
|
+
"""`ImportGraph` over a real repo, backed by grimp."""
|
|
30
|
+
|
|
31
|
+
def __init__(
|
|
32
|
+
self,
|
|
33
|
+
*,
|
|
34
|
+
files: dict[Path, int | None],
|
|
35
|
+
imports: dict[Path, frozenset[Path]],
|
|
36
|
+
importers: dict[Path, frozenset[Path]],
|
|
37
|
+
) -> None:
|
|
38
|
+
self._files = files
|
|
39
|
+
self._imports = imports
|
|
40
|
+
self._importers = importers
|
|
41
|
+
|
|
42
|
+
def files(self) -> Iterable[Path]:
|
|
43
|
+
return self._files.keys()
|
|
44
|
+
|
|
45
|
+
def tier_of(self, file: Path) -> int | None:
|
|
46
|
+
return self._files.get(file)
|
|
47
|
+
|
|
48
|
+
def imports_of(self, file: Path) -> Iterable[Path]:
|
|
49
|
+
return self._imports.get(file, frozenset())
|
|
50
|
+
|
|
51
|
+
def importers_of(self, file: Path) -> Iterable[Path]:
|
|
52
|
+
return self._importers.get(file, frozenset())
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def build_grimp_graph(repo_root: Path, config: TearsConfig) -> GrimpImportGraph:
    """Build the import graph for `repo_root` under `config`."""
    repo_root = repo_root.resolve()
    source_root_paths = [(repo_root / r).resolve() for r in config.source_roots]

    # Discover importable top-level packages: direct children of each source
    # root that contain an __init__.py and are not git-ignored.
    packages: list[tuple[str, Path]] = []
    for sr in source_root_paths:
        if not sr.is_dir():
            continue
        for child in sorted(sr.iterdir()):
            if (
                child.is_dir()
                and (child / "__init__.py").exists()
                and not _git_ignored(child, repo_root)
            ):
                packages.append((child.name, sr))

    if not packages:
        # Nothing importable: return an empty graph rather than calling grimp.
        return GrimpImportGraph(files={}, imports={}, importers={})

    # Temporarily prepend the source roots to sys.path — presumably so grimp
    # can locate/import the packages by name; confirm against grimp docs.
    # Only roots we actually added are removed afterwards.
    sys_path_added: list[str] = []
    for sr in source_root_paths:
        sr_str = str(sr)
        if sr_str not in sys.path:
            sys.path.insert(0, sr_str)
            sys_path_added.append(sr_str)

    try:
        package_names = [name for name, _ in packages]
        graph = cast(Any, grimp.build_graph(*package_names))  # pyright: ignore[reportUnknownMemberType]
    finally:
        for sr_str in sys_path_added:
            # suppress(ValueError): another thread/hook may have removed it.
            with contextlib.suppress(ValueError):
                sys.path.remove(sr_str)

    package_roots = {name: sr / name for name, sr in packages}
    module_to_file = _build_module_index(package_roots)

    # Excluded files are dropped here, making them invisible to the checker.
    files: dict[Path, int | None] = {}
    for file_path in module_to_file.values():
        if is_excluded(file_path, repo_root, config.exclude):
            continue
        files[file_path] = parse_tear_level(file_path.read_text(), max_tear=config.max_tear)

    imports: dict[Path, set[Path]] = {f: set() for f in files}
    importers: dict[Path, set[Path]] = {f: set() for f in files}

    # Invert module_to_file to translate grimp's dotted names back to paths.
    file_to_module = {f: m for m, f in module_to_file.items()}

    for file_path in files:
        module = file_to_module[file_path]
        for imported_module in graph.find_modules_directly_imported_by(module):
            target = module_to_file.get(imported_module)
            # Skip targets that didn't resolve to a repo file or were excluded.
            if target is None or target not in files:
                continue
            imports[file_path].add(target)
            importers[target].add(file_path)

    return GrimpImportGraph(
        files=files,
        imports={k: frozenset(v) for k, v in imports.items()},
        importers={k: frozenset(v) for k, v in importers.items()},
    )
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
def _build_module_index(package_roots: dict[str, Path]) -> dict[str, Path]:
    """Map every reachable dotted module name to its resolved source file.

    `package_roots` maps a top-level package name to the directory holding it.
    `pkg/sub/__init__.py` is indexed as `pkg.sub`, not `pkg.sub.__init__`.
    """
    index: dict[str, Path] = {}
    for pkg_name, pkg_root in package_roots.items():
        for source in pkg_root.rglob("*.py"):
            rel_parts = source.relative_to(pkg_root).with_suffix("").parts
            # An __init__.py names its containing package, not itself.
            if rel_parts[-1] == "__init__":
                rel_parts = rel_parts[:-1]
            dotted = ".".join((pkg_name, *rel_parts)) if rel_parts else pkg_name
            index[dotted] = source.resolve()
    return index
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
def _git_ignored(path: Path, repo_root: Path) -> bool:
    """Return True if `path` is ignored by git, False if not or if git is unavailable."""
    command = ["git", "check-ignore", "-q", "--", str(path)]
    try:
        completed = subprocess.run(
            command,
            capture_output=True,
            cwd=repo_root,
            timeout=5,
        )
    except (OSError, subprocess.TimeoutExpired):
        # git missing, not executable, or wedged: treat nothing as ignored
        # rather than failing the whole graph build.
        return False
    # `git check-ignore -q` exits 0 exactly when the path is ignored.
    return completed.returncode == 0
|
tears/header.py
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
# @tear: 3
|
|
2
|
+
"""Parse the `@tear` header from a Python source file."""
|
|
3
|
+
|
|
4
|
+
from __future__ import annotations
|
|
5
|
+
|
|
6
|
+
import re
|
|
7
|
+
|
|
8
|
+
HEADER_RE = re.compile(r"^[ \t]*#[ \t]*@tear:[ \t]*(\d+)(?![\w.])")
|
|
9
|
+
|
|
10
|
+
MAX_LINES = 5
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def parse_tear_level(content: str, *, max_tear: int = 3) -> int | None:
|
|
14
|
+
"""Return the worst (highest) valid tier found in the first 5 lines, else None.
|
|
15
|
+
|
|
16
|
+
A valid header is a line whose first non-whitespace token is `#`, followed by
|
|
17
|
+
`@tear: <digits>`, where digits parse as an integer in [0, max_tear]. Out-of-range
|
|
18
|
+
values are treated as malformed (not as that integer). `1.5` and `-1` do not match.
|
|
19
|
+
"""
|
|
20
|
+
worst: int | None = None
|
|
21
|
+
for line in content.splitlines()[:MAX_LINES]:
|
|
22
|
+
match = HEADER_RE.match(line)
|
|
23
|
+
if match is None:
|
|
24
|
+
continue
|
|
25
|
+
value = int(match.group(1))
|
|
26
|
+
if value < 0 or value > max_tear:
|
|
27
|
+
continue
|
|
28
|
+
if worst is None or value > worst:
|
|
29
|
+
worst = value
|
|
30
|
+
return worst
|
tears/hook.py
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
# @tear: 3
|
|
2
|
+
"""Claude Code PostToolUse hook for tears.
|
|
3
|
+
|
|
4
|
+
Demotes the `@tear` header in every file Claude writes or edits to `max_tear`
|
|
5
|
+
(default 3). This is the enforcement backstop: a human reviewing the resulting
|
|
6
|
+
diff must consciously re-promote the tier to attest that they read the code.
|
|
7
|
+
If they leave the demotion in place, that's the attestation that they didn't.
|
|
8
|
+
|
|
9
|
+
Invocation:
|
|
10
|
+
- As a Claude Code hook: receives a JSON payload on stdin describing the tool
|
|
11
|
+
call (`{"tool_input": {"file_path": "..."}, ...}`). Register in
|
|
12
|
+
`.claude/settings.json` under `hooks.PostToolUse`.
|
|
13
|
+
- Manually: `python -m tears.hook FILE [FILE ...]`. Useful for testing and
|
|
14
|
+
bulk-demoting a list of files.
|
|
15
|
+
|
|
16
|
+
Behavior:
|
|
17
|
+
- **Silent on bad input.** Empty stdin, malformed JSON, missing fields, paths
|
|
18
|
+
that don't exist, and excluded paths all return 0 with no output.
|
|
19
|
+
- **Broken `.tears.toml` is non-fatal.** Falls back to `TearsConfig()` defaults.
|
|
20
|
+
|
|
21
|
+
The mutation logic lives in `tears.mutate`; this module is the entry point only.
|
|
22
|
+
`set_tear` and `process_file` are re-exported here for backward compatibility.
|
|
23
|
+
"""
|
|
24
|
+
|
|
25
|
+
from __future__ import annotations
|
|
26
|
+
|
|
27
|
+
import json
|
|
28
|
+
import sys
|
|
29
|
+
from pathlib import Path
|
|
30
|
+
from typing import Any, cast
|
|
31
|
+
|
|
32
|
+
from tears.config import ConfigError, TearsConfig, load_config
|
|
33
|
+
from tears.mutate import find_repo_root, process_file
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def main(argv: list[str] | None = None) -> int:
    """Entry point. Reads file paths from argv, or stdin JSON if none provided.

    Always returns 0: bad input and unreadable files are skipped silently by
    design (this runs as a Claude Code hook and must never block the tool).
    """
    args = sys.argv[1:] if argv is None else argv

    if args:
        targets = [Path(a) for a in args]
    else:
        targets = _paths_from_stdin()
    if not targets:
        return 0

    # The first path anchors repo discovery; a broken config is non-fatal
    # and falls back to the built-in defaults.
    root = find_repo_root(targets[0])
    try:
        config = load_config(root)
    except ConfigError:
        config = TearsConfig()

    for target in targets:
        try:
            process_file(
                target,
                tear=config.max_tear,
                exclude=config.exclude,
                repo_root=root,
            )
        except OSError:
            # Unreadable/unwritable file: skip it and keep going.
            pass
    return 0
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def _paths_from_stdin() -> list[Path]:
    """Extract `tool_input.file_path` from a Claude hook JSON payload on stdin.

    Any malformed or unexpected payload — empty stdin, invalid JSON, a
    non-object payload, or missing/ill-typed fields — yields an empty list.
    """
    data = sys.stdin.read().strip()
    if not data:
        return []
    try:
        payload: Any = json.loads(data)
    except json.JSONDecodeError:
        return []
    if not isinstance(payload, dict):
        return []
    tool_input = cast(dict[str, Any], payload).get("tool_input")
    if not isinstance(tool_input, dict):
        return []
    file_path = cast(dict[str, Any], tool_input).get("file_path")
    return [Path(file_path)] if isinstance(file_path, str) else []
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
# Script entry: supports `python -m tears.hook FILE ...` for manual/bulk use.
if __name__ == "__main__":
    raise SystemExit(main())
|