@torka/claude-qol 0.4.0 → 0.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -174,6 +174,20 @@ Edit `.claude/scripts/auto_approve_safe.rules.json`:
  }
  ```

+ #### Rules Lint (optional)
+
+ Check for invalid, duplicate, or dead patterns in the hook rules:
+
+ ```bash
+ python3 vt-claude-qol/hooks/auto_approve_safe_rules_check.py
+ ```
+
+ By default this checks `vt-claude-qol/hooks/auto_approve_safe.rules.json`. To lint the installed copy, pass it explicitly:
+
+ ```bash
+ python3 vt-claude-qol/hooks/auto_approve_safe_rules_check.py .claude/scripts/auto_approve_safe.rules.json
+ ```
+
  ### Context Monitor

  Status line script showing:
@@ -85,7 +85,7 @@ def split_compound_shell_command(command: str) -> list[str]:
      if not command:
          return []
      # Common patterns produced by agents: `cd x && pnpm test`, `cmd1; cmd2`
-     return [p.strip() for p in re.split(r"\s*(?:&&|;)\s*", command) if p.strip()]
+     return [p.strip() for p in re.split(r"\s*(?:&&|;|\|)\s*", command) if p.strip()]


  def is_shell_file_read_command(command: str) -> bool:
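Note: with the widened split expression above, pipelines are now broken apart as well, so each stage is matched against the rules on its own. A minimal sketch of the new behavior (the sample command is illustrative, not from the package):

```python
import re

# Same split regex as the updated hook line above; the command is made up.
command = "cd app && pnpm test | tee /tmp/test.log"
parts = [p.strip() for p in re.split(r"\s*(?:&&|;|\|)\s*", command) if p.strip()]
print(parts)  # ['cd app', 'pnpm test', 'tee /tmp/test.log']
```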
@@ -212,6 +212,8 @@ def make_decision(tool_name: str, tool_input: dict, rules: dict) -> tuple[str, s

  def output_decision(decision: str, reason: str) -> None:
      """Output the hook decision in Claude Code's expected format."""
+     if decision == "ask":
+         return  # No output = defer to Claude Code's internal permission system
      output = {
          "hookSpecificOutput": {
              "hookEventName": "PreToolUse",
@@ -1,38 +1,31 @@
  {
+ "_doc_allow": "Groups: system info, file reading, version checks, git, node/npm/yarn/pnpm, python, rust/go, search/text tools, network (localhost only), filesystem, github CLI, process management, background tasks",
+ "_doc_deny": "Blocks: privilege escalation, destructive rm, system modification, fork bombs, pipe-to-shell, mass kill, xargs rm, netcat, crontab",
+ "_doc_sensitive": "Protects: env files, crypto keys, SSH, cloud credentials, package registry configs, secrets/tokens, Docker/K8s configs",
+
  "allow_patterns": [
- "^pwd$",
- "^whoami$",
- "^date$",
+ "^(pwd|whoami|date)$",
  "^uname(\\s+-a)?$",
  "^which\\s+\\S+$",
- "^echo\\s+",
+ "^echo\\s+[^|;&]+$",

+ "^(cat|head|tail|wc|less|file|stat|du|df)\\s+[^|;&]+$",
  "^ls(\\s+.*)?$",
- "^cat\\s+",
- "^head\\s+",
- "^tail\\s+",
- "^wc\\s+",
- "^less\\s+",
- "^file\\s+",
- "^stat\\s+",
- "^du\\s+",
- "^df\\s+",
  "^tree(\\s+.*)?$",

- "^python(3)?\\s+--version$",
- "^node\\s+--version$",
- "^npm\\s+--version$",
- "^pnpm\\s+--version$",
- "^yarn\\s+--version$",
- "^uv\\s+--version$",
+ "^(python(3)?|node|npm|pnpm|yarn|uv)\\s+--version$",

- "^git\\s+(status|diff|log|show|branch|remote|stash\\s+list)(\\s+.*)?$",
- "^git\\s+-C\\s+[^|;&]+\\s+(status|diff|log|show|branch)(\\s+.*)?$",
- "^git\\s+-C\\s+[^|;&]+\\s+(add|commit|checkout|fetch|pull|push)(\\s+.*)?$",
+ "^git\\s+-C\\s+[^|;&]+\\s+(status|diff|log|show|branch|add|commit|checkout|fetch|pull|push)(\\s+.*)?$",
+ "^git\\s+(status|diff|log|show|branch|remote|add|commit|checkout|fetch|pull|push|worktree|merge|rebase|tag|switch|restore|rev-parse|ls-files|check-ignore|update-index|stash\\s+(list|push|pop|drop|apply))(\\s+.*)?$",
  "^git\\s+rm\\s+[^|;&]+$",
+ "^git\\s+rev-list\\s+[^|;&]+$",
+ "^git\\s+ls-tree\\s+[^|;&]+\\s*\\|\\s*(grep|head|tail|wc)[^|;&]*$",
+ "^git\\s+commit\\s+-m\\s+\"\\$\\(cat\\s+<<'?EOF'?",
+ "^git\\s+stash(\\s+--include-untracked|\\s+--all|\\s+-u|\\s+-a)?(\\s+-m\\s+.*)?$",

  "^pnpm\\s+(test|run\\s+(test|lint|typecheck|type-check|check|build|dev|start)|install|i|add|remove)(\\s+.*)?$",
- "^npm\\s+(test|run\\s+(test|lint|lint:fix|typecheck|type-check|check|check-types|build|dev|dev:next|start)|install|i|ci)(\\s+.*)?$",
+ "^npm\\s+(test|install|i|ci)(\\s+.*)?$",
+ "^npm\\s+run\\s+[\\w:-]+(\\s+[^|;&]*)?(\\s*2>&1)?(\\s*&)?$",
  "^npm\\s+whoami$",
  "^npm\\s+list(\\s+(-g|--global))?(\\s+[\\w@/-]+)?$",
  "^npm\\s+ls(\\s+[\\w@/-]+)*$",
@@ -40,8 +33,8 @@
  "^npm\\s+pack\\s+[\\w@/-]+\\s+--dry-run",
  "^yarn\\s+(test|lint|typecheck|type-check|check|build|dev|start|install|add|remove)(\\s+.*)?$",
  "^npx\\s+(tsc|eslint|prettier|vitest|jest)(\\s+.*)?$",
- "^npx\\s+tsx\\s+[^|;&]+$",
- "^npx\\s+shadcn@latest\\s+(add|init)(\\s+[\\w@/-]+)*(\\s+--yes)?$",
+ "^npx\\s+shadcn@latest\\s+(add|init)(\\s+[\\w@/-]+)*(\\s+--yes)?(\\s+2>&1)?$",
+ "^(PORT=\\d+\\s+)?npx\\s+playwright\\s+(test|install)\\b",

  "^pytest(\\s+.*)?$",
  "^python(3)?\\s+-m\\s+pytest(\\s+.*)?$",
@@ -56,60 +49,45 @@
  "^cargo\\s+(check|test|clippy|fmt\\s+--check|build)(\\s+.*)?$",
  "^go\\s+(test|vet|fmt|build)(\\s+.*)?$",

- "^jq\\s+",
- "^grep\\s+",
- "^rg\\s+",
- "^find\\s+",
- "^fd\\s+",
- "^ag\\s+",
- "^awk\\s+",
- "^sed\\s+-n\\s+",
- "^sort(\\s+.*)?$",
- "^uniq(\\s+.*)?$",
- "^cut\\s+",
- "^tr\\s+",
- "^diff\\s+",
- "^comm\\s+",
+ "^(jq|grep|rg|fd|ag|awk|cut|tr|diff|comm)\\s+[^|;&]+$",
+ "^find\\s+(?!.*-exec)[^|;&]+$",
+ "^sed\\s+-n\\s+[^|;&]+$",
+ "^(sort|uniq)(\\s+.*)?$",

  "^curl\\s+.*--head",
  "^curl\\s+-I\\s+",
  "^ping\\s+-c\\s+\\d+\\s+",
- "^dig\\s+",
- "^nslookup\\s+",
- "^host\\s+",
+ "^(dig|nslookup|host)\\s+",

  "^mkdir(\\s+.*)?$",
- "^touch\\s+",
- "^cp\\s+",
- "^mv\\s+",
+ "^touch\\s+[^|;&]+$",
+ "^(cp|mv)\\s+[^|;&]+$",
  "^rmdir\\s+[^|;&]+$",

- "^git\\s+(add|commit|checkout|fetch|pull|push|worktree|merge|rebase|stash\\s+(push|pop|drop|apply)|tag|switch|restore)(\\s+.*)?$",
-
  "^gh\\s+(pr|issue|repo|release|workflow|run|api|auth)(\\s+.*)?$",
-
- "^git\\s+add\\s+[^|;&]+\\s*&&\\s*git\\s+commit\\s+-m\\s+",
-
- "^git\\s+rev-parse(\\s+.*)?$",
- "^git\\s+stash(\\s+--include-untracked|\\s+--all|\\s+-u|\\s+-a)?(\\s+-m\\s+.*)?$",
- "^git\\s+rev-list\\s+[^|;&]+$",
- "^git\\s+ls-files(\\s+.*)?$",
- "^git\\s+check-ignore(\\s+.*)?$",
- "^git\\s+update-index(\\s+.*)?$",
-
- "^test\\s+(-[dfeL])\\s+[^|;&]+\\s+(&&|\\|\\|)\\s+echo\\s+",
  "^npm\\s+update\\s+[@\\w/-]+$",

- "^sleep\\s+\\d+$",
+ "^sleep\\s+[0-9.]+$",

- "^chmod\\s+[0-6][0-7][0-7]\\s+",
+ "^chmod\\s+[0-6][0-7][0-7]\\s+[^|;&]+$",

  "^lsof\\s+(-[a-zA-Z]+\\s+)*-?i\\s*[:\\d]+",
+ "^lsof\\s+-ti:\\d+(\\s+2>/dev/null)?$",
  "^npm\\s+run\\s+clean(\\s+.*)?$",
  "^npm\\s+(dedupe|cache\\s+clean)(\\s+.*)?$",
- "^curl\\s+(-[sIo]+\\s+)*https?://localhost[:/][^|;&]*$",
- "^curl\\s+[^|;&]*http://localhost[:/][^|;&]*$",
- "^pgrep\\s+"
+
+ "^curl\\s+(-[a-zA-Z]+\\s+)*https?://localhost[:/][^|;&]*(\\s*2>&1)?(\\s*2>/dev/null)?$",
+ "^curl\\s+-X\\s+(POST|GET|PUT|DELETE)\\s+['\"]?https?://localhost[:/][^|;&]*$",
+ "^curl\\s+-s\\s+-o\\s+/dev/null\\s+-w\\s+[^|;&]*https?://localhost[^|;&]*$",
+
+ "^pgrep\\s+[^|;&]+$",
+ "^ps\\s+aux.*$",
+
+ "^npm\\s+run\\s+dev(\\s+--\\s+-p\\s+\\d+)?\\s*(>\\s*(/tmp/[^|;&]+|/dev/null)\\s+)?2>&1\\s*&$",
+
+ "^test\\s+-[dfeL]\\s+[^|;&]+$",
+
+ "^xxd\\s+[^|;&]+$"
  ],

  "deny_patterns": [
@@ -117,7 +95,8 @@
  "^doas\\b",
  "(?<!git\\s)\\brm\\s+.*(-r|-rf|-fr|--recursive)",
  "(?<!git\\s)\\brm\\s+-[^\\s]*r",
- "^rm\\s+/",
+ "^rm\\s+/$",
+ "^rm\\s+/(etc|usr|var|bin|sbin|lib|boot|dev|proc|sys|root|home)\\b",
  "\\bmkfs\\.",
  "\\bdd\\b.*\\bof=",
  "\\bshutdown\\b",
@@ -134,12 +113,15 @@
  "\\bfork\\s*bomb",
  "\\bkill\\s+-9\\s+-1",
  "\\bpkill\\s+-9",
- "\\bkillall\\b"
+ "\\bkillall\\b",
+ "\\bxargs\\b.*\\brm\\b",
+ "\\b(nc|ncat|netcat)\\b",
+ "\\bcrontab\\s+-[er]"
  ],

  "sensitive_paths": [
- "\\.env$",
- "\\.env\\.",
+ "\\.env(?!\\.example)$",
+ "\\.env\\.(?!example)",
  "\\.pem$",
  "\\.key$",
  "\\.crt$",
@@ -161,6 +143,8 @@
  "\\.netrc$",
  "\\bsecrets?\\b",
  "\\bpassw(?:ord)?s?\\.(txt|json|env|yaml|yml|ini|conf|cfg)$",
- "\\btoken"
+ "\\btokens?\\b",
+ "\\.docker/config\\.json",
+ "\\.kube/config"
  ]
  }
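The tightened `.env` patterns above use negative lookaheads so that `.env.example` is no longer treated as sensitive while real env files still are. A small check with illustrative file names (patterns written as the regexes they decode to):

```python
import re

# The two updated sensitive-path rules from the diff; the file names are examples.
env_rules = [r"\.env(?!\.example)$", r"\.env\.(?!example)"]
for path in (".env", ".env.local", ".env.example"):
    sensitive = any(re.search(rule, path) for rule in env_rules)
    print(path, "->", "sensitive" if sensitive else "ok")
# .env -> sensitive, .env.local -> sensitive, .env.example -> ok
```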
@@ -0,0 +1,166 @@
+ #!/usr/bin/env python3
+ """
+ Lint auto_approve_safe rules for duplicates, invalid regex, and likely-dead patterns.
+
+ Usage:
+     python3 vt-claude-qol/hooks/auto_approve_safe_rules_check.py [path ...]
+ """
+
+ from __future__ import annotations
+
+ import json
+ import re
+ import sys
+ from pathlib import Path
+ from typing import Iterable
+
+
+ DEFAULT_RULES = [
+     Path(__file__).resolve().parent / "auto_approve_safe.rules.json",
+ ]
+
+
+ def load_rules(path: Path) -> dict:
+     with path.open(encoding="utf-8") as f:
+         return json.load(f)
+
+
+ def iter_patterns(rules: dict) -> Iterable[tuple[str, str]]:
+     for key in ("allow_patterns", "deny_patterns", "sensitive_paths"):
+         for pattern in rules.get(key, []):
+             yield key, pattern
+
+
+ def compile_patterns(section: str, patterns: list[str]) -> list[str]:
+     errors: list[str] = []
+     flags = re.IGNORECASE if section in ("allow_patterns", "deny_patterns") else 0
+     for pattern in patterns:
+         try:
+             re.compile(pattern, flags)
+         except re.error as exc:
+             errors.append(f"{section}: {pattern} -> {exc}")
+     return errors
+
+
+ def find_duplicates(patterns: list[str]) -> list[str]:
+     seen: dict[str, int] = {}
+     dups: list[str] = []
+     for pattern in patterns:
+         seen[pattern] = seen.get(pattern, 0) + 1
+     for pattern, count in sorted(seen.items()):
+         if count > 1:
+             dups.append(f"{pattern} (x{count})")
+     return dups
+
+
+ def find_dead_by_split(patterns: list[str]) -> list[str]:
+     """Flag patterns containing literal && or ; outside character classes."""
+     dead: list[str] = []
+     for pattern in patterns:
+         # Strip character classes [...] before checking for && / ;
+         stripped = re.sub(r"\[.*?\]", "", pattern)
+         if "&&" in stripped or re.search(r"(?<!\\);", stripped):
+             dead.append(pattern)
+     return dead
+
+
+ def extract_command_heads(pattern: str) -> set[str]:
+     """
+     Heuristic extraction of leading command(s) for overlap checks.
+     Handles ^cmd and ^(cmd1|cmd2|...)\\s+ forms.
+     """
+     if not pattern.startswith("^"):
+         return set()
+
+     group_match = re.match(r"^\^\(([^)]+)\)\\s+", pattern)
+     if group_match:
+         heads = set()
+         for token in group_match.group(1).split("|"):
+             token = token.strip()
+             if re.fullmatch(r"[A-Za-z0-9_-]+", token):
+                 heads.add(token)
+         return heads
+
+     word_match = re.match(r"^\^([A-Za-z0-9_-]+)\\b", pattern)
+     if word_match:
+         return {word_match.group(1)}
+
+     return set()
+
+
+ def find_potential_overlaps(patterns: list[str]) -> list[str]:
+     """
+     Conservative, heuristic warnings: flags permissive patterns that may
+     subsume more specific ones for the same command head.
+     """
+     by_head: dict[str, list[str]] = {}
+     for pattern in patterns:
+         for head in extract_command_heads(pattern):
+             by_head.setdefault(head, []).append(pattern)
+
+     warnings: list[str] = []
+     permissive_markers = (".*", "(\\s+.*)?")
+     for head, group in sorted(by_head.items()):
+         permissive = [p for p in group if any(m in p for m in permissive_markers)]
+         if permissive and len(group) > 1:
+             warnings.append(
+                 f"{head}: permissive patterns may overlap others -> "
+                 + ", ".join(permissive)
+             )
+     return warnings
+
+
+ def main() -> int:
+     paths = [Path(p) for p in sys.argv[1:]] or DEFAULT_RULES
+     any_fail = False
+
+     for path in paths:
+         if not path.exists():
+             print(f"[missing] {path}")
+             any_fail = True
+             continue
+
+         rules = load_rules(path)
+         print(f"\n== {path} ==")
+
+         for section in ("allow_patterns", "deny_patterns", "sensitive_paths"):
+             patterns = rules.get(section, [])
+             errors = compile_patterns(section, patterns)
+             if errors:
+                 any_fail = True
+                 print(f"[invalid regex] {section}")
+                 for err in errors:
+                     print(f" - {err}")
+
+             dups = find_duplicates(patterns)
+             if dups:
+                 print(f"[duplicates] {section}")
+                 for dup in dups:
+                     print(f" - {dup}")
+
+         allow = rules.get("allow_patterns", [])
+         dead = find_dead_by_split(allow)
+         if dead:
+             print("[dead by split] allow_patterns")
+             for pattern in dead:
+                 print(f" - {pattern}")
+
+         overlaps = find_potential_overlaps(allow)
+         if overlaps:
+             print("[possible overlaps] allow_patterns")
+             for warning in overlaps:
+                 print(f" - {warning}")
+
+         allow_set = set(allow)
+         deny_set = set(rules.get("deny_patterns", []))
+         both = sorted(allow_set & deny_set)
+         if both:
+             print("[allow/deny conflict]")
+             for pattern in both:
+                 print(f" - {pattern}")
+
+     return 1 if any_fail else 0
+
+
+ if __name__ == "__main__":
+     raise SystemExit(main())
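One motivation for the linter's dead-pattern check is visible in this very diff: the old compound `git add ... && git commit` allow-pattern was removed because, once commands are split on `&&`, no single segment can ever match it. A self-contained sketch of the same heuristic the script applies; the pattern is the one removed above:

```python
import re

# Same character-class stripping as find_dead_by_split in the linter.
pattern = r"^git\s+add\s+[^|;&]+\s*&&\s*git\s+commit\s+-m\s+"
stripped = re.sub(r"\[.*?\]", "", pattern)
print("&&" in stripped)  # True -> would be reported under "[dead by split]"
```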
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@torka/claude-qol",
- "version": "0.4.0",
+ "version": "0.4.1",
  "description": "Claude Code quality-of-life improvements: auto-approve hooks, context monitoring, and status line enhancements",
  "keywords": [
  "claude-code",