dotenvdrift 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,3 @@
1
# Public API of the package root: only the version string.
__all__ = ["__version__"]

# Single source of truth for the distribution version (mirrored in metadata).
__version__ = "0.1.0"
@@ -0,0 +1,4 @@
1
# Allows `python -m dotenvdrift` to behave like the console script.
from .cli import main

if __name__ == "__main__":
    # main() returns an int exit code (0/1/2); SystemExit propagates it.
    raise SystemExit(main())
dotenvdrift/cli.py ADDED
@@ -0,0 +1,76 @@
1
+ from __future__ import annotations
2
+
3
+ import argparse
4
+ import json
5
+ import sys
6
+ from dataclasses import asdict
7
+
8
+ from .__init__ import __version__
9
+ from .core import AuditResult, audit, select_issues
10
+
11
+
12
def build_parser() -> argparse.ArgumentParser:
    """Build the dotenvdrift command-line interface."""
    cli = argparse.ArgumentParser(
        prog="dotenvdrift",
        description="Catch env var drift between code, .env.example, Docker, and GitHub Actions.",
    )
    cli.add_argument("path", nargs="?", default=".", help="Repository path")
    cli.add_argument("--json", action="store_true", help="Print machine-readable output")
    cli.add_argument("--strict", action="store_true", help="Exit non-zero when any issue is found")
    cli.add_argument(
        "--only",
        choices=("missing", "undocumented", "unused"),
        help="Show a single issue group",
    )
    cli.add_argument("--version", action="version", version=f"%(prog)s {__version__}")
    return cli
27
+
28
+
29
def main(argv: list[str] | None = None) -> int:
    """CLI entry point.

    Returns 0 on success (or non-strict findings), 1 for findings under
    --strict, and 2 for an invalid repository path.
    """
    options = build_parser().parse_args(argv)
    try:
        result = audit(options.path)
    except (FileNotFoundError, NotADirectoryError) as error:
        print(f"error: {error}", file=sys.stderr)
        return 2
    renderer = render_json if options.json else render_text
    print(renderer(result, options.only))
    issue_count = sum(len(issues) for _, issues in select_issues(result, options.only))
    if options.strict and issue_count:
        return 1
    return 0
42
+
43
+
44
def render_json(result: AuditResult, only: str | None) -> str:
    """Serialize the audit result (optionally a single group) as pretty JSON."""
    issue_map: dict[str, list[dict]] = {}
    for group_name, group_issues in select_issues(result, only):
        issue_map[group_name] = [asdict(item) for item in group_issues]
    payload = {
        "root": result.root,
        "documented_files": result.documented_files,
        "counts": result.counts(only),
        "issues": issue_map,
    }
    return json.dumps(payload, indent=2, sort_keys=True)
55
+
56
+
57
def render_text(result: AuditResult, only: str | None) -> str:
    """Render a human-readable drift report, or a success line when clean."""
    groups = select_issues(result, only)
    total = sum(len(group) for _, group in groups)
    if not total:
        return "✓ no env drift found"

    out: list[str] = []
    for group_name, group in groups:
        if not group:
            continue
        out.append(group_name)
        for issue in group:
            padded = issue.name.ljust(20)
            suffix = f" {issue.first_seen}" if issue.first_seen else ""
            out.append(f"  {padded}{suffix}")
        out.append("")
    out.append(f"✗ {total} drift issue{'s' if total != 1 else ''}")
    return "\n".join(out).rstrip()
dotenvdrift/core.py ADDED
@@ -0,0 +1,260 @@
1
+ from __future__ import annotations
2
+
3
+ import os
4
+ from collections import defaultdict
5
+ from collections.abc import Iterator
6
+ from dataclasses import dataclass
7
+ from pathlib import Path
8
+ import re
9
+
10
# Directory names pruned during repository walks.
IGNORE_DIRS = {".git", "node_modules", ".venv", "venv", "dist", "build", "__pycache__"}
# Source extensions scanned for env-var references.
CODE_EXTENSIONS = {".py", ".js", ".jsx", ".ts", ".tsx", ".mjs", ".cjs"}
YAML_EXTENSIONS = {".yml", ".yaml"}
# Files larger than this are skipped to bound regex scanning cost.
MAX_FILE_BYTES = 1_000_000
# Conventional env var names: uppercase letter, then uppercase/digits/underscores.
VAR_RE = r"[A-Z][A-Z0-9_]*"

# os.getenv("X") / os.environ.get("X") / os.environ["X"]
PYTHON_PATTERNS = (
    re.compile(rf"os\.getenv\(\s*[\"']({VAR_RE})[\"']"),
    re.compile(rf"os\.environ\.get\(\s*[\"']({VAR_RE})[\"']"),
    re.compile(rf"os\.environ\[\s*[\"']({VAR_RE})[\"']\s*\]"),
)
# process.env.X / process.env["X"] / import.meta.env.X
JS_PATTERNS = (
    re.compile(rf"process\.env\.({VAR_RE})\b"),
    re.compile(rf"process\.env\[\s*[\"']({VAR_RE})[\"']\s*\]"),
    re.compile(rf"import\.meta\.env\.({VAR_RE})\b"),
)
# GitHub Actions expressions: ${{ secrets.X }} / ${{ vars.X }}
ACTIONS_INLINE_PATTERNS = (
    re.compile(rf"\$\{{\{{\s*secrets\.({VAR_RE})\s*\}}\}}"),
    re.compile(rf"\$\{{\{{\s*vars\.({VAR_RE})\s*\}}\}}"),
)
# Compose-style substitution: ${X} or ${X:-default}
DOCKER_SUBSTITUTION = re.compile(rf"\$\{{({VAR_RE})(?::-[^}}]*)?\}}")
# "KEY:" entries inside a YAML env/environment mapping block.
ENV_KEY_LINE = re.compile(rf"^({VAR_RE})\s*:")
# "KEY=value" lines in .env files, with optional "export " prefix.
ENV_FILE_KEY = re.compile(rf"^(?:export\s+)?({VAR_RE})\s*=")
33
+
34
+
35
@dataclass(frozen=True, slots=True)
class Hit:
    """One location where an env var name was referenced."""

    # Origin of the reference: "code", "actions", or "docker".
    source: str
    # Repo-relative POSIX path of the file containing the match.
    path: str
    # 1-based line number of the match.
    line: int
40
+
41
+
42
@dataclass(frozen=True, slots=True)
class Issue:
    """A single drift finding reported to the user."""

    # Issue group: "missing", "undocumented", or "unused".
    kind: str
    # Env var name.
    name: str
    # "path:line" of a representative occurrence, when known.
    first_seen: str | None
47
+
48
+
49
@dataclass(slots=True)
class AuditResult:
    """Aggregated outcome of one repository audit."""

    root: str
    documented_files: list[str]
    missing: list[Issue]
    undocumented: list[Issue]
    unused: list[Issue]

    def counts(self, only: str | None = None) -> dict[str, int]:
        """Per-group issue counts plus a grand total, honoring the filter."""
        tally = {"missing": 0, "undocumented": 0, "unused": 0}
        for group_name, group_issues in select_issues(self, only):
            tally[group_name] = len(group_issues)
        tally["total"] = sum(tally[key] for key in ("missing", "undocumented", "unused"))
        return tally
63
+
64
+
65
class ReferenceIndex:
    """Deduplicated map from env var name to every place it was referenced."""

    def __init__(self) -> None:
        self._hits: dict[str, list[Hit]] = defaultdict(list)

    def add(self, name: str, source: str, path: Path, line: int) -> None:
        """Record one reference, skipping exact duplicates."""
        candidate = Hit(source=source, path=path.as_posix(), line=line)
        bucket = self._hits[name]
        if candidate not in bucket:
            bucket.append(candidate)

    def names(self) -> set[str]:
        """All referenced variable names."""
        return set(self._hits)

    def hits_for(self, name: str) -> list[Hit]:
        """All recorded hits for *name* (empty list when unknown)."""
        return self._hits.get(name, [])

    def first_seen(self, name: str) -> str | None:
        """Return "path:line" of the (path, line)-smallest hit, or None."""
        recorded = self.hits_for(name)
        if not recorded:
            return None
        earliest = min(recorded, key=lambda hit: (hit.path, hit.line))
        return f"{earliest.path}:{earliest.line}"

    def has_source(self, name: str, source: str) -> bool:
        """Whether any hit for *name* came from the given source kind."""
        return any(hit.source == source for hit in self.hits_for(name))
89
+
90
+
91
def audit(root: str | Path) -> AuditResult:
    """Scan a repository and classify env-var drift into three groups.

    Raises FileNotFoundError / NotADirectoryError for an invalid root.
    """
    base = resolve_root(root)
    documented, documented_files, documented_locations = read_documented_keys(base)
    refs = collect_references(base)
    referenced = refs.names()

    # Used by application code but absent from .env.example.
    missing_names = sorted(
        name
        for name in referenced
        if name not in documented and refs.has_source(name, "code")
    )
    # Seen only in CI/container config and absent from .env.example.
    undocumented_names = sorted(
        name
        for name in referenced
        if name not in documented
        and not refs.has_source(name, "code")
        and (refs.has_source(name, "actions") or refs.has_source(name, "docker"))
    )
    # Documented but referenced nowhere.
    unused_names = sorted(documented - referenced)

    return AuditResult(
        root=base.as_posix(),
        documented_files=documented_files,
        missing=[Issue("missing", name, refs.first_seen(name)) for name in missing_names],
        undocumented=[Issue("undocumented", name, refs.first_seen(name)) for name in undocumented_names],
        unused=[Issue("unused", name, documented_locations.get(name)) for name in unused_names],
    )
115
+
116
+
117
def resolve_root(root: str | Path) -> Path:
    """Normalize *root* to an absolute directory path.

    Raises FileNotFoundError when the path does not exist and
    NotADirectoryError when it exists but is not a directory.
    """
    candidate = Path(root).expanduser().resolve()
    if not candidate.exists():
        raise FileNotFoundError(f"repository path not found: {candidate}")
    if candidate.is_dir():
        return candidate
    raise NotADirectoryError(f"repository path is not a directory: {candidate}")
124
+
125
+
126
def read_documented_keys(root: Path) -> tuple[set[str], list[str], dict[str, str]]:
    """Collect documented env var names from every .env.example under *root*.

    Returns (names, sorted relative file paths, name -> "path:line" of the
    first declaration encountered).
    """
    found_files: list[str] = []
    names: set[str] = set()
    first_declared: dict[str, str] = {}
    for path in walk_files(root):
        if path.name != ".env.example":
            continue
        relative = path.relative_to(root).as_posix()
        found_files.append(relative)
        for line_number, raw_line in enumerate(read_text(path).splitlines(), start=1):
            entry = raw_line.strip()
            # Skip blank lines and comments.
            if not entry or entry.startswith("#"):
                continue
            key_match = ENV_FILE_KEY.match(entry)
            if key_match is None:
                continue
            key = key_match.group(1)
            names.add(key)
            first_declared.setdefault(key, f"{relative}:{line_number}")
    return names, sorted(found_files), first_declared
145
+
146
+
147
def collect_references(root: Path) -> ReferenceIndex:
    """Index every env-var reference found in code, workflows, and compose files."""
    refs = ReferenceIndex()
    for path in walk_files(root):
        if is_oversized(path):
            continue  # keep regex scanning bounded
        suffix = path.suffix.lower()
        text = read_text(path)
        rel_path = path.relative_to(root)
        if suffix in CODE_EXTENSIONS:
            patterns = PYTHON_PATTERNS if suffix == ".py" else JS_PATTERNS
            scan_patterns(text, rel_path, patterns, refs, "code")
        elif suffix in YAML_EXTENSIONS:
            if is_actions_file(rel_path):
                scan_actions(text, rel_path, refs)
            elif is_compose_file(rel_path):
                scan_compose(text, rel_path, refs)
    return refs
167
+
168
+
169
def scan_patterns(text: str, path: Path, patterns: tuple[re.Pattern[str], ...], refs: ReferenceIndex, source: str) -> None:
    """Record a hit for every pattern match on every line of *text*."""
    for line_number, line in enumerate(text.splitlines(), start=1):
        for match in (m for pattern in patterns for m in pattern.finditer(line)):
            refs.add(match.group(1), source, path, line_number)
174
+
175
+
176
def scan_actions(text: str, path: Path, refs: ReferenceIndex) -> None:
    """Record secrets./vars. expressions plus env: block keys from a workflow."""
    for line_number, line in enumerate(text.splitlines(), start=1):
        for found in (m for pat in ACTIONS_INLINE_PATTERNS for m in pat.finditer(line)):
            refs.add(found.group(1), "actions", path, line_number)
    scan_yaml_block_keys(text, path, refs, block_name="env", source="actions")
182
+
183
+
184
def scan_compose(text: str, path: Path, refs: ReferenceIndex) -> None:
    """Record ${VAR} substitutions plus environment: block keys from compose YAML."""
    for line_number, content in enumerate(text.splitlines(), start=1):
        for found in DOCKER_SUBSTITUTION.finditer(content):
            refs.add(found.group(1), "docker", path, line_number)
    scan_yaml_block_keys(text, path, refs, block_name="environment", source="docker")
189
+
190
+
191
def scan_yaml_block_keys(text: str, path: Path, refs: ReferenceIndex, *, block_name: str, source: str) -> None:
    """Record variable names declared inside an indentation-scoped YAML block.

    Tracks `<block_name>:` headers by indentation (no YAML parser) and records
    both `KEY: value` mapping entries and `- KEY=value` list entries until a
    line dedents back to (or past) the header's indent level.
    """
    # Hoisted out of the loop: the list-entry pattern is constant.
    list_entry = re.compile(rf"-\s*({VAR_RE})=")
    active_indent: int | None = None
    for line_number, raw_line in enumerate(text.splitlines(), start=1):
        line = raw_line.rstrip()
        stripped = line.strip()
        if not stripped or stripped.startswith("#"):
            continue
        indent = len(line) - len(line.lstrip())
        # Leaving the block: anything at or above the header's indent ends it.
        if active_indent is not None and indent <= active_indent:
            active_indent = None
        # Block header: "env:" / "environment:" with nothing after the colon
        # except an optional trailing comment. (Fixes the old check, which
        # only matched exactly one space before the "#" and so missed headers
        # like "env:   # comment".)
        head, colon, rest = stripped.partition(":")
        if colon and head == block_name and (
            not rest or (rest[0] in " \t" and rest.lstrip().startswith("#"))
        ):
            active_indent = indent
            continue
        if active_indent is None:
            continue
        match = ENV_KEY_LINE.match(stripped)
        if match:
            refs.add(match.group(1), source, path, line_number)
            continue
        if stripped.startswith("- "):
            list_match = list_entry.match(stripped)
            if list_match:
                refs.add(list_match.group(1), source, path, line_number)
214
+
215
+
216
def walk_files(root: Path) -> Iterator[Path]:
    """Yield every file under *root* in deterministic order, pruning IGNORE_DIRS."""
    for current_root, dirnames, filenames in os.walk(root):
        # In-place slice assignment prunes os.walk's descent.
        dirnames[:] = sorted(d for d in dirnames if d not in IGNORE_DIRS)
        yield from (Path(current_root, f) for f in sorted(filenames))
221
+
222
+
223
def is_oversized(path: Path) -> bool:
    """True when the file exceeds MAX_FILE_BYTES or cannot be stat'ed."""
    try:
        size = path.stat().st_size
    except OSError:
        return True  # unreadable entries are treated as skippable
    return size > MAX_FILE_BYTES
228
+
229
+
230
def read_text(path: Path) -> str:
    """Best-effort UTF-8 read: retries leniently on bad bytes, '' on OS errors."""
    try:
        try:
            return path.read_text(encoding="utf-8")
        except UnicodeDecodeError:
            # Undecodable bytes are dropped rather than failing the scan.
            return path.read_text(encoding="utf-8", errors="ignore")
    except OSError:
        return ""
240
+
241
+
242
def is_actions_file(path: Path) -> bool:
    """True for files nested under .github/workflows/ (repo-relative paths)."""
    parts = path.parts
    if len(parts) < 3:
        return False
    return parts[:2] == (".github", "workflows")
245
+
246
+
247
def is_compose_file(path: Path) -> bool:
    """True for standard docker-compose / compose file names (case-insensitive)."""
    lowered = path.name.lower()
    return lowered in {
        "docker-compose.yml",
        "docker-compose.yaml",
        "compose.yml",
        "compose.yaml",
    }
250
+
251
+
252
def select_issues(result: AuditResult, only: str | None = None) -> list[tuple[str, list[Issue]]]:
    """Return (group name, issues) pairs in report order, optionally filtered."""
    all_groups = [
        ("missing", result.missing),
        ("undocumented", result.undocumented),
        ("unused", result.unused),
    ]
    if only is None:
        return all_groups
    return [(label, issues) for label, issues in all_groups if label == only]
@@ -0,0 +1,122 @@
1
+ Metadata-Version: 2.4
2
+ Name: dotenvdrift
3
+ Version: 0.1.0
4
+ Summary: Repo-local CLI for env drift across code, .env.example, Docker, and GitHub Actions.
5
+ License-Expression: MIT
6
+ Project-URL: Homepage, https://github.com/KevinHansen90/dotenvdrift
7
+ Project-URL: Repository, https://github.com/KevinHansen90/dotenvdrift
8
+ Project-URL: Issues, https://github.com/KevinHansen90/dotenvdrift/issues
9
+ Keywords: dotenv,env,env-drift,config-drift,cli,github-actions,docker,repo-local
10
+ Classifier: Development Status :: 3 - Alpha
11
+ Classifier: Environment :: Console
12
+ Classifier: Intended Audience :: Developers
13
+ Classifier: Programming Language :: Python :: 3
14
+ Classifier: Programming Language :: Python :: 3.11
15
+ Classifier: Programming Language :: Python :: 3.12
16
+ Classifier: Programming Language :: Python :: 3.13
17
+ Classifier: Topic :: Software Development :: Quality Assurance
18
+ Classifier: Topic :: Software Development :: Build Tools
19
+ Requires-Python: >=3.11
20
+ Description-Content-Type: text/markdown
21
+ License-File: LICENSE
22
+ Dynamic: license-file
23
+
24
+ <p align="center">
25
+ <img src="https://raw.githubusercontent.com/KevinHansen90/dotenvdrift/main/docs/assets/dotenvdrift-logo.png" alt="dotenvdrift logo" width="720">
26
+ </p>
27
+
28
+ <h1 align="center">dotenvdrift</h1>
29
+
30
+ <p align="center">
31
+ <a href="https://www.python.org/downloads/">
32
+ <img src="https://img.shields.io/badge/python-3.11%2B-3776AB?logo=python&logoColor=white" alt="Python 3.11+">
33
+ </a>
34
+ <a href="LICENSE">
35
+ <img src="https://img.shields.io/badge/license-MIT-2ea44f" alt="MIT License">
36
+ </a>
37
+ </p>
38
+
39
+ <p align="center">
40
+ Small repo-local CLI that catches env drift between code, <code>.env.example</code>, Docker / docker-compose, and GitHub Actions.
41
+ </p>
42
+
43
+ ## Why
44
+
45
+ I built this after repeatedly hitting env drift while moving quickly, including with coding agents: the code was done, but `.env.example`, Docker, docker-compose, and GitHub Actions had drifted apart.
46
+
47
+ That failure mode is not specific to AI-assisted repos. It breaks onboarding, local runs, and CI in any repo. This tool is just especially handy in fast-moving codebases where config drift shows up early.
48
+
49
+ ## What It Checks
50
+
51
+ - `missing`: used in code but absent from `.env.example`
52
+ - `undocumented`: referenced in Docker or GitHub Actions but not documented locally
53
+ - `unused`: still listed in `.env.example` but no longer referenced
54
+
55
+ It scans Python and JS/TS code, `.env.example`, Docker / docker-compose files, and GitHub Actions workflows with simple deterministic patterns.
56
+
57
+ ## Install
58
+
59
+ Requires Python 3.11+.
60
+
61
+ ```bash
62
+ uv sync
63
+ ```
64
+
65
+ Fallback:
66
+
67
+ ```bash
68
+ python -m pip install .
69
+ ```
70
+
71
+ ## Usage
72
+
73
+ ```bash
74
+ uv run dotenvdrift .
75
+ uv run dotenvdrift examples/broken-repo
76
+ uv run dotenvdrift . --json
77
+ uv run dotenvdrift . --strict
78
+ uv run dotenvdrift . --only missing
79
+ ```
80
+
81
+ Sample output:
82
+
83
+ ```text
84
+ missing
85
+ NODE_ENV web/client.ts:2
86
+ OPENAI_API_KEY app/settings.py:4
87
+ VITE_API_BASE_URL web/client.ts:1
88
+
89
+ undocumented
90
+ DATABASE_URL docker-compose.yml:5
91
+ PYPI_TOKEN .github/workflows/release.yml:11
92
+ RELEASE_REGION .github/workflows/release.yml:7
93
+
94
+ unused
95
+ DEBUG_SQL .env.example:2
96
+
97
+ ✗ 7 drift issues
98
+ ```
99
+
100
+ ## Limits
101
+
102
+ - It never loads or prints env values, only names and locations.
103
+ - It uses line-based regex scans, not ASTs or YAML parsers.
104
+ - It stays generic. AWS, GCP, Azure, and crypto repos work when they use env vars, but there is no provider-specific logic.
105
+ - It does not sync secrets, manage vaults, validate schemas, or auto-fix anything.
106
+
107
+ ## Development
108
+
109
+ ```bash
110
+ uv run python -m unittest discover -s tests -p 'test_*.py' -v
111
+ uv build
112
+ ```
113
+
114
+ ## Exit Codes
115
+
116
+ - `0`: no issues, or issues found without `--strict`
117
+ - `1`: issues found with `--strict`
118
+ - `2`: invalid repository path
119
+
120
+ ## License
121
+
122
+ MIT
@@ -0,0 +1,10 @@
1
+ dotenvdrift/__init__.py,sha256=4t_crzhrLum--oyowUMxtjBTzUtWp7oRTF22ewEvJG4,49
2
+ dotenvdrift/__main__.py,sha256=MHKZ_ae3fSLGTLUUMOx15fWdeOnJSHhq-zslRP5F5Lc,79
3
+ dotenvdrift/cli.py,sha256=bSdz-5-T1NucIuZOeyN7T-uGj-2Poi6wYfyLhY_SYn0,2565
4
+ dotenvdrift/core.py,sha256=LUx4PsJJQGPFG4oc765doXppoCeoDFVl1M3qahVuoz4,9184
5
+ dotenvdrift-0.1.0.dist-info/licenses/LICENSE,sha256=v2spsd7N1pKFFh2G8wGP_45iwe5S0DYiJzG4im8Rupc,1066
6
+ dotenvdrift-0.1.0.dist-info/METADATA,sha256=ra6PnQXwJQkRFJRmgPBl-X_CgpIVfYzQQAwq7hhnZpA,3746
7
+ dotenvdrift-0.1.0.dist-info/WHEEL,sha256=aeYiig01lYGDzBgS8HxWXOg3uV61G9ijOsup-k9o1sk,91
8
+ dotenvdrift-0.1.0.dist-info/entry_points.txt,sha256=gDSg_tGTq5K12zA-FkfkdeyKDtxTlQ8YJB4bG1uVhOw,53
9
+ dotenvdrift-0.1.0.dist-info/top_level.txt,sha256=OKMLFvjVQY2JO-y6xpUTJC7MUp-YV37eMtx8dS8BB2Q,12
10
+ dotenvdrift-0.1.0.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (82.0.1)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ dotenvdrift = dotenvdrift.cli:main
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 Your Name
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1 @@
1
+ dotenvdrift