envdrift 4.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- envdrift/__init__.py +30 -0
- envdrift/_version.py +34 -0
- envdrift/api.py +192 -0
- envdrift/cli.py +42 -0
- envdrift/cli_commands/__init__.py +1 -0
- envdrift/cli_commands/diff.py +91 -0
- envdrift/cli_commands/encryption.py +630 -0
- envdrift/cli_commands/encryption_helpers.py +93 -0
- envdrift/cli_commands/hook.py +75 -0
- envdrift/cli_commands/init_cmd.py +117 -0
- envdrift/cli_commands/partial.py +222 -0
- envdrift/cli_commands/sync.py +1140 -0
- envdrift/cli_commands/validate.py +109 -0
- envdrift/cli_commands/vault.py +376 -0
- envdrift/cli_commands/version.py +15 -0
- envdrift/config.py +489 -0
- envdrift/constants.json +18 -0
- envdrift/core/__init__.py +30 -0
- envdrift/core/diff.py +233 -0
- envdrift/core/encryption.py +400 -0
- envdrift/core/parser.py +260 -0
- envdrift/core/partial_encryption.py +239 -0
- envdrift/core/schema.py +253 -0
- envdrift/core/validator.py +312 -0
- envdrift/encryption/__init__.py +117 -0
- envdrift/encryption/base.py +217 -0
- envdrift/encryption/dotenvx.py +236 -0
- envdrift/encryption/sops.py +458 -0
- envdrift/env_files.py +60 -0
- envdrift/integrations/__init__.py +21 -0
- envdrift/integrations/dotenvx.py +689 -0
- envdrift/integrations/precommit.py +266 -0
- envdrift/integrations/sops.py +85 -0
- envdrift/output/__init__.py +21 -0
- envdrift/output/rich.py +424 -0
- envdrift/py.typed +0 -0
- envdrift/sync/__init__.py +26 -0
- envdrift/sync/config.py +218 -0
- envdrift/sync/engine.py +383 -0
- envdrift/sync/operations.py +138 -0
- envdrift/sync/result.py +99 -0
- envdrift/vault/__init__.py +107 -0
- envdrift/vault/aws.py +282 -0
- envdrift/vault/azure.py +170 -0
- envdrift/vault/base.py +150 -0
- envdrift/vault/gcp.py +210 -0
- envdrift/vault/hashicorp.py +238 -0
- envdrift-4.2.1.dist-info/METADATA +160 -0
- envdrift-4.2.1.dist-info/RECORD +52 -0
- envdrift-4.2.1.dist-info/WHEEL +4 -0
- envdrift-4.2.1.dist-info/entry_points.txt +2 -0
- envdrift-4.2.1.dist-info/licenses/LICENSE +21 -0
envdrift/core/schema.py
ADDED
|
@@ -0,0 +1,253 @@
|
|
|
1
|
+
"""Schema loader for Pydantic Settings classes."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import importlib
|
|
6
|
+
import os
|
|
7
|
+
import sys
|
|
8
|
+
from dataclasses import dataclass, field
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from typing import Any
|
|
11
|
+
|
|
12
|
+
from pydantic_settings import BaseSettings
|
|
13
|
+
|
|
14
|
+
# Environment variable to signal schema extraction mode.
# SchemaLoader.load() sets this to "1" while importing a settings module, so
# user code can detect it and skip Settings instantiation during import.
ENVDRIFT_SCHEMA_EXTRACTION = "ENVDRIFT_SCHEMA_EXTRACTION"
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
@dataclass
class FieldMetadata:
    """Describes a single field of a Pydantic Settings class."""

    name: str  # field / environment-variable name
    required: bool  # True when the field has no default value
    sensitive: bool  # True when the schema flags the field as sensitive
    default: Any  # default value (None for required fields)
    description: str | None  # human-readable description, if any
    field_type: type  # resolved Python annotation object
    annotation: str  # annotation rendered as a display string

    @property
    def is_optional(self) -> bool:
        """
        Whether the field may be omitted from an env file.

        Returns:
            `True` when the field has a default value (is not required),
            `False` otherwise.
        """
        return not self.required
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
@dataclass
class SchemaMetadata:
    """Complete description of a settings schema: its fields and extra policy."""

    class_name: str  # name of the settings class
    module_path: str  # module the class was defined in
    fields: dict[str, FieldMetadata] = field(default_factory=dict)
    extra_policy: str = "ignore"  # one of "forbid", "ignore", "allow"

    def _select(self, predicate) -> list[str]:
        """Return names of fields whose metadata satisfies *predicate*, in insertion order."""
        return [name for name, meta in self.fields.items() if predicate(meta)]

    @property
    def required_fields(self) -> list[str]:
        """
        Names of fields marked as required in the schema.

        Returns:
            list[str]: Field names for which FieldMetadata.required is True.
        """
        return self._select(lambda meta: meta.required)

    @property
    def optional_fields(self) -> list[str]:
        """
        Names of fields that are not required.

        Returns:
            list[str]: Field names for which FieldMetadata.required is False.
        """
        return self._select(lambda meta: not meta.required)

    @property
    def sensitive_fields(self) -> list[str]:
        """
        Names of fields flagged as sensitive.

        Returns:
            list[str]: Field names for which FieldMetadata.sensitive is True.
        """
        return self._select(lambda meta: meta.sensitive)
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
class SchemaLoadError(Exception):
    """Raised when a settings schema cannot be imported or resolved."""
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
class SchemaLoader:
    """Load and introspect Pydantic Settings classes."""

    @staticmethod
    def _ensure_on_path(service_dir: Path | str | None) -> None:
        """Prepend the resolved *service_dir* to sys.path if it is not already there."""
        if service_dir:
            resolved = str(Path(service_dir).resolve())
            if resolved not in sys.path:
                sys.path.insert(0, resolved)

    def load(self, dotted_path: str, service_dir: Path | str | None = None) -> type[BaseSettings]:
        """
        Load a Pydantic BaseSettings subclass specified by a dotted path.

        Parameters:
            dotted_path (str): Dotted import path with class name separated by `:`,
                e.g. "module.path:SettingsClass".
            service_dir (Path | str | None): Optional directory to add to sys.path
                to assist imports.

        Returns:
            type[BaseSettings]: The resolved Pydantic Settings class.

        Raises:
            SchemaLoadError: If the path format is invalid, the module cannot be
                imported, the class is missing, or the resolved object is not a
                subclass of `BaseSettings`.
        """
        self._ensure_on_path(service_dir)

        if ":" not in dotted_path:
            raise SchemaLoadError(
                f"Invalid schema path '{dotted_path}'. Expected format: 'module.path:ClassName'"
            )

        module_path, class_name = dotted_path.rsplit(":", 1)

        # Signal schema extraction mode so user code can skip Settings
        # instantiation during import. Remember any pre-existing value so a
        # caller's own setting is restored instead of being clobbered.
        previous = os.environ.get(ENVDRIFT_SCHEMA_EXTRACTION)
        os.environ[ENVDRIFT_SCHEMA_EXTRACTION] = "1"
        try:
            module = importlib.import_module(module_path)
        except ImportError as e:
            raise SchemaLoadError(f"Cannot import module '{module_path}': {e}") from e
        finally:
            if previous is None:
                os.environ.pop(ENVDRIFT_SCHEMA_EXTRACTION, None)
            else:
                os.environ[ENVDRIFT_SCHEMA_EXTRACTION] = previous

        try:
            settings_cls = getattr(module, class_name)
        except AttributeError as e:
            raise SchemaLoadError(
                f"Class '{class_name}' not found in module '{module_path}'"
            ) from e

        # Verify it's a BaseSettings subclass before handing it to callers.
        if not isinstance(settings_cls, type) or not issubclass(settings_cls, BaseSettings):
            raise SchemaLoadError(f"'{class_name}' is not a Pydantic BaseSettings subclass")

        return settings_cls

    def extract_metadata(self, settings_cls: type[BaseSettings]) -> SchemaMetadata:
        """
        Build a SchemaMetadata describing the given Pydantic BaseSettings class.

        Inspects the class's model_config.extra (defaulting to "ignore") and
        model_fields to populate FieldMetadata entries. For required fields the
        stored default is None; sensitivity is read from a field's
        json_schema_extra["sensitive"] if present, and type annotations fall
        back to "Any" when unavailable.

        Parameters:
            settings_cls (type[BaseSettings]): The settings class to inspect.

        Returns:
            SchemaMetadata: Metadata for the settings class, including the
            field map and extra policy.
        """
        schema = SchemaMetadata(
            class_name=settings_cls.__name__,
            module_path=settings_cls.__module__,
        )

        # Determine extra policy from model_config, which may be a plain dict
        # or a SettingsConfigDict-like object.
        model_config = getattr(settings_cls, "model_config", {})
        if isinstance(model_config, dict):
            extra = model_config.get("extra", "ignore")
        else:
            extra = getattr(model_config, "extra", "ignore")

        schema.extra_policy = extra if extra else "ignore"

        for field_name, field_info in settings_cls.model_fields.items():
            is_required = field_info.is_required()

            # Sensitivity is an envdrift extension carried in json_schema_extra.
            extra_schema = field_info.json_schema_extra
            is_sensitive = False
            if isinstance(extra_schema, dict):
                is_sensitive = extra_schema.get("sensitive", False)

            # Required fields have no usable default.
            default_value = None if is_required else field_info.default

            description = field_info.description

            # Render the annotation as a readable string.
            annotation = field_info.annotation
            if annotation is not None:
                if hasattr(annotation, "__name__"):
                    type_str = annotation.__name__
                else:
                    type_str = str(annotation)
            else:
                type_str = "Any"

            schema.fields[field_name] = FieldMetadata(
                name=field_name,
                required=is_required,
                sensitive=is_sensitive,
                default=default_value,
                description=description,
                field_type=annotation if annotation else type(None),
                annotation=type_str,
            )

        return schema

    def get_schema_metadata_func(
        self, module_path: str, service_dir: Path | str | None = None
    ) -> dict[str, Any] | None:
        """
        Invoke a module-level get_schema_metadata() function if present.

        Parameters:
            module_path (str): Dotted module path to import (e.g. "config.settings").
            service_dir (Path | str | None): Optional directory to add to sys.path
                to aid importing the module.

        Returns:
            dict[str, Any] | None: The dictionary returned by get_schema_metadata()
            if the function exists and is callable, or `None` if the module cannot
            be imported or the function is absent.
        """
        self._ensure_on_path(service_dir)

        try:
            module = importlib.import_module(module_path)
        except ImportError:
            return None

        func = getattr(module, "get_schema_metadata", None)
        if callable(func):
            return func()

        return None

    def load_and_extract(
        self, dotted_path: str, service_dir: Path | str | None = None
    ) -> SchemaMetadata:
        """
        Load a settings class from a dotted path and return its SchemaMetadata.

        Parameters:
            dotted_path (str): Dotted import path with class name,
                e.g. "config.settings:ProductionSettings".
            service_dir (Path | str | None): Optional directory to add to sys.path
                to assist imports.

        Returns:
            SchemaMetadata: Metadata describing the loaded settings class.
        """
        settings_cls = self.load(dotted_path, service_dir)
        return self.extract_metadata(settings_cls)
|
|
@@ -0,0 +1,312 @@
|
|
|
1
|
+
"""Validation logic for .env files against Pydantic schemas."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import re
|
|
6
|
+
from dataclasses import dataclass, field
|
|
7
|
+
|
|
8
|
+
from envdrift.core.parser import EncryptionStatus, EnvFile
|
|
9
|
+
from envdrift.core.schema import SchemaMetadata
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
@dataclass
class ValidationResult:
    """Outcome of validating a .env file against a settings schema."""

    valid: bool
    missing_required: set[str] = field(default_factory=set)
    missing_optional: set[str] = field(default_factory=set)
    extra_vars: set[str] = field(default_factory=set)
    unencrypted_secrets: set[str] = field(default_factory=set)
    type_errors: dict[str, str] = field(default_factory=dict)
    warnings: list[str] = field(default_factory=list)

    @property
    def has_errors(self) -> bool:
        """
        Whether any hard errors were found (warnings are excluded).

        Missing required variables, type errors, and extra variables present
        when the schema forbids extras all count as errors. Unencrypted
        secrets are warnings, not errors - use `envdrift encrypt --check`
        for strict enforcement.

        Returns:
            True if any errors are present, False otherwise.
        """
        return bool(self.missing_required or self.type_errors or self.extra_vars)

    @property
    def error_count(self) -> int:
        """
        Total number of validation error entries.

        Returns:
            int: Sum of missing required variables, type errors, and extra variables.
        """
        error_groups = (self.missing_required, self.type_errors, self.extra_vars)
        return sum(len(group) for group in error_groups)

    @property
    def warning_count(self) -> int:
        """
        Total number of warning entries.

        Combines explicit warnings, missing optional variables, and
        unencrypted secrets.

        Returns:
            The total count of warnings as an integer.
        """
        warning_groups = (self.warnings, self.missing_optional, self.unencrypted_secrets)
        return sum(len(group) for group in warning_groups)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
class Validator:
    """Validate .env files against Pydantic schemas."""

    # Patterns that suggest a value is a secret
    SECRET_PATTERNS = [
        re.compile(r"^sk[-_]", re.IGNORECASE),  # API keys (Stripe, OpenAI)
        re.compile(r"^pk[-_]", re.IGNORECASE),  # Public/private keys
        re.compile(r"password", re.IGNORECASE),  # Passwords
        re.compile(r"secret", re.IGNORECASE),  # Secrets
        re.compile(r"^ghp_"),  # GitHub personal tokens
        re.compile(r"^gho_"),  # GitHub OAuth tokens
        re.compile(r"^ghu_"),  # GitHub user tokens
        re.compile(r"^xox[baprs]-"),  # Slack tokens
        re.compile(r"^AKIA[0-9A-Z]{16}$"),  # AWS access keys
        re.compile(r"^postgres://.*:.*@"),  # DB URLs with credentials
        re.compile(r"^postgresql://.*:.*@"),
        re.compile(r"^mysql://.*:.*@"),
        re.compile(r"^redis://.*:.*@"),
        re.compile(r"^mongodb://.*:.*@"),
        re.compile(r"^mongodb\+srv://.*:.*@"),
        re.compile(r"eyJ[A-Za-z0-9_-]+\.eyJ"),  # JWT tokens
    ]

    # Variable names that suggest sensitive content
    SENSITIVE_VAR_PATTERNS = [
        re.compile(r".*_KEY$", re.IGNORECASE),
        re.compile(r".*_SECRET$", re.IGNORECASE),
        re.compile(r".*_TOKEN$", re.IGNORECASE),
        re.compile(r".*_PASSWORD$", re.IGNORECASE),
        re.compile(r".*_PASS$", re.IGNORECASE),
        re.compile(r".*_CREDENTIAL.*", re.IGNORECASE),
        re.compile(r".*_API_KEY$", re.IGNORECASE),
        re.compile(r"^JWT_.*", re.IGNORECASE),
        re.compile(r"^AUTH_.*", re.IGNORECASE),
        re.compile(r".*_DSN$", re.IGNORECASE),  # Sentry DSN
    ]

    def validate(
        self,
        env_file: EnvFile,
        schema: SchemaMetadata,
        check_encryption: bool = True,
        check_extra: bool = True,
    ) -> ValidationResult:
        """Validate env file against schema.

        Checks:
            1. All required vars exist
            2. No unexpected vars (if schema has extra="forbid")
            3. Sensitive vars are encrypted
            4. Values match expected types (basic check)

        Args:
            env_file: Parsed env file
            schema: Schema metadata
            check_encryption: Whether to check if sensitive vars are encrypted
            check_extra: Whether to check for extra variables

        Returns:
            ValidationResult with all issues found
        """
        result = ValidationResult(valid=True)

        env_var_names = set(env_file.variables.keys())
        schema_field_names = set(schema.fields.keys())

        # Single pass over the schema: classify fields absent from the env
        # file as missing-required (error) or missing-optional (warning).
        for field_name, field_meta in schema.fields.items():
            if field_name not in env_var_names:
                if field_meta.required:
                    result.missing_required.add(field_name)
                else:
                    result.missing_optional.add(field_name)

        # Check for extra variables
        if check_extra:
            extra = env_var_names - schema_field_names
            if extra:
                if schema.extra_policy == "forbid":
                    result.extra_vars = extra
                else:
                    # Just a warning when extra is "ignore" or "allow"
                    for var_name in extra:
                        result.warnings.append(f"Extra variable '{var_name}' not in schema")

        # Check encryption status for sensitive variables
        if check_encryption:
            for field_name, field_meta in schema.fields.items():
                if field_name not in env_file.variables:
                    continue

                env_var = env_file.variables[field_name]

                # Schema-declared sensitive fields must not be plaintext.
                if field_meta.sensitive:
                    if env_var.encryption_status == EncryptionStatus.PLAINTEXT:
                        result.unencrypted_secrets.add(field_name)

            # Heuristic pass: warn about plaintext values/names that look
            # secret but are not marked sensitive in the schema. Hoist the
            # sensitive-field set once - the property rebuilds its list on
            # every access, which would make the loop quadratic.
            declared_sensitive = set(schema.sensitive_fields)
            for var_name, env_var in env_file.variables.items():
                if env_var.encryption_status != EncryptionStatus.PLAINTEXT:
                    continue
                if var_name in declared_sensitive:
                    continue
                if self.is_value_suspicious(env_var.value):
                    result.warnings.append(
                        f"'{var_name}' looks like a secret but "
                        "is not marked sensitive in schema"
                    )
                if self.is_name_suspicious(var_name):
                    result.warnings.append(
                        f"'{var_name}' has a name suggesting sensitive data "
                        "but is not marked sensitive in schema"
                    )

        # Basic type validation
        for field_name, field_meta in schema.fields.items():
            if field_name not in env_file.variables:
                continue

            env_var = env_file.variables[field_name]
            type_error = self._check_type(env_var.value, field_meta.field_type)
            if type_error:
                result.type_errors[field_name] = type_error

        # Determine overall validity via has_errors so the two definitions of
        # "error" can never drift apart. Unencrypted secrets are warnings, not
        # errors - use `envdrift encrypt --check` for strict enforcement.
        result.valid = not result.has_errors

        return result

    def is_value_suspicious(self, value: str) -> bool:
        """
        Determine whether a plaintext value matches any known secret-like pattern.

        Returns:
            `True` if the value matches any secret-like pattern, `False` otherwise.
        """
        return any(pattern.search(value) for pattern in self.SECRET_PATTERNS)

    def is_name_suspicious(self, name: str) -> bool:
        """
        Determine whether a variable name indicates it contains sensitive data.

        Parameters:
            name (str): Environment variable name to evaluate.

        Returns:
            bool: `True` if the name matches a sensitive pattern, `False` otherwise.
        """
        return any(pattern.match(name) for pattern in self.SENSITIVE_VAR_PATTERNS)

    def _check_type(self, value: str, expected_type: type) -> str | None:
        """
        Validate a plaintext .env value against an expected Python type.

        Parameters:
            value (str): The raw value read from a .env file.
            expected_type (type): The Python type expected for the value
                (e.g., int, float, bool, list).

        Notes:
            If `expected_type` is None or `value` is an empty string, no type
            check is performed and the function returns None.

        Returns:
            str | None: An error message describing the type mismatch, or
            `None` if the value is acceptable or no check was performed.
        """
        if expected_type is None or value == "":
            return None

        # Skip type check for encrypted values (supports both dotenvx and SOPS)
        # dotenvx format: encrypted:...   SOPS format: ENC[AES256_GCM,...
        if value.startswith(("encrypted:", "ENC[")):
            return None

        type_name = getattr(expected_type, "__name__", str(expected_type))

        if type_name == "int":
            try:
                int(value)
            except ValueError:
                return f"Expected integer, got '{value}'"

        elif type_name == "float":
            try:
                float(value)
            except ValueError:
                return f"Expected float, got '{value}'"

        elif type_name == "bool":
            if value.lower() not in ("true", "false", "1", "0", "yes", "no"):
                return f"Expected boolean, got '{value}'"

        elif type_name == "list":
            # Lists in .env are typically comma-separated or JSON; accept
            # anything here rather than guess the project's convention.
            pass

        return None

    def generate_fix_template(self, result: ValidationResult, schema: SchemaMetadata) -> str:
        """
        Generate a .env snippet that provides assignments for missing schema variables.

        Parameters:
            result (ValidationResult): Validation outcome containing
                `missing_required` and `missing_optional` sets.
            schema (SchemaMetadata): Schema metadata used to include field
                descriptions, defaults, and sensitivity flags.

        Returns:
            template (str): A newline-separated .env template. Required
            sensitive fields use the placeholder `encrypted:YOUR_VALUE_HERE`;
            optional fields include commented defaults when available.
        """
        lines = []

        if result.missing_required:
            lines.append("# Missing required variables:")
            for var_name in sorted(result.missing_required):
                field_meta = schema.fields.get(var_name)
                if field_meta and field_meta.description:
                    lines.append(f"# {field_meta.description}")
                if field_meta and field_meta.sensitive:
                    lines.append(f'{var_name}="encrypted:YOUR_VALUE_HERE"')
                else:
                    lines.append(f"{var_name}=")
            lines.append("")

        if result.missing_optional:
            lines.append("# Missing optional variables (have defaults):")
            for var_name in sorted(result.missing_optional):
                field_meta = schema.fields.get(var_name)
                if field_meta and field_meta.description:
                    lines.append(f"# {field_meta.description}")
                default = field_meta.default if field_meta else None
                if default is not None:
                    lines.append(f"# {var_name}={default}")
                else:
                    lines.append(f"# {var_name}=")
            lines.append("")

        return "\n".join(lines)
|
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
"""Encryption backend interfaces for multiple encryption tools."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from enum import Enum
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
from envdrift.encryption.base import (
|
|
9
|
+
EncryptionBackend,
|
|
10
|
+
EncryptionBackendError,
|
|
11
|
+
EncryptionNotFoundError,
|
|
12
|
+
)
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class EncryptionProvider(Enum):
    """Encryption tools that envdrift knows how to drive."""

    # Values double as the user-facing provider names.
    DOTENVX = "dotenvx"
    SOPS = "sops"
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def get_encryption_backend(provider: EncryptionProvider | str, **config) -> EncryptionBackend:
    """
    Create and return a provider-specific EncryptionBackend.

    Parameters:
        provider (EncryptionProvider | str): Encryption provider enum or name
            ("dotenvx", "sops").
        **config: Provider-specific configuration:
            - For "dotenvx": `auto_install` (bool) - optional, defaults to False.
            - For "sops": `config_file` (str) - optional path to .sops.yaml.
              `age_key` (str) - optional age private key (SOPS_AGE_KEY).
              `age_key_file` (str) - optional age key file path (SOPS_AGE_KEY_FILE).
              `auto_install` (bool) - optional, defaults to False.

    Returns:
        EncryptionBackend: A configured backend instance for the requested provider.

    Raises:
        ValueError: If the provider name or enum member is unsupported.
    """
    if isinstance(provider, str):
        try:
            provider = EncryptionProvider(provider)
        except ValueError:
            # Surface the function's documented error message for unknown
            # names instead of the enum's generic "is not a valid" ValueError.
            raise ValueError(f"Unsupported encryption provider: {provider}") from None

    if provider is EncryptionProvider.DOTENVX:
        # Imported lazily so a backend is only loaded when actually requested.
        from envdrift.encryption.dotenvx import DotenvxEncryptionBackend

        return DotenvxEncryptionBackend(
            auto_install=config.get("auto_install", False),
        )

    if provider is EncryptionProvider.SOPS:
        from envdrift.encryption.sops import SOPSEncryptionBackend

        return SOPSEncryptionBackend(
            config_file=config.get("config_file"),
            age_key=config.get("age_key"),
            age_key_file=config.get("age_key_file"),
            auto_install=config.get("auto_install", False),
        )

    raise ValueError(f"Unsupported encryption provider: {provider}")
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def detect_encryption_provider(file_path) -> EncryptionProvider | None:
    """
    Auto-detect which encryption provider was used to encrypt a file.

    Parameters:
        file_path: Path to the (possibly) encrypted file.

    Returns:
        EncryptionProvider if a known encryption format is detected, or None
        if the file is missing, not valid UTF-8 text, or not encrypted /
        unknown format.
    """
    path = Path(file_path)
    if not path.exists():
        return None

    try:
        content = path.read_text(encoding="utf-8")
    except (OSError, UnicodeDecodeError):
        # Binary or unreadable content cannot match any text marker; report
        # "unknown" per the documented contract instead of raising.
        return None

    # Check for dotenvx markers first (most common)
    dotenvx_markers = ("#/---BEGIN DOTENV ENCRYPTED---/", "DOTENV_PUBLIC_KEY")
    if any(marker in content for marker in dotenvx_markers):
        return EncryptionProvider.DOTENVX

    # Check for SOPS markers: SOPS-encrypted files carry a "sops" metadata key
    # in YAML/JSON output, or ENC[...] value markers in dotenv output.
    sops_markers = ("sops:", '"sops":', "ENC[AES256_GCM,")
    if any(marker in content for marker in sops_markers):
        return EncryptionProvider.SOPS

    # NOTE: a .sops.yaml in the same or parent directory alone does not prove
    # the file is encrypted, so its presence is deliberately ignored to avoid
    # false positives on plaintext files.
    return None
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
# Public API of the encryption package.
__all__ = [
    "EncryptionBackend",
    "EncryptionBackendError",
    "EncryptionNotFoundError",
    "EncryptionProvider",
    "detect_encryption_provider",
    "get_encryption_backend",
]
|