xenfra-sdk 0.2.2__py3-none-any.whl → 0.2.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,431 @@
1
+ """
2
+ Security Scanner - Pre-deployment secret detection for Xenfra.
3
+
4
+ This module scans codebases for:
5
+ - Hardcoded secrets (API keys, passwords, tokens)
6
+ - Exposed .env files
7
+ - Missing .gitignore entries
8
+ """
9
+
10
import fnmatch
import re
from dataclasses import dataclass, field
from enum import Enum
from pathlib import Path
from typing import List, Dict, Optional, Any
15
+
16
+
17
class Severity(str, Enum):
    """Severity levels for security issues.

    Subclasses ``str`` so members compare equal to their string values
    and serialize cleanly (``SecurityIssue.to_dict`` emits ``.value``).
    """
    CRITICAL = "critical"  # Must fix before deploy (a critical issue fails the scan)
    WARNING = "warning"    # Should fix (does not fail the scan)
    INFO = "info"          # Nice to have / informational
22
+
23
+
24
@dataclass
class SecurityIssue:
    """One security finding produced by the scanner.

    The ``match`` field carries only a redacted excerpt of the offending
    text (see ``_redact_secret``) — never the raw secret.
    """
    severity: Severity
    issue_type: str
    file: str
    line: Optional[int] = None
    description: str = ""
    suggestion: str = ""
    match: str = ""  # redacted excerpt of the matched content

    def to_dict(self) -> dict:
        """Serialize this issue into a plain, JSON-friendly dict."""
        return dict(
            severity=self.severity.value,
            type=self.issue_type,
            file=self.file,
            line=self.line,
            description=self.description,
            suggestion=self.suggestion,
            match=self.match,
        )
45
+
46
+
47
@dataclass
class ScanResult:
    """Aggregate result of a security scan.

    ``passed`` is set by the scanners to mean "no CRITICAL issues";
    WARNING and INFO findings do not fail a scan.
    """
    passed: bool
    issues: List[SecurityIssue] = field(default_factory=list)
    files_scanned: int = 0

    def _count(self, severity: Severity) -> int:
        """Number of issues at exactly *severity*."""
        return sum(1 for issue in self.issues if issue.severity == severity)

    @property
    def critical_count(self) -> int:
        return self._count(Severity.CRITICAL)

    @property
    def warning_count(self) -> int:
        return self._count(Severity.WARNING)

    @property
    def info_count(self) -> int:
        return self._count(Severity.INFO)

    @property
    def summary(self) -> str:
        """One-line human-readable summary of the scan.

        Fix: the previous implementation returned "No issues found"
        whenever the scan *passed*, but a scan passes with WARNING/INFO
        findings present, so non-critical issues were silently hidden.
        "No issues" is now reported only when the issue list is empty.
        """
        if not self.issues:
            return f"No issues found ({self.files_scanned} files scanned)"
        parts = [
            f"{count} {label}"
            for count, label in (
                (self.critical_count, "critical"),
                (self.warning_count, "warning"),
                (self.info_count, "info"),
            )
            if count
        ]
        return f"{len(self.issues)} issues found ({', '.join(parts)})"

    def to_dict(self) -> dict:
        """Serialize the result, including derived counts, to a plain dict."""
        return {
            "passed": self.passed,
            "issues": [issue.to_dict() for issue in self.issues],
            "files_scanned": self.files_scanned,
            "summary": self.summary,
            "critical_count": self.critical_count,
            "warning_count": self.warning_count,
            "info_count": self.info_count,
        }
89
+
90
+
91
# Secret detection patterns.
# Format: (name, pattern, severity, description, suggestion)
# Each pattern is applied per line by scan_file_content; matches containing
# obvious placeholder words ("example", "changeme", ...) are discarded there.
SECRET_PATTERNS = [
    # AWS
    (
        "aws_access_key",
        r"AKIA[0-9A-Z]{16}",  # AWS access key IDs use the fixed "AKIA" prefix
        Severity.CRITICAL,
        "AWS Access Key ID found",
        "Move to environment variable: AWS_ACCESS_KEY_ID"
    ),
    (
        "aws_secret_key",
        # key/value assignment with a 40-char base64-like value
        r"(?i)(aws_secret_access_key|aws_secret_key)\s*[=:]\s*['\"]?([A-Za-z0-9/+=]{40})['\"]?",
        Severity.CRITICAL,
        "AWS Secret Access Key found",
        "Move to environment variable: AWS_SECRET_ACCESS_KEY"
    ),

    # Generic API Keys (quoted assignments with values of 20+ chars)
    (
        "api_key",
        r"(?i)(api[_-]?key|apikey)\s*[=:]\s*['\"]([a-zA-Z0-9_\-]{20,})['\"]",
        Severity.CRITICAL,
        "Hardcoded API key found",
        "Move to environment variable"
    ),
    (
        "secret_key",
        r"(?i)(secret[_-]?key|secretkey)\s*[=:]\s*['\"]([a-zA-Z0-9_\-]{20,})['\"]",
        Severity.CRITICAL,
        "Hardcoded secret key found",
        "Move to environment variable"
    ),

    # Database URLs — only flagged when a "user:password@" part is present
    (
        "database_url",
        r"(?i)(postgres|mysql|mongodb|redis)://[^\s'\"]+:[^\s'\"]+@",
        Severity.CRITICAL,
        "Database URL with credentials found",
        "Move to environment variable: DATABASE_URL"
    ),

    # Private Keys (PEM header, any common key type)
    (
        "private_key",
        r"-----BEGIN\s+(?:RSA|EC|DSA|OPENSSH)?\s*PRIVATE\s+KEY-----",
        Severity.CRITICAL,
        "Private key found in source code",
        "Move to a secure key management system"
    ),

    # JWT Secrets
    (
        "jwt_secret",
        r"(?i)(jwt[_-]?secret|jwt[_-]?key)\s*[=:]\s*['\"]([a-zA-Z0-9_\-]{16,})['\"]",
        Severity.CRITICAL,
        "JWT secret found in source code",
        "Move to environment variable: JWT_SECRET"
    ),

    # Passwords — WARNING only, since quoted 8+ char values are noisy
    (
        "password",
        r"(?i)(password|passwd|pwd)\s*[=:]\s*['\"]([^'\"]{8,})['\"]",
        Severity.WARNING,
        "Possible hardcoded password found",
        "Move to environment variable or use a secrets manager"
    ),

    # Bearer Tokens
    # NOTE(review): this also matches prose such as "Bearer token" in
    # comments/docs — presumably accepted as WARNING-level noise; confirm.
    (
        "bearer_token",
        r"(?i)bearer\s+[a-zA-Z0-9_\-\.]+",
        Severity.WARNING,
        "Bearer token found in source code",
        "Move to environment variable"
    ),

    # GitHub Tokens (ghp_/gho_/ghu_/ghs_/ghr_ prefixes)
    (
        "github_token",
        r"gh[pousr]_[A-Za-z0-9_]{36,}",
        Severity.CRITICAL,
        "GitHub personal access token found",
        "Move to environment variable: GITHUB_TOKEN"
    ),

    # Stripe Keys
    (
        "stripe_key",
        r"sk_live_[0-9a-zA-Z]{24,}",
        Severity.CRITICAL,
        "Stripe live secret key found",
        "Move to environment variable: STRIPE_SECRET_KEY"
    ),
    (
        "stripe_publishable",
        r"pk_live_[0-9a-zA-Z]{24,}",
        Severity.WARNING,
        "Stripe live publishable key found (less sensitive but should be environment variable)",
        "Move to environment variable: STRIPE_PUBLISHABLE_KEY"
    ),

    # OpenAI / Anthropic
    # NOTE(review): "sk-..." also prefixes Stripe keys; "sk_live_" uses an
    # underscore so the two patterns do not overlap in practice — confirm.
    (
        "openai_key",
        r"sk-[a-zA-Z0-9]{48,}",
        Severity.CRITICAL,
        "OpenAI API key found",
        "Move to environment variable: OPENAI_API_KEY"
    ),

    # DigitalOcean
    (
        "digitalocean_token",
        r"dop_v1_[a-f0-9]{64}",
        Severity.CRITICAL,
        "DigitalOcean API token found",
        "Move to environment variable: DIGITAL_OCEAN_TOKEN"
    ),

    # Slack (bot/app/personal/refresh/workspace token prefixes)
    (
        "slack_token",
        r"xox[baprs]-[0-9A-Za-z\-]+",
        Severity.CRITICAL,
        "Slack token found",
        "Move to environment variable: SLACK_TOKEN"
    ),
]
223
+
224
# Files to skip during scanning: binary/asset extensions that cannot
# contain plaintext secrets worth reporting (checked case-insensitively
# in _should_skip_file).
SKIP_EXTENSIONS = {
    ".pyc", ".pyo", ".class", ".o", ".so", ".dylib",  # compiled objects
    ".png", ".jpg", ".jpeg", ".gif", ".ico", ".svg",  # images
    ".woff", ".woff2", ".ttf", ".eot",                # fonts
    ".zip", ".tar", ".gz", ".rar",                    # archives
    ".pdf", ".doc", ".docx",                          # documents
    ".lock", ".sum",                                  # lockfiles / checksums
}

# Directory names excluded from the scan (VCS metadata, caches,
# virtualenvs, build output). Dot-prefixed directories are skipped
# unconditionally by _should_skip_directory, so the dotted entries here
# are belt-and-braces.
# NOTE(review): "*.egg-info" is written as a glob pattern; a plain
# exact-name test would never match a real "pkg.egg-info" directory —
# confirm the matcher expands glob entries.
SKIP_DIRECTORIES = {
    ".git", ".hg", ".svn",
    "__pycache__", ".pytest_cache", ".mypy_cache",
    "node_modules", "venv", ".venv", "env",
    ".tox", ".nox",
    "dist", "build", "*.egg-info",
}
241
+
242
+
243
def _should_skip_file(path: Path) -> bool:
    """Return True when *path* should not be scanned.

    Skips known binary/asset extensions, files larger than 1 MB, and
    files whose metadata cannot be read at all.
    """
    # Known binary/asset formats are never worth scanning.
    if path.suffix.lower() in SKIP_EXTENSIONS:
        return True

    try:
        size = path.stat().st_size
    except OSError:
        # Broken symlink, permission error, etc. — skip rather than crash.
        return True

    # Anything over 1 MB is assumed to be generated or binary content.
    return size > 1_000_000
257
+
258
+
259
def _should_skip_directory(name: str) -> bool:
    """Return True when a path component should be excluded from the scan.

    A component is skipped when it is hidden (dot-prefixed) or matches an
    entry in SKIP_DIRECTORIES. Entries are matched with fnmatch so glob
    entries such as "*.egg-info" work as intended — the previous plain
    set-membership test could never match a real "pkg.egg-info" directory.
    Literal entries behave exactly as before (fnmatchcase is an exact,
    case-sensitive comparison when the pattern has no wildcards).
    """
    if name.startswith("."):
        return True
    return any(fnmatch.fnmatchcase(name, pattern) for pattern in SKIP_DIRECTORIES)
262
+
263
+
264
+ def _redact_secret(match: str, keep_chars: int = 4) -> str:
265
+ """Redact a secret, keeping only first few characters."""
266
+ if len(match) <= keep_chars * 2:
267
+ return "***REDACTED***"
268
+ return f"{match[:keep_chars]}...{match[-keep_chars:]}"
269
+
270
+
271
def scan_file_content(content: str, filename: str) -> List[SecurityIssue]:
    """Scan one file's text for hardcoded secrets.

    Applies every entry in SECRET_PATTERNS to each line and emits one
    SecurityIssue per match, with the matched text redacted. Matches
    containing placeholder/example markers are ignored.
    """
    placeholder_markers = (
        "example", "placeholder", "your_", "xxx", "changeme",
        "todo", "fixme", "replace", "insert", "<your",
    )
    findings: List[SecurityIssue] = []
    text_lines = content.split("\n")

    for name, pattern, severity, description, suggestion in SECRET_PATTERNS:
        # Compile once per pattern instead of re-scanning the cache per line.
        regex = re.compile(pattern)
        for line_no, text in enumerate(text_lines, 1):
            for hit in regex.finditer(text):
                snippet = hit.group(0)
                # Skip obvious placeholders / documentation samples.
                lowered = snippet.lower()
                if any(marker in lowered for marker in placeholder_markers):
                    continue

                findings.append(SecurityIssue(
                    severity=severity,
                    issue_type=name,
                    file=filename,
                    line=line_no,
                    description=description,
                    suggestion=suggestion,
                    match=_redact_secret(snippet),
                ))

    return findings
300
+
301
+
302
def scan_directory(path: str) -> ScanResult:
    """Recursively scan a directory tree for security issues.

    Flags a ``.env`` file that is not mentioned in ``.gitignore``, then
    scans every readable, non-skipped file with scan_file_content.
    ``passed`` is False only when at least one CRITICAL issue was found.
    """
    root = Path(path)
    if not root.exists():
        # Nothing to scan — vacuously clean.
        return ScanResult(passed=True, files_scanned=0)

    issues: List[SecurityIssue] = []
    files_scanned = 0

    # A committed .env would leak secrets into version control.
    env_file = root / ".env"
    if env_file.exists():
        gitignore_file = root / ".gitignore"
        gitignore_text = (
            gitignore_file.read_text(errors="ignore")
            if gitignore_file.exists()
            else ""
        )
        # Substring test: any mention of ".env" in .gitignore counts.
        if ".env" not in gitignore_text:
            issues.append(SecurityIssue(
                severity=Severity.CRITICAL,
                issue_type="exposed_env",
                file=".env",
                description=".env file exists but is not in .gitignore",
                suggestion="Add '.env' to .gitignore to prevent committing secrets",
            ))

    # Walk the whole tree, skipping excluded directories and file types.
    for candidate in root.rglob("*"):
        if candidate.is_dir():
            continue
        if any(_should_skip_directory(part) for part in candidate.parts):
            continue
        if _should_skip_file(candidate):
            continue

        try:
            text = candidate.read_text(errors="ignore")
            files_scanned += 1
            issues.extend(scan_file_content(text, str(candidate.relative_to(root))))
        except Exception:
            # Best-effort scan: ignore files we cannot read.
            continue

    # Report critical findings first.
    rank = {Severity.CRITICAL: 0, Severity.WARNING: 1, Severity.INFO: 2}
    issues.sort(key=lambda issue: rank.get(issue.severity, 99))

    return ScanResult(
        passed=all(issue.severity != Severity.CRITICAL for issue in issues),
        issues=issues,
        files_scanned=files_scanned,
    )
367
+
368
+
369
def scan_file_list(files: List[Dict[str, str]]) -> ScanResult:
    """
    Scan a list of in-memory files provided as dicts with 'path' and
    'content' keys.

    This is useful for scanning files that haven't been written to disk
    yet, like files uploaded via API before deployment.

    Args:
        files: List of dicts with 'path' (or 'file') and 'content' keys

    Returns:
        ScanResult with any issues found
    """
    issues: List[SecurityIssue] = []
    scanned = 0

    # Detect a .env entry that the provided .gitignore does not cover.
    paths = [entry.get("path") or entry.get("file", "") for entry in files]
    has_env = any(p == ".env" or p.endswith(".env") for p in paths)

    gitignore_text = ""
    for entry in files:
        name = entry.get("path") or entry.get("file", "")
        if name == ".gitignore" or name.endswith("/.gitignore"):
            gitignore_text = entry.get("content", "")
            break

    if has_env and ".env" not in gitignore_text:
        issues.append(SecurityIssue(
            severity=Severity.CRITICAL,
            issue_type="exposed_env",
            file=".env",
            description=".env file found but not in .gitignore",
            suggestion="Add '.env' to .gitignore to prevent committing secrets",
        ))

    # Scan each entry's content.
    for entry in files:
        name = entry.get("path") or entry.get("file", "unknown")
        body = entry.get("content", "")

        if not body:
            continue
        # NUL byte near the start: treat as binary and skip.
        if "\x00" in body[:1000]:
            continue

        scanned += 1
        issues.extend(scan_file_content(body, name))

    # Report critical findings first.
    rank = {Severity.CRITICAL: 0, Severity.WARNING: 1, Severity.INFO: 2}
    issues.sort(key=lambda issue: rank.get(issue.severity, 99))

    return ScanResult(
        passed=all(issue.severity != Severity.CRITICAL for issue in issues),
        issues=issues,
        files_scanned=scanned,
    )
@@ -0,0 +1,14 @@
1
# Caddyfile template for multi-service routing
# Generated by Xenfra for microservices deployments
#
# Jinja2 template: emits one path-prefixed route per service, each
# reverse-proxied to that service's local port, plus a catch-all
# landing response on "/".

:80 {
    {% for service in services %}
    # Requests under /<service.name> go to the service's local port.
    route /{{ service.name }}* {
        reverse_proxy localhost:{{ service.port }}
    }
    {% endfor %}

    # Fallback: plain-text gateway banner for the project root.
    route / {
        respond "Xenfra Gateway - {{ project_name }}" 200
    }
}
@@ -1,38 +1,41 @@
1
- # Dockerfile template for Python web applications
2
- FROM {{ python_version | default('python:3.11-slim') }}
3
-
4
- WORKDIR /app
5
-
6
- {% if package_manager != 'pip' %}
7
- # Install uv package manager and add to PATH
8
- RUN apt-get update && apt-get install -y curl && \
9
- curl -LsSf https://astral.sh/uv/install.sh | sh && \
10
- apt-get remove -y curl && \
11
- apt-get clean && \
12
- rm -rf /var/lib/apt/lists/*
13
- ENV PATH="/root/.local/bin:/root/.cargo/bin:$PATH"
14
- {% endif %}
15
-
16
- {% if dependency_file == 'pyproject.toml' %}
17
- # For pyproject.toml, copy all files first (hatchling needs README.md etc.)
18
- COPY . .
19
-
20
- # Install dependencies
21
- RUN uv pip install --system --no-cache .
22
- {% else %}
23
- COPY {{ dependency_file | default('requirements.txt') }} .
24
-
25
- # Install dependencies
26
- {% if package_manager == 'pip' %}
27
- RUN pip install --no-cache-dir -r {{ dependency_file | default('requirements.txt') }}
28
- {% else %}
29
- RUN uv pip install --system --no-cache -r {{ dependency_file | default('requirements.txt') }}
30
- {% endif %}
31
-
32
- COPY . .
33
- {% endif %}
34
-
35
- # Expose the application port
36
- EXPOSE {{ port | default(8000) }}
37
-
38
- # The command to run the application will be in docker-compose.yml
1
# Dockerfile template for Python web applications
FROM {{ python_version | default('python:3.11-slim') }}

WORKDIR /app

{% if package_manager != 'pip' %}
# Install uv package manager and add to PATH
RUN apt-get update && apt-get install -y curl && \
    curl -LsSf https://astral.sh/uv/install.sh | sh && \
    apt-get remove -y curl && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*
ENV PATH="/root/.local/bin:/root/.cargo/bin:$PATH"
{% endif %}

{% if dependency_file == 'pyproject.toml' %}
# For pyproject.toml, copy all files first (hatchling needs README.md etc.)
COPY . .

# Install dependencies
RUN uv pip install --system --no-cache .
{% else %}
COPY {{ dependency_file | default('requirements.txt') }} .

# Install dependencies
{% if package_manager == 'pip' %}
RUN pip install --no-cache-dir -r {{ dependency_file | default('requirements.txt') }}
{% else %}
# FIX: restored from 0.2.2 — without this branch, projects using a non-pip
# package manager with a requirements file had no dependencies installed.
RUN uv pip install --system --no-cache -r {{ dependency_file | default('requirements.txt') }}
{% endif %}

{% if missing_deps %}
# Auto-heal missing dependencies (Zen Mode)
RUN pip install --no-cache-dir {{ missing_deps | join(' ') }}
{% endif %}

COPY . .
{% endif %}

# Expose the application port
EXPOSE {{ port | default(8000) }}

# The command to run the application will be in docker-compose.yml