kailash 0.8.5__py3-none-any.whl → 0.8.7__py3-none-any.whl

This diff shows the content of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
Files changed (37)
  1. kailash/__init__.py +5 -5
  2. kailash/channels/__init__.py +2 -1
  3. kailash/channels/mcp_channel.py +23 -4
  4. kailash/cli/validate_imports.py +202 -0
  5. kailash/core/resilience/bulkhead.py +15 -5
  6. kailash/core/resilience/circuit_breaker.py +4 -1
  7. kailash/core/resilience/health_monitor.py +312 -84
  8. kailash/edge/migration/edge_migration_service.py +384 -0
  9. kailash/mcp_server/protocol.py +26 -0
  10. kailash/mcp_server/server.py +1081 -8
  11. kailash/mcp_server/subscriptions.py +1560 -0
  12. kailash/mcp_server/transports.py +305 -0
  13. kailash/middleware/gateway/event_store.py +1 -0
  14. kailash/nodes/base.py +77 -1
  15. kailash/nodes/code/python.py +44 -3
  16. kailash/nodes/data/async_sql.py +42 -20
  17. kailash/nodes/edge/edge_migration_node.py +16 -12
  18. kailash/nodes/governance.py +410 -0
  19. kailash/nodes/rag/registry.py +1 -1
  20. kailash/nodes/transaction/distributed_transaction_manager.py +48 -1
  21. kailash/nodes/transaction/saga_state_storage.py +2 -1
  22. kailash/nodes/validation.py +8 -8
  23. kailash/runtime/local.py +30 -0
  24. kailash/runtime/validation/__init__.py +7 -15
  25. kailash/runtime/validation/import_validator.py +446 -0
  26. kailash/runtime/validation/suggestion_engine.py +5 -5
  27. kailash/utils/data_paths.py +74 -0
  28. kailash/workflow/builder.py +183 -4
  29. kailash/workflow/mermaid_visualizer.py +3 -1
  30. kailash/workflow/templates.py +6 -6
  31. kailash/workflow/validation.py +134 -3
  32. {kailash-0.8.5.dist-info → kailash-0.8.7.dist-info}/METADATA +20 -17
  33. {kailash-0.8.5.dist-info → kailash-0.8.7.dist-info}/RECORD +37 -31
  34. {kailash-0.8.5.dist-info → kailash-0.8.7.dist-info}/WHEEL +0 -0
  35. {kailash-0.8.5.dist-info → kailash-0.8.7.dist-info}/entry_points.txt +0 -0
  36. {kailash-0.8.5.dist-info → kailash-0.8.7.dist-info}/licenses/LICENSE +0 -0
  37. {kailash-0.8.5.dist-info → kailash-0.8.7.dist-info}/top_level.txt +0 -0
kailash/runtime/validation/import_validator.py (new file)
@@ -0,0 +1,446 @@
+ """
+ Import path validator for production deployment compatibility.
+
+ This module detects relative imports that fail in production environments
+ and provides guidance for absolute import patterns.
+
+ Based on Gold Standard: sdk-users/7-gold-standards/absolute-imports-gold-standard.md
+ """
+
+ import ast
+ import logging
+ import os
+ import re
+ from dataclasses import dataclass
+ from enum import Enum
+ from pathlib import Path
+ from typing import Dict, List, Optional, Set, Tuple
+
+ logger = logging.getLogger(__name__)
+
+
+ class ImportIssueType(Enum):
+     """Types of import issues that can be detected."""
+
+     RELATIVE_IMPORT = "relative_import"
+     IMPLICIT_RELATIVE = "implicit_relative"
+     LOCAL_IMPORT = "local_import"
+     AMBIGUOUS_IMPORT = "ambiguous_import"
+
+
+ @dataclass
+ class ImportIssue:
+     """Represents an import issue found in a file."""
+
+     file_path: str
+     line_number: int
+     import_statement: str
+     issue_type: ImportIssueType
+     severity: str  # "critical", "warning", "info"
+     message: str
+     suggestion: str
+     gold_standard_ref: str = (
+         "sdk-users/7-gold-standards/absolute-imports-gold-standard.md"
+     )
+
+
+ class ImportPathValidator:
+     """
+     Validates import paths for production deployment compatibility.
+
+     Detects relative imports that work in development but fail in production
+     when applications run from repository root.
+     """
+
+     def __init__(self, repo_root: Optional[str] = None):
+         """
+         Initialize import path validator.
+
+         Args:
+             repo_root: Repository root path. If None, tries to auto-detect.
+         """
+         self.repo_root = Path(repo_root) if repo_root else self._find_repo_root()
+         self.sdk_modules = self._identify_sdk_modules()
+         self.issues: List[ImportIssue] = []
+
+     def _find_repo_root(self) -> Path:
+         """Find repository root by looking for key markers."""
+         current = Path.cwd()
+
+         # Look for common repo markers
+         markers = [".git", "pyproject.toml", "setup.py", "requirements.txt"]
+
+         while current != current.parent:
+             for marker in markers:
+                 if (current / marker).exists():
+                     return current
+             current = current.parent
+
+         # Fallback to current directory
+         return Path.cwd()
+
+     def _identify_sdk_modules(self) -> Set[str]:
+         """Identify SDK module names for import validation."""
+         sdk_modules = set()
+
+         # Check for src structure
+         src_path = self.repo_root / "src"
+         if src_path.exists():
+             for item in src_path.iterdir():
+                 if item.is_dir() and (item / "__init__.py").exists():
+                     sdk_modules.add(item.name)
+
+         # Common SDK module names
+         sdk_modules.update(["kailash", "dataflow", "nexus"])
+
+         return sdk_modules
+
+     def validate_file(self, file_path: str) -> List[ImportIssue]:
+         """
+         Validate imports in a single Python file.
+
+         Args:
+             file_path: Path to Python file to validate
+
+         Returns:
+             List of import issues found
+         """
+         file_path = Path(file_path)
+         if not file_path.exists() or not file_path.suffix == ".py":
+             return []
+
+         issues = []
+
+         try:
+             with open(file_path, "r", encoding="utf-8") as f:
+                 content = f.read()
+
+             # Parse AST to find imports
+             tree = ast.parse(content, filename=str(file_path))
+
+             # Check each import statement
+             for node in ast.walk(tree):
+                 if isinstance(node, ast.ImportFrom):
+                     issue = self._check_import_from(node, file_path, content)
+                     if issue:
+                         issues.append(issue)
+                 elif isinstance(node, ast.Import):
+                     issue = self._check_import(node, file_path, content)
+                     if issue:
+                         issues.append(issue)
+
+         except Exception as e:
+             logger.warning(f"Failed to parse {file_path}: {e}")
+
+         return issues
+
+     def _check_import_from(
+         self, node: ast.ImportFrom, file_path: Path, content: str
+     ) -> Optional[ImportIssue]:
+         """Check 'from X import Y' statements."""
+         if node.level > 0:
+             # Explicit relative import (from . import x, from .. import x)
+             import_str = self._get_import_string(node, content)
+
+             return ImportIssue(
+                 file_path=str(file_path),
+                 line_number=node.lineno,
+                 import_statement=import_str,
+                 issue_type=ImportIssueType.RELATIVE_IMPORT,
+                 severity="critical",
+                 message="Relative import will fail in production deployment",
+                 suggestion=self._generate_absolute_import_suggestion(node, file_path),
+             )
+
+         elif node.module:
+             # Check for implicit relative imports
+             module_parts = node.module.split(".")
+             first_part = module_parts[0]
+
+             # Check if this looks like a local module import
+             if self._is_likely_local_import(first_part, file_path):
+                 import_str = self._get_import_string(node, content)
+
+                 return ImportIssue(
+                     file_path=str(file_path),
+                     line_number=node.lineno,
+                     import_statement=import_str,
+                     issue_type=ImportIssueType.IMPLICIT_RELATIVE,
+                     severity="critical",
+                     message=f"Implicit relative import '{first_part}' will fail when run from repo root",
+                     suggestion=self._generate_absolute_import_suggestion(
+                         node, file_path
+                     ),
+                 )
+
+         return None
+
+     def _check_import(
+         self, node: ast.Import, file_path: Path, content: str
+     ) -> Optional[ImportIssue]:
+         """Check 'import X' statements."""
+         # Generally less problematic, but check for ambiguous local imports
+         for alias in node.names:
+             name_parts = alias.name.split(".")
+             first_part = name_parts[0]
+
+             if self._is_likely_local_import(first_part, file_path):
+                 import_str = f"import {alias.name}"
+
+                 return ImportIssue(
+                     file_path=str(file_path),
+                     line_number=node.lineno,
+                     import_statement=import_str,
+                     issue_type=ImportIssueType.LOCAL_IMPORT,
+                     severity="warning",
+                     message=f"Local module import '{first_part}' may be ambiguous in production",
+                     suggestion=f"Consider using absolute import: from {self._get_module_path(file_path)} import {first_part}",
+                 )
+
+         return None
+
+     def _is_likely_local_import(self, module_name: str, file_path: Path) -> bool:
+         """
+         Check if a module name is likely a local/relative import.
+
+         Returns True if:
+         - Module exists as sibling to current file
+         - Module is not a known SDK module
+         - Module is not a standard library module
+         """
+         # Skip if it's a known SDK module
+         if module_name in self.sdk_modules:
+             return False
+
+         # Skip if it's likely a third-party or stdlib module
+         if module_name in [
+             "os",
+             "sys",
+             "json",
+             "logging",
+             "typing",
+             "pathlib",
+             "pytest",
+             "unittest",
+             "numpy",
+             "pandas",
+             "requests",
+         ]:
+             return False
+
+         # Check if module exists as sibling
+         parent_dir = file_path.parent
+         possible_module = parent_dir / module_name
+         possible_file = parent_dir / f"{module_name}.py"
+
+         if possible_module.exists() or possible_file.exists():
+             return True
+
+         # Check common local module patterns
+         local_patterns = ["contracts", "nodes", "core", "utils", "models", "schemas"]
+         if module_name in local_patterns:
+             return True
+
+         return False
+
+     def _get_import_string(self, node: ast.ImportFrom, content: str) -> str:
+         """Extract the actual import string from source."""
+         lines = content.split("\n")
+         if 0 <= node.lineno - 1 < len(lines):
+             return lines[node.lineno - 1].strip()
+         return f"from {node.module} import ..."
+
+     def _get_module_path(self, file_path: Path) -> str:
+         """Get the absolute module path for a file's directory."""
+         try:
+             # Get relative path from repo root
+             rel_path = file_path.relative_to(self.repo_root)
+
+             # Convert to module path (excluding the filename)
+             parts = list(rel_path.parts[:-1])  # Remove filename
+
+             # Join all parts to create module path
+             return ".".join(parts) if parts else ""
+
+         except ValueError:
+             # File not under repo root
+             return "src.your_module"
+
+     def _generate_absolute_import_suggestion(
+         self, node: ast.ImportFrom, file_path: Path
+     ) -> str:
+         """Generate suggested absolute import."""
+         module_base = self._get_module_path(file_path)
+         # Debug
+         # print(f"DEBUG: file_path={file_path}, module_base={module_base}, node.level={node.level}, node.module={node.module}")
+
+         if node.level > 0:
+             # Handle relative imports
+             module_parts = module_base.split(".") if module_base else []
+
+             # For relative imports, we need to go up 'level' directories
+             # But note: for a file in package a.b.c:
+             # - level 1 (.) = current package (a.b.c)
+             # - level 2 (..) = parent package (a.b)
+             # - level 3 (...) = grandparent (a)
+             # Since we want the parent, we go up (level-1) from current
+             if len(module_parts) > node.level - 1:
+                 # Go to the appropriate parent level
+                 if node.level == 1:
+                     # Same directory
+                     parent_parts = module_parts
+                 else:
+                     # Go up (level-1) directories
+                     parent_parts = module_parts[: -(node.level - 1)]
+
+                 if node.module:
+                     # Append the relative module path
+                     suggested_module = ".".join(parent_parts + node.module.split("."))
+                 else:
+                     # Just the parent module
+                     suggested_module = ".".join(parent_parts)
+             else:
+                 # Can't go up that many levels, use what we have
+                 if node.module:
+                     suggested_module = node.module
+                 else:
+                     suggested_module = module_base
+         else:
+             # Implicit relative - the module is in the current directory
+             if module_base:
+                 suggested_module = f"{module_base}.{node.module}"
+             else:
+                 suggested_module = node.module
+
+         # Format the suggestion
+         if hasattr(node, "names") and node.names:
+             imports = ", ".join(alias.name for alias in node.names)
+             return f"from {suggested_module} import {imports}"
+         else:
+             return f"from {suggested_module} import ..."
+
+     def validate_directory(
+         self, directory: str, recursive: bool = True
+     ) -> List[ImportIssue]:
+         """
+         Validate all Python files in a directory.
+
+         Args:
+             directory: Directory path to validate
+             recursive: Whether to scan subdirectories
+
+         Returns:
+             List of all import issues found
+         """
+         directory = Path(directory)
+         if not directory.exists() or not directory.is_dir():
+             return []
+
+         all_issues = []
+
+         pattern = "**/*.py" if recursive else "*.py"
+         for py_file in directory.glob(pattern):
+             # Skip test files by default (can be configured)
+             if "test" in py_file.name or "__pycache__" in str(py_file):
+                 continue
+
+             issues = self.validate_file(py_file)
+             all_issues.extend(issues)
+
+         return all_issues
+
+     def generate_report(self, issues: List[ImportIssue]) -> str:
+         """
+         Generate a human-readable report of import issues.
+
+         Args:
+             issues: List of import issues to report
+
+         Returns:
+             Formatted report string
+         """
+         if not issues:
+             return "✅ No import issues found! All imports are production-ready."
+
+         report = []
+         report.append("🚨 IMPORT VALIDATION REPORT")
+         report.append("=" * 60)
+         report.append(
+             f"Found {len(issues)} import issues that may fail in production\n"
+         )
+
+         # Group by severity
+         critical_issues = [i for i in issues if i.severity == "critical"]
+         warning_issues = [i for i in issues if i.severity == "warning"]
+
+         if critical_issues:
+             report.append("🔴 CRITICAL ISSUES (Will fail in production)")
+             report.append("-" * 60)
+             for issue in critical_issues:
+                 report.append(f"\nFile: {issue.file_path}")
+                 report.append(f"Line {issue.line_number}: {issue.import_statement}")
+                 report.append(f"Issue: {issue.message}")
+                 report.append(f"Fix: {issue.suggestion}")
+
+         if warning_issues:
+             report.append("\n🟡 WARNINGS (May cause issues)")
+             report.append("-" * 60)
+             for issue in warning_issues:
+                 report.append(f"\nFile: {issue.file_path}")
+                 report.append(f"Line {issue.line_number}: {issue.import_statement}")
+                 report.append(f"Issue: {issue.message}")
+                 report.append(f"Suggestion: {issue.suggestion}")
+
+         report.append(
+             f"\n📚 See gold standard: {issues[0].gold_standard_ref if issues else 'N/A'}"
+         )
+
+         return "\n".join(report)
+
+     def fix_imports_in_file(
+         self, file_path: str, dry_run: bool = True
+     ) -> List[Tuple[str, str]]:
+         """
+         Attempt to fix import issues in a file.
+
+         Args:
+             file_path: Path to file to fix
+             dry_run: If True, only return proposed changes without modifying file
+
+         Returns:
+             List of (original, fixed) import tuples
+         """
+         issues = self.validate_file(file_path)
+         if not issues:
+             return []
+
+         fixes = []
+
+         with open(file_path, "r", encoding="utf-8") as f:
+             lines = f.readlines()
+
+         # Sort issues by line number in reverse to avoid offset issues
+         issues.sort(key=lambda x: x.line_number, reverse=True)
+
+         for issue in issues:
+             if issue.severity == "critical":
+                 line_idx = issue.line_number - 1
+                 if 0 <= line_idx < len(lines):
+                     original = lines[line_idx].rstrip()
+
+                     # Simple replacement based on suggestion
+                     # In practice, this would need more sophisticated AST rewriting
+                     fixed = lines[line_idx].replace(
+                         issue.import_statement, issue.suggestion
+                     )
+
+                     fixes.append((original, fixed.rstrip()))
+
+                     if not dry_run:
+                         lines[line_idx] = fixed
+
+         if not dry_run and fixes:
+             with open(file_path, "w", encoding="utf-8") as f:
+                 f.writelines(lines)
+
+         return fixes
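
For orientation, a minimal usage sketch of the new validator; the module path follows the file location above, and the scanned directory and file names ("src", "src/app/main.py") are illustrative rather than part of the package:

    from kailash.runtime.validation.import_validator import ImportPathValidator

    validator = ImportPathValidator()  # auto-detects the repo root from .git/pyproject.toml
    issues = validator.validate_directory("src", recursive=True)
    print(validator.generate_report(issues))

    # Preview proposed rewrites without modifying the file (dry_run defaults to True).
    for original, fixed in validator.fix_imports_in_file("src/app/main.py"):
        print(f"{original}  ->  {fixed}")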
kailash/runtime/validation/suggestion_engine.py
@@ -69,7 +69,7 @@ class ValidationSuggestionEngine:
  ErrorCategory.TYPE_MISMATCH: {
      "message": "The parameter type doesn't match what the node expects. Check the data types being passed through the connection.",
      "code_template": '# Check the output type from source node\n# Expected: {expected_type}\n# Got: {actual_type}\nworkflow.add_connection("{source}", "{source_port}", "{target}", "{target_port}")',
-     "doc_link": "sdk-users/validation/common-mistakes.md#type-mismatch",
+     "doc_link": "sdk-users/2-core-concepts/validation/common-mistakes.md#type-mismatch",
      "alternatives": [
          "Add a data transformation node between source and target",
          "Check if you're connecting to the correct output port",
@@ -79,7 +79,7 @@ class ValidationSuggestionEngine:
  ErrorCategory.MISSING_PARAMETER: {
      "message": "A required parameter is missing. Make sure all required parameters are provided via connections or node configuration.",
      "code_template": '# Add the missing parameter connection:\nworkflow.add_connection("{source}", "{source_port}", "{target}", "{missing_param}")\n\n# Or provide it directly in node configuration:\nworkflow.add_node("{node_type}", "{target}", {{"{missing_param}": "value"}})',
-     "doc_link": "sdk-users/validation/common-mistakes.md#missing-parameters",
+     "doc_link": "sdk-users/2-core-concepts/validation/common-mistakes.md#missing-parameters",
      "alternatives": [
          "Provide the parameter directly in node configuration",
          "Create a PythonCodeNode to generate the required parameter",
@@ -89,7 +89,7 @@ class ValidationSuggestionEngine:
  ErrorCategory.CONSTRAINT_VIOLATION: {
      "message": "The parameter value violates validation constraints. Check the parameter requirements for this node type.",
      "code_template": '# Ensure parameter meets requirements:\n# {constraint_details}\nworkflow.add_connection("{source}", "{source_port}", "{target}", "{target_port}")\n\n# Or add validation in source node:\nworkflow.add_node("PythonCodeNode", "validator", {{"code": "result = max(0, input_value)"}})',
-     "doc_link": "sdk-users/nodes/node-selection-guide.md#parameter-validation",
+     "doc_link": "sdk-users/6-reference/nodes/node-selection-guide.md#parameter-validation",
      "alternatives": [
          "Add data validation/transformation before the target node",
          "Check the node documentation for parameter requirements",
@@ -99,7 +99,7 @@ class ValidationSuggestionEngine:
  ErrorCategory.SECURITY_VIOLATION: {
      "message": "Potential security issue detected in parameter value. This could indicate SQL injection, script injection, or other security vulnerabilities.",
      "code_template": '# Use parameterized/sanitized approach:\n# For SQL operations:\nworkflow.add_node("SQLDatabaseNode", "safe_query", {{\n "query": "SELECT * FROM table WHERE id = $1",\n "params": ["user_input"]\n}})\n\n# For user input, add validation:\nworkflow.add_node("PythonCodeNode", "sanitizer", {{"code": "result = sanitize_input(user_data)"}})',
-     "doc_link": "sdk-users/enterprise/security-patterns.md#input-validation",
+     "doc_link": "sdk-users/5-enterprise/security-patterns.md#input-validation",
      "alternatives": [
          "Use parameterized queries instead of string concatenation",
          "Add input sanitization/validation nodes",
@@ -110,7 +110,7 @@ class ValidationSuggestionEngine:
  ErrorCategory.UNKNOWN: {
      "message": "An unexpected validation error occurred. Check the error details and node documentation.",
      "code_template": '# General troubleshooting:\n# 1. Check node documentation for parameter requirements\n# 2. Verify data types and formats\n# 3. Test with simpler data first\nworkflow.add_connection("{source}", "{source_port}", "{target}", "{target_port}")',
-     "doc_link": "sdk-users/developer/05-troubleshooting.md",
+     "doc_link": "sdk-users/3-development/guides/troubleshooting.md",
      "alternatives": [
          "Check the node documentation for specific requirements",
          "Test with simplified data to isolate the issue",
kailash/utils/data_paths.py (new file)
@@ -0,0 +1,74 @@
+ """Data path utilities for examples and PythonCodeNode execution.
+
+ This module provides helper functions for constructing standardized paths
+ to input and output data files used in workflows and examples.
+ """
+
+ import os
+ from pathlib import Path
+
+
+ def get_project_root() -> Path:
+     """Get the project root directory.
+
+     Returns:
+         Path to the kailash_python_sdk project root
+     """
+     # Find the project root by looking for setup.py or pyproject.toml
+     current = Path(__file__).resolve()
+     for parent in current.parents:
+         if (parent / "setup.py").exists() or (parent / "pyproject.toml").exists():
+             return parent
+     # Fallback to going up from src/kailash/utils to project root
+     return current.parent.parent.parent
+
+
+ def get_input_data_path(filename: str) -> str:
+     """Get the full path to an input data file.
+
+     Args:
+         filename: Name of the input data file
+
+     Returns:
+         Full path to the input data file
+     """
+     project_root = get_project_root()
+     return str(project_root / "data" / "inputs" / filename)
+
+
+ def get_output_data_path(filename: str) -> str:
+     """Get the full path to an output data file.
+
+     Args:
+         filename: Name of the output data file
+
+     Returns:
+         Full path to the output data file
+     """
+     project_root = get_project_root()
+     output_path = project_root / "data" / "outputs" / filename
+
+     # Ensure the output directory exists
+     output_path.parent.mkdir(parents=True, exist_ok=True)
+
+     return str(output_path)
+
+
+ def get_data_path(subfolder: str, filename: str) -> str:
+     """Get the full path to a data file in a specific subfolder.
+
+     Args:
+         subfolder: Subfolder within the data directory (e.g., 'inputs', 'outputs', 'templates')
+         filename: Name of the data file
+
+     Returns:
+         Full path to the data file
+     """
+     project_root = get_project_root()
+     data_path = project_root / "data" / subfolder / filename
+
+     # Ensure the directory exists for output-type operations
+     if subfolder in ("outputs", "exports", "tracking"):
+         data_path.parent.mkdir(parents=True, exist_ok=True)
+
+     return str(data_path)
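
A minimal usage sketch of the new helpers; the file names ("customers.csv", "report.json", "invoice.html") are illustrative, and all paths resolve against the auto-detected project root:

    from kailash.utils.data_paths import (
        get_data_path,
        get_input_data_path,
        get_output_data_path,
    )

    customers = get_input_data_path("customers.csv")       # <root>/data/inputs/customers.csv
    report = get_output_data_path("report.json")           # <root>/data/outputs/report.json; directory is created
    template = get_data_path("templates", "invoice.html")  # no mkdir for non-output subfolders

    print(customers, report, template, sep="\n")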