kailash 0.8.4__py3-none-any.whl → 0.8.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +5 -11
- kailash/channels/__init__.py +2 -1
- kailash/channels/mcp_channel.py +23 -4
- kailash/cli/__init__.py +11 -1
- kailash/cli/validate_imports.py +202 -0
- kailash/cli/validation_audit.py +570 -0
- kailash/core/actors/supervisor.py +1 -1
- kailash/core/resilience/bulkhead.py +15 -5
- kailash/core/resilience/circuit_breaker.py +74 -1
- kailash/core/resilience/health_monitor.py +433 -33
- kailash/edge/compliance.py +33 -0
- kailash/edge/consistency.py +609 -0
- kailash/edge/coordination/__init__.py +30 -0
- kailash/edge/coordination/global_ordering.py +355 -0
- kailash/edge/coordination/leader_election.py +217 -0
- kailash/edge/coordination/partition_detector.py +296 -0
- kailash/edge/coordination/raft.py +485 -0
- kailash/edge/discovery.py +63 -1
- kailash/edge/migration/__init__.py +19 -0
- kailash/edge/migration/edge_migration_service.py +384 -0
- kailash/edge/migration/edge_migrator.py +832 -0
- kailash/edge/monitoring/__init__.py +21 -0
- kailash/edge/monitoring/edge_monitor.py +736 -0
- kailash/edge/prediction/__init__.py +10 -0
- kailash/edge/prediction/predictive_warmer.py +591 -0
- kailash/edge/resource/__init__.py +102 -0
- kailash/edge/resource/cloud_integration.py +796 -0
- kailash/edge/resource/cost_optimizer.py +949 -0
- kailash/edge/resource/docker_integration.py +919 -0
- kailash/edge/resource/kubernetes_integration.py +893 -0
- kailash/edge/resource/platform_integration.py +913 -0
- kailash/edge/resource/predictive_scaler.py +959 -0
- kailash/edge/resource/resource_analyzer.py +824 -0
- kailash/edge/resource/resource_pools.py +610 -0
- kailash/integrations/dataflow_edge.py +261 -0
- kailash/mcp_server/registry_integration.py +1 -1
- kailash/mcp_server/server.py +351 -8
- kailash/mcp_server/transports.py +305 -0
- kailash/middleware/gateway/event_store.py +1 -0
- kailash/monitoring/__init__.py +18 -0
- kailash/monitoring/alerts.py +646 -0
- kailash/monitoring/metrics.py +677 -0
- kailash/nodes/__init__.py +2 -0
- kailash/nodes/ai/semantic_memory.py +2 -2
- kailash/nodes/base.py +622 -1
- kailash/nodes/code/python.py +44 -3
- kailash/nodes/data/async_sql.py +42 -20
- kailash/nodes/edge/__init__.py +36 -0
- kailash/nodes/edge/base.py +240 -0
- kailash/nodes/edge/cloud_node.py +710 -0
- kailash/nodes/edge/coordination.py +239 -0
- kailash/nodes/edge/docker_node.py +825 -0
- kailash/nodes/edge/edge_data.py +582 -0
- kailash/nodes/edge/edge_migration_node.py +396 -0
- kailash/nodes/edge/edge_monitoring_node.py +421 -0
- kailash/nodes/edge/edge_state.py +673 -0
- kailash/nodes/edge/edge_warming_node.py +393 -0
- kailash/nodes/edge/kubernetes_node.py +652 -0
- kailash/nodes/edge/platform_node.py +766 -0
- kailash/nodes/edge/resource_analyzer_node.py +378 -0
- kailash/nodes/edge/resource_optimizer_node.py +501 -0
- kailash/nodes/edge/resource_scaler_node.py +397 -0
- kailash/nodes/governance.py +410 -0
- kailash/nodes/ports.py +676 -0
- kailash/nodes/rag/registry.py +1 -1
- kailash/nodes/transaction/distributed_transaction_manager.py +48 -1
- kailash/nodes/transaction/saga_state_storage.py +2 -1
- kailash/nodes/validation.py +8 -8
- kailash/runtime/local.py +374 -1
- kailash/runtime/validation/__init__.py +12 -0
- kailash/runtime/validation/connection_context.py +119 -0
- kailash/runtime/validation/enhanced_error_formatter.py +202 -0
- kailash/runtime/validation/error_categorizer.py +164 -0
- kailash/runtime/validation/import_validator.py +446 -0
- kailash/runtime/validation/metrics.py +380 -0
- kailash/runtime/validation/performance.py +615 -0
- kailash/runtime/validation/suggestion_engine.py +212 -0
- kailash/testing/fixtures.py +2 -2
- kailash/utils/data_paths.py +74 -0
- kailash/workflow/builder.py +413 -8
- kailash/workflow/contracts.py +418 -0
- kailash/workflow/edge_infrastructure.py +369 -0
- kailash/workflow/mermaid_visualizer.py +3 -1
- kailash/workflow/migration.py +3 -3
- kailash/workflow/templates.py +6 -6
- kailash/workflow/type_inference.py +669 -0
- kailash/workflow/validation.py +134 -3
- {kailash-0.8.4.dist-info → kailash-0.8.6.dist-info}/METADATA +52 -34
- {kailash-0.8.4.dist-info → kailash-0.8.6.dist-info}/RECORD +93 -42
- kailash/nexus/__init__.py +0 -21
- kailash/nexus/cli/__init__.py +0 -5
- kailash/nexus/cli/__main__.py +0 -6
- kailash/nexus/cli/main.py +0 -176
- kailash/nexus/factory.py +0 -413
- kailash/nexus/gateway.py +0 -545
- {kailash-0.8.4.dist-info → kailash-0.8.6.dist-info}/WHEEL +0 -0
- {kailash-0.8.4.dist-info → kailash-0.8.6.dist-info}/entry_points.txt +0 -0
- {kailash-0.8.4.dist-info → kailash-0.8.6.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.8.4.dist-info → kailash-0.8.6.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,446 @@
|
|
1
|
+
"""
|
2
|
+
Import path validator for production deployment compatibility.
|
3
|
+
|
4
|
+
This module detects relative imports that fail in production environments
|
5
|
+
and provides guidance for absolute import patterns.
|
6
|
+
|
7
|
+
Based on Gold Standard: sdk-users/7-gold-standards/absolute-imports-gold-standard.md
|
8
|
+
"""
|
9
|
+
|
10
|
+
import ast
import logging
import os
import re
import sys
from dataclasses import dataclass
from enum import Enum
from pathlib import Path
from typing import Dict, List, Optional, Set, Tuple
|
18
|
+
|
19
|
+
logger = logging.getLogger(__name__)
|
20
|
+
|
21
|
+
|
22
|
+
class ImportIssueType(Enum):
    """Types of import issues that can be detected."""

    # Explicit relative import: `from . import x`, `from ..pkg import y`.
    RELATIVE_IMPORT = "relative_import"
    # Bare `from localmod import x` resolved against the file's own directory.
    IMPLICIT_RELATIVE = "implicit_relative"
    # `import localmod` that may shadow or be missing in production.
    LOCAL_IMPORT = "local_import"
    # NOTE(review): not produced by this module's checks — presumably kept
    # for API completeness; confirm before removing.
    AMBIGUOUS_IMPORT = "ambiguous_import"
|
29
|
+
|
30
|
+
|
31
|
+
@dataclass
class ImportIssue:
    """Represents an import issue found in a file."""

    file_path: str  # path of the offending file, as given to the validator
    line_number: int  # 1-based source line of the import statement
    import_statement: str  # original source text of the import
    issue_type: ImportIssueType  # classification of the problem
    severity: str  # "critical", "warning", "info"
    message: str  # human-readable description of the problem
    suggestion: str  # proposed absolute-import replacement or guidance
    # Reference document explaining the absolute-import policy.
    gold_standard_ref: str = (
        "sdk-users/7-gold-standards/absolute-imports-gold-standard.md"
    )
|
45
|
+
|
46
|
+
|
47
|
+
class ImportPathValidator:
    """
    Validates import paths for production deployment compatibility.

    Detects relative imports that work in development but fail in production
    when applications run from repository root, and can suggest (and
    optionally apply) absolute-import replacements.
    """
|
54
|
+
|
55
|
+
def __init__(self, repo_root: Optional[str] = None):
|
56
|
+
"""
|
57
|
+
Initialize import path validator.
|
58
|
+
|
59
|
+
Args:
|
60
|
+
repo_root: Repository root path. If None, tries to auto-detect.
|
61
|
+
"""
|
62
|
+
self.repo_root = Path(repo_root) if repo_root else self._find_repo_root()
|
63
|
+
self.sdk_modules = self._identify_sdk_modules()
|
64
|
+
self.issues: List[ImportIssue] = []
|
65
|
+
|
66
|
+
def _find_repo_root(self) -> Path:
|
67
|
+
"""Find repository root by looking for key markers."""
|
68
|
+
current = Path.cwd()
|
69
|
+
|
70
|
+
# Look for common repo markers
|
71
|
+
markers = [".git", "pyproject.toml", "setup.py", "requirements.txt"]
|
72
|
+
|
73
|
+
while current != current.parent:
|
74
|
+
for marker in markers:
|
75
|
+
if (current / marker).exists():
|
76
|
+
return current
|
77
|
+
current = current.parent
|
78
|
+
|
79
|
+
# Fallback to current directory
|
80
|
+
return Path.cwd()
|
81
|
+
|
82
|
+
def _identify_sdk_modules(self) -> Set[str]:
|
83
|
+
"""Identify SDK module names for import validation."""
|
84
|
+
sdk_modules = set()
|
85
|
+
|
86
|
+
# Check for src structure
|
87
|
+
src_path = self.repo_root / "src"
|
88
|
+
if src_path.exists():
|
89
|
+
for item in src_path.iterdir():
|
90
|
+
if item.is_dir() and (item / "__init__.py").exists():
|
91
|
+
sdk_modules.add(item.name)
|
92
|
+
|
93
|
+
# Common SDK module names
|
94
|
+
sdk_modules.update(["kailash", "dataflow", "nexus"])
|
95
|
+
|
96
|
+
return sdk_modules
|
97
|
+
|
98
|
+
def validate_file(self, file_path: str) -> List[ImportIssue]:
|
99
|
+
"""
|
100
|
+
Validate imports in a single Python file.
|
101
|
+
|
102
|
+
Args:
|
103
|
+
file_path: Path to Python file to validate
|
104
|
+
|
105
|
+
Returns:
|
106
|
+
List of import issues found
|
107
|
+
"""
|
108
|
+
file_path = Path(file_path)
|
109
|
+
if not file_path.exists() or not file_path.suffix == ".py":
|
110
|
+
return []
|
111
|
+
|
112
|
+
issues = []
|
113
|
+
|
114
|
+
try:
|
115
|
+
with open(file_path, "r", encoding="utf-8") as f:
|
116
|
+
content = f.read()
|
117
|
+
|
118
|
+
# Parse AST to find imports
|
119
|
+
tree = ast.parse(content, filename=str(file_path))
|
120
|
+
|
121
|
+
# Check each import statement
|
122
|
+
for node in ast.walk(tree):
|
123
|
+
if isinstance(node, ast.ImportFrom):
|
124
|
+
issue = self._check_import_from(node, file_path, content)
|
125
|
+
if issue:
|
126
|
+
issues.append(issue)
|
127
|
+
elif isinstance(node, ast.Import):
|
128
|
+
issue = self._check_import(node, file_path, content)
|
129
|
+
if issue:
|
130
|
+
issues.append(issue)
|
131
|
+
|
132
|
+
except Exception as e:
|
133
|
+
logger.warning(f"Failed to parse {file_path}: {e}")
|
134
|
+
|
135
|
+
return issues
|
136
|
+
|
137
|
+
def _check_import_from(
|
138
|
+
self, node: ast.ImportFrom, file_path: Path, content: str
|
139
|
+
) -> Optional[ImportIssue]:
|
140
|
+
"""Check 'from X import Y' statements."""
|
141
|
+
if node.level > 0:
|
142
|
+
# Explicit relative import (from . import x, from .. import x)
|
143
|
+
import_str = self._get_import_string(node, content)
|
144
|
+
|
145
|
+
return ImportIssue(
|
146
|
+
file_path=str(file_path),
|
147
|
+
line_number=node.lineno,
|
148
|
+
import_statement=import_str,
|
149
|
+
issue_type=ImportIssueType.RELATIVE_IMPORT,
|
150
|
+
severity="critical",
|
151
|
+
message="Relative import will fail in production deployment",
|
152
|
+
suggestion=self._generate_absolute_import_suggestion(node, file_path),
|
153
|
+
)
|
154
|
+
|
155
|
+
elif node.module:
|
156
|
+
# Check for implicit relative imports
|
157
|
+
module_parts = node.module.split(".")
|
158
|
+
first_part = module_parts[0]
|
159
|
+
|
160
|
+
# Check if this looks like a local module import
|
161
|
+
if self._is_likely_local_import(first_part, file_path):
|
162
|
+
import_str = self._get_import_string(node, content)
|
163
|
+
|
164
|
+
return ImportIssue(
|
165
|
+
file_path=str(file_path),
|
166
|
+
line_number=node.lineno,
|
167
|
+
import_statement=import_str,
|
168
|
+
issue_type=ImportIssueType.IMPLICIT_RELATIVE,
|
169
|
+
severity="critical",
|
170
|
+
message=f"Implicit relative import '{first_part}' will fail when run from repo root",
|
171
|
+
suggestion=self._generate_absolute_import_suggestion(
|
172
|
+
node, file_path
|
173
|
+
),
|
174
|
+
)
|
175
|
+
|
176
|
+
return None
|
177
|
+
|
178
|
+
def _check_import(
|
179
|
+
self, node: ast.Import, file_path: Path, content: str
|
180
|
+
) -> Optional[ImportIssue]:
|
181
|
+
"""Check 'import X' statements."""
|
182
|
+
# Generally less problematic, but check for ambiguous local imports
|
183
|
+
for alias in node.names:
|
184
|
+
name_parts = alias.name.split(".")
|
185
|
+
first_part = name_parts[0]
|
186
|
+
|
187
|
+
if self._is_likely_local_import(first_part, file_path):
|
188
|
+
import_str = f"import {alias.name}"
|
189
|
+
|
190
|
+
return ImportIssue(
|
191
|
+
file_path=str(file_path),
|
192
|
+
line_number=node.lineno,
|
193
|
+
import_statement=import_str,
|
194
|
+
issue_type=ImportIssueType.LOCAL_IMPORT,
|
195
|
+
severity="warning",
|
196
|
+
message=f"Local module import '{first_part}' may be ambiguous in production",
|
197
|
+
suggestion=f"Consider using absolute import: from {self._get_module_path(file_path)} import {first_part}",
|
198
|
+
)
|
199
|
+
|
200
|
+
return None
|
201
|
+
|
202
|
+
def _is_likely_local_import(self, module_name: str, file_path: Path) -> bool:
|
203
|
+
"""
|
204
|
+
Check if a module name is likely a local/relative import.
|
205
|
+
|
206
|
+
Returns True if:
|
207
|
+
- Module exists as sibling to current file
|
208
|
+
- Module is not a known SDK module
|
209
|
+
- Module is not a standard library module
|
210
|
+
"""
|
211
|
+
# Skip if it's a known SDK module
|
212
|
+
if module_name in self.sdk_modules:
|
213
|
+
return False
|
214
|
+
|
215
|
+
# Skip if it's likely a third-party or stdlib module
|
216
|
+
if module_name in [
|
217
|
+
"os",
|
218
|
+
"sys",
|
219
|
+
"json",
|
220
|
+
"logging",
|
221
|
+
"typing",
|
222
|
+
"pathlib",
|
223
|
+
"pytest",
|
224
|
+
"unittest",
|
225
|
+
"numpy",
|
226
|
+
"pandas",
|
227
|
+
"requests",
|
228
|
+
]:
|
229
|
+
return False
|
230
|
+
|
231
|
+
# Check if module exists as sibling
|
232
|
+
parent_dir = file_path.parent
|
233
|
+
possible_module = parent_dir / module_name
|
234
|
+
possible_file = parent_dir / f"{module_name}.py"
|
235
|
+
|
236
|
+
if possible_module.exists() or possible_file.exists():
|
237
|
+
return True
|
238
|
+
|
239
|
+
# Check common local module patterns
|
240
|
+
local_patterns = ["contracts", "nodes", "core", "utils", "models", "schemas"]
|
241
|
+
if module_name in local_patterns:
|
242
|
+
return True
|
243
|
+
|
244
|
+
return False
|
245
|
+
|
246
|
+
def _get_import_string(self, node: ast.ImportFrom, content: str) -> str:
|
247
|
+
"""Extract the actual import string from source."""
|
248
|
+
lines = content.split("\n")
|
249
|
+
if 0 <= node.lineno - 1 < len(lines):
|
250
|
+
return lines[node.lineno - 1].strip()
|
251
|
+
return f"from {node.module} import ..."
|
252
|
+
|
253
|
+
def _get_module_path(self, file_path: Path) -> str:
|
254
|
+
"""Get the absolute module path for a file's directory."""
|
255
|
+
try:
|
256
|
+
# Get relative path from repo root
|
257
|
+
rel_path = file_path.relative_to(self.repo_root)
|
258
|
+
|
259
|
+
# Convert to module path (excluding the filename)
|
260
|
+
parts = list(rel_path.parts[:-1]) # Remove filename
|
261
|
+
|
262
|
+
# Join all parts to create module path
|
263
|
+
return ".".join(parts) if parts else ""
|
264
|
+
|
265
|
+
except ValueError:
|
266
|
+
# File not under repo root
|
267
|
+
return "src.your_module"
|
268
|
+
|
269
|
+
    def _generate_absolute_import_suggestion(
        self, node: ast.ImportFrom, file_path: Path
    ) -> str:
        """
        Generate the suggested absolute form of a relative/implicit import.

        Args:
            node: The ``from ... import ...`` AST node being rewritten.
            file_path: File containing the import; its package path (relative
                to the repo root) anchors the absolute suggestion.

        Returns:
            A ``from <absolute.module> import <names>`` string.
        """
        module_base = self._get_module_path(file_path)

        if node.level > 0:
            # Explicit relative import: resolve the dots against the
            # file's own package path.
            module_parts = module_base.split(".") if module_base else []

            # For a file in package a.b.c:
            # - level 1 (.)   = current package (a.b.c)
            # - level 2 (..)  = parent package (a.b)
            # - level 3 (...) = grandparent (a)
            # So we climb (level - 1) packages from the current one.
            if len(module_parts) > node.level - 1:
                if node.level == 1:
                    # Same package as the file itself.
                    parent_parts = module_parts
                else:
                    # Climb (level - 1) packages.
                    parent_parts = module_parts[: -(node.level - 1)]

                if node.module:
                    # `from ..sub import x` → append the named sub-path.
                    suggested_module = ".".join(parent_parts + node.module.split("."))
                else:
                    # `from .. import x` → the parent package itself.
                    suggested_module = ".".join(parent_parts)
            else:
                # The dots climb past the repo root; fall back to whatever
                # module information we do have.
                if node.module:
                    suggested_module = node.module
                else:
                    suggested_module = module_base
        else:
            # Implicit relative import: the module sits in the file's own
            # directory, so prefix it with the file's package path.
            if module_base:
                suggested_module = f"{module_base}.{node.module}"
            else:
                suggested_module = node.module

        # Format the suggestion with the originally imported names.
        if hasattr(node, "names") and node.names:
            imports = ", ".join(alias.name for alias in node.names)
            return f"from {suggested_module} import {imports}"
        else:
            return f"from {suggested_module} import ..."
|
321
|
+
|
322
|
+
def validate_directory(
|
323
|
+
self, directory: str, recursive: bool = True
|
324
|
+
) -> List[ImportIssue]:
|
325
|
+
"""
|
326
|
+
Validate all Python files in a directory.
|
327
|
+
|
328
|
+
Args:
|
329
|
+
directory: Directory path to validate
|
330
|
+
recursive: Whether to scan subdirectories
|
331
|
+
|
332
|
+
Returns:
|
333
|
+
List of all import issues found
|
334
|
+
"""
|
335
|
+
directory = Path(directory)
|
336
|
+
if not directory.exists() or not directory.is_dir():
|
337
|
+
return []
|
338
|
+
|
339
|
+
all_issues = []
|
340
|
+
|
341
|
+
pattern = "**/*.py" if recursive else "*.py"
|
342
|
+
for py_file in directory.glob(pattern):
|
343
|
+
# Skip test files by default (can be configured)
|
344
|
+
if "test" in py_file.name or "__pycache__" in str(py_file):
|
345
|
+
continue
|
346
|
+
|
347
|
+
issues = self.validate_file(py_file)
|
348
|
+
all_issues.extend(issues)
|
349
|
+
|
350
|
+
return all_issues
|
351
|
+
|
352
|
+
def generate_report(self, issues: List[ImportIssue]) -> str:
|
353
|
+
"""
|
354
|
+
Generate a human-readable report of import issues.
|
355
|
+
|
356
|
+
Args:
|
357
|
+
issues: List of import issues to report
|
358
|
+
|
359
|
+
Returns:
|
360
|
+
Formatted report string
|
361
|
+
"""
|
362
|
+
if not issues:
|
363
|
+
return "ā
No import issues found! All imports are production-ready."
|
364
|
+
|
365
|
+
report = []
|
366
|
+
report.append("šØ IMPORT VALIDATION REPORT")
|
367
|
+
report.append("=" * 60)
|
368
|
+
report.append(
|
369
|
+
f"Found {len(issues)} import issues that may fail in production\n"
|
370
|
+
)
|
371
|
+
|
372
|
+
# Group by severity
|
373
|
+
critical_issues = [i for i in issues if i.severity == "critical"]
|
374
|
+
warning_issues = [i for i in issues if i.severity == "warning"]
|
375
|
+
|
376
|
+
if critical_issues:
|
377
|
+
report.append("š“ CRITICAL ISSUES (Will fail in production)")
|
378
|
+
report.append("-" * 60)
|
379
|
+
for issue in critical_issues:
|
380
|
+
report.append(f"\nFile: {issue.file_path}")
|
381
|
+
report.append(f"Line {issue.line_number}: {issue.import_statement}")
|
382
|
+
report.append(f"Issue: {issue.message}")
|
383
|
+
report.append(f"Fix: {issue.suggestion}")
|
384
|
+
|
385
|
+
if warning_issues:
|
386
|
+
report.append("\nš” WARNINGS (May cause issues)")
|
387
|
+
report.append("-" * 60)
|
388
|
+
for issue in warning_issues:
|
389
|
+
report.append(f"\nFile: {issue.file_path}")
|
390
|
+
report.append(f"Line {issue.line_number}: {issue.import_statement}")
|
391
|
+
report.append(f"Issue: {issue.message}")
|
392
|
+
report.append(f"Suggestion: {issue.suggestion}")
|
393
|
+
|
394
|
+
report.append(
|
395
|
+
f"\nš See gold standard: {issues[0].gold_standard_ref if issues else 'N/A'}"
|
396
|
+
)
|
397
|
+
|
398
|
+
return "\n".join(report)
|
399
|
+
|
400
|
+
def fix_imports_in_file(
|
401
|
+
self, file_path: str, dry_run: bool = True
|
402
|
+
) -> List[Tuple[str, str]]:
|
403
|
+
"""
|
404
|
+
Attempt to fix import issues in a file.
|
405
|
+
|
406
|
+
Args:
|
407
|
+
file_path: Path to file to fix
|
408
|
+
dry_run: If True, only return proposed changes without modifying file
|
409
|
+
|
410
|
+
Returns:
|
411
|
+
List of (original, fixed) import tuples
|
412
|
+
"""
|
413
|
+
issues = self.validate_file(file_path)
|
414
|
+
if not issues:
|
415
|
+
return []
|
416
|
+
|
417
|
+
fixes = []
|
418
|
+
|
419
|
+
with open(file_path, "r", encoding="utf-8") as f:
|
420
|
+
lines = f.readlines()
|
421
|
+
|
422
|
+
# Sort issues by line number in reverse to avoid offset issues
|
423
|
+
issues.sort(key=lambda x: x.line_number, reverse=True)
|
424
|
+
|
425
|
+
for issue in issues:
|
426
|
+
if issue.severity == "critical":
|
427
|
+
line_idx = issue.line_number - 1
|
428
|
+
if 0 <= line_idx < len(lines):
|
429
|
+
original = lines[line_idx].rstrip()
|
430
|
+
|
431
|
+
# Simple replacement based on suggestion
|
432
|
+
# In practice, this would need more sophisticated AST rewriting
|
433
|
+
fixed = lines[line_idx].replace(
|
434
|
+
issue.import_statement, issue.suggestion
|
435
|
+
)
|
436
|
+
|
437
|
+
fixes.append((original, fixed.rstrip()))
|
438
|
+
|
439
|
+
if not dry_run:
|
440
|
+
lines[line_idx] = fixed
|
441
|
+
|
442
|
+
if not dry_run and fixes:
|
443
|
+
with open(file_path, "w", encoding="utf-8") as f:
|
444
|
+
f.writelines(lines)
|
445
|
+
|
446
|
+
return fixes
|