codeshift-0.2.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. codeshift/__init__.py +8 -0
  2. codeshift/analyzer/__init__.py +5 -0
  3. codeshift/analyzer/risk_assessor.py +388 -0
  4. codeshift/api/__init__.py +1 -0
  5. codeshift/api/auth.py +182 -0
  6. codeshift/api/config.py +73 -0
  7. codeshift/api/database.py +215 -0
  8. codeshift/api/main.py +103 -0
  9. codeshift/api/models/__init__.py +55 -0
  10. codeshift/api/models/auth.py +108 -0
  11. codeshift/api/models/billing.py +92 -0
  12. codeshift/api/models/migrate.py +42 -0
  13. codeshift/api/models/usage.py +116 -0
  14. codeshift/api/routers/__init__.py +5 -0
  15. codeshift/api/routers/auth.py +440 -0
  16. codeshift/api/routers/billing.py +395 -0
  17. codeshift/api/routers/migrate.py +304 -0
  18. codeshift/api/routers/usage.py +291 -0
  19. codeshift/api/routers/webhooks.py +289 -0
  20. codeshift/cli/__init__.py +5 -0
  21. codeshift/cli/commands/__init__.py +7 -0
  22. codeshift/cli/commands/apply.py +352 -0
  23. codeshift/cli/commands/auth.py +842 -0
  24. codeshift/cli/commands/diff.py +221 -0
  25. codeshift/cli/commands/scan.py +368 -0
  26. codeshift/cli/commands/upgrade.py +436 -0
  27. codeshift/cli/commands/upgrade_all.py +518 -0
  28. codeshift/cli/main.py +221 -0
  29. codeshift/cli/quota.py +210 -0
  30. codeshift/knowledge/__init__.py +50 -0
  31. codeshift/knowledge/cache.py +167 -0
  32. codeshift/knowledge/generator.py +231 -0
  33. codeshift/knowledge/models.py +151 -0
  34. codeshift/knowledge/parser.py +270 -0
  35. codeshift/knowledge/sources.py +388 -0
  36. codeshift/knowledge_base/__init__.py +17 -0
  37. codeshift/knowledge_base/loader.py +102 -0
  38. codeshift/knowledge_base/models.py +110 -0
  39. codeshift/migrator/__init__.py +23 -0
  40. codeshift/migrator/ast_transforms.py +256 -0
  41. codeshift/migrator/engine.py +395 -0
  42. codeshift/migrator/llm_migrator.py +320 -0
  43. codeshift/migrator/transforms/__init__.py +19 -0
  44. codeshift/migrator/transforms/fastapi_transformer.py +174 -0
  45. codeshift/migrator/transforms/pandas_transformer.py +236 -0
  46. codeshift/migrator/transforms/pydantic_v1_to_v2.py +637 -0
  47. codeshift/migrator/transforms/requests_transformer.py +218 -0
  48. codeshift/migrator/transforms/sqlalchemy_transformer.py +175 -0
  49. codeshift/scanner/__init__.py +6 -0
  50. codeshift/scanner/code_scanner.py +352 -0
  51. codeshift/scanner/dependency_parser.py +473 -0
  52. codeshift/utils/__init__.py +5 -0
  53. codeshift/utils/api_client.py +266 -0
  54. codeshift/utils/cache.py +318 -0
  55. codeshift/utils/config.py +71 -0
  56. codeshift/utils/llm_client.py +221 -0
  57. codeshift/validator/__init__.py +6 -0
  58. codeshift/validator/syntax_checker.py +183 -0
  59. codeshift/validator/test_runner.py +224 -0
  60. codeshift-0.2.0.dist-info/METADATA +326 -0
  61. codeshift-0.2.0.dist-info/RECORD +65 -0
  62. codeshift-0.2.0.dist-info/WHEEL +5 -0
  63. codeshift-0.2.0.dist-info/entry_points.txt +2 -0
  64. codeshift-0.2.0.dist-info/licenses/LICENSE +21 -0
  65. codeshift-0.2.0.dist-info/top_level.txt +1 -0
codeshift/scanner/dependency_parser.py
@@ -0,0 +1,473 @@
+ """Parser for dependency files (requirements.txt, pyproject.toml)."""
+
+ from dataclasses import dataclass, field
+ from pathlib import Path
+ from typing import Any
+
+ import toml
+ from packaging.requirements import Requirement
+ from packaging.specifiers import SpecifierSet
+ from packaging.version import Version
+
+
+ @dataclass
+ class Dependency:
+     """Represents a project dependency."""
+
+     name: str
+     version_spec: str | None = None
+     extras: list[str] = field(default_factory=list)
+     source_file: Path | None = None
+
+     @property
+     def min_version(self) -> Version | None:
+         """Get the minimum version from the specifier."""
+         if not self.version_spec:
+             return None
+
+         try:
+             specifier = SpecifierSet(self.version_spec)
+             for spec in specifier:
+                 if spec.operator in (">=", "==", "~="):
+                     return Version(spec.version)
+         except Exception:
+             pass
+         return None
+
+     @property
+     def max_version(self) -> Version | None:
+         """Get the maximum version from the specifier."""
+         if not self.version_spec:
+             return None
+
+         try:
+             specifier = SpecifierSet(self.version_spec)
+             for spec in specifier:
+                 if spec.operator in ("<=", "<", "=="):
+                     return Version(spec.version)
+         except Exception:
+             pass
+         return None
+
+     def is_version_compatible(self, version: str) -> bool:
+         """Check if a version is compatible with this dependency's spec."""
+         if not self.version_spec:
+             return True
+
+         try:
+             specifier = SpecifierSet(self.version_spec)
+             return Version(version) in specifier
+         except Exception:
+             return True
+
+
+ class DependencyParser:
+     """Parser for extracting dependencies from project files."""
+
+     def __init__(self, project_path: Path):
+         """Initialize the parser.
+
+         Args:
+             project_path: Root path of the project
+         """
+         self.project_path = project_path
+
+     def parse_all(self) -> list[Dependency]:
+         """Parse dependencies from all available sources.
+
+         Returns:
+             List of all dependencies found
+         """
+         dependencies = []
+
+         # Try pyproject.toml first
+         pyproject_deps = self.parse_pyproject_toml()
+         dependencies.extend(pyproject_deps)
+
+         # Also check requirements.txt
+         requirements_deps = self.parse_requirements_txt()
+         dependencies.extend(requirements_deps)
+
+         # Also check setup.py (basic parsing)
+         setup_deps = self.parse_setup_py()
+         dependencies.extend(setup_deps)
+
+         # Deduplicate by name (prefer pyproject.toml)
+         seen = set()
+         unique = []
+         for dep in dependencies:
+             if dep.name.lower() not in seen:
+                 seen.add(dep.name.lower())
+                 unique.append(dep)
+
+         return unique
+
+     def parse_pyproject_toml(self) -> list[Dependency]:
+         """Parse dependencies from pyproject.toml.
+
+         Returns:
+             List of dependencies found
+         """
+         pyproject_path = self.project_path / "pyproject.toml"
+         if not pyproject_path.exists():
+             return []
+
+         try:
+             data = toml.load(pyproject_path)
+         except Exception:
+             return []
+
+         dependencies = []
+
+         # Standard project dependencies
+         project_deps = data.get("project", {}).get("dependencies", [])
+         for dep_str in project_deps:
+             dep = self._parse_requirement_string(dep_str)
+             if dep:
+                 dep.source_file = pyproject_path
+                 dependencies.append(dep)
+
+         # Optional dependencies
+         optional_deps = data.get("project", {}).get("optional-dependencies", {})
+         for group_deps in optional_deps.values():
+             for dep_str in group_deps:
+                 dep = self._parse_requirement_string(dep_str)
+                 if dep:
+                     dep.source_file = pyproject_path
+                     dependencies.append(dep)
+
+         # Poetry dependencies
+         poetry_deps = data.get("tool", {}).get("poetry", {}).get("dependencies", {})
+         for name, spec in poetry_deps.items():
+             if name.lower() == "python":
+                 continue
+             dep = self._parse_poetry_dep(name, spec)
+             if dep:
+                 dep.source_file = pyproject_path
+                 dependencies.append(dep)
+
+         # Poetry dev dependencies
+         dev_deps = data.get("tool", {}).get("poetry", {}).get("dev-dependencies", {})
+         for name, spec in dev_deps.items():
+             dep = self._parse_poetry_dep(name, spec)
+             if dep:
+                 dep.source_file = pyproject_path
+                 dependencies.append(dep)
+
+         return dependencies
+
+     def parse_requirements_txt(self) -> list[Dependency]:
+         """Parse dependencies from requirements.txt.
+
+         Returns:
+             List of dependencies found
+         """
+         requirements_path = self.project_path / "requirements.txt"
+         if not requirements_path.exists():
+             return []
+
+         dependencies = []
+
+         try:
+             content = requirements_path.read_text()
+         except Exception:
+             return []
+
+         for line in content.splitlines():
+             line = line.strip()
+             # Skip comments and empty lines
+             if not line or line.startswith("#") or line.startswith("-"):
+                 continue
+
+             dep = self._parse_requirement_string(line)
+             if dep:
+                 dep.source_file = requirements_path
+                 dependencies.append(dep)
+
+         return dependencies
+
+     def parse_setup_py(self) -> list[Dependency]:
+         """Parse dependencies from setup.py (basic parsing).
+
+         Returns:
+             List of dependencies found
+         """
+         setup_path = self.project_path / "setup.py"
+         if not setup_path.exists():
+             return []
+
+         # This is a very basic parser that looks for install_requires
+         # A proper implementation would use AST parsing
+         try:
+             content = setup_path.read_text()
+         except Exception:
+             return []
+
+         dependencies = []
+
+         # Look for install_requires = [...] pattern
+         import re
+
+         match = re.search(r"install_requires\s*=\s*\[(.*?)\]", content, re.DOTALL)
+         if match:
+             deps_str = match.group(1)
+             # Extract quoted strings
+             for dep_match in re.finditer(r"['\"]([^'\"]+)['\"]", deps_str):
+                 dep = self._parse_requirement_string(dep_match.group(1))
+                 if dep:
+                     dep.source_file = setup_path
+                     dependencies.append(dep)
+
+         return dependencies
+
+     def get_dependency(self, name: str) -> Dependency | None:
+         """Get a specific dependency by name.
+
+         Args:
+             name: Name of the dependency to find
+
+         Returns:
+             Dependency if found, None otherwise
+         """
+         all_deps = self.parse_all()
+         name_lower = name.lower()
+         for dep in all_deps:
+             if dep.name.lower() == name_lower:
+                 return dep
+         return None
+
+     def _parse_requirement_string(self, req_str: str) -> Dependency | None:
+         """Parse a requirement string like 'pydantic>=1.10,<2.0'.
+
+         Args:
+             req_str: The requirement string to parse
+
+         Returns:
+             Dependency object or None if parsing fails
+         """
+         try:
+             req = Requirement(req_str)
+             return Dependency(
+                 name=req.name,
+                 version_spec=str(req.specifier) if req.specifier else None,
+                 extras=list(req.extras) if req.extras else [],
+             )
+         except Exception:
+             # Try basic parsing
+             import re
+
+             match = re.match(r"([a-zA-Z0-9_-]+)(.*)", req_str)
+             if match:
+                 return Dependency(
+                     name=match.group(1),
+                     version_spec=match.group(2).strip() or None,
+                 )
+             return None
+
+     def _parse_poetry_dep(self, name: str, spec: Any) -> Dependency | None:
+         """Parse a Poetry-style dependency specification.
+
+         Args:
+             name: Name of the dependency
+             spec: Version specification (string, dict, or other)
+
+         Returns:
+             Dependency object or None
+         """
+         if isinstance(spec, str):
+             # Simple version spec like "^1.10"
+             version_spec = self._convert_poetry_version(spec)
+             return Dependency(name=name, version_spec=version_spec)
+         elif isinstance(spec, dict):
+             version = spec.get("version", "")
+             dict_version_spec: str | None = (
+                 self._convert_poetry_version(version) if version else None
+             )
+             extras = spec.get("extras", [])
+             return Dependency(name=name, version_spec=dict_version_spec, extras=extras)
+
+         return None
+
+     def _convert_poetry_version(self, version: str) -> str:
+         """Convert Poetry version syntax to PEP 440.
+
+         Args:
+             version: Poetry version string (e.g., "^1.10", "~1.10")
+
+         Returns:
+             PEP 440 compatible version string
+         """
+         if version.startswith("^"):
+             # Caret: ^1.2.3 means >=1.2.3,<2.0.0
+             base = version[1:]
+             parts = base.split(".")
+             if len(parts) >= 1:
+                 major = int(parts[0])
+                 return f">={base},<{major + 1}.0.0"
+         elif version.startswith("~"):
+             # Tilde: ~1.2.3 means >=1.2.3,<1.3.0
+             base = version[1:]
+             parts = base.split(".")
+             if len(parts) >= 2:
+                 major_str = parts[0]
+                 minor = int(parts[1])
+                 return f">={base},<{major_str}.{minor + 1}.0"
+
+         return version
+
+     def update_dependency_version(self, name: str, new_version: str) -> list[tuple[Path, bool]]:
+         """Update the version of a dependency in all source files.
+
+         Args:
+             name: Name of the dependency to update.
+             new_version: New version to set (e.g., "2.5.0").
+
+         Returns:
+             List of (file_path, success) tuples for each file updated.
+         """
+         results = []
+
+         # Try to update in pyproject.toml
+         pyproject_path = self.project_path / "pyproject.toml"
+         if pyproject_path.exists():
+             success = self._update_pyproject_toml(name, new_version)
+             results.append((pyproject_path, success))
+
+         # Try to update in requirements.txt
+         requirements_path = self.project_path / "requirements.txt"
+         if requirements_path.exists():
+             success = self._update_requirements_txt(name, new_version)
+             results.append((requirements_path, success))
+
+         # Try to update in setup.py
+         setup_path = self.project_path / "setup.py"
+         if setup_path.exists():
+             success = self._update_setup_py(name, new_version)
+             if success:
+                 results.append((setup_path, success))
+
+         return results
+
+     def _update_pyproject_toml(self, name: str, new_version: str) -> bool:
+         """Update a dependency version in pyproject.toml.
+
+         Args:
+             name: Name of the dependency.
+             new_version: New version to set.
+
+         Returns:
+             True if update was successful.
+         """
+         import re
+
+         pyproject_path = self.project_path / "pyproject.toml"
+         if not pyproject_path.exists():
+             return False
+
+         try:
+             content = pyproject_path.read_text()
+             original_content = content
+
+             # Pattern for standard dependencies: "pydantic>=1.0,<2.0" or "pydantic==1.10.0"
+             # Match the package name followed by version specifiers
+             pattern = rf'("{name})((?:[><=!~]+[^"]*)?)"'
+             replacement = rf'"\1>={new_version}"'
+             content = re.sub(pattern, replacement, content, flags=re.IGNORECASE)
+
+             # Pattern for Poetry dependencies: pydantic = "^1.10" or pydantic = {version = "^1.10"}
+             # Simple string version
+             poetry_pattern = rf'(\[tool\.poetry\.(?:dev-)?dependencies\].*?{name}\s*=\s*)"([^"]*)"'
+             poetry_replacement = rf'\1"^{new_version}"'
+             content = re.sub(
+                 poetry_pattern, poetry_replacement, content, flags=re.IGNORECASE | re.DOTALL
+             )
+
+             # Poetry dict version: version = "^1.10"
+             poetry_dict_pattern = rf'({name}\s*=\s*\{{[^}}]*version\s*=\s*)"([^"]*)"'
+             poetry_dict_replacement = rf'\1"^{new_version}"'
+             content = re.sub(
+                 poetry_dict_pattern, poetry_dict_replacement, content, flags=re.IGNORECASE
+             )
+
+             if content != original_content:
+                 pyproject_path.write_text(content)
+                 return True
+
+             return False
+
+         except Exception:
+             return False
+
+     def _update_requirements_txt(self, name: str, new_version: str) -> bool:
+         """Update a dependency version in requirements.txt.
+
+         Args:
+             name: Name of the dependency.
+             new_version: New version to set.
+
+         Returns:
+             True if update was successful.
+         """
+         import re
+
+         requirements_path = self.project_path / "requirements.txt"
+         if not requirements_path.exists():
+             return False
+
+         try:
+             content = requirements_path.read_text()
+             original_content = content
+
+             # Pattern: pydantic>=1.0 or pydantic==1.10.0 or just pydantic
+             pattern = rf"^({name})([><=!~]+[^\s#]*)?(\s*#.*)?$"
+
+             def replace_line(match: re.Match) -> str:
+                 pkg_name = match.group(1)
+                 comment = match.group(3) or ""
+                 return f"{pkg_name}>={new_version}{comment}"
+
+             content = re.sub(pattern, replace_line, content, flags=re.IGNORECASE | re.MULTILINE)
+
+             if content != original_content:
+                 requirements_path.write_text(content)
+                 return True
+
+             return False
+
+         except Exception:
+             return False
+
+     def _update_setup_py(self, name: str, new_version: str) -> bool:
+         """Update a dependency version in setup.py.
+
+         Args:
+             name: Name of the dependency.
+             new_version: New version to set.
+
+         Returns:
+             True if update was successful.
+         """
+         import re
+
+         setup_path = self.project_path / "setup.py"
+         if not setup_path.exists():
+             return False
+
+         try:
+             content = setup_path.read_text()
+             original_content = content
+
+             # Pattern for install_requires entries: "pydantic>=1.0" or 'pydantic>=1.0'
+             for quote in ['"', "'"]:
+                 pattern = rf"{quote}({name})([><=!~]+[^{quote}]*)?{quote}"
+                 replacement = rf"{quote}\1>={new_version}{quote}"
+                 content = re.sub(pattern, replacement, content, flags=re.IGNORECASE)
+
+             if content != original_content:
+                 setup_path.write_text(content)
+                 return True
+
+             return False
+
+         except Exception:
+             return False
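The file above (codeshift/scanner/dependency_parser.py) is new in 0.2.0 and defines the scanner's dependency-parsing API: the `Dependency` dataclass plus `DependencyParser.parse_all`, `get_dependency`, `is_version_compatible`, and `update_dependency_version`. The sketch below shows how that API might be driven from calling code; it is illustrative only, is not part of the wheel, and the project root and the `pydantic` requirement it uses are hypothetical.

# Illustrative sketch only: not part of the codeshift wheel. It exercises the
# DependencyParser API exactly as defined in the hunk above; the project root
# and the "pydantic" requirement are hypothetical.
from pathlib import Path

from codeshift.scanner.dependency_parser import DependencyParser

parser = DependencyParser(Path("/path/to/my-project"))  # hypothetical project root

# Dependencies gathered from pyproject.toml, requirements.txt, and setup.py,
# deduplicated case-insensitively by name.
for dep in parser.parse_all():
    print(dep.name, dep.version_spec, dep.min_version, dep.max_version)

# Look up one dependency and test a candidate version against its specifier.
pydantic = parser.get_dependency("pydantic")
if pydantic is not None and not pydantic.is_version_compatible("2.5.0"):
    # Bump the pin in every manifest file that exists (">=" for PEP 621 and
    # requirements.txt entries, "^" for Poetry tables).
    for path, ok in parser.update_dependency_version("pydantic", "2.5.0"):
        print(f"{path}: {'updated' if ok else 'unchanged'}")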
codeshift/utils/__init__.py
@@ -0,0 +1,5 @@
+ """Utility functions and classes for PyResolve."""
+
+ from codeshift.utils.config import Config, ProjectConfig
+
+ __all__ = ["Config", "ProjectConfig"]
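The `__init__` above only re-exports two names from codeshift/utils/config.py, whose implementation is not shown in this diff, so the snippet below (illustrative only) demonstrates nothing beyond the resulting import path.

# Illustrative only: thanks to the re-export in codeshift/utils/__init__.py,
# Config and ProjectConfig can be imported from the package itself rather
# than from codeshift.utils.config.
from codeshift.utils import Config, ProjectConfig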