codeshift-0.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- codeshift/__init__.py +8 -0
- codeshift/analyzer/__init__.py +5 -0
- codeshift/analyzer/risk_assessor.py +388 -0
- codeshift/api/__init__.py +1 -0
- codeshift/api/auth.py +182 -0
- codeshift/api/config.py +73 -0
- codeshift/api/database.py +215 -0
- codeshift/api/main.py +103 -0
- codeshift/api/models/__init__.py +55 -0
- codeshift/api/models/auth.py +108 -0
- codeshift/api/models/billing.py +92 -0
- codeshift/api/models/migrate.py +42 -0
- codeshift/api/models/usage.py +116 -0
- codeshift/api/routers/__init__.py +5 -0
- codeshift/api/routers/auth.py +440 -0
- codeshift/api/routers/billing.py +395 -0
- codeshift/api/routers/migrate.py +304 -0
- codeshift/api/routers/usage.py +291 -0
- codeshift/api/routers/webhooks.py +289 -0
- codeshift/cli/__init__.py +5 -0
- codeshift/cli/commands/__init__.py +7 -0
- codeshift/cli/commands/apply.py +352 -0
- codeshift/cli/commands/auth.py +842 -0
- codeshift/cli/commands/diff.py +221 -0
- codeshift/cli/commands/scan.py +368 -0
- codeshift/cli/commands/upgrade.py +436 -0
- codeshift/cli/commands/upgrade_all.py +518 -0
- codeshift/cli/main.py +221 -0
- codeshift/cli/quota.py +210 -0
- codeshift/knowledge/__init__.py +50 -0
- codeshift/knowledge/cache.py +167 -0
- codeshift/knowledge/generator.py +231 -0
- codeshift/knowledge/models.py +151 -0
- codeshift/knowledge/parser.py +270 -0
- codeshift/knowledge/sources.py +388 -0
- codeshift/knowledge_base/__init__.py +17 -0
- codeshift/knowledge_base/loader.py +102 -0
- codeshift/knowledge_base/models.py +110 -0
- codeshift/migrator/__init__.py +23 -0
- codeshift/migrator/ast_transforms.py +256 -0
- codeshift/migrator/engine.py +395 -0
- codeshift/migrator/llm_migrator.py +320 -0
- codeshift/migrator/transforms/__init__.py +19 -0
- codeshift/migrator/transforms/fastapi_transformer.py +174 -0
- codeshift/migrator/transforms/pandas_transformer.py +236 -0
- codeshift/migrator/transforms/pydantic_v1_to_v2.py +637 -0
- codeshift/migrator/transforms/requests_transformer.py +218 -0
- codeshift/migrator/transforms/sqlalchemy_transformer.py +175 -0
- codeshift/scanner/__init__.py +6 -0
- codeshift/scanner/code_scanner.py +352 -0
- codeshift/scanner/dependency_parser.py +473 -0
- codeshift/utils/__init__.py +5 -0
- codeshift/utils/api_client.py +266 -0
- codeshift/utils/cache.py +318 -0
- codeshift/utils/config.py +71 -0
- codeshift/utils/llm_client.py +221 -0
- codeshift/validator/__init__.py +6 -0
- codeshift/validator/syntax_checker.py +183 -0
- codeshift/validator/test_runner.py +224 -0
- codeshift-0.2.0.dist-info/METADATA +326 -0
- codeshift-0.2.0.dist-info/RECORD +65 -0
- codeshift-0.2.0.dist-info/WHEEL +5 -0
- codeshift-0.2.0.dist-info/entry_points.txt +2 -0
- codeshift-0.2.0.dist-info/licenses/LICENSE +21 -0
- codeshift-0.2.0.dist-info/top_level.txt +1 -0
codeshift/cli/commands/upgrade_all.py
@@ -0,0 +1,518 @@
+"""Upgrade-all command for migrating all outdated packages to their latest versions."""
+
+import json
+from pathlib import Path
+from typing import Any
+
+import click
+from rich.console import Console
+from rich.panel import Panel
+from rich.progress import BarColumn, Progress, SpinnerColumn, TaskProgressColumn, TextColumn
+from rich.table import Table
+
+from codeshift.cli.commands.scan import (
+    compare_versions,
+    get_latest_version,
+    is_major_upgrade,
+    parse_version,
+)
+from codeshift.knowledge import (
+    GeneratedKnowledgeBase,
+    generate_knowledge_base_sync,
+    is_tier_1_library,
+)
+from codeshift.migrator.ast_transforms import TransformChange, TransformResult, TransformStatus
+from codeshift.migrator.transforms.fastapi_transformer import transform_fastapi
+from codeshift.migrator.transforms.pandas_transformer import transform_pandas
+from codeshift.migrator.transforms.pydantic_v1_to_v2 import transform_pydantic_v1_to_v2
+from codeshift.migrator.transforms.requests_transformer import transform_requests
+from codeshift.migrator.transforms.sqlalchemy_transformer import transform_sqlalchemy
+from codeshift.scanner import CodeScanner, DependencyParser
+from codeshift.utils.config import ProjectConfig
+
+console = Console()
+
+
+def save_multi_state(project_path: Path, state: dict) -> None:
+    """Save the multi-library migration state."""
+    state_dir = project_path / ".codeshift"
+    state_dir.mkdir(parents=True, exist_ok=True)
+    state_file = state_dir / "state.json"
+    state_file.write_text(json.dumps(state, indent=2, default=str))
+
+
+def run_single_upgrade(
+    library: str,
+    target: str,
+    project_path: Path,
+    project_config: ProjectConfig,
+    verbose: bool,
+) -> tuple[list[TransformResult], GeneratedKnowledgeBase | None]:
+    """Run upgrade for a single library and return results.
+
+    Args:
+        library: Library name to upgrade.
+        target: Target version.
+        project_path: Path to the project.
+        project_config: Project configuration.
+        verbose: Whether to show verbose output.
+
+    Returns:
+        Tuple of (list of transform results, generated knowledge base).
+    """
+    results: list[TransformResult] = []
+    generated_kb: GeneratedKnowledgeBase | None = None
+
+    # Get current version
+    dep_parser = DependencyParser(project_path)
+    current_dep = dep_parser.get_dependency(library)
+
+    current_version = None
+    if current_dep and current_dep.version_spec:
+        import re
+
+        version_match = re.search(r"(\d+\.\d+(?:\.\d+)?)", current_dep.version_spec)
+        if version_match:
+            current_version = version_match.group(1)
+
+    # Fetch knowledge sources
+    try:
+        generated_kb = generate_knowledge_base_sync(
+            package=library,
+            old_version=current_version or "1.0",
+            new_version=target,
+        )
+    except Exception:
+        pass
+
+    # Scan for library usage
+    scanner = CodeScanner(library, exclude_patterns=project_config.exclude)
+    scan_result = scanner.scan_directory(project_path)
+
+    if not scan_result.imports:
+        return results, generated_kb
+
+    # Get unique files with imports
+    files_to_transform = set()
+    for imp in scan_result.imports:
+        files_to_transform.add(imp.file_path)
+
+    # Select transformer based on library
+    transform_func = {
+        "pydantic": transform_pydantic_v1_to_v2,
+        "fastapi": transform_fastapi,
+        "sqlalchemy": transform_sqlalchemy,
+        "pandas": transform_pandas,
+        "requests": transform_requests,
+    }.get(library)
+
+    if not transform_func:
+        return results, generated_kb
+
+    for file_path in files_to_transform:
+        try:
+            source_code = file_path.read_text()
+            transformed_code, changes = transform_func(source_code)
+
+            result = TransformResult(
+                file_path=file_path,
+                status=TransformStatus.SUCCESS if changes else TransformStatus.NO_CHANGES,
+                original_code=source_code,
+                transformed_code=transformed_code,
+                changes=[
+                    TransformChange(
+                        description=c.description,
+                        line_number=c.line_number,
+                        original=c.original,
+                        replacement=c.replacement,
+                        transform_name=c.transform_name,
+                        confidence=getattr(c, "confidence", 1.0),
+                    )
+                    for c in changes
+                ],
+            )
+
+            if result.has_changes:
+                results.append(result)
+
+        except Exception:
+            pass
+
+    return results, generated_kb
+
+
+@click.command("upgrade-all")
+@click.option(
+    "--path",
+    "-p",
+    type=click.Path(exists=True),
+    default=".",
+    help="Path to the project to analyze",
+)
+@click.option(
+    "--tier1-only",
+    is_flag=True,
+    help="Only upgrade Tier 1 libraries (deterministic transforms)",
+)
+@click.option(
+    "--major-only",
+    is_flag=True,
+    help="Only perform major version upgrades",
+)
+@click.option(
+    "--include",
+    "-i",
+    multiple=True,
+    help="Only include specific libraries (can be specified multiple times)",
+)
+@click.option(
+    "--exclude",
+    "-e",
+    multiple=True,
+    help="Exclude specific libraries (can be specified multiple times)",
+)
+@click.option(
+    "--update-deps/--no-update-deps",
+    default=True,
+    help="Update dependency files (pyproject.toml, requirements.txt) with new versions",
+)
+@click.option(
+    "--dry-run",
+    is_flag=True,
+    help="Show what would be changed without saving state",
+)
+@click.option(
+    "--verbose",
+    "-v",
+    is_flag=True,
+    help="Show detailed output",
+)
+def upgrade_all(
+    path: str,
+    tier1_only: bool,
+    major_only: bool,
+    include: tuple,
+    exclude: tuple,
+    update_deps: bool,
+    dry_run: bool,
+    verbose: bool,
+) -> None:
+    """Upgrade all outdated packages to their latest versions.
+
+    This command scans your project for outdated dependencies, identifies which
+    ones have available migrations, and applies all transformations at once.
+
+    By default, it upgrades:
+    - All Tier 1 libraries (pydantic, fastapi, sqlalchemy, pandas, requests)
+    - Any library with a major version upgrade available
+
+    After migration, dependency files (pyproject.toml, requirements.txt) are
+    automatically updated with the new versions unless --no-update-deps is specified.
+
+    \b
+    Examples:
+        codeshift upgrade-all
+        codeshift upgrade-all --tier1-only
+        codeshift upgrade-all --include pydantic --include fastapi
+        codeshift upgrade-all --exclude pandas
+        codeshift upgrade-all --no-update-deps
+        codeshift upgrade-all --dry-run
+    """
+    project_path = Path(path).resolve()
+    project_config = ProjectConfig.from_pyproject(project_path)
+
+    console.print(
+        Panel(
+            "[bold]Scanning project for upgradeable dependencies[/]\n\n" f"Path: {project_path}",
+            title="PyResolve Upgrade All",
+        )
+    )
+
+    # Parse dependencies
+    dep_parser = DependencyParser(project_path)
+    dependencies = dep_parser.parse_all()
+
+    if not dependencies:
+        console.print("[yellow]No dependencies found in project.[/]")
+        return
+
+    console.print(f"\nFound [cyan]{len(dependencies)}[/] dependencies")
+
+    # Check for updates
+    outdated = []
+
+    with Progress(
+        SpinnerColumn(),
+        TextColumn("[progress.description]{task.description}"),
+        BarColumn(),
+        TaskProgressColumn(),
+        console=console,
+    ) as progress:
+        task = progress.add_task("Checking for updates...", total=len(dependencies))
+
+        for dep in dependencies:
+            progress.update(task, description=f"Checking {dep.name}...")
+
+            current_version = parse_version(dep.version_spec) if dep.version_spec else None
+            latest_version = get_latest_version(dep.name)
+
+            if latest_version and current_version:
+                if compare_versions(current_version, latest_version):
+                    is_major = is_major_upgrade(current_version, latest_version)
+                    is_tier1 = is_tier_1_library(dep.name)
+
+                    outdated.append(
+                        {
+                            "name": dep.name,
+                            "current": current_version,
+                            "latest": latest_version,
+                            "is_major": is_major,
+                            "is_tier1": is_tier1,
+                        }
+                    )
+
+            progress.advance(task)
+
+    if not outdated:
+        console.print("\n[green]All dependencies are up to date![/]")
+        return
+
+    # Filter to upgradeable packages
+    upgradeable = []
+
+    for pkg in outdated:
+        # Apply include filter
+        if include and pkg["name"] not in include:
+            continue
+
+        # Apply exclude filter
+        if pkg["name"] in exclude:
+            continue
+
+        # Apply tier1-only filter
+        if tier1_only and not pkg["is_tier1"]:
+            continue
+
+        # Apply major-only filter
+        if major_only and not pkg["is_major"]:
+            continue
+
+        # By default, include Tier 1 libraries and major upgrades
+        if not (tier1_only or major_only or include):
+            if not (pkg["is_tier1"] or pkg["is_major"]):
+                continue
+
+        upgradeable.append(pkg)
+
+    if not upgradeable:
+        console.print("\n[yellow]No upgradeable packages found matching the criteria.[/]")
+        console.print("[dim]Use --verbose to see all outdated packages, or adjust filters.[/]")
+
+        if verbose and outdated:
+            console.print("\nOutdated packages (not matching criteria):")
+            for pkg in outdated:
+                tier_label = "[green]Tier 1[/]" if pkg["is_tier1"] else "[dim]Tier 2/3[/]"
+                type_label = "[red]Major[/]" if pkg["is_major"] else "[dim]Minor/Patch[/]"
+                console.print(
+                    f" {pkg['name']} {pkg['current']} → {pkg['latest']} {tier_label} {type_label}"
+                )
+        return
+
+    # Display packages to upgrade
+    console.print(f"\n[bold]Packages to upgrade ({len(upgradeable)})[/]\n")
+
+    table = Table()
+    table.add_column("Package", style="cyan")
+    table.add_column("Current", justify="right")
+    table.add_column("Target", justify="right")
+    table.add_column("Type", justify="center")
+    table.add_column("Tier", justify="center")
+
+    for pkg in upgradeable:
+        type_str = "[red]Major[/]" if pkg["is_major"] else "[yellow]Minor/Patch[/]"
+        tier_str = "[green]Tier 1[/]" if pkg["is_tier1"] else "[dim]Tier 2/3[/]"
+        table.add_row(str(pkg["name"]), str(pkg["current"]), str(pkg["latest"]), type_str, tier_str)
+
+    console.print(table)
+
+    # Run upgrades for each package
+    console.print("\n[bold]Running migrations...[/]\n")
+
+    all_results: dict[str, list[dict]] = {}
+    migration_summary: list[dict] = []
+
+    with Progress(
+        SpinnerColumn(),
+        TextColumn("[progress.description]{task.description}"),
+        BarColumn(),
+        TaskProgressColumn(),
+        console=console,
+    ) as progress:
+        task = progress.add_task("Upgrading packages...", total=len(upgradeable))
+
+        for pkg in upgradeable:
+            progress.update(task, description=f"Upgrading {pkg['name']} to {pkg['latest']}...")
+
+            results, generated_kb = run_single_upgrade(
+                library=str(pkg["name"]),
+                target=str(pkg["latest"]),
+                project_path=project_path,
+                project_config=project_config,
+                verbose=verbose,
+            )
+
+            if results:
+                all_results[str(pkg["name"])] = [
+                    {
+                        "file_path": str(r.file_path),
+                        "original_code": r.original_code,
+                        "transformed_code": r.transformed_code,
+                        "change_count": r.change_count,
+                        "status": r.status.value,
+                        "changes": [
+                            {
+                                "description": c.description,
+                                "line_number": c.line_number,
+                                "original": c.original,
+                                "replacement": c.replacement,
+                                "transform_name": c.transform_name,
+                            }
+                            for c in r.changes
+                        ],
+                    }
+                    for r in results
+                ]
+
+                migration_summary.append(
+                    {
+                        "library": pkg["name"],
+                        "from_version": pkg["current"],
+                        "to_version": pkg["latest"],
+                        "files_changed": len(results),
+                        "total_changes": sum(r.change_count for r in results),
+                        "breaking_changes_detected": (
+                            len(generated_kb.breaking_changes) if generated_kb else 0
+                        ),
+                    }
+                )
+
+            progress.advance(task)
+
+    # Display results summary
+    if not migration_summary:
+        console.print(
+            "[green]No changes needed![/] Your code appears to be compatible with the latest versions."
+        )
+        return
+
+    console.print("\n[bold]Migration Summary[/]\n")
+
+    summary_table = Table()
+    summary_table.add_column("Library", style="cyan")
+    summary_table.add_column("Migration", justify="center")
+    summary_table.add_column("Files", justify="right")
+    summary_table.add_column("Changes", justify="right")
+    summary_table.add_column("Status", justify="center")
+
+    total_files = 0
+    total_changes = 0
+
+    for summary in migration_summary:
+        summary_table.add_row(
+            summary["library"],
+            f"{summary['from_version']} → {summary['to_version']}",
+            str(summary["files_changed"]),
+            str(summary["total_changes"]),
+            "[green]Ready[/]",
+        )
+        total_files += summary["files_changed"]
+        total_changes += summary["total_changes"]
+
+    console.print(summary_table)
+    console.print(
+        f"\n[bold]Total:[/] [cyan]{total_changes}[/] changes across [cyan]{total_files}[/] files"
+    )
+
+    # Show detailed changes if verbose
+    if verbose:
+        console.print("\n[bold]Change Details[/]")
+        for lib_name, lib_results in all_results.items():
+            console.print(f"\n[bold cyan]{lib_name}[/]")
+            for result_dict in lib_results:
+                try:
+                    display_path = str(
+                        Path(str(result_dict["file_path"])).relative_to(project_path)
+                    )
+                except ValueError:
+                    display_path = str(result_dict["file_path"])
+                console.print(f" [cyan]{display_path}[/]:")
+                for change_dict in result_dict["changes"]:
+                    console.print(f" • {change_dict['description']}")
+
+    # Update dependency files with new versions
+    if update_deps and migration_summary:
+        console.print("\n[bold]Updating dependency files...[/]\n")
+
+        dep_parser = DependencyParser(project_path)
+        dep_updates: list[tuple[str, str, list[tuple[Path, bool]]]] = []
+
+        for summary in migration_summary:
+            if not dry_run:
+                update_results = dep_parser.update_dependency_version(
+                    str(summary["library"]), str(summary["to_version"])
+                )
+                dep_updates.append(
+                    (str(summary["library"]), str(summary["to_version"]), update_results)
+                )
+            else:
+                # In dry run, just show what would be updated
+                dep_updates.append((str(summary["library"]), str(summary["to_version"]), []))
+
+        # Display update results
+        if dry_run:
+            console.print("[dim]Would update the following dependencies:[/]")
+            for lib_name, version, _ in dep_updates:
+                console.print(f" [cyan]{lib_name}[/] → [green]>={version}[/]")
+        else:
+            files_updated: set[Path] = set()
+            for lib_name, version, update_results in dep_updates:
+                for file_path, success in update_results:
+                    if success:
+                        files_updated.add(file_path)
+                        if verbose:
+                            console.print(
+                                f" Updated [cyan]{lib_name}[/] to [green]>={version}[/] in {file_path.name}"
+                            )
+
+            if files_updated:
+                console.print(f"Updated versions in: {', '.join(f.name for f in files_updated)}")
+            else:
+                console.print(
+                    "[dim]No dependency files were updated (dependencies may not be pinned)[/]"
+                )
+
+    # Save state
+    if not dry_run:
+        # Merge all results into a combined state format
+        # This maintains compatibility with diff/apply commands
+        combined_results: list[dict[str, Any]] = []
+        for lib_name, lib_results in all_results.items():
+            for result_dict in lib_results:
+                # Add library info to each result for tracking
+                result_dict["library"] = lib_name
+                combined_results.append(result_dict)
+
+        state = {
+            "library": "multiple",
+            "migrations": migration_summary,
+            "project_path": str(project_path),
+            "results": combined_results,
+        }
+        save_multi_state(project_path, state)
+
+        console.print("\n[dim]State saved to .codeshift/state.json[/]")
+        console.print("\nNext steps:")
+        console.print(" [cyan]codeshift diff[/] - View detailed diff of proposed changes")
+        console.print(" [cyan]codeshift apply[/] - Apply changes to your files")
+    else:
+        console.print("\n[dim]Dry run mode - no state saved[/]")