converge-cli 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- converge/__init__.py +42 -0
- converge/cli/__init__.py +0 -0
- converge/cli/explain.py +43 -0
- converge/cli/main.py +184 -0
- converge/graph/__init__.py +0 -0
- converge/graph/queries.py +45 -0
- converge/graph/store.py +135 -0
- converge/models.py +83 -0
- converge/scanner/__init__.py +0 -0
- converge/scanner/ast_parser.py +63 -0
- converge/scanner/project.py +87 -0
- converge/scanner/scanner.py +52 -0
- converge/scanner/service_detector.py +69 -0
- converge/solver/__init__.py +0 -0
- converge/solver/conflict.py +77 -0
- converge/solver/planner.py +67 -0
- converge/validation/__init__.py +0 -0
- converge/validation/sandbox.py +68 -0
- converge/validation/smoke.py +43 -0
- converge_cli-0.1.0.dist-info/METADATA +33 -0
- converge_cli-0.1.0.dist-info/RECORD +23 -0
- converge_cli-0.1.0.dist-info/WHEEL +4 -0
- converge_cli-0.1.0.dist-info/entry_points.txt +2 -0
converge/__init__.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
"""
Converge API

Main entrypoints for using Converge programmatically.
"""

# Graph storage and query layers.
from converge.graph.queries import GraphQueries
from converge.graph.store import GraphStore
# Core data models shared across the package.
from converge.models import (
    EntityType,
    GraphEntity,
    GraphRelationship,
    Module,
    Package,
    RelationshipType,
    Repository,
    Route,
)
# Scanning, conflict detection/repair planning, and sandboxed validation.
from converge.scanner.scanner import Scanner
from converge.solver.conflict import ConflictDetector, ConflictType
from converge.solver.planner import RepairPlan, RepairPlanner
from converge.validation.sandbox import UVSandbox
from converge.validation.smoke import ValidationRunner

# Explicit public API for `from converge import *`.
__all__ = [
    "Scanner",
    "GraphStore",
    "GraphQueries",
    "ConflictDetector",
    "RepairPlanner",
    "UVSandbox",
    "ValidationRunner",
    "EntityType",
    "RelationshipType",
    "GraphEntity",
    "GraphRelationship",
    "Repository",
    "Package",
    "Module",
    "Route",
    "ConflictType",
    "RepairPlan",
]
|
converge/cli/__init__.py
ADDED
|
File without changes
|
converge/cli/explain.py
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
import networkx as nx
|
|
6
|
+
from rich.console import Console
|
|
7
|
+
from rich.tree import Tree
|
|
8
|
+
|
|
9
|
+
from converge.models import RelationshipType
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class ExplainabilityEngine:
    """Renders human-readable views of the dependency graph on a Rich console."""

    def __init__(self, G: nx.DiGraph[Any], console: Console):
        self.G = G
        self.console = console

    def render_dependency_tree(self, root_id: str) -> None:
        """Print the REQUIRES tree rooted at *root_id*, or an error if it is unknown."""
        if root_id not in self.G:
            self.console.print(f"[red]Entity {root_id} not found in graph.[/red]")
            return

        node_attrs = self.G.nodes[root_id]
        tree = Tree(f"[bold blue]{node_attrs.get('name', root_id)}[/bold blue] ({node_attrs.get('type')})")

        self._build_tree(root_id, tree, RelationshipType.REQUIRES)
        self.console.print(tree)

    def _build_tree(self, node_id: str, tree: Tree, edge_type: str, depth: int = 0, max_depth: int = 3) -> None:
        """Recursively attach children linked by *edge_type*, truncating at *max_depth*."""
        if depth > max_depth:
            tree.add("[dim]... (max depth reached)[/dim]")
            return

        for child in self.G.successors(node_id):
            attrs = self.G.get_edge_data(node_id, child)
            # Only follow edges of the requested relationship type.
            if not attrs or attrs.get("type") != edge_type:
                continue
            child_attrs = self.G.nodes[child]
            branch = tree.add(f"[green]{child_attrs.get('name', child)}[/green]")
            self._build_tree(child, branch, edge_type, depth + 1, max_depth)

    def explain_conflict(self, conflict_id: str) -> None:
        """Print a (placeholder) analysis header for *conflict_id*."""
        self.console.print(f"[bold red]Conflict Analysis:[/bold red] {conflict_id}")
        # In a real system we would extract this directly from the DB
        self.console.print("[white]Detailed path reasoning would be rendered here.[/white]")
|
converge/cli/main.py
ADDED
|
@@ -0,0 +1,184 @@
|
|
|
1
|
+
import typer
|
|
2
|
+
from rich.console import Console
|
|
3
|
+
|
|
4
|
+
from converge.cli.explain import ExplainabilityEngine
|
|
5
|
+
from converge.graph.store import GraphStore
|
|
6
|
+
from converge.scanner.scanner import Scanner
|
|
7
|
+
from converge.solver.conflict import Conflict, ConflictDetector, ConflictType
|
|
8
|
+
from converge.solver.planner import RepairPlan, RepairPlanner
|
|
9
|
+
from converge.validation.sandbox import UVSandbox
|
|
10
|
+
from converge.validation.smoke import ValidationRunner
|
|
11
|
+
|
|
12
|
+
# Top-level Typer application; shows help when invoked with no arguments.
app = typer.Typer(
    help="Converge: A Python-first repository intelligence and environment convergence platform.",
    no_args_is_help=True,
)
# Shared Rich console used by every command for styled output.
console = Console()
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@app.command()
def scan(
    path: str = typer.Argument(".", help="Path to the repository to scan"),
    dry_run: bool = typer.Option(False, "--dry-run", help="Perform a dry run without saving to the database"),
) -> None:
    """
    Scan a codebase to build a graph of repositories, packages, modules, and services.
    """
    console.print(f"[bold green]Scanning repository at:[/bold green] {path}")

    entities, rels = Scanner(path).scan_all()

    console.print(f"Found [bold cyan]{len(entities)}[/bold cyan] entities and [bold cyan]{len(rels)}[/bold cyan] relationships.")

    if dry_run:
        console.print("[yellow]Dry run mode enabled. Results will not be saved.[/yellow]")
        return

    # Persist every scanned entity and relationship to SQLite.
    store = GraphStore()
    for entity in entities:
        store.add_entity(entity)
    for relationship in rels:
        store.add_relationship(relationship)
    console.print("[green]Successfully persisted graph to converge_graph.db[/green]")
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def _run_validation(
    path: str, conflicts: list[Conflict], plans: list[RepairPlan], console: Console
) -> None:
    """
    Validate candidate repair plans inside an isolated uv sandbox.

    Iterates the scored plans in the order `score_plans` returns them and
    stops at the first passing plan; plans after the first success are
    never reported on.
    """
    console.print("\n[bold green]Validating plans in sandboxed environment...[/bold green]")
    sandbox = UVSandbox(path)
    runner = ValidationRunner(sandbox)

    # Determine packages to smoke test based on unresolved imports
    smoke_targets = []
    for c in conflicts:
        if c.type == ConflictType.UNRESOLVED_IMPORT:
            # involved_entities[1] holds the "pkg:<name>" node id — strip the prefix.
            smoke_targets.append(c.involved_entities[1].replace("pkg:", ""))

    scores = runner.score_plans(plans, smoke_targets)

    best_plan = None
    for plan_id, success in scores.items():
        if success:
            # First passing plan wins; stop scanning the rest.
            best_plan = next(p for p in plans if p.id == plan_id)
            console.print(f"[green]Plan {plan_id} passed validation![/green]")
            break
        else:
            console.print(f"[red]Plan {plan_id} failed validation.[/red]")

    if best_plan:
        console.print(f"\n[bold green]Successfully found working plan: {best_plan.id}[/bold green]")
        console.print("[white]In a full implementation, Converge would now rewrite pyproject.toml and your lockfile.[/white]")
    else:
        console.print("\n[bold red]All candidate plans failed validation.[/bold red]")
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
@app.command()
def fix(
    path: str = typer.Argument(".", help="Path to the repository to fix"),
    apply: bool = typer.Option(False, "--apply", help="Apply the fix plan after validation"),
) -> None:
    """
    Identify conflicts in the environment and automatically generate and apply a repair plan.
    """
    console.print(f"[bold blue]Analyzing environment conflicts for:[/bold blue] {path}")

    store = GraphStore()
    try:
        graph = store.load_networkx()
    except Exception as e:
        console.print(f"[red]Failed to load graph. Did you run `converge scan` first? Error: {e}[/red]")
        return

    conflicts = list(ConflictDetector(graph).detect_all())

    if not conflicts:
        console.print("[green]No conflicts detected in the graph![/green]")
        return

    console.print(f"[yellow]Detected {len(conflicts)} conflicts. Generating repair plans...[/yellow]")
    plans = RepairPlanner(conflicts).generate_plans()

    # Show every candidate plan with its rationale and concrete actions.
    for plan in plans:
        console.print(f"\n[cyan]Candidate Plan:[/cyan] {plan.id}")
        console.print(f"[white]Rationale: {plan.rationale}[/white]")
        for action in plan.actions:
            console.print(f"  - [magenta]{action.action_type}[/magenta]: {action.description}")

    if apply:
        _run_validation(path, conflicts, plans, console)
    else:
        console.print("\n[yellow]Running in dry-run mode. Use --apply to execute the fix.[/yellow]")
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
@app.command()
def doctor() -> None:
    """
    Diagnose issues with the environment, parsers, resolvers, or cache.
    """
    console.print("[bold cyan]Running Converge Diagnostics...[/bold cyan]")
    # TODO: Implement checks for uv, python, sqlite, etc.
    # NOTE(review): no checks run yet — this reports success unconditionally.
    console.print("[green]System looks good![/green]")
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
@app.command()
def deps(
    target: str = typer.Argument(..., help="Entity ID to trace (e.g. repo:converge)"),
) -> None:
    """
    Show the dependency tree for a particular entity.
    """
    store = GraphStore()
    try:
        graph = store.load_networkx()
    except Exception as e:
        console.print(f"[red]Failed to load graph. Error: {e}[/red]")
        return

    # Delegate rendering to the explainability engine.
    ExplainabilityEngine(graph, console).render_dependency_tree(target)
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
@app.command()
def validate(
    path: str = typer.Argument(".", help="Path to the repository to validate"),
) -> None:
    """
    Run isolation and validation checks on the current environment without attempting a fix.

    Creates a throwaway uv sandbox purely to prove the toolchain works,
    then removes it again.
    """
    console.print("[bold blue]Running pure validation phase on environment...[/bold blue]")
    sandbox = UVSandbox(path)
    # Simply create sandbox to verify toolchain
    sandbox.create()
    try:
        console.print("[green]Sandbox toolchain is working and isolated execution succeeded.[/green]")
    finally:
        # Bug fix: always clean up once the sandbox exists — the original
        # skipped cleanup() if anything raised after create(), leaking the
        # temporary environment on disk.
        sandbox.cleanup()
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
@app.command()
def explain(
    target: str = typer.Argument(..., help="Entity or conflict ID to explain"),
) -> None:
    """
    Explain the current state of the graph or details of a specific conflict/fix.
    """
    store = GraphStore()
    try:
        G = store.load_networkx()
    except Exception as e:
        console.print(f"[red]Failed to load graph. Error: {e}[/red]")
        return

    engine = ExplainabilityEngine(G, console)
    # Route on the ID namespace prefix: "conflict:..." ids get conflict
    # analysis, everything else is treated as a graph entity. Bug fix: the
    # original substring test (`"conflict:" in target`) would misroute any
    # entity id that merely contains "conflict:" somewhere inside it.
    if target.startswith("conflict:"):
        engine.explain_conflict(target)
    else:
        engine.render_dependency_tree(target)
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
# Allow direct execution (`python -m converge.cli.main`) to launch the CLI.
if __name__ == "__main__":
    app()
|
|
File without changes
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
import networkx as nx
|
|
6
|
+
|
|
7
|
+
from converge.models import RelationshipType
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class GraphQueries:
    """
    Abstractions for traversing the NetworkX graph.
    Used for inferring constraints and paths.
    """
    def __init__(self, G: nx.DiGraph[Any]):
        self.G = G

    def get_dependencies_for_package(self, package_id: str) -> list[str]:
        """Returns all packages that this package requires directly.

        Unknown ids yield an empty list rather than raising.
        """
        deps = []
        if package_id in self.G:
            for neighbor in self.G.successors(package_id):
                edge_data = self.G.get_edge_data(package_id, neighbor)
                if edge_data and edge_data.get("type") == RelationshipType.REQUIRES:
                    deps.append(neighbor)
        return deps

    def get_version_conflicts(self) -> list[tuple[str, str]]:
        """Returns all (source, target) pairs joined by a CONFLICTS_WITH edge."""
        return [
            (u, v)
            for u, v, data in self.G.edges(data=True)
            if data.get("type") == RelationshipType.CONFLICTS_WITH
        ]

    def find_shortest_dependency_path(self, root_id: str, target_id: str) -> list[str] | None:
        """Finds the shortest REQUIRES path from root to target, or None if unreachable."""
        # Only traverse REQUIRES edges
        requires_graph = nx.DiGraph(
            ((u, v, d) for u, v, d in self.G.edges(data=True) if d.get('type') == RelationshipType.REQUIRES)
        )
        try:
            return nx.shortest_path(requires_graph, source=root_id, target=target_id)
        except (nx.NetworkXNoPath, nx.NodeNotFound):
            # Bug fix: the original only caught NetworkXNoPath. shortest_path
            # raises NodeNotFound when either endpoint is missing from the
            # filtered subgraph (any node with no REQUIRES edges is absent
            # here even if it exists in self.G) — treat that as "no path".
            return None
|
converge/graph/store.py
ADDED
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
from collections.abc import Generator
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
import networkx as nx
|
|
8
|
+
from sqlalchemy import text
|
|
9
|
+
from sqlmodel import Field, Session, SQLModel, create_engine, select
|
|
10
|
+
|
|
11
|
+
from converge.models import EntityType, GraphEntity, GraphRelationship, RelationshipType
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
# SQLModel classes for SQLite persistence
|
|
15
|
+
# SQLModel classes for SQLite persistence
class SQLEntity(SQLModel, table=True):
    """SQLite row mirroring a GraphEntity; metadata is stored as a JSON string."""

    id: str = Field(primary_key=True)
    type: str  # Map to EntityType
    name: str
    metadata_json: str = "{}"  # JSON serialized metadata

    def to_pydantic(self) -> GraphEntity:
        """Inflate this row back into the domain model."""
        return GraphEntity(
            id=self.id,
            type=EntityType(self.type),
            name=self.name,
            metadata=json.loads(self.metadata_json)
        )

    @classmethod
    def from_pydantic(cls, entity: GraphEntity) -> SQLEntity:
        """Flatten a domain entity into a persistable row."""
        return cls(
            id=entity.id,
            type=entity.type.value,
            name=entity.name,
            metadata_json=json.dumps(entity.metadata)
        )
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
class SQLRelationship(SQLModel, table=True):
    """SQLite row mirroring a GraphRelationship (edge)."""

    # Composite PK logically: source, target, type
    # For SQLModel without explicit composite primary key class simplicity, we use a surrogate
    id: int | None = Field(default=None, primary_key=True)
    source_id: str = Field(index=True)
    target_id: str = Field(index=True)
    type: str  # Map to RelationshipType
    metadata_json: str = "{}"  # JSON serialized metadata

    def to_pydantic(self) -> GraphRelationship:
        """Inflate this row back into the domain model (surrogate id is dropped)."""
        return GraphRelationship(
            source_id=self.source_id,
            target_id=self.target_id,
            type=RelationshipType(self.type),
            metadata=json.loads(self.metadata_json)
        )

    @classmethod
    def from_pydantic(cls, rel: GraphRelationship) -> SQLRelationship:
        """Flatten a domain relationship into a persistable row."""
        return cls(
            source_id=rel.source_id,
            target_id=rel.target_id,
            type=rel.type.value,
            metadata_json=json.dumps(rel.metadata)
        )
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
class GraphStore:
    """
    Manages physical persistence of the graph using SQLModel (SQLite).
    Provides methods to persist and re-hydrate NetworkX graphs.
    """
    def __init__(self, db_url: str = "sqlite:///converge_graph.db"):
        # Creating the store eagerly ensures the schema exists for all later calls.
        self.engine = create_engine(db_url)
        SQLModel.metadata.create_all(self.engine)

    def get_session(self) -> Generator[Session, None, None]:
        """Yield a session bound to this store's engine (dependency-injection style)."""
        with Session(self.engine) as session:
            yield session

    def add_entity(self, entity: GraphEntity) -> None:
        """Upsert a single entity row (merge keys on the primary-key id)."""
        with Session(self.engine) as session:
            sql_ent = SQLEntity.from_pydantic(entity)
            session.merge(sql_ent)
            session.commit()

    def add_relationship(self, rel: GraphRelationship) -> None:
        """Insert a relationship unless an identical (source, target, type) row exists."""
        with Session(self.engine) as session:
            # Simple deduplication strategy
            stmt = select(SQLRelationship).where(
                SQLRelationship.source_id == rel.source_id,
                SQLRelationship.target_id == rel.target_id,
                SQLRelationship.type == rel.type.value
            )
            existing = session.exec(stmt).first()
            if not existing:
                sql_rel = SQLRelationship.from_pydantic(rel)
                session.add(sql_rel)
                session.commit()

    def load_networkx(self) -> nx.DiGraph[Any]:
        """Hydrates a fully loaded NetworkX directed graph from SQLite."""
        G: nx.DiGraph[Any] = nx.DiGraph()
        with Session(self.engine) as session:
            entities = session.exec(select(SQLEntity)).all()
            for e in entities:
                p_ent = e.to_pydantic()
                # Node attributes mirror the pydantic fields: id/type/name/metadata.
                G.add_node(p_ent.id, **p_ent.model_dump())

            rels = session.exec(select(SQLRelationship)).all()
            for r in rels:
                p_rel = r.to_pydantic()
                G.add_edge(p_rel.source_id, p_rel.target_id, type=p_rel.type, metadata=p_rel.metadata)
        return G

    def save_networkx(self, G: nx.DiGraph[Any]) -> None:
        """Persists a NetworkX digraph into SQLite (destructive: replaces all rows)."""
        with Session(self.engine) as session:
            # Clear existing logic for hard reset, or intelligent merge.
            # Fast path: wipe and replace
            session.execute(text("DELETE FROM sqlrelationship"))
            session.execute(text("DELETE FROM sqlentity"))

            for _node_id, data in G.nodes(data=True):
                # Node attrs were written by load_networkx / the scanner, so they
                # validate directly against the GraphEntity schema.
                ent = GraphEntity.model_validate(data)
                session.add(SQLEntity.from_pydantic(ent))

            for src, dst, data in G.edges(data=True):
                rel = GraphRelationship(
                    source_id=src,
                    target_id=dst,
                    type=data["type"],
                    metadata=data.get("metadata", {})
                )
                session.add(SQLRelationship.from_pydantic(rel))

            session.commit()
|
converge/models.py
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
from enum import StrEnum
|
|
2
|
+
from typing import Any
|
|
3
|
+
|
|
4
|
+
from pydantic import BaseModel, Field
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class EntityType(StrEnum):
    """Kinds of nodes stored in the convergence graph."""

    REPOSITORY = "repository"
    PACKAGE = "package"
    ENVIRONMENT = "environment"
    PYTHON_VERSION = "python_version"
    MODULE = "module"
    ROUTE = "route"
    EXTERNAL_API = "external_api"
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class RelationshipType(StrEnum):
    """Kinds of directed edges between graph entities."""

    IMPORTS = "imports"
    REQUIRES = "requires"
    CONFLICTS_WITH = "conflicts_with"
    CALLS = "calls"
    EXPOSES = "exposes"
    CONFIGURED_BY = "configured_by"
    DEPENDS_ON = "depends_on"
    VALIDATED_BY = "validated_by"
    BROKEN_BY = "broken_by"
    FIXED_BY = "fixed_by"
    SERVED_AT = "served_at"
    REFERENCES_ENV = "references_env"
    USES_INTERPRETER = "uses_interpreter"
    BELONGS_TO = "belongs_to"
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class GraphEntity(BaseModel):
    """Base node model: a uniquely identified, typed, named graph entity."""

    id: str
    type: EntityType
    name: str
    metadata: dict[str, Any] = Field(default_factory=dict)
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
class GraphRelationship(BaseModel):
    """Directed edge between two graph entities, referenced by their ids."""

    source_id: str
    target_id: str
    type: RelationshipType
    metadata: dict[str, Any] = Field(default_factory=dict)
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
# Specialized Entity Models
class Repository(GraphEntity):
    """A source-code repository rooted at `path`."""
    type: EntityType = EntityType.REPOSITORY
    path: str


class Package(GraphEntity):
    """An installable distribution; `source` defaults to PyPI."""
    type: EntityType = EntityType.PACKAGE
    version: str | None = None
    source: str = "pypi"


class Environment(GraphEntity):
    """A Python environment located at `path`."""
    type: EntityType = EntityType.ENVIRONMENT
    path: str


class PythonVersion(GraphEntity):
    """A concrete interpreter version string."""
    type: EntityType = EntityType.PYTHON_VERSION
    version: str


class Module(GraphEntity):
    """A single Python source file within a repository."""
    type: EntityType = EntityType.MODULE
    file_path: str


class Route(GraphEntity):
    """An HTTP endpoint (method + path) exposed by a module."""
    type: EntityType = EntityType.ROUTE
    method: str
    path: str


class ExternalAPI(GraphEntity):
    """A remote service reachable at `url`."""
    type: EntityType = EntityType.EXTERNAL_API
    url: str
|
|
File without changes
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
import ast
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
|
|
4
|
+
from converge.models import GraphRelationship, Module, RelationshipType
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class PythonASTParser:
|
|
8
|
+
"""
|
|
9
|
+
Parses Python files to extract imports and module relationships.
|
|
10
|
+
"""
|
|
11
|
+
def __init__(self, root_dir: str):
|
|
12
|
+
self.root_dir = Path(root_dir)
|
|
13
|
+
|
|
14
|
+
def scan_directory(self) -> tuple[list[Module], list[GraphRelationship]]:
|
|
15
|
+
"""Walks the directory and parses all Python files."""
|
|
16
|
+
modules = []
|
|
17
|
+
relationships = []
|
|
18
|
+
|
|
19
|
+
for p in self.root_dir.rglob("*.py"):
|
|
20
|
+
# Skip hidden dirs or common virtualenvs
|
|
21
|
+
if any(part.startswith(".") or part in ("venv", "env", "node_modules") for part in p.parts):
|
|
22
|
+
continue
|
|
23
|
+
|
|
24
|
+
mod_id = f"mod:{p.relative_to(self.root_dir)}"
|
|
25
|
+
mod = Module(
|
|
26
|
+
id=mod_id,
|
|
27
|
+
name=p.name,
|
|
28
|
+
file_path=str(p)
|
|
29
|
+
)
|
|
30
|
+
modules.append(mod)
|
|
31
|
+
|
|
32
|
+
try:
|
|
33
|
+
content = p.read_text(encoding="utf-8")
|
|
34
|
+
tree = ast.parse(content, filename=str(p))
|
|
35
|
+
|
|
36
|
+
for node in ast.walk(tree):
|
|
37
|
+
if isinstance(node, ast.Import):
|
|
38
|
+
for name in node.names:
|
|
39
|
+
target_pkg = name.name.split('.')[0] # Heuristic: top level package
|
|
40
|
+
relationships.append(
|
|
41
|
+
GraphRelationship(
|
|
42
|
+
source_id=mod_id,
|
|
43
|
+
target_id=f"pkg:{target_pkg}",
|
|
44
|
+
type=RelationshipType.IMPORTS,
|
|
45
|
+
metadata={"full_import": name.name, "line": node.lineno}
|
|
46
|
+
)
|
|
47
|
+
)
|
|
48
|
+
elif isinstance(node, ast.ImportFrom):
|
|
49
|
+
if node.module:
|
|
50
|
+
target_pkg = node.module.split('.')[0]
|
|
51
|
+
relationships.append(
|
|
52
|
+
GraphRelationship(
|
|
53
|
+
source_id=mod_id,
|
|
54
|
+
target_id=f"pkg:{target_pkg}",
|
|
55
|
+
type=RelationshipType.IMPORTS,
|
|
56
|
+
metadata={"full_import": node.module, "line": node.lineno}
|
|
57
|
+
)
|
|
58
|
+
)
|
|
59
|
+
except (SyntaxError, UnicodeDecodeError):
|
|
60
|
+
# We skip files we can't parse safely
|
|
61
|
+
continue
|
|
62
|
+
|
|
63
|
+
return modules, relationships
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
import tomllib
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
|
|
4
|
+
from converge.models import GraphRelationship, Package, RelationshipType
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class ProjectParser:
|
|
8
|
+
"""
|
|
9
|
+
Scans project configuration files like pyproject.toml and requirements.txt.
|
|
10
|
+
"""
|
|
11
|
+
def __init__(self, root_dir: str):
|
|
12
|
+
self.root_dir = Path(root_dir)
|
|
13
|
+
|
|
14
|
+
def parse_pyproject(self) -> tuple[list[Package], list[GraphRelationship]]:
|
|
15
|
+
"""Parses pyproject.toml returning Package entities and REQUIRES relationships."""
|
|
16
|
+
toml_path = self.root_dir / "pyproject.toml"
|
|
17
|
+
if not toml_path.exists():
|
|
18
|
+
return [], []
|
|
19
|
+
|
|
20
|
+
with open(toml_path, "rb") as f:
|
|
21
|
+
try:
|
|
22
|
+
data = tomllib.load(f)
|
|
23
|
+
except tomllib.TOMLDecodeError:
|
|
24
|
+
return [], []
|
|
25
|
+
|
|
26
|
+
packages = []
|
|
27
|
+
relationships = []
|
|
28
|
+
|
|
29
|
+
# Extract dependencies
|
|
30
|
+
deps = data.get("project", {}).get("dependencies", [])
|
|
31
|
+
for dep in deps:
|
|
32
|
+
# Very basic parsing, a real implementation would use packaging.requirements
|
|
33
|
+
pkg_name = dep.split(">=")[0].split("==")[0].split("<=")[0].split("~=")[0].strip()
|
|
34
|
+
pkg_id = f"pkg:{pkg_name}"
|
|
35
|
+
|
|
36
|
+
pkg = Package(
|
|
37
|
+
id=pkg_id,
|
|
38
|
+
name=pkg_name,
|
|
39
|
+
metadata={"constraint": dep}
|
|
40
|
+
)
|
|
41
|
+
packages.append(pkg)
|
|
42
|
+
|
|
43
|
+
# The repository requires this package
|
|
44
|
+
rel = GraphRelationship(
|
|
45
|
+
source_id=f"repo:{self.root_dir.name}",
|
|
46
|
+
target_id=pkg_id,
|
|
47
|
+
type=RelationshipType.REQUIRES,
|
|
48
|
+
metadata={"source": "pyproject.toml"}
|
|
49
|
+
)
|
|
50
|
+
relationships.append(rel)
|
|
51
|
+
|
|
52
|
+
return packages, relationships
|
|
53
|
+
|
|
54
|
+
def parse_requirements_txt(self) -> tuple[list[Package], list[GraphRelationship]]:
|
|
55
|
+
"""Parses requirements.txt returning Package entities and REQUIRES relationships."""
|
|
56
|
+
req_path = self.root_dir / "requirements.txt"
|
|
57
|
+
if not req_path.exists():
|
|
58
|
+
return [], []
|
|
59
|
+
|
|
60
|
+
packages = []
|
|
61
|
+
relationships = []
|
|
62
|
+
|
|
63
|
+
with open(req_path) as f:
|
|
64
|
+
for line in f:
|
|
65
|
+
line = line.strip()
|
|
66
|
+
if not line or line.startswith("#"):
|
|
67
|
+
continue
|
|
68
|
+
# Same naive split
|
|
69
|
+
pkg_name = line.split(">=")[0].split("==")[0].split("<=")[0].split("~=")[0].strip()
|
|
70
|
+
pkg_id = f"pkg:{pkg_name}"
|
|
71
|
+
|
|
72
|
+
pkg = Package(
|
|
73
|
+
id=pkg_id,
|
|
74
|
+
name=pkg_name,
|
|
75
|
+
metadata={"constraint": line}
|
|
76
|
+
)
|
|
77
|
+
packages.append(pkg)
|
|
78
|
+
|
|
79
|
+
rel = GraphRelationship(
|
|
80
|
+
source_id=f"repo:{self.root_dir.name}",
|
|
81
|
+
target_id=pkg_id,
|
|
82
|
+
type=RelationshipType.REQUIRES,
|
|
83
|
+
metadata={"source": "requirements.txt"}
|
|
84
|
+
)
|
|
85
|
+
relationships.append(rel)
|
|
86
|
+
|
|
87
|
+
return packages, relationships
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
from pathlib import Path
|
|
2
|
+
|
|
3
|
+
from converge.models import GraphEntity, GraphRelationship
|
|
4
|
+
from converge.scanner.ast_parser import PythonASTParser
|
|
5
|
+
from converge.scanner.project import ProjectParser
|
|
6
|
+
from converge.scanner.service_detector import ServiceDetector
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class Scanner:
    """
    Orchestrates the different parsers to build a comprehensive graph of the codebase.
    """
    def __init__(self, root_dir: str):
        self.root_dir = Path(root_dir)
        self.entities: list[GraphEntity] = []
        self.relationships: list[GraphRelationship] = []

    @staticmethod
    def _is_ignored(path: Path) -> bool:
        """True for files under hidden directories or common virtualenv folders."""
        return any(part.startswith(".") or part in ("venv", "env", "node_modules") for part in path.parts)

    def scan_all(self) -> tuple[list[GraphEntity], list[GraphRelationship]]:
        """Run every parser in sequence and return the accumulated graph pieces."""
        # 1. Parse Project files
        project_parser = ProjectParser(str(self.root_dir))

        pkgs, rels = project_parser.parse_pyproject()
        self.entities.extend(pkgs)
        self.relationships.extend(rels)

        pkgs_req, rels_req = project_parser.parse_requirements_txt()
        self.entities.extend(pkgs_req)
        self.relationships.extend(rels_req)

        # 2. Parse Python AST for imports
        mods, mod_rels = PythonASTParser(str(self.root_dir)).scan_directory()
        self.entities.extend(mods)
        self.relationships.extend(mod_rels)

        # 3. Detect services and routes
        for source_file in self.root_dir.rglob("*.py"):
            if self._is_ignored(source_file):
                continue

            # Use relative paths for IDs to match ast_parser
            rel_path = source_file.relative_to(self.root_dir)
            routes, route_rels = ServiceDetector.scan_file(source_file)

            # Map absolute path back to relative mod ID in relationships
            for edge in route_rels:
                edge.source_id = f"mod:{rel_path}"

            self.entities.extend(routes)
            self.relationships.extend(route_rels)

        return self.entities, self.relationships
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
import ast
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
|
|
4
|
+
from converge.models import GraphRelationship, RelationshipType, Route
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class ServiceDetector(ast.NodeVisitor):
|
|
8
|
+
"""
|
|
9
|
+
Heuristic AST visitor to detect FastAPI/Flask routes and services.
|
|
10
|
+
"""
|
|
11
|
+
def __init__(self, file_path: Path):
|
|
12
|
+
self.file_path = file_path
|
|
13
|
+
self.routes: list[Route] = []
|
|
14
|
+
self.relationships: list[GraphRelationship] = []
|
|
15
|
+
|
|
16
|
+
def visit_FunctionDef(self, node: ast.FunctionDef) -> None:
|
|
17
|
+
"""
|
|
18
|
+
Looks for decorators like @app.get('/path') to infer routes.
|
|
19
|
+
"""
|
|
20
|
+
for decorator in node.decorator_list:
|
|
21
|
+
if isinstance(decorator, ast.Call):
|
|
22
|
+
func = decorator.func
|
|
23
|
+
|
|
24
|
+
# Check for forms like app.get(...) or router.post(...)
|
|
25
|
+
if isinstance(func, ast.Attribute):
|
|
26
|
+
method_name = func.attr.upper()
|
|
27
|
+
|
|
28
|
+
if method_name in {"GET", "POST", "PUT", "DELETE", "PATCH"}:
|
|
29
|
+
# Try to extract the path from the first argument
|
|
30
|
+
if decorator.args and isinstance(decorator.args[0], ast.Constant):
|
|
31
|
+
route_path = decorator.args[0].value
|
|
32
|
+
|
|
33
|
+
# Ensure route_path is a string
|
|
34
|
+
if isinstance(route_path, bytes):
|
|
35
|
+
route_path = route_path.decode("utf-8")
|
|
36
|
+
elif not isinstance(route_path, str):
|
|
37
|
+
route_path = str(route_path)
|
|
38
|
+
|
|
39
|
+
route_id = f"route:{method_name}:{route_path}"
|
|
40
|
+
route = Route(
|
|
41
|
+
id=route_id,
|
|
42
|
+
name=f"{method_name} {route_path}",
|
|
43
|
+
method=method_name,
|
|
44
|
+
path=route_path,
|
|
45
|
+
metadata={"file": str(self.file_path), "line": node.lineno}
|
|
46
|
+
)
|
|
47
|
+
self.routes.append(route)
|
|
48
|
+
|
|
49
|
+
# The module exposes this route
|
|
50
|
+
mod_id = f"mod:{self.file_path}"
|
|
51
|
+
rel = GraphRelationship(
|
|
52
|
+
source_id=mod_id,
|
|
53
|
+
target_id=route_id,
|
|
54
|
+
type=RelationshipType.EXPOSES
|
|
55
|
+
)
|
|
56
|
+
self.relationships.append(rel)
|
|
57
|
+
|
|
58
|
+
self.generic_visit(node)
|
|
59
|
+
|
|
60
|
+
@classmethod
|
|
61
|
+
def scan_file(cls, path: Path) -> tuple[list[Route], list[GraphRelationship]]:
|
|
62
|
+
try:
|
|
63
|
+
content = path.read_text(encoding="utf-8")
|
|
64
|
+
tree = ast.parse(content, filename=str(path))
|
|
65
|
+
detector = cls(path)
|
|
66
|
+
detector.visit(tree)
|
|
67
|
+
return detector.routes, detector.relationships
|
|
68
|
+
except (SyntaxError, UnicodeDecodeError):
|
|
69
|
+
return [], []
|
|
File without changes
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
from __future__ import annotations

from enum import StrEnum
from typing import Any

import networkx as nx
from pydantic import BaseModel

from converge.graph.queries import GraphQueries
from converge.models import RelationshipType
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class ConflictType(str):
|
|
13
|
+
MISSING_PACKAGE = "missing_package"
|
|
14
|
+
VERSION_CLASH = "version_clash"
|
|
15
|
+
UNRESOLVED_IMPORT = "unresolved_import"
|
|
16
|
+
|
|
17
|
+
class Conflict(BaseModel):
    """A single detected inconsistency in the dependency graph."""

    # Stable identifier, e.g. "conflict:unresolved_<src>_<dst>".
    id: str
    type: str  # one of the ConflictType constants (kept as plain str)
    # Human-readable explanation of the conflict.
    description: str
    # Graph node ids participating in the conflict (source first, target second).
    involved_entities: list[str]
    # Extra context, e.g. the raw edge data; pydantic copies this mutable
    # default per instance, so the empty dict is not shared.
    metadata: dict[str, Any] = {}
|
|
23
|
+
|
|
24
|
+
class ConflictDetector:
    """
    Analyzes the graph to find broken relationships or unmet constraints.
    """

    def __init__(self, G: nx.DiGraph[Any]):
        self.G = G
        self.queries = GraphQueries(G)

    def detect_all(self) -> list[Conflict]:
        """Run every individual detector and concatenate the findings."""
        found: list[Conflict] = []
        found.extend(self._detect_unresolved_imports())
        found.extend(self._detect_version_clashes())
        # In a real system, we'd also check if the installed environment matches requirements
        return found

    def _detect_unresolved_imports(self) -> list[Conflict]:
        """
        Finds IMPORTS edges that do not point to a known installed package or internal module.
        """
        # Edge "type" may be stored as the enum member or its raw string value.
        import_markers = (RelationshipType.IMPORTS, RelationshipType.IMPORTS.value)
        requires_markers = (RelationshipType.REQUIRES, RelationshipType.REQUIRES.value)

        found: list[Conflict] = []
        for src, dst, edge_data in self.G.edges(data=True):
            if edge_data.get("type") not in import_markers:
                continue

            # An import is valid if the target package has been declared via REQUIRES from a repo/project
            declared = False
            for pred in self.G.predecessors(dst):
                attrs = self.G.get_edge_data(pred, dst)
                if attrs and attrs.get("type") in requires_markers:
                    declared = True
                    break
            if declared:
                continue

            # We might have imported a third-party package without adding to pyproject.toml
            found.append(Conflict(
                id=f"conflict:unresolved_{src}_{dst}",
                type=ConflictType.UNRESOLVED_IMPORT,
                description=f"Module {src} imports {dst}, but it is not declared in dependencies.",
                involved_entities=[src, dst],
                metadata={"import_data": edge_data}
            ))
        return found

    def _detect_version_clashes(self) -> list[Conflict]:
        """Wrap each clashing (u, v) pair reported by the graph queries in a Conflict."""
        return [
            Conflict(
                id=f"conflict:clash_{left}_{right}",
                type=ConflictType.VERSION_CLASH,
                description=f"Version conflict between {left} and {right}.",
                involved_entities=[left, right]
            )
            for left, right in self.queries.get_version_conflicts()
        ]
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
from enum import StrEnum

from pydantic import BaseModel

from converge.solver.conflict import Conflict, ConflictType
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class RepairActionType(str):
|
|
7
|
+
ADD_DEPENDENCY = "add_dependency"
|
|
8
|
+
PIN_VERSION = "pin_version"
|
|
9
|
+
UPGRADE_DEPENDENCY = "upgrade_dependency"
|
|
10
|
+
DOWNGRADE_DEPENDENCY = "downgrade_dependency"
|
|
11
|
+
|
|
12
|
+
class RepairAction(BaseModel):
    """A single atomic change to a project's dependency set."""

    # One of the RepairActionType constants (kept as plain str for serialization).
    action_type: str
    # Distribution name the action operates on, e.g. "requests".
    target_package: str
    # Version to apply; the default "latest" means no explicit pin.
    target_version: str = "latest"
    # Human-readable explanation of why this action is proposed.
    description: str
|
|
17
|
+
|
|
18
|
+
class RepairPlan(BaseModel):
    """An ordered set of repair actions plus the reasoning behind them."""

    # Stable identifier, e.g. "plan:001"; used as the key in validation results.
    id: str
    # Why this plan is expected to resolve the detected conflicts.
    rationale: str
    # Actions to apply, in order.
    actions: list[RepairAction]
|
|
22
|
+
|
|
23
|
+
class RepairPlanner:
    """
    Generates Candidate Repair Plans based on detected conflicts.
    """

    def __init__(self, conflicts: list[Conflict]):
        self.conflicts = conflicts

    @staticmethod
    def _package_name(conflict: Conflict) -> str:
        """Return the bare package name from the conflict's target entity id.

        Entity ids look like ``pkg:<name>``; only a *leading* ``pkg:`` marker
        is stripped. (The previous ``str.replace`` would also have mangled any
        later occurrence of ``pkg:`` inside the name itself.)
        """
        return conflict.involved_entities[1].removeprefix("pkg:")

    def generate_plans(self) -> list[RepairPlan]:
        """Map each fixable conflict to a repair action and bundle them into a plan.

        Returns:
            A single-element list with one candidate plan covering all fixable
            conflicts, or an empty list when no conflict has a known repair.
        """
        actions: list[RepairAction] = []
        for c in self.conflicts:
            if c.type == ConflictType.UNRESOLVED_IMPORT:
                pkg_name = self._package_name(c)
                # Simple rule: add dependency to pyproject.toml
                actions.append(RepairAction(
                    action_type=RepairActionType.ADD_DEPENDENCY,
                    target_package=pkg_name,
                    description=f"Add {pkg_name} to pyproject.toml dependencies to satisfy import."
                ))
            elif c.type == ConflictType.VERSION_CLASH:
                # In a real engine, we calculate the intersection of semver ranges.
                pkg_name = self._package_name(c)
                actions.append(RepairAction(
                    action_type=RepairActionType.PIN_VERSION,
                    target_package=pkg_name,
                    description=f"Pin {pkg_name} to a safe version."
                ))

        plans: list[RepairPlan] = []
        if actions:
            plans.append(RepairPlan(
                id="plan:001",
                rationale="Candidate plan to fix missing and conflicting dependencies.",
                actions=actions
            ))

        return plans
|
|
File without changes
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
import os
import shutil
import subprocess
from pathlib import Path

from converge.solver.planner import RepairActionType, RepairPlan
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class SandboxError(Exception):
    """Raised when a sandbox operation (venv creation or package install) fails."""
    pass
|
|
10
|
+
|
|
11
|
+
class UVSandbox:
|
|
12
|
+
"""
|
|
13
|
+
Manages isolated Python environments using `uv`.
|
|
14
|
+
"""
|
|
15
|
+
def __init__(self, base_dir: str):
|
|
16
|
+
self.base_dir = Path(base_dir)
|
|
17
|
+
self.venv_path = self.base_dir / ".venv-converge-test"
|
|
18
|
+
|
|
19
|
+
def create(self, python_version: str | None = None) -> None:
|
|
20
|
+
"""Creates a fresh virtual environment."""
|
|
21
|
+
if self.venv_path.exists():
|
|
22
|
+
shutil.rmtree(self.venv_path)
|
|
23
|
+
|
|
24
|
+
cmd = ["uv", "venv", str(self.venv_path)]
|
|
25
|
+
if python_version:
|
|
26
|
+
cmd.extend(["--python", python_version])
|
|
27
|
+
|
|
28
|
+
result = subprocess.run(cmd, capture_output=True, text=True)
|
|
29
|
+
if result.returncode != 0:
|
|
30
|
+
raise SandboxError(f"Failed to create venv: {result.stderr}")
|
|
31
|
+
|
|
32
|
+
def apply_plan(self, plan: RepairPlan) -> None:
|
|
33
|
+
"""Installs exactly what the repair plan dictates."""
|
|
34
|
+
# For a real implementation, we would modify pyproject.toml in a temp dir.
|
|
35
|
+
# Here we just `uv pip install` into the sandbox.
|
|
36
|
+
to_install = []
|
|
37
|
+
for action in plan.actions:
|
|
38
|
+
if action.action_type in (
|
|
39
|
+
RepairActionType.ADD_DEPENDENCY,
|
|
40
|
+
RepairActionType.PIN_VERSION,
|
|
41
|
+
RepairActionType.UPGRADE_DEPENDENCY,
|
|
42
|
+
RepairActionType.DOWNGRADE_DEPENDENCY
|
|
43
|
+
):
|
|
44
|
+
if action.target_version and action.target_version != "latest":
|
|
45
|
+
to_install.append(f"{action.target_package}=={action.target_version}")
|
|
46
|
+
else:
|
|
47
|
+
to_install.append(action.target_package)
|
|
48
|
+
|
|
49
|
+
if to_install:
|
|
50
|
+
self._uv_pip_install(to_install)
|
|
51
|
+
|
|
52
|
+
def _uv_pip_install(self, packages: list[str]) -> None:
|
|
53
|
+
python_exec = str(self.venv_path / "bin" / "python")
|
|
54
|
+
cmd = ["uv", "pip", "install", "--python", python_exec] + packages
|
|
55
|
+
result = subprocess.run(cmd, capture_output=True, text=True)
|
|
56
|
+
if result.returncode != 0:
|
|
57
|
+
raise SandboxError(f"uv install failed: {result.stderr}")
|
|
58
|
+
|
|
59
|
+
def run_python_cmd(self, code: str) -> bool:
|
|
60
|
+
"""Runs a snippet of Python code in the sandbox and returns True if successful."""
|
|
61
|
+
python_exec = str(self.venv_path / "bin" / "python")
|
|
62
|
+
cmd = [python_exec, "-c", code]
|
|
63
|
+
result = subprocess.run(cmd, capture_output=True, text=True)
|
|
64
|
+
return result.returncode == 0
|
|
65
|
+
|
|
66
|
+
def cleanup(self) -> None:
|
|
67
|
+
if self.venv_path.exists():
|
|
68
|
+
shutil.rmtree(self.venv_path)
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
|
|
2
|
+
from converge.solver.planner import RepairPlan
|
|
3
|
+
from converge.validation.sandbox import UVSandbox
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class ValidationRunner:
    """
    Validates candidate repair plans by executing tests in a sandbox.
    """

    def __init__(self, sandbox: UVSandbox):
        self.sandbox = sandbox

    def validate_plan(self, plan: RepairPlan, smoke_imports: list[str]) -> bool:
        """
        Applies a plan to the sandbox and checks if smoke_imports are resolvable.

        The sandbox is always torn down afterwards; any failure during setup
        or the smoke run yields False.
        """
        try:
            self.sandbox.create()
            self.sandbox.apply_plan(plan)

            # Smoke tests: every target package must be importable.
            # all() short-circuits on the first failing import.
            return all(
                self.sandbox.run_python_cmd(f"import {mod}")
                for mod in smoke_imports
            )
        except Exception:
            return False
        finally:
            self.sandbox.cleanup()

    def score_plans(self, plans: list[RepairPlan], smoke_imports: list[str]) -> dict[str, bool]:
        """
        Scores multiple plans by attempting them in isolated sandboxes.
        Returns a dict mapping plan ID to Success (True/False).
        """
        return {
            candidate.id: self.validate_plan(candidate, smoke_imports)
            for candidate in plans
        }
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: converge-cli
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: A Python-first repository intelligence and environment convergence platform.
|
|
5
|
+
Author-email: Converge Authors <authors@example.com>
|
|
6
|
+
Requires-Python: >=3.12
|
|
7
|
+
Requires-Dist: networkx>=3.0
|
|
8
|
+
Requires-Dist: pydantic>=2.0.0
|
|
9
|
+
Requires-Dist: rich>=13.0.0
|
|
10
|
+
Requires-Dist: sqlmodel>=0.0.16
|
|
11
|
+
Requires-Dist: typer>=0.12.0
|
|
12
|
+
Requires-Dist: uv>=0.1.0
|
|
13
|
+
Provides-Extra: dev
|
|
14
|
+
Requires-Dist: mypy>=1.9.0; extra == 'dev'
|
|
15
|
+
Requires-Dist: pytest>=8.0.0; extra == 'dev'
|
|
16
|
+
Requires-Dist: ruff>=0.3.0; extra == 'dev'
|
|
17
|
+
Description-Content-Type: text/markdown
|
|
18
|
+
|
|
19
|
+
# Converge
|
|
20
|
+
|
|
21
|
+
A Python-first repository intelligence and environment convergence platform. It scans a codebase, builds a graph of repositories, packages, modules, services, routes, environment dependencies, and external APIs, detects dependency and compatibility failures, and automatically iterates toward a working environment using deterministic resolution, validation, and repair loops.
|
|
22
|
+
|
|
23
|
+
## Installation
|
|
24
|
+
|
|
25
|
+
```bash
|
|
26
|
+
pip install -e .
|
|
27
|
+
```
|
|
28
|
+
|
|
29
|
+
## Usage
|
|
30
|
+
|
|
31
|
+
```bash
|
|
32
|
+
converge --help
|
|
33
|
+
```
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
converge/__init__.py,sha256=-nG6rBT_RMWfkXmvLGXRgWBJ8iQ2iX8fr3qW0ZjDtwA,948
|
|
2
|
+
converge/models.py,sha256=q2mEbuO7CCu1HBFPQDnrni9hAlaFjnlClRbzAkeD0KE,1800
|
|
3
|
+
converge/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
4
|
+
converge/cli/explain.py,sha256=hSggwWcqCwgxfHt0fZfab73E-cxZorjDQpvBMdrGF9Q,1690
|
|
5
|
+
converge/cli/main.py,sha256=6YTq7copBpgib2Co0ePB43brCXeN4y5d0ZAiFr2Q5mQ,6262
|
|
6
|
+
converge/graph/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
7
|
+
converge/graph/queries.py,sha256=RHNKYR2Rpbjf_lHb9EGEFq-Qawe1OB_8N9MYKacgcxY,1660
|
|
8
|
+
converge/graph/store.py,sha256=4V_YDVuSiDonuqCb2dtCh8qVe6oMoyqJQjP0i5kAbEM,4853
|
|
9
|
+
converge/scanner/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
10
|
+
converge/scanner/ast_parser.py,sha256=aZheFmYdrVjubr0iRwKr9MB5OmHjO1e7XsysvUzI3Kg,2545
|
|
11
|
+
converge/scanner/project.py,sha256=f-lBd8JtVv_1BB0WDZAZognyegkmv9gN6-WDGYwKQuc,2971
|
|
12
|
+
converge/scanner/scanner.py,sha256=kTKLCKDwvh0dZw-mxNHAIX16SJe5D_DQXA5Meqb3cGU,1930
|
|
13
|
+
converge/scanner/service_detector.py,sha256=9PllLNGTo_t-j02RTqO7eg-Gson8D24_ZfAv0e3ZfaQ,2908
|
|
14
|
+
converge/solver/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
15
|
+
converge/solver/conflict.py,sha256=PnOXqw9cg_L9vHDq747HcT5FxfxVpYW3tq__7LP8apc,2961
|
|
16
|
+
converge/solver/planner.py,sha256=QLhFD8lUKIy4-Y57R4Ri1mfnZlwZDFQrpOXCPLYM7UA,2262
|
|
17
|
+
converge/validation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
18
|
+
converge/validation/sandbox.py,sha256=oSXIYt52Ea5e32CqFOtUCjUPKasEuHkQEPXnFZquwhI,2623
|
|
19
|
+
converge/validation/smoke.py,sha256=G1K9n3xtjHvyEwo2OvFW82IiXL4H5WSKk6dFcS7dodY,1388
|
|
20
|
+
converge_cli-0.1.0.dist-info/METADATA,sha256=sywu5iiU6-LgdNPlbPc5--h2q2CrTA9GobfDlBt9bNI,1060
|
|
21
|
+
converge_cli-0.1.0.dist-info/WHEEL,sha256=QccIxa26bgl1E6uMy58deGWi-0aeIkkangHcxk2kWfw,87
|
|
22
|
+
converge_cli-0.1.0.dist-info/entry_points.txt,sha256=cjU4H0wcPaj4RYKZffhCTqq98AjBBTgUM2aRtBqLmzs,51
|
|
23
|
+
converge_cli-0.1.0.dist-info/RECORD,,
|