modforge-cli 0.2.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,208 @@
1
+ from __future__ import annotations
2
+
3
+ import asyncio
4
+ from collections.abc import Iterable
5
+ import hashlib
6
+ import json
7
+ from pathlib import Path
8
+
9
+ import aiohttp
10
+ from rich.console import Console
11
+ from rich.progress import BarColumn, Progress, SpinnerColumn, TextColumn
12
+
13
+ from modforge_cli.api import ModrinthAPIConfig
14
+
15
# Shared Rich console used for all user-facing status and progress output.
console = Console()
16
+
17
+
18
class ModDownloader:
    """
    Downloads mod files from Modrinth and registers them in a
    ``modrinth.index.json``-style index.

    Each downloaded file is SHA-1 verified against the API-reported hash
    and recorded in the index using Modrinth's standard ``files`` entry
    format.
    """

    def __init__(
        self,
        api: ModrinthAPIConfig,
        mc_version: str,
        loader: str,
        output_dir: Path,
        index_file: Path,
        session: aiohttp.ClientSession,
    ):
        self.api = api
        self.mc_version = mc_version
        self.loader = loader
        self.output_dir = output_dir
        self.index_file = index_file
        self.session = session

        # BUGFIX: make sure the destination directory exists up front;
        # previously the first download failed with FileNotFoundError when
        # the mods/ directory had not been created yet.
        self.output_dir.mkdir(parents=True, exist_ok=True)

        self.index = json.loads(index_file.read_text())

        # Ensure files array exists
        if "files" not in self.index:
            self.index["files"] = []

    def _select_compatible_version(self, versions: list[dict]) -> dict | None:
        """
        Select the most appropriate version based on:
        1. Loader compatibility (fabric/forge/quilt/neoforge)
        2. Minecraft version
        3. Version type (prefer release > beta > alpha)

        Returns None when no version matches both the configured Minecraft
        version and loader.
        """
        # Normalize loader name for case-insensitive comparison.
        loader_lower = self.loader.lower()

        # Filter versions that match both MC version and loader.
        compatible = []
        for v in versions:
            # Check if MC version matches.
            if self.mc_version not in v.get("game_versions", []):
                continue

            # Check if loader matches (case-insensitive).
            loaders = [name.lower() for name in v.get("loaders", [])]
            if loader_lower not in loaders:
                continue

            compatible.append(v)

        if not compatible:
            return None

        # Prioritize by version type: release > beta > alpha.
        version_priority = {"release": 3, "beta": 2, "alpha": 1}

        def version_score(v) -> int:
            vtype = v.get("version_type", "alpha")
            return version_priority.get(vtype, 0)

        # Sort by version type, then by publish date (newest first).
        compatible.sort(key=lambda v: (version_score(v), v.get("date_published", "")), reverse=True)

        return compatible[0]

    async def download_all(self, project_ids: Iterable[str]) -> None:
        """
        Download all mods and register them in modrinth.index.json.

        The index format follows Modrinth's standard:
        - files: array of all mods with hashes, URLs, and metadata
        - dependencies: MC version and loader version
        """
        tasks = [self._download_project(pid) for pid in project_ids]

        try:
            with Progress(
                SpinnerColumn(),
                TextColumn("[bold cyan]{task.description}"),
                BarColumn(),
                TextColumn("{task.completed}/{task.total}"),
                console=console,
            ) as progress:
                task_id = progress.add_task("Downloading mods", total=len(tasks))
                for coro in asyncio.as_completed(tasks):
                    await coro
                    progress.advance(task_id)
        finally:
            # BUGFIX: persist the index even when a download raises (e.g.
            # the hash-mismatch RuntimeError below), so already-completed
            # downloads are not lost from the index.
            self.index_file.write_text(json.dumps(self.index, indent=2))

    async def _download_project(self, project_id: str) -> None:
        """Fetch, select, download, verify, and index a single project."""
        # 1. Fetch all versions for this project.
        url = self.api.project_versions(project_id)

        try:
            async with self.session.get(url) as r:
                if r.status != 200:
                    console.print(
                        f"[red]Failed to fetch versions for {project_id}: HTTP {r.status}[/red]"
                    )
                    return
                versions = await r.json()
        except Exception as e:
            console.print(f"[red]Error fetching {project_id}: {e}[/red]")
            return

        if not versions:
            console.print(f"[yellow]No versions found for {project_id}[/yellow]")
            return

        # 2. Select compatible version.
        version = self._select_compatible_version(versions)

        if not version:
            console.print(
                f"[yellow]No compatible version for {project_id}[/yellow]\n"
                f"[dim] Required: MC {self.mc_version}, Loader: {self.loader}[/dim]"
            )
            return

        # 3. Find primary file.
        files = version.get("files", [])
        primary_file = next((f for f in files if f.get("primary")), None)

        if not primary_file and files:
            # Fallback to first file if no primary is marked.
            primary_file = files[0]

        if not primary_file:
            console.print(
                f"[yellow]No files found for {project_id} version {version.get('version_number')}[/yellow]"
            )
            return

        # 4. Download file to mods/ directory.
        dest = self.output_dir / primary_file["filename"]

        # Check if already registered in index.
        existing_entry = next(
            (f for f in self.index["files"] if f["path"] == f"mods/{primary_file['filename']}"),
            None,
        )

        if existing_entry and dest.exists():
            # Verify hash matches; on mismatch fall through to re-download.
            existing_hash = hashlib.sha1(dest.read_bytes()).hexdigest()
            if existing_hash == primary_file["hashes"]["sha1"]:
                console.print(f"[dim]✓ {primary_file['filename']} (cached)[/dim]")
                return

        # Download the file.
        try:
            async with self.session.get(primary_file["url"]) as r:
                if r.status != 200:
                    console.print(
                        f"[red]Failed to download {primary_file['filename']}: HTTP {r.status}[/red]"
                    )
                    return
                data = await r.read()
                dest.write_bytes(data)
        except Exception as e:
            console.print(f"[red]Download error for {primary_file['filename']}: {e}[/red]")
            return

        # 5. Verify hash; remove the corrupt file before failing loudly.
        sha1 = hashlib.sha1(data).hexdigest()
        sha512 = hashlib.sha512(data).hexdigest()

        if sha1 != primary_file["hashes"]["sha1"]:
            dest.unlink(missing_ok=True)
            raise RuntimeError(
                f"Hash mismatch for {primary_file['filename']}\n"
                f" Expected: {primary_file['hashes']['sha1']}\n"
                f" Got: {sha1}"
            )

        # 6. Register in index (Modrinth format).
        file_entry = {
            "path": f"mods/{primary_file['filename']}",
            "hashes": {"sha1": sha1, "sha512": sha512},
            "downloads": [primary_file["url"]],
            # BUGFIX: fall back to the actual byte count if the API omits
            # the size field instead of raising KeyError.
            "fileSize": primary_file.get("size", len(data)),
        }

        # Remove existing entry if present (update scenario).
        self.index["files"] = [f for f in self.index["files"] if f["path"] != file_entry["path"]]

        # Add new entry.
        self.index["files"].append(file_entry)

        console.print(
            f"[green]✓[/green] {primary_file['filename']} "
            f"[dim](v{version.get('version_number')}, {self.loader})[/dim]"
        )
@@ -0,0 +1,66 @@
1
+ from typing import Literal
2
+
3
+ from pydantic import BaseModel, Field, TypeAdapter
4
+
5
+
6
class BaseAPIModel(BaseModel):
    """Base class for Modrinth API payload models.

    Unknown fields returned by the API are ignored rather than rejected,
    so new upstream fields do not break parsing.
    """

    model_config = {"extra": "ignore"}
8
+
9
+
10
class Manifest(BaseModel):
    """User-authored modpack manifest: target game/loader plus content slugs."""

    name: str
    minecraft: str  # target Minecraft version string
    loader: str  # mod loader name (compared case-sensitively elsewhere — verify casing)
    loader_version: str | None = None
    mods: list[str] = Field(default_factory=list)
    resourcepacks: list[str] = Field(default_factory=list)
    shaderpacks: list[str] = Field(default_factory=list)
18
+
19
+
20
class Hit(BaseAPIModel):
    """A single project entry from a Modrinth search response."""

    project_id: str
    project_type: str  # e.g. "mod" — the resolver filters on this value
    slug: str
    categories: list[str] = Field(default_factory=list)
    versions: list[str] = Field(default_factory=list)  # game versions; checked against the target MC version
26
+
27
+
28
class SearchResult(BaseAPIModel):
    """Top-level Modrinth search response; only the hits are retained."""

    hits: list[Hit] = Field(default_factory=list)
30
+
31
+
32
class Dependency(BaseAPIModel):
    """Dependency entry attached to a project version."""

    dependency_type: Literal["required", "optional", "incompatible", "embedded"] | None = None
    file_name: str | None = None
    project_id: str | None = None  # may be absent; callers must handle None
    version_id: str | None = None
37
+
38
+
39
class File(BaseAPIModel):
    """Downloadable artifact belonging to a project version."""

    id: str | None = None
    hashes: dict[str, str] = Field(default_factory=dict)  # hash algorithm name -> hex digest
    url: str | None = None
    filename: str | None = None
    primary: bool | None = None  # True marks the version's main artifact
    size: int | None = None  # presumably size in bytes — TODO confirm against API docs
    file_type: str | None = None
47
+
48
+
49
class ProjectVersion(BaseAPIModel):
    """A published version of a Modrinth project."""

    id: str
    project_id: str
    version_number: str
    version_type: str  # "release", "beta", or "alpha" per the selection logic elsewhere
    dependencies: list[Dependency] = Field(default_factory=list)
    files: list[File] = Field(default_factory=list)
    game_versions: list[str] = Field(default_factory=list)
    loaders: list[str] = Field(default_factory=list)

    @property
    def is_release(self) -> bool:
        """True when this version is a stable release (not beta/alpha)."""
        return self.version_type == "release"
62
+
63
+
64
# Pre-built adapter for validating the raw JSON array returned by the
# Modrinth "list project versions" endpoint.
ProjectVersionList = TypeAdapter(list[ProjectVersion])

# Public API of this module. BUGFIX: previously omitted BaseAPIModel, Hit,
# Dependency, and File even though they are public and used by sibling
# modules, so star-imports exposed an inconsistent subset.
__all__ = [
    "BaseAPIModel",
    "Manifest",
    "Hit",
    "SearchResult",
    "Dependency",
    "File",
    "ProjectVersion",
    "ProjectVersionList",
]
@@ -0,0 +1,161 @@
1
+ from __future__ import annotations
2
+
3
+ from collections import deque
4
+ from collections.abc import Iterable
5
+ import json
6
+ from pathlib import Path
7
+ from typing import TypedDict
8
+ from urllib.parse import urlparse
9
+ from urllib.request import urlopen
10
+
11
+ from jsonschema import ValidationError, validate
12
+
13
+
14
class NormalizedModRule(TypedDict):
    """Per-mod policy rule after normalization (list values become sets)."""

    conflicts: set[str]  # mods that must not coexist with this one
    sub_mods: set[str]  # mods injected alongside this one


# Mapping of mod slug -> normalized rule.
NormalizedPolicyRules = dict[str, NormalizedModRule]
20
+
21
+
22
class PolicyError(RuntimeError):
    """Raised for policy loading, schema validation, or conflict errors."""
24
+
25
+
26
# -------- schema cache (performance + offline safety) --------
# Maps schema reference (URL or relative path) -> parsed schema dict, so
# each schema is fetched or read from disk at most once per process.
_SCHEMA_CACHE: dict[str, dict] = {}
28
+
29
+
30
def _load_schema(schema_ref: str, base_path: Path) -> dict:
    """
    Load a JSON schema by reference, caching the parsed result.

    ``schema_ref`` may be an http/https/file URL or a path relative to the
    directory containing ``base_path``. Raises PolicyError when the schema
    cannot be located or loaded.
    """
    try:
        return _SCHEMA_CACHE[schema_ref]
    except KeyError:
        pass

    is_url = urlparse(schema_ref).scheme in ("http", "https", "file")

    try:
        if is_url:
            with urlopen(schema_ref) as resp:
                schema = json.load(resp)
        else:
            schema_path = (base_path.parent / schema_ref).resolve()
            if not schema_path.exists():
                raise PolicyError(f"Schema not found: {schema_path}")
            schema = json.loads(schema_path.read_text())
    except Exception as e:
        raise PolicyError(f"Failed to load schema '{schema_ref}': {e}") from e

    _SCHEMA_CACHE[schema_ref] = schema
    return schema
50
+
51
+
52
class ModPolicy:
    """
    Enforces mod compatibility rules:
    - removes conflicts
    - injects recommended sub-mods

    Rules are loaded from a JSON policy file, validated against the schema
    referenced by its ``$schema`` field, and normalized into sets.
    """

    def __init__(self, policy_path: str | Path = "configs/policy.json"):
        # Accept both str and Path for caller convenience.
        self.policy_path = policy_path if isinstance(policy_path, Path) else Path(policy_path)
        self.rules: NormalizedPolicyRules = {}
        self.schema_ref: str | None = None

        self._load()
        self._validate()
        self._normalize()

    # ---------- loading & validation ----------

    def _load(self) -> None:
        """Read the policy JSON and extract its $schema reference."""
        try:
            raw = json.loads(self.policy_path.read_text())

            self.schema_ref = raw.get("$schema")
            if not self.schema_ref:
                raise PolicyError("Policy file missing $schema field")

            # Everything except $schema is the rule mapping itself.
            raw.pop("$schema", None)
            self.rules = raw
        except Exception as e:
            raise PolicyError(f"Failed to load policy: {e}") from e

    def _validate(self) -> None:
        """Validate the raw rules against the referenced JSON schema."""
        try:
            schema = _load_schema(self.schema_ref, self.policy_path)
            validate(instance=self.rules, schema=schema)
        except ValidationError as e:
            raise PolicyError(f"Policy schema violation:\n{e.message}") from e
        except Exception as e:
            raise PolicyError(f"Schema validation failed: {e}") from e

    def _normalize(self) -> None:
        """
        Normalize rule values into sets for O(1) lookups
        """
        for rule in self.rules.values():
            rule["conflicts"] = set(rule.get("conflicts", []))
            rule["sub_mods"] = set(rule.get("sub_mods", []))

    # ---------- public API ----------

    def apply(self, mods: Iterable[str]) -> set[str]:
        """
        Apply policy to a mod set.

        Recursively adds sub-mods and removes conflicts. Explicit mods
        (passed by the caller) always win over implicit ones (injected as
        sub-mods); a conflict between two explicit mods raises PolicyError.
        """
        explicit: set[str] = set(mods)
        active: set[str] = set(explicit)
        implicit: set[str] = set()

        queue = deque(active)

        # 1. Expand sub-mods recursively (breadth-first).
        while queue:
            current = queue.popleft()
            rule = self.rules.get(current)
            if not rule:
                continue

            for sub in rule["sub_mods"]:
                if sub not in active:
                    active.add(sub)
                    implicit.add(sub)
                    queue.append(sub)

        # 2. Resolve conflicts: whichever side is implicit loses.
        for mod in sorted(active):
            if mod not in active:
                # Already removed while resolving an earlier conflict.
                continue

            rule = self.rules.get(mod)
            if not rule:
                continue

            for conflict in rule["conflicts"]:
                if conflict not in active:
                    continue

                if conflict in explicit and mod in explicit:
                    raise PolicyError(f"Explicit mod conflict: {mod} ↔ {conflict}")

                # BUGFIX: drop whichever side is implicit. Previously only
                # an implicit *conflict target* was removed, so a rule
                # declared on an implicit mod against an explicit one left
                # the conflict silently unresolved.
                loser = conflict if conflict in implicit else mod
                active.discard(loser)
                implicit.discard(loser)
                if loser == mod:
                    # This mod is gone; its remaining conflicts are moot.
                    break

        return active

    def diff(self, mods: Iterable[str]) -> dict[str, list[str]]:
        """
        Show what would change without applying.

        Returns {"added": [...], "removed": [...]} as sorted lists.
        """
        # BUGFIX: materialize once. ``mods`` may be a one-shot iterator,
        # and it was previously consumed by set(mods) before apply() saw
        # it, making apply() run on an empty input.
        original = set(mods)
        final = self.apply(original)

        return {
            "added": sorted(final - original),
            "removed": sorted(original - final),
        }
@@ -0,0 +1,184 @@
1
+ import asyncio
2
+ from collections import deque
3
+ from collections.abc import Iterable
4
+
5
+ import aiohttp
6
+
7
+ from modforge_cli.api import ModrinthAPIConfig
8
+ from modforge_cli.core.models import ProjectVersion, ProjectVersionList, SearchResult
9
+ from modforge_cli.core.policy import ModPolicy
10
+
11
# Resolve package metadata for the User-Agent string; fall back to
# placeholder values when the generated __version__ module is absent
# (e.g. when running straight from a source checkout).
try:
    from modforge_cli.__version__ import __author__, __version__
except ImportError:
    __version__ = "unknown"
    __author__ = "Frank1o3"
16
+
17
+
18
class ModResolver:
    """
    Resolves mod slugs to Modrinth project IDs, then walks required and
    optional dependencies breadth-first until the set is closed.
    """

    def __init__(
        self,
        *,
        policy: ModPolicy,
        api: ModrinthAPIConfig,
        mc_version: str,
        loader: str,
    ) -> None:
        self.policy = policy
        self.api = api
        self.mc_version = mc_version
        self.loader = loader

        # Identify ourselves to the API via User-Agent.
        self._headers = {"User-Agent": f"{__author__}/ModForge-CLI/{__version__}"}

    def _select_version(self, versions: list[ProjectVersion]) -> ProjectVersion | None:
        """
        Prefer:
        1. Release versions
        2. Matching MC + loader

        Returns the first release matching MC version + loader; otherwise
        the first matching version of any type; otherwise None.
        """
        for v in versions:
            if v.is_release and self.mc_version in v.game_versions and self.loader in v.loaders:
                return v

        for v in versions:
            if self.mc_version in v.game_versions and self.loader in v.loaders:
                return v

        return None

    async def _search_project(self, slug: str, session: aiohttp.ClientSession) -> str | None:
        """Search for a project by slug and return its project_id (or None)."""
        url = self.api.search(
            slug,
            game_versions=[self.mc_version],
            loaders=[self.loader],
        )

        try:
            async with session.get(url) as response:
                data = SearchResult.model_validate_json(await response.text())

            # First mod-type hit supporting the target MC version wins.
            for hit in data.hits:
                if hit.project_type != "mod":
                    continue
                if self.mc_version not in hit.versions:
                    continue
                return hit.project_id
        except Exception as e:
            print(f"Warning: Failed to search for '{slug}': {e}")

        return None

    async def _fetch_versions(
        self, project_id: str, session: aiohttp.ClientSession
    ) -> list[ProjectVersion]:
        """Fetch all versions for a project (empty list on error)."""
        url = self.api.project_versions(project_id)

        try:
            async with session.get(url) as response:
                return ProjectVersionList.validate_json(await response.text())
        except Exception as e:
            print(f"Warning: Failed to fetch versions for '{project_id}': {e}")
            return []

    async def resolve(self, mods: Iterable[str], session: aiohttp.ClientSession) -> set[str]:
        """
        Asynchronously resolve all mod dependencies.

        Args:
            mods: Initial list of mod slugs
            session: Active aiohttp session

        Returns:
            Set of resolved project IDs

        Raises:
            RuntimeError: when a resolved project declares an
                "incompatible" dependency on another project.
        """
        expanded = self.policy.apply(mods)

        resolved: set[str] = set()
        queue: deque[str] = deque()

        search_cache: dict[str, str | None] = {}
        version_cache: dict[str, list[ProjectVersion]] = {}

        # ---- Phase 1: slug -> project_id (parallel) ----
        # expanded is a set, so every slug is unique; no dedup check needed
        # (the previous membership test against a fresh cache was dead code).
        slugs_to_search = list(expanded)
        search_tasks = [self._search_project(slug, session) for slug in slugs_to_search]

        if search_tasks:
            search_results = await asyncio.gather(*search_tasks, return_exceptions=True)

            for slug, result in zip(slugs_to_search, search_results, strict=False):
                if isinstance(result, Exception):
                    print(f"Error searching for '{slug}': {result}")
                    search_cache[slug] = None
                else:
                    search_cache[slug] = result

        # Seed the dependency queue with every project that was found.
        for slug in expanded:
            project_id = search_cache.get(slug)
            if project_id and project_id not in resolved:
                resolved.add(project_id)
                queue.append(project_id)

        # ---- Phase 2: dependency resolution (batched) ----
        BATCH_SIZE = 10  # cap concurrent version fetches per round

        while queue:
            # Process in batches to avoid overwhelming the API.
            batch = [queue.popleft() for _ in range(min(len(queue), BATCH_SIZE))]

            # Fetch versions for the batch in parallel (skip cached entries).
            projects_to_fetch = [pid for pid in batch if pid not in version_cache]
            version_tasks = [self._fetch_versions(pid, session) for pid in projects_to_fetch]

            if version_tasks:
                version_results = await asyncio.gather(*version_tasks, return_exceptions=True)

                for pid, result in zip(projects_to_fetch, version_results, strict=False):
                    if isinstance(result, Exception):
                        print(f"Error fetching versions for '{pid}': {result}")
                        version_cache[pid] = []
                    else:
                        version_cache[pid] = result

            # Walk each project's dependency list.
            for pid in batch:
                versions = version_cache.get(pid, [])
                version = self._select_version(versions)

                if not version:
                    print(f"Warning: No compatible version found for '{pid}'")
                    continue

                for dep in version.dependencies:
                    dep_id = dep.project_id
                    if not dep_id:
                        continue

                    if dep.dependency_type == "incompatible":
                        raise RuntimeError(f"Incompatible dependency detected: {pid} ↔ {dep_id}")

                    if dep.dependency_type in ("required", "optional") and dep_id not in resolved:
                        resolved.add(dep_id)
                        queue.append(dep_id)

        return resolved