viperx 0.9.49__tar.gz → 0.9.72__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {viperx-0.9.49 → viperx-0.9.72}/PKG-INFO +1 -1
- {viperx-0.9.49 → viperx-0.9.72}/pyproject.toml +1 -1
- viperx-0.9.72/src/viperx/config_engine.py +325 -0
- {viperx-0.9.49 → viperx-0.9.72}/src/viperx/core.py +1 -0
- viperx-0.9.72/src/viperx/report.py +15 -0
- {viperx-0.9.49 → viperx-0.9.72}/src/viperx/templates/README.md.j2 +79 -23
- viperx-0.9.72/src/viperx/templates/__init__.py.j2 +8 -0
- {viperx-0.9.49 → viperx-0.9.72}/src/viperx/templates/config.py.j2 +4 -4
- {viperx-0.9.49 → viperx-0.9.72}/src/viperx/templates/config.yaml.j2 +3 -3
- {viperx-0.9.49 → viperx-0.9.72}/src/viperx/templates/pyproject.toml.j2 +2 -2
- viperx-0.9.49/src/viperx/config_engine.py +0 -199
- viperx-0.9.49/src/viperx/templates/__init__.py.j2 +0 -8
- {viperx-0.9.49 → viperx-0.9.72}/README.md +0 -0
- {viperx-0.9.49 → viperx-0.9.72}/src/viperx/__init__.py +0 -0
- {viperx-0.9.49 → viperx-0.9.72}/src/viperx/constants.py +0 -0
- {viperx-0.9.49 → viperx-0.9.72}/src/viperx/licenses.py +0 -0
- {viperx-0.9.49 → viperx-0.9.72}/src/viperx/main.py +0 -0
- {viperx-0.9.49 → viperx-0.9.72}/src/viperx/templates/Base.ipynb.j2 +0 -0
- {viperx-0.9.49 → viperx-0.9.72}/src/viperx/templates/Base_General.ipynb.j2 +0 -0
- {viperx-0.9.49 → viperx-0.9.72}/src/viperx/templates/Base_Kaggle.ipynb.j2 +0 -0
- {viperx-0.9.49 → viperx-0.9.72}/src/viperx/templates/data_loader.py.j2 +0 -0
- {viperx-0.9.49 → viperx-0.9.72}/src/viperx/templates/main.py.j2 +0 -0
- {viperx-0.9.49 → viperx-0.9.72}/src/viperx/templates/viperx_config.yaml.j2 +0 -0
- {viperx-0.9.49 → viperx-0.9.72}/src/viperx/utils.py +0 -0
|
@@ -0,0 +1,325 @@
|
|
|
1
|
+
import yaml
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
from rich.console import Console
|
|
4
|
+
from rich.panel import Panel
|
|
5
|
+
|
|
6
|
+
from viperx.core import ProjectGenerator
|
|
7
|
+
from viperx.constants import DEFAULT_LICENSE, DEFAULT_BUILDER, TYPE_CLASSIC, TYPE_ML, TYPE_DL, FRAMEWORK_PYTORCH
|
|
8
|
+
|
|
9
|
+
console = Console()
|
|
10
|
+
|
|
11
|
+
class ConfigEngine:
|
|
12
|
+
"""
|
|
13
|
+
Orchestrates project creation and updates based on a declarative YAML config.
|
|
14
|
+
Implements the 'Infrastructure as Code' pattern for ViperX.
|
|
15
|
+
"""
|
|
16
|
+
|
|
17
|
+
def __init__(self, config_path: Path, verbose: bool = False):
|
|
18
|
+
self.config_path = config_path
|
|
19
|
+
self.verbose = verbose
|
|
20
|
+
self.config = self._load_config()
|
|
21
|
+
self.root_path = Path.cwd()
|
|
22
|
+
|
|
23
|
+
def _load_config(self) -> dict:
|
|
24
|
+
"""Load and validate the YAML configuration."""
|
|
25
|
+
if not self.config_path.exists():
|
|
26
|
+
console.print(f"[bold red]Error:[/bold red] Config file not found at {self.config_path}")
|
|
27
|
+
raise FileNotFoundError(f"Config file not found: {self.config_path}")
|
|
28
|
+
|
|
29
|
+
with open(self.config_path, "r") as f:
|
|
30
|
+
try:
|
|
31
|
+
data = yaml.safe_load(f)
|
|
32
|
+
except yaml.YAMLError as e:
|
|
33
|
+
console.print(f"[bold red]Error:[/bold red] Invalid YAML format: {e}")
|
|
34
|
+
raise ValueError("Invalid YAML")
|
|
35
|
+
|
|
36
|
+
# Basic Validation
|
|
37
|
+
if "project" not in data or "name" not in data["project"]:
|
|
38
|
+
console.print("[bold red]Error:[/bold red] Config must contain 'project.name'")
|
|
39
|
+
raise ValueError("Missing project.name")
|
|
40
|
+
|
|
41
|
+
return data
|
|
42
|
+
|
|
43
|
+
def apply(self):
|
|
44
|
+
"""Apply the configuration to the current directory."""
|
|
45
|
+
from viperx.report import UpdateReport
|
|
46
|
+
from viperx.utils import sanitize_project_name
|
|
47
|
+
|
|
48
|
+
report = UpdateReport()
|
|
49
|
+
project_conf = self.config.get("project", {})
|
|
50
|
+
settings_conf = self.config.get("settings", {})
|
|
51
|
+
workspace_conf = self.config.get("workspace", {})
|
|
52
|
+
|
|
53
|
+
project_name = project_conf.get("name")
|
|
54
|
+
clean_name = sanitize_project_name(project_name)
|
|
55
|
+
|
|
56
|
+
# Determine Root
|
|
57
|
+
current_root = self.root_path / clean_name
|
|
58
|
+
# Heuristic: Are we already inside?
|
|
59
|
+
if self.root_path.name == project_name or self.root_path.name == clean_name:
|
|
60
|
+
current_root = self.root_path
|
|
61
|
+
|
|
62
|
+
# ---------------------------------------------------------
|
|
63
|
+
# Phase 0: Context Aggregation (PRESERVED LOGIC)
|
|
64
|
+
# ---------------------------------------------------------
|
|
65
|
+
# We assume dependencies logic is required for both generation and validation.
|
|
66
|
+
|
|
67
|
+
root_use_config = settings_conf.get("use_config", True)
|
|
68
|
+
root_use_env = settings_conf.get("use_env", False)
|
|
69
|
+
root_use_tests = settings_conf.get("use_tests", True)
|
|
70
|
+
root_type = settings_conf.get("type", TYPE_CLASSIC)
|
|
71
|
+
root_framework = settings_conf.get("framework", FRAMEWORK_PYTORCH)
|
|
72
|
+
|
|
73
|
+
glob_has_config = root_use_config
|
|
74
|
+
glob_has_env = root_use_env
|
|
75
|
+
glob_is_ml_dl = root_type in [TYPE_ML, TYPE_DL]
|
|
76
|
+
glob_is_dl = root_type == TYPE_DL
|
|
77
|
+
glob_frameworks = {root_framework} if glob_is_dl else set()
|
|
78
|
+
|
|
79
|
+
project_scripts = {project_name: f"{clean_name}.main:main"} # Use clean mapping
|
|
80
|
+
|
|
81
|
+
# List for README generation (Order: Root, then packages)
|
|
82
|
+
packages_list = [{
|
|
83
|
+
"raw_name": project_name,
|
|
84
|
+
"clean_name": clean_name,
|
|
85
|
+
"use_config": root_use_config,
|
|
86
|
+
"use_tests": root_use_tests,
|
|
87
|
+
"use_env": root_use_env
|
|
88
|
+
}]
|
|
89
|
+
|
|
90
|
+
packages = workspace_conf.get("packages", [])
|
|
91
|
+
for pkg in packages:
|
|
92
|
+
# Scripts
|
|
93
|
+
pkg_name = pkg.get("name")
|
|
94
|
+
pkg_name_clean = sanitize_project_name(pkg_name)
|
|
95
|
+
project_scripts[pkg_name] = f"{pkg_name_clean}.main:main"
|
|
96
|
+
|
|
97
|
+
# Dependency Aggregation
|
|
98
|
+
p_config = pkg.get("use_config", settings_conf.get("use_config", True))
|
|
99
|
+
p_env = pkg.get("use_env", settings_conf.get("use_env", False))
|
|
100
|
+
p_tests = pkg.get("use_tests", settings_conf.get("use_tests", True))
|
|
101
|
+
p_type = pkg.get("type", TYPE_CLASSIC)
|
|
102
|
+
p_framework = pkg.get("framework", FRAMEWORK_PYTORCH)
|
|
103
|
+
|
|
104
|
+
if p_config: glob_has_config = True
|
|
105
|
+
if p_env: glob_has_env = True
|
|
106
|
+
if p_type in [TYPE_ML, TYPE_DL]: glob_is_ml_dl = True
|
|
107
|
+
if p_type == TYPE_DL:
|
|
108
|
+
glob_is_dl = True
|
|
109
|
+
glob_frameworks.add(p_framework)
|
|
110
|
+
|
|
111
|
+
packages_list.append({
|
|
112
|
+
"raw_name": pkg_name,
|
|
113
|
+
"clean_name": pkg_name_clean,
|
|
114
|
+
"use_config": p_config,
|
|
115
|
+
"use_tests": p_tests,
|
|
116
|
+
"use_env": p_env
|
|
117
|
+
})
|
|
118
|
+
|
|
119
|
+
dep_context = {
|
|
120
|
+
"has_config": glob_has_config,
|
|
121
|
+
"has_env": glob_has_env,
|
|
122
|
+
"is_ml_dl": glob_is_ml_dl,
|
|
123
|
+
"is_dl": glob_is_dl,
|
|
124
|
+
"frameworks": list(glob_frameworks),
|
|
125
|
+
"packages": packages_list
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
# ---------------------------------------------------------
|
|
129
|
+
# Phase 1: Root Project (Hydration vs Update)
|
|
130
|
+
# ---------------------------------------------------------
|
|
131
|
+
if not (current_root / "pyproject.toml").exists():
|
|
132
|
+
# CASE A: New Project (Hydration)
|
|
133
|
+
if not current_root.exists() and current_root != self.root_path:
|
|
134
|
+
report.added.append(f"Project '{project_name}' (Scaffolding)")
|
|
135
|
+
else:
|
|
136
|
+
report.added.append(f"Project Scaffolding in existing '{current_root.name}'")
|
|
137
|
+
|
|
138
|
+
gen = ProjectGenerator(
|
|
139
|
+
name=project_name, # Raw name
|
|
140
|
+
description=project_conf.get("description", ""),
|
|
141
|
+
type=settings_conf.get("type", TYPE_CLASSIC),
|
|
142
|
+
author=project_conf.get("author", None),
|
|
143
|
+
license=project_conf.get("license", DEFAULT_LICENSE),
|
|
144
|
+
builder=project_conf.get("builder", DEFAULT_BUILDER),
|
|
145
|
+
use_env=settings_conf.get("use_env", False),
|
|
146
|
+
use_config=settings_conf.get("use_config", True),
|
|
147
|
+
use_tests=settings_conf.get("use_tests", True),
|
|
148
|
+
framework=settings_conf.get("framework", FRAMEWORK_PYTORCH),
|
|
149
|
+
scripts=project_scripts,
|
|
150
|
+
dependency_context=dep_context,
|
|
151
|
+
verbose=self.verbose
|
|
152
|
+
)
|
|
153
|
+
# We generate at parent if we are creating subfolder, or current if inside
|
|
154
|
+
target_gen_path = current_root.parent if current_root != self.root_path else self.root_path
|
|
155
|
+
gen.generate(target_gen_path)
|
|
156
|
+
|
|
157
|
+
# Verify creation reference for packages
|
|
158
|
+
if not current_root.exists():
|
|
159
|
+
if (self.root_path / project_name).exists():
|
|
160
|
+
current_root = self.root_path / project_name
|
|
161
|
+
|
|
162
|
+
else:
|
|
163
|
+
# CASE B: Update Existing Project
|
|
164
|
+
self._update_root_metadata(current_root, project_conf, report)
|
|
165
|
+
|
|
166
|
+
# Conflict Checks (Root)
|
|
167
|
+
# Check use_env
|
|
168
|
+
if not root_use_env and (current_root / ".env").exists():
|
|
169
|
+
report.conflicts.append("Root: use_env=False but .env exists")
|
|
170
|
+
pass
|
|
171
|
+
|
|
172
|
+
# ---------------------------------------------------------
|
|
173
|
+
# Phase 2: Workspace Packages (Iterative Sync)
|
|
174
|
+
# ---------------------------------------------------------
|
|
175
|
+
|
|
176
|
+
for pkg in packages:
|
|
177
|
+
pkg_name = pkg.get("name")
|
|
178
|
+
pkg_name_clean = sanitize_project_name(pkg_name)
|
|
179
|
+
|
|
180
|
+
# Approximate check for existing package src directory
|
|
181
|
+
pkg_path = current_root / "src" / pkg_name_clean
|
|
182
|
+
# Also check if user used hyphens in folder name (classic behavior)
|
|
183
|
+
if not pkg_path.exists():
|
|
184
|
+
pkg_path_hyphen = current_root / "src" / pkg_name
|
|
185
|
+
if pkg_path_hyphen.exists():
|
|
186
|
+
pkg_path = pkg_path_hyphen
|
|
187
|
+
|
|
188
|
+
if pkg_path.exists():
|
|
189
|
+
# --- UPDATE CHECK ---
|
|
190
|
+
# Check for REMOVAL of features (Conflict Reporting)
|
|
191
|
+
p_use_env = pkg.get("use_env", settings_conf.get("use_env", False))
|
|
192
|
+
if not p_use_env and (pkg_path / ".env").exists():
|
|
193
|
+
report.conflicts.append(f"Package '{pkg_name}': use_env=False but .env exists")
|
|
194
|
+
|
|
195
|
+
# Check for Metadata updates (Assuming we don't sub-update dependencies often)
|
|
196
|
+
# We skip regeneration to be SAFE.
|
|
197
|
+
pass
|
|
198
|
+
else:
|
|
199
|
+
# --- NEW PACKAGE ---
|
|
200
|
+
report.added.append(f"Package '{pkg_name}'")
|
|
201
|
+
|
|
202
|
+
pkg_gen = ProjectGenerator(
|
|
203
|
+
name=pkg_name,
|
|
204
|
+
description=pkg.get("description", ""),
|
|
205
|
+
type=pkg.get("type", TYPE_CLASSIC),
|
|
206
|
+
author=project_conf.get("author", "Your Name"),
|
|
207
|
+
use_env=pkg.get("use_env", settings_conf.get("use_env", False)),
|
|
208
|
+
use_config=pkg.get("use_config", settings_conf.get("use_config", True)),
|
|
209
|
+
use_readme=pkg.get("use_readme", False),
|
|
210
|
+
use_tests=pkg.get("use_tests", settings_conf.get("use_tests", True)),
|
|
211
|
+
framework=pkg.get("framework", FRAMEWORK_PYTORCH),
|
|
212
|
+
scripts=project_scripts,
|
|
213
|
+
dependency_context=dep_context,
|
|
214
|
+
verbose=self.verbose
|
|
215
|
+
)
|
|
216
|
+
pkg_gen.add_to_workspace(current_root)
|
|
217
|
+
|
|
218
|
+
# Check for Deletions (Packages on disk not in config)
|
|
219
|
+
existing_pkgs = set()
|
|
220
|
+
if (current_root / "src").exists():
|
|
221
|
+
existing_pkgs = {p.name for p in (current_root / "src").iterdir() if p.is_dir()}
|
|
222
|
+
|
|
223
|
+
# We need to map config names to folder names to check existence
|
|
224
|
+
config_folder_names = {p["clean_name"] for p in packages_list if p["raw_name"] != project_name}
|
|
225
|
+
|
|
226
|
+
# Also include raw names if they exist on disk (classic case)
|
|
227
|
+
config_raw_names = {p["raw_name"] for p in packages_list if p["raw_name"] != project_name}
|
|
228
|
+
|
|
229
|
+
for ep in existing_pkgs:
|
|
230
|
+
if ep not in config_folder_names and ep not in config_raw_names:
|
|
231
|
+
report.deletions.append(f"Package '{ep}' found on disk but missing from config.")
|
|
232
|
+
|
|
233
|
+
# ---------------------------------------------------------
|
|
234
|
+
# Phase 3: Config Sync & Reporting
|
|
235
|
+
# ---------------------------------------------------------
|
|
236
|
+
# Sync viperx.yaml
|
|
237
|
+
system_config_path = current_root / "viperx.yaml"
|
|
238
|
+
if self.config_path.absolute() != system_config_path.absolute():
|
|
239
|
+
import shutil
|
|
240
|
+
shutil.copy2(self.config_path, system_config_path)
|
|
241
|
+
|
|
242
|
+
if report.added or report.updated:
|
|
243
|
+
report.manual_checks.append("Review README.md for any necessary updates (e.g. Project Name, Description).")
|
|
244
|
+
|
|
245
|
+
self._print_report(report)
|
|
246
|
+
|
|
247
|
+
def _update_root_metadata(self, root: Path, project_conf: dict, report):
|
|
248
|
+
"""Safely update pyproject.toml metadata."""
|
|
249
|
+
import toml
|
|
250
|
+
pyproject_path = root / "pyproject.toml"
|
|
251
|
+
if not pyproject_path.exists():
|
|
252
|
+
return
|
|
253
|
+
|
|
254
|
+
with open(pyproject_path, "r") as f:
|
|
255
|
+
data = toml.load(f)
|
|
256
|
+
|
|
257
|
+
changed = False
|
|
258
|
+
proj = data.get("project", {})
|
|
259
|
+
|
|
260
|
+
# 1. Description
|
|
261
|
+
new_desc = project_conf.get("description")
|
|
262
|
+
if new_desc and proj.get("description") != new_desc:
|
|
263
|
+
proj["description"] = new_desc
|
|
264
|
+
report.updated.append(f"Root description -> '{new_desc}'")
|
|
265
|
+
changed = True
|
|
266
|
+
|
|
267
|
+
# 2. Author (simplified, assumes list of dicts)
|
|
268
|
+
new_author = project_conf.get("author")
|
|
269
|
+
if new_author:
|
|
270
|
+
authors = proj.get("authors", [])
|
|
271
|
+
if authors and authors[0].get("name") != new_author:
|
|
272
|
+
authors[0]["name"] = new_author
|
|
273
|
+
report.updated.append(f"Root author -> '{new_author}'")
|
|
274
|
+
changed = True
|
|
275
|
+
|
|
276
|
+
# 3. License
|
|
277
|
+
new_license = project_conf.get("license")
|
|
278
|
+
current_lic = proj.get("license", {}).get("text")
|
|
279
|
+
if new_license and current_lic != new_license:
|
|
280
|
+
proj["license"] = {"text": new_license}
|
|
281
|
+
report.updated.append(f"Root license -> '{new_license}'")
|
|
282
|
+
changed = True
|
|
283
|
+
report.manual_checks.append("License type changed. Verify LICENSE file content.")
|
|
284
|
+
|
|
285
|
+
if changed:
|
|
286
|
+
data["project"] = proj
|
|
287
|
+
with open(pyproject_path, "w") as f:
|
|
288
|
+
toml.dump(data, f)
|
|
289
|
+
|
|
290
|
+
def _print_report(self, report):
|
|
291
|
+
from rich.tree import Tree
|
|
292
|
+
|
|
293
|
+
if not report.has_events:
|
|
294
|
+
console.print(Panel("✨ [bold green]Start[/bold green]\nNothing to change. Project is in sync.", border_style="green"))
|
|
295
|
+
return
|
|
296
|
+
|
|
297
|
+
tree = Tree("📝 [bold]Update Report[/bold]")
|
|
298
|
+
|
|
299
|
+
if report.added:
|
|
300
|
+
added_node = tree.add("[green]Added[/green]")
|
|
301
|
+
for item in report.added:
|
|
302
|
+
added_node.add(f"[green]+ {item}[/green]")
|
|
303
|
+
|
|
304
|
+
if report.updated:
|
|
305
|
+
updated_node = tree.add("[blue]Updated (Safe)[/blue]")
|
|
306
|
+
for item in report.updated:
|
|
307
|
+
updated_node.add(f"[blue]~ {item}[/blue]")
|
|
308
|
+
|
|
309
|
+
if report.conflicts:
|
|
310
|
+
con_node = tree.add("[yellow]Conflicts (No Action Taken)[/yellow]")
|
|
311
|
+
for item in report.conflicts:
|
|
312
|
+
con_node.add(f"[yellow]! {item}[/yellow]")
|
|
313
|
+
|
|
314
|
+
if report.deletions:
|
|
315
|
+
del_node = tree.add("[red]Deletions Detected (No Action Taken)[/red]")
|
|
316
|
+
for item in report.deletions:
|
|
317
|
+
del_node.add(f"[red]- {item}[/red]")
|
|
318
|
+
|
|
319
|
+
if report.manual_checks:
|
|
320
|
+
check_node = tree.add("[magenta]Manual Checks Required[/magenta]")
|
|
321
|
+
for item in report.manual_checks:
|
|
322
|
+
check_node.add(f"[magenta]? {item}[/magenta]")
|
|
323
|
+
|
|
324
|
+
console.print(tree)
|
|
325
|
+
console.print("\n[dim]Run completed.[/dim]")
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
from dataclasses import dataclass, field
|
|
2
|
+
from typing import List
|
|
3
|
+
|
|
4
|
+
@dataclass
|
|
5
|
+
class UpdateReport:
|
|
6
|
+
"""Collects events during the update process for final reporting."""
|
|
7
|
+
added: List[str] = field(default_factory=list)
|
|
8
|
+
updated: List[str] = field(default_factory=list)
|
|
9
|
+
conflicts: List[str] = field(default_factory=list)
|
|
10
|
+
deletions: List[str] = field(default_factory=list)
|
|
11
|
+
manual_checks: List[str] = field(default_factory=list)
|
|
12
|
+
|
|
13
|
+
@property
|
|
14
|
+
def has_events(self) -> bool:
|
|
15
|
+
return any([self.added, self.updated, self.conflicts, self.deletions, self.manual_checks])
|
|
@@ -7,31 +7,42 @@
|
|
|
7
7
|
## 🧐 Philosophy & Architecture
|
|
8
8
|
|
|
9
9
|
Values transparency and standard tooling over "black box" magic.
|
|
10
|
-
This project was generated with [ViperX](https://github.com/kpihx/viperx).sh/uv/)**, the extremely fast Python package and project manager written in Rust.
|
|
10
|
+
This project was generated with [ViperX](https://github.com/kpihx/viperx), using **[uv](https://docs.astral.sh/uv/)**, the extremely fast Python package and project manager written in Rust.
|
|
11
11
|
|
|
12
12
|
### Why `uv`?
|
|
13
13
|
Unlike traditional workflows (pip, poetry, venv mixing), `uv` manages the **entire lifecycle**:
|
|
14
14
|
- **Python Version**: It installs and manages the correct Python version for this project automatically.
|
|
15
15
|
- **Dependencies**: Locking is instant.
|
|
16
|
-
{
|
|
16
|
+
{%- if project_type == 'dl' %}
|
|
17
17
|
- **Stack**: {{ framework|title }}
|
|
18
|
-
{
|
|
18
|
+
{%- endif %}
|
|
19
19
|
- **Environment**: Virtual environments are managed internally, you just run `uv run`.
|
|
20
20
|
|
|
21
|
+
{%- if has_config or use_env %}
|
|
21
22
|
### ⚙️ Configuration
|
|
22
23
|
|
|
24
|
+
{%- if has_config %}
|
|
23
25
|
- **Config**: `src/{{ package_name }}/config.yaml` (Loaded automatically)
|
|
24
|
-
{
|
|
26
|
+
{%- endif %}
|
|
27
|
+
{%- if use_env %}
|
|
25
28
|
- **Environment**: `src/{{ package_name }}/.env` (Isolated variables)
|
|
26
|
-
{
|
|
29
|
+
- **Template**: `src/{{ package_name }}/.env.example` (Copy this to `.env`)
|
|
30
|
+
{%- endif %}
|
|
27
31
|
|
|
28
|
-
|
|
32
|
+
The project uses a **Config-in-Package** architecture:
|
|
33
|
+
{%- if has_config %}
|
|
29
34
|
1. `config.yaml` is inside the package.
|
|
30
|
-
|
|
31
|
-
|
|
35
|
+
1. `config.py` loads it safely (even in production wheels).
|
|
36
|
+
{%- endif %}
|
|
37
|
+
{%- if use_env %}
|
|
38
|
+
1. `.env` is isolated within the package source.
|
|
39
|
+
1. `.env.example` serves as a template for new developers.
|
|
40
|
+
{%- endif %}
|
|
41
|
+
{%- endif %}
|
|
32
42
|
|
|
33
43
|
---
|
|
34
44
|
|
|
45
|
+
{%- if not is_subpackage %}
|
|
35
46
|
## 🚀 Getting Started
|
|
36
47
|
|
|
37
48
|
### Prerequisites
|
|
@@ -42,31 +53,29 @@ No need to install Python or create venvs manually.
|
|
|
42
53
|
### Installation
|
|
43
54
|
|
|
44
55
|
```bash
|
|
45
|
-
#
|
|
46
|
-
git clone https://github.com/{{ author_name | lower | replace(" ", "") }}/{{ project_name }}.git
|
|
56
|
+
# Ensure you are in the project directory
|
|
47
57
|
cd {{ project_name }}
|
|
48
58
|
|
|
49
59
|
# Sync dependencies (creates .venv and installs python if needed)
|
|
50
60
|
uv sync
|
|
51
61
|
```
|
|
62
|
+
{%- endif %}
|
|
52
63
|
|
|
53
64
|
## 🧑💻 Usage
|
|
54
65
|
|
|
55
|
-
The entry point depends on your project type.
|
|
56
|
-
|
|
57
66
|
### For Developers (Code)
|
|
58
67
|
|
|
59
68
|
To run the package entry point or scripts:
|
|
60
69
|
|
|
61
70
|
```bash
|
|
62
71
|
# Run the main package
|
|
63
|
-
uv run {{
|
|
72
|
+
uv run {{ project_name }}
|
|
64
73
|
|
|
65
74
|
# Or run a specific script
|
|
66
75
|
uv run python src/{{ package_name }}/main.py
|
|
67
76
|
```
|
|
68
77
|
|
|
69
|
-
{
|
|
78
|
+
{%- if project_type in ['ml', 'dl'] and not is_subpackage %}
|
|
70
79
|
### For Data Scientists (Notebooks)
|
|
71
80
|
|
|
72
81
|
We use `uv` to launch Jupyter, ensuring it sees the local package and config.
|
|
@@ -77,46 +86,93 @@ uv run jupyter notebook
|
|
|
77
86
|
|
|
78
87
|
- Open `notebooks/Base.ipynb`.
|
|
79
88
|
- Note how it imports `config` from the package.
|
|
80
|
-
{% endif %}
|
|
81
89
|
|
|
82
90
|
### ☁️ Cloud (Colab / Kaggle)
|
|
83
91
|
|
|
84
92
|
You can use the code and config from this repository directly in cloud environments without cloning.
|
|
85
93
|
|
|
86
|
-
**Step 1: Install directly from
|
|
94
|
+
**Step 1: Install directly from Git**
|
|
87
95
|
```python
|
|
88
|
-
!pip install
|
|
96
|
+
!pip install url_to_repo.git
|
|
89
97
|
```
|
|
90
98
|
|
|
91
99
|
**Step 2: Use the unified config**
|
|
92
100
|
```python
|
|
101
|
+
{%- if project_type == 'classic' %}
|
|
102
|
+
from {{ package_name }} import SETTINGS
|
|
103
|
+
{%- else %}
|
|
93
104
|
from {{ package_name }} import get_dataset_path, SETTINGS
|
|
94
105
|
import kagglehub as kh
|
|
106
|
+
{%- endif %}
|
|
95
107
|
|
|
96
108
|
# Transparency: You can inspect what was loaded
|
|
97
|
-
print(f"Loaded config for: {SETTINGS
|
|
109
|
+
print(f"Loaded config for: {SETTINGS.get('project_name', 'Unknown')}")
|
|
98
110
|
|
|
111
|
+
{%- if project_type != 'classic' %}
|
|
99
112
|
# Download datasets defined in config.yaml
|
|
100
113
|
# The key 'titanic' maps to 'heptapod/titanic' in the yaml
|
|
101
|
-
|
|
114
|
+
if 'datasets' in SETTINGS and 'titanic' in SETTINGS['datasets']:
|
|
115
|
+
path = kh.dataset_download(SETTINGS['datasets']['titanic'])
|
|
116
|
+
{%- endif %}
|
|
102
117
|
```
|
|
118
|
+
{%- endif %}
|
|
103
119
|
|
|
104
120
|
## 🔧 Internal Structure
|
|
105
121
|
|
|
106
122
|
```text
|
|
123
|
+
{%- if is_subpackage %}
|
|
124
|
+
{{ package_name }}/
|
|
125
|
+
├── __init__.py
|
|
126
|
+
{%- if has_config %}
|
|
127
|
+
├── config.yaml # EDIT THIS for project settings
|
|
128
|
+
├── config.py # Code that loads the yaml above
|
|
129
|
+
{%- endif %}
|
|
130
|
+
{%- if use_env %}
|
|
131
|
+
├── .env # Secrets (Ignored by git)
|
|
132
|
+
├── .env.example # Template for secrets
|
|
133
|
+
{%- endif %}
|
|
134
|
+
{%- if use_tests %}
|
|
135
|
+
└── tests/ # Unit tests
|
|
136
|
+
{%- endif %}
|
|
137
|
+
{%- else %}
|
|
107
138
|
{{ project_name }}/
|
|
108
139
|
├── pyproject.toml # The Single Source of Truth (Dependencies, Metadata)
|
|
109
140
|
├── uv.lock # Exact versions lockfile
|
|
110
141
|
├── .python-version # Pinned Python version
|
|
111
142
|
├── src/
|
|
143
|
+
{%- if packages %}
|
|
144
|
+
{%- for pkg in packages %}
|
|
145
|
+
│ └── {{ pkg.clean_name }}/
|
|
146
|
+
│ ├── __init__.py
|
|
147
|
+
{%- if pkg.use_config %}
|
|
148
|
+
│ ├── config.yaml # EDIT THIS for project settings
|
|
149
|
+
│ ├── config.py # Code that loads the yaml above
|
|
150
|
+
{%- endif %}
|
|
151
|
+
{%- if pkg.use_env %}
|
|
152
|
+
│ ├── .env # Secrets (Ignored by git)
|
|
153
|
+
│ ├── .env.example # Template for secrets
|
|
154
|
+
{%- endif %}
|
|
155
|
+
{%- if pkg.use_tests %}
|
|
156
|
+
│ └── tests/ # Unit tests
|
|
157
|
+
{%- endif %}
|
|
158
|
+
{%- endfor %}
|
|
159
|
+
{%- else %}
|
|
112
160
|
│ └── {{ package_name }}/
|
|
113
161
|
│ ├── __init__.py
|
|
162
|
+
{%- if has_config %}
|
|
114
163
|
│ ├── config.yaml # EDIT THIS for project settings
|
|
115
164
|
│ ├── config.py # Code that loads the yaml above
|
|
165
|
+
{%- endif %}
|
|
166
|
+
{%- if use_env %}
|
|
167
|
+
│ ├── .env # Secrets (Ignored by git)
|
|
168
|
+
│ ├── .env.example # Template for secrets
|
|
169
|
+
{%- endif %}
|
|
170
|
+
{%- if use_tests %}
|
|
116
171
|
│ └── tests/ # Unit tests
|
|
117
|
-
{
|
|
172
|
+
{%- endif %}
|
|
173
|
+
{%- endif %}
|
|
174
|
+
{%- if project_type in ['ml', 'dl'] %}
|
|
118
175
|
└── notebooks/ # Experimentation (Jupyter)
|
|
119
|
-
{
|
|
176
|
+
{%- endif %}
|
|
177
|
+
{%- endif %}
|
|
120
178
|
```
|
|
121
|
-
|
|
122
|
-
|
|
@@ -2,12 +2,12 @@ import yaml
|
|
|
2
2
|
import importlib.resources
|
|
3
3
|
from pathlib import Path
|
|
4
4
|
from typing import Any, Dict
|
|
5
|
-
{
|
|
5
|
+
{%- if use_env %}
|
|
6
6
|
from dotenv import load_dotenv
|
|
7
7
|
|
|
8
8
|
# Load Environment Variables from the isolated .env file in this package
|
|
9
9
|
load_dotenv(Path(__file__).parent / ".env")
|
|
10
|
-
{
|
|
10
|
+
{%- endif %}
|
|
11
11
|
|
|
12
12
|
# Load configuration safely whether installed or local
|
|
13
13
|
try:
|
|
@@ -28,7 +28,7 @@ def get_config(key: str, default: Any = None) -> Any:
|
|
|
28
28
|
"""Retrieve a value from the globally loaded settings."""
|
|
29
29
|
return SETTINGS.get(key, default)
|
|
30
30
|
|
|
31
|
-
{
|
|
31
|
+
{%- if project_type != 'classic' %}
|
|
32
32
|
def get_dataset_path(notebook_name: str, key: str = "datasets", extension: str = ".csv") -> str | None:
|
|
33
33
|
"""
|
|
34
34
|
Helper for notebook data loading.
|
|
@@ -39,4 +39,4 @@ def get_dataset_path(notebook_name: str, key: str = "datasets", extension: str =
|
|
|
39
39
|
if not dataset_name:
|
|
40
40
|
return None
|
|
41
41
|
return f"{dataset_name}{extension}"
|
|
42
|
-
{
|
|
42
|
+
{%- endif %}
|
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
# Global Project Configuration
|
|
2
2
|
project_name: "{{ project_name }}"
|
|
3
3
|
|
|
4
|
-
{
|
|
4
|
+
{%- if project_type in ['ml', 'dl'] %}
|
|
5
5
|
data_urls:
|
|
6
6
|
iris: "https://raw.githubusercontent.com/mwaskom/seaborn-data/master/iris.csv"
|
|
7
7
|
titanic: "https://raw.githubusercontent.com/datasciencedojo/datasets/master/titanic.csv"
|
|
@@ -11,7 +11,7 @@ datasets:
|
|
|
11
11
|
Base_Kaggle: "titanic"
|
|
12
12
|
# Usage: kh.dataset_download(SETTINGS['datasets']['titanic'])
|
|
13
13
|
titanic: "heptapod/titanic"
|
|
14
|
-
{
|
|
14
|
+
{%- else %}
|
|
15
15
|
# Configuration file for {{ package_name }}
|
|
16
16
|
# Add your settings here.
|
|
17
|
-
{
|
|
17
|
+
{%- endif %}
|
|
@@ -2,9 +2,9 @@
|
|
|
2
2
|
name = "{{ project_name }}"
|
|
3
3
|
version = "{{ version }}"
|
|
4
4
|
description = "{{ description }}"
|
|
5
|
-
{
|
|
5
|
+
{%- if use_readme %}
|
|
6
6
|
readme = "README.md"
|
|
7
|
-
{
|
|
7
|
+
{%- endif %}
|
|
8
8
|
requires-python = ">={{ python_version }}"
|
|
9
9
|
authors = [
|
|
10
10
|
{ name = "{{ author_name }}", email = "{{ author_email }}" }
|
|
@@ -1,199 +0,0 @@
|
|
|
1
|
-
import yaml
|
|
2
|
-
from pathlib import Path
|
|
3
|
-
from rich.console import Console
|
|
4
|
-
from rich.panel import Panel
|
|
5
|
-
|
|
6
|
-
from viperx.core import ProjectGenerator
|
|
7
|
-
from viperx.constants import DEFAULT_LICENSE, DEFAULT_BUILDER, TYPE_CLASSIC, TYPE_ML, TYPE_DL, FRAMEWORK_PYTORCH
|
|
8
|
-
|
|
9
|
-
console = Console()
|
|
10
|
-
|
|
11
|
-
class ConfigEngine:
    """
    Orchestrates project creation and updates based on a declarative YAML config.
    Implements the 'Infrastructure as Code' pattern for ViperX.

    The engine reads a `viperx.yaml`-style file, decides whether the target
    project must be created ("hydrated") or synced, delegates generation to
    ``ProjectGenerator``, and then processes any declared workspace packages.
    """

    def __init__(self, config_path: Path, verbose: bool = False) -> None:
        """Initialize the engine.

        Args:
            config_path: Path to the declarative YAML configuration file.
            verbose: When True, emit extra debug output to the console.

        Raises:
            FileNotFoundError: If ``config_path`` does not exist.
            ValueError: If the YAML is malformed or missing ``project.name``.
        """
        self.config_path = config_path
        self.verbose = verbose
        # Loaded eagerly so construction fails fast on a bad config.
        self.config = self._load_config()
        # All relative path resolution is anchored at the invocation directory.
        self.root_path = Path.cwd()

    def _load_config(self) -> dict:
        """Load and validate the YAML configuration.

        Returns:
            The parsed configuration mapping.

        Raises:
            FileNotFoundError: If the config file is missing.
            ValueError: If the YAML is invalid, empty, or lacks ``project.name``.
        """
        if not self.config_path.exists():
            console.print(f"[bold red]Error:[/bold red] Config file not found at {self.config_path}")
            raise FileNotFoundError(f"Config file not found: {self.config_path}")

        # Explicit UTF-8: config files may contain non-ASCII (emoji, names)
        # and must not depend on the platform's default encoding.
        with open(self.config_path, "r", encoding="utf-8") as f:
            try:
                data = yaml.safe_load(f)
            except yaml.YAMLError as e:
                console.print(f"[bold red]Error:[/bold red] Invalid YAML format: {e}")
                raise ValueError("Invalid YAML") from e

        # Basic Validation.
        # safe_load returns None for an empty document; guard against that
        # (and non-mapping documents) so we raise a clean ValueError instead
        # of a TypeError on the membership test.
        project = data.get("project") if isinstance(data, dict) else None
        if not isinstance(project, dict) or "name" not in project:
            console.print("[bold red]Error:[/bold red] Config must contain 'project.name'")
            raise ValueError("Missing project.name")

        return data

    def apply(self) -> None:
        """Apply the configuration to the current directory.

        Side effects: creates/updates project files on disk via
        ``ProjectGenerator``, copies the config into the project root as
        ``viperx.yaml``, and prints progress panels to the console.
        """
        project_conf = self.config.get("project", {})
        settings_conf = self.config.get("settings", {})
        workspace_conf = self.config.get("workspace", {})

        project_name = project_conf.get("name")

        # STRICT NAMING: Always calculate the expected root path using sanitized name.
        # Imported locally — presumably to avoid a circular import; confirm before hoisting.
        from viperx.utils import sanitize_project_name
        clean_name = sanitize_project_name(project_name)

        # Default assumption: current_root is the target directory (folder with underscores).
        current_root = self.root_path / clean_name
        target_dir = current_root

        # 1. Root Project Handling
        # Heuristic: Are we already in a folder matching the raw name OR sanitized name?
        # e.g. inside test_classic/
        if self.root_path.name == project_name or self.root_path.name == clean_name:
            # We are inside the project folder.
            current_root = self.root_path

            if not (self.root_path / "pyproject.toml").exists():
                # Folder exists but was never initialized: generate the full
                # scaffold into the parent so it lands in this directory.
                console.print(Panel(f"⚠️ [bold yellow]Current directory matches name but is not initialized. Hydrating:[/bold yellow] {project_name}", border_style="yellow"))
                gen = ProjectGenerator(
                    name=project_name,
                    description=project_conf.get("description", ""),
                    type=settings_conf.get("type", TYPE_CLASSIC),
                    author=project_conf.get("author", None),
                    license=project_conf.get("license", DEFAULT_LICENSE),
                    builder=project_conf.get("builder", DEFAULT_BUILDER),
                    use_env=settings_conf.get("use_env", False),
                    use_config=settings_conf.get("use_config", True),
                    use_tests=settings_conf.get("use_tests", True),
                    framework=settings_conf.get("framework", FRAMEWORK_PYTORCH),
                    scripts={project_name: f"{clean_name}.main:main"},
                    verbose=self.verbose
                )
                gen.generate(self.root_path.parent)
            else:
                console.print(Panel(f"♻️ [bold blue]Syncing Project:[/bold blue] {project_name}", border_style="blue"))

        else:
            # We are outside the project folder;
            # target_dir (clean) is already set as current_root default.

            if target_dir.exists() and (target_dir / "pyproject.toml").exists():
                console.print(Panel(f"♻️ [bold blue]Updating Existing Project:[/bold blue] {project_name} ({target_dir.name})", border_style="blue"))
            else:
                if target_dir.exists():
                    console.print(Panel(f"⚠️ [bold yellow]Directory exists but not initialized. Hydrating:[/bold yellow] {project_name}", border_style="yellow"))

        # Prepare Scripts & Dependency Context
        packages = workspace_conf.get("packages", [])

        # --- Aggregate Global Dependencies ---
        # The root pyproject must carry the union of every package's needs,
        # so we fold each package's flags into the global ones.
        root_use_config = settings_conf.get("use_config", True)
        root_use_env = settings_conf.get("use_env", False)
        root_type = settings_conf.get("type", TYPE_CLASSIC)
        root_framework = settings_conf.get("framework", FRAMEWORK_PYTORCH)

        glob_has_config = root_use_config
        glob_has_env = root_use_env
        glob_is_ml_dl = root_type in [TYPE_ML, TYPE_DL]
        glob_is_dl = root_type == TYPE_DL
        # DL frameworks only matter when at least one DL component exists.
        glob_frameworks = {root_framework} if glob_is_dl else set()

        project_scripts = {project_name: f"{clean_name}.main:main"}  # Use clean mapping

        for pkg in packages:
            # Scripts: each workspace package gets its own console entry point.
            pkg_name = pkg.get("name")
            pkg_name_clean = sanitize_project_name(pkg_name)
            project_scripts[pkg_name] = f"{pkg_name_clean}.main:main"

            # Dependency Aggregation: package flags fall back to root settings.
            p_config = pkg.get("use_config", settings_conf.get("use_config", True))
            p_env = pkg.get("use_env", settings_conf.get("use_env", False))
            p_type = pkg.get("type", TYPE_CLASSIC)
            p_framework = pkg.get("framework", FRAMEWORK_PYTORCH)

            if p_config: glob_has_config = True
            if p_env: glob_has_env = True
            if p_type in [TYPE_ML, TYPE_DL]: glob_is_ml_dl = True
            if p_type == TYPE_DL:
                glob_is_dl = True
                glob_frameworks.add(p_framework)

        dep_context = {
            "has_config": glob_has_config,
            "has_env": glob_has_env,
            "is_ml_dl": glob_is_ml_dl,
            "is_dl": glob_is_dl,
            "frameworks": list(glob_frameworks)
        }

        gen = ProjectGenerator(
            name=project_name,  # Raw name
            description=project_conf.get("description", ""),
            type=settings_conf.get("type", TYPE_CLASSIC),
            author=project_conf.get("author", None),
            license=project_conf.get("license", DEFAULT_LICENSE),
            builder=project_conf.get("builder", DEFAULT_BUILDER),
            use_env=settings_conf.get("use_env", False),
            use_config=settings_conf.get("use_config", True),
            use_tests=settings_conf.get("use_tests", True),
            framework=settings_conf.get("framework", FRAMEWORK_PYTORCH),
            scripts=project_scripts,
            dependency_context=dep_context,
            verbose=self.verbose
        )
        gen.generate(self.root_path)

        # Verify creation: the generator may have used the raw (unsanitized)
        # name for the folder, so fall back to it if the clean path is absent.
        if not current_root.exists():
            if (self.root_path / project_name).exists():
                current_root = self.root_path / project_name

        if self.verbose:
            console.print(f"[debug] Project Root resolves to: {current_root}")

        # 2. Copy Config to Root (Source of Truth)
        # Only if we aren't reading the one already there.
        system_config_path = current_root / "viperx.yaml"
        if self.config_path.absolute() != system_config_path.absolute():
            import shutil
            shutil.copy2(self.config_path, system_config_path)
            console.print(f"[dim]Saved configuration to {system_config_path.name}[/dim]")

        # 3. Handle Workspace Packages
        # ``packages`` was already read from workspace_conf above.
        if packages:
            console.print(f"\n📦 [bold]Processing {len(packages)} workspace packages...[/bold]")

            for pkg in packages:
                pkg_name = pkg.get("name")

                # We instantiate a generator for this package.
                pkg_gen = ProjectGenerator(
                    name=pkg_name,
                    description=pkg.get("description", ""),
                    type=pkg.get("type", TYPE_CLASSIC),
                    author=project_conf.get("author", "Your Name"),  # Inherit author
                    use_env=pkg.get("use_env", settings_conf.get("use_env", False)),  # Inherit settings or default False
                    use_config=pkg.get("use_config", settings_conf.get("use_config", True)),  # Inherit or default True
                    use_readme=pkg.get("use_readme", False),
                    use_tests=pkg.get("use_tests", settings_conf.get("use_tests", True)),
                    framework=pkg.get("framework", FRAMEWORK_PYTORCH),
                    verbose=self.verbose
                )

                # Check if package seems to exist (ProjectGenerator handles upgrade logic too).
                pkg_gen.add_to_workspace(current_root)

        console.print(Panel(f"✨ [bold green]Configuration Applied Successfully![/bold green]\nProject is up to date with {self.config_path.name}", border_style="green"))
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|