viperx 0.9.5__tar.gz → 0.9.72__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {viperx-0.9.5 → viperx-0.9.72}/PKG-INFO +3 -3
- {viperx-0.9.5 → viperx-0.9.72}/README.md +2 -2
- {viperx-0.9.5 → viperx-0.9.72}/pyproject.toml +1 -1
- viperx-0.9.72/src/viperx/config_engine.py +325 -0
- {viperx-0.9.5 → viperx-0.9.72}/src/viperx/constants.py +5 -0
- {viperx-0.9.5 → viperx-0.9.72}/src/viperx/core.py +136 -107
- {viperx-0.9.5 → viperx-0.9.72}/src/viperx/main.py +78 -22
- viperx-0.9.72/src/viperx/report.py +15 -0
- {viperx-0.9.5 → viperx-0.9.72}/src/viperx/templates/README.md.j2 +79 -23
- viperx-0.9.72/src/viperx/templates/__init__.py.j2 +8 -0
- {viperx-0.9.5 → viperx-0.9.72}/src/viperx/templates/config.py.j2 +4 -2
- {viperx-0.9.5 → viperx-0.9.72}/src/viperx/templates/config.yaml.j2 +6 -3
- viperx-0.9.72/src/viperx/templates/main.py.j2 +13 -0
- {viperx-0.9.5 → viperx-0.9.72}/src/viperx/templates/pyproject.toml.j2 +25 -18
- {viperx-0.9.5 → viperx-0.9.72}/src/viperx/templates/viperx_config.yaml.j2 +8 -11
- viperx-0.9.72/src/viperx/utils.py +78 -0
- viperx-0.9.5/src/viperx/config_engine.py +0 -141
- viperx-0.9.5/src/viperx/templates/__init__.py.j2 +0 -8
- viperx-0.9.5/src/viperx/templates/main.py.j2 +0 -13
- viperx-0.9.5/src/viperx/utils.py +0 -47
- {viperx-0.9.5 → viperx-0.9.72}/src/viperx/__init__.py +0 -0
- {viperx-0.9.5 → viperx-0.9.72}/src/viperx/licenses.py +0 -0
- {viperx-0.9.5 → viperx-0.9.72}/src/viperx/templates/Base.ipynb.j2 +0 -0
- {viperx-0.9.5 → viperx-0.9.72}/src/viperx/templates/Base_General.ipynb.j2 +0 -0
- {viperx-0.9.5 → viperx-0.9.72}/src/viperx/templates/Base_Kaggle.ipynb.j2 +0 -0
- {viperx-0.9.5 → viperx-0.9.72}/src/viperx/templates/data_loader.py.j2 +0 -0

{viperx-0.9.5 → viperx-0.9.72}/PKG-INFO

````diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: viperx
-Version: 0.9.5
+Version: 0.9.72
 Summary: Professional Python Project Initializer with uv, ml/dl support, and embedded config.
 Keywords: python,project-template,uv,data-science,machine-learning
 Author: Ivann KAMDEM
@@ -64,7 +64,7 @@ viperx init -n deep-vision -t dl --framework pytorch
 viperx init -n deep-vision -t dl --framework pytorch
 
 # ✨ Declarative Config (Infrastructure as Code)
-viperx config
+viperx config get # Generate template
 viperx init -c viperx.yaml # Apply config
 ```
 
@@ -174,7 +174,7 @@ viperx init -c viperx.yaml
 Manage your project infrastructure using a YAML file.
 
 ```bash
-viperx config
+viperx config get
 ```
 Generates a `viperx.yaml` template in the current directory.
 
````

{viperx-0.9.5 → viperx-0.9.72}/README.md

````diff
@@ -45,7 +45,7 @@ viperx init -n deep-vision -t dl --framework pytorch
 viperx init -n deep-vision -t dl --framework pytorch
 
 # ✨ Declarative Config (Infrastructure as Code)
-viperx config
+viperx config get # Generate template
 viperx init -c viperx.yaml # Apply config
 ```
 
@@ -155,7 +155,7 @@ viperx init -c viperx.yaml
 Manage your project infrastructure using a YAML file.
 
 ```bash
-viperx config
+viperx config get
 ```
 Generates a `viperx.yaml` template in the current directory.
 
````

viperx-0.9.72/src/viperx/config_engine.py (new file, +325 lines)

```python
import yaml
from pathlib import Path
from rich.console import Console
from rich.panel import Panel

from viperx.core import ProjectGenerator
from viperx.constants import DEFAULT_LICENSE, DEFAULT_BUILDER, TYPE_CLASSIC, TYPE_ML, TYPE_DL, FRAMEWORK_PYTORCH

console = Console()

class ConfigEngine:
    """
    Orchestrates project creation and updates based on a declarative YAML config.
    Implements the 'Infrastructure as Code' pattern for ViperX.
    """

    def __init__(self, config_path: Path, verbose: bool = False):
        self.config_path = config_path
        self.verbose = verbose
        self.config = self._load_config()
        self.root_path = Path.cwd()

    def _load_config(self) -> dict:
        """Load and validate the YAML configuration."""
        if not self.config_path.exists():
            console.print(f"[bold red]Error:[/bold red] Config file not found at {self.config_path}")
            raise FileNotFoundError(f"Config file not found: {self.config_path}")

        with open(self.config_path, "r") as f:
            try:
                data = yaml.safe_load(f)
            except yaml.YAMLError as e:
                console.print(f"[bold red]Error:[/bold red] Invalid YAML format: {e}")
                raise ValueError("Invalid YAML")

        # Basic Validation
        if "project" not in data or "name" not in data["project"]:
            console.print("[bold red]Error:[/bold red] Config must contain 'project.name'")
            raise ValueError("Missing project.name")

        return data

    def apply(self):
        """Apply the configuration to the current directory."""
        from viperx.report import UpdateReport
        from viperx.utils import sanitize_project_name

        report = UpdateReport()
        project_conf = self.config.get("project", {})
        settings_conf = self.config.get("settings", {})
        workspace_conf = self.config.get("workspace", {})

        project_name = project_conf.get("name")
        clean_name = sanitize_project_name(project_name)

        # Determine Root
        current_root = self.root_path / clean_name
        # Heuristic: Are we already inside?
        if self.root_path.name == project_name or self.root_path.name == clean_name:
            current_root = self.root_path

        # ---------------------------------------------------------
        # Phase 0: Context Aggregation (PRESERVED LOGIC)
        # ---------------------------------------------------------
        # We assume dependencies logic is required for both generation and validation.

        root_use_config = settings_conf.get("use_config", True)
        root_use_env = settings_conf.get("use_env", False)
        root_use_tests = settings_conf.get("use_tests", True)
        root_type = settings_conf.get("type", TYPE_CLASSIC)
        root_framework = settings_conf.get("framework", FRAMEWORK_PYTORCH)

        glob_has_config = root_use_config
        glob_has_env = root_use_env
        glob_is_ml_dl = root_type in [TYPE_ML, TYPE_DL]
        glob_is_dl = root_type == TYPE_DL
        glob_frameworks = {root_framework} if glob_is_dl else set()

        project_scripts = {project_name: f"{clean_name}.main:main"} # Use clean mapping

        # List for README generation (Order: Root, then packages)
        packages_list = [{
            "raw_name": project_name,
            "clean_name": clean_name,
            "use_config": root_use_config,
            "use_tests": root_use_tests,
            "use_env": root_use_env
        }]

        packages = workspace_conf.get("packages", [])
        for pkg in packages:
            # Scripts
            pkg_name = pkg.get("name")
            pkg_name_clean = sanitize_project_name(pkg_name)
            project_scripts[pkg_name] = f"{pkg_name_clean}.main:main"

            # Dependency Aggregation
            p_config = pkg.get("use_config", settings_conf.get("use_config", True))
            p_env = pkg.get("use_env", settings_conf.get("use_env", False))
            p_tests = pkg.get("use_tests", settings_conf.get("use_tests", True))
            p_type = pkg.get("type", TYPE_CLASSIC)
            p_framework = pkg.get("framework", FRAMEWORK_PYTORCH)

            if p_config: glob_has_config = True
            if p_env: glob_has_env = True
            if p_type in [TYPE_ML, TYPE_DL]: glob_is_ml_dl = True
            if p_type == TYPE_DL:
                glob_is_dl = True
                glob_frameworks.add(p_framework)

            packages_list.append({
                "raw_name": pkg_name,
                "clean_name": pkg_name_clean,
                "use_config": p_config,
                "use_tests": p_tests,
                "use_env": p_env
            })

        dep_context = {
            "has_config": glob_has_config,
            "has_env": glob_has_env,
            "is_ml_dl": glob_is_ml_dl,
            "is_dl": glob_is_dl,
            "frameworks": list(glob_frameworks),
            "packages": packages_list
        }

        # ---------------------------------------------------------
        # Phase 1: Root Project (Hydration vs Update)
        # ---------------------------------------------------------
        if not (current_root / "pyproject.toml").exists():
            # CASE A: New Project (Hydration)
            if not current_root.exists() and current_root != self.root_path:
                report.added.append(f"Project '{project_name}' (Scaffolding)")
            else:
                report.added.append(f"Project Scaffolding in existing '{current_root.name}'")

            gen = ProjectGenerator(
                name=project_name, # Raw name
                description=project_conf.get("description", ""),
                type=settings_conf.get("type", TYPE_CLASSIC),
                author=project_conf.get("author", None),
                license=project_conf.get("license", DEFAULT_LICENSE),
                builder=project_conf.get("builder", DEFAULT_BUILDER),
                use_env=settings_conf.get("use_env", False),
                use_config=settings_conf.get("use_config", True),
                use_tests=settings_conf.get("use_tests", True),
                framework=settings_conf.get("framework", FRAMEWORK_PYTORCH),
                scripts=project_scripts,
                dependency_context=dep_context,
                verbose=self.verbose
            )
            # We generate at parent if we are creating subfolder, or current if inside
            target_gen_path = current_root.parent if current_root != self.root_path else self.root_path
            gen.generate(target_gen_path)

            # Verify creation reference for packages
            if not current_root.exists():
                if (self.root_path / project_name).exists():
                    current_root = self.root_path / project_name

        else:
            # CASE B: Update Existing Project
            self._update_root_metadata(current_root, project_conf, report)

            # Conflict Checks (Root)
            # Check use_env
            if not root_use_env and (current_root / ".env").exists():
                report.conflicts.append("Root: use_env=False but .env exists")
                pass

        # ---------------------------------------------------------
        # Phase 2: Workspace Packages (Iterative Sync)
        # ---------------------------------------------------------

        for pkg in packages:
            pkg_name = pkg.get("name")
            pkg_name_clean = sanitize_project_name(pkg_name)

            # Approximate check for existing package src directory
            pkg_path = current_root / "src" / pkg_name_clean
            # Also check if user used hyphens in folder name (classic behavior)
            if not pkg_path.exists():
                pkg_path_hyphen = current_root / "src" / pkg_name
                if pkg_path_hyphen.exists():
                    pkg_path = pkg_path_hyphen

            if pkg_path.exists():
                # --- UPDATE CHECK ---
                # Check for REMOVAL of features (Conflict Reporting)
                p_use_env = pkg.get("use_env", settings_conf.get("use_env", False))
                if not p_use_env and (pkg_path / ".env").exists():
                    report.conflicts.append(f"Package '{pkg_name}': use_env=False but .env exists")

                # Check for Metadata updates (Assuming we don't sub-update dependencies often)
                # We skip regeneration to be SAFE.
                pass
            else:
                # --- NEW PACKAGE ---
                report.added.append(f"Package '{pkg_name}'")

                pkg_gen = ProjectGenerator(
                    name=pkg_name,
                    description=pkg.get("description", ""),
                    type=pkg.get("type", TYPE_CLASSIC),
                    author=project_conf.get("author", "Your Name"),
                    use_env=pkg.get("use_env", settings_conf.get("use_env", False)),
                    use_config=pkg.get("use_config", settings_conf.get("use_config", True)),
                    use_readme=pkg.get("use_readme", False),
                    use_tests=pkg.get("use_tests", settings_conf.get("use_tests", True)),
                    framework=pkg.get("framework", FRAMEWORK_PYTORCH),
                    scripts=project_scripts,
                    dependency_context=dep_context,
                    verbose=self.verbose
                )
                pkg_gen.add_to_workspace(current_root)

        # Check for Deletions (Packages on disk not in config)
        existing_pkgs = set()
        if (current_root / "src").exists():
            existing_pkgs = {p.name for p in (current_root / "src").iterdir() if p.is_dir()}

        # We need to map config names to folder names to check existence
        config_folder_names = {p["clean_name"] for p in packages_list if p["raw_name"] != project_name}

        # Also include raw names if they exist on disk (classic case)
        config_raw_names = {p["raw_name"] for p in packages_list if p["raw_name"] != project_name}

        for ep in existing_pkgs:
            if ep not in config_folder_names and ep not in config_raw_names:
                report.deletions.append(f"Package '{ep}' found on disk but missing from config.")

        # ---------------------------------------------------------
        # Phase 3: Config Sync & Reporting
        # ---------------------------------------------------------
        # Sync viperx.yaml
        system_config_path = current_root / "viperx.yaml"
        if self.config_path.absolute() != system_config_path.absolute():
            import shutil
            shutil.copy2(self.config_path, system_config_path)

        if report.added or report.updated:
            report.manual_checks.append("Review README.md for any necessary updates (e.g. Project Name, Description).")

        self._print_report(report)

    def _update_root_metadata(self, root: Path, project_conf: dict, report):
        """Safely update pyproject.toml metadata."""
        import toml
        pyproject_path = root / "pyproject.toml"
        if not pyproject_path.exists():
            return

        with open(pyproject_path, "r") as f:
            data = toml.load(f)

        changed = False
        proj = data.get("project", {})

        # 1. Description
        new_desc = project_conf.get("description")
        if new_desc and proj.get("description") != new_desc:
            proj["description"] = new_desc
            report.updated.append(f"Root description -> '{new_desc}'")
            changed = True

        # 2. Author (simplified, assumes list of dicts)
        new_author = project_conf.get("author")
        if new_author:
            authors = proj.get("authors", [])
            if authors and authors[0].get("name") != new_author:
                authors[0]["name"] = new_author
                report.updated.append(f"Root author -> '{new_author}'")
                changed = True

        # 3. License
        new_license = project_conf.get("license")
        current_lic = proj.get("license", {}).get("text")
        if new_license and current_lic != new_license:
            proj["license"] = {"text": new_license}
            report.updated.append(f"Root license -> '{new_license}'")
            changed = True
            report.manual_checks.append("License type changed. Verify LICENSE file content.")

        if changed:
            data["project"] = proj
            with open(pyproject_path, "w") as f:
                toml.dump(data, f)

    def _print_report(self, report):
        from rich.tree import Tree

        if not report.has_events:
            console.print(Panel("✨ [bold green]Start[/bold green]\nNothing to change. Project is in sync.", border_style="green"))
            return

        tree = Tree("📝 [bold]Update Report[/bold]")

        if report.added:
            added_node = tree.add("[green]Added[/green]")
            for item in report.added:
                added_node.add(f"[green]+ {item}[/green]")

        if report.updated:
            updated_node = tree.add("[blue]Updated (Safe)[/blue]")
            for item in report.updated:
                updated_node.add(f"[blue]~ {item}[/blue]")

        if report.conflicts:
            con_node = tree.add("[yellow]Conflicts (No Action Taken)[/yellow]")
            for item in report.conflicts:
                con_node.add(f"[yellow]! {item}[/yellow]")

        if report.deletions:
            del_node = tree.add("[red]Deletions Detected (No Action Taken)[/red]")
            for item in report.deletions:
                del_node.add(f"[red]- {item}[/red]")

        if report.manual_checks:
            check_node = tree.add("[magenta]Manual Checks Required[/magenta]")
            for item in report.manual_checks:
                check_node.add(f"[magenta]? {item}[/magenta]")

        console.print(tree)
        console.print("\n[dim]Run completed.[/dim]")
```
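
The new `ConfigEngine` is driven entirely by the shape of `viperx.yaml`: a required `project` block plus optional `settings` and `workspace.packages`. The sketch below (not part of the package) drives it programmatically instead of running `viperx init -c viperx.yaml`; the keys mirror what `_load_config()` and `apply()` read above, while the concrete values (project name, `"dl"`/`"pytorch"`/`"classic"`, `"MIT"`, the extra `data-pipeline` package) are illustrative assumptions.

```python
# Minimal sketch (not from the package): build a viperx.yaml in code and apply it,
# mirroring the `viperx config get` + `viperx init -c viperx.yaml` workflow.
# Key names come from ConfigEngine.apply() above; the literal values are assumptions.
from pathlib import Path

import yaml

from viperx.config_engine import ConfigEngine

config = {
    "project": {
        "name": "deep-vision",
        "description": "Example project scaffolded from a declarative config.",
        "author": "Your Name",
        "license": "MIT",          # assumed to be an accepted license identifier
    },
    "settings": {
        "type": "dl",              # assumed string value of TYPE_DL
        "framework": "pytorch",    # FRAMEWORK_PYTORCH; only consulted for dl projects
        "use_config": True,
        "use_env": False,
        "use_tests": True,
    },
    "workspace": {
        "packages": [
            # Hypothetical sub-package; "classic" assumed to be TYPE_CLASSIC's value.
            {"name": "data-pipeline", "type": "classic", "use_tests": True},
        ],
    },
}

config_path = Path("viperx.yaml")
config_path.write_text(yaml.safe_dump(config, sort_keys=False))

# Loads the YAML, scaffolds or updates the project, then prints the rich update report.
ConfigEngine(config_path, verbose=True).apply()
```

Run from an empty directory, Phase 1 above should scaffold `deep-vision/`, Phase 2 should add the `data-pipeline` package to the workspace, and Phase 3 should copy the config to `deep-vision/viperx.yaml`.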

{viperx-0.9.5 → viperx-0.9.72}/src/viperx/constants.py

```diff
@@ -33,3 +33,8 @@ PROJECT_TYPES = [TYPE_CLASSIC, TYPE_ML, TYPE_DL]
 FRAMEWORK_PYTORCH = "pytorch"
 FRAMEWORK_TENSORFLOW = "tensorflow"
 DL_FRAMEWORKS = [FRAMEWORK_PYTORCH, FRAMEWORK_TENSORFLOW]
+
+# Builders
+BUILDER_UV = "uv"
+BUILDER_HATCH = "hatch"
+SUPPORTED_BUILDERS = [BUILDER_UV, BUILDER_HATCH]
```
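
These builder constants are consumed elsewhere in the release (`ConfigEngine` imports `DEFAULT_BUILDER` from `viperx.constants`, and `project.builder` in `viperx.yaml` is passed to `ProjectGenerator`). Below is a minimal sketch of guarding a user-supplied builder value against `SUPPORTED_BUILDERS`; `resolve_builder` is a hypothetical helper, not something shipped in viperx.

```python
# Hypothetical helper (not in viperx): normalize a user-supplied builder choice,
# falling back to uv when the value is not in SUPPORTED_BUILDERS.
from viperx.constants import BUILDER_UV, SUPPORTED_BUILDERS


def resolve_builder(requested: str | None) -> str:
    if requested in SUPPORTED_BUILDERS:
        return requested
    return BUILDER_UV


print(resolve_builder("hatch"))   # "hatch"
print(resolve_builder("poetry"))  # "uv" (unsupported value falls back)
```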