rhiza 0.10.0__tar.gz → 0.10.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rhiza-0.10.1/.rhiza/.rhiza-version +1 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/PKG-INFO +1 -1
- {rhiza-0.10.0 → rhiza-0.10.1}/pyproject.toml +1 -1
- {rhiza-0.10.0 → rhiza-0.10.1}/src/rhiza/bundle_resolver.py +7 -7
- {rhiza-0.10.0 → rhiza-0.10.1}/src/rhiza/commands/materialize.py +3 -3
- {rhiza-0.10.0 → rhiza-0.10.1}/src/rhiza/models.py +4 -4
- {rhiza-0.10.0 → rhiza-0.10.1}/tests/test_bundle_resolver.py +12 -12
- {rhiza-0.10.0 → rhiza-0.10.1}/tests/test_commands/test_materialize_bundles.py +3 -3
- {rhiza-0.10.0 → rhiza-0.10.1}/uv.lock +1 -1
- rhiza-0.10.0/.github/workflows/rhiza_benchmarks.yml +0 -87
- rhiza-0.10.0/.github/workflows/rhiza_book.yml +0 -82
- rhiza-0.10.0/.github/workflows/rhiza_marimo.yml +0 -108
- rhiza-0.10.0/.rhiza/.rhiza-version +0 -1
- rhiza-0.10.0/.rhiza/make.d/02-book.mk +0 -108
- rhiza-0.10.0/.rhiza/make.d/03-marimo.mk +0 -67
- rhiza-0.10.0/.rhiza/make.d/04-presentation.mk +0 -70
- rhiza-0.10.0/.rhiza/make.d/07-docker.mk +0 -31
- rhiza-0.10.0/book/marimo/notebooks/rhiza.py +0 -629
- rhiza-0.10.0/tests/test_rhiza/README.md +0 -71
- rhiza-0.10.0/tests/test_rhiza/__init__.py +0 -5
- rhiza-0.10.0/tests/test_rhiza/benchmarks/.gitignore +0 -3
- rhiza-0.10.0/tests/test_rhiza/benchmarks/README.md +0 -69
- rhiza-0.10.0/tests/test_rhiza/benchmarks/analyze_benchmarks.py +0 -85
- rhiza-0.10.0/tests/test_rhiza/conftest.py +0 -261
- rhiza-0.10.0/tests/test_rhiza/test_book.py +0 -146
- rhiza-0.10.0/tests/test_rhiza/test_docstrings.py +0 -123
- rhiza-0.10.0/tests/test_rhiza/test_git_repo_fixture.py +0 -144
- rhiza-0.10.0/tests/test_rhiza/test_makefile.py +0 -430
- rhiza-0.10.0/tests/test_rhiza/test_makefile_api.py +0 -258
- rhiza-0.10.0/tests/test_rhiza/test_makefile_gh.py +0 -122
- rhiza-0.10.0/tests/test_rhiza/test_marimushka_target.py +0 -93
- rhiza-0.10.0/tests/test_rhiza/test_notebooks.py +0 -90
- rhiza-0.10.0/tests/test_rhiza/test_readme.py +0 -131
- rhiza-0.10.0/tests/test_rhiza/test_release_script.py +0 -230
- rhiza-0.10.0/tests/test_rhiza/test_requirements_folder.py +0 -49
- rhiza-0.10.0/tests/test_rhiza/test_rhiza_workflows.py +0 -227
- rhiza-0.10.0/tests/test_rhiza/test_structure.py +0 -58
- rhiza-0.10.0/tests/test_rhiza/test_version_matrix.py +0 -264
- {rhiza-0.10.0 → rhiza-0.10.1}/.editorconfig +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.github/ISSUE_TEMPLATE/assign_ui_implementation.md +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.github/actions/configure-git-auth/README.md +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.github/actions/configure-git-auth/action.yml +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.github/agents/analyser.md +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.github/agents/summarise.md +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.github/copilot-instructions.md +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.github/workflows/rhiza_ci.yml +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.github/workflows/rhiza_codeql.yml +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.github/workflows/rhiza_deptry.yml +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.github/workflows/rhiza_mypy.yml +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.github/workflows/rhiza_pre-commit.yml +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.github/workflows/rhiza_release.yml +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.github/workflows/rhiza_security.yml +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.github/workflows/rhiza_sync.yml +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.github/workflows/rhiza_validate.yml +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.gitignore +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.pre-commit-config.yaml +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.python-version +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/.cfg.toml +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/.gitignore +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/assets/rhiza-logo.svg +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/docs/CONFIG.md +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/docs/PRIVATE_PACKAGES.md +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/docs/TOKEN_SETUP.md +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/history +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/make.d/00-custom-env.mk +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/make.d/01-test.mk +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/make.d/05-github.mk +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/make.d/06-agentic.mk +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/make.d/08-docs.mk +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/make.d/10-custom-task.mk +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/make.d/README.md +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/requirements/README.md +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/requirements/docs.txt +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/requirements/marimo.txt +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/requirements/tests.txt +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/requirements/tools.txt +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/rhiza.mk +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/scripts/release.sh +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/template.yml +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/templates/minibook/custom.html.jinja2 +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/.rhiza/utils/version_matrix.py +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/CLI.md +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/CODE_OF_CONDUCT.md +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/CONTRIBUTING.md +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/GETTING_STARTED.md +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/LICENSE +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/Makefile +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/README.md +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/USAGE.md +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/pr-description.md +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/pytest.ini +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/renovate.json +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/ruff.toml +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/src/rhiza/__init__.py +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/src/rhiza/__main__.py +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/src/rhiza/_templates/basic/__init__.py.jinja2 +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/src/rhiza/_templates/basic/main.py.jinja2 +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/src/rhiza/_templates/basic/pyproject.toml.jinja2 +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/src/rhiza/cli.py +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/src/rhiza/commands/__init__.py +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/src/rhiza/commands/init.py +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/src/rhiza/commands/migrate.py +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/src/rhiza/commands/summarise.py +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/src/rhiza/commands/uninstall.py +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/src/rhiza/commands/validate.py +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/src/rhiza/commands/welcome.py +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/src/rhiza/subprocess_utils.py +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/tests/test_cli_commands.py +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/tests/test_commands/test_init.py +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/tests/test_commands/test_materialize.py +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/tests/test_commands/test_migrate.py +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/tests/test_commands/test_summarise.py +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/tests/test_commands/test_uninstall.py +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/tests/test_commands/test_validate.py +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/tests/test_models.py +0 -0
- {rhiza-0.10.0 → rhiza-0.10.1}/tests/test_package.py +0 -0
rhiza-0.10.1/.rhiza/.rhiza-version
@@ -0,0 +1 @@
+0.10.0
{rhiza-0.10.0 → rhiza-0.10.1}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: rhiza
-Version: 0.10.0
+Version: 0.10.1
 Summary: Reusable configuration templates for modern Python projects
 Project-URL: Homepage, https://github.com/jebel-quant/rhiza-cli
 Project-URL: Repository, https://github.com/jebel-quant/rhiza-cli
{rhiza-0.10.0 → rhiza-0.10.1}/src/rhiza/bundle_resolver.py
@@ -1,7 +1,7 @@
 """Bundle resolution logic for template configuration.
 
 This module provides functions to load and resolve bundle configurations
-from the template repository's
+from the template repository's template_bundles.yml file.
 """
 
 from pathlib import Path
@@ -10,19 +10,19 @@ from rhiza.models import RhizaBundles, RhizaTemplate
 
 
 def load_bundles_from_clone(tmp_dir: Path) -> RhizaBundles | None:
-    """Load .rhiza/
+    """Load .rhiza/template_bundles.yml from cloned template repo.
 
     Args:
         tmp_dir: Path to the cloned template repository.
 
     Returns:
-        RhizaBundles if
+        RhizaBundles if template_bundles.yml exists, None otherwise.
 
     Raises:
-        yaml.YAMLError: If
-        ValueError: If
+        yaml.YAMLError: If template_bundles.yml is malformed.
+        ValueError: If template_bundles.yml is invalid.
     """
-    bundles_file = tmp_dir / ".rhiza" / "
+    bundles_file = tmp_dir / ".rhiza" / "template_bundles.yml"
     if not bundles_file.exists():
         return None
     return RhizaBundles.from_yaml(bundles_file)
@@ -54,7 +54,7 @@ def resolve_include_paths(
     # Resolve templates to paths if specified
     if template.templates:
         if not bundles_config:
-            msg = "Template uses templates but
+            msg = "Template uses templates but template_bundles.yml not found in template repository"
             raise ValueError(msg)
         paths.extend(bundles_config.resolve_to_paths(template.templates))
 
{rhiza-0.10.0 → rhiza-0.10.1}/src/rhiza/commands/materialize.py
@@ -121,7 +121,7 @@ def _validate_and_load_template(target: Path, branch: str) -> tuple[RhizaTemplat
     excluded_paths = template.exclude
 
     # Note: We'll resolve templates to paths after cloning the template repo,
-    # since we need access to
+    # since we need access to template_bundles.yml from the template
     include_paths = template.include
 
     # Validate that we have either templates or include paths
@@ -545,11 +545,11 @@ def materialize(target: Path, branch: str, target_branch: str | None, force: boo
         logger.debug(f"Temporary directory: {tmp_dir}")
 
         try:
-            # Clone with initial minimal checkout to load
+            # Clone with initial minimal checkout to load template_bundles.yml if needed
             initial_paths = [".rhiza"] if template.templates else include_paths
             _clone_template_repository(tmp_dir, git_url, rhiza_branch, initial_paths, git_executable, git_env)
 
-            # Load
+            # Load template_bundles.yml and resolve templates to paths if using template mode
             if template.templates:
                 logger.info("Resolving templates to file paths...")
                 try:
{rhiza-0.10.0 → rhiza-0.10.1}/src/rhiza/models.py
@@ -64,7 +64,7 @@ def _normalize_to_list(value: str | list[str] | None) -> list[str]:
 
 @dataclass
 class BundleDefinition:
-    """Represents a single bundle from
+    """Represents a single bundle from template_bundles.yml.
 
     Attributes:
         name: The bundle identifier (e.g., "core", "tests", "github").
@@ -87,7 +87,7 @@ class BundleDefinition:
 
 @dataclass
 class RhizaBundles:
-    """Represents the structure of
+    """Represents the structure of template_bundles.yml.
 
     Attributes:
         version: Version string of the bundles configuration format.
@@ -102,7 +102,7 @@ class RhizaBundles:
         """Load RhizaBundles from a YAML file.
 
         Args:
-            file_path: Path to the
+            file_path: Path to the template_bundles.yml file.
 
         Returns:
             The loaded bundles configuration.
@@ -163,7 +163,7 @@ class RhizaBundles:
         # Validate all bundles exist
         for name in bundle_names:
            if name not in self.bundles:
-                raise ValueError(f"Bundle '{name}' not found in
+                raise ValueError(f"Bundle '{name}' not found in template_bundles.yml")  # noqa: TRY003
 
         resolved: list[str] = []
         visiting: set[str] = set()
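The hunk above touches resolve_to_paths, which validates the requested bundle names and keeps a visiting set while walking depends_on chains. As a rough illustration only, here is a standalone sketch of that style of dependency-aware resolution; the resolve function and the "paths"/"depends_on" dictionary keys are hypothetical and not taken from rhiza's actual implementation, only the file names in the demo data come from the tests in this diff.

# Hypothetical sketch, not rhiza's code: resolve bundles depth-first,
# pulling in depends_on entries before a bundle's own paths and using a
# "visiting" set to reject circular dependencies.
from __future__ import annotations


def resolve(bundles: dict[str, dict], names: list[str]) -> list[str]:
    resolved: list[str] = []
    done: set[str] = set()
    visiting: set[str] = set()

    def visit(name: str) -> None:
        if name not in bundles:
            raise ValueError(f"Bundle '{name}' not found")
        if name in done:
            return
        if name in visiting:
            raise ValueError(f"Circular dependency involving '{name}'")
        visiting.add(name)
        for dep in bundles[name].get("depends_on", []):
            visit(dep)
        visiting.discard(name)
        done.add(name)
        resolved.extend(bundles[name].get("paths", []))

    for name in names:
        visit(name)
    return resolved


demo = {
    "core": {"paths": ["Makefile"]},
    "tests": {"paths": ["tests1.txt"], "depends_on": ["core"]},
    "docs": {"paths": ["docs1.txt"], "depends_on": ["tests"]},
}
# Dependencies come first, matching the ordering asserted in the tests below.
print(resolve(demo, ["docs"]))  # ['Makefile', 'tests1.txt', 'docs1.txt']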
{rhiza-0.10.0 → rhiza-0.10.1}/tests/test_bundle_resolver.py
@@ -170,8 +170,8 @@ class TestRhizaBundles:
         assert result.index("tests1.txt") < result.index("docs1.txt")
 
     def test_from_yaml_valid(self, tmp_path):
-        """Test loading valid
-        bundles_file = tmp_path / "
+        """Test loading valid template_bundles.yml."""
+        bundles_file = tmp_path / "template_bundles.yml"
         bundles_file.write_text("""
 version: "1.0"
 bundles:
@@ -200,24 +200,24 @@ bundles:
         assert result.bundles["tests"].depends_on == ["core"]
 
     def test_from_yaml_empty(self, tmp_path):
-        """Test loading empty
-        bundles_file = tmp_path / "
+        """Test loading empty template_bundles.yml."""
+        bundles_file = tmp_path / "template_bundles.yml"
         bundles_file.write_text("")
 
         with pytest.raises(ValueError, match="Bundles file is empty"):
             RhizaBundles.from_yaml(bundles_file)
 
     def test_from_yaml_missing_version(self, tmp_path):
-        """Test loading
-        bundles_file = tmp_path / "
+        """Test loading template_bundles.yml without version."""
+        bundles_file = tmp_path / "template_bundles.yml"
         bundles_file.write_text("bundles: {}")
 
         with pytest.raises(ValueError, match="missing required field: version"):
             RhizaBundles.from_yaml(bundles_file)
 
     def test_from_yaml_invalid_bundles_type(self, tmp_path):
-        """Test loading
-        bundles_file = tmp_path / "
+        """Test loading template_bundles.yml with invalid bundles type."""
+        bundles_file = tmp_path / "template_bundles.yml"
         bundles_file.write_text("version: '1.0'\nbundles: 'invalid'")
 
         with pytest.raises(TypeError, match="Bundles must be a dictionary"):
@@ -322,7 +322,7 @@ class TestResolveIncludePaths:
             templates=["core"],
         )
 
-        with pytest.raises(ValueError, match=r"Template uses templates but
+        with pytest.raises(ValueError, match=r"Template uses templates but template_bundles\.yml not found"):
             resolve_include_paths(template, None)
 
     def test_no_configuration(self) -> None:
@@ -339,10 +339,10 @@ class TestLoadBundlesFromClone:
     """Test load_bundles_from_clone function."""
 
     def test_load_existing_bundles(self, tmp_path):
-        """Test loading
+        """Test loading template_bundles.yml from cloned repo."""
         rhiza_dir = tmp_path / ".rhiza"
         rhiza_dir.mkdir()
-        bundles_file = rhiza_dir / "
+        bundles_file = rhiza_dir / "template_bundles.yml"
         bundles_file.write_text("""
 version: "1.0"
 bundles:
@@ -359,6 +359,6 @@ bundles:
         assert "core" in result.bundles
 
     def test_load_missing_bundles(self, tmp_path):
-        """Test that missing
+        """Test that missing template_bundles.yml returns None."""
         result = load_bundles_from_clone(tmp_path)
         assert result is None
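The tests above pin down the error messages RhizaBundles.from_yaml raises for an empty file, a missing "version" field, and a non-mapping "bundles" value. The following is a minimal standalone sketch of that validation behaviour, an illustration under assumptions rather than rhiza's actual from_yaml; only the error messages are taken from the diff, the function name and return shape are made up here.

# Hypothetical sketch mirroring the checks exercised by the tests above.
from pathlib import Path

import yaml  # PyYAML; the docstrings in this diff already reference yaml.YAMLError


def load_bundles_config(path: Path) -> dict:
    data = yaml.safe_load(path.read_text())
    if not data:
        raise ValueError("Bundles file is empty")
    if "version" not in data:
        raise ValueError("Bundles file missing required field: version")
    bundles = data.get("bundles", {})
    if not isinstance(bundles, dict):
        raise TypeError("Bundles must be a dictionary")
    return {"version": data["version"], "bundles": bundles}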
{rhiza-0.10.0 → rhiza-0.10.1}/tests/test_commands/test_materialize_bundles.py
@@ -7,7 +7,7 @@ from rhiza.models import RhizaTemplate
 
 @pytest.fixture
 def template_repo(tmp_path):
-    """Create a mock template repository with
+    """Create a mock template repository with template_bundles.yml."""
     repo = tmp_path / "template-repo"
     repo.mkdir()
 
@@ -18,10 +18,10 @@ def template_repo(tmp_path):
     subprocess.run(["git", "config", "user.email", "test@example.com"], cwd=repo, capture_output=True, check=True)
     subprocess.run(["git", "config", "user.name", "Test User"], cwd=repo, capture_output=True, check=True)
 
-    # Create
+    # Create template_bundles.yml
     rhiza_dir = repo / ".rhiza"
     rhiza_dir.mkdir()
-    bundles_file = rhiza_dir / "
+    bundles_file = rhiza_dir / "template_bundles.yml"
     bundles_file.write_text("""
 version: "1.0"
 bundles:
rhiza-0.10.0/.github/workflows/rhiza_benchmarks.yml
@@ -1,87 +0,0 @@
-# This file is part of the jebel-quant/rhiza repository
-# (https://github.com/jebel-quant/rhiza).
-#
-# Workflow: Performance Benchmarks
-#
-# Purpose: Run performance benchmarks and detect regressions.
-#
-# Trigger: On push to main/master branches, PRs, and manual trigger.
-#
-# Regression Detection:
-# - Compares against previous benchmark results stored in gh-pages branch
-# - Alerts if performance degrades by more than 150% (configurable)
-# - PRs will show a warning comment but not fail
-# - Main branch updates the baseline for future comparisons
-
-name: "(RHIZA) BENCHMARKS"
-
-permissions:
-  contents: write
-  pull-requests: write
-
-on:
-  push:
-    branches: [ main, master ]
-  pull_request:
-    branches: [ main, master ]
-  workflow_dispatch:
-
-jobs:
-  benchmark:
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v6.0.2
-        with:
-          lfs: true
-
-      - name: Install uv
-        uses: astral-sh/setup-uv@v7.2.1
-        with:
-          version: "0.9.30"
-
-      - name: Configure git auth for private packages
-        uses: ./.github/actions/configure-git-auth
-        with:
-          token: ${{ secrets.GH_PAT }}
-
-      - name: Run benchmarks
-        env:
-          UV_EXTRA_INDEX_URL: ${{ secrets.UV_EXTRA_INDEX_URL }}
-        run: |
-          make benchmark
-
-      - name: Upload benchmark results
-        uses: actions/upload-artifact@v6.0.0
-        if: always()
-        with:
-          name: benchmark-results
-          path: |
-            _benchmarks/benchmarks.json
-            _benchmarks/benchmarks.svg
-            _benchmarks/benchmarks.html
-
-      # Regression detection using github-action-benchmark
-      # Stores benchmark history in gh-pages branch under /benchmarks
-      # Alerts if performance degrades by more than 150% of baseline
-      - name: Store benchmark result and check for regression
-        uses: benchmark-action/github-action-benchmark@v1
-        # run this only if _benchmarks/benchmarks.json exists
-        if: hashFiles('_benchmarks/benchmarks.json') != ''
-        with:
-          tool: 'pytest'
-          output-file-path: _benchmarks/benchmarks.json
-          # Store benchmark data in gh-pages branch
-          gh-pages-branch: gh-pages
-          benchmark-data-dir-path: benchmarks
-          # Only update baseline on main branch push (not PRs)
-          auto-push: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }}
-          # Alert if performance degrades by more than 150%
-          alert-threshold: '150%'
-          # Post comment on PR if regression detected
-          comment-on-alert: ${{ github.event_name == 'pull_request' }}
-          # Fail workflow if regression detected (disabled for PRs to allow investigation)
-          fail-on-alert: ${{ github.event_name == 'push' }}
-          # GitHub token for pushing to gh-pages and commenting
-          github-token: ${{ secrets.GITHUB_TOKEN }}
rhiza-0.10.0/.github/workflows/rhiza_book.yml
@@ -1,82 +0,0 @@
-# This file is part of the jebel-quant/rhiza repository
-# (https://github.com/jebel-quant/rhiza).
-#
-# Workflow: Book
-# Purpose: This workflow builds and deploys comprehensive documentation for the project.
-# It combines API documentation, test coverage reports, test results, and
-# interactive notebooks into a single GitHub Pages site.
-#
-# Trigger: This workflow runs on every push to the main or master branch
-#
-# Components:
-# - 📓 Process Marimo notebooks
-# - 📖 Generate API documentation with pdoc
-# - 🧪 Run tests and generate coverage reports
-# - 🚀 Deploy combined documentation to GitHub Pages
-
-name: "(RHIZA) BOOK"
-
-on:
-  push:
-    branches:
-      - main
-      - master
-
-jobs:
-  book:
-    runs-on: "ubuntu-latest"
-
-    environment:
-      name: github-pages # 👈 this is the critical missing piece
-
-    permissions:
-      contents: read
-      pages: write # Permission to deploy to Pages
-      id-token: write # Permission to verify deployment origin
-
-    steps:
-      # Check out the repository code
-      - uses: actions/checkout@v6.0.2
-        with:
-          lfs: true
-
-      - name: Install uv
-        uses: astral-sh/setup-uv@v7.2.1
-        with:
-          version: "0.9.30"
-
-      - name: Configure git auth for private packages
-        uses: ./.github/actions/configure-git-auth
-        with:
-          token: ${{ secrets.GH_PAT }}
-
-      - name: "Sync the virtual environment for ${{ github.repository }}"
-        shell: bash
-        env:
-          UV_EXTRA_INDEX_URL: ${{ secrets.UV_EXTRA_INDEX_URL }}
-        run: |
-          # will just use .python-version?
-          uv sync --all-extras --all-groups --frozen
-
-      - name: "Make the book"
-        env:
-          UV_EXTRA_INDEX_URL: ${{ secrets.UV_EXTRA_INDEX_URL }}
-        run: |
-          make book
-
-      # Step 5: Package all artifacts for GitHub Pages deployment
-      # This prepares the combined outputs for deployment by creating a single artifact
-      - name: Upload static files as artifact
-        uses: actions/upload-pages-artifact@v4.0.0 # Official GitHub Pages artifact upload action
-        with:
-          path: _book/ # Path to the directory containing all artifacts to deploy
-
-      # Step 6: Deploy the packaged artifacts to GitHub Pages
-      # This step publishes the content to GitHub Pages
-      # The deployment is conditional based on whether the repository is a fork and the PUBLISH_COMPANION_BOOK variable is set
-      # If the repository is a fork, deployment is skipped to avoid unauthorised publishing
-      # If PUBLISH_COMPANION_BOOK is not set, it defaults to allowing deployment
-      - name: Deploy to GitHub Pages
-        if: ${{ !github.event.repository.fork && (vars.PUBLISH_COMPANION_BOOK == 'true' || vars.PUBLISH_COMPANION_BOOK == '') }}
-        uses: actions/deploy-pages@v4.0.5 # Official GitHub Pages deployment action
-        continue-on-error: true
rhiza-0.10.0/.github/workflows/rhiza_marimo.yml
@@ -1,108 +0,0 @@
-# This file is part of the jebel-quant/rhiza repository
-# (https://github.com/jebel-quant/rhiza).
-#
-# Workflow: Marimo Notebooks
-#
-# Purpose: This workflow discovers and executes all Marimo notebooks in the
-# repository. It builds a dynamic matrix to run each notebook in
-# parallel to surface errors early and keep notebooks reproducible.
-#
-# Trigger: This workflow runs on every push and on pull requests to main/master
-# branches (including from forks)
-#
-# Components:
-# - 🔎 Discover notebooks in book/marimo
-# - 🧪 Run each notebook in parallel using a matrix strategy
-# - ✅ Fail-fast disabled to report all failing notebooks
-
-name: "(RHIZA) MARIMO"
-
-permissions:
-  contents: read
-
-on:
-  push:
-    branches: [ main, master ]
-  pull_request:
-    branches: [ main, master ]
-
-jobs:
-  # Build a matrix of notebooks to test
-  list-notebooks:
-    runs-on: ubuntu-latest
-    outputs:
-      notebook-list: ${{ steps.notebooks.outputs.matrix }}
-    steps:
-      # Check out the repository code
-      - uses: actions/checkout@v6.0.2
-
-      # Find all Python files in the marimo folder and create a matrix for parallel execution
-      - name: Find notebooks and build matrix
-        id: notebooks
-        run: |
-          # Extract MARIMO_FOLDER from the project configuration (via Makefile)
-          # shellcheck disable=SC2016 # Single quotes intentional - Make syntax, not shell expansion
-          NOTEBOOK_DIR=$(make -s -f Makefile -f - <<< 'print: ; @echo $(or $(MARIMO_FOLDER),marimo)' print)
-
-          echo "Searching notebooks in: $NOTEBOOK_DIR"
-          # Check if directory exists
-          if [ ! -d "$NOTEBOOK_DIR" ]; then
-            echo "Directory $NOTEBOOK_DIR does not exist. Setting empty matrix."
-            echo "matrix=[]" >> "$GITHUB_OUTPUT"
-            exit 0
-          fi
-
-          # Find notebooks and handle empty results
-          if [ -z "$(find "$NOTEBOOK_DIR" -maxdepth 1 -name "*.py" 2>/dev/null)" ]; then
-            echo "No notebooks found in $NOTEBOOK_DIR. Setting empty matrix."
-            echo "matrix=[]" >> "$GITHUB_OUTPUT"
-          else
-            notebooks=$(find "$NOTEBOOK_DIR" -maxdepth 1 -name "*.py" -print0 | xargs -0 -n1 echo | jq -R -s -c 'split("\n")[:-1]')
-            echo "matrix=$notebooks" >> "$GITHUB_OUTPUT"
-          fi
-        shell: bash
-
-  # Create one job per notebook using the matrix strategy for parallel execution
-  test-notebooks:
-    if: needs.list-notebooks.outputs.notebook-list != '[]'
-    runs-on: ubuntu-latest
-    needs: list-notebooks
-    strategy:
-      matrix:
-        notebook: ${{ fromJson(needs.list-notebooks.outputs.notebook-list) }}
-      # Don't fail the entire workflow if one notebook fails
-      fail-fast: false
-    name: Run notebook ${{ matrix.notebook }}
-    steps:
-      # Check out the repository code
-      - uses: actions/checkout@v6.0.2
-        with:
-          lfs: true
-
-      # Install uv/uvx
-      - name: Install uv
-        uses: astral-sh/setup-uv@v7.2.1
-        with:
-          version: "0.9.30"
-
-      - name: Configure git auth for private packages
-        uses: ./.github/actions/configure-git-auth
-        with:
-          token: ${{ secrets.GH_PAT }}
-
-      # Execute the notebook with the appropriate runner based on its content
-      - name: Run notebook
-        env:
-          UV_EXTRA_INDEX_URL: ${{ secrets.UV_EXTRA_INDEX_URL }}
-        run: |
-          uvx uv run "${{ matrix.notebook }}"
-          # uvx → creates a fresh ephemeral environment
-          # uv run → runs the notebook as a script in that ephemeral env
-          # No project packages are pre-installed
-          # ✅ This forces the notebook to explicitly handle dependencies (e.g., uv install ., or pip install inside the script).
-          # ✅ It’s a true integration smoke test.
-          # Benefits of this pattern
-          # Confirms the notebook can bootstrap itself in a fresh environment
-          # Catches missing uv install or pip steps early
-          # Ensures CI/other users can run the notebook without manual setup
-        shell: bash
rhiza-0.10.0/.rhiza/.rhiza-version
@@ -1 +0,0 @@
-0.9.0
rhiza-0.10.0/.rhiza/make.d/02-book.mk
@@ -1,108 +0,0 @@
-## book.mk - Book-building targets
-# This file is included by the main Makefile.
-# It provides targets for exporting Marimo notebooks to HTML (marimushka)
-# and compiling a companion book (minibook).
-
-# Declare phony targets (they don't produce files)
-.PHONY: marimushka book
-
-# Define a default no-op marimushka target that will be used
-# when book/marimo/marimo.mk doesn't exist or doesn't define marimushka
-marimushka:: install-uv
-	@if [ ! -d "book/marimo" ]; then \
-		printf "${BLUE}[INFO] No Marimo directory found, creating placeholder${RESET}\n"; \
-		mkdir -p "${MARIMUSHKA_OUTPUT}"; \
-		printf '%s\n' '<html><head><title>Marimo Notebooks</title></head>' \
-			'<body><h1>Marimo Notebooks</h1><p>No notebooks found.</p></body></html>' \
-			> "${MARIMUSHKA_OUTPUT}/index.html"; \
-	fi
-
-# Default output directory for Marimushka (HTML exports of notebooks)
-MARIMUSHKA_OUTPUT ?= _marimushka
-
-# ----------------------------
-# Book sections (declarative)
-# ----------------------------
-# format:
-# name | source index | book-relative index | source dir | book dir
-
-BOOK_SECTIONS := \
-	"API|_pdoc/index.html|pdoc/index.html|_pdoc|pdoc" \
-	"Coverage|_tests/html-coverage/index.html|tests/html-coverage/index.html|_tests/html-coverage|tests/html-coverage" \
-	"Test Report|_tests/html-report/report.html|tests/html-report/report.html|_tests/html-report|tests/html-report" \
-	"Notebooks|_marimushka/index.html|marimushka/index.html|_marimushka|marimushka"
-
-##@ Book
-
-# The 'book' target assembles the final documentation book.
-# 1. Aggregates API docs, coverage, test reports, and notebooks into _book.
-# 2. Generates links.json to define the book structure.
-# 3. Uses 'minibook' to compile the final HTML site.
-book:: test docs marimushka ## compile the companion book
-	@printf "${BLUE}[INFO] Building combined documentation...${RESET}\n"
-	@rm -rf _book && mkdir -p _book
-
-	@if [ -f "_tests/coverage.json" ]; then \
-		printf "${BLUE}[INFO] Generating coverage badge JSON...${RESET}\n"; \
-		mkdir -p _book/tests; \
-		${UV_BIN} run python -c "\
-	import json; \
-	data = json.load(open('_tests/coverage.json')); \
-	pct = int(data['totals']['percent_covered']); \
-	color = 'brightgreen' if pct >= 90 else 'green' if pct >= 80 else 'yellow' if pct >= 70 else 'orange' if pct >= 60 else 'red'; \
-	badge = {'schemaVersion': 1, 'label': 'coverage', 'message': f'{pct}%', 'color': color}; \
-	json.dump(badge, open('_book/tests/coverage-badge.json', 'w'))"; \
-		printf "${BLUE}[INFO] Coverage badge JSON:${RESET}\n"; \
-		cat _book/tests/coverage-badge.json; \
-		printf "\n"; \
-	else \
-		printf "${YELLOW}[WARN] No coverage.json found, skipping badge generation${RESET}\n"; \
-	fi
-
-	@printf "{\n" > _book/links.json
-	@first=1; \
-	for entry in $(BOOK_SECTIONS); do \
-		name=$${entry%%|*}; \
-		rest=$${entry#*|}; \
-		src_index=$${rest%%|*}; rest=$${rest#*|}; \
-		book_index=$${rest%%|*}; rest=$${rest#*|}; \
-		src_dir=$${rest%%|*}; book_dir=$${rest#*|}; \
-		if [ -f "$$src_index" ]; then \
-			printf "${BLUE}[INFO] Adding $$name...${RESET}\n"; \
-			mkdir -p "_book/$$book_dir"; \
-			cp -r "$$src_dir/"* "_book/$$book_dir"; \
-			if [ $$first -eq 0 ]; then \
-				printf ",\n" >> _book/links.json; \
-			fi; \
-			printf " \"%s\": \"./%s\"" "$$name" "$$book_index" >> _book/links.json; \
-			first=0; \
-		else \
-			printf "${YELLOW}[WARN] Missing $$name, skipping${RESET}\n"; \
-		fi; \
-	done; \
-	printf "\n}\n" >> _book/links.json
-
-	@printf "${BLUE}[INFO] Generated links.json:${RESET}\n"
-	@cat _book/links.json
-
-	@TEMPLATE_ARG=""; \
-	if [ -f "$(BOOK_TEMPLATE)" ]; then \
-		TEMPLATE_ARG="--template $(BOOK_TEMPLATE)"; \
-		printf "${BLUE}[INFO] Using book template $(BOOK_TEMPLATE)${RESET}\n"; \
-	fi; \
-	if [ -n "$(LOGO_FILE)" ]; then \
-		if [ -f "$(LOGO_FILE)" ]; then \
-			cp "$(LOGO_FILE)" "_book/logo$$(echo $(LOGO_FILE) | sed 's/.*\./\./')"; \
-			printf "${BLUE}[INFO] Copying logo: $(LOGO_FILE)${RESET}\n"; \
-		else \
-			printf "${YELLOW}[WARN] Logo file $(LOGO_FILE) not found, skipping${RESET}\n"; \
-		fi; \
-	fi; \
-	"$(UVX_BIN)" minibook \
-		--title "$(BOOK_TITLE)" \
-		--subtitle "$(BOOK_SUBTITLE)" \
-		$$TEMPLATE_ARG \
-		--links "$$(python3 -c 'import json;print(json.dumps(json.load(open("_book/links.json"))))')" \
-		--output "_book"
-
-	@touch "_book/.nojekyll"