rhiza 0.5.6.tar.gz → 0.6.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. rhiza-0.6.1/.github/dependabot.yml +59 -0
  2. {rhiza-0.5.6/.github → rhiza-0.6.1/.github/rhiza}/copilot-instructions.md +7 -7
  3. {rhiza-0.5.6/.github → rhiza-0.6.1/.github/rhiza}/template.yml +3 -1
  4. {rhiza-0.5.6 → rhiza-0.6.1}/.github/scripts/book.sh +7 -0
  5. {rhiza-0.5.6 → rhiza-0.6.1}/.github/scripts/bump.sh +16 -3
  6. {rhiza-0.5.6 → rhiza-0.6.1}/.github/scripts/marimushka.sh +15 -1
  7. {rhiza-0.5.6 → rhiza-0.6.1}/.github/scripts/release.sh +19 -2
  8. {rhiza-0.5.6 → rhiza-0.6.1}/.github/scripts/update-readme-help.sh +13 -1
  9. {rhiza-0.5.6 → rhiza-0.6.1}/.github/workflows/release.yml +13 -6
  10. rhiza-0.6.1/.github/workflows/rhiza.yml +26 -0
  11. {rhiza-0.5.6 → rhiza-0.6.1}/.github/workflows/scripts/version_matrix.py +10 -0
  12. {rhiza-0.5.6 → rhiza-0.6.1}/.github/workflows/scripts/version_max.py +8 -0
  13. rhiza-0.6.1/.github/workflows/security.yml +23 -0
  14. rhiza-0.6.1/.github/workflows/sync.yml +91 -0
  15. {rhiza-0.5.6 → rhiza-0.6.1}/.gitignore +5 -0
  16. {rhiza-0.5.6 → rhiza-0.6.1}/.pre-commit-config.yaml +1 -1
  17. {rhiza-0.5.6 → rhiza-0.6.1}/.rhiza.history +14 -2
  18. {rhiza-0.5.6 → rhiza-0.6.1}/CLI.md +9 -2
  19. rhiza-0.6.1/GETTING_STARTED.md +492 -0
  20. {rhiza-0.5.6 → rhiza-0.6.1}/Makefile +26 -73
  21. {rhiza-0.5.6 → rhiza-0.6.1}/PKG-INFO +64 -24
  22. {rhiza-0.5.6 → rhiza-0.6.1}/README.md +63 -23
  23. {rhiza-0.5.6 → rhiza-0.6.1}/USAGE.md +23 -15
  24. rhiza-0.6.1/book/Makefile.book +62 -0
  25. rhiza-0.6.1/book/minibook-templates/custom.html.jinja2 +210 -0
  26. rhiza-0.6.1/book/pdoc-templates/module.html.jinja2 +19 -0
  27. rhiza-0.6.1/presentation/Makefile.presentation +70 -0
  28. rhiza-0.6.1/presentation/README.md +325 -0
  29. {rhiza-0.5.6 → rhiza-0.6.1}/pyproject.toml +1 -1
  30. {rhiza-0.5.6 → rhiza-0.6.1}/src/rhiza/__init__.py +1 -1
  31. {rhiza-0.5.6 → rhiza-0.6.1}/src/rhiza/cli.py +5 -5
  32. {rhiza-0.5.6 → rhiza-0.6.1}/src/rhiza/commands/__init__.py +2 -2
  33. rhiza-0.6.1/src/rhiza/commands/init.py +154 -0
  34. {rhiza-0.5.6 → rhiza-0.6.1}/src/rhiza/commands/materialize.py +94 -10
  35. {rhiza-0.5.6 → rhiza-0.6.1}/src/rhiza/commands/validate.py +60 -13
  36. {rhiza-0.5.6 → rhiza-0.6.1}/src/rhiza/commands/welcome.py +9 -2
  37. {rhiza-0.5.6 → rhiza-0.6.1}/src/rhiza/models.py +1 -1
  38. rhiza-0.6.1/tests/Makefile.tests +32 -0
  39. {rhiza-0.5.6 → rhiza-0.6.1}/tests/test_cli_commands.py +13 -0
  40. {rhiza-0.5.6 → rhiza-0.6.1}/tests/test_commands/test_init.py +44 -12
  41. {rhiza-0.5.6 → rhiza-0.6.1}/tests/test_commands/test_materialize.py +211 -62
  42. {rhiza-0.5.6 → rhiza-0.6.1}/tests/test_commands/test_validate.py +79 -55
  43. {rhiza-0.5.6 → rhiza-0.6.1}/tests/test_models.py +8 -4
  44. rhiza-0.6.1/tests/test_rhiza/benchmarks/.gitignore +3 -0
  45. rhiza-0.6.1/tests/test_rhiza/benchmarks/README.md +69 -0
  46. rhiza-0.6.1/tests/test_rhiza/benchmarks/analyze_benchmarks.py +85 -0
  47. {rhiza-0.5.6 → rhiza-0.6.1}/tests/test_rhiza/test_makefile.py +40 -9
  48. {rhiza-0.5.6 → rhiza-0.6.1}/tests/test_rhiza/test_structure.py +0 -2
  49. {rhiza-0.5.6 → rhiza-0.6.1}/uv.lock +1 -1
  50. rhiza-0.5.6/.github/workflows/sym2.yml +0 -134
  51. rhiza-0.5.6/.github/workflows/sync.yml +0 -139
  52. rhiza-0.5.6/src/rhiza/commands/init.py +0 -64
  53. {rhiza-0.5.6 → rhiza-0.6.1}/.editorconfig +0 -0
  54. {rhiza-0.5.6 → rhiza-0.6.1}/.github/actions/setup-project/action.yml +0 -0
  55. {rhiza-0.5.6 → rhiza-0.6.1}/.github/renovate.json +0 -0
  56. /rhiza-0.5.6/.github/README.md → /rhiza-0.6.1/.github/rhiza/CONFIG.md +0 -0
  57. {rhiza-0.5.6/.github → rhiza-0.6.1/.github/rhiza}/TOKEN_SETUP.md +0 -0
  58. {rhiza-0.5.6 → rhiza-0.6.1}/.github/scripts/customisations/build-extras.sh +0 -0
  59. {rhiza-0.5.6 → rhiza-0.6.1}/.github/scripts/customisations/post-release.sh +0 -0
  60. {rhiza-0.5.6 → rhiza-0.6.1}/.github/workflows/book.yml +0 -0
  61. {rhiza-0.5.6 → rhiza-0.6.1}/.github/workflows/ci.yml +0 -0
  62. {rhiza-0.5.6 → rhiza-0.6.1}/.github/workflows/deptry.yml +0 -0
  63. {rhiza-0.5.6 → rhiza-0.6.1}/.github/workflows/marimo.yml +0 -0
  64. {rhiza-0.5.6 → rhiza-0.6.1}/.github/workflows/pre-commit.yml +0 -0
  65. {rhiza-0.5.6 → rhiza-0.6.1}/.github/workflows/structure.yml +0 -0
  66. {rhiza-0.5.6 → rhiza-0.6.1}/CODE_OF_CONDUCT.md +0 -0
  67. {rhiza-0.5.6 → rhiza-0.6.1}/CONTRIBUTING.md +0 -0
  68. {rhiza-0.5.6 → rhiza-0.6.1}/LICENSE +0 -0
  69. {rhiza-0.5.6 → rhiza-0.6.1}/book/marimo/.gitkeep +0 -0
  70. {rhiza-0.5.6 → rhiza-0.6.1}/pytest.ini +0 -0
  71. {rhiza-0.5.6 → rhiza-0.6.1}/ruff.toml +0 -0
  72. {rhiza-0.5.6 → rhiza-0.6.1}/src/rhiza/__main__.py +0 -0
  73. {rhiza-0.5.6 → rhiza-0.6.1}/tests/test_package.py +0 -0
  74. {rhiza-0.5.6 → rhiza-0.6.1}/tests/test_rhiza/README.md +0 -0
  75. {rhiza-0.5.6 → rhiza-0.6.1}/tests/test_rhiza/conftest.py +0 -0
  76. {rhiza-0.5.6 → rhiza-0.6.1}/tests/test_rhiza/test_bump_script.py +0 -0
  77. {rhiza-0.5.6 → rhiza-0.6.1}/tests/test_rhiza/test_docstrings.py +0 -0
  78. {rhiza-0.5.6 → rhiza-0.6.1}/tests/test_rhiza/test_git_repo_fixture.py +0 -0
  79. {rhiza-0.5.6 → rhiza-0.6.1}/tests/test_rhiza/test_marimushka_script.py +0 -0
  80. {rhiza-0.5.6 → rhiza-0.6.1}/tests/test_rhiza/test_readme.py +0 -0
  81. {rhiza-0.5.6 → rhiza-0.6.1}/tests/test_rhiza/test_release_script.py +0 -0
  82. {rhiza-0.5.6 → rhiza-0.6.1}/tests/test_rhiza/test_updatereadme_script.py +0 -0
@@ -0,0 +1,59 @@
+ # This file is part of the jebel-quant/rhiza repository
+ # (https://github.com/jebel-quant/rhiza).
+ #
+ # Configuration: Dependabot
+ #
+ # Purpose: Automate dependency updates for Python packages, GitHub Actions, and Docker images.
+ #
+ # Documentation: https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
+
+ version: 2
+ updates:
+ # Python dependencies (pip/pyproject.toml)
+ - package-ecosystem: "pip"
+ directory: "/"
+ schedule:
+ interval: "weekly"
+ day: "tuesday"
+ time: "09:00"
+ timezone: "Asia/Dubai"
+ open-pull-requests-limit: 10
+ labels:
+ - "dependencies"
+ - "python"
+ commit-message:
+ prefix: "chore(deps)"
+ prefix-development: "chore(deps-dev)"
+ include: "scope"
+
+ # GitHub Actions
+ - package-ecosystem: "github-actions"
+ directory: "/"
+ schedule:
+ interval: "weekly"
+ day: "tuesday"
+ time: "09:00"
+ timezone: "Asia/Dubai"
+ open-pull-requests-limit: 10
+ labels:
+ - "dependencies"
+ - "github-actions"
+ commit-message:
+ prefix: "chore(deps)"
+ include: "scope"
+
+ # Docker
+ #- package-ecosystem: "docker"
+ # directory: "/docker"
+ # schedule:
+ # interval: "weekly"
+ # day: "tuesday"
+ # time: "09:00"
+ # timezone: "Asia/Dubai"
+ # open-pull-requests-limit: 10
+ # labels:
+ # - "dependencies"
+ # - "docker"
+ # commit-message:
+ # prefix: "chore(deps)"
+ # include: "scope"
@@ -125,7 +125,7 @@ The project uses pre-commit hooks that run automatically on commit:

  The CLI uses Typer for command definitions. Commands are thin wrappers in `cli.py` that delegate to implementations in `rhiza.commands.*`:

- - `init`: Initialize or validate `.github/template.yml`
+ - `init`: Initialize or validate `.github/rhiza/template.yml`
  - `materialize` (alias `inject`): Apply templates to a target repository
  - `validate`: Validate template configuration

@@ -178,9 +178,9 @@ See `pyproject.toml` for complete list. Key dev dependencies:
  ```python
  from pathlib import Path

- target = Path(".") # Use Path objects, not strings
+ target = Path("..") # Use Path objects, not strings
  if target.exists():
- # Do something
+ # Do something
  ```

  ### Logging
@@ -229,14 +229,14 @@ def safe_operation(path: Path):
  try:
  # Normalize path to prevent traversal
  path = path.resolve()
-
+
  if not path.exists():
  logger.error(f"Path does not exist: {path}")
  raise FileNotFoundError(f"Path not found: {path}")
-
+
  # Perform operation
  return True
-
+
  except PermissionError as e:
  logger.error(f"Permission denied: {e}")
  raise
@@ -275,7 +275,7 @@ from loguru import logger

  def my_new_command(target: Path):
  """Execute the new command.
-
+
  Parameters
  ----------
  target:
@@ -1,6 +1,6 @@
  template-repository: "jebel-quant/rhiza"
  template-branch: "main"
- include:
+ include:
  - .github
  - tests
  - .editorconfig
@@ -11,6 +11,8 @@ include:
  - Makefile
  - ruff.toml
  - pytest.ini
+ - presentation
+ - book
  exclude:
  - .github/workflows/docker.yml
  - .github/workflows/devcontainer.yml
@@ -21,12 +21,15 @@ printf "%b[INFO] Create empty _book folder...%b\n" "$BLUE" "$RESET"
  mkdir -p _book

  # Start building links.json content without jq
+ # We manually construct JSON by concatenating strings
+ # This avoids the dependency on jq while maintaining valid JSON output
  LINKS_ENTRIES=""

  printf "%b[INFO] Copy API docs...%b\n" "$BLUE" "$RESET"
  if [ -f _pdoc/index.html ]; then
  mkdir -p _book/pdoc
  cp -r _pdoc/* _book/pdoc
+ # Start building JSON entries - first entry doesn't need a comma prefix
  LINKS_ENTRIES='"API": "./pdoc/index.html"'
  fi

@@ -34,6 +37,7 @@ printf "%b[INFO] Copy coverage report...%b\n" "$BLUE" "$RESET"
  if [ -f _tests/html-coverage/index.html ]; then
  mkdir -p _book/tests/html-coverage
  cp -r _tests/html-coverage/* _book/tests/html-coverage
+ # Add comma separator if there are existing entries
  if [ -n "$LINKS_ENTRIES" ]; then
  LINKS_ENTRIES="$LINKS_ENTRIES, \"Coverage\": \"./tests/html-coverage/index.html\""
  else
@@ -71,9 +75,12 @@ else
  fi

  # Write final links.json
+ # Wrap the accumulated entries in JSON object syntax
  if [ -n "$LINKS_ENTRIES" ]; then
+ # If we have entries, create a proper JSON object with them
  printf '{%s}\n' "$LINKS_ENTRIES" > _book/links.json
  else
+ # If no entries were found, create an empty JSON object
  printf '{}\n' > _book/links.json
  fi

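The comments added to book.sh above describe assembling `_book/links.json` by plain string concatenation so the script does not depend on jq. As a rough illustration of the same idea in Python (the function name and the `_pdoc`/`_tests` paths mirror the script but are assumptions for this sketch, not taken from the diff):

```python
import json
from pathlib import Path


def build_links_json(book_dir: Path = Path("_book")) -> Path:
    """Collect available report links and write them to links.json."""
    entries: dict[str, str] = {}

    # Mirror the shell logic: only add an entry when the artifact exists.
    if Path("_pdoc/index.html").exists():
        entries["API"] = "./pdoc/index.html"
    if Path("_tests/html-coverage/index.html").exists():
        entries["Coverage"] = "./tests/html-coverage/index.html"

    book_dir.mkdir(parents=True, exist_ok=True)
    out = book_dir / "links.json"
    # json.dumps({}) covers the "no entries" case with "{}", matching the script's fallback.
    out.write_text(json.dumps(entries) + "\n")
    return out


if __name__ == "__main__":
    print(build_links_json())
```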
@@ -90,7 +90,8 @@ do_bump() {
  4)
  printf "Enter version: "
  read -r VERSION
- # Strip 'v' prefix if present
+ # Strip 'v' prefix if present (e.g., "v1.2.3" becomes "1.2.3")
+ # This ensures consistency since uv expects semantic versions without 'v'
  VERSION=$(echo "$VERSION" | sed 's/^v//')
  ;;
  *)
@@ -100,6 +101,7 @@ do_bump() {
  esac

  # Get current branch
+ # Using git rev-parse to get the symbolic name of HEAD (current branch)
  CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD)
  if [ -z "$CURRENT_BRANCH" ]; then
  printf "%b[ERROR] Could not determine current branch%b\n" "$RED" "$RESET"
@@ -107,6 +109,8 @@ do_bump() {
  fi

  # Determine default branch
+ # Query remote to find the default branch (e.g., 'main' or 'master')
+ # This ensures we warn users if they're bumping version on a non-default branch
  DEFAULT_BRANCH=$(git remote show origin | grep 'HEAD branch' | cut -d' ' -f5)
  if [ -z "$DEFAULT_BRANCH" ]; then
  printf "%b[ERROR] Could not determine default branch from remote%b\n" "$RED" "$RESET"
@@ -123,7 +127,9 @@ do_bump() {
  printf "%b[INFO] Current version: %s%b\n" "$BLUE" "$CURRENT_VERSION" "$RESET"

  # Determine the new version using uv version with --dry-run first
+ # Using --dry-run ensures we validate the version change before applying it
  if [ -n "$TYPE" ]; then
+ # For bump types (patch/minor/major), calculate what the new version will be
  printf "%b[INFO] Bumping version using: %s%b\n" "$BLUE" "$TYPE" "$RESET"
  NEW_VERSION=$("$UV_BIN" version --bump "$TYPE" --dry-run --short 2>/dev/null)
  if [ $? -ne 0 ] || [ -z "$NEW_VERSION" ]; then
@@ -131,7 +137,8 @@
  exit 1
  fi
  else
- # Validate the version format by having uv try it with --dry-run
+ # For explicit versions, validate the format using dry-run mode
+ # This catches invalid semver formats before we modify pyproject.toml
  if ! "$UV_BIN" version "$VERSION" --dry-run >/dev/null 2>&1; then
  printf "%b[ERROR] Invalid version format: %s%b\n" "$RED" "$VERSION" "$RESET"
  printf "uv rejected this version. Please use a valid semantic version.\n"
@@ -145,11 +152,15 @@ do_bump() {
  TAG="v$NEW_VERSION"

  # Check if tag already exists
+ # Prevent creating duplicate tags by checking both local and remote repositories
+ # git rev-parse succeeds if the tag exists locally
  if git rev-parse "$TAG" >/dev/null 2>&1; then
  printf "%b[ERROR] Tag '%s' already exists locally%b\n" "$RED" "$TAG" "$RESET"
  exit 1
  fi

+ # git ls-remote checks if the tag exists on the remote repository
+ # --exit-code returns 2 if the ref is not found, which we want
  if git ls-remote --exit-code --tags origin "refs/tags/$TAG" >/dev/null 2>&1; then
  printf "%b[ERROR] Tag '%s' already exists on remote%b\n" "$RED" "$TAG" "$RESET"
  exit 1
@@ -201,7 +212,9 @@ do_bump() {

  printf "%b[INFO] Committing version change...%b\n" "$BLUE" "$RESET"
  git add pyproject.toml
- git add uv.lock 2>/dev/null || true # In case uv modifies the lock file
+ # Add uv.lock if it exists and was modified (uv may update it during version bump)
+ # Using || true ensures the script continues even if uv.lock doesn't exist
+ git add uv.lock 2>/dev/null || true
  git commit -m "$COMMIT_MSG"

  printf "%b[SUCCESS] Version committed with message: '%s'%b\n" "$GREEN" "$COMMIT_MSG" "$RESET"
@@ -24,8 +24,11 @@ fi
  mkdir -p "$MARIMUSHKA_OUTPUT"

  # Discover .py files (top-level only) using globbing; handle no-match case
+ # Using shell globbing to find all .py files in the notebook folder
+ # The set command expands the glob pattern; if no files match, the pattern itself is returned
  set -- "$MARIMO_FOLDER"/*.py
  if [ "$1" = "$MARIMO_FOLDER/*.py" ]; then
+ # No Python files found - the glob pattern didn't match any files
  printf "%b[WARN] No Python files found in '%s'.%b\n" "$YELLOW" "$MARIMO_FOLDER" "$RESET"
  # Create a minimal index.html indicating no notebooks
  printf '<html><head><title>Marimo Notebooks</title></head><body><h1>Marimo Notebooks</h1><p>No notebooks found.</p></body></html>' > "$MARIMUSHKA_OUTPUT/index.html"
@@ -37,13 +40,17 @@ CURRENT_DIR=$(pwd)
  OUTPUT_DIR="$CURRENT_DIR/$MARIMUSHKA_OUTPUT"

  # Resolve UVX_BIN to absolute path if it's a relative path (contains / but doesn't start with /)
+ # This is necessary because we'll change directory later and need absolute paths
+ # Case 1: Already absolute (starts with /) - no change needed
+ # Case 2: Relative path with / (e.g., ./bin/uvx) - convert to absolute
+ # Case 3: Command name only (e.g., uvx) - leave as-is to search in PATH
  case "$UVX_BIN" in
  /*) ;;
  */*) UVX_BIN="$CURRENT_DIR/$UVX_BIN" ;;
  *) ;;
  esac

- # Resolve UV_BIN to absolute path
+ # Resolve UV_BIN to absolute path using the same logic
  case "$UV_BIN" in
  /*) ;;
  */*) UV_BIN="$CURRENT_DIR/$UV_BIN" ;;
@@ -51,13 +58,20 @@ case "$UV_BIN" in
  esac

  # Derive UV_INSTALL_DIR from UV_BIN
+ # This directory is passed to marimushka so it can find uv for processing notebooks
  UV_INSTALL_DIR=$(dirname "$UV_BIN")

  # Change to the notebook directory to ensure relative paths in notebooks work correctly
+ # Marimo notebooks may contain relative imports or file references
  cd "$MARIMO_FOLDER"

  # Run marimushka export
+ # - --notebooks: directory containing .py notebooks
+ # - --output: where to write HTML files
+ # - --bin-path: where marimushka can find the uv binary for processing
  "$UVX_BIN" "marimushka>=0.1.9" export --notebooks "." --output "$OUTPUT_DIR" --bin-path "$UV_INSTALL_DIR"

  # Ensure GitHub Pages does not process with Jekyll
+ # The : command is a no-op that creates an empty file
+ # .nojekyll tells GitHub Pages to serve files as-is without Jekyll processing
  : > "$OUTPUT_DIR/.nojekyll"
@@ -134,29 +134,39 @@ do_release() {
  fi

  # Check if branch is up-to-date with remote
+ # This prevents releasing from an out-of-sync branch which could miss commits or conflict
  printf "%b[INFO] Checking remote status...%b\n" "$BLUE" "$RESET"
  git fetch origin >/dev/null 2>&1
+ # Get the upstream tracking branch (e.g., origin/main)
  UPSTREAM=$(git rev-parse --abbrev-ref --symbolic-full-name @{u} 2>/dev/null)
  if [ -z "$UPSTREAM" ]; then
  printf "%b[ERROR] No upstream branch configured for %s%b\n" "$RED" "$CURRENT_BRANCH" "$RESET"
  exit 1
  fi

+ # Compare local, remote, and merge-base commits to determine sync status
+ # LOCAL: current commit on local branch
+ # REMOTE: current commit on remote tracking branch
+ # BASE: most recent common ancestor between local and remote
  LOCAL=$(git rev-parse @)
  REMOTE=$(git rev-parse "$UPSTREAM")
  BASE=$(git merge-base @ "$UPSTREAM")

+ # Use git revision comparison to detect branch status
  if [ "$LOCAL" != "$REMOTE" ]; then
  if [ "$LOCAL" = "$BASE" ]; then
+ # Local is behind remote (need to pull)
  printf "%b[ERROR] Your branch is behind '%s'. Please pull changes.%b\n" "$RED" "$UPSTREAM" "$RESET"
  exit 1
  elif [ "$REMOTE" = "$BASE" ]; then
+ # Local is ahead of remote (need to push)
  printf "%b[WARN] Your branch is ahead of '%s'.%b\n" "$YELLOW" "$UPSTREAM" "$RESET"
  printf "Unpushed commits:\n"
  git log --oneline --graph --decorate "$UPSTREAM..HEAD"
  prompt_continue "Push changes to remote before releasing?"
  git push origin "$CURRENT_BRANCH"
  else
+ # Branches have diverged (need to merge or rebase)
  printf "%b[ERROR] Your branch has diverged from '%s'. Please reconcile.%b\n" "$RED" "$UPSTREAM" "$RESET"
  exit 1
  fi
@@ -182,7 +192,9 @@ do_release() {
  printf "Creating tag '%s' for version %s\n" "$TAG" "$CURRENT_VERSION"
  prompt_continue ""

- # check for gpg signing config
+ # Check if GPG signing is configured for git commits/tags
+ # If user.signingkey is set or commit.gpgsign is true, create a signed tag
+ # Signed tags provide cryptographic verification of release authenticity
  if git config --get user.signingkey >/dev/null 2>&1 || [ "$(git config --get commit.gpgsign)" = "true" ]; then
  printf "%b[INFO] GPG signing is enabled. Creating signed tag.%b\n" "$BLUE" "$RESET"
  git tag -s "$TAG" -m "Release $TAG"
@@ -197,17 +209,22 @@ do_release() {
  printf "\n%b=== Step 2: Push Tag to Remote ===%b\n" "$BLUE" "$RESET"
  printf "Pushing tag '%s' to origin will trigger the release workflow.\n" "$TAG"

- # Show what commits are in this tag
+ # Show what commits are in this tag compared to the last tag
+ # This helps users understand what changes are included in the release
  LAST_TAG=$(git describe --tags --abbrev=0 2>/dev/null || echo "")
  if [ -n "$LAST_TAG" ] && [ "$LAST_TAG" != "$TAG" ]; then
+ # Count commits between last tag and current tag
  COMMIT_COUNT=$(git rev-list "$LAST_TAG..$TAG" --count 2>/dev/null || echo "0")
  printf "Commits since %s: %s\n" "$LAST_TAG" "$COMMIT_COUNT"
  fi

  prompt_continue ""

+ # Push only the specific tag (not all tags) to trigger the release workflow
  git push origin "refs/tags/$TAG"

+ # Extract repository name from remote URL for constructing GitHub Actions link
+ # Converts git@github.com:user/repo.git or https://github.com/user/repo.git to user/repo
  REPO_URL=$(git remote get-url origin | sed 's/.*github.com[:/]\(.*\)\.git/\1/')
  printf "\n%b[SUCCESS] Release tag %s pushed to remote!%b\n" "$GREEN" "$TAG" "$RESET"
  printf "%b[INFO] The release workflow will now be triggered automatically.%b\n" "$BLUE" "$RESET"
@@ -15,6 +15,8 @@ HELP_TEMP=$(mktemp)

  # Generate the help output from Makefile
  # Strip ANSI color codes and filter out make[1] directory messages
+ # The sed command removes ANSI escape sequences (color codes) from the output
+ # The grep commands filter out make's directory change messages
  make help 2>/dev/null | \
  sed 's/\x1b\[[0-9;]*m//g' | \
  grep -v "^make\[" | \
@@ -25,6 +27,8 @@ make help 2>/dev/null | \
  # Using a temporary file to avoid awk escaping issues
  # Temporarily disable exit-on-error to handle pattern not found gracefully
  set +e
+ # The awk script processes the README.md file to find and replace the help section
+ # It looks for the marker pattern, preserves the structure, and inserts updated help text
  awk -v helpfile="$HELP_TEMP" '
  BEGIN {
  in_help_block = 0
@@ -32,6 +36,7 @@ BEGIN {
  }

  # Detect start of help output block
+ # This marker indicates where the Makefile help output should be inserted
  /^Run `make help` to see all available targets:$/ {
  print
  pattern_found = 1
@@ -40,6 +45,7 @@ BEGIN {
  print
  }
  getline
+ # If we find the code fence, start replacing the content
  if ($0 ~ /^```makefile$/) {
  print "```makefile"
  # Read and print help output from file
@@ -53,22 +59,26 @@ BEGIN {
  }

  # Skip lines inside the old help block
+ # Once we hit the closing code fence, we stop skipping lines
  in_help_block == 1 && /^```$/ {
  print "```"
  in_help_block = 0
  next
  }

+ # Continue skipping lines that are part of the old help output
  in_help_block == 1 {
  next
  }

- # Print all other lines
+ # Print all other lines (outside the help block) unchanged
  {
  print
  }

  END {
+ # If we never found the pattern, notify but don'\''t fail
+ # This allows the script to work even if README structure changes
  if (pattern_found == 0) {
  print "INFO: No help section marker found in README.md - skipping update" > "/dev/stderr"
  exit 2
@@ -77,6 +87,8 @@ END {
  ' "$README_FILE" > "$TEMP_FILE"

  # Check if awk succeeded or pattern was not found
+ # Exit code 2 means the pattern wasn't found (not an error)
+ # Other non-zero codes indicate genuine errors
  awk_status=$?
  set -e
  if [ $awk_status -eq 2 ]; then
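The awk program documented above locates a marker line in README.md and swaps the fenced ```makefile block that follows it with fresh `make help` output. A simplified Python sketch of that replacement logic, assuming the marker text shown in the comments and a fence that follows it after optional blank lines (the function name is hypothetical):

```python
from pathlib import Path

MARKER = "Run `make help` to see all available targets:"


def replace_help_block(readme: Path, help_text: str) -> bool:
    """Swap the ```makefile block that follows MARKER with fresh help output."""
    lines = readme.read_text().splitlines()
    out: list[str] = []
    i, found = 0, False
    while i < len(lines):
        line = lines[i]
        out.append(line)
        i += 1
        if line.strip() == MARKER:
            # Copy any blank lines between the marker and the opening fence.
            while i < len(lines) and lines[i].strip() == "":
                out.append(lines[i])
                i += 1
            if i < len(lines) and lines[i].strip() == "```makefile":
                found = True
                out.append("```makefile")
                out.extend(help_text.rstrip("\n").splitlines())
                out.append("```")
                i += 1
                # Skip the old block up to and including its closing fence.
                while i < len(lines) and lines[i].strip() != "```":
                    i += 1
                i += 1
    if found:
        readme.write_text("\n".join(out) + "\n")
    return found  # False mirrors the awk exit-code-2 "marker not found" path
```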
@@ -7,7 +7,7 @@
  #
  # 📋 Pipeline Phases:
  # 1. 🔍 Validate Tag - Check tag format and ensure release doesn't already exist
- # 2. 🏗️ Build - Build Python package with Hatch (if pyproject.toml exists)
+ # 2. 🏗️ Build - Build Python package with Hatch (if [build-system] is defined in pyproject.toml
  # 3. 📝 Draft Release - Create draft GitHub release with build artifacts
  # 4. 🚀 Publish to PyPI - Publish package using OIDC or custom feed
  # 5. 🐳 Publish Devcontainer - Build and publish devcontainer image (conditional)
@@ -76,7 +76,6 @@ jobs:
  uses: actions/checkout@v6
  with:
  fetch-depth: 0
- ref: ${{ github.ref }}

  - name: Set Tag Variable
  id: set_tag
@@ -130,16 +129,24 @@ jobs:
  fi
  echo "Version verified: $PROJECT_VERSION matches tag"

+ - name: Detect buildable Python package
+ id: buildable
+ run: |
+ if [[ -f pyproject.toml ]] && grep -q '^\[build-system\]' pyproject.toml; then
+ echo "buildable=true" >> "$GITHUB_OUTPUT"
+ else
+ echo "buildable=false" >> "$GITHUB_OUTPUT"
+ fi
+
  - name: Build
- if: hashFiles('pyproject.toml') != ''
+ if: steps.buildable.outputs.buildable == 'true'
  run: |
  printf "[INFO] Building package...\n"
  uvx hatch build


  - name: Upload dist artifact
- # not tested at runtime!
- if: hashFiles('pyproject.toml') != ''
+ if: steps.buildable.outputs.buildable == 'true'
  uses: actions/upload-artifact@v6
  with:
  name: dist
@@ -174,7 +181,7 @@ jobs:
  uses: actions/checkout@v6
  with:
  fetch-depth: 0
- ref: ${{ github.ref }}
+ #ref: ${{ github.ref }}

  - name: Download dist artifact
  uses: actions/download-artifact@v7
@@ -0,0 +1,26 @@
+ name: RHIZA VALIDATE
+
+ permissions:
+ contents: read
+
+ on:
+ push:
+ pull_request:
+ branches: [ main, master ]
+
+ jobs:
+ validation:
+ runs-on: ubuntu-latest
+ # don't run this in rhiza itself. Rhiza has no template.yml file.
+ if: ${{ github.repository != 'jebel-quant/rhiza' }}
+ container:
+ image: ghcr.io/astral-sh/uv:0.9.18-python3.12-trixie
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v6
+
+ - name: Validate Rhiza config
+ shell: bash
+ run: |
+ uvx rhiza validate .
@@ -24,18 +24,25 @@ def supported_versions() -> list[str]:
  Returns:
  list[str]: The supported versions (e.g., ["3.11", "3.12"]).
  """
+ # Load pyproject.toml using the tomllib standard library (Python 3.11+)
  with PYPROJECT.open("rb") as f:
  data = tomllib.load(f)

+ # Extract the requires-python field from project metadata
+ # This specifies the Python version constraint (e.g., ">=3.11")
  spec_str = data.get("project", {}).get("requires-python")
  if not spec_str:
  msg = "pyproject.toml: missing 'project.requires-python'"
  raise KeyError(msg)

+ # Parse the version specifier (e.g., ">=3.11,<3.14")
  spec = SpecifierSet(spec_str)

+ # Filter candidate versions to find which ones satisfy the constraint
  versions: list[str] = []
  for v in CANDIDATES:
+ # packaging.version.Version parses the version string
+ # The 'in' operator checks if the version satisfies the specifier
  if Version(v) in spec:
  versions.append(v)

@@ -47,6 +54,9 @@ def supported_versions() -> list[str]:


  if __name__ == "__main__":
+ # Check if pyproject.toml exists in the expected location
+ # If it exists, use it to determine supported versions
+ # Otherwise, fall back to returning all candidates (for edge cases)
  if PYPROJECT.exists():
  print(json.dumps(supported_versions()))
  else:
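Both version scripts rely on the `packaging` membership test described in the comments above: a `Version` is checked against a `SpecifierSet` built from `requires-python`. A small self-contained example of that filtering; the candidate list and constraint here are made up for illustration, not read from the package's pyproject.toml:

```python
from packaging.specifiers import SpecifierSet
from packaging.version import Version

# Hypothetical values; the real scripts read requires-python from pyproject.toml
# and define their own CANDIDATES list.
CANDIDATES = ["3.9", "3.10", "3.11", "3.12", "3.13"]
spec = SpecifierSet(">=3.11,<3.14")

supported = [v for v in CANDIDATES if Version(v) in spec]
print(supported)      # ['3.11', '3.12', '3.13']  -> shape of version_matrix.py output
print(supported[-1])  # '3.13'                    -> what version_max.py reports
```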
@@ -24,17 +24,22 @@ def max_supported_version() -> str:
  Returns:
  str: The maximum Python version (e.g., "3.13") satisfying the spec.
  """
+ # Load and parse pyproject.toml
  with PYPROJECT.open("rb") as f:
  data = tomllib.load(f)

+ # Extract and validate the requires-python constraint
  spec_str = data.get("project", {}).get("requires-python")
  if not spec_str:
  msg = "pyproject.toml: missing 'project.requires-python'"
  raise KeyError(msg)

+ # Create a SpecifierSet to check version compatibility
  spec = SpecifierSet(spec_str)
  max_version = None

+ # Iterate through candidates in order (ascending)
+ # The last matching version will be the maximum
  for v in CANDIDATES:
  if Version(v) in spec:
  max_version = v
@@ -47,6 +52,9 @@ def max_supported_version() -> str:


  if __name__ == "__main__":
+ # Check if pyproject.toml exists at the expected location
+ # If found, determine max version from requires-python
+ # Otherwise, default to 3.13 (latest stable as of this code)
  if PYPROJECT.exists():
  print(json.dumps(max_supported_version()))
  else:
@@ -0,0 +1,23 @@
+ name: SECURITY
+
+ permissions:
+ contents: read
+ security-events: write
+
+ on:
+ push:
+ branches: [ main, master ]
+ pull_request:
+ branches: [ main, master ]
+ # Add to .github/workflows/security.yml (partial example)
+ jobs:
+ security:
+ runs-on: ubuntu-latest
+ if: github.repository_visibility != 'private'
+ steps:
+ - uses: actions/checkout@v6
+ - uses: github/codeql-action/init@v4
+ with:
+ languages: python
+ - uses: github/codeql-action/autobuild@v4
+ - uses: github/codeql-action/analyze@v4