stochmat 1.0.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- stochmat-1.0.0/.github/workflows/_wheels.yml +112 -0
- stochmat-1.0.0/.github/workflows/development.yml +298 -0
- stochmat-1.0.0/.github/workflows/release.yml +92 -0
- stochmat-1.0.0/.github/workflows/report_coverage.yml +29 -0
- stochmat-1.0.0/.github/workflows/status.yml +129 -0
- stochmat-1.0.0/.github/workflows/test_release.yml +51 -0
- stochmat-1.0.0/.gitignore +148 -0
- stochmat-1.0.0/CMakeLists.txt +84 -0
- stochmat-1.0.0/COPYING +674 -0
- stochmat-1.0.0/COPYING.LESSER +165 -0
- stochmat-1.0.0/PKG-INFO +1060 -0
- stochmat-1.0.0/README.md +373 -0
- stochmat-1.0.0/pyproject.toml +176 -0
- stochmat-1.0.0/src/stochmat/__init__.py +76 -0
- stochmat-1.0.0/src/stochmat/_cython_sparse_stoch.pyx +977 -0
- stochmat-1.0.0/src/stochmat/_cython_subst.py +596 -0
- stochmat-1.0.0/src/stochmat/backends.py +139 -0
- stochmat-1.0.0/src/stochmat/fast.pyx +556 -0
- stochmat-1.0.0/src/stochmat/fast_subst.py +341 -0
- stochmat-1.0.0/src/stochmat/sparse_accumulator.cpp +65 -0
- stochmat-1.0.0/src/stochmat/sparse_accumulator.h +45 -0
- stochmat-1.0.0/src/stochmat/sparse_accumulator.pxd +41 -0
- stochmat-1.0.0/src/stochmat/sparse_stoch_mat.py +1366 -0
- stochmat-1.0.0/src/stochmat/testing.py +82 -0
- stochmat-1.0.0/tests/conftest.py +417 -0
- stochmat-1.0.0/tests/fast.py +47 -0
- stochmat-1.0.0/tests/test_cython_sparse_stoch.py +169 -0
- stochmat-1.0.0/tests/test_cython_subst.py +453 -0
- stochmat-1.0.0/tests/test_fast_fallback.py +248 -0
- stochmat-1.0.0/tests/test_mkl_parity.py +151 -0
- stochmat-1.0.0/tests/test_mkl_perf_parity.py +176 -0
- stochmat-1.0.0/tests/test_sparse_stoch_mat.py +670 -0
|
# Reusable workflow that builds wheels (cibuildwheel matrix) and an sdist.
#
# Called by:
# * release.yml -- on tag push, then publishes to PyPI
# * test_release.yml -- on workflow_dispatch, then publishes to TestPyPI
#
# Artifacts uploaded:
# * cibw-wheels-<os>-<arch> (one per matrix cell)
# * cibw-sdist (single tarball)
# Callers re-download with actions/download-artifact and a "cibw-*" pattern.

name: Build wheels (reusable)

on:
  workflow_call:
    inputs:
      strip-local-version:
        description: >-
          When true, build wheels and the sdist with
          ``SETUPTOOLS_SCM_LOCAL_SCHEME=no-local-version`` so the
          ``+gSHA`` local version segment is omitted. Required for
          uploads to TestPyPI (which rejects PEP 440 local versions per
          packaging.python.org); the default ``false`` keeps the SHA so
          local dev installs and PyPI releases (which are tagged anyway,
          so have no local segment) are unaffected.
        type: boolean
        required: false
        default: false

jobs:
  build_wheels:
    name: wheels ${{ matrix.os }} ${{ matrix.arch }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        include:
          # Linux
          - os: ubuntu-latest
            arch: x86_64
          - os: ubuntu-latest
            arch: aarch64
          # macOS: macos-15-intel = Intel runner, macos-14 = Apple Silicon.
          # Pinning by runner image (rather than cross-compiling from one)
          # gives native test execution and avoids ARCHFLAGS hacks. The
          # legacy ``macos-13`` label was deprecated by GitHub; jobs that
          # request it queue indefinitely.
          - os: macos-15-intel
            arch: x86_64
          - os: macos-14
            arch: arm64
          # Windows
          - os: windows-latest
            arch: AMD64

    steps:
      - uses: actions/checkout@v4
        with:
          # setuptools_scm needs the full history (or at least tags) to
          # resolve the version dynamically.
          fetch-depth: 0

      # QEMU is only needed for cross-arch Linux builds (aarch64 on x86 host).
      - name: Set up QEMU
        if: matrix.os == 'ubuntu-latest' && matrix.arch == 'aarch64'
        uses: docker/setup-qemu-action@v3
        with:
          platforms: arm64

      # Strip the setuptools_scm ``+gSHA`` local segment when the caller
      # asks for it (TestPyPI uploads). Writing to ``$GITHUB_ENV`` makes
      # the var visible to subsequent steps; ``CIBW_ENVIRONMENT_PASS_LINUX``
      # is what cibuildwheel uses to forward host env vars across the
      # manylinux container boundary on Linux runners (no-op on macOS /
      # Windows where the build runs natively and inherits env directly).
      - name: Configure local-version stripping
        if: inputs.strip-local-version
        shell: bash
        run: |
          echo "SETUPTOOLS_SCM_LOCAL_SCHEME=no-local-version" >> "$GITHUB_ENV"
          echo "CIBW_ENVIRONMENT_PASS_LINUX=SETUPTOOLS_SCM_LOCAL_SCHEME" >> "$GITHUB_ENV"

      - name: Build wheels
        uses: pypa/cibuildwheel@v2.21
        env:
          CIBW_ARCHS: ${{ matrix.arch }}

      - uses: actions/upload-artifact@v4
        with:
          name: cibw-wheels-${{ matrix.os }}-${{ matrix.arch }}
          path: ./wheelhouse/*.whl

  build_sdist:
    name: sdist
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Build sdist
        env:
          # See ``Configure local-version stripping`` in build_wheels;
          # sdist is built directly on the runner so plain step-level
          # env is enough (no container boundary to cross).
          SETUPTOOLS_SCM_LOCAL_SCHEME: ${{ inputs.strip-local-version && 'no-local-version' || '' }}
        run: pipx run build --sdist

      - uses: actions/upload-artifact@v4
        with:
          name: cibw-sdist
          path: dist/*.tar.gz
name: Development Workflow

on:
  pull_request:
    types: [opened, synchronize, reopened, edited, ready_for_review]
    branches:
      - main
permissions:
  pull-requests: write
  contents: write

jobs:
  determine-python-version:
    runs-on: ubuntu-latest
    outputs:
      version: ${{ steps.pyver.outputs.version }}
    steps:
      - uses: actions/checkout@v5

      - name: Determine minimum Python version from pyproject.toml
        id: pyver
        run: |
          py_req=$(sed -n 's/^[[:space:]]*requires-python[[:space:]]*=[[:space:]]*"\(.*\)".*/\1/p' pyproject.toml | head -n1)
          py_min=$(echo "$py_req" | sed -n 's/.*>=[[:space:]]*\([0-9]\+\.[0-9]\+\).*/\1/p')
          if [ -z "$py_min" ]; then
            echo "Could not parse minimum Python version from requires-python='$py_req'" >&2
            exit 1
          fi
          echo "version=$py_min" >> "$GITHUB_OUTPUT"
          echo "Resolved minimum Python: $py_min"

  setup-and-test:
    if: github.event.pull_request.draft == false
    runs-on: ubuntu-latest
    needs: [determine-python-version]
    steps:
      - uses: actions/checkout@v5

      - name: Set up Python ${{ needs.determine-python-version.outputs.version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ needs.determine-python-version.outputs.version }}

      - name: Install Intel MKL
        run: |
          sudo apt-get update
          sudo apt-get -y install intel-mkl

      - name: Install uv
        uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
          cache-dependency-glob: "pyproject.toml"

      - name: Set up Python
        run: uv python install ${{ needs.determine-python-version.outputs.version }}

      - name: Sync dependencies
        # Synchronizes base, mkl extra, and dev groups (testing-mkl/docs).
        # ``--all-extras`` covers [mkl]; ``--all-groups`` pulls
        # ``testing-mkl`` (which itself includes ``testing``).
        run: uv sync --all-extras --all-groups

      - name: Build wheel
        run: uv build

      - name: Upload wheel
        uses: actions/upload-artifact@v4
        with:
          name: wheel
          path: dist
          retention-days: 1

      - name: Lint with ruff
        run: uv run ruff check --select=ALL --output-format=github src/
        continue-on-error: true

      - name: Tests without memory tracking
        # Exclude memory tests (handled by ``test-memory``) and benchmark
        # tests (handled by ``mkl-benchmarks``). Parity tests stay in.
        run: |
          uv run pytest --junitxml=junit/test-results.xml --cov=stochmat --durations=0 -k 'not _memory' -m 'not benchmark'
        env:
          COVERAGE_FILE: ".coverage.no_memory"

      - name: Store coverage file
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: coverage-no_memory
          path: .coverage.no_memory
          include-hidden-files: true

  test-memory:
    if: github.event.pull_request.draft == false
    runs-on: ubuntu-latest
    needs: [determine-python-version]
    steps:
      - uses: actions/checkout@v5

      - name: Set up Python ${{ needs.determine-python-version.outputs.version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ needs.determine-python-version.outputs.version }}

      - name: Install Intel MKL
        run: |
          sudo apt-get update
          sudo apt-get -y install intel-mkl

      - name: Install uv
        uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
          cache-dependency-glob: "pyproject.toml"

      - name: Set up Python
        run: uv python install ${{ needs.determine-python-version.outputs.version }}

      - name: Tests with memory tracking
        # Re-syncing is optimized by uv's global cache. Exclude
        # ``benchmark``-marked tests; they are handled by the dedicated
        # ``mkl-benchmarks`` job.
        run: |
          uv sync --all-extras --all-groups
          uv run pytest --junitxml=junit/test-results.xml --cov=stochmat --durations=0 --memray --most-allocations=0 -k '_memory' -m 'not benchmark'
        env:
          COVERAGE_FILE: ".coverage.memory"

      - name: Store coverage file
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: coverage-memory
          path: .coverage.memory
          include-hidden-files: true

  test-pure-python:
    if: github.event.pull_request.draft == false
    runs-on: ubuntu-latest
    needs: [determine-python-version]
    env:
      # Disable building the Cython extensions; the package falls back to the
      # pure-Python implementations in ``_cython_subst`` and ``fast_subst``.
      STOCHMAT_BUILD_EXTENSIONS: "0"
      UV_NO_SYNC: "1"  # prevent from fetching dev group
    steps:
      - uses: actions/checkout@v5

      - name: Set up Python ${{ needs.determine-python-version.outputs.version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ needs.determine-python-version.outputs.version }}

      - name: Install uv
        uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
          cache-dependency-glob: "pyproject.toml"

      - name: Set up Python
        run: uv python install ${{ needs.determine-python-version.outputs.version }}

      - name: Sync dependencies (no Cython extensions, no MKL)
        # MKL extra is intentionally omitted: the pure-Python build must
        # not require system MKL or the optional ``sparse_dot_mkl``
        # package -- the runtime fail-fast in
        # ``stochmat.sparse_stoch_mat`` would otherwise raise
        # ``ImportError`` because no MKL libs are installed on this job.
        # Use the ``testing`` group explicitly (NOT ``testing-mkl``).
        run: uv sync --no-default-groups --group testing

      - name: Verify compiled extensions are absent
        run: |
          uv run python -c "
          import importlib, sys
          for mod in ('stochmat._cython_sparse_stoch', 'stochmat.fast'):
              try:
                  m = importlib.import_module(mod)
              except ImportError:
                  print(f'{mod}: not importable (expected for fast/_cython_sparse_stoch C-extension)')
                  continue
              # ``stochmat.fast`` is rebound to the pure-Python fallback when
              # the compiled extension is missing; check for that.
              if getattr(m, '__file__', '').endswith('.py'):
                  print(f'{mod}: pure-Python fallback active ({m.__file__})')
              else:
                  print(f'{mod}: compiled extension loaded ({m.__file__})', file=sys.stderr)
                  sys.exit(1)
          import stochmat
          assert not stochmat.backends.fast, 'stochmat.backends.fast should be False without compiled fast extension'
          print('stochmat.backends.summary() =', stochmat.backends.summary())
          "

      - name: Tests (pure-Python, parity tests excluded)
        # Parity tests require both the compiled and the fallback modules to
        # be importable; tests that depend on the ``fast_modules`` fixture
        # (e.g. ``test_fast_fallback.py``) auto-skip via the importorskip
        # guard inside that fixture. Memory tests are handled by the
        # dedicated ``test-memory`` job; benchmark tests by
        # ``mkl-benchmarks``.
        run: |
          uv run pytest \
            --junitxml=junit/test-results.xml \
            --cov=stochmat \
            --durations=0 \
            -k 'not parity and not _memory' \
            -m 'not benchmark'
        env:
          COVERAGE_FILE: ".coverage.pure_python"

      - name: Store coverage file
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: coverage-pure_python
          path: .coverage.pure_python
          include-hidden-files: true

  mkl-benchmarks:
    # MKL speed/memory parity benchmarks. Non-blocking: this job uploads
    # benchmark JSON + per-test report sections as artifacts but its
    # outcome does not gate the PR (continue-on-error: true).
    if: github.event.pull_request.draft == false
    runs-on: ubuntu-latest
    needs: [determine-python-version]
    continue-on-error: true
    steps:
      - uses: actions/checkout@v5

      - name: Set up Python ${{ needs.determine-python-version.outputs.version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ needs.determine-python-version.outputs.version }}

      - name: Install Intel MKL
        run: |
          sudo apt-get update
          sudo apt-get -y install intel-mkl

      - name: Install uv
        uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
          cache-dependency-glob: "pyproject.toml"

      - name: Set up Python
        run: uv python install ${{ needs.determine-python-version.outputs.version }}

      - name: Sync dependencies (testing-mkl group, includes pytest-benchmark)
        run: uv sync --all-extras --all-groups

      - name: Run MKL benchmark + memory parity tests
        run: |
          uv run pytest \
            -m benchmark \
            --benchmark-json=benchmark.json \
            --durations=0 \
            -v
        # Test-internal asserts (2x speed backstop, 2.5x memory backstop)
        # may legitimately fail on noisy CI runners; surface those without
        # failing the PR.

      - name: Upload benchmark results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: mkl-benchmark-results
          path: benchmark.json
          if-no-files-found: warn

  coverage:
    name: combine-coverage
    runs-on: ubuntu-24.04
    needs: [setup-and-test, test-memory, test-pure-python]
    if: always() && github.event.pull_request.draft == false
    permissions:
      pull-requests: write
      contents: write
    steps:
      - uses: actions/checkout@v5
      - uses: actions/download-artifact@v4
        with:
          pattern: coverage-*
          merge-multiple: true
      - name: Coverage comment
        id: coverage_comment
        uses: py-cov-action/python-coverage-comment-action@v3
        with:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          MERGE_COVERAGE_FILES: true

      - name: Store Pull Request comment
        uses: actions/upload-artifact@v4
        if: steps.coverage_comment.outputs.COMMENT_FILE_WRITTEN == 'true'
        with:
          name: python-coverage-comment-action
          path: python-coverage-comment-action.txt
# Release workflow: builds wheels + sdist for a SemVer tag, creates a GitHub
# Release with auto-generated notes, and publishes to PyPI via OIDC trusted
# publishing. The PyPI publish step is gated on the ``pypi`` GitHub
# environment, which is configured (in repo settings) to require manual
# reviewer approval.
#
# Tag conventions (no ``v`` prefix; SemVer-style):
#   1.2.3     -- final release
#   1.2.3-rc1 -- pre-releases (alpha / beta / rc; PEP 440 normalises
#                ``1.2.3-rc1`` to ``1.2.3rc1`` for the wheel version)
#
# Prerequisites (configured outside this workflow):
# * PyPI trusted publisher: workflow=release.yml, environment=pypi
# * GitHub environment ``pypi`` with required reviewers
# * Tag protection rule for the patterns below

name: Release

on:
  push:
    tags:
      - '[0-9]+.[0-9]+.[0-9]+'
      - '[0-9]+.[0-9]+.[0-9]+-[abr]*'

permissions:
  contents: read

jobs:
  guard_main:
    # Refuse to release from a tag that does not point at a commit on main.
    # Prevents accidental releases from feature branches or stale tags.
    name: Verify tag is on main
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Check tag commit is an ancestor of origin/main
        run: |
          git fetch origin main
          if ! git merge-base --is-ancestor "$GITHUB_SHA" origin/main; then
            echo "::error::Tag commit $GITHUB_SHA is not on origin/main; refusing to release."
            exit 1
          fi

  build:
    name: Build artifacts
    needs: guard_main
    uses: ./.github/workflows/_wheels.yml

  create_github_release:
    name: Create GitHub Release
    needs: build
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - uses: actions/download-artifact@v4
        with:
          pattern: cibw-*
          path: dist
          merge-multiple: true

      - name: Create release
        uses: softprops/action-gh-release@v2
        with:
          generate_release_notes: true
          files: dist/*
          # Mark pre-releases (anything with a/b/rc/post in the tag)
          # appropriately so PyPI/users see the right channel.
          prerelease: ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b') || contains(github.ref_name, 'rc') }}

  publish_pypi:
    name: Publish to PyPI
    needs: create_github_release
    runs-on: ubuntu-latest
    environment:
      name: pypi
      url: https://pypi.org/p/stochmat
    permissions:
      id-token: write  # OIDC for trusted publishing
      attestations: write  # sigstore provenance attestations
    steps:
      - uses: actions/download-artifact@v4
        with:
          pattern: cibw-*
          path: dist
          merge-multiple: true

      - uses: pypa/gh-action-pypi-publish@release/v1
        with:
          attestations: true
name: Post coverage comment

on:
  workflow_run:
    workflows: ["Development Workflow"]
    types:
      - completed

jobs:
  test:
    name: Run tests & display coverage
    runs-on: ubuntu-24.04
    # if: github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success'
    if: github.event.workflow_run.event == 'pull_request'
    permissions:
      pull-requests: write
      contents: write
      actions: read
    steps:
      # DO NOT run actions/checkout here, for security reasons
      # For details, refer to https://securitylab.github.com/research/github-actions-preventing-pwn-requests/
      - name: Post comment
        uses: py-cov-action/python-coverage-comment-action@v3
        with:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          GITHUB_PR_RUN_ID: ${{ github.event.workflow_run.id }}
          # Update those if you changed the default values:
          # COMMENT_ARTIFACT_NAME: python-coverage-comment-action
          # COMMENT_FILENAME: python-coverage-comment-action.txt
name: Status

on:
  push:
    branches:
      - main

jobs:
  determine-python-version:
    runs-on: ubuntu-latest
    outputs:
      min-version: ${{ steps.pyver.outputs.version }}
    steps:
      - uses: actions/checkout@v5

      - name: Determine minimum Python version from pyproject.toml
        id: pyver
        run: |
          py_req=$(sed -n 's/^[[:space:]]*requires-python[[:space:]]*=[[:space:]]*"\(.*\)".*/\1/p' pyproject.toml | head -n1)
          py_min=$(echo "$py_req" | sed -n 's/.*>=[[:space:]]*\([0-9]\+\.[0-9]\+\).*/\1/p')
          if [ -z "$py_min" ]; then
            echo "Could not parse minimum Python version from requires-python='$py_req'" >&2
            exit 1
          fi
          echo "version=$py_min" >> "$GITHUB_OUTPUT"
          echo "Resolved minimum Python: $py_min"

  unit-tests:
    runs-on: ubuntu-24.04
    needs: [determine-python-version]
    strategy:
      matrix:
        pythonV: ["3.10", "3.11", "3.12", "3.13", "3.14"]
      fail-fast: false
    steps:
      - uses: actions/checkout@v5
        with:
          lfs: 'true'
          submodules: recursive

      - name: Set up Python ${{ matrix.pythonV }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.pythonV }}

      - name: Install Intel MKL
        run: |
          sudo apt-get update
          sudo apt-get -y install intel-mkl

      - name: Install uv
        uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
          cache-dependency-glob: "pyproject.toml"

      - name: Set up Python
        run: uv python install ${{ matrix.pythonV }}

      - name: Sync dependencies
        # Synchronizes base, mkl extra, and dev groups (testing/docs)
        run: uv sync --all-extras --all-groups

      - name: Lint with ruff
        run: uv run ruff check --select=ALL --output-format=github src/
        continue-on-error: true

      - name: Tests without memory tracking
        run: |
          uv run pytest --junitxml=junit/test-results.xml --cov=stochmat --durations=0 -k 'not _memory'
        env:
          COVERAGE_FILE: ".coverage.no_memory"

      - name: Store coverage file
        if: matrix.pythonV == needs.determine-python-version.outputs.min-version
        uses: actions/upload-artifact@v4
        with:
          name: coverage-no_memory
          path: .coverage.no_memory
          include-hidden-files: true

      - name: Tests with memory tracking
        # Re-syncing is optimized by uv's global cache
        run: |
          uv run pytest --junitxml=junit/test-results.xml --cov=stochmat --durations=0 --memray --most-allocations=0 -k '_memory'
        env:
          COVERAGE_FILE: ".coverage.memory"

      - name: Store coverage file
        if: matrix.pythonV == needs.determine-python-version.outputs.min-version
        uses: actions/upload-artifact@v4
        with:
          name: coverage-memory
          path: .coverage.memory
          include-hidden-files: true

      # Update per-version pass/fail badge in gist
      - name: Update Python ${{ matrix.pythonV }} status badge
        if: always()
        # pinned to v1.8.0
        uses: schneegans/dynamic-badges-action@0e50b8bad39e7e1afd3e4e9c2b7dd145fad07501
        with:
          auth: ${{ secrets.GIST_SECRET }}
          gistID: ${{ vars.STATUS_GIST_ID }}
          filename: python-${{ matrix.pythonV }}.json
          label: "Python ${{ matrix.pythonV }}"
          message: ${{ job.status == 'success' && 'passing' || 'failing' }}
          color: ${{ job.status == 'success' && 'brightgreen' || 'red' }}

  coverage-badge:
    name: coverage-badge
    runs-on: ubuntu-24.04
    needs: [unit-tests]
    if: always()
    permissions:
      contents: write
    steps:
      - uses: actions/checkout@v5
      - uses: actions/download-artifact@v4
        id: download
        with:
          pattern: coverage-*
          merge-multiple: true

      - name: Update coverage badge
        uses: py-cov-action/python-coverage-comment-action@v3
        with:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          MERGE_COVERAGE_FILES: true