ferromic-0.1.0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- ferromic-0.1.0/.cargo/config.toml +7 -0
- ferromic-0.1.0/.github/workflows/CI.yml +223 -0
- ferromic-0.1.0/.github/workflows/benchmarks.yml +153 -0
- ferromic-0.1.0/.github/workflows/fixer.yml +177 -0
- ferromic-0.1.0/.github/workflows/rust.yml +110 -0
- ferromic-0.1.0/.gitignore +25 -0
- ferromic-0.1.0/Cargo.lock +3457 -0
- ferromic-0.1.0/Cargo.toml +96 -0
- ferromic-0.1.0/LICENSE.md +4 -0
- ferromic-0.1.0/PKG-INFO +366 -0
- ferromic-0.1.0/README.md +339 -0
- ferromic-0.1.0/benches/pca.rs +90 -0
- ferromic-0.1.0/cds/axt_to_phy.py +1141 -0
- ferromic-0.1.0/cds/combine_phy.py +282 -0
- ferromic-0.1.0/cds/omega_test.py +2070 -0
- ferromic-0.1.0/data/README.md +1 -0
- ferromic-0.1.0/data/balanced_recurrence_results.tsv +293 -0
- ferromic-0.1.0/data/callset.tsv +400 -0
- ferromic-0.1.0/data/md5sum.txt +5 -0
- ferromic-0.1.0/data/passed_snvs.txt +138414 -0
- ferromic-0.1.0/data/significant_heritability_diseases.tsv +2291 -0
- ferromic-0.1.0/data/vcf_list.txt +169262 -0
- ferromic-0.1.0/deny.toml +5 -0
- ferromic-0.1.0/install.sh +47 -0
- ferromic-0.1.0/phewas/README.md +20 -0
- ferromic-0.1.0/phewas/extra/family.py +578 -0
- ferromic-0.1.0/phewas/extra/score.py +927 -0
- ferromic-0.1.0/phewas/iox.py +883 -0
- ferromic-0.1.0/phewas/models.py +3504 -0
- ferromic-0.1.0/phewas/pheno.py +1130 -0
- ferromic-0.1.0/phewas/pipes.py +693 -0
- ferromic-0.1.0/phewas/run.py +1215 -0
- ferromic-0.1.0/phewas/test_setup.sh +4 -0
- ferromic-0.1.0/phewas/testing.py +381 -0
- ferromic-0.1.0/phewas/tests.py +1126 -0
- ferromic-0.1.0/pyproject.toml +37 -0
- ferromic-0.1.0/rust-toolchain.toml +2 -0
- ferromic-0.1.0/scripts/check_fixed_sites.py +305 -0
- ferromic-0.1.0/scripts/deduplicate.py +16 -0
- ferromic-0.1.0/scripts/dnds.py +1441 -0
- ferromic-0.1.0/scripts/phy_to_fasta.py +192 -0
- ferromic-0.1.0/src/README.md +86 -0
- ferromic-0.1.0/src/lib.rs +2239 -0
- ferromic-0.1.0/src/main.rs +286 -0
- ferromic-0.1.0/src/merge.rs +612 -0
- ferromic-0.1.0/src/parse.rs +1115 -0
- ferromic-0.1.0/src/pca.rs +1096 -0
- ferromic-0.1.0/src/process.rs +3767 -0
- ferromic-0.1.0/src/progress.rs +948 -0
- ferromic-0.1.0/src/pybenches/__init__.py +7 -0
- ferromic-0.1.0/src/pybenches/conftest.py +37 -0
- ferromic-0.1.0/src/pybenches/test_population_pca_benchmarks.py +709 -0
- ferromic-0.1.0/src/pybenches/test_population_statistics_benchmarks.py +855 -0
- ferromic-0.1.0/src/pytests/__init__.py +0 -0
- ferromic-0.1.0/src/pytests/test_diversity_integration.py +212 -0
- ferromic-0.1.0/src/pytests/test_ferromic.py +98 -0
- ferromic-0.1.0/src/pytests/test_hudson_fst_integration.py +152 -0
- ferromic-0.1.0/src/run_vcf.rs +223 -0
- ferromic-0.1.0/src/stats.rs +4437 -0
- ferromic-0.1.0/src/tests/filter_tests.rs +247 -0
- ferromic-0.1.0/src/tests/hudson_fst_tests.rs +1290 -0
- ferromic-0.1.0/src/tests/interval_tests.rs +98 -0
- ferromic-0.1.0/src/tests/stats_tests.rs +1667 -0
- ferromic-0.1.0/src/transcripts.rs +1318 -0
- ferromic-0.1.0/stats/CDS_identical_model.py +238 -0
- ferromic-0.1.0/stats/CDS_plots.py +1458 -0
- ferromic-0.1.0/stats/OR_matrix.py +442 -0
- ferromic-0.1.0/stats/_inv_common.py +96 -0
- ferromic-0.1.0/stats/af_pi.py +272 -0
- ferromic-0.1.0/stats/analyze_activations.py +1 -0
- ferromic-0.1.0/stats/best_tag.py +215 -0
- ferromic-0.1.0/stats/category_per_site.py +1251 -0
- ferromic-0.1.0/stats/category_per_site_normed.py +1342 -0
- ferromic-0.1.0/stats/cds_differences.py +831 -0
- ferromic-0.1.0/stats/conserve_chords.py +738 -0
- ferromic-0.1.0/stats/conserve_inv_dir.py +320 -0
- ferromic-0.1.0/stats/cross_violins.py +229 -0
- ferromic-0.1.0/stats/dist_fst_by_type.py +260 -0
- ferromic-0.1.0/stats/distance_diversity.py +241 -0
- ferromic-0.1.0/stats/diversity_dir_inv.py +255 -0
- ferromic-0.1.0/stats/diversity_scatterplot.py +114 -0
- ferromic-0.1.0/stats/dnds_kde.py +640 -0
- ferromic-0.1.0/stats/each_per_site.py +426 -0
- ferromic-0.1.0/stats/estimators_fst.py +341 -0
- ferromic-0.1.0/stats/events_rate_diversity.py +605 -0
- ferromic-0.1.0/stats/fixed.py +495 -0
- ferromic-0.1.0/stats/forest.py +592 -0
- ferromic-0.1.0/stats/fst_violins.py +364 -0
- ferromic-0.1.0/stats/get_codes.py +1087 -0
- ferromic-0.1.0/stats/imputation_plot.py +198 -0
- ferromic-0.1.0/stats/infer.py +446 -0
- ferromic-0.1.0/stats/inv_dir_recur.py +1201 -0
- ferromic-0.1.0/stats/inv_dir_recur_model.py +1125 -0
- ferromic-0.1.0/stats/inv_dir_recur_violins.py +467 -0
- ferromic-0.1.0/stats/inversion_phewas_report.py +677 -0
- ferromic-0.1.0/stats/inversion_table.py +304 -0
- ferromic-0.1.0/stats/length_distribution.py +39 -0
- ferromic-0.1.0/stats/linked.py +780 -0
- ferromic-0.1.0/stats/manhattan_phe.py +645 -0
- ferromic-0.1.0/stats/manhattan_plot.py +508 -0
- ferromic-0.1.0/stats/matrix_plots.py +440 -0
- ferromic-0.1.0/stats/middle_vs_flank_fst.py +1381 -0
- ferromic-0.1.0/stats/middle_vs_flank_pi.py +1183 -0
- ferromic-0.1.0/stats/middle_vs_flank_pi_recurrence.py +1305 -0
- ferromic-0.1.0/stats/num_events_diversity.py +370 -0
- ferromic-0.1.0/stats/orientation_pcs.py +499 -0
- ferromic-0.1.0/stats/overall_fst_by_type.py +1104 -0
- ferromic-0.1.0/stats/overall_groups_dnds.py +299 -0
- ferromic-0.1.0/stats/pack_models.py +123 -0
- ferromic-0.1.0/stats/pairwise_matrix_test.py +1466 -0
- ferromic-0.1.0/stats/per_gene_cds_differences_jackknife.py +510 -0
- ferromic-0.1.0/stats/permute.py +522 -0
- ferromic-0.1.0/stats/phelist.py +85 -0
- ferromic-0.1.0/stats/pls_patch.py +1114 -0
- ferromic-0.1.0/stats/prepare_data_for_infer.py +310 -0
- ferromic-0.1.0/stats/ranged_volcano.py +382 -0
- ferromic-0.1.0/stats/recur_conservation.py +993 -0
- ferromic-0.1.0/stats/recur_diversity.py +391 -0
- ferromic-0.1.0/stats/region_descriptive.py +188 -0
- ferromic-0.1.0/stats/regions_plot.py +149 -0
- ferromic-0.1.0/stats/replicate_plots.py +150 -0
- ferromic-0.1.0/stats/replicate_stats.py +11 -0
- ferromic-0.1.0/stats/shuffle_coords.py +224 -0
- ferromic-0.1.0/stats/snv_list_acaf_download.py +862 -0
- ferromic-0.1.0/stats/stats_table.py +474 -0
- ferromic-0.1.0/stats/tagged.py +662 -0
- ferromic-0.1.0/stats/top_n_pi.py +160 -0
- ferromic-0.1.0/stats/vcf_snv_list.py +271 -0
- ferromic-0.1.0/stats/visualize_pca.py +222 -0
- ferromic-0.1.0/stats/volcano.py +455 -0
- ferromic-0.1.0/stats/wald_ci.py +176 -0
ferromic-0.1.0/.github/workflows/CI.yml
@@ -0,0 +1,223 @@
+name: CI
+
+on:
+  push:
+    branches:
+      - main
+      - master
+    tags:
+      - '*'
+  pull_request:
+  workflow_dispatch:
+    inputs:
+      ci_targets:
+        description: Select which platforms to build
+        type: choice
+        default: default
+        options:
+          - default
+          - all
+
+permissions:
+  contents: read
+
+jobs:
+  linux:
+    runs-on: ${{ matrix.platform.runner }}
+    strategy:
+      matrix:
+        platform: >-
+          ${{ fromJson(
+          (startsWith(github.ref, 'refs/tags/') || (github.event_name == 'workflow_dispatch' && github.event.inputs.ci_targets == 'all'))
+          && '[{"runner":"ubuntu-22.04","target":"x86_64","openblas_target":"SANDYBRIDGE"},{"runner":"ubuntu-22.04","target":"x86","openblas_target":"ATOM"},{"runner":"ubuntu-22.04","target":"aarch64","openblas_target":"ARMV8"},{"runner":"ubuntu-22.04","target":"armv7","openblas_target":"ARMV7"},{"runner":"ubuntu-22.04","target":"ppc64le","openblas_target":"POWER9"}]'
+          || '[{"runner":"ubuntu-22.04","target":"x86_64","openblas_target":"SANDYBRIDGE"}]'
+          ) }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
+        with:
+          python-version: 3.x
+      - name: Install Rust toolchain
+        uses: dtolnay/rust-toolchain@nightly
+      - name: Install build dependencies
+        if: ${{ matrix.platform.target == 'x86_64' }}
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y build-essential gfortran perl wget
+          perl -MIPC::Cmd -e1 >/dev/null 2>&1 || (wget -O - https://cpanmin.us | perl - --notest IPC::Cmd)
+      - name: Cargo tests
+        if: ${{ matrix.platform.target == 'x86_64' }}
+        env:
+          OPENBLAS_TARGET: ${{ matrix.platform.openblas_target }}
+        run: cargo test --release
+      - name: Work around missing s390x assembler support
+        if: ${{ matrix.platform.target == 's390x' }}
+        run: |
+          echo "OPENSSL_NO_ASM=1" >> $GITHUB_ENV
+          echo "OPENSSL_SRC_CONFIGURE_ARGS=no-asm" >> $GITHUB_ENV
+      - name: Build wheels
+        uses: PyO3/maturin-action@v1
+        env:
+          OPENSSL_STATIC: 1
+          OPENSSL_VENDORED: 1
+          OPENBLAS_TARGET: ${{ matrix.platform.openblas_target }}
+        with:
+          target: ${{ matrix.platform.target }}
+          args: --release --out dist --find-interpreter
+          sccache: ${{ !startsWith(github.ref, 'refs/tags/') }}
+          manylinux: auto
+          rust-toolchain: nightly
+          before-script-linux: |
+            if command -v microdnf >/dev/null 2>&1; then
+              microdnf install -y gcc gcc-c++ gcc-gfortran make perl perl-IPC-Cmd wget
+            elif command -v dnf >/dev/null 2>&1; then
+              dnf install -y gcc gcc-c++ gcc-gfortran make perl perl-IPC-Cmd wget
+            elif command -v yum >/dev/null 2>&1; then
+              yum install -y gcc gcc-c++ gcc-gfortran make perl perl-IPC-Cmd wget
+            elif command -v apt-get >/dev/null 2>&1; then
+              apt-get update
+              apt-get install -y build-essential gfortran perl wget
+            elif command -v apk >/dev/null 2>&1; then
+              apk add --no-cache build-base gfortran perl perl-utils wget
+            fi
+            perl -MIPC::Cmd -e1 >/dev/null 2>&1 || (wget -O - https://cpanmin.us | perl - --notest IPC::Cmd)
+
+      - name: Upload wheels
+        uses: actions/upload-artifact@v4
+        with:
+          name: wheels-linux-${{ matrix.platform.target }}
+          path: dist
+
+  musllinux:
+    if: ${{ startsWith(github.ref, 'refs/tags/') || (github.event_name == 'workflow_dispatch' && github.event.inputs.ci_targets == 'all') }}
+    runs-on: ${{ matrix.platform.runner }}
+    strategy:
+      matrix:
+        platform: >-
+          ${{ fromJson('[{"runner":"ubuntu-22.04","target":"x86_64","openblas_target":"SANDYBRIDGE"},{"runner":"ubuntu-22.04","target":"x86","openblas_target":"ATOM"},{"runner":"ubuntu-22.04","target":"aarch64","openblas_target":"ARMV8"},{"runner":"ubuntu-22.04","target":"armv7","openblas_target":"ARMV7"}]') }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
+        with:
+          python-version: 3.x
+      - name: Build wheels
+        uses: PyO3/maturin-action@v1
+        env:
+          OPENSSL_STATIC: 1
+          OPENSSL_VENDORED: 1
+          OPENBLAS_TARGET: ${{ matrix.platform.openblas_target }}
+        with:
+          target: ${{ matrix.platform.target }}
+          args: --release --out dist --find-interpreter
+          sccache: ${{ !startsWith(github.ref, 'refs/tags/') }}
+          manylinux: musllinux_1_2
+          rust-toolchain: nightly
+          before-script-linux: |
+            if command -v microdnf >/dev/null 2>&1; then
+              microdnf install -y gcc gcc-c++ gcc-gfortran make perl perl-IPC-Cmd wget
+            elif command -v dnf >/dev/null 2>&1; then
+              dnf install -y gcc gcc-c++ gcc-gfortran make perl perl-IPC-Cmd wget
+            elif command -v yum >/dev/null 2>&1; then
+              yum install -y gcc gcc-c++ gcc-gfortran make perl perl-IPC-Cmd wget
+            elif command -v apt-get >/dev/null 2>&1; then
+              apt-get update
+              apt-get install -y build-essential gfortran perl wget
+            elif command -v apk >/dev/null 2>&1; then
+              apk add --no-cache build-base gfortran perl perl-utils wget
+            fi
+            perl -MIPC::Cmd -e1 >/dev/null 2>&1 || (wget -O - https://cpanmin.us | perl - --notest IPC::Cmd)
+      - name: Upload wheels
+        uses: actions/upload-artifact@v4
+        with:
+          name: wheels-musllinux-${{ matrix.platform.target }}
+          path: dist
+
+  windows:
+    if: ${{ startsWith(github.ref, 'refs/tags/') || (github.event_name == 'workflow_dispatch' && github.event.inputs.ci_targets == 'all') }}
+    runs-on: ${{ matrix.platform.runner }}
+    strategy:
+      matrix:
+        platform: >-
+          ${{ fromJson('[{"runner":"windows-latest","target":"x64","vcpkg_triplet":"x64-windows"},{"runner":"windows-latest","target":"x86","vcpkg_triplet":"x86-windows"}]') }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
+        with:
+          python-version: 3.x
+          architecture: ${{ matrix.platform.target }}
+      - name: Install OpenBLAS dependencies
+        shell: pwsh
+        run: |
+          $vcpkgRoot = Join-Path $Env:USERPROFILE "vcpkg"
+          if (-Not (Test-Path $vcpkgRoot)) {
+            git clone https://github.com/microsoft/vcpkg.git $vcpkgRoot
+            & (Join-Path $vcpkgRoot "bootstrap-vcpkg.bat") -disableMetrics
+          }
+          & (Join-Path $vcpkgRoot "vcpkg.exe") install openblas --triplet ${{ matrix.platform.vcpkg_triplet }}
+          "VCPKG_ROOT=$vcpkgRoot" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
+          "VCPKG_DEFAULT_TRIPLET=${{ matrix.platform.vcpkg_triplet }}" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
+      - name: Build wheels
+        uses: PyO3/maturin-action@v1
+        with:
+          target: ${{ matrix.platform.target }}
+          args: --release --out dist --find-interpreter
+          sccache: ${{ !startsWith(github.ref, 'refs/tags/') }}
+          rust-toolchain: nightly
+      - name: Upload wheels
+        uses: actions/upload-artifact@v4
+        with:
+          name: wheels-windows-${{ matrix.platform.target }}
+          path: dist
+
+  macos:
+    if: ${{ startsWith(github.ref, 'refs/tags/') || (github.event_name == 'workflow_dispatch' && github.event.inputs.ci_targets == 'all') }}
+    runs-on: ${{ matrix.platform.runner }}
+    strategy:
+      matrix:
+        platform: >-
+          ${{ fromJson('[{"runner":"macos-13","target":"x86_64"},{"runner":"macos-14","target":"aarch64"}]') }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
+        with:
+          python-version: 3.x
+      - name: Build wheels
+        uses: PyO3/maturin-action@v1
+        with:
+          target: ${{ matrix.platform.target }}
+          args: --release --out dist --find-interpreter
+          sccache: ${{ !startsWith(github.ref, 'refs/tags/') }}
+          rust-toolchain: nightly
+      - name: Upload wheels
+        uses: actions/upload-artifact@v4
+        with:
+          name: wheels-macos-${{ matrix.platform.target }}
+          path: dist
+
+  release:
+    name: Release
+    runs-on: ubuntu-latest
+    if: ${{ startsWith(github.ref, 'refs/tags/') || github.event_name == 'workflow_dispatch' }}
+    needs: [linux, musllinux, windows, macos]
+    permissions:
+      # Use to sign the release artifacts
+      id-token: write
+      # Used to upload release artifacts
+      contents: write
+      # Used to generate artifact attestation
+      attestations: write
+    steps:
+      - uses: actions/download-artifact@v4
+      - name: Generate artifact attestation
+        uses: actions/attest-build-provenance@v1
+        with:
+          subject-path: 'wheels-*/*'
+      - name: Publish to PyPI
+        if: ${{ startsWith(github.ref, 'refs/tags/') }}
+        uses: PyO3/maturin-action@v1
+        env:
+          MATURIN_PYPI_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
+        with:
+          command: upload
+          args: --non-interactive --skip-existing wheels-*/*
+          rust-toolchain: nightly
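
The linux job's platform matrix is produced by fromJson() from the inline JSON strings above: the long payload is used for tag builds and manual all-target runs, the single-entry payload otherwise. A minimal Python sketch, illustrative only, of what the tag-build payload decodes to:

    import json

    # Matrix payload copied verbatim from the workflow expression above.
    tag_matrix = (
        '[{"runner":"ubuntu-22.04","target":"x86_64","openblas_target":"SANDYBRIDGE"},'
        '{"runner":"ubuntu-22.04","target":"x86","openblas_target":"ATOM"},'
        '{"runner":"ubuntu-22.04","target":"aarch64","openblas_target":"ARMV8"},'
        '{"runner":"ubuntu-22.04","target":"armv7","openblas_target":"ARMV7"},'
        '{"runner":"ubuntu-22.04","target":"ppc64le","openblas_target":"POWER9"}]'
    )

    for entry in json.loads(tag_matrix):
        # Each dict becomes one matrix.platform combination of the linux job.
        print(entry["target"], "->", entry["openblas_target"])
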
ferromic-0.1.0/.github/workflows/benchmarks.yml
@@ -0,0 +1,153 @@
+name: Benchmarks
+
+on:
+  push:
+    branches:
+      - main
+      - master
+  pull_request:
+  workflow_dispatch:
+
+permissions:
+  contents: read
+
+env:
+  CARGO_TERM_COLOR: always
+
+jobs:
+  python-benchmarks:
+    name: Python benchmarks
+    runs-on: ubuntu-22.04
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: '3.12'
+
+      - name: Install Rust toolchain
+        uses: dtolnay/rust-toolchain@nightly
+
+      - name: Create virtual environment
+        run: |
+          python -m venv .venv
+          echo "VIRTUAL_ENV=$PWD/.venv" >> "$GITHUB_ENV"
+          echo "$PWD/.venv/bin" >> "$GITHUB_PATH"
+
+      - name: Install Python dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install "maturin[patchelf]" pytest pytest-benchmark numpy scikit-allel scipy
+
+      - name: Build ferromic extension module
+        run: maturin develop --release
+
+      - name: Run pytest benchmarks
+        id: run_benchmarks
+        continue-on-error: true
+        run: |
+          pytest src/pybenches --benchmark-only --benchmark-json bench-results.json
+
+      - name: Print benchmark summary
+        if: always()
+        run: |
+          python - <<'PY'
+          import json
+          from pathlib import Path
+
+          def format_seconds(value: float | None) -> str:
+              if value is None:
+                  return ""
+              if value >= 1:
+                  return f"{value:,.3f}s"
+              value_ms = value * 1_000
+              if value_ms >= 1:
+                  return f"{value_ms:,.3f}ms"
+              value_us = value_ms * 1_000
+              if value_us >= 1:
+                  return f"{value_us:,.3f}µs"
+              value_ns = value_us * 1_000
+              return f"{value_ns:,.3f}ns"
+
+          def format_ops(value: float | None) -> str:
+              if value is None:
+                  return ""
+              if value >= 1:
+                  return f"{value:,.1f}/s"
+              return f"{value:,.3f}/s"
+
+          path = Path("bench-results.json")
+          if not path.exists():
+              print("No benchmark results found.")
+              raise SystemExit(0)
+
+          with path.open() as fp:
+              data = json.load(fp)
+
+          benches = data.get("benchmarks", [])
+          if not benches:
+              print("No benchmark results recorded.")
+              raise SystemExit(0)
+
+          rows: list[dict[str, str]] = []
+          for bench in benches:
+              stats = bench.get("stats", {})
+              extra = bench.get("extra_info", {})
+              dataset = extra.get("dataset", "")
+              implementation = extra.get("implementation", "")
+              population = extra.get("population", "")
+              name = bench.get("name", "").split("[", 1)[0]
+              mean = stats.get("mean")
+              stddev = stats.get("stddev")
+              rounds = stats.get("rounds")
+              ops = stats.get("ops")
+              rows.append({
+                  "Dataset": dataset,
+                  "Implementation": implementation,
+                  "Population": population,
+                  "Benchmark": name,
+                  "Mean": format_seconds(mean) if mean is not None else "",
+                  "StdDev": format_seconds(stddev) if stddev is not None else "",
+                  "Rounds": str(rounds) if rounds is not None else "",
+                  "Ops": format_ops(ops) if ops is not None else "",
+              })
+
+          rows.sort(key=lambda r: (r["Dataset"], r["Benchmark"], r["Implementation"], r["Population"]))
+
+          headers = [
+              "Dataset",
+              "Implementation",
+              "Population",
+              "Benchmark",
+              "Mean",
+              "StdDev",
+              "Rounds",
+              "Ops",
+          ]
+
+          col_widths: dict[str, int] = {
+              header: max(len(header), *(len(row[header]) for row in rows))
+              for header in headers
+          }
+
+          separator = " | "
+          header_line = separator.join(header.ljust(col_widths[header]) for header in headers)
+          rule = "-+-".join("-" * col_widths[header] for header in headers)
+          print(header_line)
+          print(rule)
+          for row in rows:
+              print(separator.join(row[header].ljust(col_widths[header]) for header in headers))
+          PY
+
+      - name: Upload benchmark JSON
+        if: always() && hashFiles('bench-results.json') != ''
+        uses: actions/upload-artifact@v4
+        with:
+          name: pytest-benchmarks
+          path: bench-results.json
+
+      - name: Fail if benchmarks failed
+        if: steps.run_benchmarks.outcome == 'failure'
+        run: exit 1
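
The summary step above scales mean and stddev through seconds, milliseconds, microseconds, and nanoseconds before printing the table. A small standalone check of that scaling, reusing format_seconds as written in the workflow script (the sample values are illustrative):

    def format_seconds(value):
        # Same scaling as the workflow's summary script: s -> ms -> µs -> ns.
        if value is None:
            return ""
        if value >= 1:
            return f"{value:,.3f}s"
        value_ms = value * 1_000
        if value_ms >= 1:
            return f"{value_ms:,.3f}ms"
        value_us = value_ms * 1_000
        if value_us >= 1:
            return f"{value_us:,.3f}µs"
        return f"{value_us * 1_000:,.3f}ns"

    print(format_seconds(2.5))       # 2.500s
    print(format_seconds(0.0031))    # 3.100ms
    print(format_seconds(2.5e-06))   # 2.500µs
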
ferromic-0.1.0/.github/workflows/fixer.yml
@@ -0,0 +1,177 @@
+name: Fix Non-Breaking Spaces
+
+on:
+  push:
+    branches: [ main, master, develop ]
+
+permissions:
+  contents: write
+
+jobs:
+  fix-non-breaking-spaces:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          token: ${{ secrets.GITHUB_TOKEN }}
+          fetch-depth: 0 # Fetch all history
+
+      - name: Check for non-breaking spaces and fix them
+        id: fix-nbsp
+        run: |
+          set -ex # Enable verbose output and exit on error
+
+          COMMIT_MSG="${{ github.event.head_commit.message }}"
+          if [[ "$COMMIT_MSG" == *"[bot] Auto-fix non-breaking spaces"* ]]; then
+            echo "Skipping: Last commit was an auto-fix by this action."
+            echo "changes_made=false" >> $GITHUB_OUTPUT
+            echo "files_changed=" >> $GITHUB_OUTPUT
+            exit 0
+          fi
+
+          changes_made=false
+          fixed_files_list=""
+
+          find . -type f \
+            -not -path "./.git/*" \
+            -not -path "./node_modules/*" \
+            -not -path "./.venv/*" \
+            -not -path "./venv/*" \
+            -not -path "./__pycache__/*" \
+            -not -path "*/.*cache*/*" \
+            -not -path "./build/*" \
+            -not -path "./dist/*" \
+            -not -path "./.next/*" \
+            -not -path "./target/*" \
+            -not -path "./.github/*" \
+            -not -name "*.png" \
+            -not -name "*.jpg" \
+            -not -name "*.jpeg" \
+            -not -name "*.gif" \
+            -not -name "*.pdf" \
+            -not -name "*.ico" \
+            -not -name "*.woff*" \
+            -not -name "*.ttf" \
+            -not -name "*.eot" \
+            -not -name "*.zip" \
+            -not -name "*.tar.gz" \
+            -not -name "*.exe" \
+            -not -name "*.dll" \
+            -not -name "*.so" \
+            -not -name "*.dylib" \
+            -not -name "*.bin" \
+            -not -name "*.class" > files_to_check.txt
+
+          while IFS= read -r file; do
+            if [ ! -f "$file" ]; then
+              continue
+            fi
+
+            file_type_info=$(file "$file" 2>/dev/null)
+            process_this_file=false
+            if echo "$file_type_info" | grep -qiE "text|script|json|xml|html|css|javascript|python|shell|yaml|csv"; then
+              process_this_file=true
+            elif echo "$file_type_info" | grep -qiE "binary|archive|image|audio|video|font|executable.*binary"; then
+              process_this_file=false
+            else
+              if head -c 1024 "$file" 2>/dev/null | LC_ALL=C grep -q '[[:cntrl:]]' && \
+                 ! head -c 1024 "$file" 2>/dev/null | LC_ALL=C grep -q $'[\t\n\r]'; then
+                process_this_file=false
+              else
+                process_this_file=true
+              fi
+            fi
+
+            if ! $process_this_file; then
+              echo "INFO: Skipping non-text or binary file: $file"
+              continue
+            fi
+
+            # Check if file contains the UTF-8 NBSP byte sequence (c2 a0)
+            if hexdump -C "$file" 2>/dev/null | grep -q "c2 a0"; then
+              echo "INFO: Processing NBSPs in $file"
+              cp "$file" "$file.backup"
+
+              before_nbsp_count=$(hexdump -C "$file" 2>/dev/null | grep -c "c2 a0" || true)
+
+              # Use sed with LC_ALL=C for byte-wise replacement of \xc2\xa0 with a space \x20
+              # This is the sole method for replacement now.
+              if LC_ALL=C sed -i 's/\xc2\xa0/ /g' "$file"; then
+                # sed command executed successfully (exit code 0)
+                # Now verify the replacement
+                after_nbsp_count=$(hexdump -C "$file" 2>/dev/null | grep -c "c2 a0" || true)
+
+                if [ "$after_nbsp_count" -eq 0 ]; then
+                  if [ "$before_nbsp_count" -ne 0 ]; then # NBSPs were present and now are fixed
+                    echo "SUCCESS: Fixed $before_nbsp_count NBSPs in $file."
+                    fixed_files_list="$fixed_files_list $file"
+                    changes_made=true
+                  else # File had NBSPs by initial check, sed ran, after_count is 0, before_count was also 0 (unlikely path if grep -q found it)
+                    # Or, more likely, before_count was non-zero and successfully fixed.
+                    echo "INFO: $file is now clean (0 NBSPs after processing)."
+                    # If before_count was non-zero, it's already handled by the above if.
+                    # If before_count was zero initially by some fluke but grep -q passed, this ensures it's clean.
+                  fi
+                  rm -f "$file.backup"
+                else
+                  # sed ran successfully, but non-breaking spaces remain. This is unexpected with this sed command.
+                  echo "WARNING: sed ran but $file still has $after_nbsp_count NBSPs (was $before_nbsp_count). Reverting."
+                  mv "$file.backup" "$file"
+                fi
+              else
+                # sed command itself failed (e.g., permissions, disk space for temp file with -i)
+                # Since 'set -e' is active and sed is in an 'if' condition, this block will be executed on failure.
+                echo "ERROR: sed command failed to execute for $file. Reverting."
+                mv "$file.backup" "$file"
+              fi
+            else
+              echo "INFO: No NBSPs detected in $file (initial scan)."
+            fi
+          done < files_to_check.txt
+
+          rm -f files_to_check.txt
+
+          if [ "$changes_made" = true ]; then
+            echo "changes_made=true" >> $GITHUB_OUTPUT
+            echo "files_changed=${fixed_files_list# }" >> $GITHUB_OUTPUT # Remove potential leading space
+          else
+            echo "changes_made=false" >> $GITHUB_OUTPUT
+            echo "files_changed=" >> $GITHUB_OUTPUT
+          fi
+
+      - name: Configure Git
+        if: steps.fix-nbsp.outputs.changes_made == 'true'
+        run: |
+          set -ex
+          git config --local user.email "action@github.com"
+          git config --local user.name "GitHub Action Bot"
+
+      - name: Commit and push changes
+        if: steps.fix-nbsp.outputs.changes_made == 'true'
+        run: |
+          set -ex
+          git add .
+          if ! git diff --staged --quiet; then
+            echo "Committing NBSP fixes for files: ${{ steps.fix-nbsp.outputs.files_changed }}"
+            git commit -m "[bot] Auto-fix non-breaking spaces in:${{ steps.fix-nbsp.outputs.files_changed }} [skip ci]"
+            git push
+          else
+            echo "INFO: No changes to commit after NBSP fixing attempt."
+          fi
+
+      - name: Summary
+        if: always()
+        run: |
+          if [ "${{ steps.fix-nbsp.outputs.changes_made }}" = "true" ]; then
+            echo "✅ Non-breaking spaces were fixed in the following files:"
+            echo "${{ steps.fix-nbsp.outputs.files_changed }}"
+          else
+            if [[ "${{ github.event.head_commit.message }}" == *"[bot] Auto-fix non-breaking spaces"* && \
+                  "${{ steps.fix-nbsp.outputs.changes_made }}" == "false" ]]; then
+              echo "✅ Skipped: Last commit was an auto-fix. No new non-breaking spaces processing was needed."
+            else
+              echo "✅ No non-breaking spaces were found or no files required changes."
+            fi
+          fi
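
The fix step above does a byte-wise replacement of the UTF-8 non-breaking-space sequence 0xC2 0xA0 with an ASCII space via sed. A minimal Python equivalent of that replacement, shown only as a sketch and not part of the workflow:

    from pathlib import Path

    def fix_nbsp(path: Path) -> int:
        # Mirrors: LC_ALL=C sed -i 's/\xc2\xa0/ /g' "$file"
        data = path.read_bytes()
        count = data.count(b"\xc2\xa0")
        if count:
            path.write_bytes(data.replace(b"\xc2\xa0", b" "))
        return count  # number of NBSP sequences replaced
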
ferromic-0.1.0/.github/workflows/rust.yml
@@ -0,0 +1,110 @@
+name: Rust CI/CD
+
+permissions:
+  contents: write
+  packages: write
+
+on:
+  workflow_dispatch:
+    inputs:
+      tag:
+        description: 'Tag name for the release (e.g., v1.0.0)'
+        required: true
+        default: ''
+
+env:
+  CARGO_TERM_COLOR: always
+
+jobs:
+  build:
+    name: Build
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Install Rust toolchain
+        uses: dtolnay/rust-toolchain@nightly
+      - name: Build
+        run: cargo build --verbose --release
+      - name: Test
+        run: cargo test --verbose --release
+
+  create_release:
+    name: Create Release
+    needs: build
+    if: >
+      github.event_name == 'workflow_dispatch' ||
+      (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v'))
+    runs-on: ubuntu-latest
+    outputs:
+      upload_url: ${{ steps.create_release.outputs.upload_url }}
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - name: Create Release
+        id: create_release
+        uses: actions/create-release@v1
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          tag_name: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.tag || github.ref }}
+          release_name: Release ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.tag || github.ref }}
+          draft: false
+          prerelease: false
+
+  cross_compile:
+    name: Cross-compile
+    needs: create_release
+    if: >
+      github.event_name == 'workflow_dispatch' ||
+      (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v'))
+    strategy:
+      matrix:
+        include:
+          - os: ubuntu-latest
+            target: x86_64-unknown-linux-gnu
+          - os: ubuntu-latest
+            target: aarch64-unknown-linux-gnu
+          - os: macos-latest
+            target: x86_64-apple-darwin
+          - os: macos-latest
+            target: aarch64-apple-darwin
+    runs-on: ${{ matrix.os }}
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Install Rust
+        uses: dtolnay/rust-toolchain@nightly
+        with:
+          targets: ${{ matrix.target }}
+
+      - name: Install cross
+        if: matrix.os == 'ubuntu-latest'
+        run: cargo install cross
+
+      - name: Build
+        run: |
+          if [ "${{ matrix.os }}" = "ubuntu-latest" ]; then
+            cross build --release --target ${{ matrix.target }}
+          else
+            cargo build --release --target ${{ matrix.target }}
+          fi
+        shell: bash
+
+      - name: Package and Upload
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          binaries=$(cargo metadata --no-deps --format-version=1 | jq -r '.packages[0].targets[] | select(.kind[] | contains("bin")) | .name')
+          for binary in $binaries; do
+            src="target/${{ matrix.target }}/release/${binary}"
+            dst="${binary}-${{ matrix.target }}.tar.gz"
+            tar czvf "${dst}" -C "$(dirname "${src}")" "$(basename "${src}")"
+            asset_path="${dst}"
+            content_type="application/gzip"
+
+            echo "Uploading ${dst}"
+
+            gh release upload "${{ github.event_name == 'workflow_dispatch' && github.event.inputs.tag || github.ref_name }}" "${asset_path}" --clobber
+          done
+        shell: bash
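
The Package and Upload step discovers the crate's binary targets from cargo metadata piped through jq. A rough Python rendering of that same filter, assuming cargo is available on PATH (illustrative only, not part of the workflow):

    import json
    import subprocess

    # Mirrors: cargo metadata --no-deps --format-version=1 \
    #   | jq -r '.packages[0].targets[] | select(.kind[] | contains("bin")) | .name'
    meta = json.loads(
        subprocess.run(
            ["cargo", "metadata", "--no-deps", "--format-version=1"],
            check=True, capture_output=True, text=True,
        ).stdout
    )
    binaries = [
        t["name"]
        for t in meta["packages"][0]["targets"]
        if any("bin" in kind for kind in t["kind"])
    ]
    print(binaries)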