process_omol25 0.1.11__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,17 @@
1
+ ---
2
+ description: Automatically build and verify Sphinx documentation for GitHub Pages
3
+ ---
4
+
5
+ # Publish Documentation to GitHub Pages
6
+
7
+ This workflow allows the AI/User to test the Sphinx documentation compilation pipeline locally before pushing to the `main` branch, ensuring the GitHub Action executes flawlessly.
8
+
9
+ 1. Install the main package and all necessary documentation rendering engines.
10
+ // turbo
11
+ 2. `micromamba run -n janus pip install -e . sphinx sphinx-material`
12
+
13
+ 3. Verify the Sphinx HTML documentation compiles cleanly.
14
+ // turbo
15
+ 4. `cd docs && make html`
16
+
17
+ 5. **Deployment Warning**: The remote deployment to GitHub Pages is handled 100% autonomously by the `.github/workflows/gh-pages.yml` Action attached to this repository. You do not need to manually push the `gh-pages` branch unless the CI token is broken.
@@ -0,0 +1,20 @@
1
+ ---
2
+ description: Build and publish the process_omol25 distribution package to PyPI manually
3
+ ---
4
+
5
+ # Publish to PyPI
6
+
7
+ This workflow describes the manual process to build the distribution tarballs and binary wheels and upload them to the Python Package Index, mirroring the steps the autonomous CI undertakes.
8
+
9
+ 1. Ensure the `uv` toolchain is globally installed.
10
+ // turbo
11
+ 2. `curl -LsSf https://astral.sh/uv/install.sh | sh`
12
+
13
+ 3. Compile the Python package from source using `pyproject.toml`.
14
+ // turbo
15
+ 4. `uv build`
16
+
17
+ 5. Once built, the `/dist` directory will contain the `.tar.gz` and `.whl` files. Publish them to PyPI natively utilizing `uv publish`. (Note: This will prompt for your PyPI API token).
18
+ 6. `uv publish dist/*`
19
+
20
+ **Warning**: The `.github/workflows/publish-pypi.yml` handles this 100% autonomously whenever a new GitHub Release is fully published via UI. Manual `uv publish` interaction is purely for fallback or TestPyPI staging.
@@ -0,0 +1,16 @@
1
+ ---
2
+ description: Automatically publish the repository codebase to Zenodo when a new tag is cut
3
+ ---
4
+
5
+ # Publish to Zenodo
6
+
7
+ This workflow maps out how to construct and verify the `Zenodo` Digital Object Identifier (DOI) publishing mechanism using the Janus Core architecture.
8
+
9
+ 1. Ensure your repository root has a `.zenodo.json` file (optional, but highly recommended) storing your specific JSON schema metadata.
10
+ 2. Navigate directly to the Zenodo Web UI, link your GitHub account natively, and flick the repository toggle to 'On'.
11
+ 3. Trigger the deployment natively via standard git tagging:
12
+ // turbo
13
+ 4. `git tag v1.0.0 && git push origin v1.0.0`
14
+
15
+ 5. The `.github/workflows/publish-zenodo.yml` pipeline will automatically spin up, instantiate `uv`, ensure the `pyproject.toml` version explicitly matches your tag using `uvx --from=toml-cli toml`, and finally invoke `ncipollo/release-action`.
16
+ 6. This officially mints a GitHub Release. Because you flicked the Zenodo toggle in Step 2, Zenodo's native Webhooks will securely intercept the GitHub Release event and automatically mint your DOI! This approach guarantees that GitHub Releases and Zenodo DOIs are perfectly structurally paired.
@@ -0,0 +1,38 @@
1
+ name: Publish Docs to GitHub Pages
2
+
3
+ on:
4
+ push:
5
+ branches:
6
+ - main
7
+ workflow_dispatch:
8
+
9
+ permissions:
10
+ contents: write
11
+
12
+ jobs:
13
+ build-and-deploy:
14
+ runs-on: ubuntu-latest
15
+ steps:
16
+ - name: Checkout repository
17
+ uses: actions/checkout@v4
18
+
19
+ - name: Set up Python
20
+ uses: actions/setup-python@v5
21
+ with:
22
+ python-version: '3.12'
23
+
24
+ - name: Install dependencies
25
+ run: |
26
+ pip install -e .
27
+ pip install sphinx sphinx-material
28
+
29
+ - name: Build Sphinx Documentation
30
+ run: |
31
+ cd docs
32
+ make html
33
+
34
+ - name: Deploy to GitHub Pages
35
+ uses: peaceiris/actions-gh-pages@v4
36
+ with:
37
+ github_token: ${{ secrets.GITHUB_TOKEN }}
38
+ publish_dir: ./docs/_build/html
@@ -0,0 +1,57 @@
1
+ name: Publish Release
2
+
3
+ on:
4
+ push:
5
+ tags:
6
+ - 'v*'
7
+
8
+ jobs:
9
+ publish:
10
+ runs-on: ubuntu-latest
11
+
12
+ environment: pypi
13
+
14
+ # Required for GitHub Releases and PyPI Trusted Publishing OIDC handshakes
15
+ permissions:
16
+ id-token: write
17
+ contents: write
18
+
19
+ steps:
20
+ - name: Checkout repository
21
+ uses: actions/checkout@v4
22
+
23
+ - name: Install uv
24
+ uses: astral-sh/setup-uv@v5
25
+ with:
26
+ version: "latest"
27
+ python-version: "3.12"
28
+
29
+ - name: Install dependencies
30
+ run: |
31
+ uv venv -c
32
+ uv pip install -e .
33
+
34
+ - name: Build
35
+ run: uv build
36
+
37
+ - name: Get version from pyproject.toml
38
+ run: echo "VERSION=$(uvx --from=toml-cli toml get --toml-path=pyproject.toml project.version)" >> $GITHUB_ENV
39
+
40
+ - name: Check version matches tag
41
+ if: ${{ ! contains(github.ref, env.VERSION) }}
42
+ run: |
43
+ echo "Git tag does not match version in pyproject.toml. Expected v${{ env.VERSION }}"
44
+ exit 1
45
+
46
+ - name: Create GitHub Release (Triggers Zenodo Archival)
47
+ uses: ncipollo/release-action@v1
48
+ with:
49
+ artifacts: "dist/*"
50
+ token: ${{ secrets.GITHUB_TOKEN }}
51
+ draft: false
52
+ prerelease: false
53
+ skipIfReleaseExists: true
54
+ generateReleaseNotes: true
55
+
56
+ - name: Publish to PyPI
57
+ run: uv publish
@@ -0,0 +1,41 @@
1
+ name: Test process_omol25
2
+
3
+ on:
4
+ push:
5
+ branches: [ "main" ]
6
+ pull_request:
7
+ branches: [ "main" ]
8
+
9
+ jobs:
10
+ test:
11
+ runs-on: ubuntu-latest
12
+
13
+ steps:
14
+ - uses: actions/checkout@v4
15
+
16
+ - name: Set up Python
17
+ uses: actions/setup-python@v5
18
+ with:
19
+ python-version: "3.12"
20
+
21
+ - name: Install system dependencies
22
+ run: |
23
+ sudo apt-get update
24
+ sudo apt-get install -y openmpi-bin libopenmpi-dev
25
+
26
+ - name: Install Python dependencies
27
+ run: |
28
+ python -m pip install --upgrade pip
29
+ pip install -e .
30
+ pip install pytest
31
+
32
+ - name: Create dummy login file for tests
33
+ run: |
34
+ echo '{"user": "dummy", "access_key": "dummy", "secret_key": "dummy"}' > psdi-argonne-omol25-ro.json
35
+
36
+ - name: Run tests
37
+ env:
38
+ OMPI_MCA_rmaps_base_oversubscribe: 1
39
+ PRTE_MCA_rmaps_base_oversubscribe: 1
40
+ run: |
41
+ pytest tests/
@@ -0,0 +1,74 @@
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # Distribution / packaging
10
+ .Python
11
+ build/
12
+ develop-eggs/
13
+ dist/
14
+ downloads/
15
+ eggs/
16
+ .eggs/
17
+ lib/
18
+ lib64/
19
+ parts/
20
+ sdist/
21
+ var/
22
+ wheels/
23
+ *.egg-info/
24
+ .installed.cfg
25
+ *.egg
26
+
27
+ # PyInstaller
28
+ *.manifest
29
+ *.spec
30
+
31
+ # Installer logs
32
+ pip-log.txt
33
+ pip-delete-this-directory.txt
34
+
35
+ # Unit test / coverage reports
36
+ htmlcov/
37
+ .tox/
38
+ .nox/
39
+ .coverage
40
+ .coverage.*
41
+ .cache
42
+ nosetests.xml
43
+ coverage.xml
44
+ *.cover
45
+ *.sfdx
46
+ .hypothesis/
47
+ .pytest_cache/
48
+ test_output_dir/
49
+ test_output_dir_no_mpi/
50
+
51
+ # Documentation
52
+ docs/_build/
53
+
54
+ # Jupyter Notebook
55
+ .ipynb_checkpoints
56
+
57
+ # Environments
58
+ .env
59
+ .venv
60
+ env/
61
+ venv/
62
+ ENV/
63
+ env.bak/
64
+ venv.bak/
65
+
66
+ # Logs
67
+ *.log
68
+
69
+ # Secrets
70
+ psdi-argonne-omol25-ro.json
71
+
72
+ # Intermediate test files
73
+ *_restart.json
74
+ *.parquet
@@ -0,0 +1,27 @@
1
+ Copyright (c) 2026, Alin M Elena
2
+ All rights reserved.
3
+
4
+ Redistribution and use in source and binary forms, with or without
5
+ modification, are permitted provided that the following conditions are met:
6
+
7
+ 1. Redistributions of source code must retain the above copyright notice, this
8
+ list of conditions and the following disclaimer.
9
+
10
+ 2. Redistributions in binary form must reproduce the above copyright notice,
11
+ this list of conditions and the following disclaimer in the documentation
12
+ and/or other materials provided with the distribution.
13
+
14
+ 3. Neither the name of the copyright holder nor the names of its
15
+ contributors may be used to endorse or promote products derived from
16
+ this software without specific prior written permission.
17
+
18
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
21
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
22
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
23
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
24
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
25
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
26
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@@ -0,0 +1,15 @@
1
+ Metadata-Version: 2.4
2
+ Name: process_omol25
3
+ Version: 0.1.11
4
+ Summary: A Python package for processing omol-25 data using MPI.
5
+ Author: Alin M Elena
6
+ License-File: LICENSE
7
+ Requires-Python: >=3.10
8
+ Requires-Dist: ase
9
+ Requires-Dist: boto3
10
+ Requires-Dist: fastparquet
11
+ Requires-Dist: mpi4py
12
+ Requires-Dist: pandas
13
+ Requires-Dist: pyarrow
14
+ Requires-Dist: tqdm
15
+ Requires-Dist: zstandard
@@ -0,0 +1,51 @@
1
+ # process_omol25
2
+
3
+ [![PyPI version](https://img.shields.io/pypi/v/process_omol25.svg)](https://pypi.org/project/process_omol25/)
4
+ [![Python versions](https://img.shields.io/pypi/pyversions/process_omol25.svg)](https://pypi.org/project/process_omol25/)
5
+ [![License](https://img.shields.io/github/license/ddmms/process_omol25.svg)](LICENSE)
6
+ [![Code style: ruff](https://img.shields.io/badge/code%20style-ruff-000000.svg)](https://github.com/astral-sh/ruff)
7
+ [![Docs Publish](https://github.com/ddmms/process_omol25/actions/workflows/gh-pages.yml/badge.svg)](https://github.com/ddmms/process_omol25/actions/workflows/gh-pages.yml)
8
+ [![PyPI Publish](https://github.com/ddmms/process_omol25/actions/workflows/publish.yml/badge.svg)](https://github.com/ddmms/process_omol25/actions/workflows/publish.yml)
9
+ [![Zenodo Publish](https://github.com/ddmms/process_omol25/actions/workflows/publish.yml/badge.svg)](https://github.com/ddmms/process_omol25/actions/workflows/publish.yml)
10
+
11
+ A Python package for processing omol-25 data using MPI.
12
+
13
+ ## Installation
14
+
15
+ You can install this package locally:
16
+
17
+ ```bash
18
+ pip install -e .
19
+ ```
20
+
21
+ ## Usage
22
+
23
+ This package provides three primary command-line interfaces:
24
+
25
+ ### 1. Processing Data
26
+ Extract, process, and combine molecular data from an S3 bucket (or local directory):
27
+ ```bash
28
+ process_omol25 --help
29
+ ```
30
+ * **MPI Support**: Add `--mpi` and run via `mpirun` to distribute tasks across multiple workers natively via hybrid RMA.
31
+ * **Smart Restart**: Add `--restart` to automatically sweep the output directory, recover orphaned Parquet/XYZ pairs, and pick up right where you left off.
32
+ * **Logging**: Specify `--log-file my_log.log` to write text streams to disk (existing logs are automatically appended to, not overwritten).
33
+ * **Batch Flushing**: Use `--batch-size N` to control disk writes. If not specified, workers dynamically flush at 1% increments (with a strict minimum of 100 output structures).
34
+
35
+ ### 2. Downloading Raw Data
36
+ Download original raw `orca.out` datasets from S3 without running processing logic natively on them:
37
+ ```bash
38
+ download_omol25 --help
39
+ ```
40
+
41
+ ### 3. Verification Utility
42
+ Cross-reference a generated Parquet dataset with its respective ExtXYZ file to guarantee absolutely zero data corruption or structural mismatching:
43
+ ```bash
44
+ verify_processed_omol25 --parquet props_group.parquet --extxyz structs_group.xyz
45
+ ```
46
+ * This rigorously structurally aligns both tables via `geom_sha1` and flags any mathematically misassigned properties.
47
+ * Embedded timing metadata such as `process_time_s` are strictly and unconditionally excluded to prevent false-positive errors.
48
+
49
+ ## License
50
+
51
+ This project is licensed under the BSD 3-Clause License - see the [LICENSE](LICENSE) file for details.
@@ -0,0 +1,74 @@
1
+ {
2
+ "noble_gas_compounds/FXeNSO2F2_step20_0_1/": {
3
+ "processed": false,
4
+ "key": "orca.tar.zst"
5
+ },
6
+ "noble_gas_compounds/FXeNSO2F2_step21_0_1/": {
7
+ "processed": false,
8
+ "key": "orca.tar.zst"
9
+ },
10
+ "noble_gas_compounds/FXeNSO2F2_step7_0_1/": {
11
+ "processed": false,
12
+ "key": "orca.tar.zst"
13
+ },
14
+ "noble_gas_compounds/H4XeO4_step24_0_1/": {
15
+ "processed": false,
16
+ "key": "orca.tar.zst"
17
+ },
18
+ "noble_gas_compounds/H4XeO6_step2_0_1/": {
19
+ "processed": false,
20
+ "key": "orca.tar.zst"
21
+ },
22
+ "noble_gas_compounds/HCNKrF+_step25_1_1/": {
23
+ "processed": false,
24
+ "key": "orca.tar.zst"
25
+ },
26
+ "noble_gas_compounds/HXeOH_step10_0_1/": {
27
+ "processed": false,
28
+ "key": "orca.tar.zst"
29
+ },
30
+ "noble_gas_compounds/O2XeF4_step1_0_1/": {
31
+ "processed": false,
32
+ "key": "orca.tar.zst"
33
+ },
34
+ "noble_gas_compounds/O2XeF4_step3_0_1/": {
35
+ "processed": false,
36
+ "key": "orca.tar.zst"
37
+ },
38
+ "noble_gas_compounds/OXeF2_step5_0_1/": {
39
+ "processed": false,
40
+ "key": "orca.tar.zst"
41
+ },
42
+ "noble_gas_compounds/XeC6F52_step10_0_1/": {
43
+ "processed": false,
44
+ "key": "orca.tar.zst"
45
+ },
46
+ "noble_gas_compounds/XeC6F52_step4_0_1/": {
47
+ "processed": false,
48
+ "key": "orca.tar.zst"
49
+ },
50
+ "noble_gas_compounds/XeC6F5CN_step9_0_1/": {
51
+ "processed": false,
52
+ "key": "orca.tar.zst"
53
+ },
54
+ "noble_gas_compounds/XeC6F5F2+_step11_1_1/": {
55
+ "processed": false,
56
+ "key": "orca.tar.zst"
57
+ },
58
+ "noble_gas_compounds/XeC6F5F2+_step26_1_1/": {
59
+ "processed": false,
60
+ "key": "orca.tar.zst"
61
+ },
62
+ "noble_gas_compounds/XeCl4_step2_0_1/": {
63
+ "processed": false,
64
+ "key": "orca.tar.zst"
65
+ },
66
+ "noble_gas_compounds/XeF7-_step2_-1_1/": {
67
+ "processed": false,
68
+ "key": "orca.tar.zst"
69
+ },
70
+ "noble_gas_compounds/XeF7-_step4_-1_1/": {
71
+ "processed": false,
72
+ "key": "orca.tar.zst"
73
+ }
74
+ }
@@ -0,0 +1,146 @@
1
+ {
2
+ "noble_gas_compounds/FXeNSO2F2_step20_0_1/": {
3
+ "processed": false,
4
+ "key": [
5
+ "orca.tar.zst",
6
+ "orca.gbw.zstd0",
7
+ "density_mat.npz"
8
+ ]
9
+ },
10
+ "noble_gas_compounds/FXeNSO2F2_step21_0_1/": {
11
+ "processed": false,
12
+ "key": [
13
+ "orca.tar.zst",
14
+ "orca.gbw.zstd0",
15
+ "density_mat.npz"
16
+ ]
17
+ },
18
+ "noble_gas_compounds/FXeNSO2F2_step7_0_1/": {
19
+ "processed": false,
20
+ "key": [
21
+ "orca.tar.zst",
22
+ "orca.gbw.zstd0",
23
+ "density_mat.npz"
24
+ ]
25
+ },
26
+ "noble_gas_compounds/H4XeO4_step24_0_1/": {
27
+ "processed": false,
28
+ "key": [
29
+ "orca.tar.zst",
30
+ "orca.gbw.zstd0",
31
+ "density_mat.npz"
32
+ ]
33
+ },
34
+ "noble_gas_compounds/H4XeO6_step2_0_1/": {
35
+ "processed": false,
36
+ "key": [
37
+ "orca.tar.zst",
38
+ "orca.gbw.zstd0",
39
+ "density_mat.npz"
40
+ ]
41
+ },
42
+ "noble_gas_compounds/HCNKrF+_step25_1_1/": {
43
+ "processed": false,
44
+ "key": [
45
+ "orca.tar.zst",
46
+ "orca.gbw.zstd0",
47
+ "density_mat.npz"
48
+ ]
49
+ },
50
+ "noble_gas_compounds/HXeOH_step10_0_1/": {
51
+ "processed": false,
52
+ "key": [
53
+ "orca.tar.zst",
54
+ "orca.gbw.zstd0",
55
+ "density_mat.npz"
56
+ ]
57
+ },
58
+ "noble_gas_compounds/O2XeF4_step1_0_1/": {
59
+ "processed": false,
60
+ "key": [
61
+ "orca.tar.zst",
62
+ "orca.gbw.zstd0",
63
+ "density_mat.npz"
64
+ ]
65
+ },
66
+ "noble_gas_compounds/O2XeF4_step3_0_1/": {
67
+ "processed": false,
68
+ "key": [
69
+ "orca.tar.zst",
70
+ "orca.gbw.zstd0",
71
+ "density_mat.npz"
72
+ ]
73
+ },
74
+ "noble_gas_compounds/OXeF2_step5_0_1/": {
75
+ "processed": false,
76
+ "key": [
77
+ "orca.tar.zst",
78
+ "orca.gbw.zstd0",
79
+ "density_mat.npz"
80
+ ]
81
+ },
82
+ "noble_gas_compounds/XeC6F52_step10_0_1/": {
83
+ "processed": false,
84
+ "key": [
85
+ "orca.tar.zst",
86
+ "orca.gbw.zstd0",
87
+ "density_mat.npz"
88
+ ]
89
+ },
90
+ "noble_gas_compounds/XeC6F52_step4_0_1/": {
91
+ "processed": false,
92
+ "key": [
93
+ "orca.tar.zst",
94
+ "orca.gbw.zstd0",
95
+ "density_mat.npz"
96
+ ]
97
+ },
98
+ "noble_gas_compounds/XeC6F5CN_step9_0_1/": {
99
+ "processed": false,
100
+ "key": [
101
+ "orca.tar.zst",
102
+ "orca.gbw.zstd0",
103
+ "density_mat.npz"
104
+ ]
105
+ },
106
+ "noble_gas_compounds/XeC6F5F2+_step11_1_1/": {
107
+ "processed": false,
108
+ "key": [
109
+ "orca.tar.zst",
110
+ "orca.gbw.zstd0",
111
+ "density_mat.npz"
112
+ ]
113
+ },
114
+ "noble_gas_compounds/XeC6F5F2+_step26_1_1/": {
115
+ "processed": false,
116
+ "key": [
117
+ "orca.tar.zst",
118
+ "orca.gbw.zstd0",
119
+ "density_mat.npz"
120
+ ]
121
+ },
122
+ "noble_gas_compounds/XeCl4_step2_0_1/": {
123
+ "processed": false,
124
+ "key": [
125
+ "orca.tar.zst",
126
+ "orca.gbw.zstd0",
127
+ "density_mat.npz"
128
+ ]
129
+ },
130
+ "noble_gas_compounds/XeF7-_step2_-1_1/": {
131
+ "processed": false,
132
+ "key": [
133
+ "orca.tar.zst",
134
+ "orca.gbw.zstd0",
135
+ "density_mat.npz"
136
+ ]
137
+ },
138
+ "noble_gas_compounds/XeF7-_step4_-1_1/": {
139
+ "processed": false,
140
+ "key": [
141
+ "orca.tar.zst",
142
+ "orca.gbw.zstd0",
143
+ "density_mat.npz"
144
+ ]
145
+ }
146
+ }
@@ -0,0 +1,20 @@
1
+ # Minimal makefile for Sphinx documentation
2
+ #
3
+
4
+ # You can set these variables from the command line, and also
5
+ # from the environment for the first two.
6
+ SPHINXOPTS ?=
7
+ SPHINXBUILD ?= sphinx-build
8
+ SOURCEDIR = .
9
+ BUILDDIR = _build
10
+
11
+ # Put it first so that "make" without argument is like "make help".
12
+ help:
13
+ @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14
+
15
+ .PHONY: help Makefile
16
+
17
+ # Catch-all target: route all unknown targets to Sphinx using the new
18
+ # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19
+ %: Makefile
20
+ @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
@@ -0,0 +1,18 @@
1
+ API Reference
2
+ =============
3
+
4
+ Main Processor module
5
+ ---------------------
6
+
7
+ .. automodule:: process_omol25.process_omol25
8
+ :members:
9
+ :undoc-members:
10
+ :show-inheritance:
11
+
12
+ CLI Module
13
+ ----------
14
+
15
+ .. automodule:: process_omol25.cli
16
+ :members:
17
+ :undoc-members:
18
+ :show-inheritance:
@@ -0,0 +1,42 @@
1
+ import os
2
+ import sys
3
+ sys.path.insert(0, os.path.abspath('../src'))
4
+
5
+ # Configuration file for the Sphinx documentation builder.
6
+ #
7
+ # For the full list of built-in configuration values, see the documentation:
8
+ # https://www.sphinx-doc.org/en/master/usage/configuration.html
9
+
10
+ # -- Project information -----------------------------------------------------
11
+ # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
12
+
13
+ project = 'process_omol25'
14
+ copyright = '2026, Alin M Elena'
15
+ author = 'Alin M Elena'
16
+
17
+ version = "0.1.11"
18
+ release = "0.1.11"
19
+
20
+ # -- General configuration ---------------------------------------------------
21
+ # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
22
+
23
+ extensions = [
24
+ 'sphinx.ext.autodoc',
25
+ 'sphinx_material',
26
+ ]
27
+
28
+ autodoc_mock_imports = [
29
+ "ase", "boto3", "botocore", "zstandard", "pandas",
30
+ "numpy", "tqdm", "mpi4py"
31
+ ]
32
+
33
+ templates_path = ['_templates']
34
+ exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
35
+
36
+
37
+
38
+ # -- Options for HTML output -------------------------------------------------
39
+ # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
40
+
41
+ html_theme = 'sphinx_material'
42
+ html_static_path = ['_static']
@@ -0,0 +1,18 @@
1
+ .. process_omol25 documentation master file, created by
2
+ sphinx-quickstart on Sat Mar 21 08:10:34 2026.
3
+ You can adapt this file completely to your liking, but it should at least
4
+ contain the root `toctree` directive.
5
+
6
+ process_omol25 documentation
7
+ ============================
8
+
9
+ Add your content using ``reStructuredText`` syntax. See the
10
+ `reStructuredText <https://www.sphinx-doc.org/en/master/usage/restructuredtext/index.html>`_
11
+ documentation for details.
12
+
13
+
14
+ .. toctree::
15
+ :maxdepth: 2
16
+ :caption: Contents:
17
+
18
+ api