bluemath-tk 1.0.7__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bluemath_tk-1.0.7/.github/workflows/build-package.yml +40 -0
- bluemath_tk-1.0.7/.github/workflows/python-tests.yml +40 -0
- bluemath_tk-1.0.7/.gitignore +102 -0
- bluemath_tk-1.0.7/LICENSE +7 -0
- bluemath_tk-1.0.7/PKG-INFO +84 -0
- bluemath_tk-1.0.7/README.md +27 -0
- bluemath_tk-1.0.7/bluemath_tk/__init__.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/_version.py +16 -0
- bluemath_tk-1.0.7/bluemath_tk/core/__init__.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/core/data/__init__.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/core/data/sample_data.py +40 -0
- bluemath_tk-1.0.7/bluemath_tk/core/decorators.py +325 -0
- bluemath_tk-1.0.7/bluemath_tk/core/logging.py +76 -0
- bluemath_tk-1.0.7/bluemath_tk/core/models.py +474 -0
- bluemath_tk-1.0.7/bluemath_tk/core/operations.py +368 -0
- bluemath_tk-1.0.7/bluemath_tk/core/plotting/__init__.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/core/plotting/base_plotting.py +230 -0
- bluemath_tk-1.0.7/bluemath_tk/core/plotting/colors.py +234 -0
- bluemath_tk-1.0.7/bluemath_tk/datamining/__init__.py +27 -0
- bluemath_tk-1.0.7/bluemath_tk/datamining/_base_datamining.py +409 -0
- bluemath_tk-1.0.7/bluemath_tk/datamining/kma.py +312 -0
- bluemath_tk-1.0.7/bluemath_tk/datamining/lhs.py +137 -0
- bluemath_tk-1.0.7/bluemath_tk/datamining/mda.py +469 -0
- bluemath_tk-1.0.7/bluemath_tk/datamining/pca.py +549 -0
- bluemath_tk-1.0.7/bluemath_tk/datamining/som.py +389 -0
- bluemath_tk-1.0.7/bluemath_tk/deeplearning/__init__.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/deeplearning/deep_environment.yml +206 -0
- bluemath_tk-1.0.7/bluemath_tk/deeplearning/generators/__init__.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/deeplearning/generators/mockDataGenerator.py +44 -0
- bluemath_tk-1.0.7/bluemath_tk/deeplearning/generators/ncDataGenerator.py +89 -0
- bluemath_tk-1.0.7/bluemath_tk/deeplearning/models/__init__.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/deeplearning/models/resnet_model.py +74 -0
- bluemath_tk-1.0.7/bluemath_tk/deeplearning/resnet.py +36 -0
- bluemath_tk-1.0.7/bluemath_tk/distributions/__init__.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/distributions/copula.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/distributions/gev.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/distributions/gpd.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/distributions/poisson.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/distributions/pot.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/downloaders/__init__.py +19 -0
- bluemath_tk-1.0.7/bluemath_tk/downloaders/_base_downloaders.py +82 -0
- bluemath_tk-1.0.7/bluemath_tk/downloaders/copernicus/ERA5/ERA5_config.json +231 -0
- bluemath_tk-1.0.7/bluemath_tk/downloaders/copernicus/ERA5/ERA5_download.py +95 -0
- bluemath_tk-1.0.7/bluemath_tk/downloaders/copernicus/ERA5/README.md +124 -0
- bluemath_tk-1.0.7/bluemath_tk/downloaders/copernicus/__init__.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/downloaders/copernicus/copernicus_downloader.py +456 -0
- bluemath_tk-1.0.7/bluemath_tk/interpolation/__init__.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/interpolation/_base_interpolation.py +94 -0
- bluemath_tk-1.0.7/bluemath_tk/interpolation/analogs.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/interpolation/gps.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/interpolation/rbf.py +914 -0
- bluemath_tk-1.0.7/bluemath_tk/interpolation/rbf_scipy.py +788 -0
- bluemath_tk-1.0.7/bluemath_tk/predictor/__init__.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/predictor/awt.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/predictor/dwt.py +831 -0
- bluemath_tk-1.0.7/bluemath_tk/predictor/indices.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/predictor/itca.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/predictor/iwt.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/risk/__init__.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/risk/damage.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/risk/pcrafi.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/risk/riskscapetools.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/tc/__init__.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/tc/graffitiwaves.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/tc/parameterization.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/tc/qtcrain.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/tc/skytcwaves.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/tc/tracks.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/tc/vortex.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/tide/__init__.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/tide/harmonic.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/tide/ttide.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/tide/utide.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/topo_bathy/__init__.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/topo_bathy/profiles.py +232 -0
- bluemath_tk-1.0.7/bluemath_tk/waves/__init__.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/waves/climate.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/waves/estela.py +473 -0
- bluemath_tk-1.0.7/bluemath_tk/waves/greenswell.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/waves/partitioning.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/waves/series.py +414 -0
- bluemath_tk-1.0.7/bluemath_tk/waves/snakes.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/waves/spectra.py +46 -0
- bluemath_tk-1.0.7/bluemath_tk/waves/statistics.py +29 -0
- bluemath_tk-1.0.7/bluemath_tk/waves/superpoint.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/__init__.py +19 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/_base_wrappers.py +674 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/_utils_wrappers.py +51 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/cgwave/__init__.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/cgwave/cgwave_wrapper.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/delft3d/__init__.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/delft3d/delft3d_wrapper.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/lisflood/__init__.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/lisflood/lisflood_wrapper.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/schism/__init__.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/schism/schism_wrapper.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/swan/__init__.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/swan/swan_wrapper.py +99 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/swan/templates/nostruc_input.swn +23 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/swan/templates/struc_input.swn +31 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/swan/templates/wind_input.swn +30 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/swash/__init__.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/swash/swash_example.py +117 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/swash/swash_wrapper.py +542 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/swash/templates/input.sws +59 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/swash/templates/sbatch.sh +8 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/xbeach/__init__.py +0 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/xbeach/templates/loclist.txt +3 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/xbeach/templates/params.txt +79 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/xbeach/xbeach_example.py +67 -0
- bluemath_tk-1.0.7/bluemath_tk/wrappers/xbeach/xbeach_wrapper.py +68 -0
- bluemath_tk-1.0.7/bluemath_tk.egg-info/PKG-INFO +84 -0
- bluemath_tk-1.0.7/bluemath_tk.egg-info/SOURCES.txt +143 -0
- bluemath_tk-1.0.7/bluemath_tk.egg-info/dependency_links.txt +1 -0
- bluemath_tk-1.0.7/bluemath_tk.egg-info/requires.txt +30 -0
- bluemath_tk-1.0.7/bluemath_tk.egg-info/top_level.txt +1 -0
- bluemath_tk-1.0.7/docs/assets/bluemath-logo.png +0 -0
- bluemath_tk-1.0.7/docs/assets/sketch_tk.png +0 -0
- bluemath_tk-1.0.7/docs/contribute.md +72 -0
- bluemath_tk-1.0.7/docs/datamining/base_datamining.md +1 -0
- bluemath_tk-1.0.7/docs/datamining/clustering_datamining.md +5 -0
- bluemath_tk-1.0.7/docs/datamining/intro.md +31 -0
- bluemath_tk-1.0.7/docs/datamining/reduction_datamining.md +1 -0
- bluemath_tk-1.0.7/docs/datamining/sampling_datamining.md +1 -0
- bluemath_tk-1.0.7/docs/index.md +17 -0
- bluemath_tk-1.0.7/docs/installation.md +26 -0
- bluemath_tk-1.0.7/docs/interpolation/intro.md +22 -0
- bluemath_tk-1.0.7/docs/interpolation/rbf_interpolation.md +1 -0
- bluemath_tk-1.0.7/docs/wrappers/base_wrapper.md +1 -0
- bluemath_tk-1.0.7/docs/wrappers/intro.md +160 -0
- bluemath_tk-1.0.7/docs/wrappers/schedulers.md +3 -0
- bluemath_tk-1.0.7/docs/wrappers/swash_wrapper.md +1 -0
- bluemath_tk-1.0.7/environment.yml +7 -0
- bluemath_tk-1.0.7/mkdocs.yml +50 -0
- bluemath_tk-1.0.7/pyproject.toml +65 -0
- bluemath_tk-1.0.7/setup.cfg +4 -0
- bluemath_tk-1.0.7/setup.py +41 -0
- bluemath_tk-1.0.7/tests/datamining/test_kma.py +47 -0
- bluemath_tk-1.0.7/tests/datamining/test_lhs.py +34 -0
- bluemath_tk-1.0.7/tests/datamining/test_mda.py +62 -0
- bluemath_tk-1.0.7/tests/datamining/test_pca.py +66 -0
- bluemath_tk-1.0.7/tests/datamining/test_som.py +70 -0
- bluemath_tk-1.0.7/tests/deeplearning/test_resnet.py +59 -0
- bluemath_tk-1.0.7/tests/environment.yml +28 -0
- bluemath_tk-1.0.7/tests/interpolation/test_rbf.py +71 -0
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
name: Publish Python Package
|
|
2
|
+
|
|
3
|
+
on:
|
|
4
|
+
push:
|
|
5
|
+
tags:
|
|
6
|
+
- "v*"
|
|
7
|
+
|
|
8
|
+
jobs:
|
|
9
|
+
build-and-publish:
|
|
10
|
+
runs-on: ubuntu-latest
|
|
11
|
+
|
|
12
|
+
steps:
|
|
13
|
+
- name: Checkout Repository
|
|
14
|
+
uses: actions/checkout@v4
|
|
15
|
+
|
|
16
|
+
- name: Set Up Python
|
|
17
|
+
uses: actions/setup-python@v5
|
|
18
|
+
with:
|
|
19
|
+
python-version: "3.12"
|
|
20
|
+
|
|
21
|
+
- name: Install Dependencies
|
|
22
|
+
run: |
|
|
23
|
+
pip install build setuptools-scm twine
|
|
24
|
+
conda install -c conda-forge conda-build anaconda-client
|
|
25
|
+
|
|
26
|
+
- name: Get Version
|
|
27
|
+
id: get_version
|
|
28
|
+
run: |
|
|
29
|
+
echo "version=$(python -c 'import setuptools_scm; print(setuptools_scm.get_version())')" >> $GITHUB_ENV
|
|
30
|
+
env:
|
|
31
|
+
GITHUB_ENV: ${{ github.env }}
|
|
32
|
+
|
|
33
|
+
- name: Build Python Package
|
|
34
|
+
run: python -m build
|
|
35
|
+
|
|
36
|
+
- name: Publish to PyPI
|
|
37
|
+
run: python -m twine upload dist/* --verbose
|
|
38
|
+
env:
|
|
39
|
+
TWINE_USERNAME: __token__
|
|
40
|
+
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
name: Python Tests
|
|
2
|
+
|
|
3
|
+
on:
|
|
4
|
+
pull_request:
|
|
5
|
+
branches:
|
|
6
|
+
- develop
|
|
7
|
+
types:
|
|
8
|
+
- opened
|
|
9
|
+
|
|
10
|
+
jobs:
|
|
11
|
+
python-tests:
|
|
12
|
+
runs-on: ${{ matrix.os }}
|
|
13
|
+
strategy:
|
|
14
|
+
matrix:
|
|
15
|
+
os: [ubuntu-latest]
|
|
16
|
+
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
|
|
17
|
+
fail-fast: false
|
|
18
|
+
|
|
19
|
+
steps:
|
|
20
|
+
- name: Checkout code
|
|
21
|
+
uses: actions/checkout@v4
|
|
22
|
+
|
|
23
|
+
- name: Set up Miniconda
|
|
24
|
+
uses: conda-incubator/setup-miniconda@v2
|
|
25
|
+
with:
|
|
26
|
+
auto-update-conda: true
|
|
27
|
+
|
|
28
|
+
- name: Modify environment file with Python version
|
|
29
|
+
run: sed -i "s/python=[0-9.]\+/python=${{ matrix.python-version }}/" tests/environment.yml
|
|
30
|
+
|
|
31
|
+
- name: Create conda environment
|
|
32
|
+
run: conda env create -f tests/environment.yml
|
|
33
|
+
|
|
34
|
+
- name: Run tests
|
|
35
|
+
run: |
|
|
36
|
+
source /usr/share/miniconda/etc/profile.d/conda.sh
|
|
37
|
+
conda activate bluemath-tests
|
|
38
|
+
python --version
|
|
39
|
+
python -m unittest discover tests/datamining/
|
|
40
|
+
python -m unittest discover tests/interpolation/
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
# Byte-compiled / optimized / DLL files
|
|
2
|
+
__pycache__/
|
|
3
|
+
*.py[cod]
|
|
4
|
+
*$py.class
|
|
5
|
+
|
|
6
|
+
# C extensions
|
|
7
|
+
*.so
|
|
8
|
+
|
|
9
|
+
# Distribution / packaging
|
|
10
|
+
.Python
|
|
11
|
+
build/
|
|
12
|
+
develop-eggs/
|
|
13
|
+
dist/
|
|
14
|
+
downloads/
|
|
15
|
+
eggs/
|
|
16
|
+
.eggs/
|
|
17
|
+
lib/
|
|
18
|
+
lib64/
|
|
19
|
+
parts/
|
|
20
|
+
sdist/
|
|
21
|
+
var/
|
|
22
|
+
wheels/
|
|
23
|
+
share/python-wheels/
|
|
24
|
+
*.egg-info/
|
|
25
|
+
.installed.cfg
|
|
26
|
+
*.egg
|
|
27
|
+
MANIFEST
|
|
28
|
+
conda-recipe/
|
|
29
|
+
bluemath_tk/_version.py
|
|
30
|
+
|
|
31
|
+
# PyInstaller
|
|
32
|
+
# Usually these files are written by a python script from a template
|
|
33
|
+
*.manifest
|
|
34
|
+
*.spec
|
|
35
|
+
|
|
36
|
+
# Installer logs
|
|
37
|
+
pip-log.txt
|
|
38
|
+
pip-delete-this-directory.txt
|
|
39
|
+
|
|
40
|
+
# Unit test / coverage reports
|
|
41
|
+
htmlcov/
|
|
42
|
+
.tox/
|
|
43
|
+
.coverage
|
|
44
|
+
.coverage.*
|
|
45
|
+
.cache
|
|
46
|
+
nosetests.xml
|
|
47
|
+
coverage.xml
|
|
48
|
+
*.cover
|
|
49
|
+
*.py,cover
|
|
50
|
+
.hypothesis/
|
|
51
|
+
.pytest_cache/
|
|
52
|
+
|
|
53
|
+
# mypy
|
|
54
|
+
.mypy_cache/
|
|
55
|
+
.dmypy.json
|
|
56
|
+
dmypy.json
|
|
57
|
+
|
|
58
|
+
# Pyre type checker
|
|
59
|
+
.pyre/
|
|
60
|
+
|
|
61
|
+
# Jupyter Notebook
|
|
62
|
+
.ipynb_checkpoints
|
|
63
|
+
|
|
64
|
+
# Django stuff:
|
|
65
|
+
*.log
|
|
66
|
+
staticfiles/
|
|
67
|
+
media/
|
|
68
|
+
|
|
69
|
+
# Flask stuff:
|
|
70
|
+
instance/
|
|
71
|
+
.webassets-cache
|
|
72
|
+
|
|
73
|
+
# Scrapy stuff:
|
|
74
|
+
.scrapy
|
|
75
|
+
|
|
76
|
+
# Sphinx documentation
|
|
77
|
+
docs/_build/
|
|
78
|
+
|
|
79
|
+
# MkDocs documentation (production)
|
|
80
|
+
site/
|
|
81
|
+
|
|
82
|
+
# PyCharm
|
|
83
|
+
.idea/
|
|
84
|
+
*.iml
|
|
85
|
+
|
|
86
|
+
# VS Code
|
|
87
|
+
.vscode/
|
|
88
|
+
|
|
89
|
+
# Local environment files
|
|
90
|
+
.env
|
|
91
|
+
.venv
|
|
92
|
+
env/
|
|
93
|
+
venv/
|
|
94
|
+
ENV/
|
|
95
|
+
env.bak/
|
|
96
|
+
venv.bak/
|
|
97
|
+
|
|
98
|
+
# Local notebooks
|
|
99
|
+
notebooks/
|
|
100
|
+
test_cases/
|
|
101
|
+
test_data/
|
|
102
|
+
TODO.md
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
Copyright (c) 2024 GeoOcean group, Universidad de Cantabria
|
|
2
|
+
|
|
3
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
|
4
|
+
|
|
5
|
+
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
|
6
|
+
|
|
7
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
Metadata-Version: 2.2
|
|
2
|
+
Name: bluemath-tk
|
|
3
|
+
Version: 1.0.7
|
|
4
|
+
Summary: Module collection for custom climate data analysis. The main objective of this Python package is to provide a set of statistical tools tailored for climate data analysis, with a focus on statistical classification and prediction.
|
|
5
|
+
Home-page: https://github.com/GeoOcean/BlueMath_tk
|
|
6
|
+
Author: GeoOcean Group
|
|
7
|
+
Author-email: Geomatics and Ocean Engineering Group <bluemath@unican.es>
|
|
8
|
+
License: Copyright (c) 2024 GeoOcean group, Universidad de Cantabria
|
|
9
|
+
|
|
10
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
|
13
|
+
|
|
14
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
15
|
+
Project-URL: Homepage, https://github.com/GeoOcean/BlueMath_tk
|
|
16
|
+
Project-URL: Documentation, https://geoocean.github.io/BlueMath_tk/
|
|
17
|
+
Project-URL: Issues, https://github.com/GeoOcean/BlueMath_tk/issues
|
|
18
|
+
Keywords: climate,statistical,tools
|
|
19
|
+
Classifier: Programming Language :: Python :: 3 :: Only
|
|
20
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
21
|
+
Classifier: Operating System :: OS Independent
|
|
22
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
23
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
24
|
+
Requires-Python: >=3.11
|
|
25
|
+
Description-Content-Type: text/markdown
|
|
26
|
+
License-File: LICENSE
|
|
27
|
+
Requires-Dist: numpy
|
|
28
|
+
Requires-Dist: pandas
|
|
29
|
+
Requires-Dist: xarray
|
|
30
|
+
Requires-Dist: netcdf4
|
|
31
|
+
Requires-Dist: dask
|
|
32
|
+
Requires-Dist: zarr
|
|
33
|
+
Requires-Dist: scipy
|
|
34
|
+
Requires-Dist: scikit-learn
|
|
35
|
+
Requires-Dist: matplotlib
|
|
36
|
+
Requires-Dist: plotly
|
|
37
|
+
Requires-Dist: cartopy
|
|
38
|
+
Requires-Dist: cdsapi
|
|
39
|
+
Requires-Dist: jinja2
|
|
40
|
+
Requires-Dist: requests
|
|
41
|
+
Requires-Dist: aiohttp
|
|
42
|
+
Requires-Dist: minisom
|
|
43
|
+
Provides-Extra: docs
|
|
44
|
+
Requires-Dist: mkdocs; extra == "docs"
|
|
45
|
+
Requires-Dist: mkdocs-material; extra == "docs"
|
|
46
|
+
Requires-Dist: mkdocstrings; extra == "docs"
|
|
47
|
+
Requires-Dist: mkdocstrings[python]; extra == "docs"
|
|
48
|
+
Provides-Extra: tests
|
|
49
|
+
Requires-Dist: pytest; extra == "tests"
|
|
50
|
+
Requires-Dist: pytest-mock; extra == "tests"
|
|
51
|
+
Provides-Extra: deep
|
|
52
|
+
Requires-Dist: tensorflow; extra == "deep"
|
|
53
|
+
Requires-Dist: keras; extra == "deep"
|
|
54
|
+
Dynamic: author
|
|
55
|
+
Dynamic: home-page
|
|
56
|
+
Dynamic: requires-python
|
|
57
|
+
|
|
58
|
+
# Bluemath {**Toolkit**}
|
|
59
|
+
|
|
60
|
+
<p align="center">
|
|
61
|
+
<img alt="GitHub Actions" src="https://github.com/GeoOcean/BlueMath_tk/actions/workflows/python-tests.yml/badge.svg?branch=main">
|
|
62
|
+
<img alt="GitHub" src="https://img.shields.io/github/license/GeoOcean/BlueMath_tk">
|
|
63
|
+
<img alt="PyPI" src="https://img.shields.io/pypi/v/BlueMath_tk">
|
|
64
|
+
<img alt="Conda (channel only)" src="https://img.shields.io/conda/vn/conda-forge/BlueMath_tk">
|
|
65
|
+
<img alt="PyPI - Python Version" src="https://img.shields.io/pypi/pyversions/BlueMath_tk">
|
|
66
|
+
</p>
|
|
67
|
+
|
|
68
|
+
Module collection for custom climate data analysis. The main objective of this Python package is to provide a set of statistical tools tailored for climate data analysis, with a focus on statistical classification and prediction.
|
|
69
|
+
|
|
70
|
+
The package also includes a set of utility functions for processing climate data. To facilitate understanding of how to use these modules, simple demonstration scripts and the necessary data have been added to this repository.
|
|
71
|
+
|
|
72
|
+
## Installation
|
|
73
|
+
|
|
74
|
+
```sh
|
|
75
|
+
pip install bluemath-tk
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
## License
|
|
79
|
+
|
|
80
|
+
This project is licensed under the MIT License. See the [LICENSE](LICENSE) file for details.
|
|
81
|
+
|
|
82
|
+
## Project status
|
|
83
|
+
|
|
84
|
+
- UNDER DEVELOPMENT
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
# Bluemath {**Toolkit**}
|
|
2
|
+
|
|
3
|
+
<p align="center">
|
|
4
|
+
<img alt="GitHub Actions" src="https://github.com/GeoOcean/BlueMath_tk/actions/workflows/python-tests.yml/badge.svg?branch=main">
|
|
5
|
+
<img alt="GitHub" src="https://img.shields.io/github/license/GeoOcean/BlueMath_tk">
|
|
6
|
+
<img alt="PyPI" src="https://img.shields.io/pypi/v/BlueMath_tk">
|
|
7
|
+
<img alt="Conda (channel only)" src="https://img.shields.io/conda/vn/conda-forge/BlueMath_tk">
|
|
8
|
+
<img alt="PyPI - Python Version" src="https://img.shields.io/pypi/pyversions/BlueMath_tk">
|
|
9
|
+
</p>
|
|
10
|
+
|
|
11
|
+
Module collection for custom climate data analysis. The main objective of this Python package is to provide a set of statistical tools tailored for climate data analysis, with a focus on statistical classification and prediction.
|
|
12
|
+
|
|
13
|
+
The package also includes a set of utility functions for processing climate data. To facilitate understanding of how to use these modules, simple demonstration scripts and the necessary data have been added to this repository.
|
|
14
|
+
|
|
15
|
+
## Installation
|
|
16
|
+
|
|
17
|
+
```sh
|
|
18
|
+
pip install bluemath-tk
|
|
19
|
+
```
|
|
20
|
+
|
|
21
|
+
## License
|
|
22
|
+
|
|
23
|
+
This project is licensed under the MIT License. See the [LICENSE](LICENSE) file for details.
|
|
24
|
+
|
|
25
|
+
## Project status
|
|
26
|
+
|
|
27
|
+
- UNDER DEVELOPMENT
|
|
File without changes
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
# file generated by setuptools_scm
|
|
2
|
+
# don't change, don't track in version control
|
|
3
|
+
TYPE_CHECKING = False
|
|
4
|
+
if TYPE_CHECKING:
|
|
5
|
+
from typing import Tuple, Union
|
|
6
|
+
VERSION_TUPLE = Tuple[Union[int, str], ...]
|
|
7
|
+
else:
|
|
8
|
+
VERSION_TUPLE = object
|
|
9
|
+
|
|
10
|
+
version: str
|
|
11
|
+
__version__: str
|
|
12
|
+
__version_tuple__: VERSION_TUPLE
|
|
13
|
+
version_tuple: VERSION_TUPLE
|
|
14
|
+
|
|
15
|
+
__version__ = version = '1.0.7'
|
|
16
|
+
__version_tuple__ = version_tuple = (1, 0, 7)
|
|
File without changes
|
|
File without changes
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
import numpy as np
|
|
2
|
+
import xarray as xr
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
def get_2d_dataset():
    """
    Build a synthetic sample dataset for demos and tests.

    Despite the name, the returned dataset is three-dimensional
    (``coord1``, ``coord2``, ``coord3``): it holds two data variables,
    ``X`` (a superposition of three harmonics of the coordinates) and
    ``Y`` (derived deterministically from ``X``).

    Returns
    -------
    xr.Dataset
        Dataset with variables ``X`` and ``Y`` over dimensions
        ``coord1`` (20), ``coord2`` (20) and ``coord3`` (49).
    """

    # 1-D axis values before meshing.
    axis1 = np.linspace(-100, 100, 20)
    axis2 = np.linspace(-100, 100, 20)
    axis3 = np.arange(1, 50)

    # Full 3-D grids; "ij" indexing keeps the axis order (coord1, coord2, coord3).
    grid1, grid2, grid3 = np.meshgrid(axis1, axis2, axis3, indexing="ij")

    # X is the sum of the first three harmonics of the (angular) coordinates.
    X = sum(
        np.sin(k * np.radians(grid1))
        * np.cos(k * np.radians(grid2))
        * np.sin(k * grid3)
        for k in (1, 2, 3)
    )
    # Y is a pointwise function of X.
    Y = -np.sin(X)

    # Assemble the xarray dataset; 1-D coordinate vectors are recovered
    # by slicing the meshed grids along the matching axis.
    return xr.Dataset(
        data_vars={
            "X": (["coord1", "coord2", "coord3"], X),
            "Y": (["coord1", "coord2", "coord3"], Y),
        },
        coords={
            "coord1": grid1[:, 0, 0],
            "coord2": grid2[0, :, 0],
            "coord3": grid3[0, 0, :],
        },
    )
|
|
@@ -0,0 +1,325 @@
|
|
|
1
|
+
import functools
|
|
2
|
+
from typing import List
|
|
3
|
+
import pandas as pd
|
|
4
|
+
import xarray as xr
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def validate_data_lhs(func):
    """
    Decorator to validate data in LHS class fit method.

    Checks that the dimension names and bound lists are parallel lists
    of equal length, that each lower bound does not exceed its upper
    bound, and that the sample count is a positive integer, before
    delegating to the wrapped ``fit``.

    Parameters
    ----------
    func : callable
        The function to be decorated

    Returns
    -------
    callable
        The decorated function

    Raises
    ------
    TypeError
        If any of the three descriptor arguments is not a list.
    ValueError
        If the lists differ in length, a lower bound exceeds its upper
        bound, or ``num_samples`` is not a positive integer.
    """

    @functools.wraps(func)
    def wrapper(
        self,
        dimensions_names: List[str],
        lower_bounds: List[float],
        upper_bounds: List[float],
        num_samples: int,
    ):
        # The three descriptors must all be lists.
        for label, value in (
            ("Dimensions names", dimensions_names),
            ("Lower bounds", lower_bounds),
            ("Upper bounds", upper_bounds),
        ):
            if not isinstance(value, list):
                raise TypeError(f"{label} must be a list")
        # ... and parallel: one bound pair per dimension.
        if not (len(dimensions_names) == len(lower_bounds) == len(upper_bounds)):
            raise ValueError(
                "Dimensions names, lower bounds and upper bounds must have the same length"
            )
        # Every (lower, upper) pair must define a valid interval.
        if not all(low <= high for low, high in zip(lower_bounds, upper_bounds)):
            raise ValueError("Lower bounds must be less than or equal to upper bounds")
        if not isinstance(num_samples, int) or num_samples <= 0:
            raise ValueError("Variable num_samples must be integer and > 0")
        return func(self, dimensions_names, lower_bounds, upper_bounds, num_samples)

    return wrapper
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def validate_data_mda(func):
    """
    Decorator to validate data in MDA class fit method.

    Validates that ``data`` is a pandas DataFrame, that the directional
    variables and custom scale factor have the right container types,
    and that ``first_centroid_seed`` (when given) is a valid row index
    of ``data``, before delegating to the wrapped ``fit``.

    Parameters
    ----------
    func : callable
        The function to be decorated

    Returns
    -------
    callable
        The decorated function

    Raises
    ------
    TypeError
        If ``data`` is not a DataFrame, ``directional_variables`` is not
        a list, or ``custom_scale_factor`` is not a dict.
    ValueError
        If ``data`` is None, or ``first_centroid_seed`` is not an
        integer in ``[0, len(data))``.
    """

    @functools.wraps(func)
    def wrapper(
        self,
        data: pd.DataFrame,
        directional_variables: List[str] = [],
        custom_scale_factor: dict = {},
        first_centroid_seed: int = None,
    ):
        if data is None:
            raise ValueError("Data cannot be None")
        elif not isinstance(data, pd.DataFrame):
            raise TypeError("Data must be a pandas DataFrame")
        if not isinstance(directional_variables, list):
            raise TypeError("Directional variables must be a list")
        if not isinstance(custom_scale_factor, dict):
            raise TypeError("Custom scale factor must be a dict")
        if first_centroid_seed is not None:
            # The seed indexes a row of `data`, so it must lie in [0, len(data)).
            if (
                not isinstance(first_centroid_seed, int)
                or first_centroid_seed < 0
                # BUG FIX: was `> data.shape[0]`, which let seed == len(data)
                # through despite the error message promising "< num of data points".
                or first_centroid_seed >= data.shape[0]
            ):
                raise ValueError(
                    "First centroid seed must be an integer >= 0 and < num of data points"
                )
        # BUG FIX: first_centroid_seed was validated but then dropped; forward
        # it so the wrapped fit actually receives the requested seed.
        return func(
            self, data, directional_variables, custom_scale_factor, first_centroid_seed
        )

    return wrapper
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def validate_data_kma(func):
    """
    Decorator to validate data in KMA class fit method.

    Checks that ``data`` is a pandas DataFrame and that the directional
    variables and custom scale factor are of the expected container
    types before delegating to the wrapped ``fit``.

    Parameters
    ----------
    func : callable
        The function to be decorated

    Returns
    -------
    callable
        The decorated function

    Raises
    ------
    TypeError
        If an argument has the wrong container type.
    ValueError
        If ``data`` is None.
    """

    @functools.wraps(func)
    def wrapper(
        self,
        data: pd.DataFrame,
        directional_variables: List[str] = [],
        custom_scale_factor: dict = {},
    ):
        # Missing data is reported before any type checking.
        if data is None:
            raise ValueError("Data cannot be None")
        if not isinstance(data, pd.DataFrame):
            raise TypeError("Data must be a pandas DataFrame")
        # Remaining arguments only need a container-type check.
        for value, expected, message in (
            (directional_variables, list, "Directional variables must be a list"),
            (custom_scale_factor, dict, "Custom scale factor must be a dict"),
        ):
            if not isinstance(value, expected):
                raise TypeError(message)
        return func(self, data, directional_variables, custom_scale_factor)

    return wrapper
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
def validate_data_som(func):
    """
    Decorator to validate data in SOM class fit method.

    Checks that ``data`` is a pandas DataFrame, that the directional
    variables are given as a list, and that the iteration count is a
    strictly positive integer before delegating to the wrapped ``fit``.

    Parameters
    ----------
    func : callable
        The function to be decorated

    Returns
    -------
    callable
        The decorated function

    Raises
    ------
    TypeError
        If ``data`` is not a DataFrame or ``directional_variables`` is
        not a list.
    ValueError
        If ``data`` is None or ``num_iteration`` is not a positive int.
    """

    @functools.wraps(func)
    def wrapper(
        self,
        data: pd.DataFrame,
        directional_variables: List[str] = [],
        num_iteration: int = 1000,
    ):
        # Missing data is reported before any type checking.
        if data is None:
            raise ValueError("Data cannot be None")
        if not isinstance(data, pd.DataFrame):
            raise TypeError("Data must be a pandas DataFrame")
        if not isinstance(directional_variables, list):
            raise TypeError("Directional variables must be a list")
        # The training loop needs a strictly positive integer count.
        iterations_ok = isinstance(num_iteration, int) and num_iteration > 0
        if not iterations_ok:
            raise ValueError("Number of iterations must be integer and > 0")
        return func(self, data, directional_variables, num_iteration)

    return wrapper
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
def validate_data_pca(func):
    """
    Decorator to validate data in PCA class fit method.

    Validates, in order: that ``data`` is an xarray Dataset; that the
    variables to stack form a non-empty list, exist in the dataset and
    share identical dimensions and coordinates; that the coordinates to
    stack form a non-empty list present in the dataset; that the PCA row
    dimension is a dataset dimension; and that the optional window and
    NaN-replacement arguments are well formed. Then delegates to the
    wrapped ``fit``.

    Parameters
    ----------
    func : callable
        The function to be decorated

    Returns
    -------
    callable
        The decorated function

    Raises
    ------
    TypeError
        If ``data`` is not an xarray Dataset.
    ValueError
        If any other argument fails the checks above.
    """

    @functools.wraps(func)
    def wrapper(
        self,
        data: xr.Dataset,
        vars_to_stack: List[str],
        coords_to_stack: List[str],
        pca_dim_for_rows: str,
        window_in_pca_dim_for_rows: List[int] = [0],
        value_to_replace_nans: float = None,
    ):
        if data is None:
            raise ValueError("Data cannot be None")
        if not isinstance(data, xr.Dataset):
            raise TypeError("Data must be an xarray Dataset")
        # Variables to stack: non-empty list, every entry present in the data.
        if not (isinstance(vars_to_stack, list) and vars_to_stack):
            raise ValueError("Variables to stack must be a non-empty list")
        for var in vars_to_stack:
            if var not in data.data_vars:
                raise ValueError(f"Variable {var} not found in data")
        # All stacked variables must agree on dimensions and coordinates,
        # using the first variable as the reference.
        reference = vars_to_stack[0]
        reference_dims = list(data[reference].dims)
        reference_coords = list(data[reference].coords)
        for var in vars_to_stack:
            if list(data[var].dims) != reference_dims:
                raise ValueError(
                    f"All variables must have the same dimensions. Variable {var} does not match."
                )
            if list(data[var].coords) != reference_coords:
                raise ValueError(
                    f"All variables must have the same coordinates. Variable {var} does not match."
                )
        # Coordinates to stack: non-empty list, every entry present in the data.
        if not (isinstance(coords_to_stack, list) and coords_to_stack):
            raise ValueError("Coordinates to stack must be a non-empty list")
        for coord in coords_to_stack:
            if coord not in data.coords:
                raise ValueError(f"Coordinate {coord} not found in data.")
        # The row dimension for the PCA matrix must be a real dataset dimension.
        if not (isinstance(pca_dim_for_rows, str) and pca_dim_for_rows in data.dims):
            raise ValueError(
                "PCA dimension for rows must be a string and found in the data dimensions"
            )
        # Optional window: when given, it must be a non-empty list of offsets.
        if window_in_pca_dim_for_rows is not None and not (
            isinstance(window_in_pca_dim_for_rows, list) and window_in_pca_dim_for_rows
        ):
            raise ValueError("Window in PCA dimension for rows must be a non-empty list")
        # Optional NaN replacement: when given, it must be a float
        # (ints are rejected, matching the original contract).
        if value_to_replace_nans is not None and not isinstance(
            value_to_replace_nans, float
        ):
            raise ValueError("Value to replace NaNs must be float")
        return func(
            self,
            data,
            vars_to_stack,
            coords_to_stack,
            pca_dim_for_rows,
            window_in_pca_dim_for_rows,
            value_to_replace_nans,
        )

    return wrapper
|
|
249
|
+
|
|
250
|
+
|
|
251
|
+
def validate_data_rbf(func):
    """
    Decorator to validate data in RBF class fit method.

    Validates, before delegating to the wrapped method, that:

    - ``subset_data`` and ``target_data`` are non-None pandas DataFrames.
    - Each declared directional variable exists as a column of its
      respective DataFrame.
    - The scale-factor arguments are dicts, the boolean flags are bools,
      and ``num_threads`` (when given) is a positive integer.

    Parameters
    ----------
    func : callable
        The function to be decorated

    Returns
    -------
    callable
        The decorated function

    Raises
    ------
    ValueError
        If a DataFrame is None, a directional variable is not a column of
        its DataFrame, or ``num_threads`` is not a positive integer.
    TypeError
        If any argument has the wrong type.
    """

    @functools.wraps(func)
    def wrapper(
        self,
        subset_data: pd.DataFrame,
        target_data: pd.DataFrame,
        subset_directional_variables: List[str] = None,
        target_directional_variables: List[str] = None,
        subset_custom_scale_factor: dict = None,
        normalize_target_data: bool = True,
        target_custom_scale_factor: dict = None,
        num_threads: int = None,
        iteratively_update_sigma: bool = False,
    ):
        # Avoid shared mutable default arguments (B006): materialize a
        # fresh list/dict per call. None is accepted as "use the default",
        # which is backward-compatible with the previous []/{} defaults.
        if subset_directional_variables is None:
            subset_directional_variables = []
        if target_directional_variables is None:
            target_directional_variables = []
        if subset_custom_scale_factor is None:
            subset_custom_scale_factor = {}
        if target_custom_scale_factor is None:
            target_custom_scale_factor = {}
        # Subset / target data must be actual pandas DataFrames.
        if subset_data is None:
            raise ValueError("Subset data cannot be None")
        elif not isinstance(subset_data, pd.DataFrame):
            raise TypeError("Subset data must be a pandas DataFrame")
        if target_data is None:
            raise ValueError("Target data cannot be None")
        elif not isinstance(target_data, pd.DataFrame):
            raise TypeError("Target data must be a pandas DataFrame")
        # Directional variables must name existing columns.
        if not isinstance(subset_directional_variables, list):
            raise TypeError("Subset directional variables must be a list")
        for directional_variable in subset_directional_variables:
            if directional_variable not in subset_data.columns:
                raise ValueError(
                    f"Directional variable {directional_variable} not found in subset data"
                )
        if not isinstance(target_directional_variables, list):
            raise TypeError("Target directional variables must be a list")
        for directional_variable in target_directional_variables:
            if directional_variable not in target_data.columns:
                raise ValueError(
                    f"Directional variable {directional_variable} not found in target data"
                )
        # Remaining options: plain type checks.
        if not isinstance(subset_custom_scale_factor, dict):
            raise TypeError("Subset custom scale factor must be a dict")
        if not isinstance(normalize_target_data, bool):
            raise TypeError("Normalize target data must be a bool")
        if not isinstance(target_custom_scale_factor, dict):
            raise TypeError("Target custom scale factor must be a dict")
        if num_threads is not None:
            if not isinstance(num_threads, int) or num_threads <= 0:
                raise ValueError("Number of threads must be integer and > 0")
        if not isinstance(iteratively_update_sigma, bool):
            raise TypeError("Iteratively update sigma must be a boolean")
        return func(
            self,
            subset_data,
            target_data,
            subset_directional_variables,
            target_directional_variables,
            subset_custom_scale_factor,
            normalize_target_data,
            target_custom_scale_factor,
            num_threads,
            iteratively_update_sigma,
        )

    return wrapper