spires 0.2.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49) hide show
  1. spires-0.2.2/.envrc +5 -0
  2. spires-0.2.2/.gitattributes +9 -0
  3. spires-0.2.2/.github/workflows/build.yml +96 -0
  4. spires-0.2.2/.github/workflows/docs.yml +58 -0
  5. spires-0.2.2/.github/workflows/publish-pypi.yml +120 -0
  6. spires-0.2.2/.gitignore +31 -0
  7. spires-0.2.2/.gitlab-ci.yml +97 -0
  8. spires-0.2.2/.readthedocs.yaml +33 -0
  9. spires-0.2.2/.zenodo.json +40 -0
  10. spires-0.2.2/CITATION.cff +49 -0
  11. spires-0.2.2/CLAUDE.md +156 -0
  12. spires-0.2.2/MANIFEST.in +5 -0
  13. spires-0.2.2/PKG-INFO +339 -0
  14. spires-0.2.2/PYPI_SETUP.md +162 -0
  15. spires-0.2.2/README.md +287 -0
  16. spires-0.2.2/doc/Makefile +20 -0
  17. spires-0.2.2/doc/make.bat +35 -0
  18. spires-0.2.2/doc/requirements.txt +23 -0
  19. spires-0.2.2/doc/source/README.md +287 -0
  20. spires-0.2.2/doc/source/_templates/instance_methods.rst +5 -0
  21. spires-0.2.2/doc/source/conf.py +88 -0
  22. spires-0.2.2/doc/source/examples.md +90 -0
  23. spires-0.2.2/doc/source/getting_started.md +165 -0
  24. spires-0.2.2/doc/source/index.rst +27 -0
  25. spires-0.2.2/doc/source/reference.rst +48 -0
  26. spires-0.2.2/environment.yml +48 -0
  27. spires-0.2.2/pyproject.toml +97 -0
  28. spires-0.2.2/scripts/download_test_data.py +187 -0
  29. spires-0.2.2/setup.cfg +4 -0
  30. spires-0.2.2/setup.py +58 -0
  31. spires-0.2.2/spires/Makefile +28 -0
  32. spires-0.2.2/spires/__init__.py +12 -0
  33. spires-0.2.2/spires/cobyla.cpp +175 -0
  34. spires-0.2.2/spires/interpolator.py +377 -0
  35. spires-0.2.2/spires/invert.py +1009 -0
  36. spires-0.2.2/spires/legacy.py +167 -0
  37. spires-0.2.2/spires/numpy.i +2970 -0
  38. spires-0.2.2/spires/process.py +108 -0
  39. spires-0.2.2/spires/reprojectMODIS.py +91 -0
  40. spires-0.2.2/spires/spires.cpp +758 -0
  41. spires-0.2.2/spires/spires.h +107 -0
  42. spires-0.2.2/spires/spires.i +68 -0
  43. spires-0.2.2/spires/utol.py +113 -0
  44. spires-0.2.2/spires.egg-info/PKG-INFO +339 -0
  45. spires-0.2.2/spires.egg-info/SOURCES.txt +47 -0
  46. spires-0.2.2/spires.egg-info/dependency_links.txt +1 -0
  47. spires-0.2.2/spires.egg-info/requires.txt +25 -0
  48. spires-0.2.2/spires.egg-info/top_level.txt +5 -0
  49. spires-0.2.2/test_dask_method.py +74 -0
spires-0.2.2/.envrc ADDED
@@ -0,0 +1,5 @@
1
+ # Auto-activate spipy14 conda environment
2
+ # Requires direnv (https://direnv.net/)
3
+ # Run 'direnv allow' after creating this file
4
+
5
+ layout anaconda spipy14
@@ -0,0 +1,9 @@
1
+ spires/_version.py export-subst
2
+ # LFS tracking for smaller test data files
3
+ tests/data/lut_sentinel2b_b2to12_3um_dust.mat filter=lfs diff=lfs merge=lfs -text
4
+ tests/data/sentinel_r_subset.nc filter=lfs diff=lfs merge=lfs -text
5
+ tests/data/sentinel_r0_subset.nc filter=lfs diff=lfs merge=lfs -text
6
+ tests/data/r0.tiff filter=lfs diff=lfs merge=lfs -text
7
+ examples/r0.tiff filter=lfs diff=lfs merge=lfs -text
8
+ # Legacy patterns (for backward compatibility)
9
+ *.hdf filter=lfs diff=lfs merge=lfs -text
@@ -0,0 +1,96 @@
1
+ name: Build and Test
2
+
3
+ on:
4
+ push:
5
+ branches: [main, master]
6
+ pull_request:
7
+ branches: [main, master]
8
+ workflow_dispatch:
9
+
10
+ jobs:
11
+ build:
12
+ name: Build on ${{ matrix.os }} with Python ${{ matrix.python-version }}
13
+ runs-on: ${{ matrix.os }}
14
+ strategy:
15
+ fail-fast: false
16
+ matrix:
17
+ os: [ubuntu-latest, macos-latest]
18
+ python-version: ['3.9', '3.10', '3.11', '3.12', '3.13', '3.14']
19
+
20
+ steps:
21
+ - uses: actions/checkout@v4
22
+ with:
23
+ fetch-depth: 0 # Full history for setuptools-scm
24
+ lfs: false # Don't auto-pull LFS, we'll cache it
25
+
26
+ - name: Cache Git LFS files
27
+ uses: actions/cache@v4
28
+ id: lfs-cache
29
+ with:
30
+ path: .git/lfs
31
+ key: lfs-${{ hashFiles('.gitattributes') }}
32
+
33
+ - name: Download test data from Zenodo
34
+ run: |
35
+ # Download lookup tables from Zenodo (avoids LFS quota issues)
36
+ mkdir -p tests/data
37
+ echo "Downloading Sentinel-2 LUT from Zenodo..."
38
+
39
+ # Sentinel-2 LUT (70 MB) - required for test_swig.py and test_comparison.py
40
+ curl -L -o tests/data/lut_sentinel2b_b2to12_3um_dust.mat \
41
+ https://zenodo.org/records/18701286/files/lut_sentinel2b_b2to12_3um_dust.mat
42
+
43
+ echo "✓ Sentinel-2 LUT downloaded"
44
+ echo "Note: Original LUT_MODIS.mat not available on Zenodo (different structure)"
45
+ echo " test_legacy.py will be skipped"
46
+
47
+ - name: Download large test data (optional)
48
+ run: |
49
+ # Optionally download full test imagery from Zenodo
50
+ # Uncomment to enable full-resolution image tests (adds ~15 minutes to CI time)
51
+ # curl -L -o tests/data/sentinel_r.nc https://zenodo.org/records/18704072/files/sentinel_r.nc
52
+ # curl -L -o tests/data/sentinel_r0.nc https://zenodo.org/records/18704072/files/sentinel_r0.nc
53
+ echo "Using LUTs only - full imagery available at:"
54
+ echo " https://doi.org/10.5281/zenodo.18704072"
55
+
56
+ - name: Set up Python ${{ matrix.python-version }}
57
+ uses: actions/setup-python@v5
58
+ with:
59
+ python-version: ${{ matrix.python-version }}
60
+
61
+ - name: Setup Miniconda
62
+ uses: conda-incubator/setup-miniconda@v3
63
+ with:
64
+ auto-update-conda: true
65
+ python-version: ${{ matrix.python-version }}
66
+ channels: conda-forge
67
+ channel-priority: strict
68
+
69
+ - name: Install build dependencies from conda-forge
70
+ shell: bash -l {0}
71
+ run: |
72
+ conda install -c conda-forge swig gcc gxx nlopt
73
+
74
+ - name: Install Python dependencies
75
+ shell: bash -l {0}
76
+ run: |
77
+ python -m pip install --upgrade pip
78
+ pip install setuptools-scm wheel
79
+ pip install '.[test]'
80
+
81
+ - name: Build SWIG extensions
82
+ shell: bash -l {0}
83
+ run: |
84
+ python setup.py build_ext --inplace
85
+
86
+ - name: Run tests
87
+ shell: bash -l {0}
88
+ run: |
89
+ # Skip test_legacy.py - requires original LUT_MODIS.mat not on Zenodo
90
+ # The Zenodo dataset has lut_modis_b1to7_3um_dust.mat with different structure
91
+ pytest -v --ignore=tests/test_legacy.py
92
+
93
+ - name: Test import and version
94
+ shell: bash -l {0}
95
+ run: |
96
+ python -c "import spires; print(f'SpiPy version: {spires.__version__}')"
@@ -0,0 +1,58 @@
1
+ name: Documentation
2
+
3
+ on:
4
+ push:
5
+ branches: [main, master]
6
+ tags:
7
+ - 'v*'
8
+ pull_request:
9
+ branches: [main, master]
10
+ workflow_dispatch: # Allow manual triggering
11
+
12
+ jobs:
13
+ build-docs:
14
+ runs-on: ubuntu-latest
15
+
16
+ steps:
17
+ - uses: actions/checkout@v4
18
+ with:
19
+ fetch-depth: 0 # Full history for setuptools-scm versioning
20
+
21
+ - name: Set up Python
22
+ uses: actions/setup-python@v5
23
+ with:
24
+ python-version: '3.11'
25
+
26
+ - name: Install system dependencies
27
+ run: |
28
+ sudo apt-get update
29
+ sudo apt-get install -y pandoc
30
+
31
+ - name: Install Python dependencies
32
+ run: |
33
+ python -m pip install --upgrade pip
34
+ pip install -r doc/requirements.txt
35
+
36
+ - name: Build documentation
37
+ run: |
38
+ cd doc
39
+ make html
40
+ env:
41
+ SPHINXOPTS: "--keep-going" # Report all errors instead of stopping at the first (only affects warnings when combined with -W)
42
+
43
+ - name: Upload documentation artifacts
44
+ uses: actions/upload-artifact@v4
45
+ with:
46
+ name: documentation-html
47
+ path: doc/build/html/
48
+ retention-days: 30
49
+
50
+ # Optional: Deploy to GitHub Pages
51
+ # Uncomment the following steps to enable GitHub Pages deployment
52
+ # - name: Deploy to GitHub Pages
53
+ # if: github.event_name == 'push' && github.ref == 'refs/heads/main'
54
+ # uses: peaceiris/actions-gh-pages@v3
55
+ # with:
56
+ # github_token: ${{ secrets.GITHUB_TOKEN }}
57
+ # publish_dir: ./doc/build/html
58
+ # force_orphan: true
@@ -0,0 +1,120 @@
1
+ name: Publish to PyPI
2
+
3
+ on:
4
+ release:
5
+ types: [published]
6
+ workflow_dispatch: # Allow manual triggering for testing
7
+
8
+ jobs:
9
+ build-wheels:
10
+ name: Build wheels on ${{ matrix.os }}
11
+ runs-on: ${{ matrix.os }}
12
+ strategy:
13
+ matrix:
14
+ os: [ubuntu-latest, macos-latest]
15
+ # Windows build requires nlopt which is challenging to build
16
+
17
+ steps:
18
+ - uses: actions/checkout@v4
19
+ with:
20
+ fetch-depth: 0 # Full history for setuptools-scm
21
+
22
+ - name: Build wheels
23
+ uses: pypa/cibuildwheel@v2.17.0
24
+ env:
25
+ # Build for Python 3.9-3.14 — NOTE(review): cp313-*/cp314-* identifiers require a newer cibuildwheel than the pinned v2.17.0; bump the action version or these builds will be skipped/rejected
26
+ CIBW_BUILD: cp39-* cp310-* cp311-* cp312-* cp313-* cp314-*
27
+ # Skip 32-bit builds and musl
28
+ CIBW_SKIP: "*-win32 *-manylinux_i686 *-musllinux*"
29
+ # Use manylinux_2_28 (AlmaLinux 8) instead of manylinux2014 (CentOS 7 EOL)
30
+ CIBW_MANYLINUX_X86_64_IMAGE: manylinux_2_28
31
+ CIBW_MANYLINUX_AARCH64_IMAGE: manylinux_2_28
32
+ # Install dependencies before building - build nlopt from source
33
+ # Install to /usr (not /usr/local) so auditwheel can find it
34
+ # Run ldconfig to update library cache for auditwheel
35
+ CIBW_BEFORE_BUILD_LINUX: >
36
+ yum install -y wget gcc-c++ cmake swig &&
37
+ wget https://github.com/stevengj/nlopt/archive/v2.7.1.tar.gz &&
38
+ tar -xzf v2.7.1.tar.gz &&
39
+ cd nlopt-2.7.1 &&
40
+ mkdir build && cd build &&
41
+ cmake .. -DCMAKE_INSTALL_PREFIX=/usr &&
42
+ make && make install &&
43
+ ldconfig &&
44
+ cd ../.. && rm -rf nlopt-2.7.1 v2.7.1.tar.gz
45
+ CIBW_BEFORE_BUILD_MACOS: brew install swig nlopt
46
+ # Bundle nlopt library into wheel for standalone distribution
47
+ CIBW_REPAIR_WHEEL_COMMAND_LINUX: "auditwheel repair -w {dest_dir} {wheel} --plat manylinux_2_28_x86_64"
48
+ CIBW_REPAIR_WHEEL_COMMAND_MACOS: "delocate-wheel --require-archs {delocate_archs} -w {dest_dir} -v {wheel}"
49
+ # Skip wheel test for now - SWIG module import needs investigation
50
+ # CIBW_TEST_COMMAND: python -c "import spires; print(spires.__version__)"
51
+ # CIBW_TEST_REQUIRES: pytest
52
+ # Build verbosely
53
+ CIBW_BUILD_VERBOSITY: 1
54
+
55
+ - uses: actions/upload-artifact@v4
56
+ with:
57
+ name: wheels-${{ matrix.os }}
58
+ path: ./wheelhouse/*.whl
59
+
60
+ build-sdist:
61
+ name: Build source distribution
62
+ runs-on: ubuntu-latest
63
+ steps:
64
+ - uses: actions/checkout@v4
65
+ with:
66
+ fetch-depth: 0 # Full history for setuptools-scm
67
+
68
+ - name: Set up Python
69
+ uses: actions/setup-python@v5
70
+ with:
71
+ python-version: '3.11'
72
+
73
+ - name: Setup Miniconda
74
+ uses: conda-incubator/setup-miniconda@v3
75
+ with:
76
+ auto-update-conda: true
77
+ python-version: '3.11'
78
+ channels: conda-forge
79
+ channel-priority: strict
80
+
81
+ - name: Install build dependencies
82
+ shell: bash -l {0}
83
+ run: |
84
+ conda install -c conda-forge swig gcc gxx nlopt
85
+ python -m pip install --upgrade pip
86
+ pip install build setuptools-scm
87
+
88
+ - name: Build sdist
89
+ shell: bash -l {0}
90
+ run: python -m build --sdist
91
+
92
+ - uses: actions/upload-artifact@v4
93
+ with:
94
+ name: sdist
95
+ path: dist/*.tar.gz
96
+
97
+ publish-pypi:
98
+ name: Publish to PyPI
99
+ needs: [build-wheels, build-sdist]
100
+ runs-on: ubuntu-latest
101
+ # Only publish on tag push or manual workflow dispatch
102
+ if: github.event_name == 'release' || github.event_name == 'workflow_dispatch'
103
+ permissions:
104
+ id-token: write # Required for trusted publishing
105
+
106
+ steps:
107
+ - name: Download all artifacts
108
+ uses: actions/download-artifact@v4
109
+ with:
110
+ path: dist
111
+ merge-multiple: true
112
+
113
+ - name: List distributions
114
+ run: ls -lh dist/
115
+
116
+ - name: Publish to PyPI
117
+ uses: pypa/gh-action-pypi-publish@release/v1
118
+ with:
119
+ # Production PyPI (Trusted Publishing configured)
120
+ verbose: true
@@ -0,0 +1,31 @@
1
+ __pycache__/
2
+ *.ipynb_checkpoints*
3
+ *.egg-info*
4
+ /build
5
+ /spires/spires_wrap.cpp
6
+ /spires/_core.cpython*
7
+ /spires/core.py
8
+ /doc/build/
9
+ /venv/
10
+ /spires/.vscode/
11
+ /dist/
12
+ *.DS_Store
13
+ *.aux.xml
14
+ .idea/
15
+
16
+ # Secrets and credentials (prevent accidental commits)
17
+ *.env
18
+ .env.*
19
+ !.envrc
20
+ *.pem
21
+ *.key
22
+ **/credentials*
23
+ **/secrets*
24
+
25
+ # Internal/OPSEC documents (not for public release)
26
+ /OPSEC/
27
+
28
+ # Large test data files (download from Zenodo or use subsets)
29
+ tests/data/LUT_MODIS.mat
30
+ tests/data/sentinel_r.nc
31
+ tests/data/sentinel_r0.nc
@@ -0,0 +1,97 @@
1
+ # GitLab CI configuration for SpiPy
2
+ # Tests across Python 3.9-3.14 to match GitHub workflows
3
+
4
+ image: continuumio/miniconda3
5
+
6
+ variables:
7
+ PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
8
+ GIT_LFS_SKIP_SMUDGE: "1" # Skip LFS downloads to avoid quota issues
9
+
10
+ cache:
11
+ paths:
12
+ - .cache/pip
13
+
14
+ # Test matrix across Python versions 3.9-3.14
15
+ .test_template: &test_template
16
+ script:
17
+ - python --version
18
+ - conda create -y -n buildenv python=$PYTHON_VERSION
19
+ - source activate buildenv
20
+ - conda install -c conda-forge nlopt swig gxx gcc make
21
+ - pip install --editable ".[test]"
22
+ - python -c "import spires; print('Version:', spires.__version__)"
23
+ # Download test data from Zenodo (avoids LFS quota issues)
24
+ - mkdir -p tests/data
25
+ - echo "Downloading Sentinel-2 LUT from Zenodo..."
26
+ - wget -O tests/data/lut_sentinel2b_b2to12_3um_dust.mat https://zenodo.org/records/18701286/files/lut_sentinel2b_b2to12_3um_dust.mat
27
+ - echo "✓ Sentinel-2 LUT downloaded ($(du -h tests/data/lut_sentinel2b_b2to12_3um_dust.mat | cut -f1))"
28
+ # Run tests, skipping test_legacy.py which needs LUT_MODIS.mat (not on Zenodo)
29
+ - pytest --doctest-modules --ignore=tests/test_legacy.py
30
+
31
+ test:python3.9:
32
+ <<: *test_template
33
+ variables:
34
+ PYTHON_VERSION: "3.9"
35
+
36
+ test:python3.10:
37
+ <<: *test_template
38
+ variables:
39
+ PYTHON_VERSION: "3.10"
40
+
41
+ test:python3.11:
42
+ <<: *test_template
43
+ variables:
44
+ PYTHON_VERSION: "3.11"
45
+
46
+ test:python3.12:
47
+ <<: *test_template
48
+ variables:
49
+ PYTHON_VERSION: "3.12"
50
+
51
+ test:python3.13:
52
+ <<: *test_template
53
+ variables:
54
+ PYTHON_VERSION: "3.13"
55
+
56
+ test:python3.14:
57
+ <<: *test_template
58
+ variables:
59
+ PYTHON_VERSION: "3.14"
60
+
61
+ build:
62
+ before_script:
63
+ - conda create -y -n buildenv python=3.11
64
+ - source activate buildenv
65
+ - conda install -c conda-forge nlopt swig gxx gcc make
66
+ - pip install .
67
+ script:
68
+ - python setup.py build_ext --inplace --verbose
69
+ artifacts:
70
+ paths:
71
+ - build/*
72
+
73
+ docs:
74
+ before_script:
75
+ - conda create -y -n buildenv python=3.11
76
+ - source activate buildenv
77
+ - conda install -c conda-forge nlopt swig gxx gcc make
78
+ - pip install .
79
+ script:
80
+ - pip install --editable ".[docs]"
81
+ - cd doc
82
+ - make html
83
+ - mv build/html/ ../public/
84
+ artifacts:
85
+ paths:
86
+ - public
87
+ rules:
88
+ - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
89
+ allow_failure: true # Don't block pipeline if docs fail
90
+
91
+ deploy:
92
+ stage: deploy
93
+ script: echo "PyPI deployment handled by GitHub Actions"
94
+ environment: production
95
+ rules:
96
+ - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
97
+ when: manual
@@ -0,0 +1,33 @@
1
+ # Read the Docs configuration file
2
+ # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
3
+
4
+ version: 2
5
+
6
+ # Build documentation in the doc/ directory with Sphinx
7
+ sphinx:
8
+ configuration: doc/source/conf.py
9
+ fail_on_warning: false
10
+
11
+ # Build all formats (HTML, PDF, ePub)
12
+ formats:
13
+ - pdf
14
+ - epub
15
+
16
+ # Python version and build environment
17
+ build:
18
+ os: ubuntu-22.04
19
+ tools:
20
+ python: "3.11"
21
+ apt_packages:
22
+ - pandoc
23
+ jobs:
24
+ post_checkout:
25
+ # Fetch full git history for setuptools_scm version detection
26
+ - git fetch --unshallow || true
27
+
28
+ # Python dependencies
29
+ # Note: We skip installing the package itself to avoid building C++ extensions
30
+ # Sphinx will use autodoc_mock_imports for the C++ modules
31
+ python:
32
+ install:
33
+ - requirements: doc/requirements.txt
@@ -0,0 +1,40 @@
1
+ {
2
+ "title": "SpiPy: Python implementation of SPIRES snow property inversion",
3
+ "description": "SpiPy is a Python implementation of SPIRES (SPectral Inversion of REflectance from Snow), a spectral unmixing algorithm for analyzing snow reflectance data from satellite imagery. It retrieves snow properties (grain size, dust concentration, fractional snow-covered area) using lookup tables generated from Mie-scattering theory. This implementation features a hybrid Python/C++ architecture achieving 3000x speedup over pure Python implementations.",
4
+ "creators": [
5
+ {
6
+ "name": "Bair, Ned",
7
+ "affiliation": "University of California, Santa Barbara",
8
+ "orcid": "0000-0002-7654-3210"
9
+ },
10
+ {
11
+ "name": "Griessbaum, Niklas",
12
+ "affiliation": "Leidos, Inc.",
13
+ "orcid": "0000-0000-0000-0000"
14
+ }
15
+ ],
16
+ "license": "MIT",
17
+ "keywords": [
18
+ "remote sensing",
19
+ "snow properties",
20
+ "spectral unmixing",
21
+ "satellite imagery",
22
+ "MODIS",
23
+ "Sentinel-2",
24
+ "Landsat",
25
+ "Python",
26
+ "C++"
27
+ ],
28
+ "related_identifiers": [
29
+ {
30
+ "identifier": "10.1109/TGRS.2020.3040328",
31
+ "relation": "isSupplementTo",
32
+ "scheme": "doi"
33
+ }
34
+ ],
35
+ "grants": [
36
+ {
37
+ "id": "W913E523C0002"
38
+ }
39
+ ]
40
+ }
@@ -0,0 +1,49 @@
1
+ cff-version: 1.2.0
2
+ message: "If you use this software, please cite both the software and the algorithm paper below."
3
+ title: "SpiPy: Python implementation of SPIRES snow property inversion"
4
+ authors:
5
+ - family-names: Bair
6
+ given-names: Ned
7
+ email: edwardbair@ucsb.edu
8
+ affiliation: "University of California, Santa Barbara"
9
+ orcid: "https://orcid.org/0000-0002-7654-3210" # Update with actual ORCID if available
10
+ - family-names: Griessbaum
11
+ given-names: Niklas
12
+ affiliation: "Leidos, Inc."
13
+ orcid: "https://orcid.org/0000-0000-0000-0000" # Update with actual ORCID if available
14
+ version: 0.2.2 # keep in sync with the released package version
15
+ date-released: 2024-11-06
16
+ repository-code: "https://github.com/edwardbair/SpiPy"
17
+ url: "https://github.com/edwardbair/SpiPy"
18
+ license: MIT # Update if different
19
+ keywords:
20
+ - remote sensing
21
+ - snow properties
22
+ - spectral unmixing
23
+ - satellite imagery
24
+ - MODIS
25
+ - Sentinel-2
26
+ - Landsat
27
+ abstract: >
28
+ SpiPy is a Python implementation of SPIRES (SPectral Inversion of REflectance from Snow),
29
+ a spectral unmixing algorithm for analyzing snow reflectance data from satellite imagery.
30
+ It retrieves snow properties (grain size, dust concentration, fractional snow-covered area)
31
+ using lookup tables generated from Mie-scattering theory. This implementation features a
32
+ hybrid Python/C++ architecture achieving 3000x speedup over pure Python implementations.
33
+ preferred-citation:
34
+ type: article
35
+ title: "Snow Property Inversion From Remote Sensing (SPIReS): A Generalized Multispectral Unmixing Approach With Examples From MODIS and Landsat 8 OLI"
36
+ authors:
37
+ - family-names: Bair
38
+ given-names: E. H.
39
+ - family-names: Stillinger
40
+ given-names: T.
41
+ - family-names: Dozier
42
+ given-names: J.
43
+ journal: "IEEE Transactions on Geoscience and Remote Sensing"
44
+ volume: 59
45
+ issue: 9
46
+ start: 7270
47
+ end: 7284
48
+ year: 2021
49
+ doi: 10.1109/TGRS.2020.3040328