SnowMapPy 1.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cloud/__init__.py +11 -0
- cloud/auth.py +22 -0
- cloud/loader.py +91 -0
- cloud/processor.py +398 -0
- core/__init__.py +25 -0
- core/data_io.py +180 -0
- core/quality.py +142 -0
- core/spatial.py +131 -0
- core/temporal.py +36 -0
- core/utils.py +54 -0
- local/__init__.py +12 -0
- local/file_handler.py +38 -0
- local/preparator.py +146 -0
- local/processor.py +141 -0
- snowmappy-1.0.1.dist-info/METADATA +242 -0
- snowmappy-1.0.1.dist-info/RECORD +25 -0
- snowmappy-1.0.1.dist-info/WHEEL +5 -0
- snowmappy-1.0.1.dist-info/top_level.txt +4 -0
- tests/__init__.py +1 -0
- tests/test_cloud/__init__.py +1 -0
- tests/test_cloud/real_cloud_test.py +414 -0
- tests/test_cloud/test_basic_cloud.py +219 -0
- tests/test_core/__init__.py +1 -0
- tests/test_core/test_quality.py +69 -0
- tests/test_local/__init__.py +1 -0
local/preparator.py
ADDED
|
@@ -0,0 +1,146 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import rasterio
|
|
3
|
+
import datetime
|
|
4
|
+
import numpy as np
|
|
5
|
+
import xarray as xr
|
|
6
|
+
from tqdm import tqdm
|
|
7
|
+
from rasterio.features import geometry_mask
|
|
8
|
+
from rasterio.mask import mask as rasterio_mask
|
|
9
|
+
|
|
10
|
+
from ..core.spatial import clip_dem_to_roi, check_overlap, reproject_shp, handle_reprojection
|
|
11
|
+
from ..core.data_io import save_as_zarr, load_shapefile
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def prepare_modis(data_dir, save_dir, dem_path, shp_path, oparams_file=None,
                  priority='MODIS', save_name='MODIS', save_dem=True, dem_name='DEM'):
    """
    Clip daily MODIS Snow Cover GeoTIFFs to a ROI and stack them into one dataset.

    Walks every sub-directory of ``data_dir`` looking for a ``*Snow_Cover*.tif``
    file, reconciles the CRS of the image and the DEM (controlled by
    ``priority``), masks each image with the ROI shapefile, stacks the results
    along time and saves them as a zarr store.

    Parameters
    ----------
    data_dir : str
        Directory whose sub-directories each hold one day's MODIS files.
    save_dir : str
        Directory under which the output store(s) are created.
    dem_path : str
        Path to the DEM GeoTIFF used for CRS reconciliation / optional export.
    shp_path : str
        Path to the ROI shapefile.
    oparams_file : str, optional
        Parameters file forwarded to ``save_as_zarr``.
    priority : {'MODIS', 'DEM'}
        Which CRS wins when the image and DEM disagree.
    save_name : str
        Name of the MODIS output store (sub-directory of ``save_dir``).
    save_dem : bool
        Also clip and save the DEM when True.
    dem_name : str
        Name of the DEM output store.

    Returns
    -------
    xarray.Dataset
        Dataset with an ``NDSI`` variable dimensioned ``(time, lat, lon)``.

    Raises
    ------
    ValueError
        If no usable Snow Cover image was found in any sub-directory.
    """
    # Load the ROI shapefile once; it is reprojected on the first valid image.
    roi = load_shapefile(shp_path)

    # NOTE(review): os.chdir mutates process-global state; kept for backward
    # compatibility in case callers rely on the working directory afterwards.
    os.chdir(data_dir)
    subdirs = os.listdir(data_dir)

    modis_save_dir = os.path.join(save_dir, save_name)
    os.makedirs(modis_save_dir, exist_ok=True)
    if save_dem:
        dem_save_dir = os.path.join(save_dir, 'DEM')
        os.makedirs(dem_save_dir, exist_ok=True)

    # Open the DEM once just to read its CRS.
    with rasterio.open(dem_path) as dem_src:
        dem_crs = dem_src.crs

    # Accumulators for the processed images and their acquisition dates.
    modis_data_list = []
    time_list = []
    lat = None
    lon = None
    first_valid = True  # ROI is reprojected on the first readable image

    for k in tqdm(range(len(subdirs)), desc="Processing directories"):
        currD = os.path.join(data_dir, subdirs[k])
        files = os.listdir(currD)
        os.chdir(currD)

        # Look for a Snow Cover GeoTIFF in this day's directory.
        scfile = [f for f in files if 'Snow_Cover' in f and f.endswith('.tif')]
        if not scfile:
            tqdm.write(f'No Snow Cover data found in {currD}.')
            continue

        fname = scfile[0]
        # File names embed the acquisition date as YYYYDDD at characters 9-15.
        DateSve = datetime.datetime.strptime(fname[9:16], '%Y%j').strftime('%Y-%m-%d')
        img_path = os.path.join(currD, fname)

        # Read the image CRS first so any reprojection happens before the
        # dataset used for masking is opened.  (The previous implementation
        # rebound ``src`` inside a ``with`` block, leaking the new handle.)
        with rasterio.open(img_path) as probe:
            img_crs = probe.crs

        if img_crs != dem_crs:
            if priority == 'MODIS' and k == 0:
                # Reproject the DEM once to match the MODIS CRS.
                dem_file = os.path.basename(dem_path)
                if save_dem:
                    # BUGFIX: the original joined onto dem_path (a *file*),
                    # yielding an unwritable path; write into the DEM output
                    # directory instead.
                    reprojected_dem = os.path.join(dem_save_dir, f"reprojected_{dem_file}")
                else:
                    reprojected_dem = os.path.join(os.path.dirname(dem_path), f"reprojected_{dem_file}")
                handle_reprojection(img_path, dem_path, reprojected_dem, priority=priority)
                dem_path = reprojected_dem
                with rasterio.open(dem_path) as new_dem:
                    dem_crs = new_dem.crs
            elif priority == 'DEM':
                # Reproject this MODIS image to match the DEM CRS.
                reprojected_image = os.path.join(currD, f"reprojected_{fname}")
                handle_reprojection(img_path, dem_path, reprojected_image, priority=priority)
                img_path = reprojected_image

        with rasterio.open(img_path) as src:
            # On the first valid image, bring the ROI into the image's CRS.
            if first_valid:
                reprojected_roi = reproject_shp(roi, src.crs)
                first_valid = False

            if not check_overlap(src, reprojected_roi):
                tqdm.write(f"ROI does not overlap with raster in {currD}. Skipping...")
                continue

            try:
                # Crop the raster to the ROI geometry.
                SCA, out_transf = rasterio_mask(src, [reprojected_roi.geometry.iloc[0]],
                                                crop=True, all_touched=True, pad=True)
                SCA = SCA[0]  # drop the band dimension
            except ValueError as e:
                tqdm.write(f"Masking failed in {currD}: {e}")
                continue

        # Build the ROI mask and coordinate axes once; all clipped images are
        # assumed to share the same shape and transform.
        if lat is None or lon is None:
            ROI_mask = geometry_mask(reprojected_roi.geometry,
                                     transform=out_transf,
                                     invert=True,
                                     out_shape=SCA.shape)
            # 1 inside the ROI, NaN outside, so multiplication masks pixels.
            ROI_mask = np.where(ROI_mask == 0, np.nan, 1)

            bounds = rasterio.transform.array_bounds(SCA.shape[0], SCA.shape[1], out_transf)
            X, Y = np.meshgrid(np.linspace(bounds[0], bounds[2], SCA.shape[1]),
                               np.linspace(bounds[3], bounds[1], SCA.shape[0]))
            lat = Y[:, 0].tolist()
            lon = X[0, :].tolist()

        # Mask out pixels outside the ROI and record the result.
        SCA_ROI = SCA * ROI_mask
        modis_data_list.append(SCA_ROI)
        time_list.append(DateSve)

    if not modis_data_list:
        raise ValueError("No valid MODIS data found in the specified directory.")

    # Stack the per-day images along a leading time axis.
    modis_array = np.stack(modis_data_list, axis=0)
    ds = xr.Dataset(
        data_vars={
            'NDSI': (('time', 'lat', 'lon'), modis_array)
        },
        coords={
            'time': time_list,
            'lat': lat,
            'lon': lon
        }
    )

    # Persist the stacked dataset.
    save_as_zarr(ds, modis_save_dir, 'MODIS', oparams_file)

    # Save the (possibly reprojected) DEM clipped to the ROI if requested.
    if save_dem:
        clip_dem_to_roi(dem_path, roi, dem_save_dir, dem_name, oparams_file)

    return ds
|
local/processor.py
ADDED
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import sys
|
|
3
|
+
import datetime
|
|
4
|
+
import numpy as np
|
|
5
|
+
import pandas as pd
|
|
6
|
+
import xarray as xr
|
|
7
|
+
from tqdm import tqdm
|
|
8
|
+
import matplotlib.pyplot as plt
|
|
9
|
+
from ..core.temporal import vectorized_interpolation_griddata_parallel
|
|
10
|
+
from ..core.data_io import save_as_zarr, load_dem_and_nanmask, load_shapefile
|
|
11
|
+
from ..core.utils import generate_time_series
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def load_or_create_nan_array(dataset, date, shape, var_name):
    """
    Return the image slice for *date* from *dataset*, or an all-NaN array.

    Parameters
    ----------
    dataset : xarray.Dataset
        Dataset with a ``time`` coordinate of ``'YYYY-MM-DD'`` strings.
    date : datetime-like
        Day to look up; formatted with ``strftime('%Y-%m-%d')`` for matching.
    shape : tuple of int
        Shape of the NaN fallback array when the date is absent.
    var_name : str
        Variable to extract from the dataset.

    Returns
    -------
    numpy.ndarray
        The stored slice when the date exists, otherwise ``np.full(shape, nan)``.
    """
    key = date.strftime('%Y-%m-%d')
    if key not in dataset.time.values:
        # Missing day: stand in with a NaN image of the expected shape.
        return np.full(shape, np.nan)
    return dataset.sel(time=key)[var_name].values
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def process_files_array(series, movwind, currentday_ind, mod_data, myd_data,
                        dem, nanmask, daysbefore, daysafter, var_name):
    """
    Slide a moving window over the daily series, merge Terra and Aqua images,
    gap-fill, and accumulate the processed images into a single array.

    Parameters
    ----------
    series : sequence of datetime-like
        Continuous daily time series, padded by the window on both sides.
    movwind : sequence of int
        Day offsets (relative to the current day) making up the window.
    currentday_ind : int
        Index of the current day inside the moving window.
    mod_data, myd_data : xarray.Dataset
        Terra (MOD) and Aqua (MYD) datasets holding *var_name*.
    dem : numpy.ndarray
        Elevation grid aligned with the images.
    nanmask : numpy.ndarray of bool
        True where pixels must be blanked (outside the DEM footprint).
    daysbefore, daysafter : int
        Window padding before/after the current day.
    var_name : str
        Name of the variable to process (e.g. ``'NDSI'``).

    Returns
    -------
    tuple of (numpy.ndarray, list)
        Array shaped ``(lat, lon, n_days)`` of processed images and the
        matching list of dates.
    """
    # NOTE(review): assumes mod_data[var_name] is dimensioned (lat, lon, time)
    # -- confirm against the producer of these datasets.
    mod_arr = mod_data[var_name].values
    lat_dim, lon_dim, _ = mod_arr.shape
    n_processed = len(series) - daysbefore - daysafter
    out_arr = np.empty((lat_dim, lon_dim, n_processed), dtype=np.float64)
    out_dates = []

    # MODIS NDSI_Snow_Cover codes that mark unusable pixels (missing, night,
    # cloud, detector problems, fill, ...).  Hoisted out of the loop: it is
    # loop-invariant.
    codvals = [200, 201, 211, 237, 239, 250, 254, 255]

    for i in tqdm(range(daysbefore, len(series) - daysafter), desc="Processing Files"):

        if i == daysbefore:
            # First iteration: materialise the whole moving window.
            window_mod = np.array([load_or_create_nan_array(mod_data, series[i + j], (lat_dim, lon_dim), var_name) for j in movwind])
            window_myd = np.array([load_or_create_nan_array(myd_data, series[i + j], (lat_dim, lon_dim), var_name) for j in movwind])

            # Move the time dimension to the last axis: (lat, lon, window).
            window_mod = np.moveaxis(window_mod, 0, -1)
            window_myd = np.moveaxis(window_myd, 0, -1)
        else:
            # Later iterations: shift the window one day and load only the
            # newest slice into the freed last position.
            window_mod = np.roll(window_mod, -1, axis=2)
            window_myd = np.roll(window_myd, -1, axis=2)

            window_mod[:, :, -1] = np.array(load_or_create_nan_array(mod_data, series[i + daysafter], (lat_dim, lon_dim), var_name))
            window_myd[:, :, -1] = np.array(load_or_create_nan_array(myd_data, series[i + daysafter], (lat_dim, lon_dim), var_name))

        # Blank out pixels outside the DEM footprint.
        window_mod[nanmask, :] = np.nan
        window_myd[nanmask, :] = np.nan

        # Merge Aqua into Terra wherever Terra carries a bad quality code but
        # Aqua does not.
        MODind = np.isin(window_mod, codvals)
        MYDind = np.isin(window_myd, codvals)
        MERGEind = (MODind == 1) & (MYDind == 0)
        NDSIFill_MERGE = np.where(MERGEind, window_myd, window_mod)

        # Current-day slice; note this is a view into NDSIFill_MERGE, so
        # in-place edits below propagate into the merged window.
        NDSI_merge = np.squeeze(NDSIFill_MERGE[:, :, currentday_ind])

        # Quality-control heuristic: when fewer than 60% of high-elevation
        # pixels carry bad codes, assume snow above the mean snow elevation.
        cond1 = np.float64(dem > 1000)
        cond2 = np.float64((dem > 1000) & np.isin(NDSI_merge, codvals))
        if (np.sum(cond2) / np.sum(cond1)) < 0.60:
            sc = (NDSI_merge == 100)
            if np.sum(sc) > 10:
                # BUGFIX: compute the mean only when enough snow pixels exist
                # (the original took np.mean of a possibly empty selection,
                # and left a debug print in the branch).
                meanZ = np.mean(dem[sc])
                ind = (dem > meanZ) & np.isin(NDSI_merge, codvals)
                NDSI_merge[ind] = 100

        # Clean up remaining quality codes and interpolate missing pixels.
        NDSIFill_MERGE[NDSIFill_MERGE > 100] = np.nan
        NDSIFill_MERGE = vectorized_interpolation_griddata_parallel(NDSIFill_MERGE, nanmask)
        NDSIFill_MERGE = np.clip(NDSIFill_MERGE, 0, 100)

        NDSI = np.squeeze(NDSIFill_MERGE[:, :, currentday_ind])
        # Below 1000 m we assume no snow.
        dem_ind = dem < 1000
        NDSI[dem_ind] = 0

        # Accumulate the processed image and record its date.
        out_arr[:, :, i - daysbefore] = NDSI
        out_dates.append(series[i])

    return out_arr, out_dates
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
def modis_time_series(mod_ds, myd_ds, dem_ds, output_zarr, file_name, var_name='NDSI', oparams_file=None):
    """
    Run the full Terra/Aqua merge pipeline and save one zarr time series.

    Parameters
    ----------
    mod_ds, myd_ds : xarray.Dataset
        Terra (MOD) and Aqua (MYD) datasets containing *var_name* with
        matching spatial dimensions.
    dem_ds : xarray.Dataset
        DEM dataset; also supplies the NaN mask for out-of-footprint pixels.
    output_zarr : str
        Directory of the output zarr store.
    file_name : str
        Name of the zarr store to create.
    var_name : str
        Variable to process (default ``'NDSI'``).
    oparams_file : str, optional
        Parameters file forwarded to ``save_as_zarr``.

    Returns
    -------
    xarray.Dataset
        Processed dataset dimensioned ``(lat, lon, time)``.

    Raises
    ------
    ValueError
        If *var_name* is missing from either dataset, or the spatial
        dimensions of the two datasets differ.
    """
    # Moving-window padding (days) around each processed day.
    daysbefore = 3
    daysafter = 2

    # Load DEM and create the nanmask.
    dem, nanmask = load_dem_and_nanmask(dem_ds)

    # Verify that both datasets contain the requested variable.
    if var_name not in mod_ds or var_name not in myd_ds:
        # BUGFIX: the original message hardcoded 'NDSI' even when a different
        # var_name was requested.
        raise ValueError(f"One of the datasets does not contain the {var_name!r} variable.")

    # Get the full timeseries arrays; expected shape is (lat, lon, time).
    mod_data = mod_ds[var_name].values
    myd_data = myd_ds[var_name].values

    # Check that spatial dimensions match between Terra and Aqua data.
    if mod_data.shape[:2] != myd_data.shape[:2]:
        raise ValueError("Terra and Aqua data do not have matching spatial dimensions.")

    # Generate a continuous daily time series and moving-window parameters.
    series, movwind, currentday_ind, _ = generate_time_series(mod_ds['time'].values, daysbefore, daysafter)

    # Process and accumulate all images.
    out_arr, out_dates = process_files_array(series, movwind, currentday_ind, mod_ds, myd_ds,
                                             dem, nanmask, daysbefore, daysafter, var_name)

    # Assemble the complete time series into a single dataset.
    ds_out = xr.Dataset(
        {
            var_name: (("lat", "lon", "time"), out_arr)
        },
        coords={
            "lat": mod_ds["lat"],
            "lon": mod_ds["lon"],
            "time": out_dates
        }
    )

    # Save the dataset to a single zarr store.
    save_as_zarr(ds_out, output_zarr, file_name, params_file=oparams_file)

    return ds_out
|
|
@@ -0,0 +1,242 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: SnowMapPy
|
|
3
|
+
Version: 1.0.1
|
|
4
|
+
Summary: A comprehensive Python package for processing MODIS NDSI data from local files and Google Earth Engine
|
|
5
|
+
Home-page: https://github.com/Hbechri/SnowMapPy
|
|
6
|
+
Author: Haytam Elyoussfi
|
|
7
|
+
Author-email: haytam.elyoussfi@um6p.ma
|
|
8
|
+
Project-URL: Bug Reports, https://github.com/Hbechri/SnowMapPy/issues
|
|
9
|
+
Project-URL: Source, https://github.com/Hbechri/SnowMapPy
|
|
10
|
+
Project-URL: Documentation, https://github.com/Hbechri/SnowMapPy#readme
|
|
11
|
+
Keywords: modis,snow,remote sensing,earth engine,gis,hydrology
|
|
12
|
+
Classifier: Development Status :: 4 - Beta
|
|
13
|
+
Classifier: Intended Audience :: Science/Research
|
|
14
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
15
|
+
Classifier: Operating System :: OS Independent
|
|
16
|
+
Classifier: Programming Language :: Python :: 3
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.8
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.9
|
|
19
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
20
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
21
|
+
Classifier: Topic :: Scientific/Engineering :: GIS
|
|
22
|
+
Classifier: Topic :: Scientific/Engineering :: Atmospheric Science
|
|
23
|
+
Requires-Python: >=3.8
|
|
24
|
+
Description-Content-Type: text/markdown
|
|
25
|
+
Requires-Dist: numpy==2.1.0
|
|
26
|
+
Requires-Dist: scipy==1.15.2
|
|
27
|
+
Requires-Dist: pandas==2.2.3
|
|
28
|
+
Requires-Dist: rasterio==1.4.3
|
|
29
|
+
Requires-Dist: geopandas==1.0.1
|
|
30
|
+
Requires-Dist: pyproj==3.7.1
|
|
31
|
+
Requires-Dist: shapely==2.1.0
|
|
32
|
+
Requires-Dist: affine==2.4.0
|
|
33
|
+
Requires-Dist: xarray==2025.3.1
|
|
34
|
+
Requires-Dist: zarr==3.0.6
|
|
35
|
+
Requires-Dist: numcodecs==0.16.0
|
|
36
|
+
Requires-Dist: h5py==3.13.0
|
|
37
|
+
Requires-Dist: netCDF4==1.7.2
|
|
38
|
+
Requires-Dist: earthengine-api==1.5.9
|
|
39
|
+
Requires-Dist: geemap==0.35.3
|
|
40
|
+
Requires-Dist: tqdm==4.66.4
|
|
41
|
+
Requires-Dist: joblib==1.4.2
|
|
42
|
+
Requires-Dist: python-dateutil==2.9.0
|
|
43
|
+
Requires-Dist: matplotlib==3.10.1
|
|
44
|
+
Provides-Extra: dev
|
|
45
|
+
Requires-Dist: pytest>=6.0; extra == "dev"
|
|
46
|
+
Requires-Dist: flake8>=3.8; extra == "dev"
|
|
47
|
+
Requires-Dist: black>=21.0; extra == "dev"
|
|
48
|
+
Dynamic: author
|
|
49
|
+
Dynamic: author-email
|
|
50
|
+
Dynamic: classifier
|
|
51
|
+
Dynamic: description
|
|
52
|
+
Dynamic: description-content-type
|
|
53
|
+
Dynamic: home-page
|
|
54
|
+
Dynamic: keywords
|
|
55
|
+
Dynamic: project-url
|
|
56
|
+
Dynamic: provides-extra
|
|
57
|
+
Dynamic: requires-dist
|
|
58
|
+
Dynamic: requires-python
|
|
59
|
+
Dynamic: summary
|
|
60
|
+
|
|
61
|
+
# SnowMapPy ❄️
|
|
62
|
+
|
|
63
|
+
A comprehensive Python package for processing MODIS NDSI (Normalized Difference Snow Index) data from both local files and Google Earth Engine, with advanced quality control and temporal interpolation capabilities.
|
|
64
|
+
|
|
65
|
+
## ๐ Table of Contents
|
|
66
|
+
|
|
67
|
+
- [Features](#features)
|
|
68
|
+
- [Installation](#installation)
|
|
69
|
+
- [Quick Start](#quick-start)
|
|
70
|
+
- [Package Structure](#package-structure)
|
|
71
|
+
- [Usage Examples](#usage-examples)
|
|
72
|
+
- [Testing](#testing)
|
|
73
|
+
- [Contributing](#contributing)
|
|
74
|
+
- [License](#license)
|
|
75
|
+
|
|
76
|
+
## โจ Features
|
|
77
|
+
|
|
78
|
+
- **๐ Cloud Processing**: Download and process MODIS NDSI data directly from Google Earth Engine
|
|
79
|
+
- **๐พ Local Processing**: Process locally stored MODIS NDSI files
|
|
80
|
+
- **๐ Quality Control**: Advanced masking using NDSI_Snow_Cover_Class for data validation
|
|
81
|
+
- **โฐ Temporal Interpolation**: Fill missing data points using spatial and temporal interpolation
|
|
82
|
+
- **๐บ๏ธ Spatial Operations**: Clip data to regions of interest using shapefiles or bounding boxes
|
|
83
|
+
- **๐ Data Export**: Save processed data in Zarr format for efficient storage and access
|
|
84
|
+
- **๐งช Comprehensive Testing**: Unit tests and real-world processing tests
|
|
85
|
+
|
|
86
|
+
## ๐ Installation
|
|
87
|
+
|
|
88
|
+
### Option 1: Install from PyPI (Recommended)
|
|
89
|
+
|
|
90
|
+
```bash
|
|
91
|
+
pip install SnowMapPy
|
|
92
|
+
```
|
|
93
|
+
|
|
94
|
+
### Option 2: Install from GitHub
|
|
95
|
+
|
|
96
|
+
1. **Clone the repository:**
|
|
97
|
+
```bash
|
|
98
|
+
git clone https://github.com/Hbechri/SnowMapPy.git
|
|
99
|
+
cd SnowMapPy/package
|
|
100
|
+
```
|
|
101
|
+
|
|
102
|
+
2. **Install the package:**
|
|
103
|
+
```bash
|
|
104
|
+
pip install -e .
|
|
105
|
+
```
|
|
106
|
+
|
|
107
|
+
### Prerequisites
|
|
108
|
+
|
|
109
|
+
- Python 3.8+
|
|
110
|
+
- Google Earth Engine account (for cloud processing)
|
|
111
|
+
- Required Python packages (automatically installed with the package)
|
|
112
|
+
|
|
113
|
+
### Google Earth Engine Setup (for cloud processing)
|
|
114
|
+
|
|
115
|
+
1. **Sign up for Google Earth Engine:**
|
|
116
|
+
- Visit [https://earthengine.google.com/](https://earthengine.google.com/)
|
|
117
|
+
- Sign up for an account
|
|
118
|
+
|
|
119
|
+
2. **Authenticate:**
|
|
120
|
+
```bash
|
|
121
|
+
earthengine authenticate
|
|
122
|
+
```
|
|
123
|
+
|
|
124
|
+
## ๐ฏ Quick Start
|
|
125
|
+
|
|
126
|
+
### Cloud Processing Example
|
|
127
|
+
|
|
128
|
+
```python
|
|
129
|
+
from SnowMapPy.cloud.processor import process_modis_ndsi_cloud
|
|
130
|
+
|
|
131
|
+
# Process MODIS NDSI data from Google Earth Engine
|
|
132
|
+
result = process_modis_ndsi_cloud(
|
|
133
|
+
project_name="your-gee-project",
|
|
134
|
+
shapefile_path="path/to/roi.shp",
|
|
135
|
+
start_date="2023-01-01",
|
|
136
|
+
end_date="2023-01-31",
|
|
137
|
+
output_path="output/",
|
|
138
|
+
file_name="snow_cover"
|
|
139
|
+
)
|
|
140
|
+
```
|
|
141
|
+
|
|
142
|
+
### Local Processing Example
|
|
143
|
+
|
|
144
|
+
```python
|
|
145
|
+
from SnowMapPy.local.processor import process_modis_ndsi_local
|
|
146
|
+
|
|
147
|
+
# Process locally stored MODIS NDSI files
|
|
148
|
+
result = process_modis_ndsi_local(
|
|
149
|
+
mod_dir="path/to/MOD/files/",
|
|
150
|
+
myd_dir="path/to/MYD/files/",
|
|
151
|
+
dem_file="path/to/dem.tif",
|
|
152
|
+
output_path="output/",
|
|
153
|
+
file_name="local_snow_cover"
|
|
154
|
+
)
|
|
155
|
+
```
|
|
156
|
+
|
|
157
|
+
## ๐ Package Structure
|
|
158
|
+
|
|
159
|
+
```
|
|
160
|
+
SnowMapPy/
|
|
161
|
+
โโโ core/ # Shared functionality
|
|
162
|
+
โ โโโ data_io.py # Data input/output operations
|
|
163
|
+
โ โโโ quality.py # Quality control functions
|
|
164
|
+
โ โโโ spatial.py # Spatial operations
|
|
165
|
+
โ โโโ temporal.py # Temporal interpolation
|
|
166
|
+
โ โโโ utils.py # Utility functions
|
|
167
|
+
โโโ cloud/ # Google Earth Engine processing
|
|
168
|
+
โ โโโ auth.py # GEE authentication
|
|
169
|
+
โ โโโ loader.py # Data loading from GEE
|
|
170
|
+
โ โโโ processor.py # Cloud processing pipeline
|
|
171
|
+
โโโ local/ # Local file processing
|
|
172
|
+
โ โโโ file_handler.py # File management
|
|
173
|
+
โ โโโ preparator.py # Data preparation
|
|
174
|
+
โ โโโ processor.py # Local processing pipeline
|
|
175
|
+
โโโ tests/ # Test suite
|
|
176
|
+
โโโ test_core/ # Core functionality tests
|
|
177
|
+
โโโ test_cloud/ # Cloud processing tests
|
|
178
|
+
โโโ test_local/ # Local processing tests
|
|
179
|
+
```
|
|
180
|
+
|
|
181
|
+
## ๐ Usage Examples
|
|
182
|
+
|
|
183
|
+
### Quality Control
|
|
184
|
+
|
|
185
|
+
```python
|
|
186
|
+
from SnowMapPy.core.quality import get_invalid_modis_classes, apply_modis_quality_mask
|
|
187
|
+
|
|
188
|
+
# Get invalid MODIS class values
|
|
189
|
+
invalid_classes = get_invalid_modis_classes()
|
|
190
|
+
print(f"Invalid classes: {invalid_classes}")
|
|
191
|
+
|
|
192
|
+
# Apply quality mask to data
|
|
193
|
+
masked_data = apply_modis_quality_mask(value_data, class_data)
|
|
194
|
+
```
|
|
195
|
+
|
|
196
|
+
### Spatial Operations
|
|
197
|
+
|
|
198
|
+
```python
|
|
199
|
+
from SnowMapPy.core.spatial import clip_dem_to_roi
|
|
200
|
+
|
|
201
|
+
# Clip DEM to region of interest
|
|
202
|
+
clipped_dem = clip_dem_to_roi(dem_data, shapefile_path)
|
|
203
|
+
```
|
|
204
|
+
|
|
205
|
+
## ๐งช Testing
|
|
206
|
+
|
|
207
|
+
Run the test suite:
|
|
208
|
+
|
|
209
|
+
```bash
|
|
210
|
+
# Run all tests
|
|
211
|
+
python -m pytest tests/
|
|
212
|
+
|
|
213
|
+
# Run specific test categories
|
|
214
|
+
python tests/test_core/test_quality.py
|
|
215
|
+
python tests/test_cloud/test_basic_cloud.py
|
|
216
|
+
```
|
|
217
|
+
|
|
218
|
+
For detailed testing instructions, see [TESTING.md](TESTING.md).
|
|
219
|
+
|
|
220
|
+
## ๐ค Contributing
|
|
221
|
+
|
|
222
|
+
1. Fork the repository
|
|
223
|
+
2. Create a feature branch (`git checkout -b feature/amazing-feature`)
|
|
224
|
+
3. Commit your changes (`git commit -m 'Add amazing feature'`)
|
|
225
|
+
4. Push to the branch (`git push origin feature/amazing-feature`)
|
|
226
|
+
5. Open a Pull Request
|
|
227
|
+
|
|
228
|
+
## ๐ License
|
|
229
|
+
|
|
230
|
+
This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
|
|
231
|
+
|
|
232
|
+
## ๐ Acknowledgments
|
|
233
|
+
|
|
234
|
+
- Google Earth Engine team for providing the platform
|
|
235
|
+
- NASA for MODIS data
|
|
236
|
+
- The open-source geospatial community
|
|
237
|
+
|
|
238
|
+
## ๐ Support
|
|
239
|
+
|
|
240
|
+
- **Issues**: [GitHub Issues](https://github.com/Hbechri/SnowMapPy/issues)
|
|
241
|
+
- **Documentation**: [GitHub README](https://github.com/Hbechri/SnowMapPy#readme)
|
|
242
|
+
- **Email**: haytam.elyoussfi@um6p.ma
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
cloud/__init__.py,sha256=-R4cEQvkGtzIr5kE3C8Ul0I3KDg3uYaXDqtSUaYxyQw,353
|
|
2
|
+
cloud/auth.py,sha256=Yu_QtIiWiJ9x5omyah6P4qdocVSIKaOiyoUNH517Ssc,607
|
|
3
|
+
cloud/loader.py,sha256=KOkJYAp8-wSRLLkcsrQEM_rFCD_cquHLOVZ_8lNoip4,4172
|
|
4
|
+
cloud/processor.py,sha256=bUWMxkDeY-ldpXTmdPbveglgttXGmpfgjYFVah26F54,19063
|
|
5
|
+
core/__init__.py,sha256=zsLmlyo72uNvLJIwlgjccMiNmddug8DFihdSE8TnUVU,949
|
|
6
|
+
core/data_io.py,sha256=cHnjnbbkhqXPWKLXY867Mz22g7NFsoQnDq8edoE-f_Q,6137
|
|
7
|
+
core/quality.py,sha256=ARxwQG7fcj0CXrSlScd1s3OiS-ivHYRef_LbN23o64I,4766
|
|
8
|
+
core/spatial.py,sha256=HEBmgO9j6Nd9Lr1FPnyTZBVJOPyXm8oBBcj_2y6Olj4,4412
|
|
9
|
+
core/temporal.py,sha256=2FGLYw94iNu4yXBFrpEAy74XlEFWw9Oxh68_RMY46Cs,1387
|
|
10
|
+
core/utils.py,sha256=WlJ6BFN7XqX2s9lWRe8iENqAaUwtqzVZnRtSt_RFNyg,1632
|
|
11
|
+
local/__init__.py,sha256=S6IVb8aFMuNS_sm4isnV4rti2Nvzt11aTJ0V7ZG4IT8,352
|
|
12
|
+
local/file_handler.py,sha256=bLlILjMRLq8yuABzNaDb8SuR0zSXSe5lTQhVnxrdnek,1232
|
|
13
|
+
local/preparator.py,sha256=6dBTmSsGhzEOCvg_cqDHa6YUJdnD8iOQchltwSCoSQI,6005
|
|
14
|
+
local/processor.py,sha256=j360BWyggbog_ccCsOMOtRZ2iDZJjUvWJc2ZlNp64n0,5820
|
|
15
|
+
tests/__init__.py,sha256=5wfjsghwLVndZWJQRfB7KWeY62bssenUWW9MV81s7Po,35
|
|
16
|
+
tests/test_cloud/__init__.py,sha256=UXkfFwW1CGgXacpI90nnP59d5VIZ4U9LhD8Vvtzlxz4,21
|
|
17
|
+
tests/test_cloud/real_cloud_test.py,sha256=m2BPKHtgD-nAhp8Tg5BnidTMkFL7CW7YfKpthCD7t-s,14760
|
|
18
|
+
tests/test_cloud/test_basic_cloud.py,sha256=Od-svnU1_CAJWL80GLhDGzIq2YaNyBtmelbJBPVruzs,8349
|
|
19
|
+
tests/test_core/__init__.py,sha256=0bvz0rqDybg2L5J3jXJF2RBF8E51vXhTxbQGCUG4TZ8,20
|
|
20
|
+
tests/test_core/test_quality.py,sha256=nefFrWzX7zNclQZCGbd85obpAj83a4tKjDY2LWKWjtk,2355
|
|
21
|
+
tests/test_local/__init__.py,sha256=85fWbznBK4B08nWGMFcMC-fe4SrWa6ZKwMTQWrAUjRk,21
|
|
22
|
+
snowmappy-1.0.1.dist-info/METADATA,sha256=OZ5axg2d2Oqw5bsorai-a0OxstKNSse36GWyDGYiTrA,7745
|
|
23
|
+
snowmappy-1.0.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
24
|
+
snowmappy-1.0.1.dist-info/top_level.txt,sha256=x0dVy4Hq9VmXJnj_f5EJ6YjdO5tdueEuynPmZiBvMTQ,23
|
|
25
|
+
snowmappy-1.0.1.dist-info/RECORD,,
|
tests/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# Test suite for SnowMapPy package
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# Cloud module tests
|