ras-commander 0.48.0__py3-none-any.whl → 0.49.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,410 @@
1
+ """
2
+ Class: HdfInfiltration
3
+
4
+ Attribution: A substantial amount of code in this file is sourced or derived
5
+ from the https://github.com/fema-ffrd/rashdf library,
6
+ released under MIT license and Copyright (c) 2024 fema-ffrd
7
+
8
+ The file has been forked and modified for use in RAS Commander.
9
+
10
+ -----
11
+
12
+ All of the methods in this class are static and are designed to be used without instantiation.
13
+
14
+ List of Functions in HdfInfiltration:
15
+ - scale_infiltration_data(): Updates infiltration parameters in HDF file with scaling factors
16
+ - get_infiltration_data(): Retrieves current infiltration parameters from HDF file
17
+ - get_infiltration_map(): Reads the infiltration raster map from HDF file
18
+ - calculate_soil_statistics(): Calculates soil statistics from zonal statistics
19
+ - get_significant_mukeys(): Gets mukeys with percentage greater than threshold
20
+ - calculate_total_significant_percentage(): Calculates total percentage covered by significant mukeys
21
+ - save_statistics(): Saves soil statistics to CSV
22
+ - get_infiltration_parameters(): Gets infiltration parameters for a specific mukey
23
+ - calculate_weighted_parameters(): Calculates weighted infiltration parameters based on soil statistics
24
+
25
+ Most of these functions are decorated with @standardize_input to ensure consistent handling of HDF file paths
26
+ and @log_call for logging function calls and errors. Functions return various data types including
27
+ DataFrames, dictionaries, and floating-point values depending on their purpose.
28
+
29
+ The class provides comprehensive functionality for analyzing and modifying infiltration-related
30
+ data in HEC-RAS HDF files, including parameter scaling, soil statistics calculation, and
31
+ weighted parameter computation.
32
+ """
33
+ from pathlib import Path
34
+ import h5py
35
+ import numpy as np
36
+ import pandas as pd
37
+ from typing import Optional, Dict, Any
38
+ import logging
39
+ from .HdfBase import HdfBase
40
+ from .HdfUtils import HdfUtils
41
+ from .Decorators import standardize_input, log_call
42
+ from .LoggingConfig import setup_logging, get_logger
43
+
44
+ logger = get_logger(__name__)
45
+
46
+ from pathlib import Path
47
+ import pandas as pd
48
+ import geopandas as gpd
49
+ import h5py
50
+ from rasterstats import zonal_stats
51
+ from .Decorators import log_call, standardize_input
52
+
53
class HdfInfiltration:
    """
    Static helpers for infiltration-related operations on HEC-RAS HDF files.

    Provides methods to read and modify infiltration data in HEC-RAS HDF
    files (base overrides, raster maps, per-mukey parameters) and to derive
    soil statistics and area-weighted infiltration parameters. All methods
    are static; the class is not meant to be instantiated.
    """

    # Unit-conversion constants: square metres -> acres / square miles
    SQM_TO_ACRE = 0.000247105
    SQM_TO_SQMILE = 3.861e-7

    def __init__(self):
        # Kept for backward compatibility; all functionality is static.
        self.logger = logging.getLogger(__name__)

    @staticmethod
    @standardize_input(file_type='geom_hdf')
    @log_call
    def scale_infiltration_data(
        hdf_path: Path,
        infiltration_df: pd.DataFrame,
        scale_md: float = 1.0,
        scale_id: float = 1.0,
        scale_pr: float = 1.0
    ) -> Optional[pd.DataFrame]:
        """
        Update infiltration parameters in the HDF file with optional scaling factors.

        Parameters
        ----------
        hdf_path : Path
            Path to the HEC-RAS geometry HDF file
        infiltration_df : pd.DataFrame
            DataFrame containing infiltration parameters with columns:
            ['Name', 'Maximum Deficit', 'Initial Deficit', 'Potential Percolation Rate']
        scale_md : float, optional
            Scaling factor for Maximum Deficit, by default 1.0
        scale_id : float, optional
            Scaling factor for Initial Deficit, by default 1.0
        scale_pr : float, optional
            Scaling factor for Potential Percolation Rate, by default 1.0

        Returns
        -------
        Optional[pd.DataFrame]
            The scaled infiltration DataFrame if successful, None if the
            HDF write fails (the error is logged).
        """
        try:
            hdf_path_to_overwrite = '/Geometry/Infiltration/Base Overrides'

            # Scale a copy so the caller's DataFrame is left untouched.
            infiltration_df = infiltration_df.copy()
            infiltration_df['Maximum Deficit'] *= scale_md
            infiltration_df['Initial Deficit'] *= scale_id
            infiltration_df['Potential Percolation Rate'] *= scale_pr

            with h5py.File(hdf_path, 'a') as hdf_file:
                # Replace any existing dataset wholesale.
                if hdf_path_to_overwrite in hdf_file:
                    del hdf_file[hdf_path_to_overwrite]

                # Structured dtype matching the table layout written back to HEC-RAS.
                # NOTE(review): 'S7' silently truncates land-cover names longer than
                # 7 bytes — confirm against the layout HEC-RAS expects before widening.
                dt = np.dtype([
                    ('Land Cover Name', 'S7'),
                    ('Maximum Deficit', 'f4'),
                    ('Initial Deficit', 'f4'),
                    ('Potential Percolation Rate', 'f4')
                ])

                # Pack the DataFrame columns into the structured array.
                structured_array = np.zeros(infiltration_df.shape[0], dtype=dt)
                structured_array['Land Cover Name'] = np.array(
                    infiltration_df['Name'].astype(str).values.astype('|S7'))
                structured_array['Maximum Deficit'] = (
                    infiltration_df['Maximum Deficit'].values.astype(np.float32))
                structured_array['Initial Deficit'] = (
                    infiltration_df['Initial Deficit'].values.astype(np.float32))
                structured_array['Potential Percolation Rate'] = (
                    infiltration_df['Potential Percolation Rate'].values.astype(np.float32))

                # Chunked + extendable so the table can grow on later writes.
                hdf_file.create_dataset(
                    hdf_path_to_overwrite,
                    data=structured_array,
                    dtype=dt,
                    compression='gzip',
                    compression_opts=1,
                    chunks=(100,),
                    maxshape=(None,)
                )

            return infiltration_df

        except Exception as e:
            logger.error(f"Error updating infiltration data in {hdf_path}: {str(e)}")
            return None

    @staticmethod
    @standardize_input(file_type='geom_hdf')
    @log_call
    def get_infiltration_data(hdf_path: Path) -> Optional[pd.DataFrame]:
        """
        Retrieve current infiltration parameters from the HDF file.

        Parameters
        ----------
        hdf_path : Path
            Path to the HEC-RAS geometry HDF file

        Returns
        -------
        Optional[pd.DataFrame]
            DataFrame with columns ['Name', 'Maximum Deficit',
            'Initial Deficit', 'Potential Percolation Rate'] if successful;
            None if the dataset is absent or the read fails (logged).
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf_file:
                if '/Geometry/Infiltration/Base Overrides' not in hdf_file:
                    logger.warning(f"No infiltration data found in {hdf_path}")
                    return None

                data = hdf_file['/Geometry/Infiltration/Base Overrides'][()]

                # Convert the structured array to a DataFrame, decoding the
                # fixed-width byte-string names and trimming padding.
                df = pd.DataFrame({
                    'Name': [name.decode('utf-8').strip() for name in data['Land Cover Name']],
                    'Maximum Deficit': data['Maximum Deficit'],
                    'Initial Deficit': data['Initial Deficit'],
                    'Potential Percolation Rate': data['Potential Percolation Rate']
                })

                return df

        except Exception as e:
            logger.error(f"Error reading infiltration data from {hdf_path}: {str(e)}")
            return None

    @staticmethod
    @standardize_input(file_type='geom_hdf')
    @log_call
    def get_infiltration_map(hdf_path: Path) -> dict:
        """Read the infiltration raster map from an HDF file.

        NOTE(review): the original applied ``@standardize_input`` without
        calling it, which passes the function itself as ``file_type`` and
        breaks the decorator factory; called explicitly here — confirm the
        appropriate ``file_type`` for raster-map HDFs.

        Args:
            hdf_path: Path to the HDF file

        Returns:
            Dictionary mapping integer raster values to mukey strings
        """
        with h5py.File(hdf_path, 'r') as hdf:
            raster_map_data = hdf['Raster Map'][:]
            return {int(item[0]): item[1].decode('utf-8') for item in raster_map_data}

    @staticmethod
    @log_call
    def calculate_soil_statistics(zonal_stats: list, raster_map: dict) -> pd.DataFrame:
        """Calculate soil statistics from zonal statistics.

        Args:
            zonal_stats: List of zonal-statistics dicts mapping raster value -> area (sq m)
            raster_map: Dictionary mapping raster values to mukeys

        Returns:
            DataFrame with columns ['mukey', 'Percentage', 'Area in Acres',
            'Area in Square Miles']. The columns are present even when the
            result is empty, so downstream column access never raises.
        """
        columns = ['mukey', 'Percentage', 'Area in Acres', 'Area in Square Miles']

        # Accumulate per-mukey areas and the total covered area.
        mukey_areas = {mukey: 0 for mukey in raster_map.values()}
        total_area_sqm = 0
        for stat in zonal_stats:
            for raster_val, area in stat.items():
                mukey = raster_map.get(raster_val)
                if mukey:
                    mukey_areas[mukey] += area
                    total_area_sqm += area

        # No overlap at all -> empty result (explicit columns keep the
        # frame usable by get_significant_mukeys and friends).
        if total_area_sqm <= 0:
            return pd.DataFrame(columns=columns)

        rows = []
        for mukey, area_sqm in mukey_areas.items():
            if area_sqm > 0:
                rows.append({
                    'mukey': mukey,
                    'Percentage': (area_sqm / total_area_sqm) * 100,
                    'Area in Acres': area_sqm * HdfInfiltration.SQM_TO_ACRE,
                    'Area in Square Miles': area_sqm * HdfInfiltration.SQM_TO_SQMILE
                })

        return pd.DataFrame(rows, columns=columns)

    @staticmethod
    @log_call
    def get_significant_mukeys(soil_stats: pd.DataFrame,
                               threshold: float = 1.0) -> pd.DataFrame:
        """Get mukeys with percentage greater than threshold.

        Args:
            soil_stats: DataFrame with soil statistics (needs a 'Percentage' column)
            threshold: Minimum percentage threshold (default 1.0)

        Returns:
            DataFrame of rows above the threshold, sorted by descending percentage
        """
        significant = soil_stats[soil_stats['Percentage'] > threshold].copy()
        significant.sort_values('Percentage', ascending=False, inplace=True)
        return significant

    @staticmethod
    @log_call
    def calculate_total_significant_percentage(significant_mukeys: pd.DataFrame) -> float:
        """Calculate total percentage covered by significant mukeys.

        Args:
            significant_mukeys: DataFrame of significant mukeys

        Returns:
            Total percentage covered (0.0 for an empty frame)
        """
        # Cast to plain float so the annotated return type holds even
        # when pandas yields a numpy scalar.
        return float(significant_mukeys['Percentage'].sum())

    @staticmethod
    @log_call
    def save_statistics(soil_stats: pd.DataFrame, output_path: Path,
                        include_timestamp: bool = True) -> None:
        """Save soil statistics to CSV.

        Args:
            soil_stats: DataFrame with soil statistics
            output_path: Path to save CSV file
            include_timestamp: Whether to append a _YYYYmmdd_HHMMSS suffix
                to the filename (default True)
        """
        if include_timestamp:
            timestamp = pd.Timestamp.now().strftime('%Y%m%d_%H%M%S')
            output_path = output_path.with_name(
                f"{output_path.stem}_{timestamp}{output_path.suffix}")

        soil_stats.to_csv(output_path, index=False)

    @staticmethod
    @standardize_input(file_type='geom_hdf')
    @log_call
    def get_infiltration_parameters(hdf_path: Path, mukey: str) -> Optional[dict]:
        """Get infiltration parameters for a specific mukey from an HDF file.

        NOTE(review): the original applied ``@standardize_input`` without
        calling it (see get_infiltration_map); fixed the same way here.

        Args:
            hdf_path: Path to the HDF file
            mukey: Mukey identifier

        Returns:
            Dictionary of infiltration parameters, or None if the mukey
            is not present in the table

        Raises:
            KeyError: If the file has no 'Infiltration Parameters' dataset
        """
        with h5py.File(hdf_path, 'r') as hdf:
            if 'Infiltration Parameters' not in hdf:
                raise KeyError("No infiltration parameters found in HDF file")

            params = hdf['Infiltration Parameters'][:]
            for row in params:
                if row[0].decode('utf-8') == mukey:
                    return {
                        'Initial Loss (in)': float(row[1]),
                        'Constant Loss Rate (in/hr)': float(row[2]),
                        'Impervious Area (%)': float(row[3])
                    }
            return None

    @staticmethod
    @log_call
    def calculate_weighted_parameters(soil_stats: pd.DataFrame,
                                      infiltration_params: dict) -> dict:
        """Calculate weighted infiltration parameters based on soil statistics.

        Args:
            soil_stats: DataFrame with soil statistics ('mukey', 'Percentage')
            infiltration_params: Dictionary of infiltration parameters by mukey

        Returns:
            Dictionary of weighted average infiltration parameters (all
            zeros when the statistics carry no weight)
        """
        weighted_params = {
            'Initial Loss (in)': 0.0,
            'Constant Loss Rate (in/hr)': 0.0,
            'Impervious Area (%)': 0.0
        }

        total_weight = soil_stats['Percentage'].sum()
        # Guard against division by zero when every percentage is zero
        # (the original produced inf/NaN in that case).
        if total_weight <= 0:
            return weighted_params

        for _, row in soil_stats.iterrows():
            mukey = row['mukey']
            weight = row['Percentage'] / total_weight

            if mukey in infiltration_params:
                for param in weighted_params:
                    weighted_params[param] += (
                        infiltration_params[mukey][param] * weight
                    )

        return weighted_params
367
+
368
# Example usage (illustrative only — not executed; RasMapper is not imported in this module):
"""
from pathlib import Path

# Initialize paths
raster_path = Path('input_files/gSSURGO_InfiltrationDC.tif')
boundary_path = Path('input_files/WF_Boundary_Simple.shp')
hdf_path = raster_path.with_suffix('.hdf')

# Get infiltration mapping
infil_map = HdfInfiltration.get_infiltration_map(hdf_path)

# Get zonal statistics (using RasMapper class)
clipped_data, transform, nodata = RasMapper.clip_raster_with_boundary(
    raster_path, boundary_path)
stats = RasMapper.calculate_zonal_stats(
    boundary_path, clipped_data, transform, nodata)

# Calculate soil statistics
soil_stats = HdfInfiltration.calculate_soil_statistics(stats, infil_map)

# Get significant mukeys (>1%)
significant = HdfInfiltration.get_significant_mukeys(soil_stats, threshold=1.0)

# Calculate total percentage of significant mukeys
total_significant = HdfInfiltration.calculate_total_significant_percentage(significant)
print(f"Total percentage of significant mukeys: {total_significant}%")

# Get infiltration parameters for each significant mukey
infiltration_params = {}
for mukey in significant['mukey']:
    params = HdfInfiltration.get_infiltration_parameters(hdf_path, mukey)
    if params:
        infiltration_params[mukey] = params

# Calculate weighted parameters
weighted_params = HdfInfiltration.calculate_weighted_parameters(
    significant, infiltration_params)
print("Weighted infiltration parameters:", weighted_params)

# Save results
HdfInfiltration.save_statistics(soil_stats, Path('soil_statistics.csv'))
"""