ras-commander 0.71.0__py3-none-any.whl → 0.72.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ras_commander/HdfInfiltration.py +696 -416
- ras_commander/RasGeo.py +358 -214
- ras_commander/RasMap.py +252 -0
- ras_commander/RasPrj.py +19 -1
- ras_commander/__init__.py +1 -1
- {ras_commander-0.71.0.dist-info → ras_commander-0.72.0.dist-info}/METADATA +2 -2
- {ras_commander-0.71.0.dist-info → ras_commander-0.72.0.dist-info}/RECORD +10 -9
- {ras_commander-0.71.0.dist-info → ras_commander-0.72.0.dist-info}/WHEEL +0 -0
- {ras_commander-0.71.0.dist-info → ras_commander-0.72.0.dist-info}/licenses/LICENSE +0 -0
- {ras_commander-0.71.0.dist-info → ras_commander-0.72.0.dist-info}/top_level.txt +0 -0
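The headline change in 0.72.0 is a rewritten HdfInfiltration module (shown below). As a quick orientation, here is a minimal sketch of the new read API; this is an editor's illustration rather than package documentation, it assumes HdfInfiltration is importable from the package root, and the geometry path is a placeholder:

from pathlib import Path
from ras_commander import HdfInfiltration

# Read the Base Overrides table from a geometry HDF (returns None if absent).
overrides = HdfInfiltration.get_infiltration_baseoverrides(Path('MyProject.g01.hdf'))
if overrides is not None:
    print(overrides.head())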
ras_commander/HdfInfiltration.py
CHANGED
@@ -1,416 +1,696 @@
(The entire previous module, lines 1-416, was removed. The diff viewer rendered only fragments of the deleted text: the opening `"""`, `Class: HdfInfiltration`, a truncated `from` import, and closing `"""` markers; the rest of the old content is not shown.)

New module contents (lines 1-696):
"""
|
2
|
+
Class: HdfInfiltration
|
3
|
+
|
4
|
+
A comprehensive class for handling infiltration-related operations in HEC-RAS HDF geometry files.
|
5
|
+
This class provides methods for managing infiltration parameters, soil statistics, and raster data processing.
|
6
|
+
|
7
|
+
Key Features:
|
8
|
+
- Infiltration parameter management (scaling, setting, retrieving)
|
9
|
+
- Soil statistics calculation and analysis
|
10
|
+
- Raster data processing and mapping
|
11
|
+
- Weighted parameter calculations
|
12
|
+
- Data export and file management
|
13
|
+
|
14
|
+
Methods:
|
15
|
+
1. Geometry File Base Override Management:
|
16
|
+
- scale_infiltration_data(): Updates infiltration parameters with scaling factors in geometry file
|
17
|
+
- get_infiltration_data(): Retrieves current infiltration parameters from geometry file
|
18
|
+
- set_infiltration_table(): Sets infiltration parameters directly in geometry file
|
19
|
+
|
20
|
+
2. Raster and Mapping Operations (uses rasmap_df HDF files):
|
21
|
+
- get_infiltration_map(): Reads infiltration raster map from rasmap_df HDF file
|
22
|
+
- calculate_soil_statistics(): Processes zonal statistics for soil analysis
|
23
|
+
|
24
|
+
3. Soil Analysis (uses rasmap_df HDF files):
|
25
|
+
- get_significant_mukeys(): Identifies mukeys above percentage threshold
|
26
|
+
- calculate_total_significant_percentage(): Computes total coverage of significant mukeys
|
27
|
+
- get_infiltration_parameters(): Retrieves parameters for specific mukey
|
28
|
+
- calculate_weighted_parameters(): Computes weighted average parameters
|
29
|
+
|
30
|
+
4. Data Management (uses rasmap_df HDF files):
|
31
|
+
- save_statistics(): Exports soil statistics to CSV
|
32
|
+
|
33
|
+
Constants:
|
34
|
+
- SQM_TO_ACRE: Conversion factor from square meters to acres (0.000247105)
|
35
|
+
- SQM_TO_SQMILE: Conversion factor from square meters to square miles (3.861e-7)
|
36
|
+
|
37
|
+
Dependencies:
|
38
|
+
- pathlib: Path handling
|
39
|
+
- pandas: Data manipulation
|
40
|
+
- geopandas: Geospatial data processing
|
41
|
+
- h5py: HDF file operations
|
42
|
+
- rasterstats: Zonal statistics calculation (optional)
|
43
|
+
|
44
|
+
Note:
|
45
|
+
- Methods in section 1 work with base overrides in geometry files
|
46
|
+
- Methods in sections 2-4 work with HDF files from rasmap_df by default
|
47
|
+
- All methods are static and decorated with @standardize_input and @log_call
|
48
|
+
- The class is designed to work with both HEC-RAS geometry files and rasmap_df HDF files
|
49
|
+
"""
|
50
|
+
from pathlib import Path
|
51
|
+
import h5py
|
52
|
+
import numpy as np
|
53
|
+
import pandas as pd
|
54
|
+
from typing import Optional, Dict, Any, List, Tuple
|
55
|
+
import logging
|
56
|
+
from .HdfBase import HdfBase
|
57
|
+
from .HdfUtils import HdfUtils
|
58
|
+
from .Decorators import standardize_input, log_call
|
59
|
+
from .LoggingConfig import setup_logging, get_logger
|
60
|
+
|
61
|
+
logger = get_logger(__name__)
|
62
|
+
|
63
|
+
from pathlib import Path
|
64
|
+
import pandas as pd
|
65
|
+
import geopandas as gpd
|
66
|
+
import h5py
|
67
|
+
|
68
|
+
from .Decorators import log_call, standardize_input
|
69
|
+
|
70
|
+
class HdfInfiltration:
|
71
|
+
|
72
|
+
"""
|
73
|
+
A class for handling infiltration-related operations on HEC-RAS HDF geometry files.
|
74
|
+
|
75
|
+
This class provides methods to extract and modify infiltration data from HEC-RAS HDF geometry files,
|
76
|
+
including base overrides of infiltration parameters.
|
77
|
+
"""
|
78
|
+
|
79
|
+
# Constants for unit conversion
|
80
|
+
SQM_TO_ACRE = 0.000247105
|
81
|
+
SQM_TO_SQMILE = 3.861e-7
|
82
|
+
|
83
|
+
def __init__(self):
|
84
|
+
self.logger = logging.getLogger(__name__)
|
85
|
+
|
86
|
+
@staticmethod
|
87
|
+
def _get_table_info(hdf_file: h5py.File, table_path: str) -> Tuple[List[str], List[str], List[str]]:
|
88
|
+
"""Get column names and types from HDF table
|
89
|
+
|
90
|
+
Args:
|
91
|
+
hdf_file: Open HDF file object
|
92
|
+
table_path: Path to table in HDF file
|
93
|
+
|
94
|
+
Returns:
|
95
|
+
Tuple of (column names, numpy dtypes, column descriptions)
|
96
|
+
"""
|
97
|
+
if table_path not in hdf_file:
|
98
|
+
return [], [], []
|
99
|
+
|
100
|
+
dataset = hdf_file[table_path]
|
101
|
+
dtype = dataset.dtype
|
102
|
+
|
103
|
+
# Extract column names and types
|
104
|
+
col_names = []
|
105
|
+
col_types = []
|
106
|
+
col_descs = []
|
107
|
+
|
108
|
+
for name in dtype.names:
|
109
|
+
col_names.append(name)
|
110
|
+
col_types.append(dtype[name].str)
|
111
|
+
col_descs.append(name) # Could be enhanced to get actual descriptions
|
112
|
+
|
113
|
+
return col_names, col_types, col_descs
|
114
|
+
    @staticmethod
    @log_call
    def get_infiltration_baseoverrides(hdf_path: Path) -> Optional[pd.DataFrame]:
        """
        Retrieve current infiltration parameters from a HEC-RAS geometry HDF file.
        Dynamically reads whatever columns are present in the table.

        Parameters
        ----------
        hdf_path : Path
            Path to the HEC-RAS geometry HDF file

        Returns
        -------
        Optional[pd.DataFrame]
            DataFrame containing infiltration parameters if successful, None if operation fails
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf_file:
                table_path = '/Geometry/Infiltration/Base Overrides'
                if table_path not in hdf_file:
                    logger.warning(f"No infiltration data found in {hdf_path}")
                    return None

                # Get column info
                col_names, _, _ = HdfInfiltration._get_table_info(hdf_file, table_path)
                if not col_names:
                    logger.error(f"No columns found in infiltration table")
                    return None

                # Read data
                data = hdf_file[table_path][()]

                # Convert to DataFrame
                df_dict = {}
                for col in col_names:
                    values = data[col]
                    # Convert byte strings to regular strings if needed
                    if values.dtype.kind == 'S':
                        values = [v.decode('utf-8').strip() for v in values]
                    df_dict[col] = values

                return pd.DataFrame(df_dict)

        except Exception as e:
            logger.error(f"Error reading infiltration data from {hdf_path}: {str(e)}")
            return None
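    # Usage sketch (editor's illustration; the path below is a placeholder):
    #   overrides = HdfInfiltration.get_infiltration_baseoverrides(Path('project.g01.hdf'))
    #   if overrides is not None:
    #       print(overrides.columns.tolist())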
    @staticmethod
    @log_call
    def get_infiltration_layer_data(hdf_path: Path) -> Optional[pd.DataFrame]:
        """
        Retrieve current infiltration parameters from a HEC-RAS infiltration layer HDF file.
        Extracts the Variables dataset which contains the layer data.

        Parameters
        ----------
        hdf_path : Path
            Path to the HEC-RAS infiltration layer HDF file

        Returns
        -------
        Optional[pd.DataFrame]
            DataFrame containing infiltration parameters if successful, None if operation fails
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf_file:
                variables_path = '//Variables'
                if variables_path not in hdf_file:
                    logger.warning(f"No Variables dataset found in {hdf_path}")
                    return None

                # Read data from Variables dataset
                data = hdf_file[variables_path][()]

                # Convert to DataFrame
                df_dict = {}
                for field_name in data.dtype.names:
                    values = data[field_name]
                    # Convert byte strings to regular strings if needed
                    if values.dtype.kind == 'S':
                        values = [v.decode('utf-8').strip() for v in values]
                    df_dict[field_name] = values

                return pd.DataFrame(df_dict)

        except Exception as e:
            logger.error(f"Error reading infiltration layer data from {hdf_path}: {str(e)}")
            return None

    @staticmethod
    @log_call
    def set_infiltration_layer_data(
        hdf_path: Path,
        infiltration_df: pd.DataFrame
    ) -> Optional[pd.DataFrame]:
        """
        Set infiltration layer data in the infiltration layer HDF file directly from the provided DataFrame.
        # NOTE: This will not work if there are base overrides present in the Geometry HDF file.
        Updates the Variables dataset with the provided data.

        Parameters
        ----------
        hdf_path : Path
            Path to the HEC-RAS infiltration layer HDF file
        infiltration_df : pd.DataFrame
            DataFrame containing infiltration parameters with columns:
            - Name (string)
            - Curve Number (float)
            - Abstraction Ratio (float)
            - Minimum Infiltration Rate (float)

        Returns
        -------
        Optional[pd.DataFrame]
            The infiltration DataFrame if successful, None if operation fails
        """
        try:
            variables_path = '//Variables'

            # Validate required columns
            required_columns = ['Name', 'Curve Number', 'Abstraction Ratio', 'Minimum Infiltration Rate']
            missing_columns = [col for col in required_columns if col not in infiltration_df.columns]
            if missing_columns:
                raise ValueError(f"Missing required columns: {missing_columns}")

            with h5py.File(hdf_path, 'a') as hdf_file:
                # Delete existing dataset if it exists
                if variables_path in hdf_file:
                    del hdf_file[variables_path]

                # Create dtype for structured array
                dt = np.dtype([
                    ('Name', f'S{infiltration_df["Name"].str.len().max()}'),
                    ('Curve Number', 'f4'),
                    ('Abstraction Ratio', 'f4'),
                    ('Minimum Infiltration Rate', 'f4')
                ])

                # Create structured array
                structured_array = np.zeros(infiltration_df.shape[0], dtype=dt)

                # Fill structured array
                structured_array['Name'] = infiltration_df['Name'].values.astype(f'|S{dt["Name"].itemsize}')
                structured_array['Curve Number'] = infiltration_df['Curve Number'].values
                structured_array['Abstraction Ratio'] = infiltration_df['Abstraction Ratio'].values
                structured_array['Minimum Infiltration Rate'] = infiltration_df['Minimum Infiltration Rate'].values

                # Create new dataset
                hdf_file.create_dataset(
                    variables_path,
                    data=structured_array,
                    dtype=dt,
                    compression='gzip',
                    compression_opts=1,
                    chunks=(100,),
                    maxshape=(None,)
                )

                return infiltration_df

        except Exception as e:
            logger.error(f"Error setting infiltration layer data in {hdf_path}: {str(e)}")
            return None
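    # Round-trip sketch (editor's illustration; 'Infiltration.hdf' is a placeholder):
    #   layer = Path('Infiltration.hdf')
    #   df = HdfInfiltration.get_infiltration_layer_data(layer)
    #   df['Curve Number'] = df['Curve Number'] * 0.9   # e.g. lower all curve numbers by 10%
    #   HdfInfiltration.set_infiltration_layer_data(layer, df)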
    @staticmethod
    @standardize_input(file_type='geom_hdf')
    @log_call
    def scale_infiltration_data(
        hdf_path: Path,
        infiltration_df: pd.DataFrame,
        scale_factors: Dict[str, float]
    ) -> Optional[pd.DataFrame]:
        """
        Update infiltration parameters in the HDF file with scaling factors.
        Supports any numeric columns present in the DataFrame.

        Parameters
        ----------
        hdf_path : Path
            Path to the HEC-RAS geometry HDF file
        infiltration_df : pd.DataFrame
            DataFrame containing infiltration parameters
        scale_factors : Dict[str, float]
            Dictionary mapping column names to their scaling factors

        Returns
        -------
        Optional[pd.DataFrame]
            The updated infiltration DataFrame if successful, None if operation fails
        """
        try:
            # Make a copy to avoid modifying the input DataFrame
            infiltration_df = infiltration_df.copy()

            # Apply scaling factors to specified columns
            for col, factor in scale_factors.items():
                if col in infiltration_df.columns and pd.api.types.is_numeric_dtype(infiltration_df[col]):
                    infiltration_df[col] *= factor
                else:
                    logger.warning(f"Column {col} not found or not numeric - skipping scaling")

            # Use set_infiltration_table to write the scaled data
            return HdfInfiltration.set_infiltration_table(hdf_path, infiltration_df)

        except Exception as e:
            logger.error(f"Error scaling infiltration data in {hdf_path}: {str(e)}")
            return None
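    # Scaling sketch (editor's illustration; path and factors are placeholders):
    #   HdfInfiltration.scale_infiltration_data(
    #       Path('project.g01.hdf'), overrides_df,
    #       scale_factors={'Curve Number': 1.1, 'Minimum Infiltration Rate': 0.5})
    # Note that this delegates to set_infiltration_table(), which is listed in the
    # module docstring but is not defined anywhere in this file as shipped.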
    @staticmethod
    @log_call
    @standardize_input
    def get_infiltration_map(hdf_path: Path = None, ras_object: Any = None) -> dict:
        """Read the infiltration raster map from HDF file

        Args:
            hdf_path: Optional path to the HDF file. If not provided, uses first infiltration_hdf_path from rasmap_df
            ras_object: Optional RAS object. If not provided, uses global ras instance

        Returns:
            Dictionary mapping raster values to mukeys
        """
        if hdf_path is None:
            if ras_object is None:
                from .RasPrj import ras
                ras_object = ras
            hdf_path = Path(ras_object.rasmap_df.iloc[0]['infiltration_hdf_path'][0])

        with h5py.File(hdf_path, 'r') as hdf:
            raster_map_data = hdf['Raster Map'][:]
            return {int(item[0]): item[1].decode('utf-8') for item in raster_map_data}

    @staticmethod
    @log_call
    def calculate_soil_statistics(zonal_stats: list, raster_map: dict) -> pd.DataFrame:
        """Calculate soil statistics from zonal statistics

        Args:
            zonal_stats: List of zonal statistics
            raster_map: Dictionary mapping raster values to mukeys

        Returns:
            DataFrame with soil statistics including percentages and areas
        """

        try:
            # Availability check only; importing the zonal_stats name from
            # rasterstats here would shadow the zonal_stats argument above.
            import rasterstats  # noqa: F401
        except ImportError as e:
            logger.error("Failed to import rasterstats. Please run 'pip install rasterstats' and try again.")
            raise e
        # Initialize areas dictionary
        mukey_areas = {mukey: 0 for mukey in raster_map.values()}

        # Calculate total area and mukey areas
        total_area_sqm = 0
        for stat in zonal_stats:
            for raster_val, area in stat.items():
                mukey = raster_map.get(raster_val)
                if mukey:
                    mukey_areas[mukey] += area
                total_area_sqm += area

        # Create DataFrame rows
        rows = []
        for mukey, area_sqm in mukey_areas.items():
            if area_sqm > 0:
                rows.append({
                    'mukey': mukey,
                    'Percentage': (area_sqm / total_area_sqm) * 100,
                    'Area in Acres': area_sqm * HdfInfiltration.SQM_TO_ACRE,
                    'Area in Square Miles': area_sqm * HdfInfiltration.SQM_TO_SQMILE
                })

        return pd.DataFrame(rows)
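    # Input-shape sketch (editor's illustration): each element of zonal_stats is
    # expected to map raster values to areas in square metres, matching the
    # stat.items() iteration above, e.g.
    #   zonal_stats = [{1: 1000.0, 2: 500.0}]     # raster value -> area (sq m)
    #   raster_map  = {1: '123456', 2: '654321'}  # raster value -> mukey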
    @staticmethod
    @log_call
    def get_significant_mukeys(soil_stats: pd.DataFrame,
                               threshold: float = 1.0) -> pd.DataFrame:
        """Get mukeys with percentage greater than threshold

        Args:
            soil_stats: DataFrame with soil statistics
            threshold: Minimum percentage threshold (default 1.0)

        Returns:
            DataFrame with significant mukeys and their statistics
        """
        significant = soil_stats[soil_stats['Percentage'] > threshold].copy()
        significant.sort_values('Percentage', ascending=False, inplace=True)
        return significant

    @staticmethod
    @log_call
    def calculate_total_significant_percentage(significant_mukeys: pd.DataFrame) -> float:
        """Calculate total percentage covered by significant mukeys

        Args:
            significant_mukeys: DataFrame of significant mukeys

        Returns:
            Total percentage covered by significant mukeys
        """
        return significant_mukeys['Percentage'].sum()
    @staticmethod
    @log_call
    def save_statistics(soil_stats: pd.DataFrame, output_path: Path,
                        include_timestamp: bool = True):
        """Save soil statistics to CSV

        Args:
            soil_stats: DataFrame with soil statistics
            output_path: Path to save CSV file
            include_timestamp: Whether to include timestamp in filename
        """
        if include_timestamp:
            timestamp = pd.Timestamp.now().strftime('%Y%m%d_%H%M%S')
            output_path = output_path.with_name(
                f"{output_path.stem}_{timestamp}{output_path.suffix}")

        soil_stats.to_csv(output_path, index=False)
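    # Filename sketch (editor's illustration): with include_timestamp=True,
    # Path('soil_statistics.csv') is rewritten to something like
    # 'soil_statistics_20240101_120000.csv' before writing.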
    @staticmethod
    @log_call
    @standardize_input
    def get_infiltration_parameters(hdf_path: Path = None, mukey: str = None, ras_object: Any = None) -> dict:
        """Get infiltration parameters for a specific mukey from HDF file

        Args:
            hdf_path: Optional path to the HDF file. If not provided, uses first infiltration_hdf_path from rasmap_df
            mukey: Mukey identifier
            ras_object: Optional RAS object. If not provided, uses global ras instance

        Returns:
            Dictionary of infiltration parameters
        """
        if hdf_path is None:
            if ras_object is None:
                from .RasPrj import ras
                ras_object = ras
            hdf_path = Path(ras_object.rasmap_df.iloc[0]['infiltration_hdf_path'][0])

        with h5py.File(hdf_path, 'r') as hdf:
            if 'Infiltration Parameters' not in hdf:
                raise KeyError("No infiltration parameters found in HDF file")

            params = hdf['Infiltration Parameters'][:]
            for row in params:
                if row[0].decode('utf-8') == mukey:
                    return {
                        'Initial Loss (in)': float(row[1]),
                        'Constant Loss Rate (in/hr)': float(row[2]),
                        'Impervious Area (%)': float(row[3])
                    }
        return None
    @staticmethod
    @log_call
    def calculate_weighted_parameters(soil_stats: pd.DataFrame,
                                      infiltration_params: dict) -> dict:
        """Calculate weighted infiltration parameters based on soil statistics

        Args:
            soil_stats: DataFrame with soil statistics
            infiltration_params: Dictionary of infiltration parameters by mukey

        Returns:
            Dictionary of weighted average infiltration parameters
        """
        total_weight = soil_stats['Percentage'].sum()

        weighted_params = {
            'Initial Loss (in)': 0.0,
            'Constant Loss Rate (in/hr)': 0.0,
            'Impervious Area (%)': 0.0
        }

        for _, row in soil_stats.iterrows():
            mukey = row['mukey']
            weight = row['Percentage'] / total_weight

            if mukey in infiltration_params:
                for param in weighted_params:
                    weighted_params[param] += (
                        infiltration_params[mukey][param] * weight
                    )

        return weighted_params
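    # Worked example (editor's illustration): two mukeys covering 60% and 40%
    # with initial losses of 0.5 in and 1.0 in give a weighted initial loss of
    #   0.6 * 0.5 + 0.4 * 1.0 = 0.7 in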


# Example usage:
"""
from pathlib import Path

# Initialize paths
raster_path = Path('input_files/gSSURGO_InfiltrationDC.tif')
boundary_path = Path('input_files/WF_Boundary_Simple.shp')
hdf_path = raster_path.with_suffix('.hdf')

# Get infiltration mapping
infil_map = HdfInfiltration.get_infiltration_map(hdf_path)

# Get zonal statistics (using RasMapper class)
clipped_data, transform, nodata = RasMapper.clip_raster_with_boundary(
    raster_path, boundary_path)
stats = RasMapper.calculate_zonal_stats(
    boundary_path, clipped_data, transform, nodata)

# Calculate soil statistics
soil_stats = HdfInfiltration.calculate_soil_statistics(stats, infil_map)

# Get significant mukeys (>1%)
significant = HdfInfiltration.get_significant_mukeys(soil_stats, threshold=1.0)

# Calculate total percentage of significant mukeys
total_significant = HdfInfiltration.calculate_total_significant_percentage(significant)
print(f"Total percentage of significant mukeys: {total_significant}%")

# Get infiltration parameters for each significant mukey
infiltration_params = {}
for mukey in significant['mukey']:
    params = HdfInfiltration.get_infiltration_parameters(hdf_path, mukey)
    if params:
        infiltration_params[mukey] = params

# Calculate weighted parameters
weighted_params = HdfInfiltration.calculate_weighted_parameters(
    significant, infiltration_params)
print("Weighted infiltration parameters:", weighted_params)

# Save results
HdfInfiltration.save_statistics(soil_stats, Path('soil_statistics.csv'))
"""
'''

THIS FUNCTION IS VERY CLOSE BUT DOES NOT WORK BECAUSE IT DOES NOT PRESERVE THE EXACT STRUCTURE OF THE HDF FILE.
WHEN RAS LOADS THE HDF, IT IGNORES THE DATA IN THE TABLE AND REPLACES IT WITH NULLS.


@staticmethod
@log_call
def set_infiltration_baseoverrides(
    hdf_path: Path,
    infiltration_df: pd.DataFrame
) -> Optional[pd.DataFrame]:
    """
    Set base overrides for infiltration parameters in the HDF file while preserving
    the exact structure of the existing dataset.

    This function ensures that the HDF structure is maintained exactly as in the
    original file, including field names, data types, and string lengths. It updates
    the values while preserving all dataset attributes.

    Parameters
    ----------
    hdf_path : Path
        Path to the HEC-RAS geometry HDF file
    infiltration_df : pd.DataFrame
        DataFrame containing infiltration parameters with columns matching HDF structure.
        The first column should be 'Name' or 'Land Cover Name'.

    Returns
    -------
    Optional[pd.DataFrame]
        The infiltration DataFrame if successful, None if operation fails
    """
    try:
        # Make a copy to avoid modifying the input DataFrame
        infiltration_df = infiltration_df.copy()

        # Check for and rename the first column if needed
        if "Land Cover Name" in infiltration_df.columns:
            name_col = "Land Cover Name"
        else:
            name_col = "Name"
            # Rename 'Name' to 'Land Cover Name' for HDF dataset
            infiltration_df = infiltration_df.rename(columns={"Name": "Land Cover Name"})

        table_path = '/Geometry/Infiltration/Base Overrides'

        with h5py.File(hdf_path, 'r') as hdf_file_read:
            # Check if dataset exists
            if table_path not in hdf_file_read:
                logger.warning(f"No infiltration data found in {hdf_path}. Creating new dataset.")
                # If dataset doesn't exist, use the standard set_infiltration_baseoverrides method
                return HdfInfiltration.set_infiltration_baseoverrides(hdf_path, infiltration_df)

            # Get the exact dtype of the existing dataset
            existing_dtype = hdf_file_read[table_path].dtype

            # Extract column names from the existing dataset
            existing_columns = existing_dtype.names

            # Check if all columns in the DataFrame exist in the HDF dataset
            for col in infiltration_df.columns:
                hdf_col = col
                if col == "Name" and "Land Cover Name" in existing_columns:
                    hdf_col = "Land Cover Name"

                if hdf_col not in existing_columns:
                    logger.warning(f"Column {col} not found in existing dataset - it will be ignored")

            # Get current dataset to preserve structure for non-updated fields
            existing_data = hdf_file_read[table_path][()]

            # Create a structured array with the exact same dtype as the existing dataset
            structured_array = np.zeros(len(infiltration_df), dtype=existing_dtype)

            # Copy data from DataFrame to structured array, preserving existing structure
            for col in existing_columns:
                df_col = col
                # Map 'Land Cover Name' to 'Name' if needed
                if col == "Land Cover Name" and name_col == "Name":
                    df_col = "Name"

                if df_col in infiltration_df.columns:
                    # Handle string fields - need to maintain exact string length
                    if existing_dtype[col].kind == 'S':
                        # Get the exact string length from dtype
                        max_str_len = existing_dtype[col].itemsize
                        # Convert to bytes with correct length
                        structured_array[col] = infiltration_df[df_col].astype(str).values.astype(f'|S{max_str_len}')
                    else:
                        # Handle numeric fields - ensure correct numeric type
                        if existing_dtype[col].kind in ('f', 'i'):
                            structured_array[col] = infiltration_df[df_col].values.astype(existing_dtype[col])
                        else:
                            # For any other type, just copy as is
                            structured_array[col] = infiltration_df[df_col].values
                else:
                    logger.warning(f"Column {col} not in DataFrame - using default values")
                    # Use zeros for numeric fields or empty strings for string fields
                    if existing_dtype[col].kind == 'S':
                        structured_array[col] = np.array([''] * len(infiltration_df), dtype=f'|S{existing_dtype[col].itemsize}')

        # Write back to HDF file
        with h5py.File(hdf_path, 'a') as hdf_file_write:
            # Delete existing dataset
            if table_path in hdf_file_write:
                del hdf_file_write[table_path]

            # Create new dataset with exact same properties as original
            dataset = hdf_file_write.create_dataset(
                table_path,
                data=structured_array,
                dtype=existing_dtype,
                compression='gzip',
                compression_opts=1,
                chunks=(100,),
                maxshape=(None,)
            )

        # Return the DataFrame with columns matching what was actually written
        result_df = pd.DataFrame()
        for col in existing_columns:
            if existing_dtype[col].kind == 'S':
                # Convert bytes back to string
                result_df[col] = [val.decode('utf-8').strip() for val in structured_array[col]]
            else:
                result_df[col] = structured_array[col]

        return result_df

    except Exception as e:
        logger.error(f"Error setting infiltration data in {hdf_path}: {str(e)}")
        return None
'''
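Below is a minimal end-to-end sketch of the layer-table workflow added in 0.72.0. It is an editor's illustration, not part of the package: the import assumes HdfInfiltration is exported at the package root, 'Infiltration.hdf' is a placeholder path, and the column set matches the required_columns check in set_infiltration_layer_data above.

from pathlib import Path
import pandas as pd
from ras_commander import HdfInfiltration

# Build a small infiltration table with the four required columns.
table = pd.DataFrame({
    'Name': ['Forest', 'Urban'],
    'Curve Number': [55.0, 92.0],
    'Abstraction Ratio': [0.2, 0.2],
    'Minimum Infiltration Rate': [0.30, 0.05],
})

# Write it to the layer HDF; returns the DataFrame on success, None on failure.
result = HdfInfiltration.set_infiltration_layer_data(Path('Infiltration.hdf'), table)
print('write succeeded' if result is not None else 'write failed')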