ras-commander 0.47.0__py3-none-any.whl → 0.49.0__py3-none-any.whl
This diff shows the changes between package versions as they were published to a public registry. It is provided for informational purposes only.
- ras_commander/Decorators.py +18 -1
- ras_commander/HdfBase.py +307 -197
- ras_commander/HdfBndry.py +94 -287
- ras_commander/HdfFluvialPluvial.py +156 -239
- ras_commander/HdfInfiltration.py +410 -0
- ras_commander/HdfMesh.py +121 -41
- ras_commander/HdfPipe.py +127 -175
- ras_commander/HdfPlan.py +144 -58
- ras_commander/HdfPlot.py +104 -0
- ras_commander/HdfPump.py +76 -28
- ras_commander/HdfResultsMesh.py +186 -167
- ras_commander/HdfResultsPlan.py +76 -220
- ras_commander/HdfResultsPlot.py +182 -0
- ras_commander/HdfResultsXsec.py +185 -145
- ras_commander/HdfStruc.py +65 -35
- ras_commander/HdfUtils.py +435 -518
- ras_commander/HdfXsec.py +137 -127
- ras_commander/RasCmdr.py +13 -0
- ras_commander/RasExamples.py +14 -0
- ras_commander/RasGeo.py +11 -0
- ras_commander/RasGpt.py +8 -0
- ras_commander/RasMapper.py +105 -0
- ras_commander/RasPlan.py +30 -0
- ras_commander/RasPrj.py +34 -0
- ras_commander/RasToGo.py +16 -0
- ras_commander/RasUnsteady.py +15 -0
- ras_commander/RasUtils.py +31 -0
- ras_commander/__init__.py +10 -0
- {ras_commander-0.47.0.dist-info → ras_commander-0.49.0.dist-info}/METADATA +74 -9
- ras_commander-0.49.0.dist-info/RECORD +34 -0
- ras_commander-0.47.0.dist-info/RECORD +0 -30
- {ras_commander-0.47.0.dist-info → ras_commander-0.49.0.dist-info}/LICENSE +0 -0
- {ras_commander-0.47.0.dist-info → ras_commander-0.49.0.dist-info}/WHEEL +0 -0
- {ras_commander-0.47.0.dist-info → ras_commander-0.49.0.dist-info}/top_level.txt +0 -0
ras_commander/HdfResultsMesh.py
CHANGED
@@ -6,6 +6,61 @@ from the https://github.com/fema-ffrd/rashdf library,
 released under MIT license and Copyright (c) 2024 fema-ffrd

 The file has been forked and modified for use in RAS Commander.
+
+-----
+
+All methods in this class are static and designed to be used without instantiation.
+
+Public Functions:
+- get_mesh_summary(): Get summary output data for a variable
+- get_mesh_timeseries(): Get timeseries output for a mesh and variable
+- get_mesh_faces_timeseries(): Get timeseries for all face-based variables
+- get_mesh_cells_timeseries(): Get timeseries for mesh cells
+- get_mesh_last_iter(): Get last iteration count for cells
+- get_mesh_max_ws(): Get maximum water surface elevation at each cell
+- get_mesh_min_ws(): Get minimum water surface elevation at each cell
+- get_mesh_max_face_v(): Get maximum face velocity at each face
+- get_mesh_min_face_v(): Get minimum face velocity at each face
+- get_mesh_max_ws_err(): Get maximum water surface error at each cell
+- get_mesh_max_iter(): Get maximum iteration count at each cell
+
+Private Functions:
+- _get_mesh_timeseries_output_path(): Get HDF path for timeseries output #REDUNDANT??
+- _get_mesh_cells_timeseries_output(): Internal handler for cell timeseries #REDUNDANT??
+- _get_mesh_timeseries_output(): Internal handler for mesh timeseries # FACES??
+- _get_mesh_timeseries_output_values_units(): Get values and units for timeseries
+- _get_available_meshes(): Get list of available meshes in HDF #USE HDFBASE OR HDFUTIL
+- get_mesh_summary_output(): Internal handler for summary output
+- get_mesh_summary_output_group(): Get HDF group for summary output #REDUNDANT?? Include in Above
+
+The class works with HEC-RAS version 6.0+ plan HDF files and uses HdfBase and
+HdfUtils for common operations. Methods use @log_call decorator for logging and
+@standardize_input decorator to handle different input types.
+
+
+
+
+
+
+REVISIONS MADE:
+
+Use get_ prefix for functions that return data.
+BUT, we will never set results data, so we should use get_ for results data.
+
+Renamed functions:
+- mesh_summary_output() to get_mesh_summary()
+- mesh_timeseries_output() to get_mesh_timeseries()
+- mesh_faces_timeseries_output() to get_mesh_faces_timeseries()
+- mesh_cells_timeseries_output() to get_mesh_cells_timeseries()
+- mesh_last_iter() to get_mesh_last_iter()
+- mesh_max_ws() to get_mesh_max_ws()
+
+
+
+
+
+
+
 """

 import numpy as np
@@ -24,76 +79,75 @@ logger = get_logger(__name__)

 class HdfResultsMesh:
     """
-
+    Handles mesh-related results from HEC-RAS HDF files.

-
-
-
+    Provides methods to extract and analyze:
+    - Mesh summary outputs
+    - Timeseries data
+    - Water surface elevations
+    - Velocities
+    - Error metrics

-
-    for common operations and utilities.
-
-    Methods in this class use the @log_call decorator for logging and the
-    @standardize_input decorator to handle different input types (e.g.,
-    plan number, file path).
-
-    Attributes:
-        None
-
-    Note:
-        This class is designed to work with HEC-RAS version 6.0 and later.
+    Works with HEC-RAS 6.0+ plan HDF files.
     """

     @staticmethod
     @log_call
     @standardize_input(file_type='plan_hdf')
-    def …
+    def get_mesh_summary(hdf_path: Path, var: str, round_to: str = "100ms") -> pd.DataFrame:
         """
-
+        Get timeseries output for a specific mesh and variable.

         Args:
-            hdf_path (Path): Path to the …
-
-
+            hdf_path (Path): Path to the HDF file
+            mesh_name (str): Name of the mesh
+            var (str): Variable to retrieve (see valid options below)
+            truncate (bool): Whether to truncate trailing zeros (default True)

         Returns:
-
+            xr.DataArray: DataArray with dimensions:
+                - time: Timestamps
+                - face_id/cell_id: IDs for faces/cells
+            And attributes:
+                - units: Variable units
+                - mesh_name: Name of mesh
+                - variable: Variable name

-
-
+        Valid variables include:
+            "Water Surface", "Face Velocity", "Cell Velocity X"...
         """
         try:
             with h5py.File(hdf_path, 'r') as hdf_file:
-                return HdfResultsMesh. …
+                return HdfResultsMesh.get_mesh_summary_output(hdf_file, var, round_to)
         except Exception as e:
-            logger.error(f"Error in …
+            logger.error(f"Error in get_mesh_summary: {str(e)}")
             logger.error(f"Variable: {var}")
             raise ValueError(f"Failed to get summary output: {str(e)}")

     @staticmethod
     @log_call
     @standardize_input(file_type='plan_hdf')
-    def …
+    def get_mesh_timeseries(hdf_path: Path, mesh_name: str, var: str, truncate: bool = True) -> xr.DataArray:
         """
         Get timeseries output for a specific mesh and variable.

         Args:
-            hdf_path (Path): Path to the HDF file
-            mesh_name (str): Name of the mesh
-            var (str): Variable to retrieve
-
-                "Face Flow", "Face Water Surface", "Cell Volume", "Cell Volume Error",
-                "Cell Water Surface Error", "Cell Courant", "Face Courant",
-                "Cell Hydraulic Depth", "Cell Invert Depth",
-                "Cell Cumulative Precipitation Depth", "Cell Divergence Term",
-                "Cell Eddy Viscosity X", "Cell Eddy Viscosity Y", "Cell Flow Balance",
-                "Cell Storage Term", "Cell Water Source Term", "Face Cumulative Volume",
-                "Face Eddy Viscosity", "Face Flow Period Average", "Face Friction Term",
-                "Face Pressure Gradient Term", "Face Shear Stress", "Face Tangential Velocity"
-            truncate (bool): Whether to truncate the output (default True).
+            hdf_path (Path): Path to the HDF file
+            mesh_name (str): Name of the mesh
+            var (str): Variable to retrieve (see valid options below)
+            truncate (bool): Whether to truncate trailing zeros (default True)

         Returns:
-            xr.DataArray: DataArray …
+            xr.DataArray: DataArray with dimensions:
+                - time: Timestamps
+                - face_id/cell_id: IDs for faces/cells
+            And attributes:
+                - units: Variable units
+                - mesh_name: Name of mesh
+                - variable: Variable name
+
+        Valid variables include:
+            "Water Surface", "Face Velocity", "Cell Velocity X"...
         """
         with h5py.File(hdf_path, 'r') as hdf_file:
             return HdfResultsMesh._get_mesh_timeseries_output(hdf_file, mesh_name, var, truncate)
@@ -101,7 +155,7 @@ class HdfResultsMesh:
     @staticmethod
     @log_call
     @standardize_input(file_type='plan_hdf')
-    def …
+    def get_mesh_faces_timeseries(hdf_path: Path, mesh_name: str) -> xr.Dataset:
         """
         Get timeseries output for all face-based variables of a specific mesh.

@@ -117,7 +171,7 @@ class HdfResultsMesh:

         for var in face_vars:
             try:
-                da = HdfResultsMesh. …
+                da = HdfResultsMesh.get_mesh_timeseries(hdf_path, mesh_name, var)
                 # Assign the variable name as the DataArray name
                 da.name = var.lower().replace(' ', '_')
                 datasets.append(da)
@@ -137,34 +191,34 @@ class HdfResultsMesh:
     @staticmethod
     @log_call
     @standardize_input(file_type='plan_hdf')
-    def …
+    def get_mesh_cells_timeseries(hdf_path: Path, mesh_names: Optional[Union[str, List[str]]] = None, var: Optional[str] = None, truncate: bool = False, ras_object: Optional[Any] = None) -> Dict[str, xr.Dataset]:
         """
-        Get mesh cells timeseries output
+        Get mesh cells timeseries output.

         Args:
-            hdf_path ( …
-            mesh_names ( …
-            var ( …
-            truncate (bool): …
-            ras_object ( …
+            hdf_path (Path): Path to HDF file
+            mesh_names (str|List[str], optional): Mesh name(s). If None, processes all meshes
+            var (str, optional): Variable name. If None, retrieves all variables
+            truncate (bool): Remove trailing zeros if True
+            ras_object (Any, optional): RAS object if available

         Returns:
-            Dict[str, xr.Dataset]: …
-
-
-
+            Dict[str, xr.Dataset]: Dictionary mapping mesh names to datasets containing:
+                - Time-indexed variables
+                - Cell/face IDs
+                - Variable metadata
         """
         try:
             with h5py.File(hdf_path, 'r') as hdf_file:
-                return HdfResultsMesh. …
+                return HdfResultsMesh._get_mesh_cells_timeseries_output(hdf_file, mesh_names, var, truncate)
         except Exception as e:
-            logger.error(f"Error in …
+            logger.error(f"Error in get_mesh_cells_timeseries: {str(e)}")
             raise ValueError(f"Error processing timeseries output data: {e}")

     @staticmethod
     @log_call
     @standardize_input(file_type='plan_hdf')
-    def …
+    def get_mesh_last_iter(hdf_path: Path) -> pd.DataFrame:
         """
         Get last iteration count for each mesh cell.

@@ -174,33 +228,31 @@ class HdfResultsMesh:
         Returns:
             pd.DataFrame: DataFrame containing last iteration counts.
         """
-        return HdfResultsMesh. …
+        return HdfResultsMesh.get_mesh_summary_output(hdf_path, "Cell Last Iteration")


     @staticmethod
     @log_call
     @standardize_input(file_type='plan_hdf')
-    def …
+    def get_mesh_max_ws(hdf_path: Path, round_to: str = "100ms") -> pd.DataFrame:
         """
-        Get maximum …
+        Get maximum water surface elevation for each mesh cell.

         Args:
             hdf_path (Path): Path to the HDF file.
             round_to (str): Time rounding specification (default "100ms").

         Returns:
-            pd.DataFrame: DataFrame containing maximum …
+            pd.DataFrame: DataFrame containing maximum water surface elevations.

         Raises:
-            ValueError: If there's an error processing the maximum …
-
-        Note: The Maximum Iteration is labeled as "Cell Last Iteration" in the HDF file
+            ValueError: If there's an error processing the maximum water surface data.
         """
         try:
             with h5py.File(hdf_path, 'r') as hdf_file:
-                return HdfResultsMesh. …
+                return HdfResultsMesh.get_mesh_summary_output(hdf_file, "Maximum Water Surface", round_to)
         except Exception as e:
-            logger.error(f"Error in …
+            logger.error(f"Error in get_mesh_max_ws: {str(e)}")
             raise ValueError(f"Failed to get maximum water surface: {str(e)}")


@@ -210,7 +262,7 @@ class HdfResultsMesh:
     @staticmethod
     @log_call
     @standardize_input(file_type='plan_hdf')
-    def …
+    def get_mesh_min_ws(hdf_path: Path, round_to: str = "100ms") -> pd.DataFrame:
         """
         Get minimum water surface elevation for each mesh cell.

@@ -223,15 +275,15 @@ class HdfResultsMesh:
         """
         try:
             with h5py.File(hdf_path, 'r') as hdf_file:
-                return HdfResultsMesh. …
+                return HdfResultsMesh.get_mesh_summary_output(hdf_file, "Minimum Water Surface", round_to)
         except Exception as e:
-            logger.error(f"Error in …
+            logger.error(f"Error in get_mesh_min_ws: {str(e)}")
             raise ValueError(f"Failed to get minimum water surface: {str(e)}")

     @staticmethod
     @log_call
     @standardize_input(file_type='plan_hdf')
-    def …
+    def get_mesh_max_face_v(hdf_path: Path, round_to: str = "100ms") -> pd.DataFrame:
         """
         Get maximum face velocity for each mesh face.

@@ -241,21 +293,18 @@ class HdfResultsMesh:

         Returns:
             pd.DataFrame: DataFrame containing maximum face velocities.
-
-        Raises:
-            ValueError: If there's an error processing the maximum face velocity data.
         """
         try:
             with h5py.File(hdf_path, 'r') as hdf_file:
-                return HdfResultsMesh. …
+                return HdfResultsMesh.get_mesh_summary_output(hdf_file, "Maximum Face Velocity", round_to)
         except Exception as e:
-            logger.error(f"Error in …
+            logger.error(f"Error in get_mesh_max_face_v: {str(e)}")
             raise ValueError(f"Failed to get maximum face velocity: {str(e)}")

     @staticmethod
     @log_call
     @standardize_input(file_type='plan_hdf')
-    def …
+    def get_mesh_min_face_v(hdf_path: Path, round_to: str = "100ms") -> pd.DataFrame:
         """
         Get minimum face velocity for each mesh cell.

@@ -271,15 +320,15 @@ class HdfResultsMesh:
         """
         try:
             with h5py.File(hdf_path, 'r') as hdf_file:
-                return HdfResultsMesh. …
+                return HdfResultsMesh.get_mesh_summary_output(hdf_file, "Minimum Face Velocity", round_to)
         except Exception as e:
-            logger.error(f"Error in …
+            logger.error(f"Error in get_mesh_min_face_v: {str(e)}")
             raise ValueError(f"Failed to get minimum face velocity: {str(e)}")

     @staticmethod
     @log_call
     @standardize_input(file_type='plan_hdf')
-    def …
+    def get_mesh_max_ws_err(hdf_path: Path, round_to: str = "100ms") -> pd.DataFrame:
         """
         Get maximum water surface error for each mesh cell.

@@ -295,16 +344,16 @@ class HdfResultsMesh:
         """
         try:
             with h5py.File(hdf_path, 'r') as hdf_file:
-                return HdfResultsMesh. …
+                return HdfResultsMesh.get_mesh_summary_output(hdf_file, "Cell Maximum Water Surface Error", round_to)
         except Exception as e:
-            logger.error(f"Error in …
+            logger.error(f"Error in get_mesh_max_ws_err: {str(e)}")
             raise ValueError(f"Failed to get maximum water surface error: {str(e)}")


     @staticmethod
     @log_call
     @standardize_input(file_type='plan_hdf')
-    def …
+    def get_mesh_max_iter(hdf_path: Path, round_to: str = "100ms") -> pd.DataFrame:
         """
         Get maximum iteration count for each mesh cell.

@@ -313,8 +362,26 @@ class HdfResultsMesh:
             round_to (str): Time rounding specification (default "100ms").

         Returns:
-            pd.DataFrame: DataFrame containing maximum iteration counts.
+            pd.DataFrame: DataFrame containing maximum iteration counts with face geometry.
+
+        Raises:
+            ValueError: If there's an error processing the maximum iteration data.
+        """
+        """
+        Get maximum iteration count for each mesh cell.
+
+        Args:
+            hdf_path (Path): Path to the HDF file
+            round_to (str): Time rounding specification (default "100ms").

+        Returns:
+            pd.DataFrame: DataFrame containing maximum iteration counts with columns:
+                - mesh_name: Name of the mesh
+                - cell_id: ID of the cell
+                - cell_last_iteration: Maximum number of iterations
+                - cell_last_iteration_time: Time when max iterations occurred
+                - geometry: Point geometry representing cell center
+
         Raises:
             ValueError: If there's an error processing the maximum iteration data.

@@ -322,13 +389,12 @@ class HdfResultsMesh:
         """
         try:
             with h5py.File(hdf_path, 'r') as hdf_file:
-                return HdfResultsMesh. …
+                return HdfResultsMesh.get_mesh_summary_output(hdf_file, "Cell Last Iteration", round_to)
         except Exception as e:
             logger.error(f"Error in mesh_max_iter: {str(e)}")
             raise ValueError(f"Failed to get maximum iteration count: {str(e)}")


-


     @staticmethod
@@ -347,10 +413,13 @@ class HdfResultsMesh:


     @staticmethod
-    def …
+    def _get_mesh_cells_timeseries_output(hdf_file: h5py.File,
+                                          mesh_names: Optional[Union[str, List[str]]] = None,
+                                          var: Optional[str] = None,
+                                          truncate: bool = False) -> Dict[str, xr.Dataset]:
         """
         Get mesh cells timeseries output for specified meshes and variables.
-
+
         Args:
             hdf_file (h5py.File): Open HDF file object.
             mesh_names (Optional[Union[str, List[str]]]): Name(s) of the mesh(es). If None, processes all available meshes.
@@ -381,8 +450,8 @@ class HdfResultsMesh:
         }

         try:
-            start_time = HdfBase. …
-            time_stamps = HdfBase. …
+            start_time = HdfBase.get_simulation_start_time(hdf_file)
+            time_stamps = HdfBase.get_unsteady_timestamps(hdf_file)

             if mesh_names is None:
                 mesh_names = HdfResultsMesh._get_available_meshes(hdf_file)
@@ -470,19 +539,26 @@ class HdfResultsMesh:
             dataset = hdf_file[path]
             values = dataset[:]
             units = dataset.attrs.get("Units", "").decode("utf-8")
-
+
+            # Get start time and timesteps
+            start_time = HdfBase.get_simulation_start_time(hdf_file)
+            # Updated to use the new function name from HdfUtils
+            timesteps = HdfUtils.convert_timesteps_to_datetimes(
+                np.array(hdf_file["Results/Unsteady/Output/Output Blocks/Base Output/Unsteady Time Series/Time"][:]),
+                start_time
+            )

             if truncate:
                 non_zero = np.nonzero(values)[0]
                 if len(non_zero) > 0:
                     start, end = non_zero[0], non_zero[-1] + 1
                     values = values[start:end]
-
+                    timesteps = timesteps[start:end]

             # Determine if this is a face-based or cell-based variable
             id_dim = "face_id" if "Face" in var else "cell_id"
             dims = ["time", id_dim] if values.ndim == 2 else ["time"]
-            coords = {"time": …
+            coords = {"time": timesteps}
             if values.ndim == 2:
                 coords[id_dim] = np.arange(values.shape[1])

@@ -530,16 +606,13 @@ class HdfResultsMesh:
         Returns:
             List[str]: A list of mesh names.
         """
-
-
-        if base_path in hdf_file:
-            for name in hdf_file[base_path]:
-                if isinstance(hdf_file[f"{base_path}/{name}"], h5py.Group):
-                    mesh_names.append(name)
-        return mesh_names
+        return HdfMesh.get_mesh_area_names(hdf_file)
+

     @staticmethod
-    def …
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_mesh_summary_output(hdf_file: h5py.File, var: str, round_to: str = "100ms") -> pd.DataFrame:
         """
         Get the summary output data for a given variable from the HDF file.

@@ -564,12 +637,18 @@ class HdfResultsMesh:
         """
         try:
             dfs = []
-            start_time = HdfBase. …
+            start_time = HdfBase.get_simulation_start_time(hdf_file)

             logger.info(f"Processing summary output for variable: {var}")
-
+            d2_flow_areas = hdf_file.get("Geometry/2D Flow Areas/Attributes")
+            if d2_flow_areas is None:
+                return pd.DataFrame()
+
+            for d2_flow_area in d2_flow_areas[:]:
+                mesh_name = HdfUtils.convert_ras_string(d2_flow_area[0])
+                cell_count = d2_flow_area[-1]
                 logger.debug(f"Processing mesh: {mesh_name} with {cell_count} cells")
-                group = HdfResultsMesh. …
+                group = HdfResultsMesh.get_mesh_summary_output_group(hdf_file, mesh_name, var)

                 data = group[:]
                 logger.debug(f"Data shape for {var} in {mesh_name}: {data.shape}")
@@ -585,7 +664,7 @@ class HdfResultsMesh:
                     "mesh_name": [mesh_name] * data.shape[1],
                     "cell_id" if "Face" not in var else "face_id": range(data.shape[1]),
                     f"{var.lower().replace(' ', '_')}": data[0, :],
-                    f"{var.lower().replace(' ', '_')}_time": HdfUtils. …
+                    f"{var.lower().replace(' ', '_')}_time": HdfUtils.convert_timesteps_to_datetimes(
                         data[1, :], start_time, time_unit="days", round_to=round_to
                     )
                 })
@@ -604,13 +683,13 @@ class HdfResultsMesh:

                 # Add geometry based on variable type
                 if "Face" in var:
-                    face_df = HdfMesh. …
+                    face_df = HdfMesh.get_mesh_cell_faces(hdf_file)
                     if not face_df.empty:
                         df = df.merge(face_df[['mesh_name', 'face_id', 'geometry']],
                                       on=['mesh_name', 'face_id'],
                                       how='left')
                 else:
-                    cell_df = HdfMesh. …
+                    cell_df = HdfMesh.get_mesh_cell_points(hdf_file)
                     if not cell_df.empty:
                         df = df.merge(cell_df[['mesh_name', 'cell_id', 'geometry']],
                                       on=['mesh_name', 'cell_id'],
@@ -652,7 +731,7 @@ class HdfResultsMesh:


     @staticmethod
-    def …
+    def get_mesh_summary_output_group(hdf_file: h5py.File, mesh_name: str, var: str) -> Union[h5py.Group, h5py.Dataset]:
         """
         Return the HDF group for a given mesh and summary output variable.

@@ -673,63 +752,3 @@ class HdfResultsMesh:
             raise ValueError(f"Could not find HDF group or dataset at path '{output_path}'")
         return output_item

-    @staticmethod
-    def plot_mesh_variable(variable_df: pd.DataFrame, variable_name: str, colormap: str = 'viridis', point_size: int = 10) -> None:
-        """
-        Plot any mesh variable with consistent styling.
-
-        Args:
-            variable_df (pd.DataFrame): DataFrame containing the variable data
-            variable_name (str): Name of the variable (for labels)
-            colormap (str): Matplotlib colormap to use. Default: 'viridis'
-            point_size (int): Size of the scatter points. Default: 10
-
-        Returns:
-            None
-
-        Raises:
-            ImportError: If matplotlib is not installed
-            ValueError: If required columns are missing from variable_df
-        """
-        try:
-            import matplotlib.pyplot as plt
-        except ImportError:
-            logger.error("matplotlib is required for plotting. Please install it with 'pip install matplotlib'")
-            raise ImportError("matplotlib is required for plotting")
-
-        # Get cell coordinates if not in variable_df
-        if 'geometry' not in variable_df.columns:
-            cell_coords = HdfMesh.mesh_cell_points(plan_hdf_path)
-            merged_df = pd.merge(variable_df, cell_coords, on=['mesh_name', 'cell_id'])
-        else:
-            merged_df = variable_df
-
-        # Extract coordinates, handling None values
-        merged_df = merged_df.dropna(subset=['geometry'])
-        merged_df['x'] = merged_df['geometry'].apply(lambda geom: geom.x if geom is not None else None)
-        merged_df['y'] = merged_df['geometry'].apply(lambda geom: geom.y if geom is not None else None)
-
-        # Drop any rows with None coordinates
-        merged_df = merged_df.dropna(subset=['x', 'y'])
-
-        if len(merged_df) == 0:
-            logger.error("No valid coordinates found for plotting")
-            raise ValueError("No valid coordinates found for plotting")
-
-        # Create plot
-        fig, ax = plt.subplots(figsize=(12, 8))
-        scatter = ax.scatter(merged_df['x'], merged_df['y'],
-                             c=merged_df[variable_name],
-                             cmap=colormap,
-                             s=point_size)
-
-        # Customize plot
-        ax.set_title(f'{variable_name} per Cell')
-        ax.set_xlabel('X Coordinate')
-        ax.set_ylabel('Y Coordinate')
-        plt.colorbar(scatter, label=variable_name)
-        ax.grid(True, linestyle='--', alpha=0.7)
-        plt.rcParams.update({'font.size': 12})
-        plt.tight_layout()
-        plt.show()
-
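
The new module docstring above catalogs the renamed public getters. For orientation, a minimal usage sketch; the plan HDF path and the mesh name "2D Area 1" are placeholders, and the import path assumes the class is used directly from its module:

```python
from pathlib import Path

from ras_commander.HdfResultsMesh import HdfResultsMesh

# Placeholder plan HDF produced by a HEC-RAS 6.x unsteady run
plan_hdf = Path("MyProject.p01.hdf")

# Summary outputs return pandas DataFrames (renamed from mesh_max_ws() / mesh_last_iter())
max_ws = HdfResultsMesh.get_mesh_max_ws(plan_hdf)
last_iter = HdfResultsMesh.get_mesh_last_iter(plan_hdf)

# Timeseries output for one mesh/variable returns an xarray DataArray
# with a time dimension and a cell_id/face_id dimension
ws = HdfResultsMesh.get_mesh_timeseries(plan_hdf, "2D Area 1", "Water Surface")
print(ws.dims, ws.attrs.get("units"))
```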
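
get_mesh_cells_timeseries() now documents its return type as a dictionary keyed by mesh name, with one xarray Dataset per mesh. A sketch of consuming that shape; the HDF path is a placeholder, and the loop only inspects whatever variables are actually present rather than assuming their names:

```python
from pathlib import Path

from ras_commander.HdfResultsMesh import HdfResultsMesh

plan_hdf = Path("MyProject.p01.hdf")  # placeholder path

# Per the new docstring, None for mesh_names/var means "all meshes, all variables"
results = HdfResultsMesh.get_mesh_cells_timeseries(plan_hdf)

for mesh_name, ds in results.items():
    for var_name, da in ds.data_vars.items():
        # Each DataArray is time-indexed, with a cell_id or face_id dimension
        print(f"{mesh_name}: {var_name} {dict(da.sizes)}")
```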
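
The timeseries hunk that adds `timesteps = timesteps[start:end]` now keeps the time coordinate aligned with the values after trailing/leading zero padding is trimmed. A standalone sketch of that logic (illustrative only, not the library code itself):

```python
import numpy as np
import pandas as pd


def truncate_zero_padding(values: np.ndarray, timesteps: pd.DatetimeIndex):
    """Drop leading/trailing all-zero samples and keep timestamps aligned with values."""
    non_zero = np.nonzero(values)[0]  # first-axis (time) indices holding any nonzero data
    if len(non_zero) == 0:
        return values, timesteps
    start, end = non_zero[0], non_zero[-1] + 1
    return values[start:end], timesteps[start:end]


# Synthetic example: three zero samples of padding on each side of the hydrograph
times = pd.date_range("2024-01-01", periods=10, freq="D")
vals = np.array([0.0, 0.0, 0.0, 1.2, 2.5, 3.1, 0.4, 0.0, 0.0, 0.0])
trimmed_vals, trimmed_times = truncate_zero_padding(vals, times)
print(len(trimmed_vals), trimmed_times[0], trimmed_times[-1])
```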
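
The final hunk removes plot_mesh_variable() from this module; the file list at the top shows a new ras_commander/HdfResultsPlot.py (+182) in 0.49.0, which presumably hosts plotting going forward. For a quick look at a summary DataFrame without that module, a standalone matplotlib sketch along the lines of the removed helper; it assumes the DataFrame carries shapely Point geometry in a 'geometry' column, as built by get_mesh_summary_output:

```python
import matplotlib.pyplot as plt
import pandas as pd


def plot_cell_summary(df: pd.DataFrame, value_col: str,
                      cmap: str = "viridis", point_size: int = 10) -> None:
    """Scatter-plot one summary column of a cell DataFrame that has Point geometry."""
    df = df.dropna(subset=["geometry", value_col])
    xs = df["geometry"].apply(lambda g: g.x)
    ys = df["geometry"].apply(lambda g: g.y)

    fig, ax = plt.subplots(figsize=(12, 8))
    sc = ax.scatter(xs, ys, c=df[value_col], cmap=cmap, s=point_size)
    ax.set_title(f"{value_col} per cell")
    ax.set_xlabel("X Coordinate")
    ax.set_ylabel("Y Coordinate")
    plt.colorbar(sc, label=value_col)
    plt.tight_layout()
    plt.show()


# Column name follows the var.lower().replace(' ', '_') pattern used in get_mesh_summary_output:
# plot_cell_summary(HdfResultsMesh.get_mesh_max_ws(plan_hdf), "maximum_water_surface")
```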