ras-commander 0.48.0.tar.gz → 0.50.0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ras_commander-0.48.0/ras_commander.egg-info → ras_commander-0.50.0}/PKG-INFO +77 -9
- {ras_commander-0.48.0 → ras_commander-0.50.0}/README.md +76 -8
- {ras_commander-0.48.0 → ras_commander-0.50.0}/ras_commander/Decorators.py +18 -1
- ras_commander-0.50.0/ras_commander/HdfBase.py +307 -0
- ras_commander-0.50.0/ras_commander/HdfBndry.py +312 -0
- ras_commander-0.50.0/ras_commander/HdfFluvialPluvial.py +309 -0
- ras_commander-0.50.0/ras_commander/HdfInfiltration.py +410 -0
- ras_commander-0.50.0/ras_commander/HdfMesh.py +461 -0
- {ras_commander-0.48.0 → ras_commander-0.50.0}/ras_commander/HdfPipe.py +127 -175
- ras_commander-0.50.0/ras_commander/HdfPlan.py +291 -0
- ras_commander-0.50.0/ras_commander/HdfPlot.py +104 -0
- {ras_commander-0.48.0 → ras_commander-0.50.0}/ras_commander/HdfPump.py +76 -28
- {ras_commander-0.48.0 → ras_commander-0.50.0}/ras_commander/HdfResultsMesh.py +190 -183
- ras_commander-0.50.0/ras_commander/HdfResultsPlan.py +257 -0
- ras_commander-0.50.0/ras_commander/HdfResultsPlot.py +182 -0
- ras_commander-0.50.0/ras_commander/HdfResultsXsec.py +312 -0
- {ras_commander-0.48.0 → ras_commander-0.50.0}/ras_commander/HdfStruc.py +65 -35
- ras_commander-0.50.0/ras_commander/HdfUtils.py +435 -0
- {ras_commander-0.48.0 → ras_commander-0.50.0}/ras_commander/HdfXsec.py +137 -127
- {ras_commander-0.48.0 → ras_commander-0.50.0}/ras_commander/LoggingConfig.py +13 -3
- {ras_commander-0.48.0 → ras_commander-0.50.0}/ras_commander/RasCmdr.py +13 -0
- {ras_commander-0.48.0 → ras_commander-0.50.0}/ras_commander/RasExamples.py +14 -0
- {ras_commander-0.48.0 → ras_commander-0.50.0}/ras_commander/RasGeo.py +11 -0
- {ras_commander-0.48.0 → ras_commander-0.50.0}/ras_commander/RasGpt.py +8 -0
- ras_commander-0.50.0/ras_commander/RasMapper.py +105 -0
- {ras_commander-0.48.0 → ras_commander-0.50.0}/ras_commander/RasPlan.py +30 -0
- {ras_commander-0.48.0 → ras_commander-0.50.0}/ras_commander/RasPrj.py +34 -0
- ras_commander-0.50.0/ras_commander/RasToGo.py +37 -0
- {ras_commander-0.48.0 → ras_commander-0.50.0}/ras_commander/RasUnsteady.py +15 -0
- {ras_commander-0.48.0 → ras_commander-0.50.0}/ras_commander/RasUtils.py +31 -0
- {ras_commander-0.48.0 → ras_commander-0.50.0}/ras_commander/__init__.py +10 -0
- {ras_commander-0.48.0 → ras_commander-0.50.0/ras_commander.egg-info}/PKG-INFO +77 -9
- {ras_commander-0.48.0 → ras_commander-0.50.0}/ras_commander.egg-info/SOURCES.txt +4 -0
- {ras_commander-0.48.0 → ras_commander-0.50.0}/setup.py +10 -1
- ras_commander-0.48.0/ras_commander/HdfBase.py +0 -197
- ras_commander-0.48.0/ras_commander/HdfBndry.py +0 -505
- ras_commander-0.48.0/ras_commander/HdfFluvialPluvial.py +0 -326
- ras_commander-0.48.0/ras_commander/HdfMesh.py +0 -353
- ras_commander-0.48.0/ras_commander/HdfPlan.py +0 -205
- ras_commander-0.48.0/ras_commander/HdfResultsPlan.py +0 -401
- ras_commander-0.48.0/ras_commander/HdfResultsXsec.py +0 -272
- ras_commander-0.48.0/ras_commander/HdfUtils.py +0 -518
- ras_commander-0.48.0/ras_commander/RasToGo.py +0 -21
- {ras_commander-0.48.0 → ras_commander-0.50.0}/LICENSE +0 -0
- {ras_commander-0.48.0 → ras_commander-0.50.0}/pyproject.toml +0 -0
- {ras_commander-0.48.0 → ras_commander-0.50.0}/ras_commander.egg-info/dependency_links.txt +0 -0
- {ras_commander-0.48.0 → ras_commander-0.50.0}/ras_commander.egg-info/top_level.txt +0 -0
- {ras_commander-0.48.0 → ras_commander-0.50.0}/setup.cfg +0 -0
PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ras-commander
-Version: 0.48.0
+Version: 0.50.0
 Summary: A Python library for automating HEC-RAS operations
 Home-page: https://github.com/billk-FM/ras-commander
 Author: William M. Katzenmeyer
@@ -61,10 +61,13 @@ Create a virtual environment with conda or venv (ask ChatGPT if you need help)
 
 In your virtual environment, install ras-commander using pip:
 ```
-pip install h5py numpy pandas requests tqdm scipy xarray geopandas matplotlib ras-commander ipython psutil shapely fiona pathlib rtree
+pip install h5py numpy pandas requests tqdm scipy xarray geopandas matplotlib ras-commander ipython psutil shapely fiona pathlib rtree rasterstats
 pip install --upgrade ras-commander
 ```
 
+**Tested with Python 3.11**
+
+
 If you have dependency issues with pip (especially if you have errors with numpy), try clearing your local pip packages 'C:\Users\your_username\AppData\Roaming\Python\' and then creating a new virtual environment.
 
 
@@ -118,26 +121,56 @@ Certainly! I'll provide you with an updated Key Components section and Project O
 - `RasUnsteady`: Manages unsteady flow file operations
 - `RasUtils`: Contains utility functions for file operations and data management
 - `RasExamples`: Manages and loads HEC-RAS example projects
-
+
+### New Components:
+- `HdfBase`: Core functionality for HDF file operations
+- `HdfBndry`: Enhanced boundary condition handling
+- `HdfMesh`: Comprehensive mesh data management
+- `HdfPlan`: Plan data extraction and analysis
+- `HdfResultsMesh`: Advanced mesh results processing
+- `HdfResultsPlan`: Plan results analysis
+- `HdfResultsXsec`: Cross-section results processing
+- `HdfStruc`: Structure data management
+- `HdfPipe`: Pipe network analysis tools
+- `HdfPump`: Pump station analysis capabilities
+- `HdfFluvialPluvial`: Fluvial-pluvial boundary analysis
+- `RasMapper`: RASMapper interface
+- `RasToGo`: Go-Consequences integration
+- `HdfPlot` & `HdfResultsPlot`: Specialized plotting utilities
+
+## Project Organization Diagram
 
 ## Project Organization Diagram
 
 ```
 ras_commander
-├── .github
-│   └── workflows
-│       └── python-package.yml
 ├── ras_commander
 │   ├── __init__.py
 │   ├── _version.py
+│   ├── Decorators.py
+│   ├── LoggingConfig.py
 │   ├── RasCmdr.py
 │   ├── RasExamples.py
 │   ├── RasGeo.py
-│   ├── RasHdf.py
 │   ├── RasPlan.py
 │   ├── RasPrj.py
 │   ├── RasUnsteady.py
-│
+│   ├── RasUtils.py
+│   ├── RasToGo.py
+│   ├── RasGpt.py
+│   ├── HdfBase.py
+│   ├── HdfBndry.py
+│   ├── HdfMesh.py
+│   ├── HdfPlan.py
+│   ├── HdfResultsMesh.py
+│   ├── HdfResultsPlan.py
+│   ├── HdfResultsXsec.py
+│   ├── HdfStruc.py
+│   ├── HdfPipe.py
+│   ├── HdfPump.py
+│   ├── HdfFluvialPluvial.py
+│   ├── HdfPlot.py
+│   └── HdfResultsPlot.py
 ├── examples
 │   ├── 01_project_initialization.py
 │   ├── 02_plan_operations.py
@@ -236,10 +269,45 @@ plan_number = "01"
 runtime_data = RasHdf.get_runtime_data(plan_number, ras_object=custom_ras)
 print(runtime_data)
 ```
-
 This class simplifies the process of extracting and analyzing data from HEC-RAS HDF output files, supporting tasks such as post-processing and result visualization.
 
 
+### Infrastructure Analysis
+```python
+from ras_commander import HdfPipe, HdfPump
+
+# Analyze pipe network
+pipe_network = HdfPipe.get_pipe_network(hdf_path)
+conduits = HdfPipe.get_pipe_conduits(hdf_path)
+
+# Analyze pump stations
+pump_stations = HdfPump.get_pump_stations(hdf_path)
+pump_performance = HdfPump.get_pump_station_summary(hdf_path)
+```
+
+### Advanced Results Analysis
+```python
+from ras_commander import HdfResultsMesh
+
+# Get maximum water surface and velocity
+max_ws = HdfResultsMesh.get_mesh_max_ws(hdf_path)
+max_vel = HdfResultsMesh.get_mesh_max_face_v(hdf_path)
+
+# Visualize results
+from ras_commander import HdfResultsPlot
+HdfResultsPlot.plot_results_max_wsel(max_ws)
+```
+
+### Fluvial-Pluvial Analysis
+```python
+from ras_commander import HdfFluvialPluvial
+
+boundary = HdfFluvialPluvial.calculate_fluvial_pluvial_boundary(
+    hdf_path,
+    delta_t=12  # Time threshold in hours
+)
+
+
 ## Documentation
 
 For detailed usage instructions and API documentation, please refer to the [Comprehensive Library Guide](Comprehensive_Library_Guide.md).

README.md

@@ -50,10 +50,13 @@ Create a virtual environment with conda or venv (ask ChatGPT if you need help)
 
 In your virtual environment, install ras-commander using pip:
 ```
-pip install h5py numpy pandas requests tqdm scipy xarray geopandas matplotlib ras-commander ipython psutil shapely fiona pathlib rtree
+pip install h5py numpy pandas requests tqdm scipy xarray geopandas matplotlib ras-commander ipython psutil shapely fiona pathlib rtree rasterstats
 pip install --upgrade ras-commander
 ```
 
+**Tested with Python 3.11**
+
+
 If you have dependency issues with pip (especially if you have errors with numpy), try clearing your local pip packages 'C:\Users\your_username\AppData\Roaming\Python\' and then creating a new virtual environment.
 
 
@@ -107,26 +110,56 @@ Certainly! I'll provide you with an updated Key Components section and Project O
 - `RasUnsteady`: Manages unsteady flow file operations
 - `RasUtils`: Contains utility functions for file operations and data management
 - `RasExamples`: Manages and loads HEC-RAS example projects
-
+
+### New Components:
+- `HdfBase`: Core functionality for HDF file operations
+- `HdfBndry`: Enhanced boundary condition handling
+- `HdfMesh`: Comprehensive mesh data management
+- `HdfPlan`: Plan data extraction and analysis
+- `HdfResultsMesh`: Advanced mesh results processing
+- `HdfResultsPlan`: Plan results analysis
+- `HdfResultsXsec`: Cross-section results processing
+- `HdfStruc`: Structure data management
+- `HdfPipe`: Pipe network analysis tools
+- `HdfPump`: Pump station analysis capabilities
+- `HdfFluvialPluvial`: Fluvial-pluvial boundary analysis
+- `RasMapper`: RASMapper interface
+- `RasToGo`: Go-Consequences integration
+- `HdfPlot` & `HdfResultsPlot`: Specialized plotting utilities
+
+## Project Organization Diagram
 
 ## Project Organization Diagram
 
 ```
 ras_commander
-├── .github
-│   └── workflows
-│       └── python-package.yml
 ├── ras_commander
 │   ├── __init__.py
 │   ├── _version.py
+│   ├── Decorators.py
+│   ├── LoggingConfig.py
 │   ├── RasCmdr.py
 │   ├── RasExamples.py
 │   ├── RasGeo.py
-│   ├── RasHdf.py
 │   ├── RasPlan.py
 │   ├── RasPrj.py
 │   ├── RasUnsteady.py
-│
+│   ├── RasUtils.py
+│   ├── RasToGo.py
+│   ├── RasGpt.py
+│   ├── HdfBase.py
+│   ├── HdfBndry.py
+│   ├── HdfMesh.py
+│   ├── HdfPlan.py
+│   ├── HdfResultsMesh.py
+│   ├── HdfResultsPlan.py
+│   ├── HdfResultsXsec.py
+│   ├── HdfStruc.py
+│   ├── HdfPipe.py
+│   ├── HdfPump.py
+│   ├── HdfFluvialPluvial.py
+│   ├── HdfPlot.py
+│   └── HdfResultsPlot.py
 ├── examples
 │   ├── 01_project_initialization.py
 │   ├── 02_plan_operations.py
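
The new `Hdf*` components listed above follow the same static-method calling convention as the existing `Ras*` classes. A minimal sketch of that convention, restricted to functions that appear elsewhere in this diff (the plan HDF path is hypothetical):

```python
# Minimal sketch of the static-method calling convention of the new Hdf* classes.
# Only calls that appear elsewhere in this diff are used; the path is hypothetical.
from pathlib import Path
from ras_commander import HdfBase, HdfResultsMesh

hdf_path = Path("projects/MyModel/MyModel.p01.hdf")  # hypothetical plan HDF file

# Geometry-level queries (defined in the new HdfBase.py, shown later in this diff)
print(HdfBase.get_projection(hdf_path))                     # WKT projection string or None
print(HdfBase.get_2d_flow_area_names_and_counts(hdf_path))  # [(area_name, cell_count), ...]

# Results-level query (see the README usage examples in the next hunk)
max_ws = HdfResultsMesh.get_mesh_max_ws(hdf_path)
```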
@@ -225,10 +258,45 @@ plan_number = "01"
 runtime_data = RasHdf.get_runtime_data(plan_number, ras_object=custom_ras)
 print(runtime_data)
 ```
-
 This class simplifies the process of extracting and analyzing data from HEC-RAS HDF output files, supporting tasks such as post-processing and result visualization.
 
 
+### Infrastructure Analysis
+```python
+from ras_commander import HdfPipe, HdfPump
+
+# Analyze pipe network
+pipe_network = HdfPipe.get_pipe_network(hdf_path)
+conduits = HdfPipe.get_pipe_conduits(hdf_path)
+
+# Analyze pump stations
+pump_stations = HdfPump.get_pump_stations(hdf_path)
+pump_performance = HdfPump.get_pump_station_summary(hdf_path)
+```
+
+### Advanced Results Analysis
+```python
+from ras_commander import HdfResultsMesh
+
+# Get maximum water surface and velocity
+max_ws = HdfResultsMesh.get_mesh_max_ws(hdf_path)
+max_vel = HdfResultsMesh.get_mesh_max_face_v(hdf_path)
+
+# Visualize results
+from ras_commander import HdfResultsPlot
+HdfResultsPlot.plot_results_max_wsel(max_ws)
+```
+
+### Fluvial-Pluvial Analysis
+```python
+from ras_commander import HdfFluvialPluvial
+
+boundary = HdfFluvialPluvial.calculate_fluvial_pluvial_boundary(
+    hdf_path,
+    delta_t=12  # Time threshold in hours
+)
+
+
 ## Documentation
 
 For detailed usage instructions and API documentation, please refer to the [Comprehensive Library Guide](Comprehensive_Library_Guide.md).

ras_commander/Decorators.py

@@ -16,7 +16,6 @@ def log_call(func):
         return result
     return wrapper
 
-
 def standardize_input(file_type: str = 'plan_hdf'):
     """
     Decorator to standardize input for HDF file operations.
@@ -41,6 +40,14 @@ def standardize_input(file_type: str = 'plan_hdf'):
         def wrapper(*args, **kwargs):
             logger = logging.getLogger(func.__module__)
 
+            # Check if the function expects an hdf_path parameter
+            sig = inspect.signature(func)
+            param_names = list(sig.parameters.keys())
+
+            # If first parameter is 'hdf_file', skip path processing
+            if param_names and param_names[0] == 'hdf_file':
+                return func(*args, **kwargs)
+
             # Handle both static method calls and regular function calls
             if args and isinstance(args[0], type):
                 # Static method call, remove the class argument
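
The hunk above makes `standardize_input` inspect the wrapped function's signature and skip path standardization entirely when the first parameter is named `hdf_file`, i.e. when the function expects an already-open `h5py.File` (as `HdfBase.get_attrs` does later in this diff). A self-contained sketch of that dispatch pattern, separate from the ras-commander implementation:

```python
# Stand-alone illustration of the signature check added above: a decorator that
# skips its path-handling logic when the wrapped function's first parameter is
# named 'hdf_file'. This mirrors the pattern, not the ras-commander code itself.
import inspect
from functools import wraps

def skip_if_first_param_is_hdf_file(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        param_names = list(inspect.signature(func).parameters.keys())
        if param_names and param_names[0] == 'hdf_file':
            # Caller already holds an open file object; pass through untouched.
            return func(*args, **kwargs)
        print("path standardization would run here")
        return func(*args, **kwargs)
    return wrapper

@skip_if_first_param_is_hdf_file
def read_attrs(hdf_file, attr_path):
    return f"reading {attr_path} from {hdf_file}"

print(read_attrs("open-file-object", "Plan Data/Plan Information"))
```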
@@ -49,6 +56,16 @@ def standardize_input(file_type: str = 'plan_hdf'):
             hdf_input = kwargs.pop('hdf_path', None) or kwargs.pop('hdf_input', None) or (args[0] if args else None)
             ras_object = kwargs.pop('ras_object', None) or (args[1] if len(args) > 1 else None)
 
+            # If no hdf_input provided, return the function unmodified
+            if hdf_input is None:
+                return func(*args, **kwargs)
+
+            # NEW: If input is already a Path and exists, use it directly regardless of file_type
+            if isinstance(hdf_input, Path) and hdf_input.is_file():
+                logger.info(f"Using existing HDF file: {hdf_input}")
+                new_args = (hdf_input,) + args[1:]
+                return func(*new_args, **kwargs)
+
             hdf_path = None
 
             # If hdf_input is already an h5py.File object, use its filename
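
The second addition above gives callers a shortcut: if the decorated function receives a `pathlib.Path` that already points at an existing file, the decorator logs it and forwards it unchanged instead of resolving a plan or geometry number through the ras object. From the caller's side that behavior looks roughly like this (the file path is hypothetical; `get_2d_flow_area_names_and_counts` is taken from the new HdfBase.py below):

```python
# Caller-side view of the Path pass-through added above. The path is hypothetical;
# the short-circuit returns before any ras_object lookup, so an existing HDF file
# can be read without resolving it through an initialized RAS project.
from pathlib import Path
from ras_commander import HdfBase

existing_hdf = Path(r"C:\Projects\MyModel\MyModel.p01.hdf")  # hypothetical, must exist

if existing_hdf.is_file():
    # The decorator logs "Using existing HDF file: ..." and forwards the Path unchanged.
    flow_areas = HdfBase.get_2d_flow_area_names_and_counts(existing_hdf)
    print(flow_areas)
```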

ras_commander/HdfBase.py (new file)

@@ -0,0 +1,307 @@
+"""
+HdfBase: Core HDF File Operations for HEC-RAS
+
+This module provides fundamental methods for interacting with HEC-RAS HDF files.
+It serves as a foundation for more specialized HDF classes.
+
+Attribution:
+    Derived from the rashdf library (https://github.com/fema-ffrd/rashdf)
+    Copyright (c) 2024 fema-ffrd - MIT License
+
+Features:
+    - Time parsing and conversion utilities
+    - HDF attribute and dataset access
+    - Geometric data extraction
+    - 2D flow area information retrieval
+
+Classes:
+    HdfBase: Base class containing static methods for HDF operations
+
+Key Methods:
+    Time Operations:
+        - get_simulation_start_time(): Get simulation start datetime
+        - get_unsteady_timestamps(): Get unsteady output timestamps
+        - parse_ras_datetime(): Parse RAS datetime strings
+
+    Data Access:
+        - get_2d_flow_area_names_and_counts(): Get 2D flow area info
+        - get_projection(): Get spatial projection
+        - get_attrs(): Access HDF attributes
+        - get_dataset_info(): Explore HDF structure
+        - get_polylines_from_parts(): Extract geometric polylines
+
+Example:
+    ```python
+    from ras_commander import HdfBase
+
+    with h5py.File('model.hdf', 'r') as hdf:
+        start_time = HdfBase.get_simulation_start_time(hdf)
+        timestamps = HdfBase.get_unsteady_timestamps(hdf)
+    ```
+"""
+import re
+from datetime import datetime, timedelta
+import h5py
+import numpy as np
+import pandas as pd
+import xarray as xr
+from typing import List, Tuple, Union, Optional, Dict, Any
+from pathlib import Path
+import logging
+from shapely.geometry import LineString, MultiLineString
+
+from .HdfUtils import HdfUtils
+from .Decorators import standardize_input, log_call
+from .LoggingConfig import setup_logging, get_logger
+
+logger = get_logger(__name__)
+
+class HdfBase:
+    """
+    Base class for HEC-RAS HDF file operations.
+
+    This class provides static methods for fundamental HDF file operations,
+    including time parsing, attribute access, and geometric data extraction.
+    All methods are designed to work with h5py.File objects or pathlib.Path
+    inputs.
+
+    Note:
+        This class is not meant to be instantiated. All methods are static
+        and should be called directly from the class.
+    """
+
+    @staticmethod
+    def get_simulation_start_time(hdf_file: h5py.File) -> datetime:
+        """
+        Extract the simulation start time from the HDF file.
+
+        Args:
+            hdf_file: Open HDF file object containing RAS simulation data.
+
+        Returns:
+            datetime: Simulation start time as a datetime object.
+
+        Raises:
+            ValueError: If Plan Information is not found or start time cannot be parsed.
+
+        Note:
+            Expects 'Plan Data/Plan Information' group with 'Simulation Start Time' attribute.
+        """
+        plan_info = hdf_file.get("Plan Data/Plan Information")
+        if plan_info is None:
+            raise ValueError("Plan Information not found in HDF file")
+        time_str = plan_info.attrs.get('Simulation Start Time')
+        return HdfUtils.parse_ras_datetime(time_str.decode('utf-8'))
+
+    @staticmethod
+    def get_unsteady_timestamps(hdf_file: h5py.File) -> List[datetime]:
+        """
+        Extract the list of unsteady timestamps from the HDF file.
+
+        Args:
+            hdf_file (h5py.File): Open HDF file object.
+
+        Returns:
+            List[datetime]: A list of datetime objects representing the unsteady timestamps.
+        """
+        group_path = "Results/Unsteady/Output/Output Blocks/Base Output/Unsteady Time Series/Time Date Stamp (ms)"
+        raw_datetimes = hdf_file[group_path][:]
+        return [HdfUtils.parse_ras_datetime_ms(x.decode("utf-8")) for x in raw_datetimes]
+
+    @staticmethod
+    @standardize_input(file_type='plan_hdf')
+    def get_2d_flow_area_names_and_counts(hdf_path: Path) -> List[Tuple[str, int]]:
+        """
+        Get the names and cell counts of 2D flow areas from the HDF file.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+
+        Returns:
+            List[Tuple[str, int]]: A list of tuples containing the name and cell count of each 2D flow area.
+
+        Raises:
+            ValueError: If there's an error reading the HDF file or accessing the required data.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf_file:
+                flow_area_2d_path = "Geometry/2D Flow Areas"
+                if flow_area_2d_path not in hdf_file:
+                    return []
+
+                attributes = hdf_file[f"{flow_area_2d_path}/Attributes"][()]
+                names = [HdfUtils.convert_ras_string(name) for name in attributes["Name"]]
+
+                cell_info = hdf_file[f"{flow_area_2d_path}/Cell Info"][()]
+                cell_counts = [info[1] for info in cell_info]
+
+                return list(zip(names, cell_counts))
+        except Exception as e:
+            logger.error(f"Error reading 2D flow area names and counts from {hdf_path}: {str(e)}")
+            raise ValueError(f"Failed to get 2D flow area names and counts: {str(e)}")
+
+    @staticmethod
+    @standardize_input(file_type='plan_hdf')
+    def get_projection(hdf_path: Path) -> Optional[str]:
+        """
+        Get the projection information from the HDF file.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+
+        Returns:
+            Optional[str]: The projection information as a string, or None if not found.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf_file:
+                proj_wkt = hdf_file.attrs.get("Projection")
+                if proj_wkt is None:
+                    return None
+                if isinstance(proj_wkt, bytes) or isinstance(proj_wkt, np.bytes_):
+                    proj_wkt = proj_wkt.decode("utf-8")
+                return proj_wkt
+        except Exception as e:
+            logger.error(f"Error reading projection from {hdf_path}: {str(e)}")
+            return None
+
+    @staticmethod
+    @standardize_input(file_type='plan_hdf')
+    def get_attrs(hdf_file: h5py.File, attr_path: str) -> Dict[str, Any]:
+        """
+        Get attributes from an HDF file at a specified path.
+
+        Args:
+            hdf_file (h5py.File): The opened HDF file.
+            attr_path (str): Path to the attributes in the HDF file.
+
+        Returns:
+            Dict[str, Any]: Dictionary of attributes.
+        """
+        try:
+            if attr_path not in hdf_file:
+                logger.warning(f"Path {attr_path} not found in HDF file")
+                return {}
+
+            return HdfUtils.hdf5_attrs_to_dict(hdf_file[attr_path].attrs)
+        except Exception as e:
+            logger.error(f"Error getting attributes from {attr_path}: {str(e)}")
+            return {}
+
+    @staticmethod
+    @standardize_input(file_type='plan_hdf')
+    def get_dataset_info(file_path: Path, group_path: str = '/') -> None:
+        """
+        Recursively explore and print the structure of an HDF5 file.
+
+        Displays detailed information about groups, datasets, and their attributes
+        in a hierarchical format.
+
+        Args:
+            file_path: Path to the HDF5 file.
+            group_path: Starting group path to explore (default: root '/').
+
+        Prints:
+            - Group and dataset names with hierarchical indentation
+            - Dataset shapes and data types
+            - All attributes for groups and datasets
+        """
+        def recurse(name, obj, indent=0):
+            spacer = " " * indent
+            if isinstance(obj, h5py.Group):
+                print(f"{spacer}Group: {name}")
+                HdfBase.print_attrs(name, obj)
+                for key in obj:
+                    recurse(f"{name}/{key}", obj[key], indent+1)
+            elif isinstance(obj, h5py.Dataset):
+                print(f"{spacer}Dataset: {name}")
+                print(f"{spacer} Shape: {obj.shape}")
+                print(f"{spacer} Dtype: {obj.dtype}")
+                HdfBase.print_attrs(name, obj)
+            else:
+                print(f"{spacer}Unknown object: {name}")
+
+        try:
+            with h5py.File(file_path, 'r') as hdf_file:
+                if group_path in hdf_file:
+                    print("")
+                    print(f"Exploring group: {group_path}\n")
+                    group = hdf_file[group_path]
+                    for key in group:
+                        print("")
+                        recurse(f"{group_path}/{key}", group[key], indent=1)
+                else:
+                    print(f"Group path '{group_path}' not found in the HDF5 file.")
+        except Exception as e:
+            print(f"Error exploring HDF5 file: {e}")
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_polylines_from_parts(hdf_path: Path, path: str, info_name: str = "Polyline Info",
+                                 parts_name: str = "Polyline Parts",
+                                 points_name: str = "Polyline Points") -> List[LineString]:
+        """
+        Extract polylines from HDF file parts data.
+
+        Args:
+            hdf_path: Path to the HDF file.
+            path: Internal HDF path to polyline data.
+            info_name: Name of polyline info dataset.
+            parts_name: Name of polyline parts dataset.
+            points_name: Name of polyline points dataset.
+
+        Returns:
+            List of Shapely LineString/MultiLineString geometries.
+
+        Note:
+            Expects HDF datasets containing:
+            - Polyline information (start points and counts)
+            - Parts information for multi-part lines
+            - Point coordinates
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf_file:
+                polyline_info_path = f"{path}/{info_name}"
+                polyline_parts_path = f"{path}/{parts_name}"
+                polyline_points_path = f"{path}/{points_name}"
+
+                polyline_info = hdf_file[polyline_info_path][()]
+                polyline_parts = hdf_file[polyline_parts_path][()]
+                polyline_points = hdf_file[polyline_points_path][()]
+
+                geoms = []
+                for pnt_start, pnt_cnt, part_start, part_cnt in polyline_info:
+                    points = polyline_points[pnt_start : pnt_start + pnt_cnt]
+                    if part_cnt == 1:
+                        geoms.append(LineString(points))
+                    else:
+                        parts = polyline_parts[part_start : part_start + part_cnt]
+                        geoms.append(
+                            MultiLineString(
+                                list(
+                                    points[part_pnt_start : part_pnt_start + part_pnt_cnt]
+                                    for part_pnt_start, part_pnt_cnt in parts
+                                )
+                            )
+                        )
+                return geoms
+        except Exception as e:
+            logger.error(f"Error getting polylines: {str(e)}")
+            return []
+
+    @staticmethod
+    def print_attrs(name: str, obj: Union[h5py.Dataset, h5py.Group]) -> None:
+        """
+        Print the attributes of an HDF5 object (Dataset or Group).
+
+        Args:
+            name (str): Name of the object
+            obj (Union[h5py.Dataset, h5py.Group]): HDF5 object whose attributes are to be printed
+        """
+        if len(obj.attrs) > 0:
+            print(f" Attributes for {name}:")
+            for key, value in obj.attrs.items():
+                print(f" {key}: {value}")
+
+
+
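
For orientation, here is a short usage sketch that exercises the HdfBase methods defined above, following the module docstring's own example; the file path and the `/Geometry` group path are hypothetical:

```python
# Usage sketch for the new HdfBase class, following its module docstring.
# The HDF file path and the internal group path are hypothetical.
from pathlib import Path
import h5py
from ras_commander import HdfBase

hdf_path = Path("projects/MyModel/MyModel.p01.hdf")  # hypothetical plan HDF file

# Path-based helpers (input standardized by @standardize_input)
HdfBase.get_dataset_info(hdf_path, group_path="/Geometry")   # prints the HDF tree
print(HdfBase.get_projection(hdf_path))                      # WKT string or None

# File-object-based helpers
with h5py.File(hdf_path, "r") as hdf:
    start = HdfBase.get_simulation_start_time(hdf)
    stamps = HdfBase.get_unsteady_timestamps(hdf)
    print(start, len(stamps))
```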