ras-commander 0.49.0__tar.gz → 0.51.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ras_commander-0.49.0/ras_commander.egg-info → ras_commander-0.51.0}/PKG-INFO +5 -2
- {ras_commander-0.49.0 → ras_commander-0.51.0}/README.md +4 -1
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/HdfBndry.py +8 -8
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/HdfFluvialPluvial.py +132 -57
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/HdfMesh.py +109 -82
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/HdfPlot.py +1 -1
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/HdfResultsMesh.py +25 -37
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/LoggingConfig.py +13 -3
- {ras_commander-0.49.0 → ras_commander-0.51.0/ras_commander.egg-info}/PKG-INFO +5 -2
- {ras_commander-0.49.0 → ras_commander-0.51.0}/setup.py +11 -2
- {ras_commander-0.49.0 → ras_commander-0.51.0}/LICENSE +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/pyproject.toml +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/Decorators.py +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/HdfBase.py +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/HdfInfiltration.py +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/HdfPipe.py +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/HdfPlan.py +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/HdfPump.py +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/HdfResultsPlan.py +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/HdfResultsPlot.py +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/HdfResultsXsec.py +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/HdfStruc.py +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/HdfUtils.py +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/HdfXsec.py +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/RasCmdr.py +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/RasExamples.py +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/RasGeo.py +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/RasGpt.py +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/RasMapper.py +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/RasPlan.py +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/RasPrj.py +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/RasToGo.py +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/RasUnsteady.py +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/RasUtils.py +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander/__init__.py +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander.egg-info/SOURCES.txt +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander.egg-info/dependency_links.txt +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/ras_commander.egg-info/top_level.txt +0 -0
- {ras_commander-0.49.0 → ras_commander-0.51.0}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ras-commander
-Version: 0.49.0
+Version: 0.51.0
 Summary: A Python library for automating HEC-RAS operations
 Home-page: https://github.com/billk-FM/ras-commander
 Author: William M. Katzenmeyer
@@ -61,10 +61,13 @@ Create a virtual environment with conda or venv (ask ChatGPT if you need help)
 
 In your virtual environment, install ras-commander using pip:
 ```
-pip install h5py numpy pandas requests tqdm scipy xarray geopandas matplotlib ras-commander ipython psutil shapely fiona pathlib rtree
+pip install h5py numpy pandas requests tqdm scipy xarray geopandas matplotlib ras-commander ipython tqdm psutil shapely fiona pathlib rtree rasterstats
 pip install --upgrade ras-commander
 ```
 
+**Tested with Python 3.11**
+
+
 If you have dependency issues with pip (especially if you have errors with numpy), try clearing your local pip packages 'C:\Users\your_username\AppData\Roaming\Python\' and then creating a new virtual environment.
 
 
@@ -50,10 +50,13 @@ Create a virtual environment with conda or venv (ask ChatGPT if you need help)
 
 In your virtual environment, install ras-commander using pip:
 ```
-pip install h5py numpy pandas requests tqdm scipy xarray geopandas matplotlib ras-commander ipython psutil shapely fiona pathlib rtree
+pip install h5py numpy pandas requests tqdm scipy xarray geopandas matplotlib ras-commander ipython tqdm psutil shapely fiona pathlib rtree rasterstats
 pip install --upgrade ras-commander
 ```
 
+**Tested with Python 3.11**
+
+
 If you have dependency issues with pip (especially if you have errors with numpy), try clearing your local pip packages 'C:\Users\your_username\AppData\Roaming\Python\' and then creating a new virtual environment.
 
 
@@ -79,7 +79,7 @@ class HdfBndry:
 
         # Get geometries
         bc_line_data = hdf_file[bc_lines_path]
-        geoms =
+        geoms = HdfBase.get_polylines_from_parts(hdf_path, bc_lines_path)
 
         # Get attributes
         attributes = pd.DataFrame(bc_line_data["Attributes"][()])
@@ -94,7 +94,7 @@ class HdfBndry:
         gdf = gpd.GeoDataFrame(
             attributes,
             geometry=geoms,
-            crs=
+            crs=HdfBase.get_projection(hdf_file)
         )
 
         # Add ID column if not present
@@ -134,11 +134,11 @@ class HdfBndry:
             names = np.vectorize(HdfUtils.convert_ras_string)(
                 bl_line_data["Attributes"][()]["Name"]
             )
-            geoms =
+            geoms = HdfBase.get_polylines_from_parts(hdf_path, breaklines_path)
             return gpd.GeoDataFrame(
                 {"bl_id": bl_line_ids, "Name": names, "geometry": geoms},
                 geometry="geometry",
-                crs=
+                crs=HdfBase.get_projection(hdf_file),
             )
         except Exception as e:
             logger.error(f"Error reading breaklines: {str(e)}")
@@ -186,7 +186,7 @@ class HdfBndry:
             return gpd.GeoDataFrame(
                 {"rr_id": rr_ids, "Name": names, "geometry": geoms},
                 geometry="geometry",
-                crs=
+                crs=HdfBase.get_projection(hdf_file),
             )
         except Exception as e:
             logger.error(f"Error reading refinement regions: {str(e)}")
@@ -229,7 +229,7 @@ class HdfBndry:
             except ValueError:
                 types = np.array([""] * attributes.shape[0])
 
-            geoms =
+            geoms = HdfBase.get_polylines_from_parts(hdf_path, reference_lines_path)
 
             gdf = gpd.GeoDataFrame(
                 {
@@ -240,7 +240,7 @@ class HdfBndry:
                     "geometry": geoms,
                 },
                 geometry="geometry",
-                crs=
+                crs=HdfBase.get_projection(hdf_file),
             )
 
             # Filter by mesh_name if provided
@@ -296,7 +296,7 @@ class HdfBndry:
                     "geometry": list(map(Point, points)),
                 },
                 geometry="geometry",
-                crs=
+                crs=HdfBase.get_projection(hdf_file),
             )
 
             # Filter by mesh_name if provided
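
All of the HdfBndry hunks above make the same two substitutions: polyline assembly moves to `HdfBase.get_polylines_from_parts`, and the CRS comes from `HdfBase.get_projection` instead of being left blank. A minimal sketch of the resulting pattern, assuming only the two `HdfBase` helpers named in the diff; the `read_lines_as_gdf` wrapper and its attribute handling are illustrative, not part of the library:

```python
# Sketch of the GeoDataFrame-assembly pattern the HdfBndry hunks converge on.
# HdfBase.get_polylines_from_parts and HdfBase.get_projection are the helpers
# named in the diff; everything else here is a hypothetical wrapper.
from pathlib import Path

import h5py
import pandas as pd
import geopandas as gpd

from ras_commander.HdfBase import HdfBase

def read_lines_as_gdf(hdf_path: Path, lines_path: str) -> gpd.GeoDataFrame:
    with h5py.File(hdf_path, "r") as hdf_file:
        # Tabular attributes stored alongside the polyline parts
        attributes = pd.DataFrame(hdf_file[lines_path]["Attributes"][()])
        # Polyline geometries reassembled from the HDF "parts" layout
        geoms = HdfBase.get_polylines_from_parts(hdf_path, lines_path)
        # One GeoDataFrame, projected with the file's stored CRS
        return gpd.GeoDataFrame(attributes, geometry=geoms,
                                crs=HdfBase.get_projection(hdf_file))
```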
@@ -14,7 +14,7 @@ from typing import Dict, List, Tuple
 import pandas as pd
 import geopandas as gpd
 from collections import defaultdict
-from shapely.geometry import LineString
+from shapely.geometry import LineString, MultiLineString  # Added MultiLineString import
 from tqdm import tqdm
 from .HdfMesh import HdfMesh
 from .HdfUtils import HdfUtils
@@ -53,7 +53,9 @@ class HdfFluvialPluvial:
         ...     delta_t=12
         ... )
     """
-
+    def __init__(self):
+        self.logger = get_logger(__name__)  # Initialize logger with module name
+
     @staticmethod
     @standardize_input(file_type='plan_hdf')
     def calculate_fluvial_pluvial_boundary(hdf_path: Path, delta_t: float = 12) -> gpd.GeoDataFrame:
@@ -93,106 +95,179 @@ class HdfFluvialPluvial:
                 raise ValueError("No maximum water surface data found in HDF file")
 
             # Convert timestamps using the renamed utility function
+            logger.info("Converting maximum water surface timestamps...")
             if 'maximum_water_surface_time' in max_ws_df.columns:
                 max_ws_df['maximum_water_surface_time'] = max_ws_df['maximum_water_surface_time'].apply(
                     lambda x: HdfUtils.parse_ras_datetime(x) if isinstance(x, str) else x
                 )
 
             # Process cell adjacencies
+            logger.info("Processing cell adjacencies...")
             cell_adjacency, common_edges = HdfFluvialPluvial._process_cell_adjacencies(cell_polygons_gdf)
 
             # Get cell times from max_ws_df
+            logger.info("Extracting cell times from maximum water surface data...")
             cell_times = max_ws_df.set_index('cell_id')['maximum_water_surface_time'].to_dict()
 
             # Identify boundary edges
+            logger.info("Identifying boundary edges...")
             boundary_edges = HdfFluvialPluvial._identify_boundary_edges(
                 cell_adjacency, common_edges, cell_times, delta_t
             )
 
             # Join adjacent LineStrings into simple LineStrings
+            logger.info("Joining adjacent LineStrings into simple LineStrings...")
             joined_lines = []
-
-
-
-            if
-
-
-
-
-
-
-
-
-
-
+
+            def get_coords(geom):
+                """Helper function to get coordinates from either LineString or MultiLineString"""
+                if isinstance(geom, LineString):
+                    return list(geom.coords)
+                elif isinstance(geom, MultiLineString):
+                    return list(geom.geoms[0].coords)
+                return None
+
+            # Create a dictionary to store start and end points for each line
+            line_endpoints = {}
+            for i, edge in enumerate(boundary_edges):
+                coords = get_coords(edge)
+                if coords:
+                    line_endpoints[i] = (coords[0], coords[-1])
+
+            # Process lines in order
+            used_indices = set()
+            while len(used_indices) < len(boundary_edges):
+                current_line = []
+                current_points = []
+
+                # Find a new starting line if needed
+                for i in range(len(boundary_edges)):
+                    if i not in used_indices:
+                        current_line.append(boundary_edges[i])
+                        coords = get_coords(boundary_edges[i])
+                        if coords:
+                            current_points.extend(coords)
+                        used_indices.add(i)
+                        break
+
+                # Continue adding connected lines
+                while True:
+                    found_next = False
+                    current_end = current_points[-1] if current_points else None
+
+                    # Look for the next connected line
+                    for i, (start, end) in line_endpoints.items():
+                        if i not in used_indices and current_end:
+                            if start == current_end:
+                                # Add line in forward direction
+                                coords = get_coords(boundary_edges[i])
+                                if coords:
+                                    current_points.extend(coords[1:])  # Skip first point to avoid duplication
+                                current_line.append(boundary_edges[i])
+                                used_indices.add(i)
+                                found_next = True
+                                break
+                            elif end == current_end:
+                                # Add line in reverse direction
+                                coords = get_coords(boundary_edges[i])
+                                if coords:
+                                    current_points.extend(reversed(coords[:-1]))  # Skip last point to avoid duplication
+                                current_line.append(boundary_edges[i])
+                                used_indices.add(i)
+                                found_next = True
+                                break
+
+                    if not found_next:
+                        break
+
+                # Create a single LineString from the collected points
+                if current_points:
+                    joined_lines.append(LineString(current_points))
 
             # Create final GeoDataFrame with CRS from cell_polygons_gdf
+            logger.info("Creating final GeoDataFrame for boundaries...")
             boundary_gdf = gpd.GeoDataFrame(
                 geometry=joined_lines,
                 crs=cell_polygons_gdf.crs
             )
 
             # Clean up intermediate dataframes
+            logger.info("Cleaning up intermediate dataframes...")
             del cell_polygons_gdf
             del max_ws_df
 
+            logger.info("Fluvial-pluvial boundary calculation completed successfully.")
             return boundary_gdf
 
         except Exception as e:
-            logger.error(f"Error calculating fluvial-pluvial boundary: {str(e)}")
-
-
+            self.logger.error(f"Error calculating fluvial-pluvial boundary: {str(e)}")
+            return None
+
+
     @staticmethod
     def _process_cell_adjacencies(cell_polygons_gdf: gpd.GeoDataFrame) -> Tuple[Dict[int, List[int]], Dict[int, Dict[int, LineString]]]:
         """
-
-
+        Optimized method to process cell adjacencies by extracting shared edges directly.
+
         Args:
             cell_polygons_gdf (gpd.GeoDataFrame): GeoDataFrame containing 2D mesh cell polygons
-
+                with 'cell_id' and 'geometry' columns.
 
         Returns:
             Tuple containing:
-            - Dict[int, List[int]]: Dictionary mapping cell IDs to lists of adjacent cell IDs
+            - Dict[int, List[int]]: Dictionary mapping cell IDs to lists of adjacent cell IDs.
             - Dict[int, Dict[int, LineString]]: Nested dictionary storing common edges between cells,
-
-
-        Note:
-            Uses R-tree spatial indexing to efficiently identify potential neighboring cells
-            before performing more detailed geometric operations.
+              where common_edges[cell1][cell2] gives the shared boundary.
         """
-        from rtree import index
         cell_adjacency = defaultdict(list)
         common_edges = defaultdict(dict)
-        idx = index.Index()
-
-        for i, geom in enumerate(cell_polygons_gdf.geometry):
-            idx.insert(i, geom.bounds)
-
-        with tqdm(total=len(cell_polygons_gdf), desc="Processing cell adjacencies") as pbar:
-            for idx1, row1 in cell_polygons_gdf.iterrows():
-                cell_id1 = row1['cell_id']
-                poly1 = row1['geometry']
-                potential_neighbors = list(idx.intersection(poly1.bounds))
-
-                for idx2 in potential_neighbors:
-                    if idx1 >= idx2:
-                        continue
-
-                    row2 = cell_polygons_gdf.iloc[idx2]
-                    cell_id2 = row2['cell_id']
-                    poly2 = row2['geometry']
-
-                    if poly1.touches(poly2):
-                        intersection = poly1.intersection(poly2)
-                        if isinstance(intersection, LineString):
-                            cell_adjacency[cell_id1].append(cell_id2)
-                            cell_adjacency[cell_id2].append(cell_id1)
-                            common_edges[cell_id1][cell_id2] = intersection
-                            common_edges[cell_id2][cell_id1] = intersection
-
-                pbar.update(1)
 
+        # Build an edge to cells mapping
+        edge_to_cells = defaultdict(set)
+
+        # Function to generate edge keys
+        def edge_key(coords1, coords2, precision=8):
+            # Round coordinates
+            coords1 = tuple(round(coord, precision) for coord in coords1)
+            coords2 = tuple(round(coord, precision) for coord in coords2)
+            # Create sorted key to handle edge direction
+            return tuple(sorted([coords1, coords2]))
+
+        # For each polygon, extract edges
+        for idx, row in cell_polygons_gdf.iterrows():
+            cell_id = row['cell_id']
+            geom = row['geometry']
+            if geom.is_empty or not geom.is_valid:
+                continue
+            # Get exterior coordinates
+            coords = list(geom.exterior.coords)
+            num_coords = len(coords)
+            for i in range(num_coords - 1):
+                coord1 = coords[i]
+                coord2 = coords[i + 1]
+                key = edge_key(coord1, coord2)
+                edge_to_cells[key].add(cell_id)
+
+        # Now, process edge_to_cells to build adjacency
+        for edge, cells in edge_to_cells.items():
+            cells = list(cells)
+            if len(cells) >= 2:
+                # For all pairs of cells sharing this edge
+                for i in range(len(cells)):
+                    for j in range(i + 1, len(cells)):
+                        cell1 = cells[i]
+                        cell2 = cells[j]
+                        # Update adjacency
+                        if cell2 not in cell_adjacency[cell1]:
+                            cell_adjacency[cell1].append(cell2)
+                        if cell1 not in cell_adjacency[cell2]:
+                            cell_adjacency[cell2].append(cell1)
+                        # Store common edge
+                        common_edge = LineString([edge[0], edge[1]])
+                        common_edges[cell1][cell2] = common_edge
+                        common_edges[cell2][cell1] = common_edge
+
+        logger.info("Cell adjacencies processed successfully.")
         return cell_adjacency, common_edges
 
     @staticmethod
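
The rewritten `_process_cell_adjacencies` drops the R-tree candidate search and the per-pair `touches()`/`intersection()` tests in favor of a single hash pass: every polygon edge is reduced to a rounded, direction-normalized key, and any key claimed by two cells is a shared face. A self-contained toy demo of that edge-key idea (two unit squares, synthetic data rather than HEC-RAS output):

```python
# Minimal demo of the edge-key adjacency technique used by the new
# _process_cell_adjacencies: round each edge's endpoints, sort them so
# direction doesn't matter, and bucket cells by edge key.
from collections import defaultdict
from shapely.geometry import Polygon

cells = {
    0: Polygon([(0, 0), (1, 0), (1, 1), (0, 1)]),
    1: Polygon([(1, 0), (2, 0), (2, 1), (1, 1)]),  # shares the x=1 edge with cell 0
}

def edge_key(a, b, precision=8):
    a = tuple(round(c, precision) for c in a)
    b = tuple(round(c, precision) for c in b)
    return tuple(sorted([a, b]))

edge_to_cells = defaultdict(set)
for cell_id, poly in cells.items():
    coords = list(poly.exterior.coords)  # closed ring: first point repeats at the end
    for p, q in zip(coords, coords[1:]):
        edge_to_cells[edge_key(p, q)].add(cell_id)

adjacency = defaultdict(set)
for shared in edge_to_cells.values():
    if len(shared) >= 2:
        for c in shared:
            adjacency[c] |= shared - {c}

print(dict(adjacency))  # {0: {1}, 1: {0}}
```

This runs in roughly linear time in the total edge count, but it relies on adjacent cells sharing vertex-identical edges, which HEC-RAS 2D meshes provide and arbitrary polygon layers may not; the `precision=8` rounding absorbs floating-point noise.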
@@ -9,15 +9,25 @@ All methods are designed to work with the mesh geometry data stored in
 HEC-RAS HDF files, providing functionality to retrieve and process various aspects
 of the 2D flow areas and their associated mesh structures.
 
+
 List of Functions:
 -----------------
 get_mesh_area_names()
     Returns list of 2D mesh area names
 get_mesh_areas()
     Returns 2D flow area perimeter polygons
-
+get_mesh_cell_polygons()
     Returns 2D flow mesh cell polygons
-
+get_mesh_cell_points()
+    Returns 2D flow mesh cell center points
+get_mesh_cell_faces()
+    Returns 2D flow mesh cell faces
+get_mesh_area_attributes()
+    Returns geometry 2D flow area attributes
+get_mesh_face_property_tables()
+    Returns Face Property Tables for each Face in all 2D Flow Areas
+get_mesh_cell_property_tables()
+    Returns Cell Property Tables for each Cell in all 2D Flow Areas
 
 Each function is decorated with @standardize_input and @log_call for consistent
 input handling and logging functionality.
@@ -148,49 +158,45 @@ class HdfMesh:
                 if not mesh_area_names:
                     return GeoDataFrame()
 
+                # Get face geometries once
                 face_gdf = HdfMesh.get_mesh_cell_faces(hdf_path)
+
+                # Pre-allocate lists for better memory efficiency
+                all_mesh_names = []
+                all_cell_ids = []
+                all_geometries = []
+
+                for mesh_name in mesh_area_names:
+                    # Get cell face info in one read
+                    cell_face_info = hdf_file[f"Geometry/2D Flow Areas/{mesh_name}/Cells Face and Orientation Info"][()]
+                    cell_face_values = hdf_file[f"Geometry/2D Flow Areas/{mesh_name}/Cells Face and Orientation Values"][()][:, 0]
+
+                    # Create face lookup dictionary for this mesh
+                    mesh_faces_dict = dict(face_gdf[face_gdf.mesh_name == mesh_name][["face_id", "geometry"]].values)
+
+                    # Process each cell
+                    for cell_id, (start, length) in enumerate(cell_face_info[:, :2]):
+                        face_ids = cell_face_values[start:start + length]
+                        face_geoms = [mesh_faces_dict[face_id] for face_id in face_ids]
+
+                        # Create polygon
+                        polygons = list(polygonize(face_geoms))
+                        if polygons:
+                            all_mesh_names.append(mesh_name)
+                            all_cell_ids.append(cell_id)
+                            all_geometries.append(Polygon(polygons[0]))
+
+                # Create GeoDataFrame in one go
+                return GeoDataFrame(
+                    {
+                        "mesh_name": all_mesh_names,
+                        "cell_id": all_cell_ids,
+                        "geometry": all_geometries
+                    },
+                    geometry="geometry",
+                    crs=HdfBase.get_projection(hdf_file)
+                )
 
-                cell_dict = {"mesh_name": [], "cell_id": [], "geometry": []}
-                for i, mesh_name in enumerate(mesh_area_names):
-                    cell_cnt = hdf_file["Geometry/2D Flow Areas/Cell Info"][()][i][1]
-                    cell_ids = list(range(cell_cnt))
-                    cell_face_info = hdf_file[
-                        "Geometry/2D Flow Areas/{}/Cells Face and Orientation Info".format(mesh_name)
-                    ][()]
-                    cell_face_values = hdf_file[
-                        "Geometry/2D Flow Areas/{}/Cells Face and Orientation Values".format(mesh_name)
-                    ][()][:, 0]
-                    face_id_lists = list(
-                        np.vectorize(
-                            lambda cell_id: str(
-                                cell_face_values[
-                                    cell_face_info[cell_id][0] : cell_face_info[cell_id][0]
-                                    + cell_face_info[cell_id][1]
-                                ]
-                            )
-                        )(cell_ids)
-                    )
-                    mesh_faces = (
-                        face_gdf[face_gdf.mesh_name == mesh_name][["face_id", "geometry"]]
-                        .set_index("face_id")
-                        .to_numpy()
-                    )
-                    cell_dict["mesh_name"] += [mesh_name] * cell_cnt
-                    cell_dict["cell_id"] += cell_ids
-                    cell_dict["geometry"] += list(
-                        np.vectorize(
-                            lambda face_id_list: (
-                                lambda geom_col: Polygon(list(polygonize(geom_col))[0])
-                            )(
-                                np.ravel(
-                                    mesh_faces[
-                                        np.array(face_id_list.strip("[]").split()).astype(int)
-                                    ]
-                                )
-                            )
-                        )(face_id_lists)
-                    )
-                return GeoDataFrame(cell_dict, geometry="geometry", crs=HdfBase.get_projection(hdf_file))
         except Exception as e:
             logger.error(f"Error reading mesh cell polygons from {hdf_path}: {str(e)}")
             return GeoDataFrame()
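
The new `get_mesh_cell_polygons` builds each cell by looking up its face LineStrings in a dictionary and running them through `shapely.ops.polygonize`, replacing the old code's stringified `np.vectorize` round-trip. A toy illustration of that polygonize step, with synthetic faces standing in for real mesh data:

```python
# A closed loop of face LineStrings becomes a Polygon via polygonize,
# exactly the per-cell step used in the rewrite above. Synthetic data.
from shapely.geometry import LineString, Polygon
from shapely.ops import polygonize

faces = [
    LineString([(0, 0), (1, 0)]),
    LineString([(1, 0), (1, 1)]),
    LineString([(1, 1), (0, 1)]),
    LineString([(0, 1), (0, 0)]),
]
polygons = list(polygonize(faces))
assert polygons and isinstance(polygons[0], Polygon)
print(polygons[0].area)  # 1.0
```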
@@ -217,19 +223,32 @@ class HdfMesh:
                 if not mesh_area_names:
                     return GeoDataFrame()
 
-
+                # Pre-allocate lists
+                all_mesh_names = []
+                all_cell_ids = []
+                all_points = []
+
                 for mesh_name in mesh_area_names:
-
-
+                    # Get all cell centers in one read
+                    cell_centers = hdf_file[f"Geometry/2D Flow Areas/{mesh_name}/Cells Center Coordinate"][()]
+                    cell_count = len(cell_centers)
 
-
-
-
-
-
-
-
-
+                    # Extend lists efficiently
+                    all_mesh_names.extend([mesh_name] * cell_count)
+                    all_cell_ids.extend(range(cell_count))
+                    all_points.extend(Point(coords) for coords in cell_centers)
+
+                # Create GeoDataFrame in one go
+                return GeoDataFrame(
+                    {
+                        "mesh_name": all_mesh_names,
+                        "cell_id": all_cell_ids,
+                        "geometry": all_points
+                    },
+                    geometry="geometry",
+                    crs=HdfBase.get_projection(hdf_file)
+                )
+
         except Exception as e:
             logger.error(f"Error reading mesh cell points from {hdf_path}: {str(e)}")
             return GeoDataFrame()
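
`get_mesh_cell_points` now reads the whole `Cells Center Coordinate` dataset per mesh in a single call and extends plain Python lists. A minimal sketch of the same array-to-Points conversion, with synthetic coordinates in place of the HDF dataset:

```python
# An (N, 2) coordinate array becomes a geometry column of shapely Points.
import numpy as np
import geopandas as gpd
from shapely.geometry import Point

cell_centers = np.array([[0.5, 0.5], [1.5, 0.5], [2.5, 0.5]])
gdf = gpd.GeoDataFrame(
    {"cell_id": range(len(cell_centers))},
    geometry=[Point(xy) for xy in cell_centers],
)
print(gdf)
```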
@@ -255,37 +274,45 @@ class HdfMesh:
                 mesh_area_names = HdfMesh.get_mesh_area_names(hdf_path)
                 if not mesh_area_names:
                     return GeoDataFrame()
-
+
+                # Pre-allocate lists
+                all_mesh_names = []
+                all_face_ids = []
+                all_geometries = []
+
                 for mesh_name in mesh_area_names:
-
-
-                    ][()]
-
-
-
-
-
-
-
-                        "Geometry/2D Flow Areas/{}/Faces Perimeter Values".format(mesh_name)
-                    ][()]
-                    face_id = -1
-                    for pnt_a_index, pnt_b_index in facepoints_index:
-                        face_id += 1
-                        face_dict["mesh_name"].append(mesh_name)
-                        face_dict["face_id"].append(face_id)
-                        coordinates = list()
-                        coordinates.append(facepoints_coordinates[pnt_a_index])
-                        starting_row, count = faces_perimeter_info[face_id]
+                    # Read all data at once
+                    facepoints_index = hdf_file[f"Geometry/2D Flow Areas/{mesh_name}/Faces FacePoint Indexes"][()]
+                    facepoints_coords = hdf_file[f"Geometry/2D Flow Areas/{mesh_name}/FacePoints Coordinate"][()]
+                    faces_perim_info = hdf_file[f"Geometry/2D Flow Areas/{mesh_name}/Faces Perimeter Info"][()]
+                    faces_perim_values = hdf_file[f"Geometry/2D Flow Areas/{mesh_name}/Faces Perimeter Values"][()]
+
+                    # Process each face
+                    for face_id, ((pnt_a_idx, pnt_b_idx), (start_row, count)) in enumerate(zip(facepoints_index, faces_perim_info)):
+                        coords = [facepoints_coords[pnt_a_idx]]
+
                         if count > 0:
-
-
-
-
-
-
+                            coords.extend(faces_perim_values[start_row:start_row + count])
+
+                        coords.append(facepoints_coords[pnt_b_idx])
+
+                        all_mesh_names.append(mesh_name)
+                        all_face_ids.append(face_id)
+                        all_geometries.append(LineString(coords))
+
+                # Create GeoDataFrame in one go
+                return GeoDataFrame(
+                    {
+                        "mesh_name": all_mesh_names,
+                        "face_id": all_face_ids,
+                        "geometry": all_geometries
+                    },
+                    geometry="geometry",
+                    crs=HdfBase.get_projection(hdf_file)
+                )
+
         except Exception as e:
-
+            logger.error(f"Error reading mesh cell faces from {hdf_path}: {str(e)}")
             return GeoDataFrame()
 
     @staticmethod
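
The face loop above leans on the HDF `Info`/`Values` convention used throughout these files: each `Info` row holds a `(start, count)` pair that slices a flat `Values` array, giving a jagged array without variable-length datasets. A small demo of that indexing with made-up numbers:

```python
# Demo of the (start, count) "Info"/"Values" layout: each Info row slices
# a flat Values array of interior perimeter points. Synthetic data.
import numpy as np

info = np.array([[0, 2], [2, 0], [2, 3]])      # (start_row, count) per face
values = np.array([[0.1, 0.1], [0.2, 0.2],
                   [0.9, 0.9], [1.0, 1.0], [1.1, 1.1]])

for face_id, (start, count) in enumerate(info):
    pts = values[start:start + count] if count > 0 else np.empty((0, 2))
    print(face_id, pts.tolist())
```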
@@ -22,7 +22,7 @@ class HdfPlot:
     @staticmethod
     @log_call
     def plot_mesh_cells(
-        cell_polygons_df: pd.DataFrame,
+        cell_polygons_df: pd.DataFrame,  ## THIS IS A GEODATAFRAME - NEED TO EDIT BOTH ARGUMENT AND USAGE
         projection: str,
         title: str = '2D Flow Area Mesh Cells',
         figsize: Tuple[int, int] = (12, 8)
@@ -74,6 +74,7 @@ from .HdfBase import HdfBase
 from .HdfUtils import HdfUtils
 from .Decorators import log_call, standardize_input
 from .LoggingConfig import setup_logging, get_logger
+import geopandas as gpd
 
 logger = get_logger(__name__)
 
@@ -234,7 +235,7 @@ class HdfResultsMesh:
     @staticmethod
     @log_call
     @standardize_input(file_type='plan_hdf')
-    def get_mesh_max_ws(hdf_path: Path, round_to: str = "100ms") ->
+    def get_mesh_max_ws(hdf_path: Path, round_to: str = "100ms") -> gpd.GeoDataFrame:
         """
         Get maximum water surface elevation for each mesh cell.
 
@@ -243,10 +244,7 @@ class HdfResultsMesh:
             round_to (str): Time rounding specification (default "100ms").
 
         Returns:
-
-
-        Raises:
-            ValueError: If there's an error processing the maximum water surface data.
+            gpd.GeoDataFrame: GeoDataFrame containing maximum water surface elevations with geometry.
         """
         try:
             with h5py.File(hdf_path, 'r') as hdf_file:
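
With the return type now `gpd.GeoDataFrame`, results carry geometry and CRS directly. A hedged usage sketch; the plan-HDF filename is illustrative, and it assumes `HdfResultsMesh` is exported at the package root:

```python
# Hypothetical usage of the updated API: results plot and export directly.
from pathlib import Path
from ras_commander import HdfResultsMesh

max_ws = HdfResultsMesh.get_mesh_max_ws(Path("MyProject.p01.hdf"))  # illustrative path
print(max_ws.crs)  # the projection now travels with the results
max_ws.to_file("max_ws.gpkg", driver="GPKG")  # or .plot() for a quick look
```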
@@ -262,7 +260,7 @@ class HdfResultsMesh:
     @staticmethod
     @log_call
     @standardize_input(file_type='plan_hdf')
-    def get_mesh_min_ws(hdf_path: Path, round_to: str = "100ms") ->
+    def get_mesh_min_ws(hdf_path: Path, round_to: str = "100ms") -> gpd.GeoDataFrame:
         """
         Get minimum water surface elevation for each mesh cell.
 
@@ -271,7 +269,7 @@ class HdfResultsMesh:
             round_to (str): Time rounding specification (default "100ms").
 
         Returns:
-
+            gpd.GeoDataFrame: GeoDataFrame containing minimum water surface elevations with geometry.
         """
         try:
             with h5py.File(hdf_path, 'r') as hdf_file:
@@ -353,7 +351,7 @@ class HdfResultsMesh:
     @staticmethod
     @log_call
     @standardize_input(file_type='plan_hdf')
-    def get_mesh_max_iter(hdf_path: Path, round_to: str = "100ms") ->
+    def get_mesh_max_iter(hdf_path: Path, round_to: str = "100ms") -> gpd.GeoDataFrame:
         """
         Get maximum iteration count for each mesh cell.
 
@@ -362,36 +360,19 @@ class HdfResultsMesh:
             round_to (str): Time rounding specification (default "100ms").
 
         Returns:
-
-
-        Raises:
-            ValueError: If there's an error processing the maximum iteration data.
-        """
-        """
-        Get maximum iteration count for each mesh cell.
-
-        Args:
-            hdf_path (Path): Path to the HDF file
-            round_to (str): Time rounding specification (default "100ms").
-
-        Returns:
-            pd.DataFrame: DataFrame containing maximum iteration counts with columns:
+            gpd.GeoDataFrame: GeoDataFrame containing maximum iteration counts with geometry.
+            Includes columns:
             - mesh_name: Name of the mesh
             - cell_id: ID of the cell
             - cell_last_iteration: Maximum number of iterations
             - cell_last_iteration_time: Time when max iterations occurred
             - geometry: Point geometry representing cell center
-
-        Raises:
-            ValueError: If there's an error processing the maximum iteration data.
-
-        Note: The Maximum Iteration is labeled as "Cell Last Iteration" in the HDF file
         """
         try:
             with h5py.File(hdf_path, 'r') as hdf_file:
                 return HdfResultsMesh.get_mesh_summary_output(hdf_file, "Cell Last Iteration", round_to)
         except Exception as e:
-            logger.error(f"Error in
+            logger.error(f"Error in get_mesh_max_iter: {str(e)}")
             raise ValueError(f"Failed to get maximum iteration count: {str(e)}")
 
 
@@ -612,7 +593,7 @@ class HdfResultsMesh:
     @staticmethod
     @log_call
     @standardize_input(file_type='plan_hdf')
-    def get_mesh_summary_output(hdf_file: h5py.File, var: str, round_to: str = "100ms") ->
+    def get_mesh_summary_output(hdf_file: h5py.File, var: str, round_to: str = "100ms") -> gpd.GeoDataFrame:
         """
         Get the summary output data for a given variable from the HDF file.
 
@@ -627,8 +608,8 @@ class HdfResultsMesh:
 
         Returns
         -------
-
-        A
+        gpd.GeoDataFrame
+            A GeoDataFrame containing the summary output data with attributes as metadata.
 
         Raises
         ------
@@ -642,7 +623,7 @@ class HdfResultsMesh:
             logger.info(f"Processing summary output for variable: {var}")
             d2_flow_areas = hdf_file.get("Geometry/2D Flow Areas/Attributes")
             if d2_flow_areas is None:
-                return
+                return gpd.GeoDataFrame()
 
             for d2_flow_area in d2_flow_areas[:]:
                 mesh_name = HdfUtils.convert_ras_string(d2_flow_area[0])
@@ -707,10 +688,18 @@ class HdfResultsMesh:
                 dfs.append(df)
 
             if not dfs:
-                return
+                return gpd.GeoDataFrame()
 
             result = pd.concat(dfs, ignore_index=True)
 
+            # Convert to GeoDataFrame
+            gdf = gpd.GeoDataFrame(result, geometry='geometry')
+
+            # Get CRS from HdfUtils
+            crs = HdfBase.get_projection(hdf_file)
+            if crs:
+                gdf.set_crs(crs, inplace=True)
+
             # Combine attributes from all meshes
             combined_attrs = {}
             for df in dfs:
@@ -720,15 +709,14 @@ class HdfResultsMesh:
                 elif combined_attrs[key] != value:
                     combined_attrs[key] = f"Multiple values: {combined_attrs[key]}, {value}"
 
-
+            gdf.attrs.update(combined_attrs)
 
-            logger.info(f"Processed {len(
-            return
+            logger.info(f"Processed {len(gdf)} rows of summary output data")
+            return gdf
 
         except Exception as e:
             logger.error(f"Error processing summary output data: {e}")
             raise ValueError(f"Error processing summary output data: {e}")
-
 
     @staticmethod
     def get_mesh_summary_output_group(hdf_file: h5py.File, mesh_name: str, var: str) -> Union[h5py.Group, h5py.Dataset]:
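
The hunk above parks per-mesh metadata on the result via `gdf.attrs`, a plain dict that pandas and GeoPandas attach to a frame. A toy demo of the same mechanism (synthetic values; note `.attrs` is documented as experimental and is not preserved by every operation):

```python
# Carrying dataset metadata on the frame itself, as get_mesh_summary_output does.
import geopandas as gpd
from shapely.geometry import Point

gdf = gpd.GeoDataFrame({"cell_id": [0, 1]},
                       geometry=[Point(0, 0), Point(1, 1)])
gdf.set_crs("EPSG:4326", inplace=True)
gdf.attrs.update({"variable": "Maximum Water Surface", "units": "ft"})
print(gdf.attrs["variable"], gdf.crs)
```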
@@ -49,9 +49,19 @@ def setup_logging(log_file=None, log_level=logging.INFO):
 
     _logging_setup_done = True
 
-def get_logger(name):
-    """Get a logger
-
+def get_logger(name: str) -> logging.Logger:
+    """Get a logger instance with the specified name.
+
+    Args:
+        name: The name for the logger, typically __name__ or module path
+
+    Returns:
+        logging.Logger: Configured logger instance
+    """
+    logger = logging.getLogger(name)
+    if not logger.handlers:  # Only add handler if none exists
+        setup_logging()  # Ensure logging is configured
+    return logger
 
 def log_call(logger=None):
     """Decorator to log function calls."""
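
With the hardened `get_logger`, callers no longer need to run `setup_logging()` themselves; the first `get_logger` call configures logging. A usage sketch:

```python
# get_logger now self-configures logging on first use.
from ras_commander.LoggingConfig import get_logger

logger = get_logger(__name__)
logger.info("Logger ready")
```

One design note: the handler check is made on the named logger, which typically stays handler-free even when the root logger is configured, so `setup_logging()` is presumably re-invoked for each new logger name and relies on its internal `_logging_setup_done` guard to stay cheap.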
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ras-commander
-Version: 0.49.0
+Version: 0.51.0
 Summary: A Python library for automating HEC-RAS operations
 Home-page: https://github.com/billk-FM/ras-commander
 Author: William M. Katzenmeyer
@@ -61,10 +61,13 @@ Create a virtual environment with conda or venv (ask ChatGPT if you need help)
 
 In your virtual environment, install ras-commander using pip:
 ```
-pip install h5py numpy pandas requests tqdm scipy xarray geopandas matplotlib ras-commander ipython psutil shapely fiona pathlib rtree
+pip install h5py numpy pandas requests tqdm scipy xarray geopandas matplotlib ras-commander ipython tqdm psutil shapely fiona pathlib rtree rasterstats
 pip install --upgrade ras-commander
 ```
 
+**Tested with Python 3.11**
+
+
 If you have dependency issues with pip (especially if you have errors with numpy), try clearing your local pip packages 'C:\Users\your_username\AppData\Roaming\Python\' and then creating a new virtual environment.
 
 
@@ -5,6 +5,15 @@ from pathlib import Path
 
 class CustomBuildPy(build_py):
     def run(self):
+        # Clean up __pycache__ folders
+        root_dir = Path(__file__).parent
+        for pycache_dir in root_dir.rglob('__pycache__'):
+            if pycache_dir.is_dir():
+                for cache_file in pycache_dir.iterdir():
+                    cache_file.unlink()  # Delete each file
+                pycache_dir.rmdir()  # Delete the empty directory
+                print(f"Cleaned up: {pycache_dir}")
+
         # Run the summary_knowledge_bases.py script
         script_path = Path(__file__).parent / 'ai_tools' / 'summary_knowledge_bases.py'
         subprocess.run(['python', str(script_path)], check=True)
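
The cleanup added to `CustomBuildPy.run` unlinks every file and then removes the directory, which assumes `__pycache__` holds only files (the normal case). An equivalent, slightly more tolerant sketch using `shutil.rmtree`, shown as an alternative rather than what the package ships:

```python
# Alternative cleanup sketch: shutil.rmtree also tolerates nested
# directories inside __pycache__, which unlink()/rmdir() would not.
import shutil
from pathlib import Path

root_dir = Path(__file__).parent
for pycache_dir in root_dir.rglob('__pycache__'):
    shutil.rmtree(pycache_dir, ignore_errors=True)
    print(f"Cleaned up: {pycache_dir}")
```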
@@ -14,7 +23,7 @@ class CustomBuildPy(build_py):
 
 setup(
     name="ras-commander",
-    version="0.49.0",
+    version="0.51.0",
     packages=["ras_commander"],
     include_package_data=True,
     python_requires='>=3.10',
@@ -42,7 +51,7 @@ To build and publish this package, follow these steps:
 2. Update the version number in ras_commander/__init__.py (if not using automatic versioning)
 
 3. Create source distribution and wheel:
-   python
+   python setup.py sdist bdist_wheel
 
 4. Check the distribution:
    twine check dist/*