rashdf 0.7.2__tar.gz → 0.8.1__tar.gz

This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
- Metadata-Version: 2.2
+ Metadata-Version: 2.4
  Name: rashdf
- Version: 0.7.2
+ Version: 0.8.1
  Summary: Read data from HEC-RAS HDF files.
  Project-URL: repository, https://github.com/fema-ffrd/rashdf
  Classifier: Development Status :: 4 - Beta
@@ -16,7 +16,7 @@ License-File: LICENSE
  Requires-Dist: h5py
  Requires-Dist: geopandas<2.0,>=1.0
  Requires-Dist: pyarrow
- Requires-Dist: xarray
+ Requires-Dist: xarray<=2025.4.0
  Provides-Extra: dev
  Requires-Dist: pre-commit; extra == "dev"
  Requires-Dist: ruff; extra == "dev"
@@ -28,10 +28,12 @@ Requires-Dist: dask; extra == "dev"
  Requires-Dist: fsspec; extra == "dev"
  Requires-Dist: s3fs; extra == "dev"
  Requires-Dist: fiona==1.9.6; extra == "dev"
+ Requires-Dist: numcodecs<0.16; extra == "dev"
  Provides-Extra: docs
  Requires-Dist: sphinx; extra == "docs"
  Requires-Dist: numpydoc; extra == "docs"
  Requires-Dist: sphinx_rtd_theme; extra == "docs"
+ Dynamic: license-file

  # rashdf
  [![CI](https://github.com/fema-ffrd/rashdf/actions/workflows/continuous-integration.yml/badge.svg?branch=main)](https://github.com/fema-ffrd/rashdf/actions/workflows/continuous-integration.yml)
@@ -12,11 +12,23 @@ classifiers = [
  "Programming Language :: Python :: 3.11",
  "Programming Language :: Python :: 3.12",
  ]
- version = "0.7.2"
- dependencies = ["h5py", "geopandas>=1.0,<2.0", "pyarrow", "xarray"]
+ version = "0.8.1"
+ dependencies = ["h5py", "geopandas>=1.0,<2.0", "pyarrow", "xarray<=2025.4.0"]

  [project.optional-dependencies]
- dev = ["pre-commit", "ruff", "pytest", "pytest-cov", "kerchunk", "zarr==2.18.2", "dask", "fsspec", "s3fs", "fiona==1.9.6"]
+ dev = [
+ "pre-commit",
+ "ruff",
+ "pytest",
+ "pytest-cov",
+ "kerchunk",
+ "zarr==2.18.2",
+ "dask",
+ "fsspec",
+ "s3fs",
+ "fiona==1.9.6",
+ "numcodecs<0.16"
+ ]
  docs = ["sphinx", "numpydoc", "sphinx_rtd_theme"]

  [project.urls]
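For context, 0.8.1 caps the runtime xarray requirement at 2025.4.0 and adds a numcodecs<0.16 pin to the dev extra. The snippet below is an illustrative sketch, not part of the package, of checking an installed environment against those pins; it assumes the third-party packaging library is available.

```python
# Illustrative only: verify an environment against the 0.8.1 pins shown above.
from importlib.metadata import PackageNotFoundError, version

from packaging.version import Version


def check_rashdf_081_pins() -> None:
    """Raise AssertionError if installed versions violate the new constraints."""
    assert Version(version("xarray")) <= Version("2025.4.0"), "xarray exceeds the <=2025.4.0 pin"
    try:
        # numcodecs is only pulled in by the optional "dev" extra.
        assert Version(version("numcodecs")) < Version("0.16"), "numcodecs exceeds the <0.16 pin"
    except PackageNotFoundError:
        pass


check_rashdf_081_pins()
```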
@@ -21,6 +21,7 @@ COMMANDS = [
  "mesh_cell_faces",
  "refinement_regions",
  "bc_lines",
+ "ic_points",
  "breaklines",
  "reference_lines",
  "reference_points",
@@ -17,6 +17,8 @@ from shapely import (
  )

  from typing import Dict, List, Optional, Union
+ from warnings import warn
+ from pathlib import Path


  from .base import RasHdf
@@ -29,7 +31,7 @@ from .utils import (


  class RasGeomHdfError(Exception):
- """HEC-RAS Plan HDF error class."""
+ """HEC-RAS Geometry HDF error class."""

  pass

@@ -41,7 +43,9 @@ class RasGeomHdf(RasHdf):
  GEOM_STRUCTURES_PATH = f"{GEOM_PATH}/Structures"
  FLOW_AREA_2D_PATH = f"{GEOM_PATH}/2D Flow Areas"
  BC_LINES_PATH = f"{GEOM_PATH}/Boundary Condition Lines"
+ IC_POINTS_PATH = f"{GEOM_PATH}/IC Points"
  BREAKLINES_PATH = f"{GEOM_PATH}/2D Flow Area Break Lines"
+ REFINEMENT_REGIONS_PATH = f"{GEOM_PATH}/2D Flow Area Refinement Regions"
  REFERENCE_LINES_PATH = f"{GEOM_PATH}/Reference Lines"
  REFERENCE_POINTS_PATH = f"{GEOM_PATH}/Reference Points"
  CROSS_SECTIONS = f"{GEOM_PATH}/Cross Sections"
@@ -294,19 +298,28 @@ class RasGeomHdf(RasHdf):
  polyline_points = self[polyline_points_path][()]

  geoms = []
- for pnt_start, pnt_cnt, part_start, part_cnt in polyline_info:
- points = polyline_points[pnt_start : pnt_start + pnt_cnt]
- if part_cnt == 1:
- geoms.append(LineString(points))
- else:
- parts = polyline_parts[part_start : part_start + part_cnt]
- geoms.append(
- MultiLineString(
- list(
- points[part_pnt_start : part_pnt_start + part_pnt_cnt]
- for part_pnt_start, part_pnt_cnt in parts
+ for i, (pnt_start, pnt_cnt, part_start, part_cnt) in enumerate(polyline_info):
+ try:
+ points = polyline_points[pnt_start : pnt_start + pnt_cnt]
+ if part_cnt == 1:
+ geoms.append(LineString(points))
+ else: # pragma: no cover | TODO: add test coverage for this
+ parts = polyline_parts[part_start : part_start + part_cnt]
+ geoms.append(
+ MultiLineString(
+ list(
+ points[part_pnt_start : part_pnt_start + part_pnt_cnt]
+ for part_pnt_start, part_pnt_cnt in parts
+ )
  )
  )
+ except (
+ Exception
+ ) as e: # pragma: no cover | TODO: add test coverage for this
+ geoms.append(None)
+ warn(
+ f"Feature ID {i} within '{Path(path).name}' layer set to null due to invalid geometry. {e}",
+ UserWarning,
  )
  return geoms
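A behavioral note on the hunk above: a malformed polyline record no longer propagates an exception; the feature's geometry is set to None and a UserWarning is emitted naming the feature ID and layer. The sketch below shows one way a caller might capture those warnings and drop the null geometries; the file name is a hypothetical placeholder, and bc_lines() is used only as an example of a polyline-backed layer.

```python
# Minimal sketch: capture the per-feature UserWarning introduced in 0.8.x and
# drop features whose geometry was set to None. "Model.g01.hdf" is hypothetical.
import warnings

from rashdf import RasGeomHdf

with RasGeomHdf("Model.g01.hdf") as ghdf:
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always", UserWarning)
        bc_lines = ghdf.bc_lines()
    for w in caught:
        print(f"Skipped feature: {w.message}")
    bc_lines = bc_lines[bc_lines.geometry.notna()]  # keep only valid geometries
```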
@@ -369,25 +382,38 @@ class RasGeomHdf(RasHdf):
  GeoDataFrame
  A GeoDataFrame containing the 2D mesh area refinement regions if they exist.
  """
- if "/Geometry/2D Flow Area Refinement Regions" not in self:
+ if self.REFINEMENT_REGIONS_PATH not in self:
  return GeoDataFrame()
- rr_data = self["/Geometry/2D Flow Area Refinement Regions"]
+ rr_data = self[self.REFINEMENT_REGIONS_PATH]
  rr_ids = range(rr_data["Attributes"][()].shape[0])
  names = np.vectorize(convert_ras_hdf_string)(rr_data["Attributes"][()]["Name"])
  geoms = list()
- for pnt_start, pnt_cnt, part_start, part_cnt in rr_data["Polygon Info"][()]:
- points = rr_data["Polygon Points"][()][pnt_start : pnt_start + pnt_cnt]
- if part_cnt == 1:
- geoms.append(Polygon(points))
- else:
- parts = rr_data["Polygon Parts"][()][part_start : part_start + part_cnt]
- geoms.append(
- MultiPolygon(
- list(
- points[part_pnt_start : part_pnt_start + part_pnt_cnt]
- for part_pnt_start, part_pnt_cnt in parts
+ for i, (pnt_start, pnt_cnt, part_start, part_cnt) in enumerate(
+ rr_data["Polygon Info"][()]
+ ):
+ try:
+ points = rr_data["Polygon Points"][()][pnt_start : pnt_start + pnt_cnt]
+ if part_cnt == 1:
+ geoms.append(Polygon(points))
+ else: # pragma: no cover | TODO: add test coverage for this
+ parts = rr_data["Polygon Parts"][()][
+ part_start : part_start + part_cnt
+ ]
+ geoms.append(
+ MultiPolygon(
+ list(
+ points[part_pnt_start : part_pnt_start + part_pnt_cnt]
+ for part_pnt_start, part_pnt_cnt in parts
+ )
  )
  )
+ except (
+ Exception
+ ) as e: # pragma: no cover | TODO: add test coverage for this
+ geoms.append(None)
+ warn(
+ f"Feature ID {i} within '{Path(self.REFINEMENT_REGIONS_PATH).name}' layer set to null due to invalid geometry. {e}",
+ UserWarning,
  )
  return GeoDataFrame(
  {"rr_id": rr_ids, "name": names, "geometry": geoms},
@@ -408,7 +434,10 @@ class RasGeomHdf(RasHdf):
  GeoDataFrame
  A GeoDataFrame containing the model structures if they exist.
  """
- if self.GEOM_STRUCTURES_PATH not in self:
+ if (
+ self.GEOM_STRUCTURES_PATH not in self
+ or f"{self.GEOM_STRUCTURES_PATH}/Attributes" not in self
+ ):
  return GeoDataFrame()
  struct_data = self[self.GEOM_STRUCTURES_PATH]
  v_conv_val = np.vectorize(convert_ras_hdf_value)
@@ -438,7 +467,32 @@ class RasGeomHdf(RasHdf):
  raise NotImplementedError

  def ic_points(self) -> GeoDataFrame: # noqa D102
- raise NotImplementedError
+ """Return initial conditions points.
+
+ Returns
+ -------
+ GeoDataFrame
+ A GeoDataFrame containing the initial condition points if they exist.
+ """
+ if self.IC_POINTS_PATH not in self:
+ return GeoDataFrame()
+ ic_data = self[self.IC_POINTS_PATH]
+ v_conv_str = np.vectorize(convert_ras_hdf_string)
+ names = v_conv_str(ic_data["Attributes"][()]["Name"])
+ mesh_names = v_conv_str(ic_data["Attributes"][()]["SA/2D"])
+ cell_ids = ic_data["Attributes"][()]["Cell Index"]
+ points = ic_data["Points"][()]
+ return GeoDataFrame(
+ {
+ "icpt_id": range(len(names)),
+ "icpt_name": names,
+ "mesh_name": mesh_names,
+ "cell_id": cell_ids,
+ "geometry": list(map(Point, points)),
+ },
+ geometry="geometry",
+ crs=self.projection(),
+ )

  def _reference_lines_points_names(
  self, reftype: str = "lines", mesh_name: Optional[str] = None
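ic_points() is new in this release: it reads the geometry's IC Points group and returns a GeoDataFrame with icpt_id, icpt_name, mesh_name, cell_id, and point geometries, or an empty GeoDataFrame when the group is absent. Below is a hedged usage sketch; the input file and output path are hypothetical placeholders.

```python
# Usage sketch for the new RasGeomHdf.ic_points() accessor (0.8.x).
# "Model.g01.hdf" and "ic_points.geojson" are hypothetical placeholders.
from rashdf import RasGeomHdf

with RasGeomHdf("Model.g01.hdf") as ghdf:
    ic_points = ghdf.ic_points()  # empty GeoDataFrame if no IC Points group exists
    if not ic_points.empty:
        print(ic_points[["icpt_name", "mesh_name", "cell_id"]].head())
        ic_points.to_file("ic_points.geojson", driver="GeoJSON")
```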
@@ -168,6 +168,7 @@ class RasPlanHdf(RasGeomHdf):
  UNSTEADY_TIME_SERIES_PATH = f"{BASE_OUTPUT_PATH}/Unsteady Time Series"
  REFERENCE_LINES_OUTPUT_PATH = f"{UNSTEADY_TIME_SERIES_PATH}/Reference Lines"
  REFERENCE_POINTS_OUTPUT_PATH = f"{UNSTEADY_TIME_SERIES_PATH}/Reference Points"
+ BOUNDARY_CONDITIONS_OUTPUT_PATH = f"{UNSTEADY_TIME_SERIES_PATH}/Boundary Conditions"
  OBS_FLOW_OUTPUT_PATH = f"{OBS_DATA_PATH}/Flow"
  OBS_STAGE_OUTPUT_PATH = f"{OBS_DATA_PATH}/Stage"

@@ -1121,6 +1122,81 @@ class RasPlanHdf(RasGeomHdf):
  """
  return self.reference_timeseries_output(reftype="lines")

+ def bc_line_timeseries_output(self, bc_line_name: str) -> xr.Dataset:
+ """Return timeseries output data for a specific boundary condition line from a HEC-RAS HDF plan file.
+
+ Parameters
+ ----------
+ bc_line_name : str
+ The name of the boundary condition line.
+
+ Returns
+ -------
+ xr.Dataset
+ An xarray Dataset with timeseries output data for the specified boundary condition line.
+ """
+ path = f"{self.BOUNDARY_CONDITIONS_OUTPUT_PATH}/{bc_line_name}"
+ dataset = self.get(path)
+ if dataset is None:
+ raise RasPlanHdfError(
+ f"Could not find HDF group at path '{path}'."
+ f" Does the Plan HDF file contain boundary condition output data for '{bc_line_name}'?"
+ )
+ columns = [c.decode("utf-8") for c in dataset.attrs["Columns"]]
+ ds = xr.Dataset()
+ try:
+ import dask.array as da
+
+ # TODO: user-specified chunks?
+ values = da.from_array(dataset, chunks=dataset.chunks)
+ except ImportError:
+ values = dataset[:]
+ for i, col in enumerate(columns):
+ units = dataset.attrs.get(col, None)
+ if units is not None:
+ units = units.decode("utf-8")
+ da = xr.DataArray(
+ values[:, i],
+ name=col,
+ dims=["time"],
+ coords={
+ "time": self.unsteady_datetimes(),
+ },
+ attrs={
+ "units": units,
+ "hdf_path": f"{path}",
+ },
+ )
+ ds[col] = da
+ return ds
+
+ def bc_lines_timeseries_output(self) -> xr.Dataset:
+ """Return timeseries output data for boundary conditions lines from a HEC-RAS HDF plan file.
+
+ Returns
+ -------
+ xr.Dataset
+ An xarray Dataset with timeseries output data for boundary conditions lines.
+ """
+ df_bc_lines = self.bc_lines()
+ bc_lines_names = df_bc_lines["name"]
+ datasets = []
+ for bc_line_name in bc_lines_names:
+ ds_bc_line = self.bc_line_timeseries_output(bc_line_name)
+ datasets.append(ds_bc_line)
+ bc_line_ids = df_bc_lines["bc_line_id"].values
+ ds: xr.Dataset = xr.concat(
+ datasets, dim=pd.Index(bc_line_ids, name="bc_line_id")
+ )
+ ds = ds.assign_coords(
+ {
+ "bc_line_name": ("bc_line_id", bc_lines_names),
+ "bc_line_type": ("bc_line_id", df_bc_lines["type"]),
+ "mesh_name": ("bc_line_id", df_bc_lines["mesh_name"]),
+ }
+ )
+ return ds
+
  def observed_timeseries_input(self, vartype: str = "Flow") -> xr.DataArray:
  """Return observed timeseries input data for reference lines and points from a HEC-RAS HDF plan file.
@@ -1381,10 +1457,49 @@ class RasPlanHdf(RasGeomHdf):
  """
  return self.get_attrs(self.VOLUME_ACCOUNTING_PATH)

- def enroachment_points(self) -> GeoDataFrame: # noqa: D102
- raise NotImplementedError
+ def encroachment_points(self, profile_name: str) -> GeoDataFrame:
+ """Return encroachment points from a HEC-RAS plan HDF file based on a user-specified flow profile.
+
+ Returns
+ -------
+ GeoDataframe
+ A GeoDataFrame with cross-section encroachments represented as Point geometry features along with pertinent attributes.
+ """
+ cross_sections = self.cross_sections()
+ cross_sections["Enc_Profile"] = profile_name
+
+ leftmost_sta = self.cross_sections_elevations()["elevation info"].apply(
+ lambda x: x[0][0]
+ )
+ left_enc_sta = self.cross_sections_additional_enc_station_left()[profile_name]
+ left_enc_points = GeoDataFrame(
+ pd.concat(
+ [
+ cross_sections[["River", "Reach", "RS", "Enc_Profile"]],
+ left_enc_sta.rename("Enc_Sta", inplace=False),
+ ],
+ axis=1,
+ ),
+ geometry=cross_sections.geometry.interpolate(left_enc_sta - leftmost_sta),
+ )
+ left_enc_points["Side"] = "Left"
+
+ right_enc_sta = self.cross_sections_additional_enc_station_right()[profile_name]
+ right_enc_points = GeoDataFrame(
+ pd.concat(
+ [
+ cross_sections[["River", "Reach", "RS", "Enc_Profile"]],
+ right_enc_sta.rename("Enc_Sta", inplace=False),
+ ],
+ axis=1,
+ ),
+ geometry=cross_sections.geometry.interpolate(right_enc_sta - leftmost_sta),
+ )
+ right_enc_points["Side"] = "Right"
+
+ return GeoDataFrame(pd.concat([left_enc_points, right_enc_points]))

- def steady_flow_names(self) -> list:
+ def steady_flow_names(self) -> List[str]:
  """Return the profile information for each steady flow event.

  Returns
@@ -1393,7 +1508,7 @@ class RasPlanHdf(RasGeomHdf):
  A Dataframe containing the profile names for each event
  """
  if self.STEADY_PROFILES_PATH not in self:
- return pd.DataFrame()
+ return []

  profile_data = self[self.STEADY_PROFILES_PATH]
  profile_attrs = profile_data["Profile Names"][()]
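The renamed encroachment_points() replaces the unimplemented, misspelled enroachment_points stub: it interpolates the left and right encroachment stations for a chosen steady-flow profile along each cross-section and returns them as point features with River, Reach, RS, Enc_Profile, Enc_Sta, and Side attributes. steady_flow_names() is now typed as List[str] and returns an empty list when the plan has no steady profiles. A hedged usage sketch follows; the plan file name is a hypothetical placeholder.

```python
# Usage sketch for RasPlanHdf.encroachment_points() together with the revised
# steady_flow_names(). "Plan.p01.hdf" is a hypothetical placeholder.
from rashdf import RasPlanHdf

with RasPlanHdf("Plan.p01.hdf") as phdf:
    profiles = phdf.steady_flow_names()   # [] when the plan has no steady profiles
    if profiles:
        enc = phdf.encroachment_points(profile_name=profiles[0])
        print(enc[["River", "Reach", "RS", "Enc_Profile", "Enc_Sta", "Side"]].head())
```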
@@ -1,6 +1,6 @@
- Metadata-Version: 2.2
+ Metadata-Version: 2.4
  Name: rashdf
- Version: 0.7.2
+ Version: 0.8.1
  Summary: Read data from HEC-RAS HDF files.
  Project-URL: repository, https://github.com/fema-ffrd/rashdf
  Classifier: Development Status :: 4 - Beta
@@ -16,7 +16,7 @@ License-File: LICENSE
  Requires-Dist: h5py
  Requires-Dist: geopandas<2.0,>=1.0
  Requires-Dist: pyarrow
- Requires-Dist: xarray
+ Requires-Dist: xarray<=2025.4.0
  Provides-Extra: dev
  Requires-Dist: pre-commit; extra == "dev"
  Requires-Dist: ruff; extra == "dev"
@@ -28,10 +28,12 @@ Requires-Dist: dask; extra == "dev"
  Requires-Dist: fsspec; extra == "dev"
  Requires-Dist: s3fs; extra == "dev"
  Requires-Dist: fiona==1.9.6; extra == "dev"
+ Requires-Dist: numcodecs<0.16; extra == "dev"
  Provides-Extra: docs
  Requires-Dist: sphinx; extra == "docs"
  Requires-Dist: numpydoc; extra == "docs"
  Requires-Dist: sphinx_rtd_theme; extra == "docs"
+ Dynamic: license-file

  # rashdf
  [![CI](https://github.com/fema-ffrd/rashdf/actions/workflows/continuous-integration.yml/badge.svg?branch=main)](https://github.com/fema-ffrd/rashdf/actions/workflows/continuous-integration.yml)
@@ -1,7 +1,7 @@
  h5py
  geopandas<2.0,>=1.0
  pyarrow
- xarray
+ xarray<=2025.4.0

  [dev]
  pre-commit
@@ -14,6 +14,7 @@ dask
  fsspec
  s3fs
  fiona==1.9.6
+ numcodecs<0.16

  [docs]
  sphinx
@@ -1,7 +1,10 @@
+ import geopandas as gpd
  from pathlib import Path
  import h5py
  from pyproj import CRS
  from src.rashdf import RasGeomHdf
+ from pandas.testing import assert_frame_equal
+ import pytest

  from . import _create_hdf_with_group_attrs, _gdf_matches_json, _gdf_matches_json_alt

@@ -11,6 +14,8 @@ COAL_G01 = TEST_DATA / "ras/Coal.g01.hdf"
  BAXTER_P01 = TEST_DATA / "ras_1d/Baxter.p01.hdf"
  TEST_JSON = TEST_DATA / "json"
  BALD_EAGLE_P18_REF = TEST_DATA / "ras/BaldEagleDamBrk.reflines-refpts.p18.hdf"
+ LOWER_KANAWHA_P01_IC_POINTS = TEST_DATA / "ras/LowerKanawha.p01.icpoints.hdf"
+ LOWER_KANAWHA_P01_IC_POINTS_JSON = TEST_JSON / "LowerKanawha.p01.icpoints.geojson"

  TEST_ATTRS = {"test_attribute1": "test_str1", "test_attribute2": 500}

@@ -31,30 +36,71 @@ def test_mesh_area_names():
  assert ghdf.mesh_area_names() == ["2D Interior Area", "Perimeter_NW"]


+ def test_invalid_mesh_area_names(tmp_path):
+ test_hdf = tmp_path / "test.hdf"
+ _create_hdf_with_group_attrs(test_hdf, RasGeomHdf.GEOM_PATH, TEST_ATTRS)
+ # Test the empty Mesh Area names
+ with RasGeomHdf(test_hdf) as ghdf:
+ assert ghdf.mesh_area_names() == []
+
+
  def test_mesh_areas():
  mesh_areas_json = TEST_JSON / "mesh_areas.json"
  with RasGeomHdf(MUNCIE_G05) as ghdf:
  assert _gdf_matches_json(ghdf.mesh_areas(), mesh_areas_json)


+ def test_invalid_mesh_areas(tmp_path):
+ test_hdf = tmp_path / "test.hdf"
+ _create_hdf_with_group_attrs(test_hdf, RasGeomHdf.GEOM_PATH, TEST_ATTRS)
+ # Test the empty Mesh Areas
+ with RasGeomHdf(test_hdf) as ghdf:
+ assert ghdf.mesh_areas().empty
+
+
  def test_mesh_cell_faces():
  mesh_cell_faces_json = TEST_JSON / "mesh_cell_faces.json"
  with RasGeomHdf(MUNCIE_G05) as ghdf:
  assert _gdf_matches_json(ghdf.mesh_cell_faces(), mesh_cell_faces_json)


+ def test_invalid_mesh_faces(tmp_path):
+ test_hdf = tmp_path / "test.hdf"
+ _create_hdf_with_group_attrs(test_hdf, RasGeomHdf.GEOM_PATH, TEST_ATTRS)
+ # Test the empty Mesh Faces
+ with RasGeomHdf(test_hdf) as ghdf:
+ assert ghdf.mesh_cell_faces().empty
+
+
  def test_mesh_cell_points():
  mesh_cell_points_json = TEST_JSON / "mesh_cell_points.json"
  with RasGeomHdf(MUNCIE_G05) as ghdf:
  assert _gdf_matches_json(ghdf.mesh_cell_points(), mesh_cell_points_json)


+ def test_invalid_mesh_cell_points(tmp_path):
+ test_hdf = tmp_path / "test.hdf"
+ _create_hdf_with_group_attrs(test_hdf, RasGeomHdf.GEOM_PATH, TEST_ATTRS)
+ # Test the empty Mesh Cell Points
+ with RasGeomHdf(test_hdf) as ghdf:
+ assert ghdf.mesh_cell_points().empty
+
+
  def test_mesh_cell_polygons():
  mesh_cell_polygons_json = TEST_JSON / "mesh_cell_polygons.json"
  with RasGeomHdf(MUNCIE_G05) as ghdf:
  assert _gdf_matches_json(ghdf.mesh_cell_polygons(), mesh_cell_polygons_json)


+ def test_invalid_mesh_cell_polygons(tmp_path):
+ # Create a dummy HDF file
+ test_hdf = tmp_path / "test.hdf"
+ _create_hdf_with_group_attrs(test_hdf, RasGeomHdf.GEOM_PATH, TEST_ATTRS)
+ # Test the empty Mesh Cell Polygons
+ with RasGeomHdf(test_hdf) as ghdf:
+ assert ghdf.mesh_cell_polygons().empty
+
+
  def test_mesh_cell_polygons_coal():
  """Test with the mesh from the Coal River model.

@@ -114,6 +160,18 @@ def test_get_geom_2d_flow_area_attrs(tmp_path):
  assert ras_hdf.get_geom_2d_flow_area_attrs() == TEST_ATTRS


+ def test_invalid_get_geom_2d_flow_area_attrs(tmp_path):
+ test_hdf = tmp_path / "test.hdf"
+ _create_hdf_with_group_attrs(test_hdf, RasGeomHdf.GEOM_PATH, TEST_ATTRS)
+ ras_hdf = RasGeomHdf(test_hdf)
+
+ with pytest.raises(
+ AttributeError,
+ match=f"Unable to get 2D Flow Area; {RasGeomHdf.FLOW_AREA_2D_PATH} group not found in HDF5 file.",
+ ):
+ ras_hdf.get_geom_2d_flow_area_attrs()
+
+
  def test_structs():
  structs_json = TEST_JSON / "structures.json"
  with RasGeomHdf(MUNCIE_G05) as ghdf:
@@ -151,7 +209,7 @@ def test_reference_points_names():

  def test_structs_not_found():
  with RasGeomHdf(COAL_G01) as ghdf:
- assert (ghdf.structures(), None)
+ assert ghdf.structures().empty


  def test_cross_sections():
@@ -164,7 +222,7 @@ def test_cross_sections():

  def test_cross_sections_not_found():
  with RasGeomHdf(COAL_G01) as ghdf:
- assert (ghdf.cross_sections(), None)
+ assert ghdf.cross_sections().empty


  def test_river_reaches():
@@ -175,7 +233,7 @@ def test_river_reaches():

  def test_river_reaches_not_found():
  with RasGeomHdf(COAL_G01) as ghdf:
- assert (ghdf.river_reaches(), None)
+ assert ghdf.river_reaches().empty


  def test_cross_sections_elevations():
@@ -186,4 +244,18 @@ def test_cross_sections_elevations():

  def test_cross_sections_elevations_not_found():
  with RasGeomHdf(COAL_G01) as ghdf:
- assert (ghdf.cross_sections_elevations(), None)
+ assert ghdf.cross_sections_elevations().empty
+
+
+ def test_ic_points():
+ with RasGeomHdf(LOWER_KANAWHA_P01_IC_POINTS) as ghdf:
+ gdf_ic_points = ghdf.ic_points()
+ valid_gdf = gpd.read_file(
+ LOWER_KANAWHA_P01_IC_POINTS_JSON,
+ crs=ghdf.projection(),
+ )
+ assert_frame_equal(
+ gdf_ic_points,
+ valid_gdf,
+ check_dtype=False,
+ )
@@ -5,9 +5,11 @@ from src.rashdf.plan import (
  TimeSeriesOutputVar,
  )

+ import builtins
  import filecmp
  import json
  from pathlib import Path
+ from unittest import mock

  import numpy as np
  import pandas as pd
@@ -32,6 +34,7 @@ BALD_EAGLE_P18_REF = TEST_DATA / "ras/BaldEagleDamBrk.reflines-refpts.p18.hdf"
  DENTON = TEST_DATA / "ras/Denton.hdf"
  MUNCIE_G05 = TEST_DATA / "ras/Muncie.g05.hdf"
  COAL_G01 = TEST_DATA / "ras/Coal.g01.hdf"
+ LOWER_KANAWHA_P01_BC_LINES = TEST_DATA / "ras/LowerKanawha.p01.bclines.hdf"
  BAXTER_P01 = TEST_DATA / "ras_1d/Baxter.p01.hdf"
  FLODENCR_P01 = TEST_DATA / "ras_1d/FLODENCR.p01.hdf"

@@ -322,6 +325,59 @@ def test_reference_lines_timeseries(tmp_path: Path):
  assert_frame_equal(df, valid_df)


+ def test_bc_lines_timeseries(tmp_path: Path):
+ plan_hdf = RasPlanHdf(LOWER_KANAWHA_P01_BC_LINES)
+ ds = plan_hdf.bc_lines_timeseries_output()
+ assert "time" in ds.coords
+ assert "bc_line_id" in ds.coords
+ assert "bc_line_name" in ds.coords
+ assert "mesh_name" in ds.coords
+ assert "Flow" in ds.variables
+ assert "Stage" in ds.variables
+
+ q = ds["Flow"]
+ assert q.chunks is not None # Ensure Dask chunks are set
+ assert q.shape == (10, 577)
+ assert q.attrs["units"] == "cfs"
+
+ stage = ds["Stage"]
+ assert stage.chunks is not None # Ensure Dask chunks are set
+ assert stage.shape == (10, 577)
+ assert stage.attrs["units"] == "ft"
+
+ df = ds.sel(bc_line_id=7).to_dataframe()
+ valid_df = pd.read_csv(
+ TEST_CSV / "LowerKanawha.p01.bclines.7.csv",
+ index_col="time",
+ parse_dates=True,
+ dtype={"Flow": np.float32, "Stage": np.float32},
+ )
+ assert_frame_equal(df, valid_df)
+
+
+ def test_bc_lines_timeseries_no_dask(monkeypatch):
+ """Test that the bc_lines_timeseries_output method works without Dask."""
+ original_import = builtins.__import__
+
+ def mocked_import(name, *args, **kwargs):
+ if name == "dask.array":
+ raise ImportError("Dask is not available")
+ return original_import(name, *args, **kwargs)
+
+ monkeypatch.setattr(builtins, "__import__", mocked_import)
+
+ plan_hdf = RasPlanHdf(LOWER_KANAWHA_P01_BC_LINES)
+ ds = plan_hdf.bc_lines_timeseries_output()
+ assert ds["Flow"].chunks is None # Ensure no Dask chunks are set
+ assert ds["Stage"].chunks is None
+
+
+ def test_bc_line_timeseries_error():
+ plan_hdf = RasPlanHdf(LOWER_KANAWHA_P01_BC_LINES)
+ with pytest.raises(RasPlanHdfError):
+ plan_hdf.bc_line_timeseries_output("asdf")
+
+
  def test_reference_points(tmp_path: Path):
  plan_hdf = RasPlanHdf(BALD_EAGLE_P18_REF)
  gdf = plan_hdf.reference_points(datetime_to_str=True)
@@ -371,7 +427,7 @@ def test_cross_sections_additional_velocity_total():

  def test_cross_sections_additional_velocity_total_not_found():
  with RasPlanHdf(COAL_G01) as phdf:
- assert (phdf.cross_sections_additional_velocity_total(), None)
+ assert phdf.cross_sections_additional_velocity_total().empty


  def test_cross_sections_additional_area_total():
@@ -384,7 +440,7 @@ def test_cross_sections_additional_area_total():

  def test_cross_sections_additional_area_total_not_found():
  with RasPlanHdf(COAL_G01) as phdf:
- assert (phdf.cross_sections_additional_area_total(), None)
+ assert phdf.cross_sections_additional_area_total().empty


  def test_steady_flow_names():
@@ -394,7 +450,7 @@ def test_steady_flow_names():

  def test_steady_flow_names_not_found():
  with RasPlanHdf(COAL_G01) as phdf:
- assert (phdf.steady_flow_names(), None)
+ assert phdf.steady_flow_names() == []


  def test_cross_sections_wsel():
@@ -405,7 +461,7 @@ def test_cross_sections_wsel():

  def test_cross_sections_wsel_not_found():
  with RasPlanHdf(COAL_G01) as phdf:
- assert (phdf.cross_sections_wsel(), None)
+ assert phdf.cross_sections_wsel().empty


  def test_cross_sections_additional_enc_station_right():
@@ -419,7 +475,7 @@ def test_cross_sections_additional_enc_station_right():

  def test_cross_sections_additional_enc_station_right_not_found():
  with RasPlanHdf(COAL_G01) as phdf:
- assert (phdf.cross_sections_additional_enc_station_right(), None)
+ assert phdf.cross_sections_additional_enc_station_right().empty


  def test_cross_sections_additional_enc_station_left():
@@ -432,7 +488,7 @@ def test_cross_sections_additional_enc_station_left():

  def test_cross_sections_additional_enc_station_left_not_found():
  with RasPlanHdf(COAL_G01) as phdf:
- assert (phdf.cross_sections_additional_enc_station_left(), None)
+ assert phdf.cross_sections_additional_enc_station_left().empty


  def test_cross_sections_flow():
@@ -650,3 +706,12 @@ def test_observed_timeseries_input_rasplanhdf_error():
  with RasPlanHdf(BALD_EAGLE_P18) as phdf:
  with pytest.raises(RasPlanHdfError):
  phdf.observed_timeseries_input(vartype="Flow")
+
+
+ def test_encroachment_points():
+ enc_pnts_json = TEST_JSON / "encroachment_points.json"
+ with RasPlanHdf(FLODENCR_P01) as phdf:
+ assert _gdf_matches_json_alt(
+ phdf.encroachment_points(profile_name="PF#2"),
+ enc_pnts_json,
+ )
9 files without changes