rashdf 0.6.0__tar.gz → 0.7.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: rashdf
3
- Version: 0.6.0
3
+ Version: 0.7.0
4
4
  Summary: Read data from HEC-RAS HDF files.
5
5
  Project-URL: repository, https://github.com/fema-ffrd/rashdf
6
6
  Classifier: Development Status :: 4 - Beta
@@ -22,12 +22,12 @@ Requires-Dist: pre-commit; extra == "dev"
22
22
  Requires-Dist: ruff; extra == "dev"
23
23
  Requires-Dist: pytest; extra == "dev"
24
24
  Requires-Dist: pytest-cov; extra == "dev"
25
- Requires-Dist: fiona; extra == "dev"
26
25
  Requires-Dist: kerchunk; extra == "dev"
27
26
  Requires-Dist: zarr; extra == "dev"
28
27
  Requires-Dist: dask; extra == "dev"
29
28
  Requires-Dist: fsspec; extra == "dev"
30
29
  Requires-Dist: s3fs; extra == "dev"
30
+ Requires-Dist: fiona==1.9.6; extra == "dev"
31
31
  Provides-Extra: docs
32
32
  Requires-Dist: sphinx; extra == "docs"
33
33
  Requires-Dist: numpydoc; extra == "docs"
@@ -145,8 +145,12 @@ $ python -m venv venv-rashdf
145
145
 
146
146
  Activate the virtual environment:
147
147
  ```
148
- $ source ./venv/bin/activate
148
+ # For macOS/Linux
149
+ $ source ./venv-rashdf/bin/activate
149
150
  (venv-rashdf) $
151
+
152
+ # For Windows
153
+ > ./venv-rashdf/Scripts/activate
150
154
  ```
151
155
 
152
156
  Install dev dependencies:
@@ -110,8 +110,12 @@ $ python -m venv venv-rashdf
110
110
 
111
111
  Activate the virtual environment:
112
112
  ```
113
- $ source ./venv/bin/activate
113
+ # For macOS/Linux
114
+ $ source ./venv-rashdf/bin/activate
114
115
  (venv-rashdf) $
116
+
117
+ # For Windows
118
+ > ./venv-rashdf/Scripts/activate
115
119
  ```
116
120
 
117
121
  Install dev dependencies:
@@ -12,11 +12,11 @@ classifiers = [
12
12
  "Programming Language :: Python :: 3.11",
13
13
  "Programming Language :: Python :: 3.12",
14
14
  ]
15
- version = "0.6.0"
15
+ version = "0.7.0"
16
16
  dependencies = ["h5py", "geopandas>=1.0,<2.0", "pyarrow", "xarray"]
17
17
 
18
18
  [project.optional-dependencies]
19
- dev = ["pre-commit", "ruff", "pytest", "pytest-cov", "fiona", "kerchunk", "zarr", "dask", "fsspec", "s3fs"]
19
+ dev = ["pre-commit", "ruff", "pytest", "pytest-cov", "kerchunk", "zarr", "dask", "fsspec", "s3fs", "fiona==1.9.6"]
20
20
  docs = ["sphinx", "numpydoc", "sphinx_rtd_theme"]
21
21
 
22
22
  [project.urls]
@@ -6,6 +6,7 @@ from .utils import (
6
6
  ras_timesteps_to_datetimes,
7
7
  parse_ras_datetime_ms,
8
8
  deprecated,
9
+ convert_ras_hdf_value,
9
10
  )
10
11
 
11
12
  from geopandas import GeoDataFrame
@@ -156,6 +157,7 @@ class RasPlanHdf(RasGeomHdf):
156
157
  PLAN_INFO_PATH = "Plan Data/Plan Information"
157
158
  PLAN_PARAMS_PATH = "Plan Data/Plan Parameters"
158
159
  PRECIP_PATH = "Event Conditions/Meteorology/Precipitation"
160
+ OBS_DATA_PATH = "Event Conditions/Observed Data"
159
161
  RESULTS_UNSTEADY_PATH = "Results/Unsteady"
160
162
  RESULTS_UNSTEADY_SUMMARY_PATH = f"{RESULTS_UNSTEADY_PATH}/Summary"
161
163
  VOLUME_ACCOUNTING_PATH = f"{RESULTS_UNSTEADY_PATH}/Volume Accounting"
@@ -166,6 +168,8 @@ class RasPlanHdf(RasGeomHdf):
166
168
  UNSTEADY_TIME_SERIES_PATH = f"{BASE_OUTPUT_PATH}/Unsteady Time Series"
167
169
  REFERENCE_LINES_OUTPUT_PATH = f"{UNSTEADY_TIME_SERIES_PATH}/Reference Lines"
168
170
  REFERENCE_POINTS_OUTPUT_PATH = f"{UNSTEADY_TIME_SERIES_PATH}/Reference Points"
171
+ OBS_FLOW_OUTPUT_PATH = f"{OBS_DATA_PATH}/Flow"
172
+ OBS_STAGE_OUTPUT_PATH = f"{OBS_DATA_PATH}/Stage"
169
173
 
170
174
  RESULTS_STEADY_PATH = "Results/Steady"
171
175
  BASE_STEADY_PATH = f"{RESULTS_STEADY_PATH}/Output/Output Blocks/Base Output"
@@ -1117,6 +1121,74 @@ class RasPlanHdf(RasGeomHdf):
1117
1121
  """
1118
1122
  return self.reference_timeseries_output(reftype="lines")
1119
1123
 
1124
+ def observed_timeseries_input(self, vartype: str = "Flow") -> dict:
1125
+ """Return observed timeseries input data for reference lines and points from a HEC-RAS HDF plan file.
1126
+
1127
+ Parameters
1128
+ ----------
1129
+ vartype : str, optional
1130
+ The type of observed data to retrieve. Must be either "Flow" or "Stage".
1131
+ (default: "Flow")
1132
+
1133
+ Returns
1134
+ -------
1135
+ xr.DataArray
1136
+ An xarray DataArray with observed timeseries input data for both reference lines and reference points.
1137
+ """
1138
+ if vartype == "Flow":
1139
+ output_path = self.OBS_FLOW_OUTPUT_PATH
1140
+ elif vartype == "Stage":
1141
+ output_path = self.OBS_STAGE_OUTPUT_PATH
1142
+ else:
1143
+ raise ValueError('vartype must be either "Flow" or "Stage".')
1144
+
1145
+ observed_group = self.get(output_path)
1146
+ if observed_group is None:
1147
+ raise RasPlanHdfError(
1148
+ f"Could not find HDF group at path '{output_path}'."
1149
+ f" Does the Plan HDF file contain reference {vartype} output data?"
1150
+ )
1151
+ if "Attributes" in observed_group.keys():
1152
+ attr_path = observed_group["Attributes"]
1153
+ attrs_df = pd.DataFrame(attr_path[:]).map(convert_ras_hdf_value)
1154
+
1155
+ das = {}
1156
+ for idx, site in enumerate(observed_group.keys()):
1157
+ if site != "Attributes":
1158
+ # Site Ex: 'Ref Point: Grapevine_Lake_RP'
1159
+ site_path = observed_group[site]
1160
+ site_name = site.split(":")[1][1:] # Grapevine_Lake_RP
1161
+ ref_type = site.split(":")[0] # Ref Point
1162
+ if ref_type == "Ref Line":
1163
+ ref_type = "refln"
1164
+ else:
1165
+ ref_type = "refpt"
1166
+ df = pd.DataFrame(site_path[:]).map(convert_ras_hdf_value)
1167
+ # rename Date to time
1168
+ df = df.rename(columns={"Date": "time"})
1169
+ # Ensure the Date index is unique
1170
+ df = df.drop_duplicates(subset="time")
1171
+ # Package into a 1D xarray DataArray
1172
+ values = df["Value"].values
1173
+ times = df["time"].values
1174
+ da = xr.DataArray(
1175
+ values,
1176
+ name=vartype,
1177
+ dims=["time"],
1178
+ coords={
1179
+ "time": times,
1180
+ },
1181
+ attrs={
1182
+ "hdf_path": f"{output_path}/{site}",
1183
+ },
1184
+ )
1185
+ # Expand dimensions to add additional coordinates
1186
+ da = da.expand_dims({f"{ref_type}_id": [idx - 1]})
1187
+ da = da.expand_dims({f"{ref_type}_name": [site_name]})
1188
+ das[site_name] = da
1189
+ das = xr.concat([das[site] for site in das.keys()], dim="time")
1190
+ return das
1191
+
1120
1192
  def reference_points_timeseries_output(self) -> xr.Dataset:
1121
1193
  """Return timeseries output data for reference points from a HEC-RAS HDF plan file.
1122
1194
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: rashdf
3
- Version: 0.6.0
3
+ Version: 0.7.0
4
4
  Summary: Read data from HEC-RAS HDF files.
5
5
  Project-URL: repository, https://github.com/fema-ffrd/rashdf
6
6
  Classifier: Development Status :: 4 - Beta
@@ -22,12 +22,12 @@ Requires-Dist: pre-commit; extra == "dev"
22
22
  Requires-Dist: ruff; extra == "dev"
23
23
  Requires-Dist: pytest; extra == "dev"
24
24
  Requires-Dist: pytest-cov; extra == "dev"
25
- Requires-Dist: fiona; extra == "dev"
26
25
  Requires-Dist: kerchunk; extra == "dev"
27
26
  Requires-Dist: zarr; extra == "dev"
28
27
  Requires-Dist: dask; extra == "dev"
29
28
  Requires-Dist: fsspec; extra == "dev"
30
29
  Requires-Dist: s3fs; extra == "dev"
30
+ Requires-Dist: fiona==1.9.6; extra == "dev"
31
31
  Provides-Extra: docs
32
32
  Requires-Dist: sphinx; extra == "docs"
33
33
  Requires-Dist: numpydoc; extra == "docs"
@@ -145,8 +145,12 @@ $ python -m venv venv-rashdf
145
145
 
146
146
  Activate the virtual environment:
147
147
  ```
148
- $ source ./venv/bin/activate
148
+ # For macOS/Linux
149
+ $ source ./venv-rashdf/bin/activate
149
150
  (venv-rashdf) $
151
+
152
+ # For Windows
153
+ > ./venv-rashdf/Scripts/activate
150
154
  ```
151
155
 
152
156
  Install dev dependencies:
@@ -8,12 +8,12 @@ pre-commit
8
8
  ruff
9
9
  pytest
10
10
  pytest-cov
11
- fiona
12
11
  kerchunk
13
12
  zarr
14
13
  dask
15
14
  fsspec
16
15
  s3fs
16
+ fiona==1.9.6
17
17
 
18
18
  [docs]
19
19
  sphinx
@@ -29,6 +29,7 @@ TEST_ATTRS = {"test_attribute1": "test_str1", "test_attribute2": 500}
29
29
  BALD_EAGLE_P18 = TEST_DATA / "ras/BaldEagleDamBrk.p18.hdf"
30
30
  BALD_EAGLE_P18_TIMESERIES = TEST_DATA / "ras/BaldEagleDamBrk.p18.timeseries.hdf"
31
31
  BALD_EAGLE_P18_REF = TEST_DATA / "ras/BaldEagleDamBrk.reflines-refpts.p18.hdf"
32
+ DENTON = TEST_DATA / "ras/Denton.hdf"
32
33
  MUNCIE_G05 = TEST_DATA / "ras/Muncie.g05.hdf"
33
34
  COAL_G01 = TEST_DATA / "ras/Coal.g01.hdf"
34
35
  BAXTER_P01 = TEST_DATA / "ras_1d/Baxter.p01.hdf"
@@ -617,3 +618,35 @@ def test__mesh_summary_outputs_df(tmp_path):
617
618
  TEST_CSV / "BaldEagleDamBrk.summary-cells-selectvars.csv",
618
619
  shallow=False,
619
620
  )
621
+
622
+
623
+ def test_observed_timeseries_input_flow():
624
+ with RasPlanHdf(DENTON) as phdf:
625
+ ds = phdf.observed_timeseries_input(vartype="Flow")
626
+ df = ds.sel(refln_name="Denton-Justin_RL").to_dataframe().dropna().reset_index()
627
+ valid_df = pd.read_csv(TEST_CSV / "Denton-Justin_RL_Flow.csv")
628
+ valid_df["time"] = pd.to_datetime(valid_df["time"])
629
+ assert_frame_equal(df, valid_df)
630
+
631
+
632
+ def test_observed_timeseries_input_stage():
633
+ with RasPlanHdf(DENTON) as phdf:
634
+ ds = phdf.observed_timeseries_input(vartype="Stage")
635
+ df = (
636
+ ds.sel(refpt_name="Grapevine_Lake_RP").to_dataframe().dropna().reset_index()
637
+ )
638
+ valid_df = pd.read_csv(TEST_CSV / "Grapevine_Lake_RP_Stage.csv")
639
+ valid_df["time"] = pd.to_datetime(valid_df["time"])
640
+ assert_frame_equal(df, valid_df)
641
+
642
+
643
+ def test_observed_timeseries_input_value_error():
644
+ with RasPlanHdf(DENTON) as phdf:
645
+ with pytest.raises(ValueError):
646
+ phdf.observed_timeseries_input(vartype="Fake Variable")
647
+
648
+
649
+ def test_observed_timeseries_input_rasplanhdf_error():
650
+ with RasPlanHdf(BALD_EAGLE_P18) as phdf:
651
+ with pytest.raises(RasPlanHdfError):
652
+ phdf.observed_timeseries_input(vartype="Flow")
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes