rashdf 0.8.1__py3-none-any.whl → 0.8.3__py3-none-any.whl

This diff shows the changes between two publicly available versions of the package as released to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
cli.py CHANGED
@@ -88,12 +88,10 @@ def parse_args(args: str) -> argparse.Namespace:
         action="store_true",
         help="List the drivers supported by pyogrio for writing output files.",
     )
-    fiona_installed = False
     engines = ["pyogrio"]
     try:
         import fiona

-        fiona_installed = True
         engines.append("fiona")
         parser.add_argument(
             "--fiona-drivers",
@@ -142,62 +140,81 @@ def parse_args(args: str) -> argparse.Namespace:
     return args


-def export(args: argparse.Namespace) -> Optional[str]:
-    """Act on parsed arguments to extract data from HEC-RAS HDF files."""
+def _print_requested_drivers(args: argparse.Namespace) -> bool:
     if args.pyogrio_drivers:
         for driver in pyogrio_supported_drivers():
             print(driver)
-        return
+        return True
     if hasattr(args, "fiona_drivers") and args.fiona_drivers:
         for driver in fiona_supported_drivers():
             print(driver)
-        return
+        return True
+    return False
+
+
+def _load_hdf_class(args: argparse.Namespace):
     if re.match(r"^.*\.p\d\d\.hdf$", args.hdf_file):
         ras_hdf_class = RasPlanHdf
     else:
         ras_hdf_class = RasGeomHdf
+
     if re.match(r"^\w+://", args.hdf_file):
-        geom_hdf = ras_hdf_class.open_uri(args.hdf_file)
+        return ras_hdf_class.open_uri(args.hdf_file)
     else:
-        geom_hdf = ras_hdf_class(args.hdf_file)
+        return ras_hdf_class(args.hdf_file)
+
+
+def _print_stdout_geojson(gdf: GeoDataFrame, kwargs: dict) -> str:
+    # If an output file path isn't provided, write the GeoDataFrame to stdout
+    # as GeoJSON. Convert any datetime columns to strings.
+    gdf = df_datetimes_to_str(gdf)
+    with warnings.catch_warnings():
+        # Squash warnings about converting the CRS to OGC URN format.
+        # Likely to come up since USACE's Albers projection is a custom CRS.
+        # A warning written to stdout might cause issues with downstream processing.
+        warnings.filterwarnings(
+            "ignore",
+            (
+                "GeoDataFrame's CRS is not representable in URN OGC format."
+                " Resulting JSON will contain no CRS information."
+            ),
+        )
+        result = gdf.to_json(**kwargs)
+    print("No output file!")
+    print(result)
+    return result
+
+
+def _write_to_file(gdf: GeoDataFrame, args: argparse.Namespace, kwargs: dict):
+    output_file_path = Path(args.output_file)
+    output_file_ext = output_file_path.suffix
+    if output_file_ext not in [".gpkg"]:
+        # Unless the user specifies a format that supports datetime,
+        # convert any datetime columns to string.
+        gdf = df_datetimes_to_str(gdf)
+    gdf.to_file(args.output_file, engine=args.engine, **kwargs)
+
+
+def export(args: argparse.Namespace) -> Optional[str]:
+    """Act on parsed arguments to extract data from HEC-RAS HDF files."""
+    if _print_requested_drivers(args):
+        return
+
+    geom_hdf = _load_hdf_class(args)
     func = getattr(geom_hdf, args.func)
     gdf: GeoDataFrame = func()
     kwargs = literal_eval(args.kwargs) if args.kwargs else {}
     if args.to_crs:
         gdf = gdf.to_crs(args.to_crs)
     if not args.output_file:
-        # If an output file path isn't provided, write the GeoDataFrame to stdout
-        # as GeoJSON. Convert any datetime columns to strings.
-        gdf = df_datetimes_to_str(gdf)
-        with warnings.catch_warnings():
-            # Squash warnings about converting the CRS to OGC URN format.
-            # Likely to come up since USACE's Albers projection is a custom CRS.
-            # A warning written to stdout might cause issues with downstream processing.
-            warnings.filterwarnings(
-                "ignore",
-                (
-                    "GeoDataFrame's CRS is not representable in URN OGC format."
-                    " Resulting JSON will contain no CRS information."
-                ),
-            )
-            result = gdf.to_json(**kwargs)
-        print("No output file!")
-        print(result)
-        return result
+        return _print_stdout_geojson(gdf, kwargs)
     elif args.parquet:
         gdf.to_parquet(args.output_file, **kwargs)
         return
     elif args.feather:
         gdf.to_feather(args.output_file, **kwargs)
         return
-    output_file_path = Path(args.output_file)
-    output_file_ext = output_file_path.suffix
-    if output_file_ext not in [".gpkg"]:
-        # Unless the user specifies a format that supports datetime,
-        # convert any datetime columns to string.
-        # TODO: besides Geopackage, which of the standard Fiona drivers allow datetime?
-        gdf = df_datetimes_to_str(gdf)
-    gdf.to_file(args.output_file, engine=args.engine, **kwargs)
+    _write_to_file(gdf, args, kwargs)


 def main():
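The cli.py changes are a behavior-preserving refactor: export() now delegates to the new private helpers _print_requested_drivers, _load_hdf_class, _print_stdout_geojson, and _write_to_file. A minimal sketch of the flow those helpers implement follows; the file name Muncie.p04.hdf and the argument values are hypothetical, and the import relies on the wheel shipping cli.py as a top-level module (per RECORD), not on a documented API.

import argparse

from cli import export  # cli.py ships as a top-level module in this wheel

# Hypothetical namespace, roughly what parse_args() would build for a command like:
#   rashdf mesh_cell_polygons Muncie.p04.hdf --to-crs EPSG:4326
args = argparse.Namespace(
    hdf_file="Muncie.p04.hdf",  # matches *.pXX.hdf, so _load_hdf_class picks RasPlanHdf
    func="mesh_cell_polygons",  # method looked up on the opened HDF object via getattr
    kwargs=None,
    to_crs="EPSG:4326",
    output_file=None,  # no output file: _print_stdout_geojson writes GeoJSON to stdout
    parquet=False,
    feather=False,
    engine="pyogrio",
    pyogrio_drivers=False,  # True would list the pyogrio drivers and return early
)

export(args)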
rashdf/geom.py CHANGED
@@ -48,8 +48,11 @@ class RasGeomHdf(RasHdf):
     REFINEMENT_REGIONS_PATH = f"{GEOM_PATH}/2D Flow Area Refinement Regions"
     REFERENCE_LINES_PATH = f"{GEOM_PATH}/Reference Lines"
     REFERENCE_POINTS_PATH = f"{GEOM_PATH}/Reference Points"
-    CROSS_SECTIONS = f"{GEOM_PATH}/Cross Sections"
+    CROSS_SECTIONS_PATH = f"{GEOM_PATH}/Cross Sections"
     RIVER_CENTERLINES = f"{GEOM_PATH}/River Centerlines"
+    SA_2D = "SA/2D"
+
+    LAST_EDITED_COLUMN = "Last Edited"

     def __init__(self, name: str, **kwargs):
         """Open a HEC-RAS Geometry HDF file.
@@ -87,13 +90,11 @@ class RasGeomHdf(RasHdf):
             A list of the 2D mesh area names (str) within the RAS geometry if 2D areas exist.
         """
         if self.FLOW_AREA_2D_PATH not in self:
-            return list()
-        return list(
-            [
-                convert_ras_hdf_string(n)
-                for n in self[f"{self.FLOW_AREA_2D_PATH}/Attributes"][()]["Name"]
-            ]
-        )
+            return []
+        return [
+            convert_ras_hdf_string(n)
+            for n in self[f"{self.FLOW_AREA_2D_PATH}/Attributes"][()]["Name"]
+        ]

     def mesh_areas(self) -> GeoDataFrame:
         """Return 2D flow area perimeter polygons.
@@ -106,10 +107,15 @@ class RasGeomHdf(RasHdf):
         mesh_area_names = self.mesh_area_names()
         if not mesh_area_names:
             return GeoDataFrame()
-        mesh_area_polygons = [
-            Polygon(self[f"{self.FLOW_AREA_2D_PATH}/{n}/Perimeter"][()])
-            for n in mesh_area_names
-        ]
+
+        mesh_area_polygons = []
+        for n in mesh_area_names:
+            try:
+                mesh_area_polygons.append(
+                    Polygon(self[f"{self.FLOW_AREA_2D_PATH}/{n}/Perimeter"][()])
+                )
+            except KeyError as e:
+                raise RasGeomHdfError(f"Data for mesh '{n}' not found.") from e
         return GeoDataFrame(
             {"mesh_name": mesh_area_names, "geometry": mesh_area_polygons},
             geometry="geometry",
@@ -142,7 +148,9 @@ class RasGeomHdf(RasHdf):
             ][()][:, 0]
             face_id_lists = list(
                 np.vectorize(
-                    lambda cell_id: str(
+                    lambda cell_id,
+                    cell_face_values=cell_face_values,
+                    cell_face_info=cell_face_info: str(
                         cell_face_values[
                             cell_face_info[cell_id][0] : cell_face_info[cell_id][0]
                             + cell_face_info[cell_id][1]
@@ -159,7 +167,7 @@ class RasGeomHdf(RasHdf):
             cell_dict["cell_id"] += cell_ids
             cell_dict["geometry"] += list(
                 np.vectorize(
-                    lambda face_id_list: (
+                    lambda face_id_list, mesh_faces=mesh_faces: (
                         lambda geom_col: Polygon((geom_col[0] or geom_col[3]).geoms[0])
                     )(
                         polygonize_full(
@@ -232,7 +240,7 @@ class RasGeomHdf(RasHdf):
                 face_id += 1
                 face_dict["mesh_name"].append(mesh_name)
                 face_dict["face_id"].append(face_id)
-                coordinates = list()
+                coordinates = []
                 coordinates.append(facepoints_coordinates[pnt_a_index])
                 starting_row, count = faces_perimeter_info[face_id]
                 if count > 0:
@@ -307,10 +315,10 @@ class RasGeomHdf(RasHdf):
                 parts = polyline_parts[part_start : part_start + part_cnt]
                 geoms.append(
                     MultiLineString(
-                        list(
+                        [
                             points[part_pnt_start : part_pnt_start + part_pnt_cnt]
                             for part_pnt_start, part_pnt_cnt in parts
-                        )
+                        ]
                     )
                 )
             except (
@@ -387,7 +395,7 @@ class RasGeomHdf(RasHdf):
         rr_data = self[self.REFINEMENT_REGIONS_PATH]
         rr_ids = range(rr_data["Attributes"][()].shape[0])
         names = np.vectorize(convert_ras_hdf_string)(rr_data["Attributes"][()]["Name"])
-        geoms = list()
+        geoms = []
         for i, (pnt_start, pnt_cnt, part_start, part_cnt) in enumerate(
             rr_data["Polygon Info"][()]
         ):
@@ -401,10 +409,10 @@ class RasGeomHdf(RasHdf):
                 ]
                 geoms.append(
                     MultiPolygon(
-                        list(
+                        [
                             points[part_pnt_start : part_pnt_start + part_pnt_cnt]
                             for part_pnt_start, part_pnt_cnt in parts
-                        )
+                        ]
                     )
                 )
             except (
@@ -458,9 +466,9 @@ class RasGeomHdf(RasHdf):
             crs=self.projection(),
         )
         if datetime_to_str:
-            struct_gdf["Last Edited"] = struct_gdf["Last Edited"].apply(
-                lambda x: pd.Timestamp.isoformat(x)
-            )
+            struct_gdf[self.LAST_EDITED_COLUMN] = struct_gdf[
+                self.LAST_EDITED_COLUMN
+            ].apply(lambda x: pd.Timestamp.isoformat(x))
         return struct_gdf

     def connections(self) -> GeoDataFrame:  # noqa D102
@@ -479,7 +487,7 @@ class RasGeomHdf(RasHdf):
         ic_data = self[self.IC_POINTS_PATH]
         v_conv_str = np.vectorize(convert_ras_hdf_string)
         names = v_conv_str(ic_data["Attributes"][()]["Name"])
-        mesh_names = v_conv_str(ic_data["Attributes"][()]["SA/2D"])
+        mesh_names = v_conv_str(ic_data["Attributes"][()][self.SA_2D])
         cell_ids = ic_data["Attributes"][()]["Cell Index"]
         points = ic_data["Points"][()]
         return GeoDataFrame(
@@ -518,7 +526,7 @@ class RasGeomHdf(RasHdf):
             sa_2d_field = "SA-2D"
         elif reftype == "points":
             path = self.REFERENCE_POINTS_PATH
-            sa_2d_field = "SA/2D"
+            sa_2d_field = self.SA_2D
         else:
             raise RasGeomHdfError(
                 f"Invalid reference type: {reftype} -- must be 'lines' or 'points'."
@@ -627,7 +635,7 @@ class RasGeomHdf(RasHdf):
         attributes = ref_points_group["Attributes"][:]
         v_conv_str = np.vectorize(convert_ras_hdf_string)
         names = v_conv_str(attributes["Name"])
-        mesh_names = v_conv_str(attributes["SA/2D"])
+        mesh_names = v_conv_str(attributes[self.SA_2D])
         cell_id = attributes["Cell Index"]
         points = ref_points_group["Points"][()]
         return GeoDataFrame(
@@ -662,24 +670,24 @@ class RasGeomHdf(RasHdf):
         GeoDataFrame
             A GeoDataFrame containing the model 1D cross sections if they exist.
         """
-        if self.CROSS_SECTIONS not in self:
+        xs_attribute_path = self.CROSS_SECTIONS_PATH + "/Attributes"
+        if xs_attribute_path not in self:
             return GeoDataFrame()

-        xs_data = self[self.CROSS_SECTIONS]
+        xs_attrs = self[xs_attribute_path][()]
         v_conv_val = np.vectorize(convert_ras_hdf_value)
-        xs_attrs = xs_data["Attributes"][()]
         xs_dict = {"xs_id": range(xs_attrs.shape[0])}
         xs_dict.update(
             {name: v_conv_val(xs_attrs[name]) for name in xs_attrs.dtype.names}
         )
-        geoms = self._get_polylines(self.CROSS_SECTIONS)
+        geoms = self._get_polylines(self.CROSS_SECTIONS_PATH)
         xs_gdf = GeoDataFrame(
             xs_dict,
             geometry=geoms,
             crs=self.projection(),
         )
         if datetime_to_str:
-            xs_gdf["Last Edited"] = xs_gdf["Last Edited"].apply(
+            xs_gdf[self.LAST_EDITED_COLUMN] = xs_gdf[self.LAST_EDITED_COLUMN].apply(
                 lambda x: pd.Timestamp.isoformat(x)
             )
         return xs_gdf
@@ -702,7 +710,6 @@ class RasGeomHdf(RasHdf):
         river_dict.update(
             {name: v_conv_val(river_attrs[name]) for name in river_attrs.dtype.names}
         )
-        geoms = list()
         geoms = self._get_polylines(self.RIVER_CENTERLINES)
         river_gdf = GeoDataFrame(
             river_dict,
@@ -710,9 +717,9 @@ class RasGeomHdf(RasHdf):
             crs=self.projection(),
         )
         if datetime_to_str:
-            river_gdf["Last Edited"] = river_gdf["Last Edited"].apply(
-                lambda x: pd.Timestamp.isoformat(x)
-            )
+            river_gdf[self.LAST_EDITED_COLUMN] = river_gdf[
+                self.LAST_EDITED_COLUMN
+            ].apply(lambda x: pd.Timestamp.isoformat(x))
         return river_gdf

     def flowpaths(self) -> GeoDataFrame:  # noqa D102
@@ -747,18 +754,20 @@ class RasGeomHdf(RasHdf):

         xselev_data = self[path]
         xs_df = self.cross_sections()
-        elevations = list()
+        elevations = []
         for part_start, part_cnt in xselev_data["Station Elevation Info"][()]:
             xzdata = xselev_data["Station Elevation Values"][()][
                 part_start : part_start + part_cnt
             ]
             elevations.append(xzdata)

+        left_bank = "Left Bank"
+        right_bank = "Right Bank"
         xs_elev_df = xs_df[
-            ["xs_id", "River", "Reach", "RS", "Left Bank", "Right Bank"]
+            ["xs_id", "River", "Reach", "RS", left_bank, right_bank]
         ].copy()
-        xs_elev_df["Left Bank"] = xs_elev_df["Left Bank"].round(round_to).astype(str)
-        xs_elev_df["Right Bank"] = xs_elev_df["Right Bank"].round(round_to).astype(str)
+        xs_elev_df[left_bank] = xs_elev_df[left_bank].round(round_to).astype(str)
+        xs_elev_df[right_bank] = xs_elev_df[right_bank].round(round_to).astype(str)
         xs_elev_df["elevation info"] = elevations

         return xs_elev_df
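Most of the geom.py diff is mechanical (the CROSS_SECTIONS_PATH, SA_2D, and LAST_EDITED_COLUMN constants, plus list() calls replaced with literals), but two changes are behavioral: mesh_areas() now raises RasGeomHdfError when a named 2D flow area has no Perimeter dataset, and cross_sections() keys its existence check on the Cross Sections Attributes dataset. A hedged sketch of calling code that accounts for both follows; the file name is hypothetical, and the RasGeomHdfError import path is an assumption based on where the diff references it.

from rashdf import RasGeomHdf
from rashdf.geom import RasGeomHdfError  # assumed import path; geom.py raises this class

# "BaldEagle.g01.hdf" is a hypothetical file name; use a real HEC-RAS geometry HDF file.
with RasGeomHdf("BaldEagle.g01.hdf") as ghdf:
    try:
        mesh_areas = ghdf.mesh_areas()
    except RasGeomHdfError as err:
        # Raised in 0.8.3 when a mesh listed in the 2D Flow Areas attributes
        # table has no Perimeter dataset in the HDF file.
        print(f"Incomplete 2D flow area data: {err}")
        mesh_areas = None

    xs = ghdf.cross_sections()
    if xs.empty:
        # Empty GeoDataFrame when ".../Cross Sections/Attributes" is absent,
        # e.g. for a purely 2D model.
        print("No 1D cross sections in this geometry.")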
rashdf/plan.py CHANGED
@@ -20,6 +20,9 @@ from datetime import datetime
 from enum import Enum
 from typing import Dict, List, Optional, Tuple, Union

+# Shared constant
+WATER_SURFACE = "Water Surface"
+

 class RasPlanHdfError(Exception):
     """HEC-RAS Plan HDF error class."""
@@ -32,7 +35,7 @@ class XsSteadyOutputVar(Enum):

     ENERGY_GRADE = "Energy Grade"
     FLOW = "Flow"
-    WATER_SURFACE = "Water Surface"
+    WATER_SURFACE = WATER_SURFACE
     ENCROACHMENT_STATION_LEFT = "Encroachment Station Left"
     ENCROACHMENT_STATION_RIGHT = "Encroachment Station Right"
     AREA_INEFFECTIVE_TOTAL = "Area including Ineffective Total"
@@ -77,7 +80,7 @@ class TimeSeriesOutputVar(Enum):
     """Time series output variables."""

     # Default Outputs
-    WATER_SURFACE = "Water Surface"
+    WATER_SURFACE = WATER_SURFACE
     FACE_VELOCITY = "Face Velocity"

     # Optional Outputs
@@ -141,7 +144,6 @@ TIME_SERIES_OUTPUT_VARS_FACES = [
     TimeSeriesOutputVar.FACE_TANGENTIAL_VELOCITY,
     TimeSeriesOutputVar.FACE_VELOCITY,
     TimeSeriesOutputVar.FACE_WATER_SURFACE,
-    # TODO: investigate why "Face Wind Term" data gets written as a 1D array
     # TimeSeriesOutputVar.FACE_WIND_TERM,
 ]

@@ -178,6 +180,8 @@ class RasPlanHdf(RasGeomHdf):
     STEADY_XS_PATH = f"{STEADY_PROFILES_PATH}/Cross Sections"
     STEADY_XS_ADDITIONAL_PATH = f"{STEADY_XS_PATH}/Additional Variables"

+    INVALID_REFTYPE_ERROR = 'reftype must be either "lines" or "points".'
+
     def __init__(self, name: str, **kwargs):
         """Open a HEC-RAS Plan HDF file.

@@ -297,7 +301,7 @@ class RasPlanHdf(RasGeomHdf):
         self,
         mesh_name: str,
         var: SummaryOutputVar,
-        time_unit: str = "days",
+        time_unit: str = None,
         round_to: str = "0.1 s",
     ) -> np.ndarray[np.datetime64]:
         """Return an array of times for min/max summary output data.
@@ -321,7 +325,8 @@ class RasPlanHdf(RasGeomHdf):
         """
         start_time = self._simulation_start_time()
         max_ws_group = self._mesh_summary_output_group(mesh_name, var)
-        time_unit = self._summary_output_min_max_time_unit(max_ws_group)
+        if time_unit is None:
+            time_unit = self._summary_output_min_max_time_unit(max_ws_group)
         max_ws_raw = max_ws_group[:]
         max_ws_times_raw = max_ws_raw[1]
         # we get weirdly specific datetime values if we don't round to e.g., 0.1 seconds;
@@ -693,7 +698,7 @@ class RasPlanHdf(RasGeomHdf):
         """
         mesh_names_counts = self._2d_flow_area_names_and_counts()
         mesh_names = [mesh_name for mesh_name, _ in mesh_names_counts]
-        vars = set()
+        summary_vars = set()
         for mesh_name in mesh_names:
             path = f"{self.SUMMARY_OUTPUT_2D_FLOW_AREAS_PATH}/{mesh_name}"
             datasets = self[path].keys()
@@ -702,12 +707,12 @@ class RasPlanHdf(RasGeomHdf):
                     var = SummaryOutputVar(dataset)
                 except ValueError:
                     continue
-                vars.add(var)
+                summary_vars.add(var)
         if cells_or_faces == "cells":
-            vars = vars.intersection(SUMMARY_OUTPUT_VARS_CELLS)
+            summary_vars = summary_vars.intersection(SUMMARY_OUTPUT_VARS_CELLS)
         elif cells_or_faces == "faces":
-            vars = vars.intersection(SUMMARY_OUTPUT_VARS_FACES)
-        return sorted(list(vars), key=lambda x: x.value)
+            summary_vars = summary_vars.intersection(SUMMARY_OUTPUT_VARS_FACES)
+        return sorted(summary_vars, key=lambda x: x.value)

     def _mesh_summary_outputs_gdf(
         self,
@@ -894,7 +899,6 @@ class RasPlanHdf(RasGeomHdf):
         try:
             import dask.array as da

-            # TODO: user-specified chunks?
             values = da.from_array(group, chunks=group.chunks)
         except ImportError:
             values = group[:]
@@ -926,9 +930,7 @@ class RasPlanHdf(RasGeomHdf):
             An xarray DataArray with dimensions 'time' and 'cell_id'.
         """
         times = self.unsteady_datetimes()
-        mesh_names_counts = {
-            name: count for name, count in self._2d_flow_area_names_and_counts()
-        }
+        mesh_names_counts = dict(self._2d_flow_area_names_and_counts())
         if mesh_name not in mesh_names_counts:
             raise ValueError(f"Mesh '{mesh_name}' not found in the Plan HDF file.")
         if isinstance(var, str):
@@ -1067,7 +1069,7 @@ class RasPlanHdf(RasGeomHdf):
             output_path = self.REFERENCE_POINTS_OUTPUT_PATH
             abbrev = "refpt"
         else:
-            raise ValueError('reftype must be either "lines" or "points".')
+            raise ValueError(self.INVALID_REFTYPE_ERROR)
         reference_group = self.get(output_path)
         if reference_group is None:
             raise RasPlanHdfError(
@@ -1085,14 +1087,13 @@ class RasPlanHdf(RasGeomHdf):
         times = self.unsteady_datetimes()

         das = {}
-        for var in ["Flow", "Velocity", "Water Surface"]:
+        for var in ["Flow", "Velocity", WATER_SURFACE]:
             group = reference_group.get(var)
             if group is None:
                 continue
             try:
                 import dask.array as da

-                # TODO: user-specified chunks?
                 values = da.from_array(group, chunks=group.chunks)
             except ImportError:
                 values = group[:]
@@ -1147,7 +1148,6 @@ class RasPlanHdf(RasGeomHdf):
         try:
             import dask.array as da

-            # TODO: user-specified chunks?
             values = da.from_array(dataset, chunks=dataset.chunks)
         except ImportError:
             values = dataset[:]
@@ -1224,9 +1224,6 @@ class RasPlanHdf(RasGeomHdf):
                 f"Could not find HDF group at path '{output_path}'."
                 f" Does the Plan HDF file contain reference {vartype} output data?"
             )
-        if "Attributes" in observed_group.keys():
-            attr_path = observed_group["Attributes"]
-            attrs_df = pd.DataFrame(attr_path[:]).map(convert_ras_hdf_value)

         das = {}
         for idx, site in enumerate(observed_group.keys()):
@@ -1288,19 +1285,19 @@ class RasPlanHdf(RasGeomHdf):
         elif reftype == "points":
             abbrev = "refpt"
         else:
-            raise ValueError('reftype must be either "lines" or "points".')
+            raise ValueError(self.INVALID_REFTYPE_ERROR)
         ds = self.reference_timeseries_output(reftype=reftype)
         result = {
             f"{abbrev}_id": ds[f"{abbrev}_id"],
             f"{abbrev}_name": ds[f"{abbrev}_name"],
             "mesh_name": ds.mesh_name,
         }
-        vars = {
+        var_abbrevs = {
             "Flow": "q",
-            "Water Surface": "ws",
+            WATER_SURFACE: "ws",
             "Velocity": "v",
         }
-        for var, abbrev in vars.items():
+        for var, abbrev in var_abbrevs.items():
             if var not in ds:
                 continue
             max_var = ds[var].max(dim="time")
@@ -1326,7 +1323,7 @@ class RasPlanHdf(RasGeomHdf):
             abbrev = "refpt"
             gdf = super().reference_points()
         else:
-            raise ValueError('reftype must be either "lines" or "points".')
+            raise ValueError(self.INVALID_REFTYPE_ERROR)
         if include_output is False:
             return gdf
         summary_output = self.reference_summary_output(reftype=reftype)
@@ -1650,7 +1647,6 @@ class RasPlanHdf(RasGeomHdf):
             # and a bit risky because these private methods are more likely
             # to change, but short of reimplementing these functions ourselves
             # it's the best way to get the metadata we need.
-            # TODO: raise an issue in Kerchunk to expose this functionality?
             filters = SingleHdf5ToZarr._decode_filters(None, hdf_ds)
             encoding[var] = {"compressor": None, "filters": filters}
             storage_info = SingleHdf5ToZarr._storage_info(None, hdf_ds)
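The plan.py edits are mostly hygiene: a module-level WATER_SURFACE constant, a class-level INVALID_REFTYPE_ERROR message, summary_vars instead of shadowing the built-in vars, and removal of stale TODOs plus an unused Attributes-reading block. The one behavioral tweak is that the summary min/max time helper now auto-detects the time unit only when its time_unit argument is left as None, so a caller-supplied unit is no longer overwritten. The sketch below exercises the public reference-output methods that sit on top of this code; the file name is hypothetical, and it assumes (as the diff suggests) that reference_timeseries_output returns an xarray Dataset whose variables can include "Flow", "Velocity", and "Water Surface".

from rashdf import RasPlanHdf

# Hypothetical plan file name; substitute a real HEC-RAS plan HDF file.
with RasPlanHdf("Muncie.p04.hdf") as phdf:
    ds = phdf.reference_timeseries_output(reftype="lines")
    if "Water Surface" in ds:
        # Peak water surface per reference line, mirroring the max()/min()
        # reduction that reference_summary_output() performs internally.
        print(ds["Water Surface"].max(dim="time"))

    # Max/min summary built from the same time series data.
    print(phdf.reference_summary_output(reftype="lines"))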
rashdf/utils.py CHANGED
@@ -42,14 +42,14 @@ def parse_ras_datetime(datetime_str: str) -> datetime:
     -------
     datetime: A datetime object representing the parsed datetime.
     """
-    format = "%d%b%Y %H:%M:%S"
+    datetime_format = "%d%b%Y %H:%M:%S"

     if datetime_str.endswith("24:00:00"):
         datetime_str = datetime_str.replace("24:00:00", "00:00:00")
-        parsed_dt = datetime.strptime(datetime_str, format)
+        parsed_dt = datetime.strptime(datetime_str, datetime_format)
         parsed_dt += timedelta(days=1)
     else:
-        parsed_dt = datetime.strptime(datetime_str, format)
+        parsed_dt = datetime.strptime(datetime_str, datetime_format)

     return parsed_dt

@@ -68,14 +68,14 @@ def parse_ras_simulation_window_datetime(datetime_str) -> datetime:
     -------
     datetime: A datetime object representing the parsed datetime.
     """
-    format = "%d%b%Y %H%M"
+    datetime_format = "%d%b%Y %H%M"

     if datetime_str.endswith("2400"):
         datetime_str = datetime_str.replace("2400", "0000")
-        parsed_dt = datetime.strptime(datetime_str, format)
+        parsed_dt = datetime.strptime(datetime_str, datetime_format)
         parsed_dt += timedelta(days=1)
     else:
-        parsed_dt = datetime.strptime(datetime_str, format)
+        parsed_dt = datetime.strptime(datetime_str, datetime_format)

     return parsed_dt

@@ -190,8 +190,6 @@ def convert_ras_hdf_value(
         The converted value, which could be None, a boolean, a string, a list of strings, an integer, a float, a list
         of integers, a list of floats, or the original value as a string if no other conditions are met.
     """
-    # TODO (?): handle "8-bit bitfield" values in 2D Flow Area groups
-
     # Check for NaN (np.nan)
     if isinstance(value, np.floating) and np.isnan(value):
         return None
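The utils.py changes only rename the local format variable (which shadowed the Python built-in) to datetime_format and drop a stale TODO; parsing behavior is unchanged, including the HEC-RAS convention of writing end-of-day times as "24:00:00", which rolls over to midnight of the following day. A quick illustration using parse_ras_datetime as defined above:

from rashdf.utils import parse_ras_datetime

# A regular timestamp parses as-is.
print(parse_ras_datetime("01JAN1999 12:30:00"))  # 1999-01-01 12:30:00

# "24:00:00" is rewritten to "00:00:00" and one day is added.
print(parse_ras_datetime("01JAN1999 24:00:00"))  # 1999-01-02 00:00:00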
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: rashdf
-Version: 0.8.1
+Version: 0.8.3
 Summary: Read data from HEC-RAS HDF files.
 Project-URL: repository, https://github.com/fema-ffrd/rashdf
 Classifier: Development Status :: 4 - Beta
@@ -0,0 +1,12 @@
+cli.py,sha256=NtO1bHaiM8GBW5Gv9eSF8Iu-ekwTIQAfV_hd7DBXyLQ,6834
+rashdf/__init__.py,sha256=XXFtJDgLPCimqAhfsFz_pTWYECJiRT0i-Kb1uflXmVU,156
+rashdf/base.py,sha256=cAQJX1aeBJKb3MJ06ltpbRTUaZX5NkuxpR1J4f7FyTU,2507
+rashdf/geom.py,sha256=O2PMYY7w7fdW2U4u0rsbWeEDKAmsUh4-49ro-xUMc4A,28755
+rashdf/plan.py,sha256=ctkfLBqocF2TpU6wYygXkxE2voCJa8WyVGYlimsyxS4,63612
+rashdf/utils.py,sha256=AihMVcxxaufAnOVT3e5ollD5UINI_kjxSHohBS77-l0,10879
+rashdf-0.8.3.dist-info/licenses/LICENSE,sha256=L_0QaLpQVHPcglVjiaJPnOocwzP8uXevDRjUPr9DL1Y,1065
+rashdf-0.8.3.dist-info/METADATA,sha256=myhrp-erERbBWyofdiUaJLgC1apyviBizGBHMyGhRak,6072
+rashdf-0.8.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+rashdf-0.8.3.dist-info/entry_points.txt,sha256=LHHMR1lLy4wRyscMuW1RlYDXemtPgqQhNcILz0DtStY,36
+rashdf-0.8.3.dist-info/top_level.txt,sha256=SrmLb6FFTJtM_t6O1v0M0JePshiQJMHr0yYVkHL7ztk,11
+rashdf-0.8.3.dist-info/RECORD,,
@@ -1,12 +0,0 @@
-cli.py,sha256=HbyrdUVKfrmtU2T9ljKTPQ-ugomJqYbCA26ghGJDJh0,6588
-rashdf/__init__.py,sha256=XXFtJDgLPCimqAhfsFz_pTWYECJiRT0i-Kb1uflXmVU,156
-rashdf/base.py,sha256=cAQJX1aeBJKb3MJ06ltpbRTUaZX5NkuxpR1J4f7FyTU,2507
-rashdf/geom.py,sha256=-GmHmddcdIcfOn-SFS940WyDLUilW9inrp_nuZ8aTHo,28306
-rashdf/plan.py,sha256=d8YhpC6cV8rhh3qf1o12TbhUvo_4pMh75vIdDkcAvjE,63971
-rashdf/utils.py,sha256=Cba6sULF0m0jg6CQass4bPm2oxTd_avoe1pRQxq082c,10896
-rashdf-0.8.1.dist-info/licenses/LICENSE,sha256=L_0QaLpQVHPcglVjiaJPnOocwzP8uXevDRjUPr9DL1Y,1065
-rashdf-0.8.1.dist-info/METADATA,sha256=7h2fJs_IYE81euocfUfqCtY0qSPoMOJ-yV5kKdZ9Zco,6072
-rashdf-0.8.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-rashdf-0.8.1.dist-info/entry_points.txt,sha256=LHHMR1lLy4wRyscMuW1RlYDXemtPgqQhNcILz0DtStY,36
-rashdf-0.8.1.dist-info/top_level.txt,sha256=SrmLb6FFTJtM_t6O1v0M0JePshiQJMHr0yYVkHL7ztk,11
-rashdf-0.8.1.dist-info/RECORD,,