ras-commander 0.43.0-py3-none-any.whl → 0.45.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,3 +1,13 @@
+ """
+ Class: HdfResultsMesh
+ 
+ Attribution: A substantial amount of code in this file is sourced or derived
+ from the https://github.com/fema-ffrd/rashdf library,
+ released under MIT license and Copyright (c) 2024 fema-ffrd
+ 
+ The file has been forked and modified for use in RAS Commander.
+ """
+ 
  import numpy as np
  import pandas as pd
  import xarray as xr
@@ -37,14 +47,14 @@ class HdfResultsMesh:
      @staticmethod
      @log_call
      @standardize_input(file_type='plan_hdf')
-     def mesh_summary_output(hdf_path: Path, var: str, round_to: str = "0.1 s") -> pd.DataFrame:
+     def mesh_summary_output(hdf_path: Path, var: str, round_to: str = "100ms") -> pd.DataFrame:
          """
          Return the summary output data for a given variable.
  
          Args:
              hdf_path (Path): Path to the HEC-RAS plan HDF file.
              var (str): The summary output variable to retrieve.
-             round_to (str): The time unit to round the datetimes to. Default: "0.1 s" (seconds).
+             round_to (str): The time unit to round the datetimes to. Default: "100ms" (100 milliseconds).
  
          Returns:
              pd.DataFrame: DataFrame containing the summary output data.
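
The new "100ms" default is the standard pandas offset alias for 100 milliseconds, which the datetime rounding downstream presumably hands to pandas. A minimal standalone sketch of that rounding behavior (illustrative only, not code from the package):

    import pandas as pd

    # Round timestamps to the nearest 100 milliseconds using the
    # "100ms" offset alias, as pandas interprets it.
    times = pd.to_datetime(["2024-01-01 00:00:00.040",
                            "2024-01-01 00:00:00.260"])
    print(times.round("100ms"))
    # -> 2024-01-01 00:00:00, 2024-01-01 00:00:00.300
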
@@ -166,68 +176,47 @@ class HdfResultsMesh:
          """
          return HdfResultsMesh._get_mesh_summary_output(hdf_path, "Cell Last Iteration")
  
+ 
      @staticmethod
      @log_call
      @standardize_input(file_type='plan_hdf')
-     def mesh_max_ws(hdf_path: Path, round_to: str = "0.1 s") -> pd.DataFrame:
+     def mesh_max_ws(hdf_path: Path, round_to: str = "100ms") -> pd.DataFrame:
          """
-         Get maximum water surface elevation for each mesh cell.
+         Get maximum iteration count for each mesh cell.
  
          Args:
              hdf_path (Path): Path to the HDF file.
-             round_to (str): Time rounding specification (default "0.1 s").
+             round_to (str): Time rounding specification (default "100ms").
  
          Returns:
-             pd.DataFrame: DataFrame containing maximum water surface elevations.
+             pd.DataFrame: DataFrame containing maximum iteration counts.
  
          Raises:
-             ValueError: If there's an error processing the maximum water surface data.
+             ValueError: If there's an error processing the maximum iteration data.
+ 
+         Note: The Maximum Iteration is labeled as "Cell Last Iteration" in the HDF file
          """
          try:
              with h5py.File(hdf_path, 'r') as hdf_file:
-                 start_time = HdfBase._get_simulation_start_time(hdf_file)
-                 dfs = []
-                 for mesh_name, cell_count in HdfBase._get_2d_flow_area_names_and_counts(hdf_file):
-                     group = HdfResultsMesh._get_mesh_summary_output_group(hdf_file, mesh_name, "Maximum Water Surface")
- 
-                     # Check the structure of the group
-                     if isinstance(group, h5py.Dataset):
-                         data = group[:]
-                         if data.ndim > 1:
-                             values = data[:, 0]  # Assume first column is values
-                             times = data[:, 1] if data.shape[1] > 1 else np.arange(len(values))
-                         else:
-                             values = data
-                             times = np.arange(len(values))
-                     else:
-                         values = group['Values'][:] if 'Values' in group else group[:]
-                         times = group['Time'][:] if 'Time' in group else np.arange(len(values))
- 
-                     times = HdfUtils._ras_timesteps_to_datetimes(times, start_time, time_unit="days", round_to=round_to)
-                     df = pd.DataFrame({
-                         "mesh_name": [mesh_name] * len(values),
-                         "cell_id": range(len(values)),
-                         "maximum_water_surface": values,
-                         "maximum_water_surface_time": times
-                     })
-                     dfs.append(df)
- 
-                 return pd.concat(dfs, ignore_index=True)
+                 return HdfResultsMesh._get_mesh_summary_output(hdf_file, "Maximum Water Surface", round_to)
          except Exception as e:
              logger.error(f"Error in mesh_max_ws: {str(e)}")
-             logger.error(f"Data structure: {group.shape if isinstance(group, h5py.Dataset) else [k for k in group.keys()]}")
              raise ValueError(f"Failed to get maximum water surface: {str(e)}")
+ 
+ 
+ 
+ 
  
      @staticmethod
      @log_call
      @standardize_input(file_type='plan_hdf')
-     def mesh_min_ws(hdf_path: Path, round_to: str = "0.1 s") -> pd.DataFrame:
+     def mesh_min_ws(hdf_path: Path, round_to: str = "100ms") -> pd.DataFrame:
          """
          Get minimum water surface elevation for each mesh cell.
  
          Args:
              hdf_path (Path): Path to the HDF file.
-             round_to (str): Time rounding specification (default "0.1 s").
+             round_to (str): Time rounding specification (default "100ms").
  
          Returns:
              pd.DataFrame: DataFrame containing minimum water surface elevations.
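
With the refactor above, mesh_max_ws delegates to the shared _get_mesh_summary_output path instead of parsing the HDF structure inline. A hypothetical usage sketch (the file path is made up, and the top-level import assumes the class is re-exported by the package):

    from pathlib import Path
    from ras_commander import HdfResultsMesh  # assumed top-level re-export

    plan_hdf = Path("Muncie.p04.hdf")  # hypothetical HEC-RAS plan HDF

    # Per _get_mesh_summary_output below, columns are mesh_name, cell_id,
    # maximum_water_surface, and maximum_water_surface_time.
    max_ws = HdfResultsMesh.mesh_max_ws(plan_hdf)
    print(max_ws.head())
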
@@ -242,13 +231,13 @@ class HdfResultsMesh:
      @staticmethod
      @log_call
      @standardize_input(file_type='plan_hdf')
-     def mesh_max_face_v(hdf_path: Path, round_to: str = "0.1 s") -> pd.DataFrame:
+     def mesh_max_face_v(hdf_path: Path, round_to: str = "100ms") -> pd.DataFrame:
          """
          Get maximum face velocity for each mesh cell.
  
          Args:
              hdf_path (Path): Path to the HDF file.
-             round_to (str): Time rounding specification (default "0.1 s").
+             round_to (str): Time rounding specification (default "100ms").
  
          Returns:
              pd.DataFrame: DataFrame containing maximum face velocities.
@@ -266,13 +255,13 @@ class HdfResultsMesh:
      @staticmethod
      @log_call
      @standardize_input(file_type='plan_hdf')
-     def mesh_min_face_v(hdf_path: Path, round_to: str = "0.1 s") -> pd.DataFrame:
+     def mesh_min_face_v(hdf_path: Path, round_to: str = "100ms") -> pd.DataFrame:
          """
          Get minimum face velocity for each mesh cell.
  
          Args:
              hdf_path (Path): Path to the HDF file.
-             round_to (str): Time rounding specification (default "0.1 s").
+             round_to (str): Time rounding specification (default "100ms").
  
          Returns:
              pd.DataFrame: DataFrame containing minimum face velocities.
@@ -290,13 +279,13 @@ class HdfResultsMesh:
      @staticmethod
      @log_call
      @standardize_input(file_type='plan_hdf')
-     def mesh_max_ws_err(hdf_path: Path, round_to: str = "0.1 s") -> pd.DataFrame:
+     def mesh_max_ws_err(hdf_path: Path, round_to: str = "100ms") -> pd.DataFrame:
          """
          Get maximum water surface error for each mesh cell.
  
          Args:
              hdf_path (Path): Path to the HDF file.
-             round_to (str): Time rounding specification (default "0.1 s").
+             round_to (str): Time rounding specification (default "100ms").
  
          Returns:
              pd.DataFrame: DataFrame containing maximum water surface errors.
@@ -315,13 +304,13 @@ class HdfResultsMesh:
      @staticmethod
      @log_call
      @standardize_input(file_type='plan_hdf')
-     def mesh_max_iter(hdf_path: Path, round_to: str = "0.1 s") -> pd.DataFrame:
+     def mesh_max_iter(hdf_path: Path, round_to: str = "100ms") -> pd.DataFrame:
          """
          Get maximum iteration count for each mesh cell.
  
          Args:
              hdf_path (Path): Path to the HDF file.
-             round_to (str): Time rounding specification (default "0.1 s").
+             round_to (str): Time rounding specification (default "100ms").
  
          Returns:
              pd.DataFrame: DataFrame containing maximum iteration counts.
@@ -341,63 +330,6 @@ class HdfResultsMesh:
  
  
  
-     @staticmethod
-     def _get_mesh_timeseries_output(hdf_file: h5py.File, mesh_name: str, var: str, truncate: bool = True) -> xr.DataArray:
-         """
-         Get timeseries output for a specific mesh and variable.
- 
-         Args:
-             hdf_file (h5py.File): Open HDF file object.
-             mesh_name (str): Name of the mesh.
-             var (str): Variable name to retrieve. Valid options include:
-                 "Water Surface", "Face Velocity", "Cell Velocity X", "Cell Velocity Y",
-                 "Face Flow", "Face Water Surface", "Cell Volume", "Cell Volume Error",
-                 "Cell Water Surface Error", "Cell Courant", "Face Courant",
-                 "Cell Hydraulic Depth", "Cell Invert Depth",
-                 "Cell Cumulative Precipitation Depth", "Cell Divergence Term",
-                 "Cell Eddy Viscosity X", "Cell Eddy Viscosity Y", "Cell Flow Balance",
-                 "Cell Storage Term", "Cell Water Source Term", "Face Cumulative Volume",
-                 "Face Eddy Viscosity", "Face Flow Period Average", "Face Friction Term",
-                 "Face Pressure Gradient Term", "Face Shear Stress", "Face Tangential Velocity"
-             truncate (bool): Whether to truncate the output to remove trailing zeros (default True).
- 
-         Returns:
-             xr.DataArray: DataArray containing the timeseries output.
- 
-         Raises:
-             ValueError: If the specified path is not found in the HDF file or if there's an error processing the data.
-         """
-         try:
-             path = HdfResultsMesh._get_mesh_timeseries_output_path(mesh_name, var)
- 
-             if path not in hdf_file:
-                 raise ValueError(f"Path {path} not found in HDF file")
- 
-             values, units = HdfResultsMesh._get_mesh_timeseries_output_values_units(hdf_file, mesh_name, var)
-             times = HdfBase._get_unsteady_datetimes(hdf_file)
- 
-             if truncate:
-                 non_zero = np.nonzero(values)[0]
-                 if len(non_zero) > 0:
-                     start, end = non_zero[0], non_zero[-1] + 1
-                     values = values[start:end]
-                     times = times[start:end]
- 
-             # Check if values is 2D and adjust accordingly
-             if values.ndim == 2:
-                 dims = ["time", "cell"]
-             else:
-                 dims = ["time"]
- 
-             return xr.DataArray(
-                 values,
-                 coords={"time": times},
-                 dims=dims,
-                 attrs={"units": units, "mesh_name": mesh_name, "variable": var},
-             )
-         except Exception as e:
-             logger.error(f"Error in get_mesh_timeseries_output: {str(e)}")
-             raise ValueError(f"Failed to get timeseries output: {str(e)}")
  
      @staticmethod
      def _get_mesh_timeseries_output_path(mesh_name: str, var_name: str) -> str:
@@ -413,26 +345,6 @@ class HdfResultsMesh:
          """
          return f"Results/Unsteady/Output/Output Blocks/Base Output/Unsteady Time Series/2D Flow Areas/{mesh_name}/{var_name}"
  
-     @staticmethod
-     def _get_mesh_timeseries_output_values_units(hdf_file: h5py.File, mesh_name: str, var: str) -> Tuple[np.ndarray, str]:
-         """
-         Get the mesh timeseries output values and units for a specific variable from the HDF file.
- 
-         Args:
-             hdf_file (h5py.File): Open HDF file object.
-             mesh_name (str): Name of the mesh.
-             var (str): Variable name to retrieve.
- 
-         Returns:
-             Tuple[np.ndarray, str]: A tuple containing the output values and units.
-         """
-         path = HdfResultsMesh._get_mesh_timeseries_output_path(mesh_name, var)
-         group = hdf_file[path]
-         values = group[:]
-         units = group.attrs.get("Units")
-         if units is not None:
-             units = units.decode("utf-8")
-         return values, units
  
      @staticmethod
      def _mesh_cells_timeseries_output(hdf_file: h5py.File, mesh_names: Optional[Union[str, List[str]]] = None, var: Optional[str] = None, truncate: bool = False) -> Dict[str, xr.Dataset]:
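
_get_mesh_timeseries_output_path above encodes the fixed layout of HEC-RAS unsteady output inside the plan HDF file. A standalone h5py sketch of the same convention (mesh name, variable name, and file name are all illustrative):

    import h5py

    mesh_name, var_name = "2D Interior Area", "Water Surface"  # illustrative
    path = ("Results/Unsteady/Output/Output Blocks/Base Output/"
            f"Unsteady Time Series/2D Flow Areas/{mesh_name}/{var_name}")

    with h5py.File("Muncie.p04.hdf", "r") as hdf:  # hypothetical file
        if path in hdf:
            print(hdf[path].shape, dict(hdf[path].attrs))
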
@@ -492,11 +404,10 @@ class HdfResultsMesh:
              data_vars = {}
              for variable in variables:
                  try:
-                     values, units = HdfResultsMesh._get_mesh_timeseries_output_values_units(hdf_file, mesh_name, variable)
-                     logger.info(f"Variable: {variable}")
-                     logger.info(f"Original values shape: {values.shape}")
-                     logger.info(f"Units: {units}")
-                     logger.info(f"Number of time stamps: {len(time_stamps)}")
+                     path = HdfResultsMesh._get_mesh_timeseries_output_path(mesh_name, variable)
+                     dataset = hdf_file[path]
+                     values = dataset[:]
+                     units = dataset.attrs.get("Units", "").decode("utf-8")
  
                      if truncate:
                          last_nonzero = np.max(np.nonzero(values)[1]) + 1 if values.size > 0 else 0
@@ -505,13 +416,10 @@ class HdfResultsMesh:
                      else:
                          truncated_time_stamps = time_stamps
  
-                     # Ensure the data shape matches our expectations
                      if values.shape[0] != len(truncated_time_stamps):
                          logger.warning(f"Mismatch between number of time steps ({len(truncated_time_stamps)}) and data shape ({values.shape}) for variable {variable}")
                          continue
  
-                     logger.info(f"Final values shape: {values.shape}")
- 
                      data_vars[variable] = xr.DataArray(
                          data=values,
                          dims=['time', 'cell_id'],
@@ -536,6 +444,92 @@ class HdfResultsMesh:
              logger.error(f"Error in _mesh_cells_timeseries_output: {str(e)}")
              raise ValueError(f"Error processing timeseries output data: {e}")
  
+ 
+ 
+     @staticmethod
+     def _get_mesh_timeseries_output(hdf_file: h5py.File, mesh_name: str, var: str, truncate: bool = True) -> xr.DataArray:
+         """
+         Get timeseries output for a specific mesh and variable.
+ 
+         Args:
+             hdf_file (h5py.File): Open HDF file object.
+             mesh_name (str): Name of the mesh.
+             var (str): Variable name to retrieve. Valid options include:
+                 "Water Surface", "Face Velocity", "Cell Velocity X", "Cell Velocity Y",
+                 "Face Flow", "Face Water Surface", "Cell Volume", "Cell Volume Error",
+                 "Cell Water Surface Error", "Cell Courant", "Face Courant",
+                 "Cell Hydraulic Depth", "Cell Invert Depth",
+                 "Cell Cumulative Precipitation Depth", "Cell Divergence Term",
+                 "Cell Eddy Viscosity X", "Cell Eddy Viscosity Y", "Cell Flow Balance",
+                 "Cell Storage Term", "Cell Water Source Term", "Face Cumulative Volume",
+                 "Face Eddy Viscosity", "Face Flow Period Average", "Face Friction Term",
+                 "Face Pressure Gradient Term", "Face Shear Stress", "Face Tangential Velocity"
+             truncate (bool): Whether to truncate the output to remove trailing zeros (default True).
+ 
+         Returns:
+             xr.DataArray: DataArray containing the timeseries output.
+ 
+         Raises:
+             ValueError: If the specified path is not found in the HDF file or if there's an error processing the data.
+         """
+         try:
+             path = HdfResultsMesh._get_mesh_timeseries_output_path(mesh_name, var)
+ 
+             if path not in hdf_file:
+                 raise ValueError(f"Path {path} not found in HDF file")
+ 
+             # Use h5py to get the dataset
+             dataset = hdf_file[path]
+             values = dataset[:]
+             units = dataset.attrs.get("Units", "").decode("utf-8")
+             times = HdfBase._get_unsteady_datetimes(hdf_file)
+ 
+             if truncate:
+                 non_zero = np.nonzero(values)[0]
+                 if len(non_zero) > 0:
+                     start, end = non_zero[0], non_zero[-1] + 1
+                     values = values[start:end]
+                     times = times[start:end]
+ 
+             # Create xarray DataArray
+             dims = ["time", "cell"] if values.ndim == 2 else ["time"]
+             coords = {"time": times}
+             if values.ndim == 2:
+                 coords["cell"] = np.arange(values.shape[1])
+ 
+             return xr.DataArray(
+                 values,
+                 coords=coords,
+                 dims=dims,
+                 attrs={"units": units, "mesh_name": mesh_name, "variable": var},
+             )
+         except Exception as e:
+             logger.error(f"Error in get_mesh_timeseries_output: {str(e)}")
+             raise ValueError(f"Failed to get timeseries output: {str(e)}")
+ 
+ 
+     @staticmethod
+     def _get_mesh_timeseries_output_values_units(hdf_file: h5py.File, mesh_name: str, var: str) -> Tuple[np.ndarray, str]:
+         """
+         Get the mesh timeseries output values and units for a specific variable from the HDF file.
+ 
+         Args:
+             hdf_file (h5py.File): Open HDF file object.
+             mesh_name (str): Name of the mesh.
+             var (str): Variable name to retrieve.
+ 
+         Returns:
+             Tuple[np.ndarray, str]: A tuple containing the output values and units.
+         """
+         path = HdfResultsMesh._get_mesh_timeseries_output_path(mesh_name, var)
+         group = hdf_file[path]
+         values = group[:]
+         units = group.attrs.get("Units")
+         if units is not None:
+             units = units.decode("utf-8")
+         return values, units
+ 
+ 
      @staticmethod
      def _get_available_meshes(hdf_file: h5py.File) -> List[str]:
          """
@@ -555,23 +549,22 @@ class HdfResultsMesh:
                  mesh_names.append(name)
          return mesh_names
  
- 
      @staticmethod
-     def _get_mesh_summary_output(hdf_file: h5py.File, var: str, round_to: str = "0.1 s") -> pd.DataFrame:
+     def _get_mesh_summary_output(hdf_file: h5py.File, var: str, round_to: str = "100ms") -> pd.DataFrame:
          """
          Get the summary output data for a given variable from the HDF file.
  
          This method retrieves summary output data for all 2D flow areas (meshes) in the HDF file
-         for a specified variable. It handles different data structures that may be present in
-         different versions of HEC-RAS HDF files.
+         for a specified variable. It handles both 1D and 2D datasets.
+         Group attributes are added as metadata to the DataFrame.
  
          Args:
              hdf_file (h5py.File): Open HDF file object.
              var (str): The summary output variable to retrieve.
-             round_to (str): The time unit to round the datetimes to. Default is "0.1 s".
+             round_to (str): The time unit to round the datetimes to. Default is "100ms".
  
          Returns:
-             pd.DataFrame: A DataFrame containing the summary output data.
+             pd.DataFrame: A DataFrame containing the summary output data with attributes as metadata.
  
          Raises:
              ValueError: If the HDF file cannot be opened or read, or if the requested data is not found.
@@ -585,45 +578,66 @@ class HdfResultsMesh:
              logger.debug(f"Processing mesh: {mesh_name} with {cell_count} cells")
              group = HdfResultsMesh._get_mesh_summary_output_group(hdf_file, mesh_name, var)
  
-             # Handle different data structures
-             if isinstance(group, h5py.Dataset):
-                 data = group[:]
-                 if data.ndim == 2:
-                     values = data[:cell_count, 0]
-                     times = data[:cell_count, 1] if data.shape[1] > 1 else np.arange(cell_count)
-                 else:
-                     values = data[:cell_count]
-                     times = np.arange(cell_count)
+             data = group[:]
+             logger.debug(f"Data shape for {var} in {mesh_name}: {data.shape}")
+             logger.debug(f"Data type: {data.dtype}")
+             logger.debug(f"Attributes: {dict(group.attrs)}")
+ 
+             if data.ndim == 2 and data.shape[0] == 2:
+                 # This is the case for "Maximum Water Surface"
+                 row_variables = group.attrs.get('Row Variables', [b'Value', b'Time'])
+                 row_variables = [v.decode('utf-8').strip() for v in row_variables]
+ 
+                 df = pd.DataFrame({
+                     "mesh_name": [mesh_name] * data.shape[1],
+                     "cell_id": range(data.shape[1]),
+                     f"{var.lower().replace(' ', '_')}": data[0, :],
+                     f"{var.lower().replace(' ', '_')}_time": HdfUtils._ras_timesteps_to_datetimes(
+                         data[1, :], start_time, time_unit="days", round_to=round_to
+                     )
+                 })
+             elif data.ndim == 1:
+                 # Handle 1D datasets (like Cell Last Iteration)
+                 df = pd.DataFrame({
+                     "mesh_name": [mesh_name] * len(data),
+                     "cell_id": range(len(data)),
+                     var.lower().replace(' ', '_'): data
+                 })
              else:
-                 try:
-                     values = group['Values'][:cell_count]
-                     times = group['Time'][:cell_count]
-                 except KeyError:
-                     # If 'Values' and 'Time' don't exist, assume it's a simple dataset
-                     values = group[:][:cell_count]
-                     times = np.arange(cell_count)
+                 raise ValueError(f"Unexpected data shape for {var} in {mesh_name}. "
+                                  f"Got shape {data.shape}")
  
-             # Convert times to datetime objects
-             times = HdfUtils._ras_timesteps_to_datetimes(times, start_time, time_unit="days", round_to=round_to)
+             # Add group attributes as metadata
+             df.attrs['mesh_name'] = mesh_name
+             for attr_name, attr_value in group.attrs.items():
+                 if isinstance(attr_value, bytes):
+                     attr_value = attr_value.decode('utf-8')
+                 elif isinstance(attr_value, np.ndarray):
+                     attr_value = attr_value.tolist()
+                 df.attrs[attr_name] = attr_value
  
-             # Create DataFrame for this mesh
-             df = pd.DataFrame({
-                 "mesh_name": [mesh_name] * len(values),
-                 "cell_id": range(len(values)),
-                 f"{var.lower().replace(' ', '_')}": values,
-                 f"{var.lower().replace(' ', '_')}_time": times
-             })
              dfs.append(df)
  
-         # Combine all mesh DataFrames
          result = pd.concat(dfs, ignore_index=True)
+ 
+         # Combine attributes from all meshes
+         combined_attrs = {}
+         for df in dfs:
+             for key, value in df.attrs.items():
+                 if key not in combined_attrs:
+                     combined_attrs[key] = value
+                 elif combined_attrs[key] != value:
+                     combined_attrs[key] = f"Multiple values: {combined_attrs[key]}, {value}"
+ 
+         result.attrs.update(combined_attrs)
+ 
          logger.info(f"Processed {len(result)} rows of summary output data")
          return result
  
      except (KeyError, ValueError, AttributeError) as e:
          logger.error(f"Error processing summary output data: {e}")
          raise ValueError(f"Error processing summary output data: {e}")
- 
+ 
  
      @staticmethod
      def _get_mesh_summary_output_group(hdf_file: h5py.File, mesh_name: str, var: str) -> Union[h5py.Group, h5py.Dataset]:
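
The 2D branch of _get_mesh_summary_output assumes the two-row summary layout HEC-RAS writes for variables like "Maximum Water Surface": row 0 holds the peak value per cell, row 1 the time of the peak in days since the simulation start. A self-contained sketch of that decode with synthetic data (the timedelta conversion is a simplified stand-in for HdfUtils._ras_timesteps_to_datetimes):

    import numpy as np
    import pandas as pd

    start_time = pd.Timestamp("2024-01-01 00:00")
    data = np.array([[971.2, 970.8, 972.5],   # row 0: max water surface per cell
                     [0.25, 0.50, 0.75]])     # row 1: days from start_time

    df = pd.DataFrame({
        "cell_id": range(data.shape[1]),
        "maximum_water_surface": data[0, :],
        "maximum_water_surface_time": (
            start_time + pd.to_timedelta(data[1, :], unit="D")
        ).round("100ms"),
    })
    df.attrs["mesh_name"] = "2D Interior Area"  # pandas' (experimental) metadata dict
    print(df)
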
@@ -1,3 +1,13 @@
+ """
+ Class: HdfResultsPlan
+ 
+ Attribution: A substantial amount of code in this file is sourced or derived
+ from the https://github.com/fema-ffrd/rashdf library,
+ released under MIT license and Copyright (c) 2024 fema-ffrd
+ 
+ The file has been forked and modified for use in RAS Commander.
+ """
+ 
  from typing import Dict, List, Union, Optional
  from pathlib import Path
  import h5py
@@ -7,9 +17,8 @@ from .Decorators import standardize_input, log_call
  from .HdfBase import HdfBase
  from .HdfResultsXsec import HdfResultsXsec
  from .LoggingConfig import get_logger
- import dask.array as da
- from datetime import datetime
  import numpy as np
+ from datetime import datetime
  
  logger = get_logger(__name__)
  
@@ -283,10 +292,7 @@ class HdfResultsPlan:
                  group = reference_group.get(var)
                  if group is None:
                      continue
-                 try:
-                     values = da.from_array(group, chunks=group.chunks)
-                 except ImportError:
-                     values = group[:]
+                 values = group[:]
                  units = group.attrs["Units"].decode("utf-8")
                  da = xr.DataArray(
                      values,
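
Dropping the dask path means reference results are now read eagerly: dataset[:] materializes the full array in memory rather than building a lazy, chunked view with da.from_array. A trivial sketch of the eager read (file name and dataset path are hypothetical):

    import h5py
    import numpy as np

    with h5py.File("Muncie.p04.hdf", "r") as hdf:        # hypothetical file
        dset = hdf["Results/Unsteady/Some Dataset"]      # hypothetical path
        values = dset[:]                 # eager: full NumPy array in memory
        assert isinstance(values, np.ndarray)
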
@@ -390,3 +396,6 @@ class HdfResultsPlan:
          except Exception as e:
              logger.error(f"Error in reference_summary_output: {str(e)}")
              return pd.DataFrame()  # Return an empty DataFrame on general error
+ 
+ 
+ 