ras-commander 0.72.0__py3-none-any.whl → 0.74.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ras_commander/HdfPlan.py CHANGED
@@ -280,17 +280,18 @@ class HdfPlan:
         Raises:
             ValueError: If Geometry group is missing or there's an error reading attributes.
         """
-        print(f"Getting geometry attributes from {hdf_path}")
+        logger.info(f"Getting geometry attributes from {hdf_path}")
         try:
             with h5py.File(hdf_path, 'r') as hdf_file:
                 geom_attrs_path = "Geometry"
-                print(f"Checking for Geometry group in {hdf_path}")
+                logger.info(f"Checking for Geometry group in {hdf_path}")
                 if geom_attrs_path not in hdf_file:
+                    logger.error(f"Geometry group not found in {hdf_path}")
                     raise ValueError(f"Geometry group not found in {hdf_path}")
 
                 attrs = {}
                 geom_group = hdf_file[geom_attrs_path]
-                print("Getting root level geometry attributes")
+                logger.info("Getting root level geometry attributes")
                 # Get root level geometry attributes only
                 for key, value in geom_group.attrs.items():
                     if isinstance(value, bytes):
@@ -300,13 +301,16 @@ class HdfPlan:
                             logger.warning(f"Failed to decode byte string for root attribute {key}")
                             continue
                     attrs[key] = value
+                    logger.debug(f"Geometry attribute: {key} = {value}")
 
-                print("Successfully extracted root level geometry attributes")
+                logger.info(f"Successfully extracted {len(attrs)} root level geometry attributes")
                 return pd.DataFrame.from_dict(attrs, orient='index', columns=['Value'])
 
         except (OSError, RuntimeError) as e:
+            logger.error(f"Failed to read HDF file {hdf_path}: {str(e)}")
             raise ValueError(f"Failed to read HDF file {hdf_path}: {str(e)}")
         except Exception as e:
+            logger.error(f"Failed to get geometry attributes: {str(e)}")
             raise ValueError(f"Failed to get geometry attributes: {str(e)}")
 
 
@@ -750,3 +750,248 @@ class HdfResultsMesh:
             raise ValueError(f"Dataset not found at path '{output_path}'")
         return output_item
 
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_boundary_conditions_timeseries(hdf_path: Path) -> xr.Dataset:
+        """
+        Get timeseries output for all boundary conditions as a single combined xarray Dataset.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+
+        Returns:
+            xr.Dataset: Dataset containing all boundary condition data with:
+                - Dimensions: time, bc_name (boundary condition name), face_id
+                - Variables: stage, flow, flow_per_face, stage_per_face
+                - Coordinates and attributes preserving original metadata
+
+        Example:
+            >>> bc_data = HdfResultsMesh.get_boundary_conditions_timeseries(hdf_path)
+            >>> print(bc_data)
+            >>> # Plot flow for all boundary conditions
+            >>> bc_data.flow.plot(x='time', hue='bc_name')
+            >>> # Extract data for a specific boundary condition
+            >>> upstream_data = bc_data.sel(bc_name='Upstream Inflow')
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf_file:
+                # Get the base path and check if boundary conditions exist
+                base_path = "Results/Unsteady/Output/Output Blocks/Base Output/Unsteady Time Series"
+                bc_base_path = f"{base_path}/Boundary Conditions"
+
+                if bc_base_path not in hdf_file:
+                    logger.warning("No boundary conditions found in HDF file")
+                    return xr.Dataset()
+
+                # Get timestamps
+                start_time = HdfBase.get_simulation_start_time(hdf_file)
+                time_data = hdf_file[f"{base_path}/Time"][:]
+                timestamps = HdfUtils.convert_timesteps_to_datetimes(time_data, start_time)
+
+                # Get all boundary condition names (excluding those with " - Flow per Face" or " - Stage per Face" suffix)
+                bc_names = [name for name in hdf_file[bc_base_path].keys()
+                            if " - Flow per Face" not in name and " - Stage per Face" not in name]
+
+                if not bc_names:
+                    logger.warning("No boundary conditions found in HDF file")
+                    return xr.Dataset()
+
+                # Initialize arrays for main stage and flow data
+                num_timesteps = len(timestamps)
+                num_bcs = len(bc_names)
+
+                stage_data = np.full((num_timesteps, num_bcs), np.nan)
+                flow_data = np.full((num_timesteps, num_bcs), np.nan)
+
+                # Dictionary to store face-specific data
+                face_data = {
+                    'flow_per_face': {},
+                    'stage_per_face': {}
+                }
+
+                # Extract metadata from all boundary conditions
+                bc_metadata = {}
+
+                # Process each boundary condition
+                for bc_idx, bc_name in enumerate(bc_names):
+                    bc_path = f"{bc_base_path}/{bc_name}"
+
+                    try:
+                        # Extract main boundary data
+                        bc_data = hdf_file[bc_path][:]
+                        bc_attrs = dict(hdf_file[bc_path].attrs)
+
+                        # Store metadata
+                        bc_metadata[bc_name] = {
+                            k: v.decode('utf-8') if isinstance(v, bytes) else v
+                            for k, v in bc_attrs.items()
+                        }
+
+                        # Get column indices for Stage and Flow
+                        if 'Columns' in bc_attrs:
+                            columns = [col.decode('utf-8') if isinstance(col, bytes) else col
+                                       for col in bc_attrs['Columns']]
+
+                            stage_idx = columns.index('Stage') if 'Stage' in columns else None
+                            flow_idx = columns.index('Flow') if 'Flow' in columns else None
+
+                            if stage_idx is not None:
+                                stage_data[:, bc_idx] = bc_data[:, stage_idx]
+                            if flow_idx is not None:
+                                flow_data[:, bc_idx] = bc_data[:, flow_idx]
+
+                        # Extract Flow per Face data
+                        flow_face_path = f"{bc_path} - Flow per Face"
+                        if flow_face_path in hdf_file:
+                            flow_face_data = hdf_file[flow_face_path][:]
+                            flow_face_attrs = dict(hdf_file[flow_face_path].attrs)
+
+                            # Get face IDs
+                            face_ids = flow_face_attrs.get('Faces', [])
+                            if isinstance(face_ids, np.ndarray):
+                                face_ids = face_ids.tolist()
+                            else:
+                                face_ids = list(range(flow_face_data.shape[1]))
+
+                            face_data['flow_per_face'][bc_name] = {
+                                'data': flow_face_data,
+                                'faces': face_ids,
+                                'attrs': {
+                                    k: v.decode('utf-8') if isinstance(v, bytes) else v
+                                    for k, v in flow_face_attrs.items()
+                                }
+                            }
+
+                        # Extract Stage per Face data
+                        stage_face_path = f"{bc_path} - Stage per Face"
+                        if stage_face_path in hdf_file:
+                            stage_face_data = hdf_file[stage_face_path][:]
+                            stage_face_attrs = dict(hdf_file[stage_face_path].attrs)
+
+                            # Get face IDs
+                            face_ids = stage_face_attrs.get('Faces', [])
+                            if isinstance(face_ids, np.ndarray):
+                                face_ids = face_ids.tolist()
+                            else:
+                                face_ids = list(range(stage_face_data.shape[1]))
+
+                            face_data['stage_per_face'][bc_name] = {
+                                'data': stage_face_data,
+                                'faces': face_ids,
+                                'attrs': {
+                                    k: v.decode('utf-8') if isinstance(v, bytes) else v
+                                    for k, v in stage_face_attrs.items()
+                                }
+                            }
+
+                    except Exception as e:
+                        logger.warning(f"Error processing boundary condition '{bc_name}': {str(e)}")
+                        continue
+
+                # Create base dataset with stage and flow data
+                ds = xr.Dataset(
+                    data_vars={
+                        'stage': xr.DataArray(
+                            stage_data,
+                            dims=['time', 'bc_name'],
+                            coords={
+                                'time': timestamps,
+                                'bc_name': bc_names
+                            },
+                            attrs={'description': 'Water surface elevation at boundary condition'}
+                        ),
+                        'flow': xr.DataArray(
+                            flow_data,
+                            dims=['time', 'bc_name'],
+                            coords={
+                                'time': timestamps,
+                                'bc_name': bc_names
+                            },
+                            attrs={'description': 'Flow at boundary condition'}
+                        )
+                    },
+                    attrs={
+                        'source': 'HEC-RAS HDF Boundary Conditions',
+                        'start_time': start_time
+                    }
+                )
+
+                # Add metadata as coordinates
+                for key in bc_metadata[bc_names[0]]:
+                    if key != 'Columns':  # Skip Columns attribute as it's used for Stage/Flow
+                        try:
+                            values = [bc_metadata[bc].get(key, '') for bc in bc_names]
+                            ds = ds.assign_coords({f'{key.lower()}': ('bc_name', values)})
+                        except Exception as e:
+                            logger.debug(f"Could not add metadata coordinate '{key}': {str(e)}")
+
+                # Add face-specific data variables if available
+                if face_data['flow_per_face']:
+                    # First determine the maximum number of faces across all BCs
+                    all_flow_faces = set()
+                    for bc_name in face_data['flow_per_face']:
+                        all_flow_faces.update(face_data['flow_per_face'][bc_name]['faces'])
+
+                    # Create a merged array with NaN values for missing faces
+                    all_flow_faces = sorted(list(all_flow_faces))
+                    flow_face_data = np.full((num_timesteps, num_bcs, len(all_flow_faces)), np.nan)
+
+                    # Fill in the data where available
+                    for bc_idx, bc_name in enumerate(bc_names):
+                        if bc_name in face_data['flow_per_face']:
+                            bc_faces = face_data['flow_per_face'][bc_name]['faces']
+                            bc_data = face_data['flow_per_face'][bc_name]['data']
+
+                            for face_idx, face_id in enumerate(bc_faces):
+                                if face_id in all_flow_faces:
+                                    target_idx = all_flow_faces.index(face_id)
+                                    flow_face_data[:, bc_idx, target_idx] = bc_data[:, face_idx]
+
+                    # Add to the dataset
+                    ds['flow_per_face'] = xr.DataArray(
+                        flow_face_data,
+                        dims=['time', 'bc_name', 'face_id'],
+                        coords={
+                            'time': timestamps,
+                            'bc_name': bc_names,
+                            'face_id': all_flow_faces
+                        },
+                        attrs={'description': 'Flow per face at boundary condition'}
+                    )
+
+                # Similar approach for stage per face
+                if face_data['stage_per_face']:
+                    all_stage_faces = set()
+                    for bc_name in face_data['stage_per_face']:
+                        all_stage_faces.update(face_data['stage_per_face'][bc_name]['faces'])
+
+                    all_stage_faces = sorted(list(all_stage_faces))
+                    stage_face_data = np.full((num_timesteps, num_bcs, len(all_stage_faces)), np.nan)
+
+                    for bc_idx, bc_name in enumerate(bc_names):
+                        if bc_name in face_data['stage_per_face']:
+                            bc_faces = face_data['stage_per_face'][bc_name]['faces']
+                            bc_data = face_data['stage_per_face'][bc_name]['data']
+
+                            for face_idx, face_id in enumerate(bc_faces):
+                                if face_id in all_stage_faces:
+                                    target_idx = all_stage_faces.index(face_id)
+                                    stage_face_data[:, bc_idx, target_idx] = bc_data[:, face_idx]
+
+                    ds['stage_per_face'] = xr.DataArray(
+                        stage_face_data,
+                        dims=['time', 'bc_name', 'face_id'],
+                        coords={
+                            'time': timestamps,
+                            'bc_name': bc_names,
+                            'face_id': all_stage_faces
+                        },
+                        attrs={'description': 'Water surface elevation per face at boundary condition'}
+                    )
+
+                return ds
+
+        except Exception as e:
+            logger.error(f"Error getting all boundary conditions timeseries: {str(e)}")
+            return xr.Dataset()
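For orientation, a short usage sketch of the new accessor. The plan file name is hypothetical, the import is assumed from the class name shown in the diff, and everything after the accessor call is plain xarray:

from pathlib import Path

from ras_commander import HdfResultsMesh  # assumed import; adjust to the package layout

# Hypothetical plan results file produced by an unsteady HEC-RAS run.
bc_data = HdfResultsMesh.get_boundary_conditions_timeseries(Path("MyProject.p01.hdf"))

# The method returns an empty Dataset when no boundary conditions are found.
if bc_data.data_vars:
    # One flow hydrograph line per boundary condition (plotting needs matplotlib).
    bc_data["flow"].plot.line(x="time")

    # Per-face flow for the first boundary condition; faces NaN-padded during
    # the merge across boundary conditions are dropped again here.
    if "flow_per_face" in bc_data:
        per_face = bc_data["flow_per_face"].sel(bc_name=bc_data["bc_name"][0])
        per_face = per_face.dropna(dim="face_id", how="all")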