ras-commander 0.42.0__py3-none-any.whl → 0.43.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,138 @@
1
+ from typing import Dict, Any, List, Union
2
+ from pathlib import Path
3
+ import h5py
4
+ import numpy as np
5
+ import pandas as pd
6
+ from geopandas import GeoDataFrame
7
+ from shapely.geometry import LineString, MultiLineString, Polygon, MultiPolygon, Point, GeometryCollection
8
+ from .HdfUtils import HdfUtils
9
+ from .HdfXsec import HdfXsec
10
+ from .HdfBase import HdfBase
11
+ from .Decorators import standardize_input, log_call
12
+ from .LoggingConfig import setup_logging, get_logger
13
+
14
+ logger = get_logger(__name__)
15
+
16
class HdfStruc:
    """
    HEC-RAS HDF Structures class for handling operations related to structures in HDF files.

    This class provides methods for extracting and analyzing data about structures
    from HEC-RAS HDF files. It includes functionality to retrieve structure geometries
    and attributes.

    Methods in this class use the @standardize_input decorator to handle different
    input types (file path, etc.) and the @log_call decorator for logging method calls.

    Attributes:
        GEOM_STRUCTURES_PATH (str): Constant for the HDF path to structures data.

    Note: This class contains static methods and does not require instantiation.
    """

    # HDF group that holds the structure attribute table and centerline geometry.
    GEOM_STRUCTURES_PATH = "Geometry/Structures"

    @staticmethod
    @log_call
    @standardize_input(file_type='geom_hdf')
    def structures(hdf_path: Path, datetime_to_str: bool = False) -> GeoDataFrame:
        """
        Return the model structures.

        This method extracts structure data from the HDF file, including geometry
        and attributes, and returns it as a GeoDataFrame.

        Parameters
        ----------
        hdf_path : Path
            Path to the HEC-RAS geometry HDF file.
        datetime_to_str : bool, optional
            If True, convert datetime objects to strings. Default is False.

        Returns
        -------
        GeoDataFrame
            A GeoDataFrame containing the structures, with columns for attributes
            and geometry. Empty if the geometry file contains no structures.

        Raises
        ------
        Exception
            If there's an error reading the structures data from the HDF file.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf_file:
                # A geometry file with no structures is a normal case, not an error.
                if HdfStruc.GEOM_STRUCTURES_PATH not in hdf_file:
                    logger.info(f"No structures found in the geometry file: {hdf_path}")
                    return GeoDataFrame()

                struct_data = hdf_file[HdfStruc.GEOM_STRUCTURES_PATH]
                # Vectorized conversion of raw HDF scalars (bytes, numpy types)
                # into plain Python objects.
                v_conv_val = np.vectorize(HdfUtils._convert_ras_hdf_value)
                sd_attrs = struct_data["Attributes"][()]

                # One column per field in the structured Attributes array,
                # plus a sequential struct_id.
                struct_dict = {"struct_id": range(sd_attrs.shape[0])}
                struct_dict.update(
                    {name: v_conv_val(sd_attrs[name]) for name in sd_attrs.dtype.names}
                )

                # Centerline polylines, one geometry per structure.
                geoms = HdfXsec._get_polylines(
                    hdf_path,
                    HdfStruc.GEOM_STRUCTURES_PATH,
                    info_name="Centerline Info",
                    parts_name="Centerline Parts",
                    points_name="Centerline Points"
                )

                struct_gdf = GeoDataFrame(
                    struct_dict,
                    geometry=geoms,
                    crs=HdfUtils.projection(hdf_path),
                )

                # Guard on column presence: not every structures table carries a
                # "Last Edited" attribute, and indexing a missing column would
                # raise KeyError.
                if datetime_to_str and "Last Edited" in struct_gdf.columns:
                    struct_gdf["Last Edited"] = struct_gdf["Last Edited"].apply(
                        lambda x: pd.Timestamp.isoformat(x) if pd.notnull(x) else None
                    )

                return struct_gdf
        except Exception as e:
            logger.error(f"Error reading structures: {str(e)}")
            raise

    @staticmethod
    @log_call
    @standardize_input(file_type='geom_hdf')
    def get_geom_structures_attrs(hdf_path: Path) -> Dict[str, Any]:
        """
        Return geometry structures attributes from a HEC-RAS HDF file.

        This method extracts attributes related to geometry structures from the HDF file.

        Parameters
        ----------
        hdf_path : Path
            Path to the HEC-RAS geometry HDF file.

        Returns
        -------
        Dict[str, Any]
            A dictionary containing the geometry structures attributes.

        Notes
        -----
        If no structures are found in the geometry file, or if reading fails,
        an empty dictionary is returned (errors are logged, not raised).
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf_file:
                if HdfStruc.GEOM_STRUCTURES_PATH not in hdf_file:
                    logger.info(f"No structures found in the geometry file: {hdf_path}")
                    return {}
            # NOTE(review): get_attrs re-opens the file by path; the handle above
            # is only used for the existence check.
            return HdfUtils.get_attrs(hdf_path, HdfStruc.GEOM_STRUCTURES_PATH)
        except Exception as e:
            logger.error(f"Error reading geometry structures attributes: {str(e)}")
            return {}
@@ -0,0 +1,461 @@
1
import logging
import re
from datetime import datetime, timedelta
from pathlib import Path
from typing import Union, Optional, Dict, List, Tuple, Any

import h5py
import numpy as np
import pandas as pd
from scipy.spatial import KDTree

from .Decorators import standardize_input, log_call
from .HdfBase import HdfBase
from .LoggingConfig import setup_logging, get_logger
13
+
14
+ logger = get_logger(__name__)
15
+
16
class HdfUtils:
    """
    Utility class for working with HEC-RAS HDF files.

    This class provides general utility functions for HDF file operations,
    including attribute extraction, data conversion, and common HDF queries.
    It also includes spatial operations and helper methods for working with
    HEC-RAS specific data structures.

    Note:
        - Use this class for general HDF utility functions that are not specific to plan or geometry files.
        - All methods in this class are static and can be called without instantiating the class.
    """

    @staticmethod
    @standardize_input(file_type='plan_hdf')
    def get_hdf_filename(hdf_input: Union[str, Path, h5py.File], ras_object=None) -> Optional[Path]:
        """
        Get the HDF filename from various input types.

        Args:
            hdf_input (Union[str, Path, h5py.File]): The plan number, full path to the HDF file, or an open HDF file object.
            ras_object (RasPrj, optional): The RAS project object. Required when
                hdf_input is a plan number rather than a direct file path.

        Returns:
            Optional[Path]: Path to the HDF file, or None if not found.
        """
        # An open h5py.File already carries its own filename.
        if isinstance(hdf_input, h5py.File):
            return Path(hdf_input.filename)

        if isinstance(hdf_input, str):
            hdf_input = Path(hdf_input)

        if isinstance(hdf_input, Path) and hdf_input.is_file():
            return hdf_input

        # Not a direct path: treat hdf_input as a plan number and resolve it
        # through the project's plan table.
        if ras_object is None:
            logger.critical("RAS object is not provided. It is required when hdf_input is not a direct file path.")
            return None

        plan_info = ras_object.plan_df[ras_object.plan_df['plan_number'] == str(hdf_input)]
        if plan_info.empty:
            logger.critical(f"No HDF file found for plan number {hdf_input}")
            return None

        hdf_filename = plan_info.iloc[0]['HDF_Results_Path']
        if hdf_filename is None:
            logger.critical(f"HDF_Results_Path is None for plan number {hdf_input}")
            return None

        hdf_path = Path(hdf_filename)
        if not hdf_path.is_file():
            logger.critical(f"HDF file not found: {hdf_path}")
            return None

        return hdf_path

    @staticmethod
    @standardize_input(file_type='plan_hdf')
    def get_root_attrs(hdf_path: Path) -> dict:
        """
        Return attributes at root level of HEC-RAS HDF file.

        Args:
            hdf_path (Path): Path to the HDF file.

        Returns:
            dict: Dictionary filled with HEC-RAS HDF root attributes.
        """
        # Delegate directly by path; get_attrs opens the file itself, so
        # opening it here as well would be redundant.
        return HdfUtils.get_attrs(hdf_path, "/")

    @staticmethod
    @standardize_input(file_type='plan_hdf')
    def get_attrs(hdf_path: Path, attr_path: str) -> dict:
        """
        Get attributes from a HEC-RAS HDF file for a given attribute path.

        Args:
            hdf_path (Path): The path to the HDF file.
            attr_path (str): The path to the attributes within the HDF file.

        Returns:
            dict: A dictionary of attributes. Empty if the path is missing or
                reading fails (errors are logged, not raised).
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf_file:
                attr_object = hdf_file.get(attr_path)
                if attr_object is None:
                    logger.warning(f"Attribute path '{attr_path}' not found in HDF file.")
                    return {}
                return HdfUtils._hdf5_attrs_to_dict(attr_object.attrs)
        except Exception as e:
            logger.error(f"Error getting attributes from '{attr_path}': {str(e)}")
            return {}

    @staticmethod
    @standardize_input(file_type='plan_hdf')
    def get_hdf_paths_with_properties(hdf_path: Path) -> pd.DataFrame:
        """
        Get all paths in the HDF file with their properties.

        Args:
            hdf_path (Path): Path to the HDF file.

        Returns:
            pd.DataFrame: DataFrame containing paths and their properties
                (name, type, shape, dtype). Empty on read failure.
        """
        def get_item_properties(item):
            # Datasets expose shape/dtype; groups do not, hence the hasattr checks.
            return {
                'name': item.name,
                'type': type(item).__name__,
                'shape': item.shape if hasattr(item, 'shape') else None,
                'dtype': item.dtype if hasattr(item, 'dtype') else None
            }

        try:
            with h5py.File(hdf_path, 'r') as hdf_file:
                items = []
                hdf_file.visititems(lambda name, item: items.append(get_item_properties(item)))

            return pd.DataFrame(items)
        except Exception as e:
            logger.error(f"Error reading HDF file: {e}")
            return pd.DataFrame()

    @staticmethod
    @standardize_input(file_type='plan_hdf')
    def get_group_attributes_as_df(hdf_path: Path, group_path: str) -> pd.DataFrame:
        """
        Get attributes of a group in the HDF file as a DataFrame.

        Args:
            hdf_path (Path): Path to the HDF file.
            group_path (str): Path to the group within the HDF file.

        Returns:
            pd.DataFrame: Single-row DataFrame containing the group's attributes.

        Raises:
            KeyError: If group_path does not exist in the file.
        """
        with h5py.File(hdf_path, 'r') as hdf_file:
            group = hdf_file[group_path]
            attributes = {key: group.attrs[key] for key in group.attrs.keys()}
            return pd.DataFrame([attributes])

    @staticmethod
    def convert_ras_hdf_string(value: Union[str, bytes]) -> Union[bool, datetime, List[datetime], timedelta, str]:
        """
        Convert a string value from an HEC-RAS HDF file into a Python object.

        Public wrapper around :meth:`_convert_ras_hdf_string`.

        Args:
            value (Union[str, bytes]): The value to convert.

        Returns:
            Union[bool, datetime, List[datetime], timedelta, str]: The converted value.
        """
        return HdfUtils._convert_ras_hdf_string(value)

    @staticmethod
    def df_datetimes_to_str(df: pd.DataFrame) -> pd.DataFrame:
        """
        Convert any datetime64 columns in a DataFrame to strings.

        Note:
            The conversion is applied in place; the same DataFrame object is
            also returned for convenience.

        Args:
            df (pd.DataFrame): The DataFrame to convert.

        Returns:
            pd.DataFrame: The DataFrame with datetime columns converted to strings.
        """
        for col in df.select_dtypes(include=['datetime64']).columns:
            df[col] = df[col].dt.strftime('%Y-%m-%d %H:%M:%S')
        return df

    @staticmethod
    def perform_kdtree_query(
        reference_points: np.ndarray,
        query_points: np.ndarray,
        max_distance: float = 2.0
    ) -> np.ndarray:
        """
        Performs a KDTree query between two datasets and returns indices with distances exceeding max_distance set to -1.

        Args:
            reference_points (np.ndarray): The reference dataset for KDTree.
            query_points (np.ndarray): The query dataset to search against KDTree of reference_points.
            max_distance (float, optional): The maximum distance threshold. Indices with distances greater than this are set to -1. Defaults to 2.0.

        Returns:
            np.ndarray: Array of indices from reference_points that are nearest to each point in query_points.
            Indices with distances > max_distance are set to -1.

        Example:
            >>> ref_points = np.array([[0, 0], [1, 1], [2, 2]])
            >>> query_points = np.array([[0.5, 0.5], [3, 3]])
            >>> result = HdfUtils.perform_kdtree_query(ref_points, query_points)
            >>> print(result)
            array([ 0, -1])
        """
        # KDTree reports unmatched points with dist == inf and index == n;
        # the mask below converts those to -1.
        dist, snap = KDTree(reference_points).query(query_points, distance_upper_bound=max_distance)
        snap[dist > max_distance] = -1
        return snap

    @staticmethod
    def find_nearest_neighbors(points: np.ndarray, max_distance: float = 2.0) -> np.ndarray:
        """
        Creates a self KDTree for dataset points and finds nearest neighbors excluding self,
        with distances above max_distance set to -1.

        Args:
            points (np.ndarray): The dataset to build the KDTree from and query against itself.
            max_distance (float, optional): The maximum distance threshold. Indices with distances
                greater than max_distance are set to -1. Defaults to 2.0.

        Returns:
            np.ndarray: Array of indices representing the nearest neighbor in points for each point in points.
            Indices with distances > max_distance or self-matches are set to -1.

        Example:
            >>> points = np.array([[0, 0], [1, 1], [2, 2], [10, 10]])
            >>> result = HdfUtils.find_nearest_neighbors(points)
            >>> print(result)
            array([1, 0, 1, -1])
        """
        # k=2 because the closest hit in a self-query is always the point itself.
        dist, snap = KDTree(points).query(points, k=2, distance_upper_bound=max_distance)
        snap[dist > max_distance] = -1

        # Null out self-matches in either column, then take the first valid
        # neighbor (column 0 preferred, falling back to column 1, then -1).
        snp = pd.DataFrame(snap, index=np.arange(len(snap)))
        snp = snp.replace(-1, np.nan)
        snp.loc[snp[0] == snp.index, 0] = np.nan
        snp.loc[snp[1] == snp.index, 1] = np.nan
        filled = snp[0].fillna(snp[1])
        snapped = filled.fillna(-1).astype(np.int64).to_numpy()
        return snapped

    @staticmethod
    def _convert_ras_hdf_string(value: Union[str, bytes]) -> Union[bool, datetime, List[datetime], timedelta, str]:
        """
        Private method to convert a string value from an HEC-RAS HDF file into a Python object.

        Recognizes booleans, single datetimes, "A to B" datetime windows, and
        HH:MM:SS durations; anything else is returned unchanged as a string.

        Args:
            value (Union[str, bytes]): The value to convert.

        Returns:
            Union[bool, datetime, List[datetime], timedelta, str]: The converted value.
        """
        if isinstance(value, bytes):
            s = value.decode("utf-8")
        else:
            s = value

        if s == "True":
            return True
        elif s == "False":
            return False

        # RAS datetime formats, e.g. "01Jan2024 12:00:00" and "01Jan2024 1200".
        ras_datetime_format1_re = r"\d{2}\w{3}\d{4} \d{2}:\d{2}:\d{2}"
        ras_datetime_format2_re = r"\d{2}\w{3}\d{4} \d{2}\d{2}"
        ras_duration_format_re = r"\d{2}:\d{2}:\d{2}"

        if re.match(rf"^{ras_datetime_format1_re}", s):
            # "A to B" windows become a two-element list of datetimes.
            if re.match(rf"^{ras_datetime_format1_re} to {ras_datetime_format1_re}$", s):
                split = s.split(" to ")
                return [
                    HdfBase._parse_ras_datetime(split[0]),
                    HdfBase._parse_ras_datetime(split[1]),
                ]
            return HdfBase._parse_ras_datetime(s)
        elif re.match(rf"^{ras_datetime_format2_re}", s):
            if re.match(rf"^{ras_datetime_format2_re} to {ras_datetime_format2_re}$", s):
                split = s.split(" to ")
                return [
                    HdfBase._parse_ras_simulation_window_datetime(split[0]),
                    HdfBase._parse_ras_simulation_window_datetime(split[1]),
                ]
            return HdfBase._parse_ras_simulation_window_datetime(s)
        elif re.match(rf"^{ras_duration_format_re}$", s):
            return HdfBase._parse_duration(s)
        return s

    @staticmethod
    def _convert_ras_hdf_value(value: Any) -> Union[None, bool, str, List[str], int, float, List[int], List[float]]:
        """
        Convert a value from a HEC-RAS HDF file into a Python object.

        Args:
            value (Any): The value to convert.

        Returns:
            Union[None, bool, str, List[str], int, float, List[int], List[float]]:
                The converted value. NaN and empty sequences convert to None.
        """
        if isinstance(value, np.floating) and np.isnan(value):
            return None
        elif isinstance(value, (bytes, np.bytes_)):
            return value.decode('utf-8')
        elif isinstance(value, np.integer):
            return int(value)
        elif isinstance(value, np.floating):
            return float(value)
        elif isinstance(value, (int, float)):
            return value
        elif isinstance(value, (list, tuple, np.ndarray)):
            # Empty sequence: there is no scalar to unwrap (indexing value[0]
            # would raise IndexError), so treat it like a missing value.
            if len(value) == 0:
                return None
            if len(value) > 1:
                return [HdfUtils._convert_ras_hdf_value(v) for v in value]
            # Single-element sequences are unwrapped to a scalar.
            return HdfUtils._convert_ras_hdf_value(value[0])
        else:
            return str(value)

    @staticmethod
    def _parse_ras_datetime_ms(datetime_str: str) -> datetime:
        """
        Private method to parse a datetime string with milliseconds from a RAS file.

        Args:
            datetime_str (str): The datetime string to parse; the last three
                characters are the millisecond field.

        Returns:
            datetime: The parsed datetime object.
        """
        milliseconds = int(datetime_str[-3:])
        microseconds = milliseconds * 1000
        # Strip ".mmm" (4 chars) and parse the remainder with the base parser.
        parsed_dt = HdfBase._parse_ras_datetime(datetime_str[:-4]).replace(microsecond=microseconds)
        return parsed_dt

    @staticmethod
    def _ras_timesteps_to_datetimes(timesteps: np.ndarray, start_time: datetime, time_unit: str, round_to: str = "0.1 s") -> List[datetime]:
        """
        Convert an array of RAS timesteps into a list of datetime objects.

        Args:
            timesteps (np.ndarray): An array of RAS timesteps.
            start_time (datetime): The start time of the simulation.
            time_unit (str): The time unit of the timesteps.
            round_to (str): The time unit to round the datetimes to. Default is "0.1 s".

        Returns:
            List[datetime]: A list of datetime objects corresponding to the timesteps.

        Note:
            The `round_to` parameter uses Pandas time string notation. For example:
            - "0.1 s" means round to the nearest 0.1 seconds
            - "1 s" means round to the nearest second
            - "1 min" means round to the nearest minute
            For more options, see Pandas documentation on time string notation.
        """
        return [
            start_time + pd.Timedelta(timestep, unit=time_unit).round(round_to)
            for timestep in timesteps.astype(np.float64)
        ]

    @staticmethod
    def _hdf5_attrs_to_dict(attrs: Union[h5py.AttributeManager, Dict], prefix: Optional[str] = None) -> Dict:
        """
        Private method to convert HDF5 attributes to a Python dictionary.

        Args:
            attrs (Union[h5py.AttributeManager, Dict]): The attributes to convert.
            prefix (Optional[str]): A prefix to add to the attribute keys.

        Returns:
            Dict: A dictionary of converted attributes.
        """
        result = {}
        for key, value in attrs.items():
            if prefix:
                key = f"{prefix}/{key}"
            if isinstance(value, (np.ndarray, list)):
                result[key] = [HdfUtils._convert_ras_hdf_value(v) for v in value]
            else:
                result[key] = HdfUtils._convert_ras_hdf_value(value)
        return result

    @staticmethod
    def parse_run_time_window(window: str) -> Tuple[datetime, datetime]:
        """
        Parse a run time window string into a tuple of datetime objects.

        Args:
            window (str): The run time window string, formatted as
                "<start> to <end>".

        Returns:
            Tuple[datetime, datetime]: A tuple containing two datetime objects representing the start and end of the run
            time window.
        """
        split = window.split(" to ")
        begin = HdfBase._parse_ras_datetime(split[0])
        end = HdfBase._parse_ras_datetime(split[1])
        return begin, end

    @staticmethod
    @standardize_input(file_type='plan_hdf')
    def get_2d_flow_area_names_and_counts(hdf_path: Path) -> List[Tuple[str, int]]:
        """
        Get the names and cell counts of 2D flow areas from the HDF file.

        Args:
            hdf_path (Path): Path to the HDF file.

        Returns:
            List[Tuple[str, int]]: A list of tuples containing the name and cell count of each 2D flow area.
                Empty list if the file has no 2D flow areas.

        Raises:
            ValueError: If there's an error reading the HDF file or accessing the required data.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf_file:
                flow_area_2d_path = "Geometry/2D Flow Areas"
                if flow_area_2d_path not in hdf_file:
                    return []

                attributes = hdf_file[f"{flow_area_2d_path}/Attributes"][()]
                names = [HdfUtils._convert_ras_hdf_string(name) for name in attributes["Name"]]

                # Cell Info rows are (start_index, cell_count) per flow area;
                # presumably index 1 is the count — TODO confirm against HEC-RAS layout.
                cell_info = hdf_file[f"{flow_area_2d_path}/Cell Info"][()]
                cell_counts = [info[1] for info in cell_info]

                return list(zip(names, cell_counts))
        except Exception as e:
            logger.error(f"Error reading 2D flow area names and counts from {hdf_path}: {str(e)}")
            raise ValueError(f"Failed to get 2D flow area names and counts: {str(e)}")

    @staticmethod
    @standardize_input(file_type='plan_hdf')
    def projection(hdf_path: Path) -> Optional[str]:
        """
        Get the projection information from the HDF file.

        Args:
            hdf_path (Path): Path to the HDF file.

        Returns:
            Optional[str]: The projection information as a string (WKT), or None
                if not found or unreadable.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf_file:
                proj_wkt = hdf_file.attrs.get("Projection")
                if proj_wkt is None:
                    return None
                if isinstance(proj_wkt, (bytes, np.bytes_)):
                    proj_wkt = proj_wkt.decode("utf-8")
                return proj_wkt
        except Exception as e:
            logger.error(f"Error reading projection from {hdf_path}: {str(e)}")
            return None