dagmc-h5m-file-inspector 0.4.3__py3-none-any.whl → 0.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,43 +1,540 @@
+ from dataclasses import dataclass
  from pathlib import Path
- from typing import List, Optional
+ from typing import Dict, List, Literal, Optional, Sequence, Tuple, Union
 
- import pymoab as mb
- from pymoab import core, types
+ import h5py
+ import numpy as np
 
 
- def load_moab_file(filename: str):
-     """Loads a DAGMC h5m into a Moab Core object and returns the object
+ RANGE_COMPRESSED_FLAG = 0x8
 
-     Arguments:
-         filename: the filename of the DAGMC h5m file
 
-     Returns:
-         A pymoab.core.Core()
-     """
+ @dataclass(frozen=True)
+ class _SetInfo:
+     """Internal dataclass for storing MOAB set information."""
+     handle: int
+     contents: Union[Sequence[int], Sequence[Tuple[int, int]]]
+     contents_are_ranges: bool
+     children: Sequence[int]
+     parents: Sequence[int]
+     flags: int
 
-     if not Path(filename).is_file():
-         msg = f"filename provided ({filename}) does not exist"
-         raise FileNotFoundError(msg)
+ 
+ # ============================================================================
+ # h5py backend implementation
+ # ============================================================================
+ 
+ 
+ def _get_volumes_h5py(filename: str) -> List[int]:
+     """Get volume IDs using h5py backend."""
+     with h5py.File(filename, "r") as f:
+         global_ids = f["tstt/sets/tags/GLOBAL_ID"][()]
+         cat_ids = f["tstt/tags/CATEGORY/id_list"][()]
+         cat_vals = f["tstt/tags/CATEGORY/values"][()]
+ 
+         cat_lookup = {}
+         for eid, val in zip(cat_ids, cat_vals):
+             cat_lookup[int(eid)] = val.tobytes().decode("ascii").rstrip("\x00")
+ 
+         base_entity_id = int(cat_ids.min()) - 1
+ 
+         volume_ids = []
+         for i in range(len(global_ids)):
+             entity_id = base_entity_id + i
+             if cat_lookup.get(entity_id) == "Volume":
+                 volume_ids.append(int(global_ids[i]))
+ 
+         return sorted(set(volume_ids))
+ 
+ 
+ def _get_materials_h5py(filename: str, remove_prefix: bool) -> List[str]:
+     """Get material names using h5py backend."""
+     with h5py.File(filename, "r") as f:
+         name_ids = f["tstt/tags/NAME/id_list"][()]
+         name_vals = f["tstt/tags/NAME/values"][()]
+ 
+         materials_list = []
+         for eid, val in zip(name_ids, name_vals):
+             name = val.tobytes().decode("ascii").rstrip("\x00")
+             if name.startswith("mat:"):
+                 if remove_prefix:
+                     materials_list.append(name[4:])
+                 else:
+                     materials_list.append(name)
+ 
+         return sorted(set(materials_list))
+ 
+ 
+ def _get_volumes_and_materials_h5py(filename: str, remove_prefix: bool) -> dict:
+     """Get volume-to-material mapping using h5py backend."""
+     with h5py.File(filename, "r") as f:
+         global_ids = f["tstt/sets/tags/GLOBAL_ID"][()]
+         cat_ids = f["tstt/tags/CATEGORY/id_list"][()]
+         cat_vals = f["tstt/tags/CATEGORY/values"][()]
+         name_ids = f["tstt/tags/NAME/id_list"][()]
+         name_vals = f["tstt/tags/NAME/values"][()]
+ 
+         cat_lookup = {}
+         for eid, val in zip(cat_ids, cat_vals):
+             cat_lookup[int(eid)] = val.tobytes().decode("ascii").rstrip("\x00")
+ 
+         name_lookup = {}
+         for eid, val in zip(name_ids, name_vals):
+             name_lookup[int(eid)] = val.tobytes().decode("ascii").rstrip("\x00")
+ 
+         base_entity_id = int(cat_ids.min()) - 1
+ 
+         volumes = []
+         for i in range(len(global_ids)):
+             entity_id = base_entity_id + i
+             if cat_lookup.get(entity_id) == "Volume":
+                 volumes.append({"set_idx": i, "gid": int(global_ids[i])})
+ 
+         groups = []
+         for i in range(len(global_ids)):
+             entity_id = base_entity_id + i
+             name = name_lookup.get(entity_id, "")
+             if name.startswith("mat:"):
+                 groups.append({"set_idx": i, "name": name})
+ 
+         volumes_sorted = sorted(volumes, key=lambda x: x["gid"])
+         groups_sorted = sorted(groups, key=lambda x: x["set_idx"])
+ 
+         # Pair the i-th volume (ordered by global id) with the i-th material
+         # group (ordered by set index); this assumes a one-to-one,
+         # order-preserving correspondence between volumes and groups.
+         vol_mat = {}
+         for vol, grp in zip(volumes_sorted, groups_sorted):
+             material_name = grp["name"]
+             if remove_prefix:
+                 material_name = material_name[4:]
+             vol_mat[vol["gid"]] = material_name
+ 
+         return vol_mat
+ 
+ 
+ def _get_bounding_box_h5py(filename: str) -> Tuple[np.ndarray, np.ndarray]:
+     """Get bounding box using h5py backend."""
+     with h5py.File(filename, "r") as f:
+         coords = f["tstt/nodes/coordinates"][()]
+         lower_left = coords.min(axis=0)
+         upper_right = coords.max(axis=0)
+         return lower_left, upper_right
+ 
+ 
+ def _calculate_triangle_volumes(vertices: np.ndarray, triangles: np.ndarray) -> float:
+     """Calculate the volume enclosed by a triangular mesh using signed tetrahedra.
+ 
+     For a closed mesh, the sum of signed tetrahedra volumes (formed by each
+     triangle and the origin) gives the enclosed volume.
+     """
+     # Get vertices for each triangle
+     v0 = vertices[triangles[:, 0]]
+     v1 = vertices[triangles[:, 1]]
+     v2 = vertices[triangles[:, 2]]
+ 
+     # Signed volume of tetrahedron = (v0 · (v1 × v2)) / 6
+     cross = np.cross(v1, v2)
+     signed_volumes = np.sum(v0 * cross, axis=1) / 6.0
+ 
+     return abs(np.sum(signed_volumes))
+ 
+ 
+ # ============================================================================
+ # h5py volume calculation helpers
+ # ============================================================================
+ 
+ 
+ def _read_nodes_h5py(f: h5py.File) -> Tuple[np.ndarray, int]:
+     """Read node coordinates and the start ID from an h5py file."""
+     nodes = f["tstt/nodes/coordinates"]
+     coords = nodes[...]
+     node_start = int(nodes.attrs["start_id"])
+     return coords, node_start
+ 
+ 
+ def _read_tri3_connectivity_h5py(f: h5py.File) -> Tuple[np.ndarray, int]:
+     """Read triangle connectivity and the start ID from an h5py file."""
+     tri = f["tstt/elements/Tri3/connectivity"]
+     tri_conn = tri[...]
+     tri_start = int(tri.attrs["start_id"])
+     return tri_conn, tri_start
+ 
+ 
+ def _slices_from_end_indices(ends: np.ndarray) -> List[Optional[slice]]:
+     """Convert cumulative end indices to slices (None marks an empty set)."""
+     prev_end = -1
+     slices: List[Optional[slice]] = []
+     for end in ends.tolist():
+         start = prev_end + 1
+         if end >= start:
+             slices.append(slice(start, end + 1))
+         else:
+             slices.append(None)
+         prev_end = end
+     return slices
+ 
+ 
+ def _read_sets_h5py(f: h5py.File) -> List[_SetInfo]:
+     """Read all entity sets from an h5py file."""
+     list_ds = f["tstt/sets/list"]
+     list_arr = list_ds[...]
+     start_id = int(list_ds.attrs["start_id"])
+     contents = f["tstt/sets/contents"][...]
+     children = f["tstt/sets/children"][...]
+     parents = f["tstt/sets/parents"][...]
+ 
+     contents_slices = _slices_from_end_indices(list_arr[:, 0])
+     children_slices = _slices_from_end_indices(list_arr[:, 1])
+     parents_slices = _slices_from_end_indices(list_arr[:, 2])
+ 
+     sets: List[_SetInfo] = []
+     for idx in range(list_arr.shape[0]):
+         handle = start_id + idx
+         flags = int(list_arr[idx, 3])
+ 
+         contents_slice = contents_slices[idx]
+         if contents_slice is None:
+             contents_data: Union[Sequence[int], Sequence[Tuple[int, int]]] = []
+             contents_are_ranges = False
+         else:
+             data = contents[contents_slice]
+             if flags & RANGE_COMPRESSED_FLAG:
+                 if len(data) % 2 != 0:
+                     raise ValueError(
+                         f"Range-compressed contents for set {handle} "
+                         f"has odd length {len(data)}"
+                     )
+                 contents_data = [
+                     (int(data[i]), int(data[i + 1]))
+                     for i in range(0, len(data), 2)
+                 ]
+                 contents_are_ranges = True
+             else:
+                 contents_data = [int(v) for v in data]
+                 contents_are_ranges = False
+ 
+         children_slice = children_slices[idx]
+         if children_slice is None:
+             child_list: Sequence[int] = []
+         else:
+             child_list = [int(v) for v in children[children_slice]]
+ 
+         parents_slice = parents_slices[idx]
+         if parents_slice is None:
+             parent_list: Sequence[int] = []
+         else:
+             parent_list = [int(v) for v in parents[parents_slice]]
+ 
+         sets.append(
+             _SetInfo(
+                 handle=handle,
+                 contents=contents_data,
+                 contents_are_ranges=contents_are_ranges,
+                 children=child_list,
+                 parents=parent_list,
+                 flags=flags,
+             )
+         )
+ 
+     return sets
+ 
+ 
+ def _read_tag_h5py(f: h5py.File, tag_name: str) -> Dict[int, object]:
+     """Read a tag from an h5py file and return a handle -> value mapping."""
+     try:
+         tag_group = f[f"tstt/tags/{tag_name}"]
+     except KeyError:
+         return {}
+ 
+     if "id_list" not in tag_group or "values" not in tag_group:
+         return {}
+ 
+     ids = tag_group["id_list"][...]
+     values = tag_group["values"][...]
+ 
+     decoded: Dict[int, object] = {}
+     if values.dtype.kind in {"S", "V"}:
+         for h, v in zip(ids, values):
+             if hasattr(v, "tobytes"):
+                 data = v.tobytes()
+             else:
+                 data = bytes(v)
+             decoded[int(h)] = data.split(b"\x00", 1)[0].decode("ascii", "replace")
+     else:
+         for h, v in zip(ids, values):
+             decoded[int(h)] = int(v) if np.issubdtype(values.dtype, np.integer) else v
+ 
+     return decoded
+ 
+ 
+ def _read_geom_sense_h5py(f: h5py.File) -> Dict[int, Tuple[int, int]]:
+     """Read the GEOM_SENSE_2 tag from an h5py file."""
+     try:
+         tag_group = f["tstt/tags/GEOM_SENSE_2"]
+     except KeyError:
+         return {}
+ 
+     if "id_list" not in tag_group or "values" not in tag_group:
+         return {}
+ 
+     ids = tag_group["id_list"][...]
+     values = tag_group["values"][...]
+     return {
+         int(h): (int(v[0]), int(v[1]))
+         for h, v in zip(ids, values)
+     }
+ 
+ 
+ def _expand_set_contents(
+     set_info: _SetInfo,
+     target_min: Optional[int] = None,
+     target_max: Optional[int] = None,
+ ) -> List[int]:
+     """Expand set contents, handling range compression."""
+     if not set_info.contents:
+         return []
+ 
+     if not set_info.contents_are_ranges:
+         return [int(v) for v in set_info.contents]
+ 
+     handles: List[int] = []
+     for start, count in set_info.contents:
+         end = start + count - 1
+         if target_min is not None:
+             start = max(start, target_min)
+         if target_max is not None:
+             end = min(end, target_max)
+         if start <= end:
+             handles.extend(range(start, end + 1))
+     return handles
+ 
+ 
+ def _surface_sign_for_volume(
+     vol_handle: int,
+     sense: Optional[Tuple[int, int]],
+ ) -> float:
+     """Determine the surface sign (+1 or -1) relative to a volume."""
+     if sense is None:
+         return 1.0
+     forward, reverse = sense
+     if vol_handle == forward and vol_handle != reverse:
+         return 1.0
+     if vol_handle == reverse and vol_handle != forward:
+         return -1.0
+     return 1.0
+ 
+ 
+ def _tri_indices_for_set(
+     set_info: _SetInfo,
+     *,
+     tri_start: int,
+     tri_end: int,
+ ) -> np.ndarray:
+     """Get triangle indices (0-based) for a set."""
+     if not set_info.contents:
+         return np.array([], dtype=np.int64)
+ 
+     if set_info.contents_are_ranges:
+         indices: List[int] = []
+         for start, count in set_info.contents:
+             end = start + count - 1
+             if end < tri_start or start > tri_end:
+                 continue
+             start = max(start, tri_start)
+             end = min(end, tri_end)
+             indices.extend(range(start - tri_start, end - tri_start + 1))
+         return np.asarray(indices, dtype=np.int64)
+ 
+     handles = [
+         h for h in set_info.contents
+         if tri_start <= h <= tri_end
+     ]
+     if not handles:
+         return np.array([], dtype=np.int64)
+     return np.asarray(handles, dtype=np.int64) - tri_start
+ 
+ 
+ def _signed_volume_from_tris(
+     coords: np.ndarray,
+     tri_conn0: np.ndarray,
+     tri_indices: np.ndarray,
+ ) -> float:
+     """Calculate the signed volume from triangles using the tetrahedra method."""
+     tri_nodes = tri_conn0[tri_indices]
+     v0 = coords[tri_nodes[:, 0]]
+     v1 = coords[tri_nodes[:, 1]]
+     v2 = coords[tri_nodes[:, 2]]
+     return float(np.einsum("ij,ij->i", v0, np.cross(v1, v2)).sum() / 6.0)
+ 
+ 
+ def _volume_for_volume_set(
+     *,
+     vol_handle: int,
+     sets_by_handle: Dict[int, _SetInfo],
+     surface_handles: set,
+     geom_sense: Dict[int, Tuple[int, int]],
+     coords: np.ndarray,
+     tri_conn0: np.ndarray,
+     tri_start: int,
+     tri_end: int,
+ ) -> float:
+     """Calculate the geometric volume for a single volume entity."""
+     volume_set = sets_by_handle.get(vol_handle)
+     if volume_set is None:
+         return 0.0
+ 
+     if volume_set.children:
+         surfaces = [h for h in volume_set.children if h in surface_handles]
+     else:
+         surfaces = [
+             h for h in surface_handles
+             if h in geom_sense and vol_handle in geom_sense[h]
+         ]
+ 
+     total = 0.0
+     for surf_handle in surfaces:
+         surf_set = sets_by_handle.get(surf_handle)
+         if surf_set is None:
+             continue
+ 
+         sense = geom_sense.get(surf_handle)
+         sign = _surface_sign_for_volume(vol_handle, sense)
+ 
+         tri_indices = _tri_indices_for_set(
+             surf_set,
+             tri_start=tri_start,
+             tri_end=tri_end,
+         )
+         if tri_indices.size == 0:
+             continue
+ 
+         total += sign * _signed_volume_from_tris(
+             coords,
+             tri_conn0,
+             tri_indices,
+         )
+ 
+     return total
+ 
+ 
+ def _get_volumes_sizes_h5py(filename: str) -> dict:
+     """Get geometric volume sizes for each volume ID using h5py backend.
+ 
+     Uses the parent-child relationships (Volume -> Surfaces) and GEOM_SENSE_2
+     to properly assign surfaces to volumes with correct orientation.
+     """
+     with h5py.File(filename, "r") as f:
+         coords, node_start = _read_nodes_h5py(f)
+         tri_conn, tri_start = _read_tri3_connectivity_h5py(f)
+         tri_conn0 = tri_conn - node_start
+         tri_end = tri_start + tri_conn.shape[0] - 1
+ 
+         sets = _read_sets_h5py(f)
+         sets_by_handle = {s.handle: s for s in sets}
+ 
+         categories = _read_tag_h5py(f, "CATEGORY")
+         geom_dim = _read_tag_h5py(f, "GEOM_DIMENSION")
+         geom_sense = _read_geom_sense_h5py(f)
+ 
+         # Get GLOBAL_ID for sets - this can be stored as:
+         # 1. Dense array in tstt/sets/tags/GLOBAL_ID
+         # 2. Sparse tag in tstt/tags/GLOBAL_ID with id_list/values
+         global_ids: Dict[int, int] = {}
+ 
+         # Try dense array first (more common)
+         sets_start_id = int(f["tstt/sets/list"].attrs["start_id"])
+         if "tstt/sets/tags/GLOBAL_ID" in f:
+             dense_gids = f["tstt/sets/tags/GLOBAL_ID"][...]
+             for idx, gid in enumerate(dense_gids):
+                 handle = sets_start_id + idx
+                 global_ids[handle] = int(gid)
+         else:
+             # Fall back to sparse tag
+             global_ids = _read_tag_h5py(f, "GLOBAL_ID")
+ 
+         # Build set of surface handles
+         surface_handles = {
+             h
+             for h, cat in categories.items()
+             if cat == "Surface"
+         }
+         surface_handles.update(
+             h for h, dim in geom_dim.items() if dim == 2
+         )
+ 
+         # Build set of volume handles
+         volume_handles = {
+             h
+             for h, cat in categories.items()
+             if cat == "Volume"
+         }
+         volume_handles.update(
+             h for h, dim in geom_dim.items() if dim == 3
+         )
+ 
+         volume_sizes = {}
+         for vol_handle in volume_handles:
+             vol_gid = global_ids.get(vol_handle)
+             if vol_gid is None:
+                 continue
+ 
+             size = _volume_for_volume_set(
+                 vol_handle=vol_handle,
+                 sets_by_handle=sets_by_handle,
+                 surface_handles=surface_handles,
+                 geom_sense=geom_sense,
+                 coords=coords,
+                 tri_conn0=tri_conn0,
+                 tri_start=tri_start,
+                 tri_end=tri_end,
+             )
+             volume_sizes[int(vol_gid)] = abs(size)
+ 
+         return volume_sizes
+ 
+ 
+ # ============================================================================
+ # pymoab backend implementation
+ # ============================================================================
+ 
+ 
+ def _check_pymoab_available():
+     """Check if pymoab is available and raise ImportError if not."""
+     try:
+         import pymoab  # noqa: F401
+     except ImportError:
+         raise ImportError(
+             "pymoab is not installed. Install it to use backend='pymoab', "
+             "or use the default h5py backend."
+         )
+ 
+ 
+ def _load_moab_file(filename: str):
+     """Load a DAGMC h5m file into a pymoab Core object."""
+     from pymoab import core
 
      moab_core = core.Core()
      moab_core.load_file(filename)
      return moab_core
 
 
- def get_volumes_from_h5m(filename: str) -> List[int]:
-     """Reads in a DAGMC h5m file and uses PyMoab to find the volume ids of the
-     materials in the file.
+ def _get_groups_pymoab(mbcore):
+     """Get group entities using pymoab."""
+     import pymoab as mb
 
-     Arguments:
-         filename: the filename of the DAGMC h5m file
+     category_tag = mbcore.tag_get_handle(mb.types.CATEGORY_TAG_NAME)
+     group_category = ["Group"]
+     group_ents = mbcore.get_entities_by_type_and_tag(
+         0, mb.types.MBENTITYSET, category_tag, group_category
+     )
+     return group_ents
 
-     Returns:
-         A list of volume ids
-     """
 
-     # create a new PyMOAB instance and load the specified DAGMC file
-     mbcore = load_moab_file(filename)
-     group_ents = get_groups(mbcore)
+ def _get_volumes_pymoab(filename: str) -> List[int]:
+     """Get volume IDs using pymoab backend."""
+     import pymoab as mb
+ 
+     mbcore = _load_moab_file(filename)
+     group_ents = _get_groups_pymoab(mbcore)
      name_tag = mbcore.tag_get_handle(mb.types.NAME_TAG_NAME)
      id_tag = mbcore.tag_get_handle(mb.types.GLOBAL_ID_TAG_NAME)
      ids = []
@@ -46,7 +543,6 @@ def get_volumes_from_h5m(filename: str) -> List[int]:
          group_name = mbcore.tag_get_data(name_tag, group_ent)[0][0]
          if group_name.startswith("mat:"):
              vols = mbcore.get_entities_by_type(group_ent, mb.types.MBENTITYSET)
- 
              for vol in vols:
                  id = mbcore.tag_get_data(id_tag, vol)[0][0]
                  ids.append(id.item())
@@ -54,40 +550,16 @@ def get_volumes_from_h5m(filename: str) -> List[int]:
      return sorted(set(list(ids)))
 
 
- def get_groups(mbcore):
- 
-     category_tag = mbcore.tag_get_handle(mb.types.CATEGORY_TAG_NAME)
- 
-     group_category = ["Group"]
- 
-     group_ents = mbcore.get_entities_by_type_and_tag(
-         0, mb.types.MBENTITYSET, category_tag, group_category
-     )
- 
-     return group_ents
- 
- 
- def get_materials_from_h5m(
-     filename: str, remove_prefix: Optional[bool] = True
- ) -> List[str]:
-     """Reads in a DAGMC h5m file and uses PyMoab to find the material tags in
-     the file.
- 
-     Arguments:
-         filename: the filename of the DAGMC h5m file
-         remove_prefix: remove the mat: prefix from the material tag or not
+ def _get_materials_pymoab(filename: str, remove_prefix: bool) -> List[str]:
+     """Get material names using pymoab backend."""
+     import pymoab as mb
 
-     Returns:
-         A list of material tags
-     """
- 
-     mbcore = load_moab_file(filename)
-     group_ents = get_groups(mbcore)
+     mbcore = _load_moab_file(filename)
+     group_ents = _get_groups_pymoab(mbcore)
      name_tag = mbcore.tag_get_handle(mb.types.NAME_TAG_NAME)
 
      materials_list = []
      for group_ent in group_ents:
- 
          group_name = mbcore.tag_get_data(name_tag, group_ent)[0][0]
          if group_name.startswith("mat:"):
              if remove_prefix:
@@ -98,20 +570,20 @@ def get_materials_from_h5m(
      return sorted(set(materials_list))
 
 
- def get_vol_mat_map(group_ents, mbcore, remove_prefix) -> dict:
+ def _get_volumes_and_materials_pymoab(filename: str, remove_prefix: bool) -> dict:
+     """Get volume-to-material mapping using pymoab backend."""
+     import pymoab as mb
+ 
+     mbcore = _load_moab_file(filename)
+     group_ents = _get_groups_pymoab(mbcore)
      name_tag = mbcore.tag_get_handle(mb.types.NAME_TAG_NAME)
      id_tag = mbcore.tag_get_handle(mb.types.GLOBAL_ID_TAG_NAME)
      vol_mat = {}
 
      for group_ent in group_ents:
- 
          group_name = mbcore.tag_get_data(name_tag, group_ent)[0][0]
-         # optionally confirm that this is a material!
- 
          if group_name.startswith("mat:"):
- 
              vols = mbcore.get_entities_by_type(group_ent, mb.types.MBENTITYSET)
- 
              for vol in vols:
                  id = mbcore.tag_get_data(id_tag, vol)[0][0].item()
                  if remove_prefix:
@@ -122,21 +594,333 @@ def get_vol_mat_map(group_ents, mbcore, remove_prefix) -> dict:
      return vol_mat
 
 
+ def _get_bounding_box_pymoab(filename: str) -> Tuple[np.ndarray, np.ndarray]:
+     """Get bounding box using pymoab backend."""
+     import pymoab as mb
+ 
+     mbcore = _load_moab_file(filename)
+     # Get all vertices
+     vertices = mbcore.get_entities_by_type(0, mb.types.MBVERTEX)
+     coords = mbcore.get_coords(vertices)
+     coords = coords.reshape(-1, 3)
+ 
+     lower_left = coords.min(axis=0)
+     upper_right = coords.max(axis=0)
+     return lower_left, upper_right
+ 
+ 
+ def _get_volumes_sizes_pymoab(filename: str) -> dict:
+     """Get geometric volume sizes for each volume ID using pymoab backend.
+ 
+     Uses the GEOM_SENSE_2 tag to determine surface orientation relative to each
+     volume, enabling correct signed volume calculation for nested geometries.
+     """
+     import pymoab as mb
+ 
+     mbcore = _load_moab_file(filename)
+     category_tag = mbcore.tag_get_handle(mb.types.CATEGORY_TAG_NAME)
+     id_tag = mbcore.tag_get_handle(mb.types.GLOBAL_ID_TAG_NAME)
+ 
+     # Get the GEOM_SENSE_2 tag - this stores [forward_vol, reverse_vol] for each surface
+     try:
+         geom_sense_tag = mbcore.tag_get_handle("GEOM_SENSE_2")
+     except RuntimeError:
+         geom_sense_tag = None
+ 
+     # Get all volumes
+     volume_ents = mbcore.get_entities_by_type_and_tag(
+         0, mb.types.MBENTITYSET, category_tag, ["Volume"]
+     )
+ 
+     volume_sizes = {}
+ 
+     for vol_ent in volume_ents:
+         vol_gid = mbcore.tag_get_data(id_tag, vol_ent)[0][0].item()
+ 
+         # Get child surfaces of this volume
+         surfaces = mbcore.get_child_meshsets(vol_ent)
+ 
+         total_signed_volume = 0.0
+ 
+         for surf in surfaces:
+             # Determine the sign for this surface relative to this volume
+             sign = 1.0
+             if geom_sense_tag is not None:
+                 try:
+                     sense_data = mbcore.tag_get_data(geom_sense_tag, surf)
+                     # sense_data is [forward_vol, reverse_vol]
+                     forward_vol = sense_data[0][0]
+                     reverse_vol = sense_data[0][1]
+                     if vol_ent == forward_vol and vol_ent != reverse_vol:
+                         sign = 1.0
+                     elif vol_ent == reverse_vol and vol_ent != forward_vol:
+                         sign = -1.0
+                     # If vol_ent equals both or neither, default to +1
+                 except RuntimeError:
+                     pass  # Tag not set for this surface, use default sign
+ 
+             # Get triangles in this surface
+             tris = mbcore.get_entities_by_type(surf, mb.types.MBTRI)
+ 
+             if not tris:
+                 continue
+ 
+             # Get all unique vertices for this surface's triangles
+             all_verts = set()
+             for tri in tris:
+                 conn = mbcore.get_connectivity(tri)
+                 all_verts.update(conn)
+ 
+             all_verts = list(all_verts)
+             vert_to_idx = {v: i for i, v in enumerate(all_verts)}
+ 
+             # Get coordinates
+             coords = mbcore.get_coords(all_verts).reshape(-1, 3)
+ 
+             # Build triangle array with local indices
+             tri_array = []
+             for tri in tris:
+                 conn = mbcore.get_connectivity(tri)
+                 tri_array.append([vert_to_idx[v] for v in conn])
+             tri_array = np.array(tri_array)
+ 
+             # Calculate signed volume for this surface's triangles
+             v0 = coords[tri_array[:, 0]]
+             v1 = coords[tri_array[:, 1]]
+             v2 = coords[tri_array[:, 2]]
+             cross = np.cross(v1, v2)
+             surface_signed_volume = np.sum(v0 * cross, axis=1).sum() / 6.0
+ 
+             total_signed_volume += sign * surface_signed_volume
+ 
+         volume_sizes[vol_gid] = abs(total_signed_volume)
+ 
+     return volume_sizes
+ 
+ 
+ # ============================================================================
+ # Public API
+ # ============================================================================
+ 
+ 
+ def get_volumes_from_h5m(
+     filename: str,
+     backend: Literal["h5py", "pymoab"] = "h5py",
+ ) -> List[int]:
+     """Reads in a DAGMC h5m file and finds the volume ids.
+ 
+     Arguments:
+         filename: the filename of the DAGMC h5m file
+         backend: the backend to use for reading the file ("h5py" or "pymoab")
+ 
+     Returns:
+         A list of volume ids
+     """
+     if not Path(filename).is_file():
+         raise FileNotFoundError(f"filename provided ({filename}) does not exist")
+ 
+     if backend == "pymoab":
+         _check_pymoab_available()
+         return _get_volumes_pymoab(filename)
+     else:
+         return _get_volumes_h5py(filename)
+ 
+ 
+ def get_materials_from_h5m(
+     filename: str,
+     remove_prefix: bool = True,
+     backend: Literal["h5py", "pymoab"] = "h5py",
+ ) -> List[str]:
+     """Reads in a DAGMC h5m file and finds the material tags.
+ 
+     Arguments:
+         filename: the filename of the DAGMC h5m file
+         remove_prefix: remove the mat: prefix from the material tag or not
+         backend: the backend to use for reading the file ("h5py" or "pymoab")
+ 
+     Returns:
+         A list of material tags
+     """
+     if not Path(filename).is_file():
+         raise FileNotFoundError(f"filename provided ({filename}) does not exist")
+ 
+     if backend == "pymoab":
+         _check_pymoab_available()
+         return _get_materials_pymoab(filename, remove_prefix)
+     else:
+         return _get_materials_h5py(filename, remove_prefix)
+ 
+ 
  def get_volumes_and_materials_from_h5m(
-     filename: str, remove_prefix: Optional[bool] = True
+     filename: str,
+     remove_prefix: bool = True,
+     backend: Literal["h5py", "pymoab"] = "h5py",
  ) -> dict:
-     """Reads in a DAGMC h5m file and uses PyMoab to find the volume ids with
-     their associated material tags.
+     """Reads in a DAGMC h5m file and finds the volume ids with their
+     associated material tags.
 
      Arguments:
          filename: the filename of the DAGMC h5m file
          remove_prefix: remove the mat: prefix from the material tag or not
+         backend: the backend to use for reading the file ("h5py" or "pymoab")
 
      Returns:
          A dictionary of volume ids and material tags
      """
+     if not Path(filename).is_file():
+         raise FileNotFoundError(f"filename provided ({filename}) does not exist")
 
-     mbcore = load_moab_file(filename)
-     group_ents = get_groups(mbcore)
-     vol_mat = get_vol_mat_map(group_ents, mbcore, remove_prefix)
-     return vol_mat
+     if backend == "pymoab":
+         _check_pymoab_available()
+         return _get_volumes_and_materials_pymoab(filename, remove_prefix)
+     else:
+         return _get_volumes_and_materials_h5py(filename, remove_prefix)
+ 
+ 
+ def get_bounding_box_from_h5m(
+     filename: str,
+     backend: Literal["h5py", "pymoab"] = "h5py",
+ ) -> Tuple[np.ndarray, np.ndarray]:
+     """Reads in a DAGMC h5m file and returns the axis-aligned bounding box.
+ 
+     Arguments:
+         filename: the filename of the DAGMC h5m file
+         backend: the backend to use for reading the file ("h5py" or "pymoab")
+ 
+     Returns:
+         A tuple of (lower_left, upper_right) numpy arrays representing
+         the corners of the bounding box
+     """
+     if not Path(filename).is_file():
+         raise FileNotFoundError(f"filename provided ({filename}) does not exist")
+ 
+     if backend == "pymoab":
+         _check_pymoab_available()
+         return _get_bounding_box_pymoab(filename)
+     else:
+         return _get_bounding_box_h5py(filename)
+ 
+ 
+ def get_volumes_sizes_from_h5m_by_cell_id(
+     filename: str,
+     backend: Literal["h5py", "pymoab"] = "h5py",
+ ) -> dict:
+     """Reads in a DAGMC h5m file and calculates the geometric volume
+     (size) of each volume entity.
+ 
+     Arguments:
+         filename: the filename of the DAGMC h5m file
+         backend: the backend to use for reading the file ("h5py" or "pymoab")
+ 
+     Returns:
+         A dictionary mapping volume IDs (cell IDs) to their geometric volumes (sizes)
+     """
+     if not Path(filename).is_file():
+         raise FileNotFoundError(f"filename provided ({filename}) does not exist")
+ 
+     if backend == "pymoab":
+         _check_pymoab_available()
+         return _get_volumes_sizes_pymoab(filename)
+     else:
+         return _get_volumes_sizes_h5py(filename)
+ 
+ 
+ def get_volumes_sizes_from_h5m_by_material_name(
+     filename: str,
+     backend: Literal["h5py", "pymoab"] = "h5py",
+ ) -> Dict[str, float]:
+     """Reads in a DAGMC h5m file and calculates the geometric volume
+     for each material, aggregating volumes from all cells with the same material.
+ 
+     Arguments:
+         filename: the filename of the DAGMC h5m file
+         backend: the backend to use for reading the file ("h5py" or "pymoab")
+ 
+     Returns:
+         A dictionary mapping material names to their total geometric volumes.
+         If a material is assigned to multiple cells, their volumes are summed.
+     """
+     if not Path(filename).is_file():
+         raise FileNotFoundError(f"filename provided ({filename}) does not exist")
+ 
+     # Get volume-to-material mapping and volume sizes
+     vol_mat_mapping = get_volumes_and_materials_from_h5m(
+         filename=filename,
+         remove_prefix=True,
+         backend=backend,
+     )
+     volume_sizes = get_volumes_sizes_from_h5m_by_cell_id(
+         filename=filename,
+         backend=backend,
+     )
+ 
+     # Aggregate volumes by material name
+     material_volumes: Dict[str, float] = {}
+     for vol_id, mat_name in vol_mat_mapping.items():
+         if mat_name not in material_volumes:
+             material_volumes[mat_name] = 0.0
+         material_volumes[mat_name] += volume_sizes.get(vol_id, 0.0)
+ 
+     return material_volumes
+ 
+ 
868
+ def set_openmc_material_volumes_from_h5m(
869
+ materials: Union[List, "openmc.Materials"],
870
+ filename: str,
871
+ backend: Literal["h5py", "pymoab"] = "h5py",
872
+ ) -> None:
873
+ """Sets the volume attribute on OpenMC Material objects based on DAGMC geometry.
874
+
875
+ This function reads volume and material information from a DAGMC h5m file,
876
+ then matches materials by name and sets the `volume` attribute on the
877
+ corresponding OpenMC Material objects.
878
+
879
+ If a material name in the DAGMC file appears in multiple volumes, the
880
+ geometric volumes are summed together.
881
+
882
+ Arguments:
883
+ materials: A list of openmc.Material objects or an openmc.Materials
884
+ collection. Materials are matched by their `name` attribute.
885
+ filename: The filename of the DAGMC h5m file.
886
+ backend: The backend to use for reading the file ("h5py" or "pymoab").
887
+ Note: "pymoab" backend is required for accurate volume calculations.
888
+
889
+ Raises:
890
+ FileNotFoundError: If the DAGMC file does not exist.
891
+ ValueError: If multiple OpenMC materials have the same name.
892
+
893
+ Example:
894
+ >>> import openmc
895
+ >>> steel = openmc.Material(name='steel')
896
+ >>> water = openmc.Material(name='water')
897
+ >>> materials = openmc.Materials([steel, water])
898
+ >>> set_openmc_material_volumes_from_h5m(materials, 'dagmc.h5m')
899
+ >>> print(steel.volume) # Volume is now set
900
+ """
901
+ if not Path(filename).is_file():
902
+ raise FileNotFoundError(f"filename provided ({filename}) does not exist")
903
+
904
+ # Check for duplicate material names in the provided materials
905
+ material_names = [mat.name for mat in materials]
906
+ seen_names = {}
907
+ for name in material_names:
908
+ if name is None:
909
+ continue
910
+ if name in seen_names:
911
+ raise ValueError(
912
+ f"Multiple OpenMC materials have the same name '{name}'. "
913
+ "Each material must have a unique name for matching."
914
+ )
915
+ seen_names[name] = True
916
+
917
+ # Get volumes aggregated by material name
918
+ material_volumes = get_volumes_sizes_from_h5m_by_material_name(
919
+ filename=filename,
920
+ backend=backend,
921
+ )
922
+
923
+ # Set volumes on matching OpenMC materials
924
+ for mat in materials:
925
+ if mat.name is not None and mat.name in material_volumes:
926
+ mat.volume = material_volumes[mat.name]
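A quick way to sanity-check the signed-tetrahedra formula used by _calculate_triangle_volumes and _signed_volume_from_tris is to apply it to a closed mesh of known volume: summing v0 · (v1 × v2) / 6 over consistently outward-wound triangles yields the enclosed volume. The unit-cube mesh below is an editor's sketch, not data from the package.

import numpy as np

vertices = np.array([
    [0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0],   # z = 0 face
    [0, 0, 1], [1, 0, 1], [1, 1, 1], [0, 1, 1],   # z = 1 face
], dtype=float)

# 12 triangles, two per face, wound counter-clockwise seen from outside
triangles = np.array([
    [0, 2, 1], [0, 3, 2],  # bottom (-z)
    [4, 5, 6], [4, 6, 7],  # top    (+z)
    [0, 1, 5], [0, 5, 4],  # front  (-y)
    [3, 7, 6], [3, 6, 2],  # back   (+y)
    [0, 4, 7], [0, 7, 3],  # left   (-x)
    [1, 2, 6], [1, 6, 5],  # right  (+x)
])

v0, v1, v2 = (vertices[triangles[:, i]] for i in range(3))
signed = np.einsum("ij,ij->i", v0, np.cross(v1, v2)) / 6.0
print(signed.sum())  # 1.0, the volume of the unit cube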
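The least obvious part of the MOAB HDF5 layout handled above is tstt/sets/list: each row stores the index of that set's last entry in the shared contents/children/parents arrays, so per-set slices are recovered from a running start index. A toy decode mirroring _slices_from_end_indices, with invented values:

import numpy as np

ends = np.array([2, 2, 5])  # set 0 owns [0:3], set 1 is empty, set 2 owns [3:6]
contents = np.array([10, 11, 12, 13, 14, 15])

prev_end = -1
for set_idx, end in enumerate(ends.tolist()):
    start = prev_end + 1
    if end >= start:
        print(set_idx, contents[start:end + 1])  # 0 -> [10 11 12], 2 -> [13 14 15]
    else:
        print(set_idx, "empty")                  # 1 -> empty
    prev_end = end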
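Likewise, when a set's flags word has RANGE_COMPRESSED_FLAG (0x8) set, its contents are stored as (start, count) pairs rather than explicit handles, which _expand_set_contents and _tri_indices_for_set both decode. A toy expansion with invented handles:

# Range-compressed contents: each pair is (first handle, run length)
pairs = [(100, 3), (200, 2)]  # handles 100-102 and 200-201
handles = []
for start, count in pairs:
    handles.extend(range(start, start + count))
print(handles)  # [100, 101, 102, 200, 201]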
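Finally, a usage sketch of the 0.6.0 public API. The module name dagmc_h5m_file_inspector and the file path "dagmc.h5m" are assumptions for illustration; neither appears in this diff.

import dagmc_h5m_file_inspector as di

# Default h5py backend; pass backend="pymoab" to use MOAB instead
volumes = di.get_volumes_from_h5m("dagmc.h5m")
materials = di.get_materials_from_h5m("dagmc.h5m", remove_prefix=True)
sizes_by_cell = di.get_volumes_sizes_from_h5m_by_cell_id("dagmc.h5m")
sizes_by_material = di.get_volumes_sizes_from_h5m_by_material_name("dagmc.h5m")
lower_left, upper_right = di.get_bounding_box_from_h5m("dagmc.h5m")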