cad-to-dagmc 0.10.0__py3-none-any.whl → 0.11.0__py3-none-any.whl

This diff shows the content of publicly released package versions as published to their registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that public registry.
_version.py CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
28
28
  commit_id: COMMIT_ID
29
29
  __commit_id__: COMMIT_ID
30
30
 
31
- __version__ = version = '0.10.0'
32
- __version_tuple__ = version_tuple = (0, 10, 0)
31
+ __version__ = version = '0.11.0'
32
+ __version_tuple__ = version_tuple = (0, 11, 0)
33
33
 
34
34
  __commit_id__ = commit_id = None
cad_to_dagmc/__init__.py CHANGED
@@ -11,6 +11,7 @@ except PackageNotFoundError:
11
11
 
12
12
  __version__ = get_version(root="..", relative_to=__file__)
13
13
 
14
- __all__ = ["__version__"]
14
+ __all__ = ["__version__", "PyMoabNotFoundError"]
15
15
 
16
16
  from .core import *
17
+ from .core import PyMoabNotFoundError
cad_to_dagmc/core.py CHANGED
@@ -4,7 +4,6 @@ import cadquery as cq
4
4
  import gmsh
5
5
  import numpy as np
6
6
  from cadquery import importers
7
- from pymoab import core, types
8
7
  import tempfile
9
8
  import warnings
10
9
  from typing import Iterable
@@ -12,7 +11,25 @@ from cad_to_dagmc import __version__
12
11
  import cadquery_direct_mesh_plugin
13
12
 
14
13
 
15
- def define_moab_core_and_tags() -> tuple[core.Core, dict]:
14
+ class PyMoabNotFoundError(ImportError):
15
+ """Raised when pymoab is not installed but the pymoab backend is requested."""
16
+
17
+ def __init__(self, message=None):
18
+ if message is None:
19
+ message = (
20
+ "pymoab is not installed. pymoab/MOAB is not available on PyPI so it "
21
+ "cannot be included as a dependency of cad-to-dagmc.\n\n"
22
+ "You can install pymoab via one of these methods:\n"
23
+ " 1. From conda-forge: conda install -c conda-forge moab\n"
24
+ " 2. From extra index: pip install --extra-index-url https://shimwell.github.io/wheels moab\n"
25
+ " 3. From source: https://bitbucket.org/fathomteam/moab\n\n"
26
+ "Alternatively, use the h5py backend (the default) which does not require pymoab:\n"
27
+ " export_dagmc_h5m_file(..., h5m_backend='h5py')"
28
+ )
29
+ super().__init__(message)
30
+
31
+
32
+ def define_moab_core_and_tags():
16
33
  """Creates a MOAB Core instance which can be built up by adding sets of
17
34
  triangles to the instance
18
35
 
@@ -20,6 +37,10 @@ def define_moab_core_and_tags() -> tuple[core.Core, dict]:
20
37
  (pymoab Core): A pymoab.core.Core() instance
21
38
  (pymoab tag_handle): A pymoab.core.tag_get_handle() instance
22
39
  """
40
+ try:
41
+ from pymoab import core, types
42
+ except ImportError as e:
43
+ raise PyMoabNotFoundError() from e
23
44
 
24
45
  # create pymoab instance
25
46
  moab_core = core.Core()
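
> Editor's note: with the pymoab import now deferred into `define_moab_core_and_tags`, importing `cad_to_dagmc` no longer requires pymoab, and callers that still want the pymoab backend can handle its absence explicitly. A minimal sketch based on the code shown in this hunk:

```python
# Sketch: the package now imports without pymoab installed; requesting the
# pymoab-backed helper raises PyMoabNotFoundError, which callers can catch.
from cad_to_dagmc import PyMoabNotFoundError
from cad_to_dagmc.core import define_moab_core_and_tags

try:
    moab_core, tags = define_moab_core_and_tags()
except PyMoabNotFoundError as err:
    print(err)  # prints the install guidance from the error message above
```
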
@@ -68,21 +89,55 @@ def define_moab_core_and_tags() -> tuple[core.Core, dict]:
68
89
 
69
90
  def vertices_to_h5m(
70
91
  vertices: list[tuple[float, float, float]] | list["cadquery.occ_impl.geom.Vector"],
71
- triangles_by_solid_by_face: list[list[tuple[int, int, int]]],
92
+ triangles_by_solid_by_face: dict[int, dict[int, list[list[int]]]],
72
93
  material_tags: list[str],
73
94
  h5m_filename: str = "dagmc.h5m",
74
95
  implicit_complement_material_tag: str | None = None,
96
+ method: str = "h5py",
75
97
  ):
76
98
  """Converts vertices and triangle sets into a tagged h5m file compatible
77
99
  with DAGMC enabled neutronics simulations
78
100
 
79
101
  Args:
80
- vertices:
81
- triangles:
82
- material_tags:
83
- h5m_filename:
84
- implicit_complement_material_tag:
102
+ vertices: List of vertex coordinates as (x, y, z) tuples or CadQuery vectors
103
+ triangles_by_solid_by_face: Dict mapping solid_id -> face_id -> list of triangles
104
+ material_tags: List of material tag names, one per solid
105
+ h5m_filename: Output filename for the h5m file
106
+ implicit_complement_material_tag: Optional material tag for implicit complement
107
+ method: Backend to use for writing h5m file ('pymoab' or 'h5py')
85
108
  """
109
+ if method == "pymoab":
110
+ return _vertices_to_h5m_pymoab(
111
+ vertices=vertices,
112
+ triangles_by_solid_by_face=triangles_by_solid_by_face,
113
+ material_tags=material_tags,
114
+ h5m_filename=h5m_filename,
115
+ implicit_complement_material_tag=implicit_complement_material_tag,
116
+ )
117
+ elif method == "h5py":
118
+ return _vertices_to_h5m_h5py(
119
+ vertices=vertices,
120
+ triangles_by_solid_by_face=triangles_by_solid_by_face,
121
+ material_tags=material_tags,
122
+ h5m_filename=h5m_filename,
123
+ implicit_complement_material_tag=implicit_complement_material_tag,
124
+ )
125
+ else:
126
+ raise ValueError(f"method must be 'pymoab' or 'h5py', not '{method}'")
127
+
128
+
129
+ def _vertices_to_h5m_pymoab(
130
+ vertices: list[tuple[float, float, float]] | list["cadquery.occ_impl.geom.Vector"],
131
+ triangles_by_solid_by_face: dict[int, dict[int, list[list[int]]]],
132
+ material_tags: list[str],
133
+ h5m_filename: str = "dagmc.h5m",
134
+ implicit_complement_material_tag: str | None = None,
135
+ ):
136
+ """PyMOAB backend for vertices_to_h5m."""
137
+ try:
138
+ from pymoab import types
139
+ except ImportError as e:
140
+ raise PyMoabNotFoundError() from e
86
141
 
87
142
  if len(material_tags) != len(triangles_by_solid_by_face):
88
143
  msg = f"The number of material_tags provided is {len(material_tags)} and the number of sets of triangles is {len(triangles_by_solid_by_face)}. You must provide one material_tag for every triangle set"
@@ -221,6 +276,442 @@ def vertices_to_h5m(
221
276
  return h5m_filename
222
277
 
223
278
 
279
+ def _vertices_to_h5m_h5py(
280
+ vertices: list[tuple[float, float, float]] | list["cadquery.occ_impl.geom.Vector"],
281
+ triangles_by_solid_by_face: dict[int, dict[int, list[list[int]]]],
282
+ material_tags: list[str],
283
+ h5m_filename: str = "dagmc.h5m",
284
+ implicit_complement_material_tag: str | None = None,
285
+ ):
286
+ """H5PY backend for vertices_to_h5m.
287
+
288
+ Creates an h5m file compatible with DAGMC using h5py directly,
289
+ without requiring pymoab.
290
+ """
291
+ import h5py
292
+ from datetime import datetime
293
+
294
+ if len(material_tags) != len(triangles_by_solid_by_face):
295
+ msg = f"The number of material_tags provided is {len(material_tags)} and the number of sets of triangles is {len(triangles_by_solid_by_face)}. You must provide one material_tag for every triangle set"
296
+ raise ValueError(msg)
297
+
298
+ # Convert CadQuery vectors to floats if needed
299
+ if (
300
+ hasattr(vertices[0], "x")
301
+ and hasattr(vertices[0], "y")
302
+ and hasattr(vertices[0], "z")
303
+ ):
304
+ vertices_floats = [(vert.x, vert.y, vert.z) for vert in vertices]
305
+ else:
306
+ vertices_floats = vertices
307
+
308
+ # Build face_ids_with_solid_ids to track shared faces
309
+ face_ids_with_solid_ids = {}
310
+ for solid_id, triangles_on_each_face in triangles_by_solid_by_face.items():
311
+ for face_id in triangles_on_each_face.keys():
312
+ if face_id in face_ids_with_solid_ids:
313
+ face_ids_with_solid_ids[face_id].append(solid_id)
314
+ else:
315
+ face_ids_with_solid_ids[face_id] = [solid_id]
316
+
317
+ # Collect all unique faces and their triangles
318
+ all_faces = {} # face_id -> list of triangles
319
+ for solid_id, triangles_on_each_face in triangles_by_solid_by_face.items():
320
+ for face_id, triangles_on_face in triangles_on_each_face.items():
321
+ if face_id not in all_faces:
322
+ all_faces[face_id] = triangles_on_face
323
+
324
+ # Convert vertices to numpy array
325
+ vertices_arr = np.asarray(vertices_floats, dtype=np.float64)
326
+ num_vertices = len(vertices_arr)
327
+
328
+ # Collect all triangles
329
+ all_triangles = []
330
+ for face_id in sorted(all_faces.keys()):
331
+ all_triangles.extend(all_faces[face_id])
332
+ all_triangles = np.asarray(all_triangles, dtype=np.int64)
333
+ num_triangles = len(all_triangles)
334
+
335
+ # Create the h5m file
336
+ # makes the folder if it does not exist
337
+ if Path(h5m_filename).parent:
338
+ Path(h5m_filename).parent.mkdir(parents=True, exist_ok=True)
339
+
340
+ with h5py.File(h5m_filename, "w") as f:
341
+ tstt = f.create_group("tstt")
342
+
343
+ # Global ID counter - starts at 1
344
+ global_id = 1
345
+
346
+ # === NODES ===
347
+ nodes_group = tstt.create_group("nodes")
348
+ coords = nodes_group.create_dataset("coordinates", data=vertices_arr)
349
+ coords.attrs.create("start_id", global_id)
350
+ global_id += num_vertices
351
+
352
+ # Node tags
353
+ node_tags = nodes_group.create_group("tags")
354
+ node_tags.create_dataset("GLOBAL_ID", data=np.full(num_vertices, -1, dtype=np.int32))
355
+
356
+ # === ELEMENTS ===
357
+ elements = tstt.create_group("elements")
358
+
359
+ # Element type enum
360
+ elems = {
361
+ "Edge": 1, "Tri": 2, "Quad": 3, "Polygon": 4, "Tet": 5,
362
+ "Pyramid": 6, "Prism": 7, "Knife": 8, "Hex": 9, "Polyhedron": 10,
363
+ }
364
+ tstt["elemtypes"] = h5py.enum_dtype(elems)
365
+
366
+ # History
367
+ now = datetime.now()
368
+ tstt.create_dataset(
369
+ "history",
370
+ data=[
371
+ "cad_to_dagmc".encode("ascii"),
372
+ __version__.encode("ascii"),
373
+ now.strftime("%m/%d/%y").encode("ascii"),
374
+ now.strftime("%H:%M:%S").encode("ascii"),
375
+ ],
376
+ )
377
+
378
+ # Triangles
379
+ tri3_group = elements.create_group("Tri3")
380
+ tri3_group.attrs.create("element_type", elems["Tri"], dtype=tstt["elemtypes"])
381
+
382
+ # Node indices are 1-based in h5m
383
+ connectivity = tri3_group.create_dataset(
384
+ "connectivity",
385
+ data=all_triangles + 1,
386
+ dtype=np.uint64,
387
+ )
388
+ triangle_start_id = global_id
389
+ connectivity.attrs.create("start_id", triangle_start_id)
390
+ global_id += num_triangles
391
+
392
+ # Triangle tags
393
+ tags_tri3 = tri3_group.create_group("tags")
394
+ tags_tri3.create_dataset("GLOBAL_ID", data=np.full(num_triangles, -1, dtype=np.int32))
395
+
396
+ # === SETS ===
397
+ # Plan out the entity set structure:
398
+ # For each solid: 1 volume set, N surface sets (one per face), 1 group set (material)
399
+ # Plus: 1 file set at the end, optionally 1 implicit complement group
400
+
401
+ solid_ids = list(triangles_by_solid_by_face.keys())
402
+ num_solids = len(solid_ids)
403
+
404
+ # Assign set IDs
405
+ sets_start_id = global_id
406
+
407
+ # Map solid_id -> volume_set_id
408
+ volume_set_ids = {}
409
+ # Map face_id -> surface_set_id
410
+ surface_set_ids = {}
411
+ # Map solid_id -> group_set_id
412
+ group_set_ids = {}
413
+
414
+ current_set_id = sets_start_id
415
+
416
+ # First, assign IDs to all surfaces (one per unique face)
417
+ for face_id in sorted(all_faces.keys()):
418
+ surface_set_ids[face_id] = current_set_id
419
+ current_set_id += 1
420
+
421
+ # Then assign IDs to volumes
422
+ for solid_id in solid_ids:
423
+ volume_set_ids[solid_id] = current_set_id
424
+ current_set_id += 1
425
+
426
+ # Then assign IDs to groups (materials)
427
+ for solid_id in solid_ids:
428
+ group_set_ids[solid_id] = current_set_id
429
+ current_set_id += 1
430
+
431
+ # Implicit complement group (if requested)
432
+ implicit_complement_set_id = None
433
+ if implicit_complement_material_tag:
434
+ implicit_complement_set_id = current_set_id
435
+ current_set_id += 1
436
+
437
+ # File set
438
+ file_set_id = current_set_id
439
+ current_set_id += 1
440
+
441
+ global_id = current_set_id
442
+
443
+ # === TAGS ===
444
+ tstt_tags = tstt.create_group("tags")
445
+
446
+ # Collect tagged set IDs for CATEGORY (all entities)
447
+ # and GEOM_DIMENSION (only surfaces and volumes - not groups, to match pymoab)
448
+ category_set_ids = []
449
+ categories = []
450
+ geom_dim_set_ids = []
451
+ geom_dimensions = []
452
+
453
+ # Volumes first (to match pymoab ordering)
454
+ for solid_id in solid_ids:
455
+ category_set_ids.append(volume_set_ids[solid_id])
456
+ categories.append("Volume")
457
+ geom_dim_set_ids.append(volume_set_ids[solid_id])
458
+ geom_dimensions.append(3)
459
+
460
+ # Groups (CATEGORY only - pymoab doesn't set geom_dimension on groups)
461
+ # Note: Groups COULD have geom_dimension=4 set, but pymoab doesn't do this
462
+ for solid_id in solid_ids:
463
+ category_set_ids.append(group_set_ids[solid_id])
464
+ categories.append("Group")
465
+
466
+ # Surfaces
467
+ for face_id in sorted(all_faces.keys()):
468
+ category_set_ids.append(surface_set_ids[face_id])
469
+ categories.append("Surface")
470
+ geom_dim_set_ids.append(surface_set_ids[face_id])
471
+ geom_dimensions.append(2)
472
+
473
+ # Implicit complement (CATEGORY only)
474
+ if implicit_complement_material_tag:
475
+ category_set_ids.append(implicit_complement_set_id)
476
+ categories.append("Group")
477
+
478
+ # CATEGORY tag
479
+ # Note: We use opaque dtype (|V32) to match pymoab output exactly.
480
+ # A string dtype (|S32) would also work and be more readable in h5dump,
481
+ # but we match pymoab for maximum compatibility.
482
+ cat_group = tstt_tags.create_group("CATEGORY")
483
+ cat_group.attrs.create("class", 1, dtype=np.int32)
484
+ cat_group.create_dataset("id_list", data=np.array(category_set_ids, dtype=np.uint64))
485
+ # Create opaque 32-byte type to match pymoab's H5T_OPAQUE
486
+ opaque_dt = h5py.opaque_dtype(np.dtype("V32"))
487
+ cat_group["type"] = opaque_dt
488
+ # Encode category strings as 32-byte null-padded values
489
+ cat_values = np.array([s.encode("ascii").ljust(32, b"\x00") for s in categories], dtype="V32")
490
+ cat_group.create_dataset("values", data=cat_values)
491
+
492
+ # GEOM_DIMENSION tag
493
+ # Note: We only tag surfaces (dim=2) and volumes (dim=3), not groups.
494
+ # Groups COULD have geom_dimension=4, but pymoab doesn't set this.
495
+ geom_group = tstt_tags.create_group("GEOM_DIMENSION")
496
+ geom_group["type"] = np.dtype("i4")
497
+ geom_group.attrs.create("class", 1, dtype=np.int32)
498
+ geom_group.attrs.create("default", -1, dtype=geom_group["type"])
499
+ geom_group.attrs.create("global", -1, dtype=geom_group["type"])
500
+ geom_group.create_dataset("id_list", data=np.array(geom_dim_set_ids, dtype=np.uint64))
501
+ geom_group.create_dataset("values", data=np.array(geom_dimensions, dtype=np.int32))
502
+
503
+ # GEOM_SENSE_2 tag (only for surfaces)
504
+ surface_ids_list = [surface_set_ids[fid] for fid in sorted(all_faces.keys())]
505
+ gs2_group = tstt_tags.create_group("GEOM_SENSE_2")
506
+ gs2_dtype = np.dtype("(2,)u8")
507
+ gs2_group["type"] = gs2_dtype
508
+ gs2_group.attrs.create("class", 1, dtype=np.int32)
509
+ gs2_group.attrs.create("is_handle", 1, dtype=np.int32)
510
+ gs2_group.create_dataset("id_list", data=np.array(surface_ids_list, dtype=np.uint64))
511
+
512
+ # Build sense data for each surface
513
+ sense_values = []
514
+ for face_id in sorted(all_faces.keys()):
515
+ solids_for_face = face_ids_with_solid_ids[face_id]
516
+ if len(solids_for_face) == 2:
517
+ # Shared face - both volumes
518
+ vol1 = volume_set_ids[solids_for_face[0]]
519
+ vol2 = volume_set_ids[solids_for_face[1]]
520
+ sense_values.append([vol1, vol2])
521
+ else:
522
+ # Single volume
523
+ vol = volume_set_ids[solids_for_face[0]]
524
+ sense_values.append([vol, 0])
525
+
526
+ if sense_values:
527
+ gs2_values = np.zeros((len(sense_values),), dtype=[("f0", "<u8", (2,))])
528
+ gs2_values["f0"] = np.array(sense_values, dtype=np.uint64)
529
+ gs2_space = h5py.h5s.create_simple((len(sense_values),))
530
+ gs2_arr_type = h5py.h5t.array_create(h5py.h5t.NATIVE_UINT64, (2,))
531
+ gs2_dset = h5py.h5d.create(gs2_group.id, b"values", gs2_arr_type, gs2_space)
532
+ gs2_dset.write(h5py.h5s.ALL, h5py.h5s.ALL, gs2_values, mtype=gs2_arr_type)
533
+ gs2_dset.close()
534
+
535
+ # GLOBAL_ID tag - store as sparse tag with id_list and values
536
+ # This stores the user-facing IDs for surfaces and volumes
537
+ gid_ids = []
538
+ gid_values = []
539
+ # Surfaces get their face_id as global_id
540
+ for face_id in sorted(all_faces.keys()):
541
+ gid_ids.append(surface_set_ids[face_id])
542
+ gid_values.append(face_id)
543
+ # Volumes get their solid_id as global_id
544
+ for solid_id in solid_ids:
545
+ gid_ids.append(volume_set_ids[solid_id])
546
+ gid_values.append(solid_id)
547
+ # Groups also get the solid_id
548
+ for solid_id in solid_ids:
549
+ gid_ids.append(group_set_ids[solid_id])
550
+ gid_values.append(solid_id)
551
+
552
+ gid_group = tstt_tags.create_group("GLOBAL_ID")
553
+ gid_group["type"] = np.dtype("i4")
554
+ gid_group.attrs.create("class", 2, dtype=np.int32)
555
+ gid_group.attrs.create("default", -1, dtype=gid_group["type"])
556
+ gid_group.attrs.create("global", -1, dtype=gid_group["type"])
557
+ gid_group.create_dataset("id_list", data=np.array(gid_ids, dtype=np.uint64))
558
+ gid_group.create_dataset("values", data=np.array(gid_values, dtype=np.int32))
559
+
560
+ # NAME tag (for groups - material names)
561
+ name_ids = []
562
+ name_values = []
563
+ for solid_id, mat_tag in zip(solid_ids, material_tags):
564
+ name_ids.append(group_set_ids[solid_id])
565
+ name_values.append(f"mat:{mat_tag}")
566
+ if implicit_complement_material_tag:
567
+ name_ids.append(implicit_complement_set_id)
568
+ name_values.append(f"mat:{implicit_complement_material_tag}_comp")
569
+
570
+ name_group = tstt_tags.create_group("NAME")
571
+ name_group.attrs.create("class", 1, dtype=np.int32)
572
+ name_group.create_dataset("id_list", data=np.array(name_ids, dtype=np.uint64))
573
+ name_group["type"] = h5py.opaque_dtype(np.dtype("S32"))
574
+ name_group.create_dataset("values", data=name_values, dtype=name_group["type"])
575
+
576
+ # Other standard tags (empty but needed)
577
+ for tag_name in ["DIRICHLET_SET", "MATERIAL_SET", "NEUMANN_SET"]:
578
+ tag_grp = tstt_tags.create_group(tag_name)
579
+ tag_grp["type"] = np.dtype("i4")
580
+ tag_grp.attrs.create("class", 1, dtype=np.int32)
581
+ tag_grp.attrs.create("default", -1, dtype=tag_grp["type"])
582
+ tag_grp.attrs.create("global", -1, dtype=tag_grp["type"])
583
+
584
+ # === SETS structure ===
585
+ sets_group = tstt.create_group("sets")
586
+
587
+ # Build contents, parents, children, and list arrays
588
+ contents = []
589
+ list_rows = []
590
+ parents_list = []
591
+ children_list = []
592
+
593
+ # Track triangle ranges per face
594
+ tri_offset = 0
595
+ face_triangle_ranges = {}
596
+ for face_id in sorted(all_faces.keys()):
597
+ tris = all_faces[face_id]
598
+ face_triangle_ranges[face_id] = (tri_offset, len(tris))
599
+ tri_offset += len(tris)
600
+
601
+ # Track vertices per face
602
+ face_vertex_sets = {}
603
+ for face_id, tris in all_faces.items():
604
+ verts = set()
605
+ for tri in tris:
606
+ verts.update(tri)
607
+ face_vertex_sets[face_id] = sorted(verts)
608
+
609
+ contents_end = -1
610
+ children_end = -1
611
+ parents_end = -1
612
+
613
+ # Surface sets
614
+ for face_id in sorted(all_faces.keys()):
615
+ # Content: vertices + triangles for this face
616
+ verts = face_vertex_sets[face_id]
617
+ tri_start, tri_count = face_triangle_ranges[face_id]
618
+
619
+ # Add individual vertex handles (1-based IDs)
620
+ # Don't assume vertices are contiguous - store each one
621
+ for v in verts:
622
+ contents.append(v + 1) # 1-based vertex ID
623
+
624
+ # Add individual triangle handles
625
+ for i in range(tri_count):
626
+ contents.append(triangle_start_id + tri_start + i)
627
+
628
+ contents_end = len(contents) - 1
629
+
630
+ # Parent-child: surface is child of volume(s)
631
+ solids_for_face = face_ids_with_solid_ids[face_id]
632
+ for solid_id in solids_for_face:
633
+ parents_list.append(volume_set_ids[solid_id])
634
+ parents_end = len(parents_list) - 1
635
+
636
+ # flags: 2 = MESHSET_SET (handles, not ranges)
637
+ list_rows.append([contents_end, children_end, parents_end, 2])
638
+
639
+ # Volume sets (empty contents, but have surface children)
640
+ for solid_id in solid_ids:
641
+ # Volumes have no direct content
642
+ # Children are the surfaces
643
+ faces_in_solid = list(triangles_by_solid_by_face[solid_id].keys())
644
+ for face_id in faces_in_solid:
645
+ children_list.append(surface_set_ids[face_id])
646
+ children_end = len(children_list) - 1
647
+
648
+ # flags: 2 = handle-based (0b0010)
649
+ list_rows.append([contents_end, children_end, parents_end, 2])
650
+
651
+ # Group sets (contain volume handles)
652
+ for solid_id in solid_ids:
653
+ contents.append(volume_set_ids[solid_id])
654
+ contents_end = len(contents) - 1
655
+ list_rows.append([contents_end, children_end, parents_end, 2])
656
+
657
+ # Implicit complement group
658
+ if implicit_complement_material_tag:
659
+ # Add the last volume to the implicit complement group
660
+ contents.append(volume_set_ids[solid_ids[-1]])
661
+ contents_end = len(contents) - 1
662
+ list_rows.append([contents_end, children_end, parents_end, 2])
663
+
664
+ # File set (contains everything)
665
+ contents.extend([1, file_set_id - 1]) # range of all entities
666
+ contents_end = len(contents) - 1
667
+ list_rows.append([contents_end, children_end, parents_end, 10])
668
+
669
+ # Write sets datasets
670
+ sets_group.create_dataset("contents", data=np.array(contents, dtype=np.uint64))
671
+ if children_list:
672
+ sets_group.create_dataset("children", data=np.array(children_list, dtype=np.uint64))
673
+ else:
674
+ sets_group.create_dataset("children", data=np.array([], dtype=np.uint64))
675
+ if parents_list:
676
+ sets_group.create_dataset("parents", data=np.array(parents_list, dtype=np.uint64))
677
+ else:
678
+ sets_group.create_dataset("parents", data=np.array([], dtype=np.uint64))
679
+
680
+ lst = sets_group.create_dataset("list", data=np.array(list_rows, dtype=np.int64))
681
+ lst.attrs.create("start_id", sets_start_id)
682
+
683
+ # Set tags (GLOBAL_ID for each set)
684
+ sets_tags = sets_group.create_group("tags")
685
+ set_global_ids = []
686
+
687
+ # Surface global IDs
688
+ for face_id in sorted(all_faces.keys()):
689
+ set_global_ids.append(face_id)
690
+
691
+ # Volume global IDs
692
+ for solid_id in solid_ids:
693
+ set_global_ids.append(solid_id)
694
+
695
+ # Group global IDs
696
+ for solid_id in solid_ids:
697
+ set_global_ids.append(solid_id)
698
+
699
+ # Implicit complement
700
+ if implicit_complement_material_tag:
701
+ set_global_ids.append(-1)
702
+
703
+ # File set
704
+ set_global_ids.append(-1)
705
+
706
+ sets_tags.create_dataset("GLOBAL_ID", data=np.array(set_global_ids, dtype=np.int32))
707
+
708
+ # Max ID attribute
709
+ tstt.attrs.create("max_id", np.uint64(global_id - 1))
710
+
711
+ print(f"written DAGMC file {h5m_filename}")
712
+ return h5m_filename
713
+
714
+
224
715
  def get_volumes(gmsh, assembly, method="file", scale_factor=1.0):
225
716
 
226
717
  if method == "in memory":
@@ -444,6 +935,7 @@ def export_gmsh_object_to_dagmc_h5m_file(
444
935
  material_tags: list[str] | None = None,
445
936
  implicit_complement_material_tag: str | None = None,
446
937
  filename: str = "dagmc.h5m",
938
+ h5m_backend: str = "h5py",
447
939
  ) -> str:
448
940
  """
449
941
  Exports a GMSH object to a DAGMC-compatible h5m file. Note gmsh should
@@ -454,6 +946,7 @@ def export_gmsh_object_to_dagmc_h5m_file(
454
946
  material_tags: A list of material tags corresponding to the volumes in the GMSH object.
455
947
  implicit_complement_material_tag: The material tag for the implicit complement (void space).
456
948
  filename: The name of the output h5m file. Defaults to "dagmc.h5m".
949
+ h5m_backend: Backend for writing h5m file, 'pymoab' or 'h5py'. Defaults to 'h5py'.
457
950
 
458
951
  Returns:
459
952
  str: The filename of the generated DAGMC h5m file.
@@ -481,6 +974,7 @@ def export_gmsh_object_to_dagmc_h5m_file(
481
974
  material_tags=material_tags,
482
975
  h5m_filename=filename,
483
976
  implicit_complement_material_tag=implicit_complement_material_tag,
977
+ method=h5m_backend,
484
978
  )
485
979
 
486
980
  return h5m_filename
@@ -508,11 +1002,13 @@ def export_gmsh_file_to_dagmc_h5m_file(
508
1002
  material_tags: list[str] | None = None,
509
1003
  implicit_complement_material_tag: str | None = None,
510
1004
  dagmc_filename: str = "dagmc.h5m",
1005
+ h5m_backend: str = "h5py",
511
1006
  ) -> str:
512
1007
  """Saves a DAGMC h5m file of the geometry GMsh file. This function
513
1008
  initializes and finalizes Gmsh.
514
1009
 
515
1010
  Args:
1011
+ gmsh_filename (str): the filename of the GMSH mesh file.
516
1012
  material_tags (list[str]): the names of the DAGMC
517
1013
  material tags to assign. These will need to be in the same
518
1014
  order as the volumes in the GMESH mesh and match the
@@ -520,7 +1016,9 @@ def export_gmsh_file_to_dagmc_h5m_file(
520
1016
  implicit_complement_material_tag (str | None, optional):
521
1017
  the name of the material tag to use for the implicit
522
1018
  complement (void space). Defaults to None which is a vacuum.
523
- dagmc_filename (str, optional): _description_. Defaults to "dagmc.h5m".
1019
+ dagmc_filename (str, optional): Output filename. Defaults to "dagmc.h5m".
1020
+ h5m_backend (str, optional): Backend for writing h5m file, 'pymoab' or 'h5py'.
1021
+ Defaults to 'h5py'.
524
1022
 
525
1023
  Returns:
526
1024
  str: The filename of the generated DAGMC h5m file.
@@ -553,6 +1051,7 @@ def export_gmsh_file_to_dagmc_h5m_file(
553
1051
  material_tags=material_tags,
554
1052
  h5m_filename=dagmc_filename,
555
1053
  implicit_complement_material_tag=implicit_complement_material_tag,
1054
+ method=h5m_backend,
556
1055
  )
557
1056
 
558
1057
  return h5m_filename
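
> Editor's note: a sketch of the file-based Gmsh route with the new backend argument (the mesh filename and material tags are placeholders; the keyword names are taken from the docstring in this hunk):

```python
# Convert an existing Gmsh .msh file to a DAGMC h5m file.
from cad_to_dagmc.core import export_gmsh_file_to_dagmc_h5m_file

export_gmsh_file_to_dagmc_h5m_file(
    gmsh_filename="mesh.msh",
    material_tags=["mat1", "mat2"],  # one per volume, in mesh order
    dagmc_filename="dagmc.h5m",
    h5m_backend="h5py",              # or "pymoab"
)
```
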
@@ -898,6 +1397,8 @@ class CadToDagmc:
898
1397
  - meshing_backend (str, optional): explicitly specify 'gmsh' or 'cadquery'.
899
1398
  If not provided, backend is auto-selected based on other arguments.
900
1399
  Defaults to 'cadquery' if no backend-specific arguments are given.
1400
+ - h5m_backend (str, optional): 'pymoab' or 'h5py' for writing h5m files.
1401
+ Defaults to 'h5py'.
901
1402
 
902
1403
  For GMSH backend:
903
1404
  - min_mesh_size (float): minimum mesh element size
@@ -930,7 +1431,7 @@ class CadToDagmc:
930
1431
  "method",
931
1432
  "unstructured_volumes",
932
1433
  }
933
- all_acceptable_keys = cadquery_keys | gmsh_keys | {"meshing_backend"}
1434
+ all_acceptable_keys = cadquery_keys | gmsh_keys | {"meshing_backend", "h5m_backend"}
934
1435
 
935
1436
  # Check for invalid kwargs
936
1437
  invalid_keys = set(kwargs.keys()) - all_acceptable_keys
@@ -943,6 +1444,9 @@ class CadToDagmc:
943
1444
  # Handle meshing_backend - either from kwargs or auto-detect
944
1445
  meshing_backend = kwargs.pop("meshing_backend", None)
945
1446
 
1447
+ # Handle h5m_backend - pymoab or h5py
1448
+ h5m_backend = kwargs.pop("h5m_backend", "h5py")
1449
+
946
1450
  if meshing_backend is None:
947
1451
  # Auto-select meshing_backend based on kwargs
948
1452
  has_cadquery = any(key in kwargs for key in cadquery_keys)
@@ -1133,6 +1637,7 @@ class CadToDagmc:
1133
1637
  material_tags=material_tags_in_brep_order,
1134
1638
  h5m_filename=filename,
1135
1639
  implicit_complement_material_tag=implicit_complement_material_tag,
1640
+ method=h5m_backend,
1136
1641
  )
1137
1642
 
1138
1643
  if unstructured_volumes:
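
> Editor's note: a hedged end-to-end sketch tying the new `h5m_backend` kwarg into the class-based workflow. `add_stp_file` and the `export_dagmc_h5m_file` signature are assumed from the package documentation rather than shown in this diff; values are illustrative.

```python
# Hedged end-to-end sketch; method names assumed from the package docs.
from cad_to_dagmc import CadToDagmc

model = CadToDagmc()
model.add_stp_file("geometry.step")
model.export_dagmc_h5m_file(
    material_tags=["mat_steel"],
    filename="dagmc.h5m",
    meshing_backend="gmsh",  # or "cadquery"
    min_mesh_size=0.5,       # gmsh-backend kwargs per the docstring above
    max_mesh_size=5.0,
    h5m_backend="h5py",      # new in 0.11.0; "pymoab" needs pymoab/MOAB installed
)
```
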
@@ -0,0 +1,56 @@
1
+ Metadata-Version: 2.4
2
+ Name: cad_to_dagmc
3
+ Version: 0.11.0
4
+ Summary: Converts CAD files to a DAGMC h5m file
5
+ Author-email: Jonathan Shimwell <mail@jshimwell.com>
6
+ Project-URL: Homepage, https://github.com/fusion-energy/cad_to_dagmc
7
+ Project-URL: Bug Tracker, https://github.com/fusion-energy/cad_to_dagmc/issues
8
+ Keywords: dagmc,geometry,plot,slice
9
+ Classifier: Programming Language :: Python :: 3
10
+ Classifier: License :: OSI Approved :: MIT License
11
+ Classifier: Operating System :: OS Independent
12
+ Requires-Python: >=3.8
13
+ Description-Content-Type: text/markdown
14
+ License-File: LICENSE
15
+ Requires-Dist: trimesh
16
+ Requires-Dist: networkx
17
+ Requires-Dist: cadquery>=2.6.0
18
+ Requires-Dist: numpy
19
+ Requires-Dist: gmsh
20
+ Requires-Dist: h5py
21
+ Requires-Dist: cadquery_direct_mesh_plugin>=0.1.0
22
+ Provides-Extra: tests
23
+ Requires-Dist: pytest; extra == "tests"
24
+ Requires-Dist: pytest-codeblocks; extra == "tests"
25
+ Requires-Dist: vtk; extra == "tests"
26
+ Requires-Dist: assembly-mesh-plugin; extra == "tests"
27
+ Provides-Extra: docs
28
+ Requires-Dist: sphinx; extra == "docs"
29
+ Requires-Dist: myst-parser; extra == "docs"
30
+ Requires-Dist: sphinx-book-theme; extra == "docs"
31
+ Requires-Dist: sphinx-autodoc-typehints; extra == "docs"
32
+ Requires-Dist: sphinx-design; extra == "docs"
33
+ Requires-Dist: sphinxcontrib-mermaid; extra == "docs"
34
+ Requires-Dist: sphinxcadquery; extra == "docs"
35
+ Requires-Dist: pyvista[jupyter]; extra == "docs"
36
+ Requires-Dist: panel; extra == "docs"
37
+ Requires-Dist: jupyter-sphinx; extra == "docs"
38
+ Dynamic: license-file
39
+
40
+
41
+ [![N|Python](https://www.python.org/static/community_logos/python-powered-w-100x40.png)](https://www.python.org)
42
+
43
+ [![CI with Conda install](https://github.com/fusion-energy/cad_to_dagmc/actions/workflows/ci_with_conda_install.yml/badge.svg)](https://github.com/fusion-energy/cad_to_dagmc/actions/workflows/ci_with_conda_install.yml) Testing package and running examples with dependencies installed via Conda
44
+
45
+ [![CI with pip install](https://github.com/fusion-energy/cad_to_dagmc/actions/workflows/ci_with_pip_install.yml/badge.svg)](https://github.com/fusion-energy/cad_to_dagmc/actions/workflows/ci_with_pip_install.yml) Testing package and running examples with dependencies installed via pip
46
+
47
+ [![CI with model benchmark zoo](https://github.com/fusion-energy/cad_to_dagmc/actions/workflows/ci_with_benchmarks.yml/badge.svg?branch=main)](https://github.com/fusion-energy/cad_to_dagmc/actions/workflows/ci_with_benchmarks.yml) Testing with [Model Benchmark Zoo](https://github.com/fusion-energy/model_benchmark_zoo)
48
+
49
+ [![Upload Python Package](https://github.com/fusion-energy/cad_to_dagmc/actions/workflows/python-publish.yml/badge.svg)](https://github.com/fusion-energy/cad_to_dagmc/actions/workflows/python-publish.yml)
50
+
51
+ [![PyPI](https://img.shields.io/pypi/v/cad_to_dagmc?color=brightgreen&label=pypi&logo=grebrightgreenen&logoColor=green)](https://pypi.org/project/cad_to_dagmc/)
52
+
53
+
54
+ A minimal package that converts CAD geometry to [DAGMC](https://github.com/svalinn/DAGMC/) (h5m) files, [unstructured mesh](https://docs.openmc.org/en/latest/pythonapi/generated/openmc.UnstructuredMesh.html) files (vtk) and Gmsh (msh) files ready for use in neutronics simulations.
55
+
56
+ ## See the :point_right: [online documentation](https://fusion-energy.github.io/cad_to_dagmc/) :point_left: for installation options, usage recommendations and Python API details.
@@ -0,0 +1,8 @@
1
+ _version.py,sha256=9eKRDJ72C44i2IPiti-C7phzF429SwV2Nogzt0etpr0,706
2
+ cad_to_dagmc/__init__.py,sha256=mGRPqA239UAPXY3wyVefthdMDWz4IPJ5hJjTBeVAex8,555
3
+ cad_to_dagmc/core.py,sha256=6O0SxViLeFYIGIDO-BLVhT7EVDYszuBFv0AywEZTJpY,66658
4
+ cad_to_dagmc-0.11.0.dist-info/licenses/LICENSE,sha256=B8kznH_777JVNZ3HOKDc4Tj24F7wJ68ledaNYeL9sCw,1070
5
+ cad_to_dagmc-0.11.0.dist-info/METADATA,sha256=4dpd7Ok3k16qLp_icXlogwcYoBM7lSvuaOI5S2m2_wg,3331
6
+ cad_to_dagmc-0.11.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
7
+ cad_to_dagmc-0.11.0.dist-info/top_level.txt,sha256=zTi8C64SEBsE5WOtPovnxhOzt-E6Oc5nC3RW6M_5aEA,22
8
+ cad_to_dagmc-0.11.0.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (80.9.0)
2
+ Generator: setuptools (80.10.2)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5
 
@@ -1,180 +0,0 @@
1
- Metadata-Version: 2.4
2
- Name: cad_to_dagmc
3
- Version: 0.10.0
4
- Summary: Converts CAD files to a DAGMC h5m file
5
- Author-email: Jonathan Shimwell <mail@jshimwell.com>
6
- Project-URL: Homepage, https://github.com/fusion-energy/cad_to_dagmc
7
- Project-URL: Bug Tracker, https://github.com/fusion-energy/cad_to_dagmc/issues
8
- Keywords: dagmc,geometry,plot,slice
9
- Classifier: Programming Language :: Python :: 3
10
- Classifier: License :: OSI Approved :: MIT License
11
- Classifier: Operating System :: OS Independent
12
- Requires-Python: >=3.8
13
- Description-Content-Type: text/markdown
14
- License-File: LICENSE
15
- Requires-Dist: trimesh
16
- Requires-Dist: networkx
17
- Requires-Dist: cadquery>=2.6.0
18
- Requires-Dist: numpy
19
- Requires-Dist: gmsh
20
- Requires-Dist: cadquery_direct_mesh_plugin>=0.1.0
21
- Provides-Extra: tests
22
- Requires-Dist: pytest; extra == "tests"
23
- Requires-Dist: vtk; extra == "tests"
24
- Requires-Dist: assembly-mesh-plugin; extra == "tests"
25
- Dynamic: license-file
26
-
27
-
28
- [![N|Python](https://www.python.org/static/community_logos/python-powered-w-100x40.png)](https://www.python.org)
29
-
30
- [![CI with Conda install](https://github.com/fusion-energy/cad_to_dagmc/actions/workflows/ci_with_conda_install.yml/badge.svg)](https://github.com/fusion-energy/cad_to_dagmc/actions/workflows/ci_with_conda_install.yml) Testing package and running examples with dependencies installed via Conda
31
-
32
- [![CI with pip install](https://github.com/fusion-energy/cad_to_dagmc/actions/workflows/ci_with_pip_install.yml/badge.svg)](https://github.com/fusion-energy/cad_to_dagmc/actions/workflows/ci_with_pip_install.yml) Testing package and running examples with dependencies installed via pip
33
-
34
- [![CI with model benchmark zoo](https://github.com/fusion-energy/cad_to_dagmc/actions/workflows/ci_with_benchmarks.yml/badge.svg?branch=main)](https://github.com/fusion-energy/cad_to_dagmc/actions/workflows/ci_with_benchmarks.yml) Testing with [Model Benchmark Zoo](https://github.com/fusion-energy/model_benchmark_zoo)
35
-
36
- [![Upload Python Package](https://github.com/fusion-energy/cad_to_dagmc/actions/workflows/python-publish.yml/badge.svg)](https://github.com/fusion-energy/cad_to_dagmc/actions/workflows/python-publish.yml)
37
-
38
- [![PyPI](https://img.shields.io/pypi/v/cad_to_dagmc?color=brightgreen&label=pypi&logo=grebrightgreenen&logoColor=green)](https://pypi.org/project/cad_to_dagmc/)
39
-
40
-
41
- A minimal package that converts CAD geometry to [DAGMC](https://github.com/svalinn/DAGMC/) (h5m) files, [unstructured mesh](https://docs.openmc.org/en/latest/pythonapi/generated/openmc.UnstructuredMesh.html) files (vtk) and Gmsh (msh) files ready for use in neutronics simulations.
42
-
43
- cad-to-dagmc can create DAGMC compatible:
44
- - surface meshes / faceted geometry / triangular meshes
45
- - unstructured mesh / tetrahedral meshes / volume meshes
46
-
47
- cad-to-dagmc can convert the following into DAGMC-compatible meshes:
48
- - STEP files
49
- - CadQuery objects (optionally use names as material tags)
50
- - Gmsh meshes (optionally use physical groups as material tags)
51
-
52
- Cad-to-dagmc offers a wide range of features including:
53
- - Geometry scaling with ```scale_factor``` argument
54
- - Direct surface meshing of CadQuery geometry with ```tolerance``` and ```angular_tolerance``` arguments (avoids using Gmsh)
55
- - Model wide mesh Gmsh size parameters with ```min_mesh_size``` and ```max_mesh_size``` arguments
56
- - Volume specific mesh sizing parameters with the ```set_size``` argument
57
- - Unstructured meshes that share the same coordinates as the surface mesh.
58
- - Volume meshing allows selecting individual volumes in the geometry.
59
- - Parallel meshing to quickly mesh the geometry using multiple CPU cores
60
- - Imprinting and merging of CAD geometry, which can be disabled with the ```imprint``` argument
61
- - Add geometry from multiple sources ([STEP](http://www.steptools.com/stds/step/) files, [CadQuery](https://cadquery.readthedocs.io) objects and [Gmsh](https://gmsh.info/) meshes)
62
- - Ability to tag the DAGMC implicit complement material using the ```implicit_complement_material_tag``` argument
63
- - Select different Gmsh mesh algorithms (defaults to 1) using the ```mesh_algorithm``` argument
64
- - Pass CadQuery objects in memory for fast transfer of geometry using the ```method``` argument
65
- - Easy to install with [pip](https://pypi.org/project/cad-to-dagmc/) and [Conda/Mamba](https://anaconda.org/conda-forge/cad_to_dagmc)
66
- - Well tested with [CI unit tests](https://github.com/fusion-energy/cad_to_dagmc/tree/main/tests), integration tests and the CSG [Model Benchmark Zoo](https://github.com/fusion-energy/model_benchmark_zoo).
67
- - Access to the Gmsh mesh to allow the user to define the full set of mesh parameters
68
- - Option to use Gmsh physical groups as material tags
69
- - Compatible with [assembly-mesh-plugin](https://github.com/CadQuery/assembly-mesh-plugin) (see examples)
70
- - Compatible with [Paramak](https://github.com/fusion-energy/paramak) geometry for fusion simulations.
71
-
72
-
73
- # Installation options
74
-
75
- - Install using Mamba
76
- - Install using Conda
77
- - Install using pip
78
-
79
- ## Install using Mamba
80
-
81
- In principle, installing any Conda/Mamba distribution will work. A few Conda/Mamba options are:
82
- - [Miniforge](https://github.com/conda-forge/miniforge) (recommended as it includes mamba)
83
- - [Anaconda](https://www.anaconda.com/download)
84
- - [Miniconda](https://docs.conda.io/en/latest/miniconda.html)
85
-
86
- This example assumes you have installed the Miniforge option or separately have installed Mamba with ```conda install -c conda-forge mamba -y```
87
-
88
- Create a new environment. I've chosen Python 3.10 here, but newer versions are
89
- also supported.
90
- ```bash
91
- mamba create --name new_env python=3.10 -y
92
- ```
93
-
94
- Activate the environment
95
- ```bash
96
- mamba activate new_env
97
- ```
98
-
99
- Then you can install the cad_to_dagmc package
100
- ```bash
101
- mamba install -y -c conda-forge cad_to_dagmc
102
- ```
103
-
104
- ## Install using Conda
105
-
106
- In principle, installing any Conda/Mamba distribution will work. A few Conda/Mamba options are:
107
- - [Miniforge](https://github.com/conda-forge/miniforge) (recommended as it includes mamba)
108
- - [Anaconda](https://www.anaconda.com/download)
109
- - [Miniconda](https://docs.conda.io/en/latest/miniconda.html)
110
-
111
- Create a new environment. I've chosen Python 3.10 here, but newer versions are
112
- also supported.
113
- ```bash
114
- conda create --name new_env python=3.10 -y
115
- ```
116
-
117
- Activate the environment
118
- ```bash
119
- conda activate new_env
120
- ```
121
-
122
- Then you can install the cad_to_dagmc package
123
- ```bash
124
- conda install -y -c conda-forge cad_to_dagmc
125
- ```
126
-
127
- ## Install using pip and source compilation
128
-
129
- It is also possible to avoid the use of conda/mamba and install using pip.
130
-
131
- First ensure HDF5 is installed, as it is needed by the MOAB pip install command
132
-
133
- ```
134
- sudo apt-get install libhdf5-dev
135
- ```
136
-
137
- Then install MOAB, currently available from its source repository.
138
-
139
- ```
140
- pip install git+https://bitbucket.org/fathomteam/moab/
141
- ```
142
-
143
- Then you can install the cad_to_dagmc package with ```pip```
144
-
145
- ```bash
146
- pip install cad_to_dagmc
147
- ```
148
-
149
- ## Install with OpenMC or other particle transport codes
150
-
151
- You may also want to install OpenMC with DAGMC to make use of the h5m geometry files produced in simulations. However, you could also use other supported particle transport codes such as MCNP and FLUKA ([link to DAGMC documentation](https://svalinn.github.io/DAGMC/)).
152
-
153
- To install OpenMC you can run ```mamba install -c conda-forge openmc```; however, the more specific command below makes sure a DAGMC-enabled build of the latest OpenMC is chosen by conda / mamba:
154
- ```bash
155
- mamba install -c conda-forge -y "openmc=0.15.2=dagmc*nompi*"
156
- ```
157
-
158
- You could also install using this [wheel repo](https://github.com/shimwell/wheels)
159
-
160
- Another option would be to [install OpenMC from source](https://docs.openmc.org/en/stable/quickinstall.html), which would also need to be compiled with the MOAB and DAGMC options enabled.
161
-
162
-
163
- # Known incompatibilities
164
-
165
- The package requires newer versions of Linux. For example, it does not work on Ubuntu 18.04 or older.
166
-
167
- The package requires newer versions of pip. It is recommended to ensure that your version of pip is up to date. This can be done with ```python -m pip install --upgrade pip```
168
-
169
- Installing one of the package dependencies (Gmsh) with pip appears to result in errors when passing CAD objects in memory between cadquery / ocp and gmsh. The default method of passing CAD objects is via file, so this should not impact most users. The conda-installed Gmsh appears to work fine with in-memory passing of CAD objects, as the OCP versions used by Gmsh and CadQuery match.
170
-
171
-
172
- # Usage
173
-
174
- For examples showing the creation of DAGMC h5m files and vtk files, and their usage within the OpenMC transport code, see the [examples folder](https://github.com/fusion-energy/cad_to_dagmc/tree/main/examples)
175
-
176
- For more examples see the CAD tasks in the [neutronics-workshop](https://github.com/fusion-energy/neutronics-workshop) and [model benchmark zoo](https://github.com/fusion-energy/model_benchmark_zoo)
177
-
178
- # Related software
179
-
180
- Also check out these other software projects that create DAGMC geometry: [CAD-to-OpenMC](https://github.com/openmsr/CAD_to_OpenMC), [Stellarmesh](https://github.com/Thea-Energy/stellarmesh) and [Coreform Cubit](https://coreform.com/products/coreform-cubit/).
@@ -1,8 +0,0 @@
1
- _version.py,sha256=XS8OMho0YiZyQ_qDeRsy__m_nWUzYVEJw-NLk1VtDQU,706
2
- cad_to_dagmc/__init__.py,sha256=fskHUTyCunSpnpJUvBfAYjx4uwDKXHTTiMP6GqnFRf0,494
3
- cad_to_dagmc/core.py,sha256=x4v7EeyXKBqsdcZeq_ks3EQ82yuteedAq8NA76rgJag,45732
4
- cad_to_dagmc-0.10.0.dist-info/licenses/LICENSE,sha256=B8kznH_777JVNZ3HOKDc4Tj24F7wJ68ledaNYeL9sCw,1070
5
- cad_to_dagmc-0.10.0.dist-info/METADATA,sha256=ZiH6QhnM8Dibqn72qAcMu_RzGyqOxyRKDW997dRkuHc,9045
6
- cad_to_dagmc-0.10.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
7
- cad_to_dagmc-0.10.0.dist-info/top_level.txt,sha256=zTi8C64SEBsE5WOtPovnxhOzt-E6Oc5nC3RW6M_5aEA,22
8
- cad_to_dagmc-0.10.0.dist-info/RECORD,,