cad-to-dagmc 0.9.9__py3-none-any.whl → 0.11.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- _version.py +2 -2
- cad_to_dagmc/__init__.py +2 -2
- cad_to_dagmc/core.py +598 -40
- cad_to_dagmc-0.11.0.dist-info/METADATA +56 -0
- cad_to_dagmc-0.11.0.dist-info/RECORD +8 -0
- {cad_to_dagmc-0.9.9.dist-info → cad_to_dagmc-0.11.0.dist-info}/WHEEL +1 -1
- cad_to_dagmc/direct_mesh_plugin.py +0 -510
- cad_to_dagmc-0.9.9.dist-info/METADATA +0 -179
- cad_to_dagmc-0.9.9.dist-info/RECORD +0 -9
- {cad_to_dagmc-0.9.9.dist-info → cad_to_dagmc-0.11.0.dist-info}/licenses/LICENSE +0 -0
- {cad_to_dagmc-0.9.9.dist-info → cad_to_dagmc-0.11.0.dist-info}/top_level.txt +0 -0
cad_to_dagmc/core.py
CHANGED
@@ -4,15 +4,32 @@ import cadquery as cq
 import gmsh
 import numpy as np
 from cadquery import importers
-from pymoab import core, types
 import tempfile
 import warnings
 from typing import Iterable
 from cad_to_dagmc import __version__
-
+import cadquery_direct_mesh_plugin
+
+
+class PyMoabNotFoundError(ImportError):
+    """Raised when pymoab is not installed but the pymoab backend is requested."""
+
+    def __init__(self, message=None):
+        if message is None:
+            message = (
+                "pymoab is not installed. pymoab/MOAB is not available on PyPI so it "
+                "cannot be included as a dependency of cad-to-dagmc.\n\n"
+                "You can install pymoab via one of these methods:\n"
+                " 1. From conda-forge: conda install -c conda-forge moab\n"
+                " 2. From extra index: pip install --extra-index-url https://shimwell.github.io/wheels moab\n"
+                " 3. From source: https://bitbucket.org/fathomteam/moab\n\n"
+                "Alternatively, use the h5py backend (the default) which does not require pymoab:\n"
+                " export_dagmc_h5m_file(..., h5m_backend='h5py')"
+            )
+        super().__init__(message)

 
-def define_moab_core_and_tags()
+def define_moab_core_and_tags():
     """Creates a MOAB Core instance which can be built up by adding sets of
     triangles to the instance
@@ -20,6 +37,10 @@ def define_moab_core_and_tags() -> tuple[core.Core, dict]:
         (pymoab Core): A pymoab.core.Core() instance
         (pymoab tag_handle): A pymoab.core.tag_get_handle() instance
     """
+    try:
+        from pymoab import core, types
+    except ImportError as e:
+        raise PyMoabNotFoundError() from e

     # create pymoab instance
     moab_core = core.Core()
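Note: with this change the pymoab import is deferred to call time, so the package imports without MOAB installed and the error only appears when a pymoab-backed code path is actually used. A minimal caller-side sketch (an illustration, not part of the diff) of handling the new exception:

    from cad_to_dagmc.core import PyMoabNotFoundError, define_moab_core_and_tags

    try:
        moab_core, moab_tags = define_moab_core_and_tags()
    except PyMoabNotFoundError as error:
        # the message lists the conda-forge, extra-index-url and from-source install options
        print(error)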
@@ -68,28 +89,66 @@ def define_moab_core_and_tags() -> tuple[core.Core, dict]:

 def vertices_to_h5m(
     vertices: list[tuple[float, float, float]] | list["cadquery.occ_impl.geom.Vector"],
-    triangles_by_solid_by_face:
+    triangles_by_solid_by_face: dict[int, dict[int, list[list[int]]]],
     material_tags: list[str],
     h5m_filename: str = "dagmc.h5m",
     implicit_complement_material_tag: str | None = None,
+    method: str = "h5py",
 ):
     """Converts vertices and triangle sets into a tagged h5m file compatible
     with DAGMC enabled neutronics simulations

     Args:
-        vertices:
-        triangles
-        material_tags:
-        h5m_filename:
-        implicit_complement_material_tag:
+        vertices: List of vertex coordinates as (x, y, z) tuples or CadQuery vectors
+        triangles_by_solid_by_face: Dict mapping solid_id -> face_id -> list of triangles
+        material_tags: List of material tag names, one per solid
+        h5m_filename: Output filename for the h5m file
+        implicit_complement_material_tag: Optional material tag for implicit complement
+        method: Backend to use for writing h5m file ('pymoab' or 'h5py')
     """
+    if method == "pymoab":
+        return _vertices_to_h5m_pymoab(
+            vertices=vertices,
+            triangles_by_solid_by_face=triangles_by_solid_by_face,
+            material_tags=material_tags,
+            h5m_filename=h5m_filename,
+            implicit_complement_material_tag=implicit_complement_material_tag,
+        )
+    elif method == "h5py":
+        return _vertices_to_h5m_h5py(
+            vertices=vertices,
+            triangles_by_solid_by_face=triangles_by_solid_by_face,
+            material_tags=material_tags,
+            h5m_filename=h5m_filename,
+            implicit_complement_material_tag=implicit_complement_material_tag,
+        )
+    else:
+        raise ValueError(f"method must be 'pymoab' or 'h5py', not '{method}'")
+
+
+def _vertices_to_h5m_pymoab(
+    vertices: list[tuple[float, float, float]] | list["cadquery.occ_impl.geom.Vector"],
+    triangles_by_solid_by_face: dict[int, dict[int, list[list[int]]]],
+    material_tags: list[str],
+    h5m_filename: str = "dagmc.h5m",
+    implicit_complement_material_tag: str | None = None,
+):
+    """PyMOAB backend for vertices_to_h5m."""
+    try:
+        from pymoab import types
+    except ImportError as e:
+        raise PyMoabNotFoundError() from e

     if len(material_tags) != len(triangles_by_solid_by_face):
         msg = f"The number of material_tags provided is {len(material_tags)} and the number of sets of triangles is {len(triangles_by_solid_by_face)}. You must provide one material_tag for every triangle set"
         raise ValueError(msg)

     # limited attribute checking to see if user passed in a list of CadQuery vectors
-    if
+    if (
+        hasattr(vertices[0], "x")
+        and hasattr(vertices[0], "y")
+        and hasattr(vertices[0], "z")
+    ):
         vertices_floats = []
         for vert in vertices:
             vertices_floats.append((vert.x, vert.y, vert.z))
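For orientation, a minimal call of the reworked function using the new backend dispatch (a sketch, not part of the diff; the single-tetrahedron data is invented for illustration):

    from cad_to_dagmc.core import vertices_to_h5m

    vertices = [
        (0.0, 0.0, 0.0),
        (1.0, 0.0, 0.0),
        (0.0, 1.0, 0.0),
        (0.0, 0.0, 1.0),
    ]
    # one solid (id 1) with four faces (ids 1-4), one triangle per face
    triangles_by_solid_by_face = {
        1: {1: [[0, 1, 2]], 2: [[0, 1, 3]], 3: [[0, 2, 3]], 4: [[1, 2, 3]]}
    }
    vertices_to_h5m(
        vertices=vertices,
        triangles_by_solid_by_face=triangles_by_solid_by_face,
        material_tags=["mat1"],
        h5m_filename="dagmc.h5m",
        method="h5py",  # the default; "pymoab" requires MOAB to be installed
    )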
@@ -142,7 +201,9 @@ def vertices_to_h5m(
                 if len(face_ids_with_solid_ids[face_id]) == 2:
                     other_solid_id = face_ids_with_solid_ids[face_id][1]
                     other_volume_set = volume_sets_by_solid_id[other_solid_id]
-                    sense_data = np.array(
+                    sense_data = np.array(
+                        [other_volume_set, volume_set], dtype="uint64"
+                    )
                 else:
                     sense_data = np.array([volume_set, 0], dtype="uint64")

@@ -215,6 +276,442 @@ def vertices_to_h5m(
     return h5m_filename


+def _vertices_to_h5m_h5py(
+    vertices: list[tuple[float, float, float]] | list["cadquery.occ_impl.geom.Vector"],
+    triangles_by_solid_by_face: dict[int, dict[int, list[list[int]]]],
+    material_tags: list[str],
+    h5m_filename: str = "dagmc.h5m",
+    implicit_complement_material_tag: str | None = None,
+):
+    """H5PY backend for vertices_to_h5m.
+
+    Creates an h5m file compatible with DAGMC using h5py directly,
+    without requiring pymoab.
+    """
+    import h5py
+    from datetime import datetime
+
+    if len(material_tags) != len(triangles_by_solid_by_face):
+        msg = f"The number of material_tags provided is {len(material_tags)} and the number of sets of triangles is {len(triangles_by_solid_by_face)}. You must provide one material_tag for every triangle set"
+        raise ValueError(msg)
+
+    # Convert CadQuery vectors to floats if needed
+    if (
+        hasattr(vertices[0], "x")
+        and hasattr(vertices[0], "y")
+        and hasattr(vertices[0], "z")
+    ):
+        vertices_floats = [(vert.x, vert.y, vert.z) for vert in vertices]
+    else:
+        vertices_floats = vertices
+
+    # Build face_ids_with_solid_ids to track shared faces
+    face_ids_with_solid_ids = {}
+    for solid_id, triangles_on_each_face in triangles_by_solid_by_face.items():
+        for face_id in triangles_on_each_face.keys():
+            if face_id in face_ids_with_solid_ids:
+                face_ids_with_solid_ids[face_id].append(solid_id)
+            else:
+                face_ids_with_solid_ids[face_id] = [solid_id]
+
+    # Collect all unique faces and their triangles
+    all_faces = {}  # face_id -> list of triangles
+    for solid_id, triangles_on_each_face in triangles_by_solid_by_face.items():
+        for face_id, triangles_on_face in triangles_on_each_face.items():
+            if face_id not in all_faces:
+                all_faces[face_id] = triangles_on_face
+
+    # Convert vertices to numpy array
+    vertices_arr = np.asarray(vertices_floats, dtype=np.float64)
+    num_vertices = len(vertices_arr)
+
+    # Collect all triangles
+    all_triangles = []
+    for face_id in sorted(all_faces.keys()):
+        all_triangles.extend(all_faces[face_id])
+    all_triangles = np.asarray(all_triangles, dtype=np.int64)
+    num_triangles = len(all_triangles)
+
+    # Create the h5m file
+    # makes the folder if it does not exist
+    if Path(h5m_filename).parent:
+        Path(h5m_filename).parent.mkdir(parents=True, exist_ok=True)
+
+    with h5py.File(h5m_filename, "w") as f:
+        tstt = f.create_group("tstt")
+
+        # Global ID counter - starts at 1
+        global_id = 1
+
+        # === NODES ===
+        nodes_group = tstt.create_group("nodes")
+        coords = nodes_group.create_dataset("coordinates", data=vertices_arr)
+        coords.attrs.create("start_id", global_id)
+        global_id += num_vertices
+
+        # Node tags
+        node_tags = nodes_group.create_group("tags")
+        node_tags.create_dataset("GLOBAL_ID", data=np.full(num_vertices, -1, dtype=np.int32))
+
+        # === ELEMENTS ===
+        elements = tstt.create_group("elements")
+
+        # Element type enum
+        elems = {
+            "Edge": 1, "Tri": 2, "Quad": 3, "Polygon": 4, "Tet": 5,
+            "Pyramid": 6, "Prism": 7, "Knife": 8, "Hex": 9, "Polyhedron": 10,
+        }
+        tstt["elemtypes"] = h5py.enum_dtype(elems)
+
+        # History
+        now = datetime.now()
+        tstt.create_dataset(
+            "history",
+            data=[
+                "cad_to_dagmc".encode("ascii"),
+                __version__.encode("ascii"),
+                now.strftime("%m/%d/%y").encode("ascii"),
+                now.strftime("%H:%M:%S").encode("ascii"),
+            ],
+        )
+
+        # Triangles
+        tri3_group = elements.create_group("Tri3")
+        tri3_group.attrs.create("element_type", elems["Tri"], dtype=tstt["elemtypes"])
+
+        # Node indices are 1-based in h5m
+        connectivity = tri3_group.create_dataset(
+            "connectivity",
+            data=all_triangles + 1,
+            dtype=np.uint64,
+        )
+        triangle_start_id = global_id
+        connectivity.attrs.create("start_id", triangle_start_id)
+        global_id += num_triangles
+
+        # Triangle tags
+        tags_tri3 = tri3_group.create_group("tags")
+        tags_tri3.create_dataset("GLOBAL_ID", data=np.full(num_triangles, -1, dtype=np.int32))
+
+        # === SETS ===
+        # Plan out the entity set structure:
+        # For each solid: 1 volume set, N surface sets (one per face), 1 group set (material)
+        # Plus: 1 file set at the end, optionally 1 implicit complement group
+
+        solid_ids = list(triangles_by_solid_by_face.keys())
+        num_solids = len(solid_ids)
+
+        # Assign set IDs
+        sets_start_id = global_id
+
+        # Map solid_id -> volume_set_id
+        volume_set_ids = {}
+        # Map face_id -> surface_set_id
+        surface_set_ids = {}
+        # Map solid_id -> group_set_id
+        group_set_ids = {}
+
+        current_set_id = sets_start_id
+
+        # First, assign IDs to all surfaces (one per unique face)
+        for face_id in sorted(all_faces.keys()):
+            surface_set_ids[face_id] = current_set_id
+            current_set_id += 1
+
+        # Then assign IDs to volumes
+        for solid_id in solid_ids:
+            volume_set_ids[solid_id] = current_set_id
+            current_set_id += 1
+
+        # Then assign IDs to groups (materials)
+        for solid_id in solid_ids:
+            group_set_ids[solid_id] = current_set_id
+            current_set_id += 1
+
+        # Implicit complement group (if requested)
+        implicit_complement_set_id = None
+        if implicit_complement_material_tag:
+            implicit_complement_set_id = current_set_id
+            current_set_id += 1
+
+        # File set
+        file_set_id = current_set_id
+        current_set_id += 1
+
+        global_id = current_set_id
+
+        # === TAGS ===
+        tstt_tags = tstt.create_group("tags")
+
+        # Collect tagged set IDs for CATEGORY (all entities)
+        # and GEOM_DIMENSION (only surfaces and volumes - not groups, to match pymoab)
+        category_set_ids = []
+        categories = []
+        geom_dim_set_ids = []
+        geom_dimensions = []
+
+        # Volumes first (to match pymoab ordering)
+        for solid_id in solid_ids:
+            category_set_ids.append(volume_set_ids[solid_id])
+            categories.append("Volume")
+            geom_dim_set_ids.append(volume_set_ids[solid_id])
+            geom_dimensions.append(3)
+
+        # Groups (CATEGORY only - pymoab doesn't set geom_dimension on groups)
+        # Note: Groups COULD have geom_dimension=4 set, but pymoab doesn't do this
+        for solid_id in solid_ids:
+            category_set_ids.append(group_set_ids[solid_id])
+            categories.append("Group")
+
+        # Surfaces
+        for face_id in sorted(all_faces.keys()):
+            category_set_ids.append(surface_set_ids[face_id])
+            categories.append("Surface")
+            geom_dim_set_ids.append(surface_set_ids[face_id])
+            geom_dimensions.append(2)
+
+        # Implicit complement (CATEGORY only)
+        if implicit_complement_material_tag:
+            category_set_ids.append(implicit_complement_set_id)
+            categories.append("Group")
+
+        # CATEGORY tag
+        # Note: We use opaque dtype (|V32) to match pymoab output exactly.
+        # A string dtype (|S32) would also work and be more readable in h5dump,
+        # but we match pymoab for maximum compatibility.
+        cat_group = tstt_tags.create_group("CATEGORY")
+        cat_group.attrs.create("class", 1, dtype=np.int32)
+        cat_group.create_dataset("id_list", data=np.array(category_set_ids, dtype=np.uint64))
+        # Create opaque 32-byte type to match pymoab's H5T_OPAQUE
+        opaque_dt = h5py.opaque_dtype(np.dtype("V32"))
+        cat_group["type"] = opaque_dt
+        # Encode category strings as 32-byte null-padded values
+        cat_values = np.array([s.encode("ascii").ljust(32, b"\x00") for s in categories], dtype="V32")
+        cat_group.create_dataset("values", data=cat_values)
+
+        # GEOM_DIMENSION tag
+        # Note: We only tag surfaces (dim=2) and volumes (dim=3), not groups.
+        # Groups COULD have geom_dimension=4, but pymoab doesn't set this.
+        geom_group = tstt_tags.create_group("GEOM_DIMENSION")
+        geom_group["type"] = np.dtype("i4")
+        geom_group.attrs.create("class", 1, dtype=np.int32)
+        geom_group.attrs.create("default", -1, dtype=geom_group["type"])
+        geom_group.attrs.create("global", -1, dtype=geom_group["type"])
+        geom_group.create_dataset("id_list", data=np.array(geom_dim_set_ids, dtype=np.uint64))
+        geom_group.create_dataset("values", data=np.array(geom_dimensions, dtype=np.int32))
+
+        # GEOM_SENSE_2 tag (only for surfaces)
+        surface_ids_list = [surface_set_ids[fid] for fid in sorted(all_faces.keys())]
+        gs2_group = tstt_tags.create_group("GEOM_SENSE_2")
+        gs2_dtype = np.dtype("(2,)u8")
+        gs2_group["type"] = gs2_dtype
+        gs2_group.attrs.create("class", 1, dtype=np.int32)
+        gs2_group.attrs.create("is_handle", 1, dtype=np.int32)
+        gs2_group.create_dataset("id_list", data=np.array(surface_ids_list, dtype=np.uint64))
+
+        # Build sense data for each surface
+        sense_values = []
+        for face_id in sorted(all_faces.keys()):
+            solids_for_face = face_ids_with_solid_ids[face_id]
+            if len(solids_for_face) == 2:
+                # Shared face - both volumes
+                vol1 = volume_set_ids[solids_for_face[0]]
+                vol2 = volume_set_ids[solids_for_face[1]]
+                sense_values.append([vol1, vol2])
+            else:
+                # Single volume
+                vol = volume_set_ids[solids_for_face[0]]
+                sense_values.append([vol, 0])
+
+        if sense_values:
+            gs2_values = np.zeros((len(sense_values),), dtype=[("f0", "<u8", (2,))])
+            gs2_values["f0"] = np.array(sense_values, dtype=np.uint64)
+            gs2_space = h5py.h5s.create_simple((len(sense_values),))
+            gs2_arr_type = h5py.h5t.array_create(h5py.h5t.NATIVE_UINT64, (2,))
+            gs2_dset = h5py.h5d.create(gs2_group.id, b"values", gs2_arr_type, gs2_space)
+            gs2_dset.write(h5py.h5s.ALL, h5py.h5s.ALL, gs2_values, mtype=gs2_arr_type)
+            gs2_dset.close()
+
+        # GLOBAL_ID tag - store as sparse tag with id_list and values
+        # This stores the user-facing IDs for surfaces and volumes
+        gid_ids = []
+        gid_values = []
+        # Surfaces get their face_id as global_id
+        for face_id in sorted(all_faces.keys()):
+            gid_ids.append(surface_set_ids[face_id])
+            gid_values.append(face_id)
+        # Volumes get their solid_id as global_id
+        for solid_id in solid_ids:
+            gid_ids.append(volume_set_ids[solid_id])
+            gid_values.append(solid_id)
+        # Groups also get the solid_id
+        for solid_id in solid_ids:
+            gid_ids.append(group_set_ids[solid_id])
+            gid_values.append(solid_id)
+
+        gid_group = tstt_tags.create_group("GLOBAL_ID")
+        gid_group["type"] = np.dtype("i4")
+        gid_group.attrs.create("class", 2, dtype=np.int32)
+        gid_group.attrs.create("default", -1, dtype=gid_group["type"])
+        gid_group.attrs.create("global", -1, dtype=gid_group["type"])
+        gid_group.create_dataset("id_list", data=np.array(gid_ids, dtype=np.uint64))
+        gid_group.create_dataset("values", data=np.array(gid_values, dtype=np.int32))
+
+        # NAME tag (for groups - material names)
+        name_ids = []
+        name_values = []
+        for solid_id, mat_tag in zip(solid_ids, material_tags):
+            name_ids.append(group_set_ids[solid_id])
+            name_values.append(f"mat:{mat_tag}")
+        if implicit_complement_material_tag:
+            name_ids.append(implicit_complement_set_id)
+            name_values.append(f"mat:{implicit_complement_material_tag}_comp")
+
+        name_group = tstt_tags.create_group("NAME")
+        name_group.attrs.create("class", 1, dtype=np.int32)
+        name_group.create_dataset("id_list", data=np.array(name_ids, dtype=np.uint64))
+        name_group["type"] = h5py.opaque_dtype(np.dtype("S32"))
+        name_group.create_dataset("values", data=name_values, dtype=name_group["type"])
+
+        # Other standard tags (empty but needed)
+        for tag_name in ["DIRICHLET_SET", "MATERIAL_SET", "NEUMANN_SET"]:
+            tag_grp = tstt_tags.create_group(tag_name)
+            tag_grp["type"] = np.dtype("i4")
+            tag_grp.attrs.create("class", 1, dtype=np.int32)
+            tag_grp.attrs.create("default", -1, dtype=tag_grp["type"])
+            tag_grp.attrs.create("global", -1, dtype=tag_grp["type"])
+
+        # === SETS structure ===
+        sets_group = tstt.create_group("sets")
+
+        # Build contents, parents, children, and list arrays
+        contents = []
+        list_rows = []
+        parents_list = []
+        children_list = []
+
+        # Track triangle ranges per face
+        tri_offset = 0
+        face_triangle_ranges = {}
+        for face_id in sorted(all_faces.keys()):
+            tris = all_faces[face_id]
+            face_triangle_ranges[face_id] = (tri_offset, len(tris))
+            tri_offset += len(tris)
+
+        # Track vertices per face
+        face_vertex_sets = {}
+        for face_id, tris in all_faces.items():
+            verts = set()
+            for tri in tris:
+                verts.update(tri)
+            face_vertex_sets[face_id] = sorted(verts)
+
+        contents_end = -1
+        children_end = -1
+        parents_end = -1
+
+        # Surface sets
+        for face_id in sorted(all_faces.keys()):
+            # Content: vertices + triangles for this face
+            verts = face_vertex_sets[face_id]
+            tri_start, tri_count = face_triangle_ranges[face_id]
+
+            # Add individual vertex handles (1-based IDs)
+            # Don't assume vertices are contiguous - store each one
+            for v in verts:
+                contents.append(v + 1)  # 1-based vertex ID
+
+            # Add individual triangle handles
+            for i in range(tri_count):
+                contents.append(triangle_start_id + tri_start + i)
+
+            contents_end = len(contents) - 1
+
+            # Parent-child: surface is child of volume(s)
+            solids_for_face = face_ids_with_solid_ids[face_id]
+            for solid_id in solids_for_face:
+                parents_list.append(volume_set_ids[solid_id])
+            parents_end = len(parents_list) - 1
+
+            # flags: 2 = MESHSET_SET (handles, not ranges)
+            list_rows.append([contents_end, children_end, parents_end, 2])
+
+        # Volume sets (empty contents, but have surface children)
+        for solid_id in solid_ids:
+            # Volumes have no direct content
+            # Children are the surfaces
+            faces_in_solid = list(triangles_by_solid_by_face[solid_id].keys())
+            for face_id in faces_in_solid:
+                children_list.append(surface_set_ids[face_id])
+            children_end = len(children_list) - 1
+
+            # flags: 2 = handle-based (0b0010)
+            list_rows.append([contents_end, children_end, parents_end, 2])
+
+        # Group sets (contain volume handles)
+        for solid_id in solid_ids:
+            contents.append(volume_set_ids[solid_id])
+            contents_end = len(contents) - 1
+            list_rows.append([contents_end, children_end, parents_end, 2])
+
+        # Implicit complement group
+        if implicit_complement_material_tag:
+            # Add the last volume to the implicit complement group
+            contents.append(volume_set_ids[solid_ids[-1]])
+            contents_end = len(contents) - 1
+            list_rows.append([contents_end, children_end, parents_end, 2])
+
+        # File set (contains everything)
+        contents.extend([1, file_set_id - 1])  # range of all entities
+        contents_end = len(contents) - 1
+        list_rows.append([contents_end, children_end, parents_end, 10])
+
+        # Write sets datasets
+        sets_group.create_dataset("contents", data=np.array(contents, dtype=np.uint64))
+        if children_list:
+            sets_group.create_dataset("children", data=np.array(children_list, dtype=np.uint64))
+        else:
+            sets_group.create_dataset("children", data=np.array([], dtype=np.uint64))
+        if parents_list:
+            sets_group.create_dataset("parents", data=np.array(parents_list, dtype=np.uint64))
+        else:
+            sets_group.create_dataset("parents", data=np.array([], dtype=np.uint64))
+
+        lst = sets_group.create_dataset("list", data=np.array(list_rows, dtype=np.int64))
+        lst.attrs.create("start_id", sets_start_id)
+
+        # Set tags (GLOBAL_ID for each set)
+        sets_tags = sets_group.create_group("tags")
+        set_global_ids = []
+
+        # Surface global IDs
+        for face_id in sorted(all_faces.keys()):
+            set_global_ids.append(face_id)
+
+        # Volume global IDs
+        for solid_id in solid_ids:
+            set_global_ids.append(solid_id)
+
+        # Group global IDs
+        for solid_id in solid_ids:
+            set_global_ids.append(solid_id)
+
+        # Implicit complement
+        if implicit_complement_material_tag:
+            set_global_ids.append(-1)
+
+        # File set
+        set_global_ids.append(-1)
+
+        sets_tags.create_dataset("GLOBAL_ID", data=np.array(set_global_ids, dtype=np.int32))
+
+        # Max ID attribute
+        tstt.attrs.create("max_id", np.uint64(global_id - 1))
+
+    print(f"written DAGMC file {h5m_filename}")
+    return h5m_filename
+
+
 def get_volumes(gmsh, assembly, method="file", scale_factor=1.0):

     if method == "in memory":
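The file written by the new h5py backend can be sanity-checked with h5py alone; a short inspection sketch (assumes a dagmc.h5m produced as in the earlier example; not part of the diff):

    import h5py

    with h5py.File("dagmc.h5m", "r") as f:
        print(f["tstt/nodes/coordinates"].shape)         # (num_vertices, 3)
        print(f["tstt/elements/Tri3/connectivity"][:3])  # 1-based vertex indices
        print(sorted(f["tstt/tags"].keys()))             # CATEGORY, GEOM_DIMENSION, ...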
@@ -233,7 +730,9 @@ def get_volumes(gmsh, assembly, method="file", scale_factor=1.0):

     if scale_factor != 1.0:
         dim_tags = gmsh.model.getEntities(3)
-        gmsh.model.occ.dilate(
+        gmsh.model.occ.dilate(
+            dim_tags, 0.0, 0.0, 0.0, scale_factor, scale_factor, scale_factor
+        )
         # update the model to ensure the scaling factor has been applied
         gmsh.model.occ.synchronize()

@@ -298,7 +797,9 @@ def set_sizes_for_mesh(
     )

     # Step 1: Preprocess boundaries to find shared surfaces and decide mesh sizes
-    boundary_sizes =
+    boundary_sizes = (
+        {}
+    )  # Dictionary to store the mesh size and count for each boundary
     for volume_id, size in set_size.items():
         boundaries = gmsh.model.getBoundary(
             [(3, volume_id)], recursive=True
@@ -373,7 +874,8 @@ def mesh_to_vertices_and_triangles(
         for nodeTag in nodeTags:
             shifted_node_tags.append(nodeTag - 1)
         grouped_node_tags = [
-            shifted_node_tags[i : i + n]
+            shifted_node_tags[i : i + n]
+            for i in range(0, len(shifted_node_tags), n)
         ]
         nodes_in_each_surface[surface] = grouped_node_tags
     triangles_by_solid_by_face[vol_id] = nodes_in_each_surface
@@ -433,6 +935,7 @@ def export_gmsh_object_to_dagmc_h5m_file(
     material_tags: list[str] | None = None,
     implicit_complement_material_tag: str | None = None,
     filename: str = "dagmc.h5m",
+    h5m_backend: str = "h5py",
 ) -> str:
     """
     Exports a GMSH object to a DAGMC-compatible h5m file. Note gmsh should
@@ -443,6 +946,7 @@ def export_gmsh_object_to_dagmc_h5m_file(
         material_tags: A list of material tags corresponding to the volumes in the GMSH object.
         implicit_complement_material_tag: The material tag for the implicit complement (void space).
         filename: The name of the output h5m file. Defaults to "dagmc.h5m".
+        h5m_backend: Backend for writing h5m file, 'pymoab' or 'h5py'. Defaults to 'h5py'.

     Returns:
         str: The filename of the generated DAGMC h5m file.
@@ -470,6 +974,7 @@ def export_gmsh_object_to_dagmc_h5m_file(
         material_tags=material_tags,
         h5m_filename=filename,
         implicit_complement_material_tag=implicit_complement_material_tag,
+        method=h5m_backend,
     )

     return h5m_filename
@@ -497,11 +1002,13 @@ def export_gmsh_file_to_dagmc_h5m_file(
     material_tags: list[str] | None = None,
     implicit_complement_material_tag: str | None = None,
     dagmc_filename: str = "dagmc.h5m",
+    h5m_backend: str = "h5py",
 ) -> str:
     """Saves a DAGMC h5m file of the geometry GMsh file. This function
     initializes and finalizes Gmsh.

     Args:
+        gmsh_filename (str): the filename of the GMSH mesh file.
         material_tags (list[str]): the names of the DAGMC
             material tags to assign. These will need to be in the same
             order as the volumes in the GMESH mesh and match the
@@ -509,7 +1016,9 @@ def export_gmsh_file_to_dagmc_h5m_file(
         implicit_complement_material_tag (str | None, optional):
             the name of the material tag to use for the implicit
             complement (void space). Defaults to None which is a vacuum.
-        dagmc_filename (str, optional):
+        dagmc_filename (str, optional): Output filename. Defaults to "dagmc.h5m".
+        h5m_backend (str, optional): Backend for writing h5m file, 'pymoab' or 'h5py'.
+            Defaults to 'h5py'.

     Returns:
         str: The filename of the generated DAGMC h5m file.
@@ -542,6 +1051,7 @@ def export_gmsh_file_to_dagmc_h5m_file(
         material_tags=material_tags,
         h5m_filename=dagmc_filename,
         implicit_complement_material_tag=implicit_complement_material_tag,
+        method=h5m_backend,
     )

     return h5m_filename
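A usage sketch of the extended file-based export (the example.msh filename is a placeholder; not part of the diff):

    from cad_to_dagmc.core import export_gmsh_file_to_dagmc_h5m_file

    export_gmsh_file_to_dagmc_h5m_file(
        gmsh_filename="example.msh",
        material_tags=["mat1"],
        dagmc_filename="dagmc.h5m",
        h5m_backend="h5py",  # or "pymoab" if MOAB is available
    )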
@@ -583,14 +1093,18 @@ class CadToDagmc:
             scaled_part = part
         else:
             scaled_part = part.scale(scale_factor)
-        return self.add_cadquery_object(
+        return self.add_cadquery_object(
+            cadquery_object=scaled_part, material_tags=material_tags
+        )

     def add_cadquery_object(
         self,
         cadquery_object: (
-            cq.assembly.Assembly
+            cq.assembly.Assembly
+            | cq.occ_impl.shapes.Compound
+            | cq.occ_impl.shapes.Solid
         ),
-        material_tags: list[str] |
+        material_tags: list[str] | str,
         scale_factor: float = 1.0,
     ) -> int:
         """Loads the parts from CadQuery object into the model.
@@ -612,25 +1126,43 @@ class CadToDagmc:
             int: number of volumes in the stp file.
         """

+        if isinstance(material_tags, str) and material_tags not in [
+            "assembly_materials",
+            "assembly_names",
+        ]:
+            raise ValueError(
+                f"If material_tags is a string it must be 'assembly_materials' or 'assembly_names' but got {material_tags}"
+            )
+
         if isinstance(cadquery_object, cq.assembly.Assembly):
             # look for materials in each part of the assembly
-            if material_tags
+            if material_tags == "assembly_materials":
                 material_tags = []
-                for child in cadquery_object
+                for child in _get_all_leaf_children(cadquery_object):
                     if child.material is not None and child.material.name is not None:
-                        material_tags.append(child.material.name)
+                        material_tags.append(str(child.material.name))
                     else:
                         raise ValueError(
-                            f"Not all parts in the assembly have
-                            f"
-                            "
+                            f"Not all parts in the assembly have materials assigned.\n"
+                            f"When adding to an assembly include material=cadquery.Material('material_name')\n"
+                            f"Missing material tag for child: {child}.\n"
+                            "Please assign material tags to all parts or provide material_tags argument when adding the assembly.\n"
                         )
+                print("material_tags found from assembly materials:", material_tags)
+            elif material_tags == "assembly_names":
+                material_tags = []
+                for child in _get_all_leaf_children(cadquery_object):
+                    # parts always have a name as cq will auto assign one
+                    material_tags.append(child.name)
+                print("material_tags found from assembly names:", material_tags)

             cadquery_compound = cadquery_object.toCompound()
         else:
             cadquery_compound = cadquery_object

-        if isinstance(
+        if isinstance(
+            cadquery_compound, (cq.occ_impl.shapes.Compound, cq.occ_impl.shapes.Solid)
+        ):
             iterable_solids = cadquery_compound.Solids()
         else:
             iterable_solids = cadquery_compound.val().Solids()
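With the new string options, material tags can be taken from the assembly itself. A small sketch (geometry and part names invented for illustration; not part of the diff):

    import cadquery as cq
    from cad_to_dagmc import CadToDagmc

    assembly = cq.Assembly()
    assembly.add(cq.Workplane().box(1, 1, 1), name="steel_part")
    assembly.add(cq.Workplane().box(1, 1, 1).translate((2, 0, 0)), name="copper_part")

    model = CadToDagmc()
    # tags are read from the child part names, per the logic in the hunk above
    model.add_cadquery_object(assembly, material_tags="assembly_names")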
@@ -638,10 +1170,9 @@ class CadToDagmc:
         if scale_factor == 1.0:
             scaled_iterable_solids = iterable_solids
         else:
-            scaled_iterable_solids = [
-
-
-            # if material_tags is None:
+            scaled_iterable_solids = [
+                part.scale(scale_factor) for part in iterable_solids
+            ]

         check_material_tags(material_tags, scaled_iterable_solids)
         if material_tags:
@@ -742,7 +1273,9 @@ class CadToDagmc:
         gmsh.model.occ.synchronize()
         # Clear the mesh
         gmsh.model.mesh.clear()
-        gmsh.option.setNumber(
+        gmsh.option.setNumber(
+            "Mesh.SaveElementTagType", 3
+        )  # Save only volume elements

         gmsh.model.mesh.generate(3)

@@ -813,7 +1346,9 @@ class CadToDagmc:

         gmsh = init_gmsh()

-        gmsh, _ = get_volumes(
+        gmsh, _ = get_volumes(
+            gmsh, imprinted_assembly, method=method, scale_factor=scale_factor
+        )

         gmsh = set_sizes_for_mesh(
             gmsh=gmsh,
@@ -862,6 +1397,8 @@ class CadToDagmc:
            - meshing_backend (str, optional): explicitly specify 'gmsh' or 'cadquery'.
              If not provided, backend is auto-selected based on other arguments.
              Defaults to 'cadquery' if no backend-specific arguments are given.
+           - h5m_backend (str, optional): 'pymoab' or 'h5py' for writing h5m files.
+             Defaults to 'h5py'.

            For GMSH backend:
            - min_mesh_size (float): minimum mesh element size
@@ -894,7 +1431,7 @@ class CadToDagmc:
             "method",
             "unstructured_volumes",
         }
-        all_acceptable_keys = cadquery_keys | gmsh_keys | {"meshing_backend"}
+        all_acceptable_keys = cadquery_keys | gmsh_keys | {"meshing_backend", "h5m_backend"}

         # Check for invalid kwargs
         invalid_keys = set(kwargs.keys()) - all_acceptable_keys
@@ -907,6 +1444,9 @@ class CadToDagmc:
         # Handle meshing_backend - either from kwargs or auto-detect
         meshing_backend = kwargs.pop("meshing_backend", None)

+        # Handle h5m_backend - pymoab or h5py
+        h5m_backend = kwargs.pop("h5m_backend", "h5py")
+
         if meshing_backend is None:
             # Auto-select meshing_backend based on kwargs
             has_cadquery = any(key in kwargs for key in cadquery_keys)
@@ -1014,8 +1554,7 @@ class CadToDagmc:
         if meshing_backend == "cadquery":

             # Mesh the assembly using CadQuery's direct-mesh plugin
-            cq_mesh =
-                assembly,
+            cq_mesh = assembly.toMesh(
                 imprint=imprint,
                 tolerance=tolerance,
                 angular_tolerance=angular_tolerance,
@@ -1024,9 +1563,13 @@ class CadToDagmc:

             # Fix the material tag order for imprinted assemblies
             if cq_mesh["imprinted_assembly"] is not None:
-                imprinted_solids_with_org_id = cq_mesh[
+                imprinted_solids_with_org_id = cq_mesh[
+                    "imprinted_solids_with_orginal_ids"
+                ]

-                scrambled_ids = get_ids_from_imprinted_assembly(
+                scrambled_ids = get_ids_from_imprinted_assembly(
+                    imprinted_solids_with_org_id
+                )

                 material_tags_in_brep_order = order_material_ids_by_brep_order(
                     original_ids, scrambled_ids, self.material_tags
@@ -1044,11 +1587,13 @@ class CadToDagmc:
         # If assembly is not to be imprinted, pass through the assembly as-is
         if imprint:
             print("Imprinting assembly for mesh generation")
-            imprinted_assembly, imprinted_solids_with_org_id =
-                assembly
+            imprinted_assembly, imprinted_solids_with_org_id = (
+                cq.occ_impl.assembly.imprint(assembly)
             )

-            scrambled_ids = get_ids_from_imprinted_assembly(
+            scrambled_ids = get_ids_from_imprinted_assembly(
+                imprinted_solids_with_org_id
+            )

             material_tags_in_brep_order = order_material_ids_by_brep_order(
                 original_ids, scrambled_ids, self.material_tags
@@ -1092,6 +1637,7 @@ class CadToDagmc:
             material_tags=material_tags_in_brep_order,
             h5m_filename=filename,
             implicit_complement_material_tag=implicit_complement_material_tag,
+            method=h5m_backend,
         )

         if unstructured_volumes:
@@ -1108,7 +1654,9 @@ class CadToDagmc:
                 gmsh.model.removePhysicalGroups([entry])

             gmsh.model.mesh.generate(3)
-            gmsh.option.setNumber(
+            gmsh.option.setNumber(
+                "Mesh.SaveElementTagType", 3
+            )  # Save only volume elements
             gmsh.write(umesh_filename)

             gmsh.finalize()
@@ -1116,3 +1664,13 @@ class CadToDagmc:
             return dagmc_filename, umesh_filename
         else:
             return dagmc_filename
+
+
+def _get_all_leaf_children(assembly):
+    """Recursively yield all leaf children (parts, not assemblies) from a CadQuery assembly."""
+    for child in assembly.children:
+        # If the child is itself an assembly, recurse
+        if hasattr(child, "children") and len(child.children) > 0:
+            yield from _get_all_leaf_children(child)
+        else:
+            yield child
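Taken together, an end-to-end sketch of the new h5m_backend keyword on the high-level export (a hedged example, not part of the diff; geometry and the material tag name are placeholders):

    import cadquery as cq
    from cad_to_dagmc import CadToDagmc

    model = CadToDagmc()
    model.add_cadquery_object(cq.Workplane().box(10, 10, 10), material_tags=["mat1"])
    # h5m_backend is popped from kwargs and forwarded as method=... per the diff above;
    # "h5py" is the default, "pymoab" writes via MOAB when it is installed
    model.export_dagmc_h5m_file(filename="dagmc.h5m", h5m_backend="h5py")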