ansys-pyensight-core 0.11.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ansys/pyensight/core/__init__.py +41 -0
- ansys/pyensight/core/common.py +341 -0
- ansys/pyensight/core/deep_pixel_view.html +98 -0
- ansys/pyensight/core/dockerlauncher.py +1124 -0
- ansys/pyensight/core/dvs.py +872 -0
- ansys/pyensight/core/enscontext.py +345 -0
- ansys/pyensight/core/enshell_grpc.py +641 -0
- ansys/pyensight/core/ensight_grpc.py +874 -0
- ansys/pyensight/core/ensobj.py +515 -0
- ansys/pyensight/core/launch_ensight.py +296 -0
- ansys/pyensight/core/launcher.py +388 -0
- ansys/pyensight/core/libuserd.py +2110 -0
- ansys/pyensight/core/listobj.py +280 -0
- ansys/pyensight/core/locallauncher.py +579 -0
- ansys/pyensight/core/py.typed +0 -0
- ansys/pyensight/core/renderable.py +880 -0
- ansys/pyensight/core/session.py +1923 -0
- ansys/pyensight/core/sgeo_poll.html +24 -0
- ansys/pyensight/core/utils/__init__.py +21 -0
- ansys/pyensight/core/utils/adr.py +111 -0
- ansys/pyensight/core/utils/dsg_server.py +1220 -0
- ansys/pyensight/core/utils/export.py +606 -0
- ansys/pyensight/core/utils/omniverse.py +769 -0
- ansys/pyensight/core/utils/omniverse_cli.py +614 -0
- ansys/pyensight/core/utils/omniverse_dsg_server.py +1196 -0
- ansys/pyensight/core/utils/omniverse_glb_server.py +848 -0
- ansys/pyensight/core/utils/parts.py +1221 -0
- ansys/pyensight/core/utils/query.py +487 -0
- ansys/pyensight/core/utils/readers.py +300 -0
- ansys/pyensight/core/utils/resources/Materials/000_sky.exr +0 -0
- ansys/pyensight/core/utils/support.py +128 -0
- ansys/pyensight/core/utils/variables.py +2019 -0
- ansys/pyensight/core/utils/views.py +674 -0
- ansys_pyensight_core-0.11.0.dist-info/METADATA +309 -0
- ansys_pyensight_core-0.11.0.dist-info/RECORD +37 -0
- ansys_pyensight_core-0.11.0.dist-info/WHEEL +4 -0
- ansys_pyensight_core-0.11.0.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,848 @@
|
|
|
1
|
+
# Copyright (C) 2022 - 2026 ANSYS, Inc. and/or its affiliates.
|
|
2
|
+
# SPDX-License-Identifier: MIT
|
|
3
|
+
#
|
|
4
|
+
#
|
|
5
|
+
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
# of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
# in the Software without restriction, including without limitation the rights
|
|
8
|
+
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
# copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
# furnished to do so, subject to the following conditions:
|
|
11
|
+
#
|
|
12
|
+
# The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
# copies or substantial portions of the Software.
|
|
14
|
+
#
|
|
15
|
+
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
# SOFTWARE.
|
|
22
|
+
|
|
23
|
+
import io
|
|
24
|
+
import json
|
|
25
|
+
import logging
|
|
26
|
+
import os
|
|
27
|
+
import sys
|
|
28
|
+
from typing import Any, List, Optional
|
|
29
|
+
import uuid
|
|
30
|
+
|
|
31
|
+
from PIL import Image
|
|
32
|
+
from ansys.api.pyensight.v0 import dynamic_scene_graph_pb2
|
|
33
|
+
import ansys.pyensight.core.utils.dsg_server as dsg_server
|
|
34
|
+
import numpy
|
|
35
|
+
import pygltflib
|
|
36
|
+
|
|
37
|
+
# Make the directory containing this file importable so the sibling
# dsg_server module can be loaded by name.  (The original code inserted
# the same directory twice; once is sufficient.)
sys.path.insert(0, os.path.dirname(__file__))
original_stdout = sys.stdout
original_stderr = sys.stderr
# Temporarily silence stdout/stderr: importing dsg_server can emit noisy
# output (e.g. numpy "_ARRAY_API" ABI warnings) that should not surface.
sys.stderr = open(os.devnull, "w")
sys.stdout = open(os.devnull, "w")
try:
    from dsg_server import UpdateHandler  # noqa: E402
except AttributeError as exc:
    # numpy ABI mismatches surface as AttributeError mentioning "_ARRAY_API";
    # those are tolerated, anything else is re-raised.
    if "_ARRAY_API" not in str(exc):
        raise
finally:
    # Close the devnull handles (the original leaked both file objects)
    # before restoring the real streams.
    sys.stderr.close()
    sys.stdout.close()
    sys.stderr = original_stderr
    sys.stdout = original_stdout
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class GLBSession(dsg_server.DSGSession):
|
|
54
|
+
def __init__(
|
|
55
|
+
self,
|
|
56
|
+
verbose: int = 0,
|
|
57
|
+
normalize_geometry: bool = False,
|
|
58
|
+
time_scale: float = 1.0,
|
|
59
|
+
vrmode: bool = False,
|
|
60
|
+
handler: UpdateHandler = UpdateHandler(),
|
|
61
|
+
):
|
|
62
|
+
"""
|
|
63
|
+
Provide an interface to read a GLB file and link it to an UpdateHandler instance
|
|
64
|
+
|
|
65
|
+
This class reads GLB files and provides the data to an UpdateHandler instance for
|
|
66
|
+
further processing.
|
|
67
|
+
|
|
68
|
+
Parameters
|
|
69
|
+
----------
|
|
70
|
+
verbose : int
|
|
71
|
+
The verbosity level. If set to 1 or higher the class will call logging.info
|
|
72
|
+
for log output. The default is ``0``.
|
|
73
|
+
normalize_geometry : bool
|
|
74
|
+
If True, the scene coordinates will be remapped into the volume [-1,-1,-1] - [1,1,1]
|
|
75
|
+
The default is not to remap coordinates.
|
|
76
|
+
time_scale : float
|
|
77
|
+
Scale time values by this factor after being read. The default is ``1.0``.
|
|
78
|
+
vrmode : bool
|
|
79
|
+
If True, do not include the camera in the output.
|
|
80
|
+
handler : UpdateHandler
|
|
81
|
+
This is an UpdateHandler subclass that is called back when the state of
|
|
82
|
+
a scene transfer changes. For example, methods are called when the
|
|
83
|
+
transfer begins or ends and when a Part (mesh block) is ready for processing.
|
|
84
|
+
"""
|
|
85
|
+
super().__init__(
|
|
86
|
+
verbose=verbose,
|
|
87
|
+
normalize_geometry=normalize_geometry,
|
|
88
|
+
time_scale=time_scale,
|
|
89
|
+
vrmode=vrmode,
|
|
90
|
+
handler=handler,
|
|
91
|
+
)
|
|
92
|
+
self._gltf: pygltflib.GLTF2 = pygltflib.GLTF2()
|
|
93
|
+
self._id_num: int = 0
|
|
94
|
+
self._node_idx: int = -1
|
|
95
|
+
self._glb_textures: dict = {}
|
|
96
|
+
self._scene_id: int = 0
|
|
97
|
+
|
|
98
|
+
def _reset(self) -> None:
|
|
99
|
+
"""
|
|
100
|
+
Reset the current state to prepare for a new dataset.
|
|
101
|
+
"""
|
|
102
|
+
super()._reset()
|
|
103
|
+
self._cur_timeline = [0.0, 0.0] # Start/End time for current update
|
|
104
|
+
self._status = dict(status="idle", start_time=0.0, processed_buffers=0, total_buffers=0)
|
|
105
|
+
self._gltf = pygltflib.GLTF2()
|
|
106
|
+
self._node_idx = -1
|
|
107
|
+
self._id_num = 0
|
|
108
|
+
self._glb_textures = {}
|
|
109
|
+
self._scene_id = 0
|
|
110
|
+
|
|
111
|
+
def _next_id(self) -> int:
|
|
112
|
+
"""Simple sequential number source
|
|
113
|
+
Called whenever a unique integer is needed.
|
|
114
|
+
|
|
115
|
+
Returns
|
|
116
|
+
-------
|
|
117
|
+
int
|
|
118
|
+
A unique, monotonically increasing integer.
|
|
119
|
+
"""
|
|
120
|
+
self._id_num += 1
|
|
121
|
+
return self._id_num
|
|
122
|
+
|
|
123
|
+
def _map_material(self, glb_materialid: int, part_pb: Any) -> None:
|
|
124
|
+
"""
|
|
125
|
+
Apply various material properties to part protocol buffer.
|
|
126
|
+
|
|
127
|
+
Parameters
|
|
128
|
+
----------
|
|
129
|
+
glb_materialid : int
|
|
130
|
+
The GLB material ID to use as the source information.
|
|
131
|
+
part_pb : Any
|
|
132
|
+
The DSG UpdatePart protocol buffer to update.
|
|
133
|
+
"""
|
|
134
|
+
mat = self._gltf.materials[glb_materialid]
|
|
135
|
+
color = [1.0, 1.0, 1.0, 1.0]
|
|
136
|
+
# Change the color if we can find one
|
|
137
|
+
if hasattr(mat, "pbrMetallicRoughness"):
|
|
138
|
+
if hasattr(mat.pbrMetallicRoughness, "baseColorFactor"):
|
|
139
|
+
color = mat.pbrMetallicRoughness.baseColorFactor
|
|
140
|
+
part_pb.fill_color.extend(color)
|
|
141
|
+
part_pb.line_color.extend(color)
|
|
142
|
+
# Constants for now
|
|
143
|
+
part_pb.ambient = 1.0
|
|
144
|
+
part_pb.diffuse = 1.0
|
|
145
|
+
part_pb.specular_intensity = 1.0
|
|
146
|
+
if "ANSYS_material_details" in mat.extensions:
|
|
147
|
+
part_pb.material_name = json.dumps(mat.extensions["ANSYS_material_details"])
|
|
148
|
+
# if the material maps to a variable, set the variable id for coloring
|
|
149
|
+
glb_varid = self._find_variable_from_glb_mat(glb_materialid)
|
|
150
|
+
if glb_varid:
|
|
151
|
+
part_pb.color_variableid = glb_varid
|
|
152
|
+
|
|
153
|
+
    def _parse_mesh(self, meshid: int, parentid: int, parentname: str) -> None:
        """
        Walk a mesh id found in a "node" instance. This amounts to
        walking the list of "primitives" in the "meshes" list indexed
        by the meshid.

        Each primitive becomes one DSG PART followed by GEOM buffers for
        coordinates, connectivity, normals and texture coordinates, each
        emitted through self._handle_update_command().

        Parameters
        ----------
        meshid: int
            The index of the mesh in the "meshes" list.

        parentid: int
            The DSG parent id.

        parentname: str
            The name of the GROUP parent of the meshes.
        """
        mesh = self._gltf.meshes[meshid]
        for prim_idx, prim in enumerate(mesh.primitives):
            # POINTS, LINES, TRIANGLES, LINE_LOOP, LINE_STRIP, TRIANGLE_STRIP, TRIANGLE_FAN
            mode = prim.mode
            if mode not in (
                pygltflib.TRIANGLES,
                pygltflib.LINES,
                pygltflib.POINTS,
                pygltflib.LINE_LOOP,
                pygltflib.LINE_STRIP,
                pygltflib.TRIANGLE_STRIP,
                pygltflib.TRIANGLE_FAN,
            ):
                # unsupported primitive mode: skip rather than fail the upload
                self.warn(f"Unhandled connectivity detected: {mode}. Geometry skipped.")
                continue
            # NOTE(review): prim.material may be None for unmaterialized
            # primitives -- _map_material is called with it below; verify.
            glb_materialid = prim.material
            line_width = self._callback_handler._omni.line_width

            # GLB Prim -> DSG Part
            part_name = f"{parentname}_prim{prim_idx}_"
            cmd, part_pb = self._create_pb("PART", parent_id=parentid, name=part_name)
            if mode == pygltflib.POINTS:
                part_pb.render = dynamic_scene_graph_pb2.UpdatePart.RenderingMode.NODES
                # Size of the spheres
                part_pb.node_size_default = line_width
            else:
                part_pb.render = dynamic_scene_graph_pb2.UpdatePart.RenderingMode.CONNECTIVITY
            part_pb.shading = dynamic_scene_graph_pb2.UpdatePart.ShadingMode.NODAL
            self._map_material(glb_materialid, part_pb)
            part_dsg_id = part_pb.id
            self._handle_update_command(cmd)

            # GLB Attributes -> DSG Geom
            # Verts
            num_verts = 0
            if prim.attributes.POSITION is not None:
                verts = self._get_data(prim.attributes.POSITION)
                # POSITION accessors are xyz triples, so 3 floats per vertex
                num_verts = len(verts) // 3
                cmd, verts_pb = self._create_pb("GEOM", parent_id=part_dsg_id)
                verts_pb.payload_type = dynamic_scene_graph_pb2.UpdateGeom.ArrayType.COORDINATES
                verts_pb.flt_array.extend(verts)
                verts_pb.chunk_offset = 0
                verts_pb.total_array_size = len(verts)
                self._handle_update_command(cmd)

            # Connectivity (POINTS have no connectivity buffer)
            if num_verts and (mode != pygltflib.POINTS):
                if prim.indices is not None:
                    # components=0 selects integer/index reading in _get_data
                    conn = self._get_data(prim.indices, 0)
                else:
                    # no index buffer: vertices are implicitly numbered 0..n-1
                    conn = numpy.array(list(range(num_verts)), dtype=numpy.uint32)
                cmd, conn_pb = self._create_pb("GEOM", parent_id=part_dsg_id)
                # strips/fans/loops are expanded into plain triangle/line lists
                if mode == pygltflib.TRIANGLES:
                    conn_pb.payload_type = dynamic_scene_graph_pb2.UpdateGeom.ArrayType.TRIANGLES
                elif mode == pygltflib.TRIANGLE_STRIP:
                    conn_pb.payload_type = dynamic_scene_graph_pb2.UpdateGeom.ArrayType.TRIANGLES
                    conn = self._tri_strip_to_tris(conn)
                elif mode == pygltflib.TRIANGLE_FAN:
                    conn_pb.payload_type = dynamic_scene_graph_pb2.UpdateGeom.ArrayType.TRIANGLES
                    conn = self._tri_fan_to_tris(conn)
                elif mode == pygltflib.LINES:
                    conn_pb.payload_type = dynamic_scene_graph_pb2.UpdateGeom.ArrayType.LINES
                elif mode == pygltflib.LINE_LOOP:
                    conn_pb.payload_type = dynamic_scene_graph_pb2.UpdateGeom.ArrayType.LINES
                    conn = self._line_loop_to_lines(conn)
                elif mode == pygltflib.LINE_STRIP:
                    conn_pb.payload_type = dynamic_scene_graph_pb2.UpdateGeom.ArrayType.LINES
                    conn = self._line_strip_to_lines(conn)
                conn_pb.int_array.extend(conn)
                conn_pb.chunk_offset = 0
                conn_pb.total_array_size = len(conn)
                self._handle_update_command(cmd)

            # Normals
            if prim.attributes.NORMAL is not None:
                normals = self._get_data(prim.attributes.NORMAL)
                cmd, normals_pb = self._create_pb("GEOM", parent_id=part_dsg_id)
                normals_pb.payload_type = dynamic_scene_graph_pb2.UpdateGeom.ArrayType.NODE_NORMALS
                normals_pb.flt_array.extend(normals)
                normals_pb.chunk_offset = 0
                normals_pb.total_array_size = len(normals)
                self._handle_update_command(cmd)

            # Texture coords
            if prim.attributes.TEXCOORD_0 is not None:
                # Note: texture coords are stored as VEC2, so we get 2 components back
                texcoords = self._get_data(prim.attributes.TEXCOORD_0, components=2)
                # we only want the 's' component of an s,t pairing
                texcoords = texcoords[::2]
                cmd, texcoords_pb = self._create_pb("GEOM", parent_id=part_dsg_id)
                texcoords_pb.payload_type = (
                    dynamic_scene_graph_pb2.UpdateGeom.ArrayType.NODE_VARIABLE
                )
                texcoords_pb.flt_array.extend(texcoords)
                texcoords_pb.chunk_offset = 0
                texcoords_pb.total_array_size = len(texcoords)
                # tie the texcoord buffer to the variable this material colors by
                glb_varid = self._find_variable_from_glb_mat(glb_materialid)
                if glb_varid:
                    texcoords_pb.variable_id = glb_varid
                self._handle_update_command(cmd)
|
|
270
|
+
|
|
271
|
+
@staticmethod
|
|
272
|
+
def _tri_strip_to_tris(conn: numpy.ndarray) -> numpy.ndarray:
|
|
273
|
+
"""
|
|
274
|
+
Convert GL_TRIANGLE_STRIP connectivity into GL_TRIANGLES
|
|
275
|
+
|
|
276
|
+
Parameters
|
|
277
|
+
----------
|
|
278
|
+
conn: numpy.ndarray
|
|
279
|
+
The tri strip connectivity
|
|
280
|
+
|
|
281
|
+
Returns
|
|
282
|
+
-------
|
|
283
|
+
numpy.array:
|
|
284
|
+
Triangles connectivity
|
|
285
|
+
"""
|
|
286
|
+
tris = []
|
|
287
|
+
swap = False
|
|
288
|
+
for i in range(len(conn) - 2):
|
|
289
|
+
tris.append(conn[i])
|
|
290
|
+
if swap:
|
|
291
|
+
tris.append(conn[i + 2])
|
|
292
|
+
tris.append(conn[i + 1])
|
|
293
|
+
else:
|
|
294
|
+
tris.append(conn[i + 1])
|
|
295
|
+
tris.append(conn[i + 2])
|
|
296
|
+
swap = not swap
|
|
297
|
+
return numpy.array(tris, dtype=conn.dtype)
|
|
298
|
+
|
|
299
|
+
@staticmethod
|
|
300
|
+
def _tri_fan_to_tris(conn: numpy.ndarray) -> numpy.ndarray:
|
|
301
|
+
"""
|
|
302
|
+
Convert GL_TRIANGLE_FAN connectivity into GL_TRIANGLES
|
|
303
|
+
|
|
304
|
+
Parameters
|
|
305
|
+
----------
|
|
306
|
+
conn: numpy.ndarray
|
|
307
|
+
The fan connectivity
|
|
308
|
+
|
|
309
|
+
Returns
|
|
310
|
+
-------
|
|
311
|
+
numpy.array:
|
|
312
|
+
Triangles connectivity
|
|
313
|
+
"""
|
|
314
|
+
tris = []
|
|
315
|
+
for i in range(1, len(conn) - 1):
|
|
316
|
+
tris.append(conn[0])
|
|
317
|
+
tris.append(conn[i])
|
|
318
|
+
tris.append(conn[i + 1])
|
|
319
|
+
return numpy.array(tris, dtype=conn.dtype)
|
|
320
|
+
|
|
321
|
+
@staticmethod
|
|
322
|
+
def _line_strip_to_lines(conn) -> numpy.ndarray:
|
|
323
|
+
"""
|
|
324
|
+
Convert GL_LINE_STRIP connectivity into GL_LINES
|
|
325
|
+
|
|
326
|
+
Parameters
|
|
327
|
+
----------
|
|
328
|
+
conn: numpy.ndarray
|
|
329
|
+
The line strip connectivity
|
|
330
|
+
|
|
331
|
+
Returns
|
|
332
|
+
-------
|
|
333
|
+
numpy.array:
|
|
334
|
+
Lines connectivity
|
|
335
|
+
"""
|
|
336
|
+
lines = []
|
|
337
|
+
num_nodes = len(conn)
|
|
338
|
+
for i in range(num_nodes - 1):
|
|
339
|
+
lines.append(conn[i])
|
|
340
|
+
lines.append(conn[i + 1])
|
|
341
|
+
return numpy.array(lines, dtype=conn.dtype)
|
|
342
|
+
|
|
343
|
+
@staticmethod
|
|
344
|
+
def _line_loop_to_lines(conn) -> numpy.ndarray:
|
|
345
|
+
"""
|
|
346
|
+
Convert GL_LINE_LOOP connectivity into GL_LINES
|
|
347
|
+
|
|
348
|
+
Parameters
|
|
349
|
+
----------
|
|
350
|
+
conn: numpy.ndarray
|
|
351
|
+
The line loop connectivity
|
|
352
|
+
|
|
353
|
+
Returns
|
|
354
|
+
-------
|
|
355
|
+
numpy.array:
|
|
356
|
+
Lines connectivity
|
|
357
|
+
"""
|
|
358
|
+
lines = []
|
|
359
|
+
num_nodes = len(conn)
|
|
360
|
+
for i in range(num_nodes):
|
|
361
|
+
lines.append(conn[i])
|
|
362
|
+
if i + 1 == num_nodes:
|
|
363
|
+
lines.append(conn[0])
|
|
364
|
+
else:
|
|
365
|
+
lines.append(conn[i + 1])
|
|
366
|
+
return numpy.array(lines, dtype=conn.dtype)
|
|
367
|
+
|
|
368
|
+
def _get_data(
|
|
369
|
+
self,
|
|
370
|
+
accessorid: int,
|
|
371
|
+
components: int = 3,
|
|
372
|
+
) -> numpy.ndarray:
|
|
373
|
+
"""
|
|
374
|
+
Return the float buffer corresponding to the given accessorid. The id
|
|
375
|
+
is usually obtained from a primitive: primitive.attributes.POSITION
|
|
376
|
+
or primitive.attributes.NORMAL or primitive.attributes.TEXCOORD_0.
|
|
377
|
+
It can also come from primitive.indices. In that case, the number of
|
|
378
|
+
components needs to be set to 0.
|
|
379
|
+
|
|
380
|
+
Parameters
|
|
381
|
+
----------
|
|
382
|
+
accessorid: int
|
|
383
|
+
The accessor index of the primitive.
|
|
384
|
+
|
|
385
|
+
components: int
|
|
386
|
+
The number of floats per vertex for the values 1,2,3 if the number
|
|
387
|
+
of components is 0, read integer indices.
|
|
388
|
+
|
|
389
|
+
Returns
|
|
390
|
+
-------
|
|
391
|
+
numpy.ndarray
|
|
392
|
+
The float buffer corresponding to the nodal data or an int buffer of connectivity.
|
|
393
|
+
"""
|
|
394
|
+
dtypes = {}
|
|
395
|
+
dtypes[pygltflib.BYTE] = numpy.int8
|
|
396
|
+
dtypes[pygltflib.UNSIGNED_BYTE] = numpy.uint8
|
|
397
|
+
dtypes[pygltflib.SHORT] = numpy.int16
|
|
398
|
+
dtypes[pygltflib.UNSIGNED_SHORT] = numpy.uint16
|
|
399
|
+
dtypes[pygltflib.UNSIGNED_INT] = numpy.uint32
|
|
400
|
+
dtypes[pygltflib.FLOAT] = numpy.float32
|
|
401
|
+
|
|
402
|
+
binary_blob = self._gltf.binary_blob()
|
|
403
|
+
accessor = self._gltf.accessors[accessorid]
|
|
404
|
+
buffer_view = self._gltf.bufferViews[accessor.bufferView]
|
|
405
|
+
dtype = numpy.float32
|
|
406
|
+
data_dtype = dtypes[accessor.componentType]
|
|
407
|
+
count = accessor.count * components
|
|
408
|
+
# connectivity
|
|
409
|
+
if components == 0:
|
|
410
|
+
dtype = numpy.uint32
|
|
411
|
+
count = accessor.count
|
|
412
|
+
offset = buffer_view.byteOffset + accessor.byteOffset
|
|
413
|
+
blob = binary_blob[offset : offset + buffer_view.byteLength]
|
|
414
|
+
ret = numpy.frombuffer(blob, dtype=data_dtype, count=count)
|
|
415
|
+
if data_dtype != dtype:
|
|
416
|
+
return ret.astype(dtype)
|
|
417
|
+
return ret
|
|
418
|
+
|
|
419
|
+
    def _walk_node(self, nodeid: int, parentid: int) -> None:
        """
        Given a node id (likely from walking a scenes array), walk the mesh
        objects in the node. A "node" has the keys "mesh" and "name".

        Each node has a single mesh object in it.  The node becomes a DSG
        GROUP (carrying the node transform); its mesh, if any, is expanded
        into PART/GEOM buffers by _parse_mesh() and its children are walked
        recursively under the new group.

        Parameters
        ----------
        nodeid: int
            The node id to walk.

        parentid: int
            The DSG parent id.

        """
        node = self._gltf.nodes[nodeid]
        name = self._name(node)
        # 4x4 transform derived from node.matrix or TRS components
        matrix = self._transform(node)

        # GLB node -> DSG Group
        cmd, group_pb = self._create_pb("GROUP", parent_id=parentid, name=name)
        group_pb.matrix4x4.extend(matrix)
        if node.mesh is not None:
            # This is a little ugly, but spheres have a size that is part of the PART
            # protobuffer. So, if the current mesh has the "ANSYS_linewidth" extension,
            # we need to temporally change the line width. However, if this is a lines
            # object, then we need to set the ANSYS_linewidth attribute. Unfortunately,
            # this is only available on the GROUP protobuffer, thus we will try to set
            # both here.
            # Note: in the EnSight push, ANSYS_linewidth will only ever be set on the
            # top level node. In the GLB case, we will scope it to the group. Thus,
            # every "mesh" protobuffer sequence will have an explicit line width in
            # the group above the part.

            # save/restore the current line_width
            orig_width = self._callback_handler._omni.line_width
            mesh = self._gltf.meshes[node.mesh]
            try:
                # check for line_width on the mesh object
                width = float(mesh.extensions["ANSYS_linewidth"]["linewidth"])
                # make sure spheres work
                self._callback_handler._omni.line_width = width
            except (KeyError, ValueError):
                # extension absent or malformed: keep the current width
                pass
            # make sure lines work (via the group attributes map)
            group_pb.attributes["ANSYS_linewidth"] = str(self._callback_handler._omni.line_width)
            # send the group protobuffer
            self._handle_update_command(cmd)
            # Export the mesh
            self._parse_mesh(node.mesh, group_pb.id, name)
            # restore the old line_width
            self._callback_handler._omni.line_width = orig_width
        else:
            # send the group protobuffer
            self._handle_update_command(cmd)

        # Handle node.rotation, node.translation, node.scale, node.matrix
        for child_id in node.children:
            self._walk_node(child_id, group_pb.id)
|
|
479
|
+
|
|
480
|
+
    def start_uploads(self, timeline: List[float]) -> None:
        """
        Begin an upload process for a potential collection of files.

        Allocates a fresh DSG id for the root scene, records the overall
        time span and notifies the update handler that a transfer is
        beginning.

        Parameters
        ----------
        timeline : List[float]
            The time values for the files span this range of values.
        """
        # new unique id for the scene that subsequent uploads attach to
        self._scene_id = self._next_id()
        self._cur_timeline = timeline
        # let the handler know a scene transfer is starting
        self._callback_handler.begin_update()
        # reflect the new state in the external status file
        self._update_status_file()
|
|
493
|
+
|
|
494
|
+
    def end_uploads(self) -> None:
        """
        The upload process for the current collection of files is complete.

        Clears all per-dataset state and updates the external status file
        so observers see the session return to idle.
        """
        # drop all per-file state (scene graph, texture map, ids, ...)
        self._reset()
        self._update_status_file()
|
|
500
|
+
|
|
501
|
+
def _find_variable_from_glb_mat(self, glb_material_id: int) -> Optional[int]:
|
|
502
|
+
"""
|
|
503
|
+
Given a glb_material id, find the corresponding dsg variable id
|
|
504
|
+
|
|
505
|
+
Parameters
|
|
506
|
+
----------
|
|
507
|
+
glb_material_id : int
|
|
508
|
+
The material id from the glb file.
|
|
509
|
+
|
|
510
|
+
Returns
|
|
511
|
+
-------
|
|
512
|
+
Optional[int]
|
|
513
|
+
The dsg variable id or None, if no variable is found.
|
|
514
|
+
"""
|
|
515
|
+
value = self._glb_textures.get(glb_material_id, None)
|
|
516
|
+
if value is not None:
|
|
517
|
+
return value["pb"].id
|
|
518
|
+
return None
|
|
519
|
+
|
|
520
|
+
def upload_file(self, glb_filename: str, timeline: List[float] = [0.0, 0.0]) -> bool:
|
|
521
|
+
"""
|
|
522
|
+
Parse a GLB file and call out to the handler to present the data
|
|
523
|
+
to another interface (e.g. Omniverse)
|
|
524
|
+
|
|
525
|
+
Parameters
|
|
526
|
+
----------
|
|
527
|
+
timeline : List[float]
|
|
528
|
+
The first and last time value for which the content of this file should be
|
|
529
|
+
visible.
|
|
530
|
+
|
|
531
|
+
glb_filename : str
|
|
532
|
+
The name of the GLB file to parse
|
|
533
|
+
|
|
534
|
+
Returns
|
|
535
|
+
-------
|
|
536
|
+
bool:
|
|
537
|
+
returns True on success, False otherwise
|
|
538
|
+
"""
|
|
539
|
+
try:
|
|
540
|
+
ok = True
|
|
541
|
+
self._gltf = pygltflib.GLTF2().load(glb_filename)
|
|
542
|
+
self.log(f"File: {glb_filename} Info: {self._gltf.asset}")
|
|
543
|
+
|
|
544
|
+
# check for GLTFWriter source
|
|
545
|
+
if (self._gltf.asset.generator is None) or (
|
|
546
|
+
("GLTF Writer" not in self._gltf.asset.generator)
|
|
547
|
+
and ("Ansys Ensight" not in self._gltf.asset.generator)
|
|
548
|
+
):
|
|
549
|
+
self.error(
|
|
550
|
+
f"Unable to process: {glb_filename} : Not written by GLTF Writer library"
|
|
551
|
+
)
|
|
552
|
+
return False
|
|
553
|
+
|
|
554
|
+
# Walk texture nodes -> DSG Variable buffers
|
|
555
|
+
for tex_idx, texture in enumerate(self._gltf.textures):
|
|
556
|
+
image = self._gltf.images[texture.source]
|
|
557
|
+
if image.uri is None:
|
|
558
|
+
bv = self._gltf.bufferViews[image.bufferView]
|
|
559
|
+
raw_png = self._gltf.binary_blob()[
|
|
560
|
+
bv.byteOffset : bv.byteOffset + bv.byteLength
|
|
561
|
+
]
|
|
562
|
+
else:
|
|
563
|
+
raw_png = self._gltf.get_data_from_buffer_uri(image.uri)
|
|
564
|
+
png_img = Image.open(io.BytesIO(raw_png))
|
|
565
|
+
raw_rgba = png_img.tobytes()
|
|
566
|
+
raw_rgba = raw_rgba[0 : len(raw_rgba) // png_img.size[1]]
|
|
567
|
+
var_name = "Variable_" + str(tex_idx)
|
|
568
|
+
cmd, var_pb = self._create_pb("VARIABLE", parent_id=self._scene_id, name=var_name)
|
|
569
|
+
var_pb.location = dynamic_scene_graph_pb2.UpdateVariable.VarLocation.NODAL
|
|
570
|
+
var_pb.dimension = dynamic_scene_graph_pb2.UpdateVariable.VarDimension.SCALAR
|
|
571
|
+
var_pb.undefined_value = -1e38
|
|
572
|
+
var_pb.pal_interp = (
|
|
573
|
+
dynamic_scene_graph_pb2.UpdateVariable.PaletteInterpolation.CONTINUOUS
|
|
574
|
+
)
|
|
575
|
+
var_pb.sub_levels = 0
|
|
576
|
+
var_pb.undefined_display = (
|
|
577
|
+
dynamic_scene_graph_pb2.UpdateVariable.UndefinedDisplay.AS_ZERO
|
|
578
|
+
)
|
|
579
|
+
var_pb.texture = raw_rgba
|
|
580
|
+
colors = numpy.frombuffer(raw_rgba, dtype=numpy.uint8)
|
|
581
|
+
colors.shape = (-1, 4)
|
|
582
|
+
num = len(colors)
|
|
583
|
+
levels = []
|
|
584
|
+
for i, c in enumerate(colors):
|
|
585
|
+
level = dynamic_scene_graph_pb2.VariableLevel()
|
|
586
|
+
level.value = float(i) / float(num - 1)
|
|
587
|
+
level.red = float(c[0]) / 255.0
|
|
588
|
+
level.green = float(c[1]) / 255.0
|
|
589
|
+
level.blue = float(c[2]) / 255.0
|
|
590
|
+
level.alpha = float(c[3]) / 255.0
|
|
591
|
+
levels.append(level)
|
|
592
|
+
var_pb.levels.extend(levels)
|
|
593
|
+
# create a map from GLB material index to glb
|
|
594
|
+
d = dict(pb=var_pb, idx=tex_idx)
|
|
595
|
+
# Find all the materials that map to this texture
|
|
596
|
+
for mat_idx, mat in enumerate(self._gltf.materials):
|
|
597
|
+
if not hasattr(mat, "pbrMetallicRoughness"):
|
|
598
|
+
continue
|
|
599
|
+
if not hasattr(mat.pbrMetallicRoughness, "baseColorTexture"):
|
|
600
|
+
continue
|
|
601
|
+
if not hasattr(mat.pbrMetallicRoughness.baseColorTexture, "index"):
|
|
602
|
+
continue
|
|
603
|
+
if mat.pbrMetallicRoughness.baseColorTexture.index == tex_idx:
|
|
604
|
+
material_index = mat_idx
|
|
605
|
+
# does this Variable/texture already exist?
|
|
606
|
+
duplicate = None
|
|
607
|
+
saved_id = var_pb.id
|
|
608
|
+
saved_name = var_pb.name
|
|
609
|
+
for key, value in self._glb_textures.items():
|
|
610
|
+
var_pb.name = value["pb"].name
|
|
611
|
+
var_pb.id = value["pb"].id
|
|
612
|
+
if value["pb"] == var_pb:
|
|
613
|
+
duplicate = key
|
|
614
|
+
break
|
|
615
|
+
var_pb.id = saved_id
|
|
616
|
+
var_pb.name = saved_name
|
|
617
|
+
# if a new texture, add the Variable and create an index to the material
|
|
618
|
+
if duplicate is None:
|
|
619
|
+
self._handle_update_command(cmd)
|
|
620
|
+
self._glb_textures[material_index] = d
|
|
621
|
+
else:
|
|
622
|
+
# create an additional reference to this variable from this material
|
|
623
|
+
self._glb_textures[material_index] = self._glb_textures[duplicate]
|
|
624
|
+
|
|
625
|
+
# GLB file: general layout
|
|
626
|
+
# scene: "default_index"
|
|
627
|
+
# scenes: [scene_index].nodes -> [node ids]
|
|
628
|
+
# was scene_id = self._gltf.scene
|
|
629
|
+
num_scenes = len(self._gltf.scenes)
|
|
630
|
+
for scene_idx in range(num_scenes):
|
|
631
|
+
# GLB Scene -> DSG View
|
|
632
|
+
cmd, view_pb = self._create_pb("VIEW", parent_id=self._scene_id)
|
|
633
|
+
view_pb.lookat.extend([0.0, 0.0, -1.0])
|
|
634
|
+
view_pb.lookfrom.extend([0.0, 0.0, 0.0])
|
|
635
|
+
view_pb.upvector.extend([0.0, 1.0, 0.0])
|
|
636
|
+
view_pb.timeline.extend(self._build_scene_timeline(scene_idx, timeline))
|
|
637
|
+
if len(self._gltf.cameras) > 0:
|
|
638
|
+
camera = self._gltf.cameras[0]
|
|
639
|
+
if camera.type == "orthographic":
|
|
640
|
+
view_pb.nearfar.extend(
|
|
641
|
+
[float(camera.orthographic.znear), float(camera.orthographic.zfar)]
|
|
642
|
+
)
|
|
643
|
+
else:
|
|
644
|
+
view_pb.nearfar.extend(
|
|
645
|
+
[float(camera.perspective.znear), float(camera.perspective.zfar)]
|
|
646
|
+
)
|
|
647
|
+
view_pb.fieldofview = camera.perspective.yfov
|
|
648
|
+
view_pb.aspectratio = camera.aspectratio.aspectRatio
|
|
649
|
+
self._handle_update_command(cmd)
|
|
650
|
+
# walk the scene nodes
|
|
651
|
+
scene = self._gltf.scenes[scene_idx]
|
|
652
|
+
try:
|
|
653
|
+
if self._callback_handler._omni.line_width == 0.0:
|
|
654
|
+
width = float(scene.extensions["ANSYS_linewidth"]["linewidth"])
|
|
655
|
+
self._callback_handler._omni.line_width = width
|
|
656
|
+
except (KeyError, ValueError):
|
|
657
|
+
# in the case where the extension does not exist or is mal-formed
|
|
658
|
+
pass
|
|
659
|
+
for node_id in scene.nodes:
|
|
660
|
+
self._walk_node(node_id, view_pb.id)
|
|
661
|
+
self._finish_part()
|
|
662
|
+
|
|
663
|
+
self._callback_handler.end_update()
|
|
664
|
+
|
|
665
|
+
except Exception as e:
|
|
666
|
+
import traceback
|
|
667
|
+
|
|
668
|
+
self.error(f"Unable to process: {glb_filename} : {e}")
|
|
669
|
+
traceback_str = "".join(traceback.format_tb(e.__traceback__))
|
|
670
|
+
logging.debug(f"Traceback: {traceback_str}")
|
|
671
|
+
ok = False
|
|
672
|
+
|
|
673
|
+
return ok
|
|
674
|
+
|
|
675
|
+
def _build_scene_timeline(self, scene_idx: int, input_timeline: List[float]) -> List[float]:
|
|
676
|
+
"""
|
|
677
|
+
For a given scene and externally supplied timeline, compute the timeline for the scene.
|
|
678
|
+
|
|
679
|
+
If the ANSYS_scene_time extension is present, use that value.
|
|
680
|
+
If there is only a single scene, return the supplied timeline.
|
|
681
|
+
If the supplied timeline is empty, use an integer timeline based on the number of scenes in the GLB file.
|
|
682
|
+
Carve up the timeline into chunks, one per scene.
|
|
683
|
+
|
|
684
|
+
Parameters
|
|
685
|
+
----------
|
|
686
|
+
scene_idx: int
|
|
687
|
+
The index of the scene to compute for.
|
|
688
|
+
|
|
689
|
+
input_timeline: List[float]
|
|
690
|
+
An externally supplied timeline.
|
|
691
|
+
|
|
692
|
+
Returns
|
|
693
|
+
-------
|
|
694
|
+
List[float]
|
|
695
|
+
The computed timeline.
|
|
696
|
+
"""
|
|
697
|
+
num_scenes = len(self._gltf.scenes)
|
|
698
|
+
# if ANSYS_scene_timevalue is used, time ranges will come from there
|
|
699
|
+
try:
|
|
700
|
+
t0 = self._gltf.scenes[scene_idx].extensions["ANSYS_scene_timevalue"]["timevalue"]
|
|
701
|
+
idx = scene_idx + 1
|
|
702
|
+
if idx < num_scenes:
|
|
703
|
+
t1 = self._gltf.scenes[idx].extensions["ANSYS_scene_timevalue"]["timevalue"]
|
|
704
|
+
else:
|
|
705
|
+
t1 = t0
|
|
706
|
+
return [t0, t1]
|
|
707
|
+
except KeyError:
|
|
708
|
+
# If we fail due to dictionary key issue, the extension does not exist or is
|
|
709
|
+
# improperly formatted.
|
|
710
|
+
pass
|
|
711
|
+
# if there is only one scene, then use the input timeline
|
|
712
|
+
if num_scenes == 1:
|
|
713
|
+
return input_timeline
|
|
714
|
+
# if the timeline has zero length, we make it the number of scenes
|
|
715
|
+
timeline = input_timeline
|
|
716
|
+
if timeline[1] - timeline[0] <= 0.0:
|
|
717
|
+
timeline = [0.0, float(num_scenes - 1)]
|
|
718
|
+
# carve time into the input timeline.
|
|
719
|
+
delta = (timeline[1] - timeline[0]) / float(num_scenes - 1)
|
|
720
|
+
output: List[float] = []
|
|
721
|
+
output.append(float(scene_idx) * delta + timeline[0])
|
|
722
|
+
if scene_idx < num_scenes - 1:
|
|
723
|
+
output.append(output[0] + delta)
|
|
724
|
+
else:
|
|
725
|
+
output.append(output[0])
|
|
726
|
+
return output
|
|
727
|
+
|
|
728
|
+
@staticmethod
|
|
729
|
+
def _transform(node: Any) -> List[float]:
|
|
730
|
+
"""
|
|
731
|
+
Convert the node "matrix" or "translation", "rotation" and "scale" values into
|
|
732
|
+
a 4x4 matrix representation.
|
|
733
|
+
|
|
734
|
+
"nodes": [
|
|
735
|
+
{
|
|
736
|
+
"matrix": [
|
|
737
|
+
1,0,0,0,
|
|
738
|
+
0,1,0,0,
|
|
739
|
+
0,0,1,0,
|
|
740
|
+
5,6,7,1
|
|
741
|
+
],
|
|
742
|
+
...
|
|
743
|
+
},
|
|
744
|
+
{
|
|
745
|
+
"translation":
|
|
746
|
+
[ 0,0,0 ],
|
|
747
|
+
"rotation":
|
|
748
|
+
[ 0,0,0,1 ],
|
|
749
|
+
"scale":
|
|
750
|
+
[ 1,1,1 ]
|
|
751
|
+
...
|
|
752
|
+
},
|
|
753
|
+
]
|
|
754
|
+
|
|
755
|
+
Parameters
|
|
756
|
+
----------
|
|
757
|
+
node: Any
|
|
758
|
+
The node to compute the matrix transform for.
|
|
759
|
+
|
|
760
|
+
Returns
|
|
761
|
+
-------
|
|
762
|
+
List[float]
|
|
763
|
+
The 4x4 transformation matrix.
|
|
764
|
+
|
|
765
|
+
"""
|
|
766
|
+
identity = numpy.identity(4)
|
|
767
|
+
if node.matrix:
|
|
768
|
+
tmp = numpy.array(node.matrix)
|
|
769
|
+
tmp.shape = (4, 4)
|
|
770
|
+
tmp = tmp.transpose()
|
|
771
|
+
return list(tmp.flatten())
|
|
772
|
+
if node.translation:
|
|
773
|
+
identity[3][0] = node.translation[0]
|
|
774
|
+
identity[3][1] = node.translation[1]
|
|
775
|
+
identity[3][2] = node.translation[2]
|
|
776
|
+
if node.rotation:
|
|
777
|
+
# In GLB, the null quat is [0,0,0,1] so reverse the vector here
|
|
778
|
+
q = [node.rotation[3], node.rotation[0], node.rotation[1], node.rotation[2]]
|
|
779
|
+
rot = numpy.array(
|
|
780
|
+
[
|
|
781
|
+
[q[0], -q[1], -q[2], -q[3]],
|
|
782
|
+
[q[1], q[0], -q[3], q[2]],
|
|
783
|
+
[q[2], q[3], q[0], -q[1]],
|
|
784
|
+
[q[3], -q[2], q[1], q[0]],
|
|
785
|
+
]
|
|
786
|
+
)
|
|
787
|
+
identity = numpy.multiply(identity, rot)
|
|
788
|
+
if node.scale:
|
|
789
|
+
s = node.scale
|
|
790
|
+
scale = numpy.array(
|
|
791
|
+
[
|
|
792
|
+
[s[0], 0.0, 0.0, 0.0],
|
|
793
|
+
[0.0, s[1], 0.0, 0.0],
|
|
794
|
+
[0.0, 0.0, s[2], 0.0],
|
|
795
|
+
[0.0, 0.0, 0.0, 1.0],
|
|
796
|
+
]
|
|
797
|
+
)
|
|
798
|
+
identity = numpy.multiply(identity, scale)
|
|
799
|
+
return list(identity.flatten())
|
|
800
|
+
|
|
801
|
+
def _name(self, node: Any) -> str:
|
|
802
|
+
"""
|
|
803
|
+
Given a GLB node object, return the name of the node. If the node does not
|
|
804
|
+
have a name, give it a generated node.
|
|
805
|
+
|
|
806
|
+
Parameters
|
|
807
|
+
----------
|
|
808
|
+
node: Any
|
|
809
|
+
The GLB node to get the name of.
|
|
810
|
+
|
|
811
|
+
Returns
|
|
812
|
+
-------
|
|
813
|
+
str
|
|
814
|
+
The name of the node.
|
|
815
|
+
"""
|
|
816
|
+
if hasattr(node, "name") and node.name:
|
|
817
|
+
return node.name
|
|
818
|
+
self._node_idx += 1
|
|
819
|
+
return f"Node_{self._node_idx}"
|
|
820
|
+
|
|
821
|
+
def _create_pb(
    self, cmd_type: str, parent_id: int = -1, name: str = ""
) -> "Tuple[dynamic_scene_graph_pb2.SceneUpdateCommand, Any]":
    """
    Create a SceneUpdateCommand protobuf of the requested type.

    PART and GEOM sub-commands get a fresh uuid1-based hash; every
    sub-command gets a new unique id.

    Parameters
    ----------
    cmd_type: str
        One of "PART", "GROUP", "VARIABLE", "GEOM" or "VIEW".
    parent_id: int
        If non-negative, assigned as the parent id of the new object.
    name: str
        Optional object name.  Ignored for "GEOM" and "VIEW", whose
        sub-commands have no name field.

    Returns
    -------
    Tuple[dynamic_scene_graph_pb2.SceneUpdateCommand, Any]
        The command and its type-specific sub-command message.

    Raises
    ------
    ValueError
        If cmd_type is not one of the recognized command types.
    """
    cmd = dynamic_scene_graph_pb2.SceneUpdateCommand()
    if cmd_type == "PART":
        cmd.command_type = dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_PART
        subcmd = cmd.update_part
        subcmd.hash = str(uuid.uuid1())
    elif cmd_type == "GROUP":
        cmd.command_type = dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_GROUP
        subcmd = cmd.update_group
    elif cmd_type == "VARIABLE":
        cmd.command_type = dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_VARIABLE
        subcmd = cmd.update_variable
    elif cmd_type == "GEOM":
        cmd.command_type = dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_GEOM
        subcmd = cmd.update_geom
        subcmd.hash = str(uuid.uuid1())
    elif cmd_type == "VIEW":
        cmd.command_type = dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_VIEW
        subcmd = cmd.update_view
    else:
        # Previously an unknown type fell through to an UnboundLocalError on
        # "subcmd"; fail with an explicit, descriptive error instead.
        raise ValueError(f"Unknown scene update command type: {cmd_type}")
    subcmd.id = self._next_id()
    if parent_id >= 0:
        subcmd.parent_id = parent_id
    # GEOM and VIEW sub-commands have no name field.
    if cmd_type not in ("GEOM", "VIEW"):
        if name:
            subcmd.name = name
    return cmd, subcmd
|