ansys-pyensight-core 0.11.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. ansys/pyensight/core/__init__.py +41 -0
  2. ansys/pyensight/core/common.py +341 -0
  3. ansys/pyensight/core/deep_pixel_view.html +98 -0
  4. ansys/pyensight/core/dockerlauncher.py +1124 -0
  5. ansys/pyensight/core/dvs.py +872 -0
  6. ansys/pyensight/core/enscontext.py +345 -0
  7. ansys/pyensight/core/enshell_grpc.py +641 -0
  8. ansys/pyensight/core/ensight_grpc.py +874 -0
  9. ansys/pyensight/core/ensobj.py +515 -0
  10. ansys/pyensight/core/launch_ensight.py +296 -0
  11. ansys/pyensight/core/launcher.py +388 -0
  12. ansys/pyensight/core/libuserd.py +2110 -0
  13. ansys/pyensight/core/listobj.py +280 -0
  14. ansys/pyensight/core/locallauncher.py +579 -0
  15. ansys/pyensight/core/py.typed +0 -0
  16. ansys/pyensight/core/renderable.py +880 -0
  17. ansys/pyensight/core/session.py +1923 -0
  18. ansys/pyensight/core/sgeo_poll.html +24 -0
  19. ansys/pyensight/core/utils/__init__.py +21 -0
  20. ansys/pyensight/core/utils/adr.py +111 -0
  21. ansys/pyensight/core/utils/dsg_server.py +1220 -0
  22. ansys/pyensight/core/utils/export.py +606 -0
  23. ansys/pyensight/core/utils/omniverse.py +769 -0
  24. ansys/pyensight/core/utils/omniverse_cli.py +614 -0
  25. ansys/pyensight/core/utils/omniverse_dsg_server.py +1196 -0
  26. ansys/pyensight/core/utils/omniverse_glb_server.py +848 -0
  27. ansys/pyensight/core/utils/parts.py +1221 -0
  28. ansys/pyensight/core/utils/query.py +487 -0
  29. ansys/pyensight/core/utils/readers.py +300 -0
  30. ansys/pyensight/core/utils/resources/Materials/000_sky.exr +0 -0
  31. ansys/pyensight/core/utils/support.py +128 -0
  32. ansys/pyensight/core/utils/variables.py +2019 -0
  33. ansys/pyensight/core/utils/views.py +674 -0
  34. ansys_pyensight_core-0.11.0.dist-info/METADATA +309 -0
  35. ansys_pyensight_core-0.11.0.dist-info/RECORD +37 -0
  36. ansys_pyensight_core-0.11.0.dist-info/WHEEL +4 -0
  37. ansys_pyensight_core-0.11.0.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,1220 @@
1
+ # Copyright (C) 2022 - 2026 ANSYS, Inc. and/or its affiliates.
2
+ # SPDX-License-Identifier: MIT
3
+ #
4
+ #
5
+ # Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ # of this software and associated documentation files (the "Software"), to deal
7
+ # in the Software without restriction, including without limitation the rights
8
+ # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ # copies of the Software, and to permit persons to whom the Software is
10
+ # furnished to do so, subject to the following conditions:
11
+ #
12
+ # The above copyright notice and this permission notice shall be included in all
13
+ # copies or substantial portions of the Software.
14
+ #
15
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ # SOFTWARE.
22
+
23
+ import hashlib
24
+ import json
25
+ import logging
26
+ import os
27
+ import queue
28
+ import sys
29
+ import threading
30
+ import time
31
+ from typing import TYPE_CHECKING, Any, Dict, List, Optional
32
+
33
+ from ansys.api.pyensight.v0 import dynamic_scene_graph_pb2
34
+ from ansys.pyensight.core import ensight_grpc
35
+ import numpy
36
+
37
+ original_stderr = sys.stderr
38
+ original_stdout = sys.stdout
39
+ sys.stderr = open(os.devnull, "w")
40
+ sys.stdout = open(os.devnull, "w")
41
+ try:
42
+ import dsgutils
43
+
44
+ dsgutils_loaded = True
45
+ except (ModuleNotFoundError, ImportError, AttributeError):
46
+ dsgutils_loaded = False
47
+ finally:
48
+ sys.stderr = original_stderr
49
+ sys.stdout = original_stdout
50
+
51
+ if TYPE_CHECKING:
52
+ from ansys.pyensight.core import Session
53
+
54
+
55
+ class Part(object):
56
+ def __init__(self, session: "DSGSession"):
57
+ """
58
+ This object roughly represents an EnSight "Part". It contains the connectivity,
+ coordinates, normals, and texture coordinate information for one DSG entity.
+
+ This object stores basic geometry information coming from the DSG protocol. The
+ update_geom() method parses an "UpdateGeom" protobuffer and merges the results
+ into the Part object.
64
+
65
+ Parameters
66
+ ----------
67
+ session:
68
+ The DSG connection session object.
69
+ """
70
+ self.session = session
71
+ self.conn_tris = numpy.array([], dtype="int32")
72
+ self.conn_lines = numpy.array([], dtype="int32")
73
+ self.coords = numpy.array([], dtype="float32")
74
+ self.normals = numpy.array([], dtype="float32")
75
+ self.normals_elem = False
76
+ self.tcoords = numpy.array([], dtype="float32")
77
+ self.tcoords_elem = False
78
+ self.node_sizes = numpy.array([], dtype="float32")
79
+ self.cmd: Optional[Any] = None
80
+ self.hash = hashlib.new("sha256")
81
+ self._material: Optional[Any] = None
82
+ self.reset()
83
+
84
+ def reset(self, cmd: Any = None) -> None:
85
+ """
86
+ Reset the part object state to prepare the object
87
+ for a new part representation. Numpy arrays are cleared
88
+ and the state reset.
89
+
90
+ Parameters
91
+ ----------
92
+ cmd: Any
93
+ The DSG command that triggered this reset. Most likely
94
+ this is an UPDATE_PART command.
95
+
96
+ """
97
+ self.conn_tris = numpy.array([], dtype="int32")
98
+ self.conn_lines = numpy.array([], dtype="int32")
99
+ self.coords = numpy.array([], dtype="float32")
100
+ self.normals = numpy.array([], dtype="float32")
101
+ self.normals_elem = False
102
+ self.tcoords = numpy.array([], dtype="float32")
103
+ self.tcoords_var_id = None
104
+ self.tcoords_elem = False
105
+ self.node_sizes = numpy.array([], dtype="float32")
106
+ self.hash = hashlib.new("sha256")
107
+ if cmd is not None:
108
+ self.hash.update(cmd.hash.encode("utf-8"))
109
+ self.cmd = cmd
110
+ self._material = None
111
+
112
+ def _parse_material(self) -> None:
113
+ """
114
+ Parse the JSON string in the part command material string and
115
+ make the content accessible via material_names() and material().
116
+ """
117
+ if self._material is not None:
118
+ return
119
+ try:
120
+ if self.cmd.material_name: # type: ignore
121
+ self._material = json.loads(self.cmd.material_name) # type: ignore
122
+ for key, value in self._material.items():
123
+ value["name"] = key
124
+ else:
125
+ self._material = {}
126
+ except Exception as e:
127
+ self.session.warn(f"Unable to parse JSON material: {str(e)}")
128
+ self._material = {}
129
+
130
+ def material_names(self) -> List[str]:
131
+ """
132
+ Return the list of material names included in the part material.
133
+
134
+ Returns
135
+ -------
136
+ List[str]
137
+ The list of defined material names.
138
+ """
139
+ self._parse_material()
140
+ if self._material is None:
141
+ return []
142
+ return list(self._material.keys())
143
+
144
+ def material(self, name: str = "") -> dict:
145
+ """
146
+ Return the material dictionary for the specified material name.
147
+
148
+ Parameters
149
+ ----------
150
+ name: str
151
+ The material name to query. If no material name is given, the
152
+ first name in the material_names() list is used.
153
+
154
+ Returns
155
+ -------
156
+ dict
157
+ The material description dictionary or an empty dictionary.
158
+ """
159
+ self._parse_material()
160
+ if not name:
161
+ names = self.material_names()
162
+ if len(names):
163
+ name = names[0]
164
+ if self._material is None:
165
+ return {}
166
+ return self._material.get(name, {})
167
+
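The per-part material arrives as a JSON string on the UPDATE_PART command's material_name field and is parsed lazily by _parse_material(). A minimal standalone sketch of the structure this code expects; the "copper" payload below is purely illustrative, not taken from the package:

    import json

    # Hypothetical material payload of the kind _parse_material() handles:
    # a dict keyed by material name; each entry gets its "name" injected.
    material_json = json.dumps({"copper": {"diffuse": [0.8, 0.5, 0.2], "roughness": 0.4}})

    materials = json.loads(material_json) if material_json else {}
    for key, value in materials.items():
        value["name"] = key              # mirrors what Part._parse_material() does

    print(list(materials.keys()))        # what material_names() would return
    print(materials.get("copper", {}))   # what material("copper") would return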
168
+ def update_geom(self, cmd: dynamic_scene_graph_pb2.UpdateGeom) -> None:
169
+ """
170
+ Merge an update geometry command into the numpy buffers being cached in this object
171
+
172
+ Parameters
173
+ ----------
174
+ cmd:
175
+ This is an array update command. It could be for coordinates, normals, variables, connectivity, etc.
176
+ """
177
+ if cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.COORDINATES:
178
+ if self.coords.size != cmd.total_array_size:
179
+ self.coords = numpy.resize(self.coords, cmd.total_array_size)
180
+ self.coords[cmd.chunk_offset : cmd.chunk_offset + len(cmd.flt_array)] = cmd.flt_array
181
+ elif cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.TRIANGLES:
182
+ if self.conn_tris.size != cmd.total_array_size:
183
+ self.conn_tris = numpy.resize(self.conn_tris, cmd.total_array_size)
184
+ self.conn_tris[cmd.chunk_offset : cmd.chunk_offset + len(cmd.int_array)] = cmd.int_array
185
+ elif cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.LINES:
186
+ if self.conn_lines.size != cmd.total_array_size:
187
+ self.conn_lines = numpy.resize(self.conn_lines, cmd.total_array_size)
188
+ self.conn_lines[
189
+ cmd.chunk_offset : cmd.chunk_offset + len(cmd.int_array)
190
+ ] = cmd.int_array
191
+ elif (cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.ELEM_NORMALS) or (
192
+ cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.NODE_NORMALS
193
+ ):
194
+ self.normals_elem = cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.ELEM_NORMALS
195
+ if self.normals.size != cmd.total_array_size:
196
+ self.normals = numpy.resize(self.normals, cmd.total_array_size)
197
+ self.normals[cmd.chunk_offset : cmd.chunk_offset + len(cmd.flt_array)] = cmd.flt_array
198
+ elif (cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.ELEM_VARIABLE) or (
199
+ cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.NODE_VARIABLE
200
+ ):
201
+ # Get the variable definition
202
+ if cmd.variable_id in self.session.variables:
203
+ if self.cmd.color_variableid == cmd.variable_id: # type: ignore
204
+ # Receive the colorby var values
205
+ self.tcoords_elem = (
206
+ cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.ELEM_VARIABLE
207
+ )
208
+ if self.tcoords.size != cmd.total_array_size:
209
+ self.tcoords = numpy.resize(self.tcoords, cmd.total_array_size)
210
+ self.tcoords[
211
+ cmd.chunk_offset : cmd.chunk_offset + len(cmd.flt_array)
212
+ ] = cmd.flt_array
213
+
214
+ # Add the variable hash to the Part's hash, to pick up palette changes
215
+ var_cmd = self.session.variables.get(cmd.variable_id, None)
216
+ if var_cmd is not None:
217
+ self.hash.update(var_cmd.hash.encode("utf-8"))
218
+
219
+ if self.cmd.node_size_variableid == cmd.variable_id: # type: ignore
220
+ # Receive the node size var values
221
+ if self.node_sizes.size != cmd.total_array_size:
222
+ self.node_sizes = numpy.resize(self.node_sizes, cmd.total_array_size)
223
+ self.node_sizes[
224
+ cmd.chunk_offset : cmd.chunk_offset + len(cmd.flt_array)
225
+ ] = cmd.flt_array
226
+ # Combine the hashes for the UpdatePart and all UpdateGeom messages
227
+ self.hash.update(cmd.hash.encode("utf-8"))
228
+
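Each UpdateGeom message carries one chunk of a larger array: total_array_size sizes the destination buffer and chunk_offset places the chunk within it. A small standalone sketch of the same resize-and-splice pattern (plain numpy, no protobuf, made-up chunk data):

    import numpy

    coords = numpy.array([], dtype="float32")

    # Two hypothetical chunks of a 6-float coordinate array (total_array_size == 6).
    chunks = [(0, [0.0, 0.0, 0.0]), (3, [1.0, 2.0, 3.0])]   # (chunk_offset, flt_array)
    total_array_size = 6

    for chunk_offset, flt_array in chunks:
        if coords.size != total_array_size:
            coords = numpy.resize(coords, total_array_size)
        coords[chunk_offset : chunk_offset + len(flt_array)] = flt_array

    print(coords)   # [0. 0. 0. 1. 2. 3.]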
229
+ def nodal_surface_rep(self):
230
+ """
231
+ This function processes the geometry arrays and converts them into nodal representation.
232
+ It will duplicate triangles as needed (to preserve element normals) and will convert
233
+ variable data into texture coordinates.
234
+
235
+ Returns
236
+ -------
237
+ On failure, the method returns None for the first return value. The returned tuple is:
238
+
239
+ (part_command, vertices, connectivity, normals, tex_coords, var_command)
240
+
241
+ part_command: UPDATE_PART command object
242
+ vertices: numpy array of the nodal coordinates
243
+ connectivity: numpy array of the triangle indices into the vertices array
244
+ normals: numpy array of per vertex normal values (optional)
245
+ tcoords: numpy array of per vertex texture coordinates (optional)
246
+ var_command: UPDATE_VARIABLE command object for the variable the texture coordinates correspond to, if any
247
+ """
248
+ if self.cmd is None:
249
+ return None, None, None, None, None, None
250
+ if self.conn_tris.size == 0:
251
+ self.session.log(f"Note: part '{self.cmd.name}' contains no triangles.")
252
+ return None, None, None, None, None, None
253
+ verts = self.coords
254
+ _ = self._normalize_verts(verts)
255
+
256
+ conn = self.conn_tris
257
+ normals = self.normals
258
+ tcoords = None
259
+ if self.tcoords.size:
260
+ tcoords = self.tcoords
261
+ if self.tcoords_elem or self.normals_elem:
262
+ verts_per_prim = 3
263
+ num_prims = conn.size // verts_per_prim
264
+ # "flatten" the triangles to move values from elements to nodes
265
+ new_verts = numpy.ndarray((num_prims * verts_per_prim * 3,), dtype="float32")
266
+ new_conn = numpy.ndarray((num_prims * verts_per_prim,), dtype="int32")
267
+ new_tcoords = None
268
+ if tcoords is not None:
269
+ # remember that the input values are 1D at this point, we will expand to 2D later
270
+ new_tcoords = numpy.ndarray((num_prims * verts_per_prim,), dtype="float32")
271
+ new_normals = None
272
+ if normals is not None:
273
+ if normals.size == 0:
274
+ self.session.log("Warning: zero length normals!")
275
+ else:
276
+ new_normals = numpy.ndarray((num_prims * verts_per_prim * 3,), dtype="float32")
277
+ if dsgutils_loaded:
278
+ dsgutils.build_nodal_surface_rep(
279
+ verts_per_prim,
280
+ self.normals_elem,
281
+ self.tcoords_elem,
282
+ conn,
283
+ verts,
284
+ normals,
285
+ tcoords,
286
+ new_conn,
287
+ new_verts,
288
+ new_normals,
289
+ new_tcoords,
290
+ )
291
+ else:
292
+ j = 0
293
+ for i0 in range(num_prims):
294
+ for i1 in range(verts_per_prim):
295
+ idx = conn[i0 * verts_per_prim + i1]
296
+ # new connectivity (identity)
297
+ new_conn[j] = j
298
+ # copy the vertex
299
+ new_verts[j * 3 + 0] = verts[idx * 3 + 0]
300
+ new_verts[j * 3 + 1] = verts[idx * 3 + 1]
301
+ new_verts[j * 3 + 2] = verts[idx * 3 + 2]
302
+ if new_normals is not None:
303
+ if self.normals_elem:
304
+ # copy the normal associated with the face
305
+ new_normals[j * 3 + 0] = normals[i0 * 3 + 0]
306
+ new_normals[j * 3 + 1] = normals[i0 * 3 + 1]
307
+ new_normals[j * 3 + 2] = normals[i0 * 3 + 2]
308
+ else:
309
+ # copy the same normal as the vertex
310
+ new_normals[j * 3 + 0] = normals[idx * 3 + 0]
311
+ new_normals[j * 3 + 1] = normals[idx * 3 + 1]
312
+ new_normals[j * 3 + 2] = normals[idx * 3 + 2]
313
+ if new_tcoords is not None:
314
+ # remember, 1D texture coords at this point
315
+ if self.tcoords_elem:
316
+ # copy the texture coord associated with the face
317
+ new_tcoords[j] = tcoords[i0]
318
+ else:
319
+ # copy the same texture coord as the vertex
320
+ new_tcoords[j] = tcoords[idx]
321
+ j += 1
322
+ # new arrays.
323
+ verts = new_verts
324
+ conn = new_conn
325
+ normals = new_normals
326
+ if tcoords is not None:
327
+ tcoords = new_tcoords
328
+
329
+ var_cmd = None
330
+ # texture coords need transformation from variable value to [ST]
331
+ if tcoords is not None:
332
+ tcoords, var_cmd = self._build_st_coords(tcoords, verts.size // 3)
333
+
334
+ self.session.log(
335
+ f"Part '{self.cmd.name}' defined: {self.coords.size // 3} verts, {self.conn_tris.size // 3} tris."
336
+ )
337
+ command = self.cmd
338
+
339
+ return command, verts, conn, normals, tcoords, var_cmd
340
+
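A hedged sketch of how a caller (for example an UpdateHandler.finalize_part() override) might consume the tuple returned by nodal_surface_rep(); the function name and the print-based output are only illustrative:

    # 'part' is a Part instance from this module.
    def report_surface(part):
        cmd, verts, conn, normals, tcoords, var_cmd = part.nodal_surface_rep()
        if cmd is None:
            return   # no part command or no triangles - nothing to emit
        num_verts = verts.size // 3
        num_tris = conn.size // 3
        # normals/tcoords may be None; var_cmd describes the palette the tcoords map into
        print(f"{cmd.name}: {num_verts} vertices, {num_tris} triangles")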
341
+ def _normalize_verts(self, verts: numpy.ndarray) -> float:
342
+ """
343
+ This function scales and translates vertices so that the longest axis in the scene has
+ length 1.0 and the data is centered at the origin.
+
+ Returns the scale factor that was applied.
347
+ """
348
+ s = 1.0
349
+ if self.session.normalize_geometry and self.session.scene_bounds is not None:
350
+ num_verts = verts.size // 3
351
+ midx = (self.session.scene_bounds[3] + self.session.scene_bounds[0]) * 0.5
352
+ midy = (self.session.scene_bounds[4] + self.session.scene_bounds[1]) * 0.5
353
+ midz = (self.session.scene_bounds[5] + self.session.scene_bounds[2]) * 0.5
354
+ dx = self.session.scene_bounds[3] - self.session.scene_bounds[0]
355
+ dy = self.session.scene_bounds[4] - self.session.scene_bounds[1]
356
+ dz = self.session.scene_bounds[5] - self.session.scene_bounds[2]
357
+ s = dx
358
+ if dy > s:
359
+ s = dy
360
+ if dz > s:
361
+ s = dz
362
+ if s == 0:
363
+ s = 1.0
364
+ for i in range(num_verts):
365
+ j = i * 3
366
+ verts[j + 0] = (verts[j + 0] - midx) / s
367
+ verts[j + 1] = (verts[j + 1] - midy) / s
368
+ verts[j + 2] = (verts[j + 2] - midz) / s
369
+ return 1.0 / s
370
+
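The per-vertex loop above could equivalently be expressed with numpy array operations. A minimal sketch, assuming a flat xyz vertex array and a 6-value [xmin, ymin, zmin, xmax, ymax, zmax] bounds list (names are hypothetical, not part of the package API):

    import numpy

    def normalize_verts(verts: numpy.ndarray, bounds: list) -> float:
        """Center the flat xyz array on the bounds midpoint and scale the longest axis to 1."""
        mins = numpy.array(bounds[0:3], dtype="float32")
        maxs = numpy.array(bounds[3:6], dtype="float32")
        mid = (mins + maxs) * 0.5
        s = float(max(maxs - mins)) or 1.0   # guard against a degenerate, zero-size scene
        xyz = verts.reshape(-1, 3)           # view onto verts; edits below modify verts in place
        xyz -= mid
        xyz /= s
        return 1.0 / s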
371
+ def _build_st_coords(self, tcoords: numpy.ndarray, num_verts: int):
372
+ """
373
+ The Omniverse interface uses 2D texturing (s,t) to reference the texture map.
374
+ This method converts DSG texture coordinates (1D and in "variable" units) into
375
+ 2D OpenGL style [0.,1.] normalized coordinate space. The "t" coordinate will
376
+ always be 0.5.
377
+
378
+ Parameters
379
+ ----------
380
+ tcoords: numpy.ndarray
381
+ The DSG 1D texture coordinates, which are actually variable values.
382
+
383
+ num_verts: int
384
+ The number of vertices in the mesh.
385
+
386
+ Returns
387
+ -------
388
+ numpy.ndarray, Any
389
+ The OpenGL ST texture coordinate array and the variable definition DSG command.
390
+ """
391
+ var_dsg_id = self.cmd.color_variableid # type: ignore
392
+ var_cmd = self.session.variables[var_dsg_id]
393
+ v_min = None
394
+ v_max = None
395
+ for lvl in var_cmd.levels:
396
+ if (v_min is None) or (v_min > lvl.value):
397
+ v_min = lvl.value
398
+ if (v_max is None) or (v_max < lvl.value):
399
+ v_max = lvl.value
400
+ var_minmax: List[float] = [v_min, v_max] # type: ignore
401
+ # build a power of two x 1 texture
402
+ num_texels = len(var_cmd.texture) // 4
403
+ if dsgutils_loaded:
404
+ tmp = dsgutils.build_st_coords(tcoords, v_min, v_max, num_texels)
405
+ else:
406
+ half_texel = 1 / (num_texels * 2.0)
407
+ tmp = numpy.ndarray((num_verts * 2,), dtype="float32")
408
+ tmp.fill(0.5) # fill in the T coordinate...
409
+ tex_width = half_texel * 2 * (num_texels - 1) # center to center of num_texels
410
+ # if the range is 0, adjust the min by -1. The result is that the texture
411
+ # coords will get mapped to S=1.0 which is what EnSight does in this situation
412
+ if (var_minmax[1] - var_minmax[0]) == 0.0:
413
+ var_minmax[0] = var_minmax[0] - 1.0
414
+ var_width = var_minmax[1] - var_minmax[0]
415
+ for idx in range(num_verts):
416
+ # normalized S coord value (clamp)
417
+ s = (tcoords[idx] - var_minmax[0]) / var_width
418
+ if s < 0.0:
419
+ s = 0.0
420
+ if s > 1.0:
421
+ s = 1.0
422
+ # map to the texture range and set the S value
423
+ tmp[idx * 2] = s * tex_width + half_texel
424
+ return tmp, var_cmd
425
+
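As a concrete check of the fallback path above: with a 4-texel palette and a variable range of [0, 10], half_texel is 0.125 and tex_width is 0.75, so a value of 5.0 maps to S = 0.5 * 0.75 + 0.125 = 0.5. A minimal standalone version of the same texel-center-to-texel-center mapping (helper name is hypothetical):

    def var_to_s(value: float, v_min: float, v_max: float, num_texels: int) -> float:
        """Map a variable value to an S texture coordinate, clamped to the palette range."""
        half_texel = 1.0 / (num_texels * 2.0)
        tex_width = half_texel * 2.0 * (num_texels - 1)
        if v_max - v_min == 0.0:
            v_min -= 1.0            # degenerate range: everything lands at the top of the palette
        s = min(max((value - v_min) / (v_max - v_min), 0.0), 1.0)
        return s * tex_width + half_texel

    assert abs(var_to_s(5.0, 0.0, 10.0, 4) - 0.5) < 1e-6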
426
+ def line_rep(self):
427
+ """
428
+ This function processes the geometry arrays and returns values to represent line data.
429
+ The vertex array embeds the connectivity, so every two points represent a line segment.
430
+ The tcoords array, if present, follows the same per-vertex layout.
431
+
432
+ Returns
433
+ -------
434
+ On failure, the method returns None for the first return value. The returned tuple is:
435
+
436
+ (part_command, vertices, tex_coords, var_command)
437
+
438
+ part_command: UPDATE_PART command object
439
+ vertices: numpy array of per-node coordinates (two per line segment)
440
+ tcoords: numpy array of per vertex texture coordinates (optional)
441
+ var_command: UPDATE_VARIABLE command object for the variable the colors correspond to, if any
442
+ """
443
+ if self.cmd is None:
444
+ return None, None, None, None
445
+ if self.cmd.render != self.cmd.CONNECTIVITY:
446
+ # Early out. The rendering type for this object is not a line (connectivity) rep
447
+ return None, None, None, None
448
+
449
+ num_lines = self.conn_lines.size // 2
450
+ if num_lines == 0:
451
+ return None, None, None, None
452
+ verts = numpy.ndarray((num_lines * 2 * 3,), dtype="float32")
453
+ tcoords = None
454
+ if self.tcoords.size:
455
+ tcoords = numpy.ndarray((num_lines * 2,), dtype="float32")
456
+ # TODO: handle elemental line values (self.tcoords_elem) by converting to nodal...
457
+ # if self.tcoords_elem:
458
+ for i in range(num_lines):
459
+ i0 = self.conn_lines[i * 2]
460
+ i1 = self.conn_lines[i * 2 + 1]
461
+ offset = i * 6
462
+ verts[offset + 0] = self.coords[i0 * 3 + 0]
463
+ verts[offset + 1] = self.coords[i0 * 3 + 1]
464
+ verts[offset + 2] = self.coords[i0 * 3 + 2]
465
+ verts[offset + 3] = self.coords[i1 * 3 + 0]
466
+ verts[offset + 4] = self.coords[i1 * 3 + 1]
467
+ verts[offset + 5] = self.coords[i1 * 3 + 2]
468
+ if tcoords is not None:
469
+ # tcoords are 1D at this point
470
+ offset = i * 2
471
+ tcoords[offset + 0] = self.tcoords[i0]
472
+ tcoords[offset + 1] = self.tcoords[i1]
473
+
474
+ _ = self._normalize_verts(verts)
475
+
476
+ var_cmd = None
477
+ # texture coords need transformation from variable value to [ST]
478
+ if tcoords is not None:
479
+ tcoords, var_cmd = self._build_st_coords(tcoords, verts.size // 3)
480
+
481
+ self.session.log(f"Part '{self.cmd.name}' defined: {num_lines} lines.")
482
+ command = self.cmd
483
+
484
+ return command, verts, tcoords, var_cmd
485
+
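Because line_rep() embeds the connectivity directly in the vertex array, a consumer simply walks the vertices two at a time. A short hedged sketch (names are illustrative only):

    # 'part' is a Part holding line data from this module.
    def print_segments(part):
        cmd, verts, tcoords, var_cmd = part.line_rep()
        if cmd is None:
            return
        xyz = verts.reshape(-1, 3)    # two consecutive rows form one line segment
        for i in range(0, len(xyz), 2):
            print(f"segment {i // 2}: {xyz[i]} -> {xyz[i + 1]}")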
486
+ def point_rep(self):
487
+ """
488
+ This function processes the geometry arrays and returns values to represent point data
489
+
490
+ Returns
491
+ -------
492
+ On failure, the method returns None for the first return value. The returned tuple is:
493
+
494
+ (part_command, vertices, sizes, colors, var_command)
495
+
496
+ part_command: UPDATE_PART command object
497
+ vertices: numpy array of per-node coordinates
498
+ sizes: numpy array of per-node radii
499
+ colors: numpy array of per-node rgb colors
500
+ var_command: UPDATE_VARIABLE command object for the variable the colors correspond to, if any
501
+ """
502
+ if self.cmd is None:
503
+ return None, None, None, None, None
504
+ if self.cmd.render != self.cmd.NODES:
505
+ # Early out. Rendering type for this object is a surface rep, not a point rep
506
+ return None, None, None, None, None
507
+ verts = self.coords
508
+ num_verts = verts.size // 3
509
+ norm_scale = self._normalize_verts(verts)
510
+
511
+ # Convert var values in self.tcoords to RGB colors
512
+ # For now, look up RGB colors. Planned USD enhancements should allow tex coords instead.
513
+ colors = None
514
+ var_cmd = None
515
+
516
+ if self.tcoords.size and self.tcoords.size == num_verts:
517
+ var_dsg_id = self.cmd.color_variableid
518
+ var_cmd = self.session.variables[var_dsg_id]
519
+ if len(var_cmd.levels) == 0:
520
+ self.session.log(
521
+ f"Note: Node rep not created for part '{self.cmd.name}'. It has var values, but a palette with 0 levels."
522
+ )
523
+ return None, None, None, None, None
524
+
525
+ p_min = None
526
+ p_max = None
527
+ for lvl in var_cmd.levels:
528
+ if (p_min is None) or (p_min > lvl.value):
529
+ p_min = lvl.value
530
+ if (p_max is None) or (p_max < lvl.value):
531
+ p_max = lvl.value
532
+
533
+ num_texels = int(len(var_cmd.texture) / 4)
534
+
535
+ colors = numpy.ndarray((num_verts * 3,), dtype="float32")
536
+ low_color = [c / 255.0 for c in var_cmd.texture[0:3]]
537
+ high_color = [
538
+ c / 255.0 for c in var_cmd.texture[4 * (num_texels - 1) : 4 * (num_texels - 1) + 3]
539
+ ]
540
+ if p_min == p_max:
541
+ # Special case where palette min == palette max
542
+ mid_color = [c / 255.0 for c in var_cmd.texture[4 * (num_texels // 2) : 4 * (num_texels // 2) + 3]]
543
+ for idx in range(num_verts):
544
+ val = self.tcoords[idx]
545
+ if val == p_min:
546
+ colors[idx * 3 : idx * 3 + 3] = mid_color
547
+ elif val < p_min:
548
+ colors[idx * 3 : idx * 3 + 3] = low_color
549
+ elif val > p_min:
550
+ colors[idx * 3 : idx * 3 + 3] = high_color
551
+ else:
552
+ for idx in range(num_verts):
553
+ val = self.tcoords[idx]
554
+ if val <= p_min:
555
+ colors[idx * 3 : idx * 3 + 3] = low_color
556
+ else:
557
+ pal_pos = (num_texels - 1) * (val - p_min) / (p_max - p_min)
558
+ pal_idx, pal_sub = divmod(pal_pos, 1)
559
+ pal_idx = int(pal_idx)
560
+
561
+ if pal_idx >= num_texels - 1:
562
+ colors[idx * 3 : idx * 3 + 3] = high_color
563
+ else:
564
+ col0 = var_cmd.texture[pal_idx * 4 : pal_idx * 4 + 3]
565
+ col1 = var_cmd.texture[4 + pal_idx * 4 : 4 + pal_idx * 4 + 3]
566
+ for ii in range(0, 3):
567
+ colors[idx * 3 + ii] = (
568
+ col0[ii] * pal_sub + col1[ii] * (1.0 - pal_sub)
569
+ ) / 255.0
570
+ self.session.log(f"Part '{self.cmd.name}' defined: {self.coords.size // 3} points.")
571
+
572
+ node_sizes = None
573
+ if self.node_sizes.size and self.node_sizes.size == num_verts:
574
+ # Pass out the node sizes if there is a size-by variable
575
+ node_size_default = self.cmd.node_size_default * norm_scale
576
+ node_sizes = numpy.ndarray((num_verts,), dtype="float32")
577
+ for ii in range(0, num_verts):
578
+ node_sizes[ii] = self.node_sizes[ii] * node_size_default
579
+ elif norm_scale != 1.0:
580
+ # Pass out the node sizes if the model is normalized to fit in a unit cube
581
+ node_size_default = self.cmd.node_size_default * norm_scale
582
+ node_sizes = numpy.ndarray((num_verts,), dtype="float32")
583
+ for ii in range(0, num_verts):
584
+ node_sizes[ii] = node_size_default
585
+
586
+ self.session.log(f"Part '{self.cmd.name}' defined: {self.coords.size // 3} points.")
587
+ command = self.cmd
588
+
589
+ return command, verts, node_sizes, colors, var_cmd
590
+
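The divmod() call above splits the palette position into an integer texel index and an interpolation fraction. For example, with a 5-texel palette over a [0, 1] range, a value of 0.3 gives pal_pos = 1.2, so pal_idx = 1 and pal_sub ~= 0.2. A minimal standalone sketch of just that index/fraction split (helper name is hypothetical):

    def palette_position(value: float, v_min: float, v_max: float, num_texels: int):
        """Return (texel_index, fraction) for a value within the palette range."""
        pal_pos = (num_texels - 1) * (value - v_min) / (v_max - v_min)
        pal_idx, pal_sub = divmod(pal_pos, 1)
        return int(pal_idx), pal_sub

    print(palette_position(0.3, 0.0, 1.0, 5))   # -> (1, ~0.2)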
591
+
592
+ class UpdateHandler(object):
593
+ """
594
+ This class serves as the interface between a DSGSession and a hosting application.
595
+ The DSGSession processes the general aspects of the gRPC pipeline and collects the
596
+ various DSG objects into collections of: groups, variables, etc. It also coalesces
597
+ the individual array updates into a "Part" object which represents a single addressable
598
+ mesh chunk.
599
+ UpdateHandler methods are called as the various updates happen, and also when
+ a mesh chunk has been entirely defined. In most scenarios, a subclass of UpdateHandler
+ is passed to the DSGSession to hand the mesh data off to the target application.
602
+ """
603
+
604
+ def __init__(self) -> None:
605
+ self._session: "DSGSession"
606
+
607
+ @property
608
+ def session(self) -> "DSGSession":
609
+ """The session object this handler has been associated with"""
610
+ return self._session
611
+
612
+ @session.setter
613
+ def session(self, session: "DSGSession") -> None:
614
+ self._session = session
615
+
616
+ def add_group(self, id: int, view: bool = False) -> None:
617
+ """Called when a new group command has been added: self.session.groups[id]"""
618
+ if view:
619
+ self.session.log(f"Adding view: {self.session.groups[id]}")
620
+ else:
621
+ self.session.log(f"Adding group: {self.session.groups[id].name}")
622
+
623
+ def add_variable(self, id: int) -> None:
624
+ """Called when a new group command has been added: self.session.variables[id]"""
625
+ self.session.log(f"Adding variable: {self.session.variables[id].name}")
626
+
627
+ def finalize_part(self, part: Part) -> None:
628
+ """Called when all the updates on a Part object have been completed.
629
+
630
+ Note: this superclass method should be called after the subclass has processed
631
+ the part geometry as the saved part command will be destroyed by this call.
632
+ """
633
+ if part.cmd:
634
+ self.session.log(f"Part finalized: {part.cmd.name}")
635
+ part.cmd = None
636
+
637
+ def start_connection(self) -> None:
638
+ """A new gRPC connection has been established: self.session.grpc"""
639
+ grpc = self.session.grpc
640
+ self.session.log(f"gRPC connection established to: {grpc.host}:{grpc.port}")
641
+
642
+ def end_connection(self) -> None:
643
+ """The previous gRPC connection has been closed"""
644
+ self.session.log("gRPC connection closed")
645
+
646
+ def begin_update(self) -> None:
647
+ """A new scene update is about to begin"""
648
+ self.session.log("Begin update ------------------------")
649
+
650
+ def end_update(self) -> None:
651
+ """The scene update is complete"""
652
+ self.session.log("End update ------------------------")
653
+
654
+ def get_dsg_cmd_attribute(self, obj: Any, name: str, default: Any = None) -> Optional[str]:
655
+ """Utility function to get an attribute from a DSG update object
656
+
657
+ Note: UpdateVariable and UpdateGroup commands support generic attributes
658
+ """
659
+ return obj.attributes.get(name, default)
660
+
661
+ def group_matrix(self, group: Any) -> Any:
662
+ matrix = group.matrix4x4
663
+ # The Case matrix is basically the camera transform. In vrmode, we only want
664
+ # the raw geometry, so use the identity matrix.
665
+ if (
666
+ self.get_dsg_cmd_attribute(group, "ENS_OBJ_TYPE") == "ENS_CASE"
667
+ ) and self.session.vrmode:
668
+ matrix = [
669
+ 1.0,
670
+ 0.0,
671
+ 0.0,
672
+ 0.0,
673
+ 0.0,
674
+ 1.0,
675
+ 0.0,
676
+ 0.0,
677
+ 0.0,
678
+ 0.0,
679
+ 1.0,
680
+ 0.0,
681
+ 0.0,
682
+ 0.0,
683
+ 0.0,
684
+ 1.0,
685
+ ]
686
+ return matrix
687
+
688
+
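A minimal sketch of the subclassing pattern described above, assuming UpdateHandler and Part are imported from this module; the PrintHandler name and its print-based "application" are hypothetical:

    class PrintHandler(UpdateHandler):
        """Toy handler that just reports what the DSG stream delivers."""

        def add_variable(self, id: int) -> None:
            super().add_variable(id)
            print(f"variable: {self.session.variables[id].name}")

        def finalize_part(self, part: Part) -> None:
            cmd, verts, conn, normals, tcoords, var_cmd = part.nodal_surface_rep()
            if cmd is not None:
                print(f"part {cmd.name}: {verts.size // 3} verts, {conn.size // 3} tris")
            super().finalize_part(part)   # call last: the superclass clears part.cmd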
689
+ class DSGSession(object):
690
+ def __init__(
691
+ self,
692
+ port: int = 12345,
693
+ host: str = "127.0.0.1",
694
+ security_code: str = "",
695
+ verbose: int = 0,
696
+ normalize_geometry: bool = False,
697
+ vrmode: bool = False,
698
+ time_scale: float = 1.0,
699
+ handler: UpdateHandler = UpdateHandler(),
700
+ session: Optional["Session"] = None,
701
+ uds_path: Optional[str] = None,
702
+ grpc_use_tcp_sockets: bool = False,
703
+ grpc_allow_network_connections: bool = False,
704
+ grpc_disable_tls: bool = False,
705
+ disable_grpc_options: bool = False,
706
+ ):
707
+ """
708
+ Manage a gRPC connection and link it to an UpdateHandler instance
709
+
710
+ This class makes a DSG gRPC connection via the specified port and host (leveraging
711
+ the passed security code). As DSG protobuffers arrive, they are merged into Part
712
+ object instances and the UpdateHandler is invoked to further process them.
713
+
714
+ Parameters
715
+ ----------
716
+ port : int
717
+ The port number the EnSight gRPC service is running on.
718
+ The default is ``12345``.
719
+ host : str
720
+ Name of the host that the EnSight gRPC service is running on.
721
+ The default is ``"127.0.0.1"``, which is the localhost.
722
+ security_code : str
723
+ Shared security code for validating the gRPC communication.
724
+ The default is ``""``.
725
+ verbose : int
726
+ The verbosity level. If set to 1 or higher the class will call logging.info
727
+ for log output. The default is ``0``.
728
+ normalize_geometry : bool
729
+ If True, the scene coordinates will be remapped into the volume [-1,-1,-1] - [1,1,1]
730
+ The default is not to remap coordinates.
731
+ vrmode : bool
732
+ If True, do not include the EnSight camera in the generated view group. The default
733
+ is to include the EnSight view in the scene transformations.
734
+ time_scale : float
735
+ All DSG protobuffers time values will be multiplied by this factor after
736
+ being received. The default is ``1.0``.
737
+ handler : UpdateHandler
738
+ This is an UpdateHandler subclass that is called back when the state of
739
+ a scene transfer changes. For example, methods are called when the
740
+ transfer begins or ends and when a Part (mesh block) is ready for processing.
741
+ uds_path: str, optional
+ The Unix domain socket path, if required for the gRPC connection.
743
+ grpc_use_tcp_sockets: bool, optional
744
+ If using gRPC, and if True, then allow TCP Socket based connections
745
+ instead of only local connections.
746
+ grpc_allow_network_connections: bool, optional
747
+ If using gRPC and using TCP Socket based connections, listen on all networks.
748
+ grpc_disable_tls: bool, optional
749
+ If using gRPC and using TCP Socket based connections, disable TLS.
750
+ disable_grpc_options: bool, optional
751
+ Whether to disable the gRPC options check, allowing older
+ versions of EnSight to be used.
753
+ """
754
+ super().__init__()
755
+ if uds_path:
756
+ self._grpc = ensight_grpc.EnSightGRPC(
757
+ grpc_uds_pathname=uds_path,
758
+ secret_key=security_code,
759
+ grpc_use_tcp_sockets=grpc_use_tcp_sockets,
760
+ grpc_allow_network_connections=grpc_allow_network_connections,
761
+ grpc_disable_tls=grpc_disable_tls,
762
+ disable_grpc_options=disable_grpc_options,
763
+ )
764
+ else:
765
+ self._grpc = ensight_grpc.EnSightGRPC(
766
+ port=port,
767
+ host=host,
768
+ secret_key=security_code,
769
+ grpc_use_tcp_sockets=grpc_use_tcp_sockets,
770
+ grpc_allow_network_connections=grpc_allow_network_connections,
771
+ grpc_disable_tls=grpc_disable_tls,
772
+ disable_grpc_options=disable_grpc_options,
773
+ )
774
+ self._session = session
775
+ if self._session:
776
+ self._session.set_dsg_session(self)
777
+ self._callback_handler = handler
778
+ self._verbose = verbose
779
+ self._thread: Optional[threading.Thread] = None
780
+ self._message_queue: queue.Queue = queue.Queue() # Messages coming from EnSight
781
+ self._dsg_queue: Optional[queue.SimpleQueue] = None # Outgoing messages to EnSight
782
+ self._shutdown = False
783
+ self._dsg = None
784
+ # Prevent the protobuffer queue from growing w/o limits. The payload chunking is
785
+ # around 4MB, so 200 buffers would be a bit less than 1GB.
786
+ self._max_dsg_queue_size = int(os.environ.get("ANSYS_OV_SERVER_MAX_GRPC_QUEUE_SIZE", "200"))
787
+ self._normalize_geometry = normalize_geometry
788
+ self._vrmode = vrmode
789
+ self._time_scale = time_scale
790
+ self._time_limits = [
791
+ sys.float_info.max,
792
+ -sys.float_info.max,
793
+ ] # Min/max across all time steps
794
+ self._mesh_block_count = 0
795
+ self._variables: Dict[int, Any] = dict()
796
+ self._groups: Dict[int, Any] = dict()
797
+ self._part: Part = Part(self)
798
+ self._scene_bounds: Optional[List] = None
799
+ self._cur_timeline: List = [0.0, 0.0] # Start/End time for current update
800
+ self._callback_handler.session = self
801
+ # log any status changes to this file. external apps will be monitoring
802
+ self._status_file = os.environ.get("ANSYS_OV_SERVER_STATUS_FILENAME", "")
803
+ self._status = dict(status="idle", start_time=0.0, processed_buffers=0, total_buffers=0)
804
+ self._pyensight_grpc_coming = False
805
+
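Putting the pieces together, a typical driver loop for this class might look like the following hedged sketch; the port, security code, and PrintHandler name are placeholders:

    handler = PrintHandler()                    # an UpdateHandler subclass, as sketched earlier
    dsg = DSGSession(port=12345, security_code="secret", handler=handler, verbose=1)
    if dsg.start() == 0:                        # connect gRPC and start the polling thread
        dsg.request_an_update(animation=False)  # ask EnSight to push the current scene
        dsg.handle_one_update()                 # block until UPDATE_SCENE_END is processed
        dsg.end()                               # shut down the stream and gRPC channel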
806
+ @property
807
+ def scene_bounds(self) -> Optional[List]:
808
+ return self._scene_bounds
809
+
810
+ @property
811
+ def mesh_block_count(self) -> int:
812
+ return self._mesh_block_count
813
+
814
+ @property
815
+ def max_dsg_queue_size(self) -> int:
816
+ return self._max_dsg_queue_size
817
+
818
+ @max_dsg_queue_size.setter
819
+ def max_dsg_queue_size(self, value: int) -> None:
820
+ self._max_dsg_queue_size = value
821
+
822
+ @property
823
+ def vrmode(self) -> bool:
824
+ return self._vrmode
825
+
826
+ @vrmode.setter
827
+ def vrmode(self, value: bool) -> None:
828
+ self._vrmode = value
829
+
830
+ @property
831
+ def normalize_geometry(self) -> bool:
832
+ return self._normalize_geometry
833
+
834
+ @normalize_geometry.setter
835
+ def normalize_geometry(self, value: bool) -> None:
836
+ self._normalize_geometry = value
837
+
838
+ @property
839
+ def variables(self) -> dict:
840
+ return self._variables
841
+
842
+ @property
843
+ def groups(self) -> dict:
844
+ return self._groups
845
+
846
+ @property
847
+ def part(self) -> Part:
848
+ return self._part
849
+
850
+ @property
851
+ def time_limits(self) -> List:
852
+ return self._time_limits
853
+
854
+ @property
855
+ def cur_timeline(self) -> List:
856
+ return self._cur_timeline
857
+
858
+ @cur_timeline.setter
859
+ def cur_timeline(self, timeline: List) -> None:
860
+ self._cur_timeline = timeline
861
+ self._time_limits[0] = min(self._time_limits[0], self._cur_timeline[0])
862
+ self._time_limits[1] = max(self._time_limits[1], self._cur_timeline[1])
863
+
864
+ @property
865
+ def grpc(self) -> ensight_grpc.EnSightGRPC:
866
+ return self._grpc
867
+
868
+ def log(self, s: str, level: int = 0) -> None:
869
+ """Log a string to the logging system
870
+
871
+ If the message level is less than the current verbosity,
872
+ emit the message.
873
+ """
874
+ if level < self._verbose:
875
+ logging.info(s)
876
+
877
+ @staticmethod
878
+ def warn(s: str) -> None:
879
+ """Issue a warning to the logging system
880
+
881
+ The logging message is mapped to "warn" and cannot be blocked via verbosity
882
+ checks.
883
+ """
884
+ logging.warning(s)
885
+
886
+ @staticmethod
887
+ def error(s: str) -> None:
888
+ """Issue an error to the logging system
889
+
890
+ The logging message is mapped to "error" and cannot be blocked via verbosity
891
+ checks.
892
+ """
893
+ logging.error(s)
894
+
895
+ def start(self) -> int:
896
+ """Start a gRPC connection to an EnSight instance
897
+
898
+ Make a gRPC connection and start a DSG stream handler.
899
+
900
+ Returns
901
+ -------
902
+ 0 on success, -1 on an error.
903
+ """
904
+ # Start by setting up and verifying the connection
905
+ self._grpc.connect()
906
+ if not self._grpc.is_connected():
907
+ self.log(f"Unable to establish gRPC connection to: {self._grpc.host}:{self._grpc.port}")
908
+ return -1
909
+ # Streaming API requires an iterator, so we make one from a queue
910
+ # it also returns an iterator. self._dsg_queue is the input stream interface
911
+ # self._dsg is the returned stream iterator.
912
+ if self._dsg is not None:
913
+ return 0
914
+ self._dsg_queue = queue.SimpleQueue()
915
+ self._dsg = self._grpc.dynamic_scene_graph_stream(
916
+ iter(self._dsg_queue.get, None) # type:ignore
917
+ )
918
+ self._thread = threading.Thread(target=self._poll_messages)
919
+ if self._thread is not None:
920
+ self._thread.start()
921
+ self._callback_handler.start_connection()
922
+ return 0
923
+
924
+ def end(self):
925
+ """Stop a gRPC connection to the EnSight instance"""
926
+ self._callback_handler.end_connection()
927
+ self._grpc.shutdown()
928
+ self._shutdown = True
929
+ self._thread.join()
930
+ self._grpc.shutdown()
931
+ self._dsg = None
932
+ self._thread = None
933
+ self._dsg_queue = None
934
+
935
+ def is_shutdown(self):
936
+ """Check the service shutdown request status"""
937
+ return self._shutdown
938
+
939
+ def _update_status_file(self, timed: bool = False):
940
+ """
941
+ Update the status file contents. The status file will contain the
942
+ following json object, stored as: self._status
943
+
944
+ {
945
+ 'status' : "working|idle",
946
+ 'start_time' : timestamp_of_update_begin,
947
+ 'processed_buffers' : number_of_protobuffers_processed,
948
+ 'total_buffers' : number_of_protobuffers_total,
949
+ }
950
+
951
+ Parameters
952
+ ----------
953
+ timed : bool, optional
+ If True, only update the file at most once per second.
955
+
956
+ """
957
+ if self._status_file:
958
+ current_time = time.time()
959
+ if timed:
960
+ last_time = self._status.get("last_time", 0.0)
961
+ if current_time - last_time < 1.0: # type: ignore
962
+ return
963
+ self._status["last_time"] = current_time
964
+ try:
965
+ message = json.dumps(self._status)
966
+ with open(self._status_file, "w") as status_file:
967
+ status_file.write(message)
968
+ except IOError:
969
+ pass # Note failure is expected here in some cases
970
+
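The status file described above is plain JSON, so an external monitor can simply poll it. A small hedged sketch of such a monitor; the path comes from the same ANSYS_OV_SERVER_STATUS_FILENAME environment variable used by the class:

    import json
    import os

    status_path = os.environ.get("ANSYS_OV_SERVER_STATUS_FILENAME", "")
    if status_path and os.path.exists(status_path):
        with open(status_path, "r") as f:
            status = json.load(f)
        done = status.get("processed_buffers", 0)
        total = status.get("total_buffers", 0) or 1
        print(f"{status.get('status', 'idle')}: {100.0 * done / total:.1f}% of buffers")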
971
+ def request_an_update(self, animation: bool = False, allow_spontaneous: bool = True) -> None:
972
+ """Start a DSG update
973
+ Send a command to the DSG protocol to "init" an update.
974
+
975
+ Parameters
976
+ ----------
977
+ animation:
978
+ if True, export all EnSight timesteps.
979
+ allow_spontaneous:
980
+ if True, allow EnSight to trigger async updates.
981
+ """
982
+ # Send an INIT command to trigger a stream of update packets
983
+ cmd = dynamic_scene_graph_pb2.SceneClientCommand()
984
+ cmd.command_type = dynamic_scene_graph_pb2.SceneClientCommand.INIT
985
+ # Allow EnSight push commands, but full scene only for now...
986
+ cmd.init.allow_spontaneous = allow_spontaneous
987
+ cmd.init.include_temporal_geometry = animation
988
+ cmd.init.allow_incremental_updates = False
989
+ cmd.init.maximum_chunk_size = 1024 * 1024
990
+ self._dsg_queue.put(cmd) # type:ignore
991
+
992
+ def _is_queue_full(self):
993
+ if not self.max_dsg_queue_size:
994
+ return False
995
+ if self._pyensight_grpc_coming:
996
+ return False
997
+ return self._message_queue.qsize() >= self.max_dsg_queue_size
998
+
999
+ def _poll_messages(self) -> None:
1000
+ """Core interface to grab DSG events from gRPC and queue them for processing
1001
+
1002
+ This is run by a thread that is monitoring the dsg RPC call for update messages
1003
+ it places them in _message_queue as it finds them. They are picked up by the
1004
+ main thread via get_next_message()
1005
+ """
1006
+ while not self._shutdown:
1007
+ try:
1008
+ self._message_queue.put(next(self._dsg)) # type:ignore
1009
+ # if the queue is getting too deep, wait a bit to avoid holding too
1010
+ # many messages (filling up memory)
1011
+ if self.max_dsg_queue_size:
1012
+ while self._is_queue_full():
1013
+ time.sleep(0.001)
1014
+ except Exception:
1015
+ self._shutdown = True
1016
+ self.log("DSG connection broken, calling exit")
1017
+ sys.exit(0)
1018
+
1019
+ def _get_next_message(self, wait: bool = True) -> Any:
1020
+ """Get the next queued up protobuffer message
1021
+
1022
+ Called by the main thread to get any messages that were pulled in from the
1023
+ dsg stream and placed here by _poll_messages()
1024
+ """
1025
+ try:
1026
+ return self._message_queue.get(block=wait)
1027
+ except queue.Empty:
1028
+ return None
1029
+
1030
+ def _reset(self):
1031
+ self._variables = {}
1032
+ self._groups = {}
1033
+ self._part = Part(self)
1034
+ self._scene_bounds = None
1035
+ self._mesh_block_count = 0 # reset when a new group shows up
1036
+
1037
+ def handle_one_update(self) -> None:
1038
+ """Monitor the DSG stream and handle a single update operation
1039
+
1040
+ Wait until we get the scene update begin message. From there, reset the current
+ scene buckets and then parse all the incoming commands until we get the scene
+ update end command, at which point the generated stage (started in the
+ view command handler) is saved. Note: a Part bucket is always available;
+ when a new part update comes in or the scene update ends, the current part is "finished".
1045
+ """
1046
+ # An update starts with a UPDATE_SCENE_BEGIN command
1047
+ cmd = self._get_next_message()
1048
+ while (cmd is not None) and (
1049
+ cmd.command_type != dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_SCENE_BEGIN
1050
+ ):
1051
+ # Look for a begin command
1052
+ cmd = self._get_next_message()
1053
+
1054
+ # Start anew
1055
+ self._reset()
1056
+ self._callback_handler.begin_update()
1057
+
1058
+ # Update our status
1059
+ self._status = dict(
1060
+ status="working", start_time=time.time(), processed_buffers=1, total_buffers=1
1061
+ )
1062
+ self._update_status_file()
1063
+
1064
+ # handle the various commands until UPDATE_SCENE_END
1065
+ cmd = self._get_next_message()
1066
+ while (cmd is not None) and (
1067
+ cmd.command_type != dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_SCENE_END
1068
+ ):
1069
+ self._handle_update_command(cmd)
1070
+ self._status["processed_buffers"] += 1 # type: ignore
1071
+ self._status["total_buffers"] = self._status["processed_buffers"] + self._message_queue.qsize() # type: ignore
1072
+ self._update_status_file(timed=True)
1073
+ cmd = self._get_next_message()
1074
+
1075
+ # Flush the last part
1076
+ self._finish_part()
1077
+
1078
+ self._callback_handler.end_update()
1079
+
1080
+ # Update our status
1081
+ self._status = dict(status="idle", start_time=0.0, processed_buffers=0, total_buffers=0)
1082
+ self._update_status_file()
1083
+
1084
+ def _handle_update_command(self, cmd: dynamic_scene_graph_pb2.SceneUpdateCommand) -> None:
1085
+ """Dispatch out a scene update command to the proper handler
1086
+
1087
+ Given a command object, pull out the correct portion of the protobuffer union and
1088
+ pass it to the appropriate handler.
1089
+
1090
+ Parameters
1091
+ ----------
1092
+ cmd:
1093
+ The command to be dispatched.
1094
+ """
1095
+ name = "Unknown"
1096
+ if cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.DELETE_ID:
1097
+ name = "Delete IDs"
1098
+ elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_PART:
1099
+ name = "Part update"
1100
+ tmp = cmd.update_part
1101
+ self._handle_part(tmp)
1102
+ elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_GROUP:
1103
+ name = "Group update"
1104
+ tmp = cmd.update_group
1105
+ self._handle_group(tmp)
1106
+ elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_GEOM:
1107
+ name = "Geom update"
1108
+ tmp = cmd.update_geom
1109
+ self._part.update_geom(tmp)
1110
+ elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_VARIABLE:
1111
+ name = "Variable update"
1112
+ tmp = cmd.update_variable
1113
+ self._handle_variable(tmp)
1114
+ elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_VIEW:
1115
+ name = "View update"
1116
+ tmp = cmd.update_view
1117
+ self._handle_view(tmp)
1118
+ elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_TEXTURE:
1119
+ name = "Texture update"
1120
+ self.log(f"{name} --------------------------")
1121
+
1122
+ def _finish_part(self) -> None:
1123
+ """Complete the current part
1124
+
1125
+ There is always a part being modified. This method completes the current part, committing
1126
+ it to the handler.
1127
+ """
1128
+ try:
1129
+ self._callback_handler.finalize_part(self.part)
1130
+ except Exception as e:
1131
+ import traceback
1132
+
1133
+ self.warn(f"Error encountered while finalizing part geometry: {str(e)}")
1134
+ traceback_str = "".join(traceback.format_tb(e.__traceback__))
1135
+ logging.debug(f"Traceback: {traceback_str}")
1136
+ self._mesh_block_count += 1
1137
+
1138
+ def _handle_part(self, part_cmd: Any) -> None:
1139
+ """Handle a DSG UPDATE_PART command
1140
+
1141
+ Finish the current part and set up the next part.
1142
+
1143
+ Parameters
1144
+ ----------
1145
+ part_cmd:
1146
+ The command coming from the EnSight stream.
1147
+ """
1148
+ self._finish_part()
1149
+ self._part.reset(part_cmd)
1150
+
1151
+ def find_group_pb(self, group_id: int) -> Any:
1152
+ """Return the group command protobuffer for a specific group id.
1153
+
1154
+ Parameters
1155
+ ----------
1156
+ group_id: int
1157
+ The group DSG protocol entity id.
1158
+
1159
+ Returns
1160
+ -------
1161
+ any
1162
+ The group command protobuffer or None.
1163
+ """
1164
+ return self._groups.get(group_id, None)
1165
+
1166
+ def _handle_group(self, group: Any) -> None:
1167
+ """Handle a DSG UPDATE_GROUP command
1168
+
1169
+ Parameters
1170
+ ----------
1171
+ group:
1172
+ The command coming from the EnSight stream.
1173
+ """
1174
+ # reset current mesh (part) count for unique "part" naming in USD
1175
+ self._mesh_block_count = 0
1176
+
1177
+ # record the scene bounds in case they are needed later
1178
+ self._groups[group.id] = group
1179
+ bounds = group.attributes.get("ENS_SCENE_BOUNDS", None)
1180
+ if bounds:
1181
+ minmax = list()
1182
+ for v in bounds.split(","):
1183
+ try:
1184
+ minmax.append(float(v))
1185
+ except ValueError:
1186
+ pass
1187
+ if len(minmax) == 6:
1188
+ self._scene_bounds = minmax
1189
+ # callback
1190
+ self._callback_handler.add_group(group.id)
1191
+
1192
+ def _handle_variable(self, var: Any) -> None:
1193
+ """Handle a DSG UPDATE_VARIABLE command
1194
+
1195
+ Save off the EnSight variable DSG command object.
1196
+
1197
+ Parameters
1198
+ ----------
1199
+ var:
1200
+ The command coming from the EnSight stream.
1201
+ """
1202
+ self._variables[var.id] = var
1203
+ self._callback_handler.add_variable(var.id)
1204
+
1205
+ def _handle_view(self, view: Any) -> None:
1206
+ """Handle a DSG UPDATE_VIEW command
1207
+
1208
+ Parameters
1209
+ ----------
1210
+ view:
1211
+ The command coming from the EnSight stream.
1212
+ """
1213
+ self._finish_part()
1214
+ self._scene_bounds = None
1215
+ self._groups[view.id] = view
1216
+ if len(view.timeline) == 2:
1217
+ view.timeline[0] *= self._time_scale
1218
+ view.timeline[1] *= self._time_scale
1219
+ self.cur_timeline = [view.timeline[0], view.timeline[1]]
1220
+ self._callback_handler.add_group(view.id, view=True)