lavavu_osmesa-1.9.9-cp313-cp313-manylinux_2_28_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. lavavu/LavaVuPython.py +561 -0
  2. lavavu/_LavaVuPython.cpython-313-x86_64-linux-gnu.so +0 -0
  3. lavavu/__init__.py +15 -0
  4. lavavu/__main__.py +12 -0
  5. lavavu/amalgamate.py +15 -0
  6. lavavu/aserver.py +359 -0
  7. lavavu/control.py +1731 -0
  8. lavavu/convert.py +888 -0
  9. lavavu/dict.json +2528 -0
  10. lavavu/font.bin +0 -0
  11. lavavu/html/LavaVu-amalgamated.css +282 -0
  12. lavavu/html/OK-min.js +99 -0
  13. lavavu/html/baseviewer.js +307 -0
  14. lavavu/html/control.css +104 -0
  15. lavavu/html/control.js +340 -0
  16. lavavu/html/dat-gui-light-theme.css +68 -0
  17. lavavu/html/dat.gui.min.js +2 -0
  18. lavavu/html/draw.js +2259 -0
  19. lavavu/html/drawbox.js +1039 -0
  20. lavavu/html/emscripten-template.js +184 -0
  21. lavavu/html/emscripten.css +92 -0
  22. lavavu/html/favicon.ico +0 -0
  23. lavavu/html/gl-matrix-min.js +47 -0
  24. lavavu/html/gui.css +25 -0
  25. lavavu/html/menu.js +615 -0
  26. lavavu/html/server.js +226 -0
  27. lavavu/html/stats.min.js +5 -0
  28. lavavu/html/styles.css +58 -0
  29. lavavu/html/webview-template.html +43 -0
  30. lavavu/html/webview.html +43 -0
  31. lavavu/lavavu.py +6200 -0
  32. lavavu/osmesa/LavaVuPython.py +561 -0
  33. lavavu/osmesa/_LavaVuPython.cpython-313-x86_64-linux-gnu.so +0 -0
  34. lavavu/osmesa/__init__.py +0 -0
  35. lavavu/points.py +191 -0
  36. lavavu/server.py +343 -0
  37. lavavu/shaders/default.frag +14 -0
  38. lavavu/shaders/default.vert +17 -0
  39. lavavu/shaders/fontShader.frag +20 -0
  40. lavavu/shaders/fontShader.vert +18 -0
  41. lavavu/shaders/lineShader.frag +39 -0
  42. lavavu/shaders/lineShader.vert +26 -0
  43. lavavu/shaders/pointShader.frag +127 -0
  44. lavavu/shaders/pointShader.vert +53 -0
  45. lavavu/shaders/triShader.frag +153 -0
  46. lavavu/shaders/triShader.vert +49 -0
  47. lavavu/shaders/volumeShader.frag +400 -0
  48. lavavu/shaders/volumeShader.vert +5 -0
  49. lavavu/tracers.py +207 -0
  50. lavavu/vutils.py +211 -0
  51. lavavu_osmesa-1.9.9.dist-info/METADATA +323 -0
  52. lavavu_osmesa-1.9.9.dist-info/RECORD +65 -0
  53. lavavu_osmesa-1.9.9.dist-info/WHEEL +5 -0
  54. lavavu_osmesa-1.9.9.dist-info/entry_points.txt +2 -0
  55. lavavu_osmesa-1.9.9.dist-info/licenses/LICENSE.md +179 -0
  56. lavavu_osmesa-1.9.9.dist-info/top_level.txt +1 -0
  57. lavavu_osmesa.libs/libLLVM-17-51492e70.so +0 -0
  58. lavavu_osmesa.libs/libOSMesa-f6a8f160.so.8.0.0 +0 -0
  59. lavavu_osmesa.libs/libdrm-b0291a67.so.2.4.0 +0 -0
  60. lavavu_osmesa.libs/libffi-3a37023a.so.6.0.2 +0 -0
  61. lavavu_osmesa.libs/libglapi-520b284c.so.0.0.0 +0 -0
  62. lavavu_osmesa.libs/libpcre2-8-516f4c9d.so.0.7.1 +0 -0
  63. lavavu_osmesa.libs/libselinux-d0805dcb.so.1 +0 -0
  64. lavavu_osmesa.libs/libtinfo-3a2cb85b.so.6.1 +0 -0
  65. lavavu_osmesa.libs/libzstd-76b78bac.so.1.4.4 +0 -0
lavavu/convert.py ADDED
@@ -0,0 +1,888 @@
from __future__ import print_function
"""
Warning! EXPERIMENTAL:
these features and functions are under development, will have bugs,
and may be heavily modified in the future

Tools for converting between 3D data types

- Points to Volume
- Triangles to OBJ file
"""
import numpy
import os
import sys

def try_import(module_name, second_try=False):
    """
    Attempts to import a module, then runs pip install if not found and attempts again
    """
    import importlib
    try:
        m = importlib.import_module(module_name)
        globals()[module_name] = m
        return m
    except ImportError as e:
        if not second_try:
            import subprocess
            subprocess.check_call([sys.executable, "-m", "pip", "install", module_name])
            return try_import(module_name, True)
        else:
            raise(e)

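# Usage sketch (assumes pip and network access are available at runtime):
#   trimesh = try_import('trimesh')   #imports trimesh, pip-installing it first if missing
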
def min_max_range(verts):
    """
    Get bounding box from a list of vertices
    returns (min, max, range)
    """
    vmin = numpy.min(verts, axis=0)
    vmax = numpy.max(verts, axis=0)
    vrange = vmax - vmin
    print("Bounding box ", (vmin, vmax), "Range ", vrange)
    return (vmin, vmax, vrange)

def default_sample_grid(vrange, res=8):
    """Calculate a sample grid fine enough to capture point details
    the resolution of the smallest dimension will be 'res' elements
    """
    #If provided a full resolution, use that, otherwise will be interpreted as min res
    if isinstance(res, (list,tuple)):
        if len(list(res)) == 3:
            return list(res)
        res = res[0]
    #Use bounding box range min
    minr = numpy.min(vrange)
    #res parameter is minimum resolution
    factor = float(res) / float(minr)
    RES = [int(factor*(vrange[0])), int(factor*(vrange[1])), int(factor*(vrange[2]))]
    print("Sample grid RES:",RES)
    return RES

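# Worked example (illustrative): for vrange = (2.0, 1.0, 4.0) and res=8 the smallest
# dimension (1.0) gets 8 cells, so factor = 8.0 and RES = [16, 8, 32].
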
def points_to_volume(verts, weights=None, res=8, kdtree=False, normed=True, clamp=None, boundingbox=None):
    """
    Convert object vertices to a volume by interpolating points to a grid

    - Vertex data only is used, so treated as a point cloud
    - Can also be used to sample an irregular grid to a regular one so the volume render matches the grid dimensions
    - Result is a density field that can be volume rendered

    Default is to use the numpy.histogramdd method, pass kdtree=True to use scipy.spatial.KDTree

    TODO: support colour data too, converted density field becomes alpha channel (actually, use weights)

    Returns
    -------
    values : numpy array of float32
        The converted density field
    vmin : numpy array
        The minimum 3d vertex of the bounding box
    vmax : numpy array
        The maximum 3d vertex of the bounding box

    """
    if kdtree:
        return points_to_volume_tree(verts, weights, res)
    else:
        return points_to_volume_histogram(verts, weights, res, normed, clamp, boundingbox)

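# Usage sketch (illustrative names; assumes an (N,3) float array of point positions):
#   pts = numpy.random.rand(10000, 3).astype(numpy.float32)
#   values, vmin, vmax = points_to_volume(pts, res=32)
#   #'values' is a Z,Y,X density grid, 'vmin'/'vmax' are the bounding box corners
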
def points_to_volume_histogram(verts, weights, res=8, normed=True, clamp=None, boundingbox=None):
    """
    Using numpy.histogramdd to create 3d histogram volume grid
    (Easily the fastest, but less control over output)
    """
    #Reshape to 3d vertices
    verts = verts.reshape((-1,3))

    #Get bounding box of swarm
    vmin, vmax, vrange = min_max_range(verts)

    #Minimum resolution to get ok sampling
    if boundingbox is not None:
        vmin = boundingbox[0]
        vmax = boundingbox[1]
        vrange = numpy.array(boundingbox[1]) - numpy.array(boundingbox[0])
    RES = default_sample_grid(vrange, res)

    if weights is not None:
        weights = weights.ravel()
        #Normalise (should be done by histogramdd anyway if normed=True)
        weights = (weights - weights.min()) / (weights.max() - weights.min())
        #print(verts.shape, weights.shape, weights.min(), weights.max())

    #H, edges = numpy.histogramdd(verts, bins=RES)
    if boundingbox is None:
        H, edges = numpy.histogramdd(verts, weights=weights, bins=RES, density=normed) #density=True for newer numpy
    else:
        rg = ((vmin[0], vmax[0]), (vmin[1], vmax[1]), (vmin[2], vmax[2])) #provide bounding box as range
        H, edges = numpy.histogramdd(verts, weights=weights, bins=RES, range=rg, density=normed) #density=True for newer numpy

    #Reverse ordering X,Y,Z to Z,Y,X for volume data
    values = H.transpose()

    #Clamp [0,0.1] - optional (for a more binary output, points vs no points)
    if clamp is not None:
        values = numpy.clip(values, a_min=clamp[0], a_max=clamp[1])

    #Normalise [0,1] if using probability density
    if normed:
        values = values / numpy.max(values)

    return (values, vmin, vmax)

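# The core of the method above is equivalent to this standalone numpy call (illustrative):
#   H, edges = numpy.histogramdd(pts, bins=[32, 32, 32], density=True)
#   grid = H.transpose()   #reorder X,Y,Z -> Z,Y,X for volume data
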
def points_to_volume_tree(verts, weights=None, res=8):
    """
    Using scipy.spatial.KDTree to find nearest points on grid

    Much slower, but more control

    TODO: control parameters
    """
    #weights is accepted for call compatibility with points_to_volume() but is not used by this method yet
    #Reshape to 3d vertices
    verts = verts.reshape((-1,3))

    #Get bounding box of swarm
    lmin, lmax, lrange = min_max_range(verts)
    print("Volume bounds: ",lmin.tolist(), lmax.tolist(), lrange.tolist())

    #Push out the edges a bit, will create a smoother boundary when we filter
    #lmin -= 0.1*lrange
    #lmax += 0.1*lrange
    values = numpy.full(shape=(verts.shape[0]), dtype=numpy.float32, fill_value=1.0)

    #Minimum resolution to get ok sampling
    RES = default_sample_grid(lrange, res)

    #Push out the edges a bit, will create a smoother boundary when we filter
    cell = lrange / RES #Cell size
    print("CELL:",cell)
    lmin -= 2.0*cell
    lmax += 2.0*cell

    x = numpy.linspace(lmin[0], lmax[0] , RES[0])
    y = numpy.linspace(lmin[1], lmax[1] , RES[1])
    z = numpy.linspace(lmin[2], lmax[2] , RES[2])
    print(x.shape, y.shape, z.shape)
    Z, Y, X = numpy.meshgrid(z, y, x, indexing='ij') #ij=matrix indexing, xy=cartesian
    XYZ = numpy.vstack((X,Y,Z)).reshape([3, -1]).T

    print(lmin,lmax, XYZ.shape)

    #KDtree for finding nearest neighbour points
    from scipy import spatial
    #sys.setrecursionlimit(1000)
    #tree = spatial.KDTree(XYZ)
    print("Building KDTree")
    tree = spatial.cKDTree(XYZ)

    #Outside distance to apply to grid points
    MAXDIST = max(lrange) / max(RES) #Max cell size diagonal
    print("Tree query, maxdist:",MAXDIST,max(lrange))

    #Query all points, result is tuple: distances, indices
    distances, indices = tree.query(verts, k=1) #Just get a single nearest neighbour for now

    print("Calculate distances")
    #Convert distances to [0,1] where 1=on grid and <= 0 is outside max range
    distances = (MAXDIST - distances) / MAXDIST
    print("Zero out of range distances")
    distances *= (distances>0) #Zero negative elements by multiplication in-place

    #Add the distances to the values at their nearest grid point indices
    print("Compose distance field")
    values = numpy.zeros(shape=(XYZ.shape[0]), dtype=numpy.float32)
    values[indices] += distances
    #Clip value max
    print("Clip distance field")
    values = values.clip(max=1.0)

    #Reshape to actual grid dims Z,Y,X... (not required but allows slicing)
    XYZ = XYZ.reshape(RES[::-1] + [3])
    values = values.reshape(RES[::-1])
    print(XYZ.shape, values.shape)

    return (values, lmin, lmax)


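# Usage sketch for the KDTree path (requires scipy; illustrative resolution):
#   values, lmin, lmax = points_to_volume(pts, res=16, kdtree=True)
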
def points_to_volume_3D(vol, objects, res=8, kdtree=False, blur=0, pad=None, normed=True, clamp=None, weights=None):
    """
    Interpolate points to grid and load into passed volume object

    Given a list of objects and a volume object, convert a point cloud
    from another object (or objects - list is supported) into a volume using
    points_to_volume()
    """
    lv = vol.parent #Get the viewer from passed object
    #lv.hide(objects) #Hide the converted objects

    #Get vertices from lavavu objects
    if weights:
        pverts, bb_all, weights = lv.get_all_vertices(objects, weights)
    else:
        pverts, bb_all = lv.get_all_vertices(objects)

    #blur = False
    #Use bounding box of full model?
    #vmin, vmax, vrange = min_max_range([lv["min"], lv["max"]])
    #bb_all == (vmin, vmax)
    vdata, vmin, vmax = points_to_volume(pverts, weights, res, kdtree, normed, clamp, bb_all)

    if blur > 0:
        if pad is None:
            pad = int(blur*2)
        if pad > 0:
            print("Pad edges before blur", pad)
            vdata = numpy.pad(vdata, pad, mode='constant')
        print("Filter/blur distance field, sigma=%d" % blur)
        from scipy.ndimage import gaussian_filter #scipy.ndimage.filters is deprecated
        values = gaussian_filter(vdata, sigma=blur) #, mode='nearest')
    else:
        values = vdata #Need extra space at edges to use blur, so skip, or use pad()

    print(numpy.min(values), numpy.max(values))

    vol.values(values)
    vol.vertices((vmin, vmax))

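# Usage sketch (assumes 'lv' is a lavavu.Viewer with a point object named 'points' loaded;
# the object and volume names here are illustrative):
#   vol = lv.volume("density")
#   points_to_volume_3D(vol, lv.objects["points"], res=64, blur=1)
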
def points_to_volume_4D(vol, objects, res=8, kdtree=False, blur=0, pad=None, normed=True, clamp=None, weights=None):
    """
    Interpolate points to grid at each timestep

    Given a list of objects and a volume object, convert a time-varying point cloud
    from another object (or objects - list is supported) into a volume using
    points_to_volume_3D()
    """
    lv = vol.parent #Get the viewer from passed object

    for step in lv.steps:
        print("TIMESTEP:",step)
        lv.timestep(step)
        #TODO: timestep data not changing!!!
        print(lv.step)
        lv.display((200,150))

        points_to_volume_3D(vol, objects, res, kdtree, blur, pad, normed, clamp, weights)

def colour2rgb(c):
    return [c & 255, (c >> 8) & 255, (c >> 16) & 255]

def colour2hex(rgb):
    def padhex2(i):
        s = hex(int(i))
        return s[2:].zfill(2)
    return "#" + padhex2(rgb[0]) + padhex2(rgb[1]) + padhex2(rgb[2])

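# Round-trip example, using the byte layout unpacked by colour2rgb above
# (red in the lowest byte, alpha in the highest):
#   colour2rgb(0xFF0000FF)  -> [255, 0, 0]
#   colour2hex([255, 0, 0]) -> "#ff0000"
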
def _get_objects(source):
    """
    Returns a list of objects

    If source is lavavu.Viewer() list contains all objects
    If source is lavavu.Object() list contains that single object
    """
    if source.__class__.__name__ == 'Viewer':
        return source.objects.list
    elif not isinstance(source, list):
        return [source]
    else:
        return source

def export_OBJ(filepath, source, verbose=False, vertexcolours=True):
    """
    Export given object(s) to an OBJ file
    Supports only triangle mesh object data

    If source is lavavu.Viewer() exports all objects
    If source is lavavu.Object() exports single object

    Set vertexcolours to support writing R,G,B values with vertices
    (not part of the OBJ standard but supported in some software)
    """
    mtlfilename = os.path.splitext(filepath)[0] + '.mtl'
    objects = _get_objects(source)
    with open(filepath, 'w') as f, open(mtlfilename, 'w') as m:
        f.write("# OBJ file\n")
        offset = 1
        for obj in objects:
            if obj["visible"]:
                f.write("g %s\n" % obj.name)
                offset = _write_OBJ(f, m, filepath, obj, offset, verbose, vertexcolours)

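# Usage sketch (assumes 'lv' is a lavavu.Viewer holding triangle meshes; names are illustrative):
#   export_OBJ('scene.obj', lv)   #writes scene.obj plus a scene.mtl material library alongside it
#   export_OBJ('mesh.obj', lv.objects['surface'], vertexcolours=False)
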
def _write_MTL(m, name, texture=None, diffuse=[1.0, 1.0, 1.0], ambient=None, specular=None, opacity=1.0, illum=None):
    #http://paulbourke.net/dataformats/mtl/
    #print("Writing MTL ", texture, diffuse, opacity, name)
    mtl = "newmtl %s\n" % name
    mtl += "Kd %.06f %.06f %.06f\n" % (diffuse[0], diffuse[1], diffuse[2])
    if ambient:
        mtl += "Ka %.06f %.06f %.06f\n" % (ambient[0], ambient[1], ambient[2])
    if specular:
        if illum is None:
            illum = 2 #Highlight on
        mtl += "Ks %.06f %.06f %.06f\n" % (specular[0], specular[1], specular[2])
        if len(specular) > 3:
            mtl += "Ns %.06f\n" % specular[3]
    mtl += "d %f\n" % opacity #Dissolve: 1.0 = fully opaque
    if illum is None:
        illum = 1 #Default=1 = colour on, ambient on
    mtl += "illum %d\n" % illum

    if texture:
        mtl += "map_Kd %s\n" % texture

    mtl += '\n'
    m.write(mtl)
    mtl_line = "usemtl %s\n" % name
    return mtl_line

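# Example of the material record written above for a red diffuse material at full
# opacity (illustrative output of the code as given):
#   newmtl red
#   Kd 1.000000 0.000000 0.000000
#   d 1.000000
#   illum 1
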
def _write_OBJ(f, m, filepath, obj, offset=1, verbose=False, vertexcolours=True):
    mtl_line = ""
    cmaptexcoords = []
    colourdict = None
    import re
    name = re.sub(r"\s+", "", obj["name"])
    colourcount = sum([len(c) for c in obj.data.colours])
    #print(f"{colourcount=}")
    if m and obj["texture"]:
        fn = obj["texture"]
        if fn == 'colourmap':
            fn = 'texture.png'
            #Write palette.png
            obj.parent.palette('image', 'texture')
        if verbose:
            print("Writing texture mtl ", fn)
        mtl_line = _write_MTL(m, name, texture=fn, opacity=obj["opacity"])

    elif m and colourcount > 0 and not vertexcolours:
        #Define material for each (slow for high colour/vertex count - could sort faces by material before writing?)
        if colourcount < 10000:
            colourdict = {}
            #SLOW!

            #Get unique https://stackoverflow.com/a/33197029/866759
            allcolours = numpy.concatenate(obj.data.colours)
            unique = numpy.unique(allcolours)

            for c in unique:
                rgb = colour2rgb(c)
                cs = colour2hex(rgb)
                colourdict[c] = (cs,rgb)
            if verbose:
                print("Writing mtl lib (colour list), unique colours:",len(unique))
            for c in colourdict:
                cs, rgb = colourdict[c]
                mtl_line = _write_MTL(m, cs, diffuse=[rgb[0]/255.0, rgb[1]/255.0, rgb[2]/255.0], opacity=obj["opacity"])
        else:
            #Support this full RGB palette?
            #TODO: generate texcoord data using formula below
            #18bit RGB = 262144 colours
            #Top-left = (0,0,0)
            #Bottom-right = (255,255,255)
            #(8x8 tiles of 64x64)
            #G - slow, Z index = G
            #R - midd, Y axis = R
            #B - fast, X axis = B
            #X = B//4
            #Y = R//4
            #Z = G//4
            #zx = Z % 8
            #zy = Z // 8
            #x = zx * 64 + X
            #y = zy * 64 + Y
            #u = x / 512
            #v = y / 512

            #https://upload.wikimedia.org/wikipedia/commons/3/34/RGB_18bits_palette.png
            palimg = "iVBORw0KGgoAAAANSUhEUgAAAgAAAAIACAIAAAB7GkOtAAAABGdBTUEAANjr9RwUqgAAACBjSFJNAACH"\
                     "CgAAjAoAAPYWAACEzwAAczsAAOxVAAA6lwAAHU1girkoAAAGc0lEQVR42u3dQYoDMQxFwTZY93Duf8mQ"\
                     "A8hg9yIIVTHbgU9vHsrG8/mZL/7iv/8+n2G//fbbb//V/vkA0JAAAAgAAAIAgAAAIAAACAAAAgCAAAAg"\
                     "AAAIAAACAIAAACAAAAgAAAIAgAAAIAAACAAAAgDARQDCRwBwAQAgAAAIAAACAIAAACAAAAgAAAIAgAAA"\
                     "IAAACAAAAgCAAAAgAAAIAAACAIAAACAAAAgAAAIAQB4Aj8IDuAAAEAAABAAAAQBAAAAQAAAEAAABAEAA"\
                     "ABAAAAQAAAEAQAAAEAAABAAAAQBAAAAQAAAEAAABAGATAI/CA/QMwBrVC2a//fbbb//dfj8BAfS8AAQA"\
                     "QAAAEAAABAAAAQBAAAAQAAAEAAABAEAAABAAAAQAAAEAQAAAEAAABAAAAQBAAAAQAADOAxA+AoALAAAB"\
                     "AEAAABAAAAQAAAEAQAAAEAAABAAAAQBAAAAQAAAEAAABAEAAABAAAAQAAAEAQAAAEAAA8gB4FB7ABQCA"\
                     "AAAgAAAIAAACAIAAACAAAAgAAAIAgAAAIAAACAAAAgCAAAAgAAAIAAACAIAAACAAAAgAAJsAeBQeoGcA"\
                     "1qheMPvt77s/fH/7X+33ExBAzwtAAAAEAAABAEAAABAAAAQAAAEAQAAAEAAABAAAAQBAAAAQAAAEAAAB"\
                     "AEAAABAAAAQAAAEA4DwA4SMAuAAAEAAABAAAAQBAAAAQAAAEAAABAEAAABAAAAQAAAEAQAAAEAAABAAA"\
                     "AQBAAAAQAAAEAAABACAPgEfhAVwAAAgAAAIAgAAAIAAACAAAAgCAAAAgAAAIAAACAIAAACAAAAgAAAIA"\
                     "gAAAIAAACAAAAgCAAACwCYBH4QF6BuAzqhfMfvvtt9/+u/1+AgLoeQEIAIAAACAAAAgAAAIAgAAAIAAA"\
                     "CAAAAgCAAAAgAAAIAAACAIAAACAAAAgAAAIAgAAAIAAAnAcgfAQAFwAAAgCAAAAgAAAIAAACAIAAACAA"\
                     "AAgAAAIAgAAAIAAACAAAAgCAAAAgAAAIAAACAIAAACAAAAgAAAIAQB4Aj8IDuAAAEAAABAAAAQBAAAAQ"\
                     "AAAEAAABAEAAABAAAAQAAAEAQAAAEAAABAAAAQBAAAAQAAAEAAABAEAAABAAAAQAAAEAQAAAEAAABAAA"\
                     "AQBAAAAQAAAEAAABAEAAABAAAAQBAAAAQAAAEAAABAAAAQBAAAAQAAAEAAABAEAAABAAAAQAAAEAQAAA"\
                     "EAAABAAAAQBAAAAQAADOAxA+AoALAAABAEAAABAAAAQAAAEAQAAAEAAABAAAAQBAAAAQAAAEAAABAEAA"\
                     "ABAAAAQAAAEAQAAAEAAAbALgUXiAlr7Y4BnEOVAUKwAAAABJRU5ErkJggg=="
            texfn = os.path.splitext(filepath)[0] + '.png'
            import base64
            with open(texfn, "wb") as fp:
                fp.write(base64.b64decode(palimg))
            mtl_line = _write_MTL(m, "palette_rgb", texture=texfn, opacity=obj["opacity"])
    elif m and "colour" in obj:
        #print("Writing mtl lib (default colour)")
        c = obj.parent.parse_colour(obj["colour"])
        mtl_line = _write_MTL(m, 'default-' + name, diffuse=c, opacity=obj["opacity"])

    for o,data in enumerate(obj):
        if verbose: print("[%s] element %d of %d" % (obj.name, o+1, len(obj.data.vertices)))
        verts = data.vertices.reshape((-1,3))
        if len(verts) == 0:
            if verbose: print("No vertices")
            continue
        f.write("o Surface_%d\n" % o)
        #f.write("o %s\n" % obj.name)
        if m: f.write("mtllib " + os.path.splitext(os.path.basename(filepath))[0] + ".mtl\n")
        indices = data.indices.reshape((-1,3))
        normals = data.normals.reshape((-1,3))
        texcoords = data.texcoords.reshape((-1,2))
        #Calculate texcoords from colour values?
        if len(texcoords) == 0 and obj["texture"] == 'colourmap':
            label = obj["colourby"]
            if isinstance(label,int):
                #Use the given label index
                #NOTE: 'datasets' is not defined in this module, so an integer colourby label will fail here
                sets = list(datasets.keys())
                label = sets[label]
            elif len(label) == 0:
                #Use the default label
                label = 'values'
            valdata = data[label]
            if len(valdata) >= o+1:
                #Found matching value array
                v = valdata
                #Normalise [0,1]
                texcoords = (v - numpy.min(v)) / numpy.ptp(v)
                #Add 2nd dimension (not actually necessary,
                #tex coords can be 1d but that breaks some loaders (meshlab))
                zeros = numpy.zeros((len(texcoords)))
                texcoords = numpy.vstack((texcoords,zeros)).reshape([2, -1]).transpose()

        #Colours?
        cv0 = ""
        vperc = 1
        if len(data.colours):
            vperc = int(verts.shape[0] / len(data.colours))

        if verbose: print("- Writing vertices:",verts.shape)
        for vi,v in enumerate(verts):
            #Vertex colour with vertex? (only if flag passed)
            if vertexcolours and len(data.colours):
                #if vi%10==0: print(len(data.colours), len(verts), vperc, vi)
                c = data.colours[min(vi // vperc, len(data.colours) - 1)] #colour index for this vertex
                rgb = colour2rgb(c)
                f.write("v %.6f %.6f %.6f %.6f %.6f %.6f\n" % (v[0], v[1], v[2], rgb[0]/255.0, rgb[1]/255.0, rgb[2]/255.0))
            else:
                f.write("v %.6f %.6f %.6f\n" % (v[0], v[1], v[2]))
        if verbose: print("- Writing normals:",normals.shape)
        for n in normals:
            f.write("vn %.6f %.6f %.6f\n" % (n[0], n[1], n[2]))
        if verbose: print("- Writing texcoords:",texcoords.shape)
        if len(texcoords.shape) == 2:
            for t in texcoords:
                f.write("vt %.6f %.6f\n" % (t[0], t[1]))
        else:
            for t in texcoords:
                f.write("vt %.6f\n" % t)

        #Face elements v/t/n v/t v//n
        f.write(mtl_line)
        if len(normals) and len(texcoords):
            if verbose: print("- Writing faces (v/t/n):",indices.shape)
        elif len(texcoords):
            if verbose: print("- Writing faces (v/t):",indices.shape)
        elif len(normals):
            if verbose: print("- Writing faces (v//n):",indices.shape)
        else:
            if verbose: print("- Writing faces (v):",indices.shape)
        if verbose: print("- Colours :",data.colours.shape)
        if verbose: print("- Indices :",indices.shape)

        for n,i in enumerate(indices):
            if verbose and n%1000==0: print(".", end=''); sys.stdout.flush()
            i0 = i[0]+offset
            i1 = i[1]+offset
            i2 = i[2]+offset
            #Use mtl colours?
            if colourdict:
                ci = int(i[0] / vperc)
                cv1 = data.colours[ci]
                if cv0 != cv1 and cv1 in colourdict:
                    f.write("usemtl " + colourdict[cv1][0] + "\n")
                    cv0 = cv1

            if len(normals) and len(texcoords):
                f.write("f %d/%d/%d %d/%d/%d %d/%d/%d\n" % (i0, i0, i0, i1, i1, i1, i2, i2, i2))
            elif len(texcoords):
                f.write("f %d/%d %d/%d %d/%d\n" % (i0, i0, i1, i1, i2, i2))
            elif len(normals):
                f.write("f %d//%d %d//%d %d//%d\n" % (i0, i0, i1, i1, i2, i2))
            else:
                f.write("f %d %d %d\n" % (i0, i1, i2))
        if verbose: print()

        offset += verts.shape[0]
    return offset

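# Example of the face records emitted above: OBJ indices are 1-based, and in the
# v/t/n form the same index is reused for vertex, texcoord and normal, e.g.:
#   f 1/1/1 2/2/2 3/3/3
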
def export_PLY(filepath, source, binary=True):
    """
    Export given object(s) to a PLY file
    Supports points or triangle mesh object data

    If source is lavavu.Viewer() exports all objects
    If source is lavavu.Object() exports single object

    Parameters
    ----------
    filepath : str
        Output file to write
    source : lavavu.Viewer or lavavu.Object
        Where to get object data to export
    binary : boolean
        Write vertex/face data as binary, default True
    """
    objects = _get_objects(source)
    with open(filepath, mode='wb') as f:
        voffset = 0
        foffset = 0
        #First count vertices, faces
        fc = 0
        vc = 0
        for obj in objects:
            for o,data in enumerate(obj):
                vc += len(data.vertices) #Vertices now in shape (N,3) so len returns correct count
                fc += len(data.indices) // 3

        #Pass the counts first
        vertex = None
        face = None
        print(vc, " vertices, ", fc, " faces")
        for obj in objects:
            for o,data in enumerate(obj):
                print("[%s] element %d of %d, type %s" % (obj.name, o+1, len(obj.data), data.type))
                #print("OFFSETS:",voffset,foffset)
                verts = data.vertices.reshape((-1,3))
                if len(verts) == 0:
                    print("No vertices")
                    return
                if len(verts) != vc:
                    print("Vertex count error!", len(verts), vc, verts.shape)
                    return
                indices = data.indices.reshape((-1,3))
                normals = data.normals.reshape((-1,3))
                texcoords = data.texcoords.reshape((-1,2))

                vperc = 0
                cperf = 0
                if len(data.colours):
                    vperc = int(verts.shape[0] / len(data.colours))
                    C = data.colours
                    #print("COLOURS:",len(C),C.shape, verts.shape[0], verts.shape[0] / len(data.colours), vperc)

                if data.type != 'points':
                    #Per face colours, or less
                    if face is None:
                        if vperc and vperc < len(verts):
                            #cperf = int(indices.shape[0] / len(data.colours))
                            face = numpy.zeros(shape=(fc), dtype=[('vertex_indices', 'i4', (3,)), ('red', 'u1'), ('green', 'u1'), ('blue', 'u1')])
                        else:
                            face = numpy.zeros(shape=(fc), dtype=[('vertex_indices', 'i4', (3,))])
                        print("FACE:",face.dtype)

                    for i,idx in enumerate(indices):
                        if i%1000==0: print(".", end=''); sys.stdout.flush()
                        if vperc and vperc < len(verts):
                            #Have colour, but less than vertices, apply to faces
                            ci = idx[0] // vperc
                            c = data.colours[ci]
                            rgb = colour2rgb(c)
                            face[i+foffset] = ([idx[0]+voffset, idx[1]+voffset, idx[2]+voffset], rgb[0], rgb[1], rgb[2])
                        else:
                            face[i+foffset] = ([idx[0]+voffset, idx[1]+voffset, idx[2]+voffset])
                    print()

                #Construct and write vertex elements
                if vertex is None:
                    #Setup vertex array based on first object element
                    D = [('x', 'f4'), ('y', 'f4'), ('z', 'f4')]
                    if normals.shape[0] == verts.shape[0]:
                        D += [('nx', 'f4'), ('ny', 'f4'), ('nz', 'f4')]
                    if texcoords.shape[0] == verts.shape[0]:
                        D += [('s', 'f4'), ('t', 'f4')]
                    if vperc and vperc == 1:
                        D += [('red', 'u1'), ('green', 'u1'), ('blue', 'u1')]
                    print("VERTEX:",D)
                    vertex = numpy.zeros(shape=(vc), dtype=D)

                for i,v in enumerate(verts):
                    #if i%1000==0:
                    #    print("vert index",i,vperc)
                    if i%1000==0: print(".", end=''); sys.stdout.flush()
                    E = [v[0], v[1], v[2]]
                    if normals.shape[0] == verts.shape[0]:
                        N = normals[i]
                        E += [N[0], N[1], N[2]]
                    if texcoords.shape[0] == verts.shape[0]:
                        T = texcoords[i]
                        E += [T[0], T[1]]
                    if vperc and vperc == 1:
                        c = data.colours[i]
                        rgb = colour2rgb(c)
                        E += [rgb[0], rgb[1], rgb[2]]
                    vertex[i+voffset] = tuple(E)
                print()

                #Update offsets : number of vertices / faces added
                voffset += verts.shape[0]
                foffset += indices.shape[0]

        import plyfile
        #vertex = numpy.array(vertex, dtype=vertex.dtype)
        els = []
        els.append(plyfile.PlyElement.describe(vertex, 'vertex'))
        if face is not None:
            els.append(plyfile.PlyElement.describe(face, 'face'))

        #Write, text or binary
        if binary:
            print("Writing binary PLY data")
            plyfile.PlyData(els).write(f)
        else:
            print("Writing ascii PLY data")
            plyfile.PlyData(els, text=True).write(f)

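# Usage sketch (assumes 'lv' is a lavavu.Viewer; requires the 'plyfile' module):
#   export_PLY('mesh.ply', lv)                 #binary PLY
#   export_PLY('mesh.ply', lv, binary=False)   #ascii PLY
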
def _get_PLY_colours(element):
    """
    Extract colour data from PLY element and return as a numpy rgba array
    """
    r = None
    g = None
    b = None
    a = None
    #print(element.properties)
    for prop in element.properties:
        #print(prop,prop.name,prop.dtype)
        if 'red' in prop.name: r = element[prop.name]
        if 'green' in prop.name: g = element[prop.name]
        if 'blue' in prop.name: b = element[prop.name]
        if 'alpha' in prop.name: a = element[prop.name]

    if r is not None and g is not None and b is not None:
        if a is None:
            a = numpy.full(r.shape, 255)
        #return numpy.array([r, g, b, a])
        return numpy.vstack((r,g,b,a)).reshape([4, -1]).transpose()

    return None

def plot_PLY(lv, filename):
    """
    Plot triangles from a PlyData instance. Assumptions:
    has a 'vertex' element with 'x', 'y', and 'z' properties,
    has a 'face' element with an integral list property 'vertex_indices',
    all of whose elements have length 3.
    """
    import plyfile
    plydata = plyfile.PlyData.read(filename)

    x = plydata['vertex']['x']
    y = plydata['vertex']['y']
    z = plydata['vertex']['z']
    V = numpy.vstack((x,y,z)).reshape([3, -1]).transpose()
    #V = numpy.array([x, y, z])
    #print("VERTS:", V.shape)

    vp = []
    for prop in plydata['vertex'].properties:
        vp.append(prop.name)
        print(prop.name)

    N = None
    if 'nx' in vp and 'ny' in vp and 'nz' in vp:
        nx = plydata['vertex']['nx']
        ny = plydata['vertex']['ny']
        nz = plydata['vertex']['nz']
        N = numpy.vstack((nx,ny,nz)).reshape([3, -1]).transpose()

    T = None
    if 's' in vp and 't' in vp:
        s = plydata['vertex']['s']
        t = plydata['vertex']['t']
        T = numpy.vstack((s,t)).reshape([2, -1]).transpose()

    C = _get_PLY_colours(plydata['vertex'])

    if 'face' in plydata:
        #print(plydata['face'])
        #Face colours?
        if C is None:
            C = _get_PLY_colours(plydata['face'])

        tri_idx = plydata['face']['vertex_indices']
        idx_dtype = tri_idx[0].dtype
        tri_idx = numpy.asarray(tri_idx).flatten()
        #print(type(tri_idx),idx_dtype,tri_idx.shape)
        #print(tri_idx)

        triangles = numpy.array([t.tolist() for t in tri_idx]).flatten()
        #triangles = numpy.fromiter(tri_idx, [('data', idx_dtype, (3,))], count=len(tri_idx))['data']

        return lv.triangles(vertices=V, indices=triangles, colours=C, normals=N, texcoords=T)
    else:
        return lv.points(vertices=V, colours=C, normals=N, texcoords=T)

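# Usage sketch (illustrative file name):
#   import lavavu
#   lv = lavavu.Viewer()
#   surf = plot_PLY(lv, 'model.ply')   #returns a triangles (or points) drawing object
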
def export_any(filepath, source, name=None):
    """
    Export given object(s) to a file format supported by trimesh, eg: GLTF or GLB file
    See: https://trimsh.org/trimesh.exchange.html
    Requires "trimesh" module
    Supports triangle mesh object data

    If source is lavavu.Viewer() exports all objects
    If source is lavavu.Object() exports single object

    Parameters
    ----------
    filepath : str
        Output file to write
    source : lavavu.Viewer or lavavu.Object
        Where to get object data to export
    """
    #TODO: support points, lines
    try_import('trimesh')

    objects = _get_objects(source)
    scene = trimesh.Scene()
    facect = 0
    for obj in objects:
        #Use object name to export unless name provided
        oname = name
        if oname is None:
            oname = obj["name"]
        for i,e in enumerate(obj.data):
            #print(e)
            meshdict = {}
            meshdict["vertices"] = e.vertices.reshape(-1,3)
            meshdict["vertex_normals"] = e.normals.reshape(-1,3)
            meshdict["faces"] = e.indices.reshape(-1,3)
            facect += e.indices.shape[0]
            if not len(meshdict["faces"]):
                print("Empty",e)
                continue
            colours = e.colours
            #print("ColourCount",len(colours))
            if len(colours) == len(meshdict["vertices"]):
                #View int32 into uint8 bytes
                meshdict["vertex_colors"] = colours.view('uint8').reshape((-1,4))

            mesh = trimesh.load_mesh(meshdict)
            scene.add_geometry(mesh, geom_name=oname + '#' + str(i))

            if len(colours) <= 1:
                #If we don't set a default material colour, trimesh import will set default colour for every vertex
                colour = obj.parent.parse_colour(obj["colour"])
                colour = numpy.array(colour*255, dtype=numpy.uint8)
                if len(colours) > 0:
                    colour = colours.view('uint8').reshape((-1,4))[0]
                if obj["opacity"] < 1.0:
                    colour[3] *= obj["opacity"]
                #print("Single colour: export as material", colour)
                mesh.visual = trimesh.visual.TextureVisuals(material=trimesh.visual.material.PBRMaterial())
                mesh.visual.material.baseColorFactor = colour

    if facect > 0:
        scene.export(file_obj=filepath)
    else:
        print("No triangle facets to export")

    """
    #Try GLTF ascii?
    gltf = trimesh.exchange.gltf.export_gltf(scene)
    import json
    print("Mesh loaded, exporting GLTF")
    gltf = trimesh.exchange.gltf.export_gltf(mesh) #, merge_buffers=True)
    #print(gltf.keys())
    #print(gltf['model.gltf'])
    for fn in gltf:
        mode = 'w'
        #if ".bin" in fn: mode = 'wb'
        with open(fn, 'wb') as f:
            print("writing gltf component:" + fn)
            f.write(gltf[fn])
    """

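# Usage sketch (assumes 'lv' is a lavavu.Viewer; trimesh is installed on demand by try_import):
#   export_any('scene.glb', lv)                #binary glTF
#   export_any('scene.gltf', lv, name='mesh')
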
def read_any(filepath, lv, scaling=None):
    """
    Load using trimesh, supports GLTF etc

    See: https://trimsh.org/trimesh.exchange.html
    Requires "trimesh" module
    """
    #TODO: support points, lines
    try_import('trimesh')
    scene = trimesh.load(filepath)
    tris = None
    for name in scene.geometry.keys():
        geometry = scene.geometry[name]
        idx = 0
        if '#' in name:
            name,idx = name.split('#')
            idx = int(idx)

        if idx == 0:
            #Texturing for multiple objects is broken when loaded with triangles() renderer
            #tris = lv.triangles(name)
            #Use surface() instead and textures work as expected
            tris = lv.surface(name)
        else:
            tris.append()

        if scaling:
            import numpy as np
            matrix = np.eye(4)
            matrix[0, 0] = scaling
            matrix[1, 1] = scaling
            matrix[2, 2] = scaling
            geometry.apply_transform(matrix)

        tris.vertices(geometry.vertices)
        tris.normals(geometry.vertex_normals)
        tris.indices(geometry.faces)

        #adjacency_matrix = geometry.edges_sparse
        #print(geometry)
        #print(geometry.visual)

        #Load vertex colours if available
        if hasattr(geometry.visual, "vertex_colors"):
            #print("HAVE VERTEX COLOURS",len(geometry.visual.vertex_colors))
            tris.colours(geometry.visual.vertex_colors)
        #Load single colour material
        elif hasattr(geometry.visual, "material"):
            if hasattr(geometry.visual.material, "baseColorTexture"):
                #print("HAVE TEXTURE")
                #print(geometry.visual.material)
                #if hasattr(geometry.visual, "image"):
                #    print(geometry.visual.image)
                #print(dir(geometry.visual.material))
                #print(hasattr(geometry.visual, "material"))
                #print(geometry.visual.material.baseColorTexture)
                tris.texture(geometry.visual.material.baseColorTexture)
            elif hasattr(geometry.visual.material, "baseColorFactor"):
                #print("HAVE MATERIAL")
                #print(geometry.visual.material)
                #print(geometry.visual.material.baseColorFactor)
                if idx == 0:
                    #Can set as prop, but only works for single el
                    tris["colour"] = geometry.visual.material.baseColorFactor
                else:
                    tris.colours(geometry.visual.material.baseColorFactor)

        idx += 1 #Inc index?

    return tris
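
# Usage sketch (illustrative file name; a scaling of 0.001 would convert mm to metres):
#   import lavavu
#   lv = lavavu.Viewer()
#   surf = read_any('model.glb', lv, scaling=0.001)
#   lv.display()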