@combeenation/3d-viewer 18.2.0 → 18.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/lib-cjs/buildinfo.json +1 -1
- package/dist/lib-cjs/commonjs.tsconfig.tsbuildinfo +1 -1
- package/dist/lib-cjs/index.d.ts +1 -0
- package/dist/lib-cjs/index.js +1 -0
- package/dist/lib-cjs/index.js.map +1 -1
- package/dist/lib-cjs/internal/export-helper.d.ts +34 -0
- package/dist/lib-cjs/internal/export-helper.js +306 -0
- package/dist/lib-cjs/internal/export-helper.js.map +1 -0
- package/dist/lib-cjs/internal/node-helper.js +2 -2
- package/dist/lib-cjs/internal/node-helper.js.map +1 -1
- package/dist/lib-cjs/manager/camera-manager.js +2 -6
- package/dist/lib-cjs/manager/camera-manager.js.map +1 -1
- package/dist/lib-cjs/manager/dxf-export-manager.d.ts +39 -0
- package/dist/lib-cjs/manager/dxf-export-manager.js +116 -0
- package/dist/lib-cjs/manager/dxf-export-manager.js.map +1 -0
- package/dist/lib-cjs/manager/gltf-export-manager.d.ts +19 -39
- package/dist/lib-cjs/manager/gltf-export-manager.js +38 -179
- package/dist/lib-cjs/manager/gltf-export-manager.js.map +1 -1
- package/dist/lib-cjs/viewer.d.ts +3 -1
- package/dist/lib-cjs/viewer.js +4 -0
- package/dist/lib-cjs/viewer.js.map +1 -1
- package/package.json +3 -2
- package/src/index.ts +1 -0
- package/src/internal/export-helper.ts +378 -0
- package/src/internal/node-helper.ts +2 -2
- package/src/manager/camera-manager.ts +2 -6
- package/src/manager/dxf-export-manager.ts +123 -0
- package/src/manager/gltf-export-manager.ts +38 -231
- package/src/viewer.ts +6 -0
- package/dist/lib-cjs/internal/geometry-helper.d.ts +0 -14
- package/dist/lib-cjs/internal/geometry-helper.js +0 -114
- package/dist/lib-cjs/internal/geometry-helper.js.map +0 -1
- package/src/internal/geometry-helper.ts +0 -144
|
@@ -0,0 +1,378 @@
|
|
|
1
|
+
import {
|
|
2
|
+
DynamicTexture,
|
|
3
|
+
FloatArray,
|
|
4
|
+
Geometry,
|
|
5
|
+
InstancedMesh,
|
|
6
|
+
Material,
|
|
7
|
+
Mesh,
|
|
8
|
+
MorphTarget,
|
|
9
|
+
MorphTargetManager,
|
|
10
|
+
Node,
|
|
11
|
+
NodeDescription,
|
|
12
|
+
RenderTargetTexture,
|
|
13
|
+
TransformNode,
|
|
14
|
+
Vector3,
|
|
15
|
+
VertexBuffer,
|
|
16
|
+
Viewer,
|
|
17
|
+
} from '../index';
|
|
18
|
+
import { getIsScaledDownDevice } from './device-helper';
|
|
19
|
+
import {
|
|
20
|
+
clearInternalMetadataValue,
|
|
21
|
+
cloneInternalMetadata,
|
|
22
|
+
getInternalMetadataValue,
|
|
23
|
+
setInternalMetadataValue,
|
|
24
|
+
} from './metadata-helper';
|
|
25
|
+
import { nodeMatchesAnyCriteria } from './node-helper';
|
|
26
|
+
|
|
27
|
+
/** Settings for `exportPreProcess`. */
type ExportPreProcessSettings = {
  /** Nodes (and their children) that should be skipped when preparing the scene for the export. */
  excludeNodes?: NodeDescription[];
  /** When `true`, textures are re-created with a reduced maximum size (1024 px) for the export. */
  scaleDownTextures?: boolean;
};

/** Settings for `exportPostProcess`. */
type ExportPostProcessSettings = {
  /**
   * Original `maxTextureSize` engine cap to restore; only required if `scaleDownTextures` had been set in the
   * pre process.
   */
  initialTextureSize?: number;
};
|
|
34
|
+
|
|
35
|
+
/**
|
|
36
|
+
* Prepares scene for exports.
|
|
37
|
+
* The main task is to bake the transformation data into the vertices, as our current exports (GLB for AR and DXF) don't
|
|
38
|
+
* work conveniently without it:
|
|
39
|
+
* - negative signs in scaling values in GLBs lead to erroneous appearance in converted .usdz file
|
|
40
|
+
* - DXF faces calculation only takes vertices into account
|
|
41
|
+
*/
|
|
42
|
+
export async function exportPreProcess(viewer: Viewer, settings?: ExportPreProcessSettings): Promise<void> {
|
|
43
|
+
const { excludeNodes, scaleDownTextures } = settings ?? {};
|
|
44
|
+
|
|
45
|
+
// pause rendering, since we are altering the active scene, which should not be visible to the user
|
|
46
|
+
viewer.pauseRendering();
|
|
47
|
+
|
|
48
|
+
// exchange textures with scaled down versions (1024 px) if desired
|
|
49
|
+
// Example AR (GLB) export:
|
|
50
|
+
// - iOS devices will crash most likely when trying to access AR endpoints with such files
|
|
51
|
+
// - file size will be reduced, which leads to faster loading times
|
|
52
|
+
// - textures > 1024 px shouldn't make a difference on mobiles anyway
|
|
53
|
+
// we don't have to rescale anything if are already on a downscaled device, since the textures are already <= 1024
|
|
54
|
+
// also we have to be very cautios with copying textures on these devices, since we are potentially very limited
|
|
55
|
+
// with the available memory
|
|
56
|
+
const isScaledDownDevice = getIsScaledDownDevice(viewer.viewerSettings.limitTextureSize);
|
|
57
|
+
if (scaleDownTextures && !isScaledDownDevice) {
|
|
58
|
+
// the idea is to re-create all textures with a smaller texture size
|
|
59
|
+
// we have to exchange all materials for this to work
|
|
60
|
+
viewer.engine.clearInternalTexturesCache();
|
|
61
|
+
viewer.engine.getCaps().maxTextureSize = 1024;
|
|
62
|
+
|
|
63
|
+
viewer.scene.materials.forEach(material => _exchangePBRMaterial(material));
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
// create clones of each node (recursively), optionally exchange with cloned materials and mark these nodes for the
|
|
67
|
+
// export
|
|
68
|
+
viewer.scene.rootNodes.forEach(rootNode => _prepareNodeForExport(viewer, rootNode, null, excludeNodes));
|
|
69
|
+
|
|
70
|
+
const nodesForExport = _getNodesMarkedForExport(viewer);
|
|
71
|
+
// bake transformation of all meshes, so that no negative scalings are left
|
|
72
|
+
// it's important that this is done AFTER instanced meshes have been converted (`_prepareNodeForExport`)
|
|
73
|
+
nodesForExport.forEach(node => {
|
|
74
|
+
if (node instanceof Mesh) _bakeGeometryOfMesh(node);
|
|
75
|
+
});
|
|
76
|
+
|
|
77
|
+
// reset transformation of all "TransformNodes", as transform nodes are not considered by the `_bakeGeometryOfMesh`
|
|
78
|
+
// function, still their transformation data has to be reset to eliminated negative scalings and to make the baking of
|
|
79
|
+
// child meshes work
|
|
80
|
+
// it's important that this is done AFTER all geometries have been baked
|
|
81
|
+
nodesForExport.forEach(node => {
|
|
82
|
+
_resetTransformation(node);
|
|
83
|
+
// recompute world matrix immediately after adapting transformation
|
|
84
|
+
node.computeWorldMatrix(true);
|
|
85
|
+
});
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
/**
|
|
89
|
+
* Cleans up scene after export
|
|
90
|
+
*/
|
|
91
|
+
export async function exportPostProcess(viewer: Viewer, settings?: ExportPostProcessSettings): Promise<void> {
|
|
92
|
+
const { initialTextureSize } = settings ?? {};
|
|
93
|
+
|
|
94
|
+
// dispose all nodes, materials and textures that have only been created for the export
|
|
95
|
+
viewer.scene.rootNodes
|
|
96
|
+
.filter(rootNode => getInternalMetadataValue(rootNode, 'deleteAfterExport'))
|
|
97
|
+
.forEach(rootNode => rootNode.dispose());
|
|
98
|
+
|
|
99
|
+
viewer.scene.materials
|
|
100
|
+
.filter(material => getInternalMetadataValue(material, 'deleteAfterExport'))
|
|
101
|
+
.forEach(material => material.dispose(false, false));
|
|
102
|
+
// clean up temporary material exchange key for the rest of materials
|
|
103
|
+
viewer.scene.materials.forEach(material => clearInternalMetadataValue(material, 'exchangeMaterialWith'));
|
|
104
|
+
|
|
105
|
+
viewer.scene.textures
|
|
106
|
+
.filter(texture => getInternalMetadataValue(texture, 'deleteAfterExport'))
|
|
107
|
+
.forEach(texture => texture.dispose());
|
|
108
|
+
|
|
109
|
+
if (initialTextureSize) {
|
|
110
|
+
// reset texture size, only required if `scaleDownTextures` had been set in pre process
|
|
111
|
+
viewer.engine.getCaps().maxTextureSize = initialTextureSize;
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
viewer.resumeRendering();
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
/**
|
|
118
|
+
* Checks if a node should be available in the export
|
|
119
|
+
*/
|
|
120
|
+
export function isExportableTransformNode(node: Node, excludeNodes?: NodeDescription[]): node is TransformNode {
|
|
121
|
+
if (!(node instanceof TransformNode)) {
|
|
122
|
+
return false;
|
|
123
|
+
}
|
|
124
|
+
|
|
125
|
+
// maybe add some other criterias like "hasInfiniteDistance" and "isGeneratedBackgroundMesh" here as well
|
|
126
|
+
const isExcluded = nodeMatchesAnyCriteria(node, {
|
|
127
|
+
isInList: excludeNodes,
|
|
128
|
+
isDisabled: true,
|
|
129
|
+
isHtmlAnchorMesh: true,
|
|
130
|
+
});
|
|
131
|
+
|
|
132
|
+
return !isExcluded;
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
/**
|
|
136
|
+
* Triggers a browser download from a given object URL or data URI./
|
|
137
|
+
* The file will be saved with the specified file name.
|
|
138
|
+
*/
|
|
139
|
+
export function downloadFile(url: string, fileName: string): void {
|
|
140
|
+
const link = document.createElement('a');
|
|
141
|
+
link.href = url;
|
|
142
|
+
link.download = fileName;
|
|
143
|
+
document.body.appendChild(link);
|
|
144
|
+
link.click();
|
|
145
|
+
document.body.removeChild(link);
|
|
146
|
+
}
|
|
147
|
+
|
|
148
|
+
/**
|
|
149
|
+
* Add extension if not already set
|
|
150
|
+
*/
|
|
151
|
+
export function normalizeFileName(fileName: string | undefined, extension: string, defaultFileName?: string): string {
|
|
152
|
+
const extensionWithDot = '.' + extension;
|
|
153
|
+
const resFileName = fileName ?? defaultFileName ?? 'default';
|
|
154
|
+
|
|
155
|
+
return resFileName.endsWith(extensionWithDot) ? resFileName : resFileName + extensionWithDot;
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
/**
|
|
159
|
+
* Creates a clone of the material which should be used for the export.
|
|
160
|
+
* This is mostly required for recreating textures with lower sizes.
|
|
161
|
+
* CAUTION: Material exchanging is not supported for materials that contain certain texture types:
|
|
162
|
+
* - Dynamic textures (Paintables): Cloning dynamic textures doesn't clone the canvas context
|
|
163
|
+
* => so the clone is just empty
|
|
164
|
+
* - Render target textures: Disposing the clone will leave the scene in a "not ready" state
|
|
165
|
+
* => this scenario is not fully analyzed yet, but it's not really worth the effort right now, since this kind of
|
|
166
|
+
* of texture is not really used ATM
|
|
167
|
+
*/
|
|
168
|
+
function _exchangePBRMaterial(material: Material): void {
|
|
169
|
+
const baseTextures = material.getActiveTextures();
|
|
170
|
+
const hasDynamicTextures = baseTextures.some(texture => texture instanceof DynamicTexture);
|
|
171
|
+
const hasRenderTargetTextures = baseTextures.some(texture => texture instanceof RenderTargetTexture);
|
|
172
|
+
if (hasDynamicTextures || hasRenderTargetTextures) {
|
|
173
|
+
const textureTypesString = [
|
|
174
|
+
hasDynamicTextures ? 'Dynamic Textures' : '',
|
|
175
|
+
hasRenderTargetTextures ? 'Render Target Textures' : '',
|
|
176
|
+
]
|
|
177
|
+
.filter(Boolean)
|
|
178
|
+
.join();
|
|
179
|
+
console.warn(
|
|
180
|
+
`Couldn't exchange material "${material.name}" in export, as it contains unsupported texture type(s) (${textureTypesString}). The export will still work, but the textures of this material will keep their original size.`
|
|
181
|
+
);
|
|
182
|
+
|
|
183
|
+
return;
|
|
184
|
+
}
|
|
185
|
+
|
|
186
|
+
const newName = `${material.name}_clone`;
|
|
187
|
+
const clonedMaterial = material.clone(newName)!;
|
|
188
|
+
cloneInternalMetadata(material, clonedMaterial);
|
|
189
|
+
const clonedTextures = clonedMaterial.getActiveTextures();
|
|
190
|
+
|
|
191
|
+
// mark all exported textures, so that they will be deleted after the export
|
|
192
|
+
clonedTextures.forEach(texture => setInternalMetadataValue(texture, 'deleteAfterExport', true));
|
|
193
|
+
|
|
194
|
+
setInternalMetadataValue(material, 'exchangeMaterialWith', clonedMaterial.uniqueId);
|
|
195
|
+
setInternalMetadataValue(clonedMaterial, 'deleteAfterExport', true);
|
|
196
|
+
}
|
|
197
|
+
|
|
198
|
+
/**
|
|
199
|
+
* Creates a clone of the node which should be used for the export.
|
|
200
|
+
* Also switches to the cloned material if required.
|
|
201
|
+
*/
|
|
202
|
+
function _prepareNodeForExport(
|
|
203
|
+
viewer: Viewer,
|
|
204
|
+
node: Node,
|
|
205
|
+
clonedParent: TransformNode | null,
|
|
206
|
+
excludeNodes?: NodeDescription[]
|
|
207
|
+
): void {
|
|
208
|
+
if (!isExportableTransformNode(node, excludeNodes)) {
|
|
209
|
+
return;
|
|
210
|
+
}
|
|
211
|
+
|
|
212
|
+
// clone original node and create unique name (via uniqueId) for the export
|
|
213
|
+
const clonedNodeName = `${node.name}_${node.uniqueId}`;
|
|
214
|
+
const clonedNode =
|
|
215
|
+
node instanceof InstancedMesh
|
|
216
|
+
? _createMeshFromInstancedMesh(node, clonedNodeName, clonedParent)
|
|
217
|
+
: node.clone(clonedNodeName, clonedParent, true)!;
|
|
218
|
+
cloneInternalMetadata(node, clonedNode);
|
|
219
|
+
|
|
220
|
+
// exchange material
|
|
221
|
+
if (clonedNode instanceof Mesh) {
|
|
222
|
+
const exchangeWithMaterial =
|
|
223
|
+
clonedNode.material && getInternalMetadataValue(clonedNode.material, 'exchangeMaterialWith');
|
|
224
|
+
if (exchangeWithMaterial) {
|
|
225
|
+
clonedNode.material = viewer.scene.getMaterialByUniqueID(exchangeWithMaterial as number);
|
|
226
|
+
}
|
|
227
|
+
}
|
|
228
|
+
|
|
229
|
+
// signalize that this is a cloned node
|
|
230
|
+
setInternalMetadataValue(clonedNode, 'exportNode', true);
|
|
231
|
+
setInternalMetadataValue(clonedNode, 'deleteAfterExport', true);
|
|
232
|
+
|
|
233
|
+
// handle children
|
|
234
|
+
const childs = node.getChildTransformNodes(true);
|
|
235
|
+
childs.forEach(child => _prepareNodeForExport(viewer, child, clonedNode, excludeNodes));
|
|
236
|
+
}
|
|
237
|
+
|
|
238
|
+
/**
|
|
239
|
+
* Help function for receiving all nodes that are marked for the export
|
|
240
|
+
*/
|
|
241
|
+
function _getNodesMarkedForExport(viewer: Viewer): TransformNode[] {
|
|
242
|
+
const nodes = [...viewer.scene.meshes, ...viewer.scene.transformNodes];
|
|
243
|
+
|
|
244
|
+
const filteredNodes = nodes.filter(node => getInternalMetadataValue(node, 'exportNode'));
|
|
245
|
+
return filteredNodes;
|
|
246
|
+
}
|
|
247
|
+
|
|
248
|
+
/**
|
|
249
|
+
* Creates a "standard" mesh from an instanced mesh, by cloning the source mesh and applying transformation data
|
|
250
|
+
*/
|
|
251
|
+
function _createMeshFromInstancedMesh(
|
|
252
|
+
instancedMesh: InstancedMesh,
|
|
253
|
+
newName: string,
|
|
254
|
+
clonedParent: TransformNode | null
|
|
255
|
+
): Mesh {
|
|
256
|
+
// first create a clone of the source mesh
|
|
257
|
+
const newMesh = instancedMesh.sourceMesh.clone(newName, clonedParent, true);
|
|
258
|
+
cloneInternalMetadata(instancedMesh.sourceMesh, newMesh);
|
|
259
|
+
// apply the transformation data, it's important to create clones of the transformations to not touch the original
|
|
260
|
+
// transformation when applying changes (eg: geometry baking)
|
|
261
|
+
newMesh.position = instancedMesh.position.clone();
|
|
262
|
+
newMesh.rotation = instancedMesh.rotation.clone();
|
|
263
|
+
newMesh.scaling = instancedMesh.scaling.clone();
|
|
264
|
+
|
|
265
|
+
// rotation quaternion is optional
|
|
266
|
+
if (instancedMesh.rotationQuaternion) {
|
|
267
|
+
newMesh.rotationQuaternion = instancedMesh.rotationQuaternion.clone();
|
|
268
|
+
}
|
|
269
|
+
|
|
270
|
+
// also sync the enabled state from the original instanced mesh
|
|
271
|
+
newMesh.setEnabled(instancedMesh.isEnabled(false));
|
|
272
|
+
|
|
273
|
+
return newMesh;
|
|
274
|
+
}
|
|
275
|
+
|
|
276
|
+
/**
 * Removes transformation data from the mesh and stores it in the geometry, which is called "baking".
 * Also considers the geometry change from morph targets.
 *
 * Order matters here: geometry is made unique first (so shared geometries are untouched), then skeleton and morph
 * target influences are baked in, and only afterwards the current transform is baked into the vertices.
 */
function _bakeGeometryOfMesh(mesh: Mesh): void {
  if (!mesh.geometry) {
    // no geometry available, nothing to do
    return;
  }

  // geometries can be shared across multiple meshes, first make them unique to avoid side-effects
  mesh.makeGeometryUnique();

  // Babylon.js already provides a function for baking the current skeleton changes into the geometry
  if (mesh.skeleton) {
    mesh.applySkeleton(mesh.skeleton);
    mesh.skeleton = null;
  }

  // NOTE: in difference to skeletons and transformations there is no baking function for morph targets (yet)
  // however another approach could be to re-apply the position and normals data, as there are nice functions for it
  // - `getPositionData(applySkeleton: boolean = false, applyMorph: boolean = false)`
  // - `getNormalsData(applySkeleton: boolean = false, applyMorph: boolean = false)`
  // you can decide if skeletons and morph targets can be added, which is exactly what we want
  // I'm still hesitant to use it because "tangent" and "UV" kinds are not supported, whereas I'm not sure if it's
  // required
  // => try it out when there is enough time for detailed regression tests!
  const morphTargetManager = mesh.morphTargetManager;
  const geometry = mesh.geometry;

  if (morphTargetManager?.numTargets) {
    // apply morph target vertices data to mesh geometry for every kind a morph target can affect
    // (mostly only the "PositionKind" is implemented on targets; the helper is a no-op for missing kinds)
    _bakeMorphTargetManagerIntoVertices(VertexBuffer.PositionKind, morphTargetManager, geometry);
    _bakeMorphTargetManagerIntoVertices(VertexBuffer.NormalKind, morphTargetManager, geometry);
    _bakeMorphTargetManagerIntoVertices(VertexBuffer.TangentKind, morphTargetManager, geometry);
    _bakeMorphTargetManagerIntoVertices(VertexBuffer.UVKind, morphTargetManager, geometry);

    // remove morph target manager with all it's morph targets
    mesh.morphTargetManager = null;
  }

  // bake the transformation data in the mesh geometry, fortunately there is already a help function from Babylon.js
  mesh.bakeCurrentTransformIntoVertices();
}
|
|
321
|
+
|
|
322
|
+
/**
|
|
323
|
+
* Resets transformation to initial state
|
|
324
|
+
*/
|
|
325
|
+
function _resetTransformation(node: TransformNode): void {
|
|
326
|
+
node.position = new Vector3(0, 0, 0);
|
|
327
|
+
node.rotation = new Vector3(0, 0, 0);
|
|
328
|
+
node.rotationQuaternion = null;
|
|
329
|
+
node.scaling = new Vector3(1, 1, 1);
|
|
330
|
+
}
|
|
331
|
+
|
|
332
|
+
/**
|
|
333
|
+
* @param kind morph targets can affect various vertices kinds, whereas "position" is the most common one
|
|
334
|
+
* still other kinds (like normals or tangents) can be affected as well and can be provided on this input
|
|
335
|
+
*/
|
|
336
|
+
function _bakeMorphTargetManagerIntoVertices(
|
|
337
|
+
kind: string,
|
|
338
|
+
morphTargetManager: MorphTargetManager,
|
|
339
|
+
geometry: Geometry
|
|
340
|
+
): void {
|
|
341
|
+
const origVerticesData = geometry.getVerticesData(kind);
|
|
342
|
+
if (!origVerticesData) {
|
|
343
|
+
// no vertices data for this kind availabe on the mesh geometry
|
|
344
|
+
return;
|
|
345
|
+
}
|
|
346
|
+
|
|
347
|
+
let verticesData = [...origVerticesData];
|
|
348
|
+
for (let i = 0; i < morphTargetManager.numTargets; i++) {
|
|
349
|
+
const target = morphTargetManager.getTarget(i);
|
|
350
|
+
const targetVerticesData = _getVerticesDataFromMorphTarget(kind, target);
|
|
351
|
+
if (targetVerticesData) {
|
|
352
|
+
// vertices data of this kind are implemented on the morph target
|
|
353
|
+
// add the influence of this morph target to the vertices data
|
|
354
|
+
// formula is taken from: https://doc.babylonjs.com/features/featuresDeepDive/mesh/morphTargets#basics
|
|
355
|
+
verticesData = verticesData.map(
|
|
356
|
+
(oldVal, idx) => oldVal + (targetVerticesData[idx] - origVerticesData[idx]) * target.influence
|
|
357
|
+
);
|
|
358
|
+
}
|
|
359
|
+
}
|
|
360
|
+
|
|
361
|
+
// apply the updated vertices data
|
|
362
|
+
geometry.setVerticesData(kind, verticesData);
|
|
363
|
+
}
|
|
364
|
+
|
|
365
|
+
function _getVerticesDataFromMorphTarget(kind: string, morphTarget: MorphTarget): FloatArray | null {
|
|
366
|
+
switch (kind) {
|
|
367
|
+
case VertexBuffer.PositionKind:
|
|
368
|
+
return morphTarget.getPositions();
|
|
369
|
+
case VertexBuffer.NormalKind:
|
|
370
|
+
return morphTarget.getNormals();
|
|
371
|
+
case VertexBuffer.TangentKind:
|
|
372
|
+
return morphTarget.getTangents();
|
|
373
|
+
case VertexBuffer.UVKind:
|
|
374
|
+
return morphTarget.getUVs();
|
|
375
|
+
}
|
|
376
|
+
|
|
377
|
+
return null;
|
|
378
|
+
}
|
|
@@ -59,8 +59,8 @@ export function nodeMatchesAnyCriteria(nodeToCheck: TransformNode, criteria: Nod
|
|
|
59
59
|
matchesIsGeneratedBackgroundMesh ||
|
|
60
60
|
matchesIsHtmlAnchorMesh ||
|
|
61
61
|
matchesIsDimensionLine;
|
|
62
|
-
// consider parent as well, BUT ONLY for list and
|
|
63
|
-
// child-parent relation
|
|
62
|
+
// consider parent as well, BUT ONLY for list, disabled state and background mesh, as the other criterias have nothing
|
|
63
|
+
// to do with a child-parent relation
|
|
64
64
|
if (!matchesAnyCriteria && nodeToCheck.parent instanceof TransformNode) {
|
|
65
65
|
matchesAnyCriteria = nodeMatchesAnyCriteria(nodeToCheck.parent, {
|
|
66
66
|
isInList,
|
|
@@ -11,6 +11,7 @@ import {
|
|
|
11
11
|
ViewerErrorIds,
|
|
12
12
|
ViewerEvent,
|
|
13
13
|
} from '../index';
|
|
14
|
+
import { downloadFile } from '../internal/export-helper';
|
|
14
15
|
import { nodeMatchesAnyCriteria } from '../internal/node-helper';
|
|
15
16
|
import { createScreenshotCanvas, trimCanvas } from '../internal/screenshot-helper';
|
|
16
17
|
|
|
@@ -401,12 +402,7 @@ export class CameraManager {
|
|
|
401
402
|
if (fileName) {
|
|
402
403
|
// rebuild Babylon.js default behaviour: if a file name is given, download the screenshot instead of returning the
|
|
403
404
|
// data string
|
|
404
|
-
|
|
405
|
-
link.href = imageStr;
|
|
406
|
-
link.download = fileName;
|
|
407
|
-
document.body.appendChild(link);
|
|
408
|
-
link.click();
|
|
409
|
-
document.body.removeChild(link);
|
|
405
|
+
downloadFile(imageStr3d, fileName);
|
|
410
406
|
|
|
411
407
|
return '';
|
|
412
408
|
} else {
|
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
import { NodeDescription, Vector3, VertexBuffer, Viewer } from '../index';
|
|
2
|
+
import { downloadFile, exportPostProcess, exportPreProcess, normalizeFileName } from '../internal/export-helper';
|
|
3
|
+
import { getInternalMetadataValue } from '../internal/metadata-helper';
|
|
4
|
+
import { nodeMatchesAnyCriteria } from '../internal/node-helper';
|
|
5
|
+
import DRAWING, { Unit } from 'dxf-writer';
|
|
6
|
+
|
|
7
|
+
/** Re-export of the `dxf-writer` unit type, so consumers don't have to depend on the library directly. */
export type DxfUnit = Unit;

export type DxfExportSettings = {
  /** Target file name; the `.dxf` extension is appended if missing. Defaults to `'dxf-export'`. */
  fileName?: string;
  /** List of nodes that should be excluded from the DXF export */
  excludeNodes?: NodeDescription[];
  /**
   * Unit of the 3d model.\
   * If a unit is set, CAD tools are able to convert the values from the DXF into the configured unit of the CAD tool.
   */
  unit?: DxfUnit;
};
|
|
19
|
+
|
|
20
|
+
/**
 * Manager for creating DXF exports of the current viewer scene
 */
export class DxfExportManager {
  /** @internal */
  public constructor(protected viewer: Viewer) {}

  /**
   * Returns DXF export as `File` for further processing. (e.g. upload as a Cfgn file)
   */
  public async createDxf(settings?: DxfExportSettings): Promise<File> {
    const normalizedFileName = normalizeFileName(settings?.fileName, 'dxf', 'dxf-export');

    const dxfBlob = await this._createDxfBlob(settings);
    const dxfFile = new File([dxfBlob], normalizedFileName, { type: 'application/dxf' });

    return dxfFile;
  }

  /**
   * Creates DXF export and downloads the resulting file right away
   */
  public async downloadDxf(settings?: DxfExportSettings): Promise<void> {
    const normalizedFileName = normalizeFileName(settings?.fileName, 'dxf', 'dxf-export');

    const dxfBlob = await this._createDxfBlob(settings);

    // wrap the blob in a temporary object URL for the download and release it again afterwards
    const url = URL.createObjectURL(dxfBlob);
    downloadFile(url, normalizedFileName);
    URL.revokeObjectURL(url);
  }

  /**
   * Help function for creating a blob with the DXF string, as this is needed for both public interface functions
   * (`createDxf` & `downloadDxf`)
   */
  protected async _createDxfBlob(settings?: DxfExportSettings): Promise<Blob> {
    // Transformation baking pre process is required for DXF exports, as we take the plain vertex information for the
    // DXF conversion, therefore the vertices need to be located in world already
    await exportPreProcess(this.viewer, { excludeNodes: settings?.excludeNodes });
    const dxfString = this._createDxf(settings?.unit);
    await exportPostProcess(this.viewer);

    const dxfBlob = new Blob([dxfString], { type: 'application/dxf' });
    return dxfBlob;
  }

  /**
   * DXF creation process.\
   * We go through all vertices of all meshes and create DXF 3d faces accordingly.
   */
  protected _createDxf(unit?: DxfUnit): string {
    const DXF = new DRAWING();
    if (unit) {
      DXF.setUnits(unit);
    }

    const meshesToExport = this.viewer.scene.meshes.filter(
      mesh =>
        getInternalMetadataValue(mesh, 'exportNode') &&
        // NOTE: can't add this criteria in pre export process, as pre export process works recursively and childs of
        // transforms nodes or meshes without geometry can still have geometries and therefore be valid for the export
        !nodeMatchesAnyCriteria(mesh, { hasInvalidBoundingInfo: true })
    );

    meshesToExport.forEach(mesh => {
      const positions = mesh.getVerticesData(VertexBuffer.PositionKind);
      const indices = mesh.getIndices();
      if (!positions || !indices) {
        console.warn(`Can't create DXF export of mesh "${mesh.name}", as there is no vertex data available`);
        return;
      }

      for (let i = 0; i < indices.length; i += 3) {
        // A face always consists of 3 vertices.
        // First we need to get the indices of the vertices.
        const idx1 = indices[i];
        const idx2 = indices[i + 1];
        const idx3 = indices[i + 2];

        // Now get the x, y, z data of the associated index.
        // Each vertex occupies 3 consecutive floats in `positions`, that's why the index is multiplied by 3 before
        // accessing the x component; the y and z components have dedicated offsets 1 and 2.
        // ===============
        // Important NOTE:
        // ===============
        // DXF is right-handed, according to tests in AutoDesk it seems like y and z components are just switched.
        // So we use offset 2 for y and offset 1 for z.
        const p1 = new Vector3(positions[idx1 * 3], positions[idx1 * 3 + 2], positions[idx1 * 3 + 1]);
        const p2 = new Vector3(positions[idx2 * 3], positions[idx2 * 3 + 2], positions[idx2 * 3 + 1]);
        const p3 = new Vector3(positions[idx3 * 3], positions[idx3 * 3 + 2], positions[idx3 * 3 + 1]);

        // Add a 3D face to the DXF (3DFACE entity).
        // The library works with 4 points per face, but we only have 3.
        // Using the last point 3 times is fine though.
        DXF.drawFace(p1.x, p1.y, p1.z, p2.x, p2.y, p2.z, p3.x, p3.y, p3.z, p3.x, p3.y, p3.z);
      }
    });

    const dxfString = DXF.toDxfString();
    return dxfString;
  }
}
|