@needle-tools/three 0.169.10 → 0.169.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
|
@@ -0,0 +1,729 @@
|
|
|
1
|
+
import {
|
|
2
|
+
AnimationClip,
|
|
3
|
+
ColorKeyframeTrack,
|
|
4
|
+
InterpolateDiscrete,
|
|
5
|
+
InterpolateLinear,
|
|
6
|
+
NumberKeyframeTrack,
|
|
7
|
+
PropertyBinding,
|
|
8
|
+
QuaternionKeyframeTrack,
|
|
9
|
+
VectorKeyframeTrack,
|
|
10
|
+
SkinnedMesh
|
|
11
|
+
} from 'three';
|
|
12
|
+
|
|
13
|
+
// DUPLICATED from GLTFLoader.js
// Kinds of glTF objects a KHR_animation_pointer can address.
const ANIMATION_TARGET_TYPE = {
	node: 'node',
	material: 'material',
	camera: 'camera',
	light: 'light',
};

// Extension name as it appears in the glTF JSON.
const KHR_ANIMATION_POINTER = 'KHR_animation_pointer';

// DUPLICATED from GLTFLoader.js
const INTERPOLATION = {
	// We use a custom interpolant (GLTFCubicSplineInterpolation) for CUBICSPLINE tracks. Each
	// keyframe track will be initialized with a default interpolation type, then modified.
	CUBICSPLINE: undefined,
	LINEAR: InterpolateLinear,
	STEP: InterpolateDiscrete
};

// HACK monkey patching findNode to ensure we can map to other types required by KHR_animation_pointer.
// Original findNode, kept as the fallback for paths this extension does not handle.
const find = PropertyBinding.findNode;
// Flip to true to enable verbose console logging of path resolution.
const _animationPointerDebug = false;
// Module-level (NOT per-instance) guard so PropertyBinding.findNode is patched at most once.
let _havePatchedPropertyBindings = false;
|
|
36
|
+
|
|
37
|
+
/**
|
|
38
|
+
* Animation Pointer Extension
|
|
39
|
+
*
|
|
40
|
+
* Draft Specification: https://github.com/ux3d/glTF/tree/extensions/KHR_animation_pointer/extensions/2.0/Khronos/KHR_animation_pointer
|
|
41
|
+
*/
|
|
42
|
+
class GLTFAnimationPointerExtension {
|
|
43
|
+
|
|
44
|
+
	/**
	 * @param {Object} parser - The GLTFParser instance this extension plugin is registered with.
	 */
	constructor( parser ) {

		this.parser = parser;
		this.name = KHR_ANIMATION_POINTER;
		// Optional hook object with a resolvePath( path ) method; set via setAnimationPointerResolver.
		this.animationPointerResolver = null;

	}
|
|
51
|
+
|
|
52
|
+
setAnimationPointerResolver( animationPointerResolver ) {
|
|
53
|
+
|
|
54
|
+
this.animationPointerResolver = animationPointerResolver;
|
|
55
|
+
return this;
|
|
56
|
+
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
	/**
	 * Installs a replacement for PropertyBinding.findNode (once per module) so that
	 * tracks produced by this extension — whose paths start with '.materials.',
	 * '.nodes.', '.lights.' or '.cameras.' — can be bound to materials, textures
	 * and other non-Object3D targets. All other paths fall through to the
	 * original findNode implementation.
	 */
	_patchPropertyBindingFindNode() {

		// Module-level guard: the patch is global, apply it only once.
		if ( _havePatchedPropertyBindings ) return;
		_havePatchedPropertyBindings = true;

		// "node" is the Animator component in our case
		// "path" is the animated property path, just with translated material names.
		PropertyBinding.findNode = function ( node, path ) {

			if ( path.startsWith( '.materials.' ) ) {

				if ( _animationPointerDebug ) console.log( 'FIND', path );

				// NOTE(review): path.indexOf( '.' ) is 0 here (path starts with '.'),
				// so the second substring is effectively a no-op — the result is the
				// path with the '.materials.' prefix stripped.
				const remainingPath = path.substring( '.materials.'.length ).substring( path.indexOf( '.' ) );
				const nextIndex = remainingPath.indexOf( '.' );
				// First path segment identifies the material — by uuid or by name.
				const uuid = nextIndex < 0 ? remainingPath : remainingPath.substring( 0, nextIndex );
				let res = null;
				node.traverse( x => {

					// Skip once found; only meshes carry materials we care about.
					if ( res !== null || ( x.type !== 'Mesh' && x.type !== 'SkinnedMesh' ) ) return;
					if ( x[ 'material' ] && ( x[ 'material' ].uuid === uuid || x[ 'material' ].name === uuid ) ) {

						res = x[ 'material' ];
						if ( _animationPointerDebug ) console.log( res, remainingPath );
						if ( res !== null ) {

							// Texture-transform targets bind to the texture itself, not the material.
							if ( remainingPath.endsWith( '.map' ) )
								res = res[ 'map' ];
							else if ( remainingPath.endsWith( '.emissiveMap' ) )
								res = res[ 'emissiveMap' ];

							// TODO other texture slots only make sense if three.js actually supports them
							// (currently only .map can have repeat/offset)

						}

					}

				} );

				return res;

			} else if ( path.startsWith( '.nodes.' ) || path.startsWith( '.lights.' ) || path.startsWith( '.cameras.' ) ) {

				// Walk the dotted path segment by segment, resolving each step either
				// as a UUID lookup, an index/property access, or a node-name lookup.
				const sections = path.split( '.' );
				let currentTarget = undefined;
				for ( let i = 1; i < sections.length; i ++ ) {

					const val = sections[ i ];
					// Heuristic: canonical UUID strings are exactly 36 characters long.
					const isUUID = val.length == 36;
					if ( isUUID ) {

						// access by UUID
						currentTarget = node.getObjectByProperty( 'uuid', val );

					} else if ( currentTarget && currentTarget[ val ] ) {

						// access by index
						const index = Number.parseInt( val );
						let key = val;
						if ( index >= 0 ) key = index;
						currentTarget = currentTarget[ key ];
						if ( _animationPointerDebug )
							console.log( currentTarget );

					} else {

						// access by node name
						const foundNode = node.getObjectByName( val );

						if ( foundNode )
							currentTarget = foundNode;

					}

				}

				if ( ! currentTarget ) {

					// Fall back to the original resolver using the name/uuid segment.
					const originalFindResult = find( node, sections[ 2 ] );

					if ( ! originalFindResult )
						console.warn( KHR_ANIMATION_POINTER + ': Property binding not found', path, node, node.name, sections );

					return originalFindResult;

				}

				if ( _animationPointerDebug )
					console.log( 'NODE', path, currentTarget );

				return currentTarget;

			}

			// Not one of our special prefixes — defer to the stock implementation.
			return find( node, path );

		};

	}
|
|
159
|
+
|
|
160
|
+
/* DUPLICATE of functionality in GLTFLoader */
|
|
161
|
+
loadAnimationTargetFromChannel( animationChannel ) {
|
|
162
|
+
|
|
163
|
+
const target = animationChannel.target;
|
|
164
|
+
const name = target.node !== undefined ? target.node : target.id; // NOTE: target.id is deprecated.
|
|
165
|
+
return this.parser.getDependency( 'node', name );
|
|
166
|
+
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
loadAnimationTargetFromChannelWithAnimationPointer( animationChannel ) {
|
|
170
|
+
|
|
171
|
+
if ( ! this._havePatchedPropertyBindings )
|
|
172
|
+
this._patchPropertyBindingFindNode();
|
|
173
|
+
|
|
174
|
+
const target = animationChannel.target;
|
|
175
|
+
const useExtension = target.extensions && target.extensions[ KHR_ANIMATION_POINTER ] && target.path && target.path === 'pointer';
|
|
176
|
+
if ( ! useExtension ) return null;
|
|
177
|
+
|
|
178
|
+
let targetProperty = undefined;
|
|
179
|
+
|
|
180
|
+
// check if this is a extension animation
|
|
181
|
+
let type = ANIMATION_TARGET_TYPE.node;
|
|
182
|
+
let targetId = undefined;
|
|
183
|
+
|
|
184
|
+
if ( useExtension ) {
|
|
185
|
+
|
|
186
|
+
const ext = target.extensions[ KHR_ANIMATION_POINTER ];
|
|
187
|
+
let path = ext.pointer;
|
|
188
|
+
if ( _animationPointerDebug )
|
|
189
|
+
console.log( 'Original path: ' + path, target );
|
|
190
|
+
|
|
191
|
+
if ( ! path ) {
|
|
192
|
+
|
|
193
|
+
console.warn( 'Invalid path', ext, target );
|
|
194
|
+
return;
|
|
195
|
+
|
|
196
|
+
}
|
|
197
|
+
|
|
198
|
+
if ( path.startsWith( '/materials/' ) )
|
|
199
|
+
type = ANIMATION_TARGET_TYPE.material;
|
|
200
|
+
else if ( path.startsWith( '/extensions/KHR_lights_punctual/lights/' ) )
|
|
201
|
+
type = ANIMATION_TARGET_TYPE.light;
|
|
202
|
+
else if ( path.startsWith( '/cameras/' ) )
|
|
203
|
+
type = ANIMATION_TARGET_TYPE.camera;
|
|
204
|
+
|
|
205
|
+
targetId = this._tryResolveTargetId( path, type );
|
|
206
|
+
if ( targetId === null || isNaN( targetId ) ) {
|
|
207
|
+
|
|
208
|
+
console.warn( 'Failed resolving animation node id: ' + targetId, path );
|
|
209
|
+
return;
|
|
210
|
+
|
|
211
|
+
} else {
|
|
212
|
+
|
|
213
|
+
if ( _animationPointerDebug ) console.log( 'Resolved node ID for ' + type, targetId );
|
|
214
|
+
|
|
215
|
+
}
|
|
216
|
+
|
|
217
|
+
// TODO could be parsed better
|
|
218
|
+
switch ( type ) {
|
|
219
|
+
|
|
220
|
+
case ANIMATION_TARGET_TYPE.material:
|
|
221
|
+
const pathIndex = ( '/materials/' + targetId.toString() + '/' ).length;
|
|
222
|
+
const pathStart = path.substring( 0, pathIndex );
|
|
223
|
+
targetProperty = path.substring( pathIndex );
|
|
224
|
+
|
|
225
|
+
switch ( targetProperty ) {
|
|
226
|
+
|
|
227
|
+
// Core Spec PBR Properties
|
|
228
|
+
case 'pbrMetallicRoughness/baseColorFactor':
|
|
229
|
+
targetProperty = 'color';
|
|
230
|
+
break;
|
|
231
|
+
case 'pbrMetallicRoughness/roughnessFactor':
|
|
232
|
+
targetProperty = 'roughness';
|
|
233
|
+
break;
|
|
234
|
+
case 'pbrMetallicRoughness/metallicFactor':
|
|
235
|
+
targetProperty = 'metalness';
|
|
236
|
+
break;
|
|
237
|
+
case 'emissiveFactor':
|
|
238
|
+
targetProperty = 'emissive';
|
|
239
|
+
break;
|
|
240
|
+
case 'alphaCutoff':
|
|
241
|
+
targetProperty = 'alphaTest';
|
|
242
|
+
break;
|
|
243
|
+
case 'occlusionTexture/strength':
|
|
244
|
+
targetProperty = 'aoMapIntensity';
|
|
245
|
+
break;
|
|
246
|
+
case 'normalTexture/scale':
|
|
247
|
+
targetProperty = 'normalScale';
|
|
248
|
+
break;
|
|
249
|
+
|
|
250
|
+
// Core Spec + KHR_texture_transform
|
|
251
|
+
case 'pbrMetallicRoughness/baseColorTexture/extensions/KHR_texture_transform/scale':
|
|
252
|
+
targetProperty = 'map/repeat';
|
|
253
|
+
break;
|
|
254
|
+
case 'pbrMetallicRoughness/baseColorTexture/extensions/KHR_texture_transform/offset':
|
|
255
|
+
targetProperty = 'map/offset';
|
|
256
|
+
break;
|
|
257
|
+
|
|
258
|
+
// UV transforms for anything but map doesn't seem to currently be supported in three.js
|
|
259
|
+
case 'emissiveTexture/extensions/KHR_texture_transform/scale':
|
|
260
|
+
targetProperty = 'emissiveMap/repeat';
|
|
261
|
+
break;
|
|
262
|
+
case 'emissiveTexture/extensions/KHR_texture_transform/offset':
|
|
263
|
+
targetProperty = 'emissiveMap/offset';
|
|
264
|
+
break;
|
|
265
|
+
|
|
266
|
+
// KHR_materials_emissive_strength
|
|
267
|
+
case 'extensions/KHR_materials_emissive_strength/emissiveStrength':
|
|
268
|
+
targetProperty = 'emissiveIntensity';
|
|
269
|
+
break;
|
|
270
|
+
|
|
271
|
+
// KHR_materials_transmission
|
|
272
|
+
case 'extensions/KHR_materials_transmission/transmissionFactor':
|
|
273
|
+
targetProperty = 'transmission';
|
|
274
|
+
break;
|
|
275
|
+
|
|
276
|
+
// KHR_materials_ior
|
|
277
|
+
case 'extensions/KHR_materials_ior/ior':
|
|
278
|
+
targetProperty = 'ior';
|
|
279
|
+
break;
|
|
280
|
+
|
|
281
|
+
// KHR_materials_volume
|
|
282
|
+
case 'extensions/KHR_materials_volume/thicknessFactor':
|
|
283
|
+
targetProperty = 'thickness';
|
|
284
|
+
break;
|
|
285
|
+
case 'extensions/KHR_materials_volume/attenuationColor':
|
|
286
|
+
targetProperty = 'attenuationColor';
|
|
287
|
+
break;
|
|
288
|
+
case 'extensions/KHR_materials_volume/attenuationDistance':
|
|
289
|
+
targetProperty = 'attenuationDistance';
|
|
290
|
+
break;
|
|
291
|
+
|
|
292
|
+
// KHR_materials_iridescence
|
|
293
|
+
case 'extensions/KHR_materials_iridescence/iridescenceFactor':
|
|
294
|
+
targetProperty = 'iridescence';
|
|
295
|
+
break;
|
|
296
|
+
case 'extensions/KHR_materials_iridescence/iridescenceIor':
|
|
297
|
+
targetProperty = 'iridescenceIOR';
|
|
298
|
+
break;
|
|
299
|
+
case 'extensions/KHR_materials_iridescence/iridescenceThicknessMinimum':
|
|
300
|
+
targetProperty = 'iridescenceThicknessRange[0]';
|
|
301
|
+
break;
|
|
302
|
+
case 'extensions/KHR_materials_iridescence/iridescenceThicknessMaximum':
|
|
303
|
+
targetProperty = 'iridescenceThicknessRange[1]';
|
|
304
|
+
break;
|
|
305
|
+
|
|
306
|
+
// KHR_materials_clearcoat
|
|
307
|
+
case 'extensions/KHR_materials_clearcoat/clearcoatFactor':
|
|
308
|
+
targetProperty = 'clearcoat';
|
|
309
|
+
break;
|
|
310
|
+
case 'extensions/KHR_materials_clearcoat/clearcoatRoughnessFactor':
|
|
311
|
+
targetProperty = 'clearcoatRoughness';
|
|
312
|
+
break;
|
|
313
|
+
|
|
314
|
+
// KHR_materials_sheen
|
|
315
|
+
case 'extensions/KHR_materials_sheen/sheenColorFactor':
|
|
316
|
+
targetProperty = 'sheenColor';
|
|
317
|
+
break;
|
|
318
|
+
case 'extensions/KHR_materials_sheen/sheenRoughnessFactor':
|
|
319
|
+
targetProperty = 'sheenRoughness';
|
|
320
|
+
break;
|
|
321
|
+
|
|
322
|
+
// KHR_materials_specular
|
|
323
|
+
case 'extensions/KHR_materials_specular/specularFactor':
|
|
324
|
+
targetProperty = 'specularIntensity';
|
|
325
|
+
break;
|
|
326
|
+
case 'extensions/KHR_materials_specular/specularColorFactor':
|
|
327
|
+
targetProperty = 'specularColor';
|
|
328
|
+
break;
|
|
329
|
+
|
|
330
|
+
}
|
|
331
|
+
|
|
332
|
+
path = pathStart + targetProperty;
|
|
333
|
+
if ( _animationPointerDebug ) console.log( 'PROPERTY PATH', pathStart, targetProperty, path );
|
|
334
|
+
break;
|
|
335
|
+
|
|
336
|
+
case ANIMATION_TARGET_TYPE.node:
|
|
337
|
+
const pathIndexNode = ( '/nodes/' + targetId.toString() + '/' ).length;
|
|
338
|
+
const pathStartNode = path.substring( 0, pathIndexNode );
|
|
339
|
+
targetProperty = path.substring( pathIndexNode );
|
|
340
|
+
|
|
341
|
+
switch ( targetProperty ) {
|
|
342
|
+
|
|
343
|
+
case 'translation':
|
|
344
|
+
targetProperty = 'position';
|
|
345
|
+
break;
|
|
346
|
+
case 'rotation':
|
|
347
|
+
targetProperty = 'quaternion';
|
|
348
|
+
break;
|
|
349
|
+
case 'scale':
|
|
350
|
+
targetProperty = 'scale';
|
|
351
|
+
break;
|
|
352
|
+
case 'weights':
|
|
353
|
+
targetProperty = 'morphTargetInfluences';
|
|
354
|
+
break;
|
|
355
|
+
|
|
356
|
+
}
|
|
357
|
+
|
|
358
|
+
path = pathStartNode + targetProperty;
|
|
359
|
+
break;
|
|
360
|
+
|
|
361
|
+
case ANIMATION_TARGET_TYPE.light:
|
|
362
|
+
const pathIndexLight = ( '/extensions/KHR_lights_punctual/lights/' + targetId.toString() + '/' ).length;
|
|
363
|
+
targetProperty = path.substring( pathIndexLight );
|
|
364
|
+
|
|
365
|
+
switch ( targetProperty ) {
|
|
366
|
+
|
|
367
|
+
case 'color':
|
|
368
|
+
break;
|
|
369
|
+
case 'intensity':
|
|
370
|
+
break;
|
|
371
|
+
case 'spot/innerConeAngle':
|
|
372
|
+
// TODO would need to set .penumbra, but requires calculations on every animation change (?)
|
|
373
|
+
targetProperty = 'penumbra';
|
|
374
|
+
break;
|
|
375
|
+
case 'spot/outerConeAngle':
|
|
376
|
+
targetProperty = 'angle';
|
|
377
|
+
break;
|
|
378
|
+
case 'range':
|
|
379
|
+
targetProperty = 'distance';
|
|
380
|
+
break;
|
|
381
|
+
|
|
382
|
+
}
|
|
383
|
+
|
|
384
|
+
path = '/lights/' + targetId.toString() + '/' + targetProperty;
|
|
385
|
+
break;
|
|
386
|
+
|
|
387
|
+
case ANIMATION_TARGET_TYPE.camera:
|
|
388
|
+
const pathIndexCamera = ( '/cameras/' + targetId.toString() + '/' ).length;
|
|
389
|
+
const pathStartCamera = path.substring( 0, pathIndexCamera );
|
|
390
|
+
targetProperty = path.substring( pathIndexCamera );
|
|
391
|
+
|
|
392
|
+
switch ( targetProperty ) {
|
|
393
|
+
|
|
394
|
+
case 'perspective/yfov':
|
|
395
|
+
targetProperty = 'fov';
|
|
396
|
+
break;
|
|
397
|
+
case 'perspective/znear':
|
|
398
|
+
case 'orthographic/znear':
|
|
399
|
+
targetProperty = 'near';
|
|
400
|
+
break;
|
|
401
|
+
case 'perspective/zfar':
|
|
402
|
+
case 'orthographic/zfar':
|
|
403
|
+
targetProperty = 'far';
|
|
404
|
+
break;
|
|
405
|
+
case 'perspective/aspect':
|
|
406
|
+
targetProperty = 'aspect';
|
|
407
|
+
break;
|
|
408
|
+
// these two write to the same target property since three.js orthographic camera only supports 'zoom'.
|
|
409
|
+
// TODO should there be a warning for either of them? E.g. a warning for "xmag" so that "yfov" + "ymag" work by default?
|
|
410
|
+
case 'orthographic/xmag':
|
|
411
|
+
targetProperty = 'zoom';
|
|
412
|
+
break;
|
|
413
|
+
case 'orthographic/ymag':
|
|
414
|
+
targetProperty = 'zoom';
|
|
415
|
+
break;
|
|
416
|
+
|
|
417
|
+
}
|
|
418
|
+
|
|
419
|
+
path = pathStartCamera + targetProperty;
|
|
420
|
+
break;
|
|
421
|
+
|
|
422
|
+
}
|
|
423
|
+
|
|
424
|
+
const pointerResolver = this.animationPointerResolver;
|
|
425
|
+
if ( pointerResolver && pointerResolver.resolvePath ) {
|
|
426
|
+
|
|
427
|
+
path = pointerResolver.resolvePath( path );
|
|
428
|
+
|
|
429
|
+
}
|
|
430
|
+
|
|
431
|
+
target.extensions[ KHR_ANIMATION_POINTER ].pointer = path;
|
|
432
|
+
|
|
433
|
+
}
|
|
434
|
+
|
|
435
|
+
if ( targetId === null || isNaN( targetId ) ) {
|
|
436
|
+
|
|
437
|
+
console.warn( 'Failed resolving animation node id: ' + targetId, target );
|
|
438
|
+
return;
|
|
439
|
+
|
|
440
|
+
}
|
|
441
|
+
|
|
442
|
+
let depPromise;
|
|
443
|
+
|
|
444
|
+
if ( type === ANIMATION_TARGET_TYPE.node )
|
|
445
|
+
depPromise = this.parser.getDependency( 'node', targetId );
|
|
446
|
+
else if ( type === ANIMATION_TARGET_TYPE.material )
|
|
447
|
+
depPromise = this.parser.getDependency( 'material', targetId );
|
|
448
|
+
else if ( type === ANIMATION_TARGET_TYPE.light )
|
|
449
|
+
depPromise = this.parser.getDependency( 'light', targetId );
|
|
450
|
+
else if ( type === ANIMATION_TARGET_TYPE.camera )
|
|
451
|
+
depPromise = this.parser.getDependency( 'camera', targetId );
|
|
452
|
+
else
|
|
453
|
+
console.error( 'Unhandled type', type );
|
|
454
|
+
|
|
455
|
+
return depPromise;
|
|
456
|
+
|
|
457
|
+
}
|
|
458
|
+
|
|
459
|
+
createAnimationTracksWithAnimationPointer( node, inputAccessor, outputAccessor, sampler, target ) {
|
|
460
|
+
|
|
461
|
+
const useExtension = target.extensions && target.extensions[ KHR_ANIMATION_POINTER ] && target.path && target.path === 'pointer';
|
|
462
|
+
if ( ! useExtension ) return null;
|
|
463
|
+
|
|
464
|
+
let animationPointerPropertyPath = target.extensions[ KHR_ANIMATION_POINTER ].pointer;
|
|
465
|
+
if ( ! animationPointerPropertyPath ) return null;
|
|
466
|
+
|
|
467
|
+
const tracks = [];
|
|
468
|
+
|
|
469
|
+
animationPointerPropertyPath = animationPointerPropertyPath.replaceAll( '/', '.' );
|
|
470
|
+
// replace node/material/camera/light ID by UUID
|
|
471
|
+
const parts = animationPointerPropertyPath.split( '.' );
|
|
472
|
+
const hasName = node.name !== undefined && node.name !== null;
|
|
473
|
+
var nodeTargetName = hasName ? node.name : node.uuid;
|
|
474
|
+
parts[ 2 ] = nodeTargetName;
|
|
475
|
+
|
|
476
|
+
// specially handle the morphTargetInfluences property for multi-material meshes
|
|
477
|
+
// in which case the target object is a Group and the children are the actual targets
|
|
478
|
+
// see NE-3311
|
|
479
|
+
if ( parts[ 3 ] === 'morphTargetInfluences' ) {
|
|
480
|
+
|
|
481
|
+
if ( node.type === 'Group' ) {
|
|
482
|
+
|
|
483
|
+
if ( _animationPointerDebug )
|
|
484
|
+
console.log( 'Detected multi-material skinnedMesh export', animationPointerPropertyPath, node );
|
|
485
|
+
|
|
486
|
+
// We assume the children are skinned meshes
|
|
487
|
+
for ( const ch of node.children ) {
|
|
488
|
+
|
|
489
|
+
if ( ch instanceof SkinnedMesh && ch.morphTargetInfluences ) {
|
|
490
|
+
|
|
491
|
+
parts[ 3 ] = ch.name;
|
|
492
|
+
parts[ 4 ] = 'morphTargetInfluences';
|
|
493
|
+
__createTrack( this.parser );
|
|
494
|
+
|
|
495
|
+
}
|
|
496
|
+
|
|
497
|
+
}
|
|
498
|
+
|
|
499
|
+
return tracks;
|
|
500
|
+
|
|
501
|
+
}
|
|
502
|
+
|
|
503
|
+
}
|
|
504
|
+
|
|
505
|
+
// default
|
|
506
|
+
__createTrack( this.parser );
|
|
507
|
+
|
|
508
|
+
/** Create a new track using the current parts array */
|
|
509
|
+
function __createTrack( parser ) {
|
|
510
|
+
|
|
511
|
+
animationPointerPropertyPath = parts.join( '.' );
|
|
512
|
+
|
|
513
|
+
if ( _animationPointerDebug )
|
|
514
|
+
console.log( node, inputAccessor, outputAccessor, target, animationPointerPropertyPath );
|
|
515
|
+
|
|
516
|
+
let TypedKeyframeTrack;
|
|
517
|
+
|
|
518
|
+
switch ( outputAccessor.itemSize ) {
|
|
519
|
+
|
|
520
|
+
case 1:
|
|
521
|
+
TypedKeyframeTrack = NumberKeyframeTrack;
|
|
522
|
+
break;
|
|
523
|
+
case 2:
|
|
524
|
+
case 3:
|
|
525
|
+
TypedKeyframeTrack = VectorKeyframeTrack;
|
|
526
|
+
break;
|
|
527
|
+
case 4:
|
|
528
|
+
|
|
529
|
+
if ( animationPointerPropertyPath.endsWith( '.quaternion' ) )
|
|
530
|
+
TypedKeyframeTrack = QuaternionKeyframeTrack;
|
|
531
|
+
else
|
|
532
|
+
TypedKeyframeTrack = ColorKeyframeTrack;
|
|
533
|
+
|
|
534
|
+
break;
|
|
535
|
+
|
|
536
|
+
}
|
|
537
|
+
|
|
538
|
+
const interpolation = sampler.interpolation !== undefined ? INTERPOLATION[ sampler.interpolation ] : InterpolateLinear;
|
|
539
|
+
|
|
540
|
+
let outputArray = parser._getArrayFromAccessor( outputAccessor );
|
|
541
|
+
|
|
542
|
+
// convert fov values from radians to degrees
|
|
543
|
+
if ( animationPointerPropertyPath.endsWith( '.fov' ) ) {
|
|
544
|
+
|
|
545
|
+
outputArray = outputArray.map( value => value / Math.PI * 180 );
|
|
546
|
+
|
|
547
|
+
}
|
|
548
|
+
|
|
549
|
+
const track = new TypedKeyframeTrack(
|
|
550
|
+
animationPointerPropertyPath,
|
|
551
|
+
inputAccessor.array,
|
|
552
|
+
outputArray,
|
|
553
|
+
interpolation
|
|
554
|
+
);
|
|
555
|
+
|
|
556
|
+
// Override interpolation with custom factory method.
|
|
557
|
+
if ( interpolation === 'CUBICSPLINE' ) {
|
|
558
|
+
|
|
559
|
+
parser._createCubicSplineTrackInterpolant( track );
|
|
560
|
+
|
|
561
|
+
}
|
|
562
|
+
|
|
563
|
+
tracks.push( track );
|
|
564
|
+
|
|
565
|
+
// glTF has opacity animation as last component of baseColorFactor,
|
|
566
|
+
// so we need to split that up here and create a separate opacity track if that is animated.
|
|
567
|
+
if ( animationPointerPropertyPath && outputAccessor.itemSize === 4 &&
|
|
568
|
+
animationPointerPropertyPath.startsWith( '.materials.' ) && animationPointerPropertyPath.endsWith( '.color' ) ) {
|
|
569
|
+
|
|
570
|
+
const opacityArray = new Float32Array( outputArray.length / 4 );
|
|
571
|
+
|
|
572
|
+
for ( let j = 0, jl = outputArray.length / 4; j < jl; j += 1 ) {
|
|
573
|
+
|
|
574
|
+
opacityArray[ j ] = outputArray[ j * 4 + 3 ];
|
|
575
|
+
|
|
576
|
+
}
|
|
577
|
+
|
|
578
|
+
const opacityTrack = new TypedKeyframeTrack(
|
|
579
|
+
animationPointerPropertyPath.replace( '.color', '.opacity' ),
|
|
580
|
+
inputAccessor.array,
|
|
581
|
+
opacityArray,
|
|
582
|
+
interpolation
|
|
583
|
+
);
|
|
584
|
+
|
|
585
|
+
// Override interpolation with custom factory method.
|
|
586
|
+
if ( interpolation === 'CUBICSPLINE' ) {
|
|
587
|
+
|
|
588
|
+
parser._createCubicSplineTrackInterpolant( track );
|
|
589
|
+
|
|
590
|
+
}
|
|
591
|
+
|
|
592
|
+
tracks.push( opacityTrack );
|
|
593
|
+
|
|
594
|
+
}
|
|
595
|
+
|
|
596
|
+
}
|
|
597
|
+
|
|
598
|
+
return tracks;
|
|
599
|
+
|
|
600
|
+
}
|
|
601
|
+
|
|
602
|
+
_tryResolveTargetId( path, type ) {
|
|
603
|
+
|
|
604
|
+
let name = '';
|
|
605
|
+
if ( type === 'node' ) {
|
|
606
|
+
|
|
607
|
+
name = path.substring( '/nodes/'.length );
|
|
608
|
+
|
|
609
|
+
} else if ( type === 'material' ) {
|
|
610
|
+
|
|
611
|
+
name = path.substring( '/materials/'.length );
|
|
612
|
+
|
|
613
|
+
} else if ( type === 'light' ) {
|
|
614
|
+
|
|
615
|
+
name = path.substring( '/extensions/KHR_lights_punctual/lights/'.length );
|
|
616
|
+
|
|
617
|
+
} else if ( type === 'camera' ) {
|
|
618
|
+
|
|
619
|
+
name = path.substring( '/cameras/'.length );
|
|
620
|
+
|
|
621
|
+
}
|
|
622
|
+
|
|
623
|
+
name = name.substring( 0, name.indexOf( '/' ) );
|
|
624
|
+
const index = Number.parseInt( name );
|
|
625
|
+
return index;
|
|
626
|
+
|
|
627
|
+
}
|
|
628
|
+
|
|
629
|
+
	/* MOSTLY DUPLICATE of GLTFLoader.loadAnimation, but also tries to resolve KHR_animation_pointer. */
	/**
	 * Specification: https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#animations
	 * @param {number} animationIndex
	 * @return {Promise<AnimationClip>}
	 */
	loadAnimation( animationIndex ) {

		const me = this;
		const json = this.parser.json;
		const parser = this.parser;

		const animationDef = json.animations[ animationIndex ];
		const animationName = animationDef.name ? animationDef.name : 'animation_' + animationIndex;

		// Per-channel parallel arrays; indices stay aligned across all five.
		const pendingNodes = [];
		const pendingInputAccessors = [];
		const pendingOutputAccessors = [];
		const pendingSamplers = [];
		const pendingTargets = [];

		for ( let i = 0, il = animationDef.channels.length; i < il; i ++ ) {

			const channel = animationDef.channels[ i ];
			const sampler = animationDef.samplers[ channel.sampler ];
			const target = channel.target;
			// animationDef.parameters is legacy glTF 1.0-style indirection.
			const input = animationDef.parameters !== undefined ? animationDef.parameters[ sampler.input ] : sampler.input;
			const output = animationDef.parameters !== undefined ? animationDef.parameters[ sampler.output ] : sampler.output;

			// Prefer the KHR_animation_pointer resolution; fall back to the core path.
			let nodeDependency = me.loadAnimationTargetFromChannelWithAnimationPointer( channel );
			if ( ! nodeDependency )
				nodeDependency = me.loadAnimationTargetFromChannel( channel );

			pendingNodes.push( nodeDependency );
			pendingInputAccessors.push( parser.getDependency( 'accessor', input ) );
			pendingOutputAccessors.push( parser.getDependency( 'accessor', output ) );
			pendingSamplers.push( sampler );
			pendingTargets.push( target );

		}

		return Promise.all( [

			Promise.all( pendingNodes ),
			Promise.all( pendingInputAccessors ),
			Promise.all( pendingOutputAccessors ),
			Promise.all( pendingSamplers ),
			Promise.all( pendingTargets )

		] ).then( function ( dependencies ) {

			const nodes = dependencies[ 0 ];
			const inputAccessors = dependencies[ 1 ];
			const outputAccessors = dependencies[ 2 ];
			const samplers = dependencies[ 3 ];
			const targets = dependencies[ 4 ];

			const tracks = [];

			for ( let i = 0, il = nodes.length; i < il; i ++ ) {

				const node = nodes[ i ];
				const inputAccessor = inputAccessors[ i ];
				const outputAccessor = outputAccessors[ i ];
				const sampler = samplers[ i ];
				const target = targets[ i ];

				// Channels whose target could not be resolved are skipped silently.
				if ( node === undefined ) continue;

				// Only Object3D-like targets have updateMatrix; materials/textures do not.
				if ( node.updateMatrix ) {

					node.updateMatrix();
					node.matrixAutoUpdate = true;

				}

				// Pointer-based track creation first; core GLTFLoader tracks as fallback.
				let createdTracks = me.createAnimationTracksWithAnimationPointer( node, inputAccessor, outputAccessor, sampler, target );
				if ( ! createdTracks )
					createdTracks = parser._createAnimationTracks( node, inputAccessor, outputAccessor, sampler, target );

				if ( createdTracks ) {

					for ( let k = 0; k < createdTracks.length; k ++ ) {

						tracks.push( createdTracks[ k ] );

					}

				}

			}

			// undefined duration → three.js computes it from the tracks.
			return new AnimationClip( animationName, undefined, tracks );

		} );

	}
|
|
726
|
+
|
|
727
|
+
}
|
|
728
|
+
|
|
729
|
+
export { GLTFAnimationPointerExtension };
|