@sage-rsc/talking-head-react 1.3.8 → 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -349,8 +349,7 @@ class TalkingHead {
  this.posePropNames = [...names];

  // Use "side" as the first pose, weight on left leg
- // Note: This will be overridden by gender-specific selection when avatar loads
- this.poseName = "side"; // First pose (default, will be gender-adjusted on avatar load)
+ this.poseName = "side"; // First pose
  this.poseWeightOnLeft = true; // Initial weight on left leg
  this.gesture = null; // Values that override pose properties
  this.poseCurrentTemplate = this.poseTemplates[this.poseName];
@@ -375,7 +374,6 @@ class TalkingHead {
  // 1. State (idle, speaking, listening)
  // 2. Mood (moodX, moodY)
  // 3. Pose (poseX, poseY)
- // 4. Body Movement (walking, prancing, gesturing, dancing, excited)
  // 5. View (full, upper, head)
  // 6. Body form ('M','F')
  // 7. Alt (sequence of objects with propabilities p. If p is not
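The hierarchy comment above ends at the `alt` rule (item 7): a sequence of objects carrying probabilities `p`. A minimal sketch of how such weighted selection typically works — a hedged illustration, not the package's actual selection code (`pickAlt` is a hypothetical helper name):

```js
// Hedged sketch: pick one entry from an `alt` array by its probability `p`.
// Entries without `p` share whatever probability mass is left over.
function pickAlt(alt) {
  const assigned = alt.reduce((sum, o) => sum + (o.p ?? 0), 0);
  const unassigned = alt.filter(o => o.p === undefined).length;
  const fallbackP = unassigned ? Math.max(0, 1 - assigned) / unassigned : 0;
  let r = Math.random();
  for (const o of alt) {
    r -= (o.p ?? fallbackP);
    if (r <= 0) return o;
  }
  return alt[alt.length - 1]; // guard against floating-point leftovers
}

// With the 'happy' idle template below, this would yield the 'side' pose
// roughly 60% of the time.
```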
@@ -453,18 +451,16 @@ class TalkingHead {
  ]
  },
  'happy' : {
- baseline: { mouthSmile: 0.2, eyesLookDown: 0 },
+ baseline: { mouthSmile: 0.2, eyesLookDown: 0.1 },
  speech: { deltaRate: 0, deltaPitch: 0.1, deltaVolume: 0 },
  anims: [
  { name: 'breathing', delay: 1500, dt: [ 1200,500,1000 ], vs: { chestInhale: [0.5,0.5,0] } },
  { name: 'pose',
  idle: {
  alt: [
- { p: 0.6, delay: [5000,30000], vs: { pose: ['side'] },
- 'M': { delay: [5000,30000], vs: { pose: ['wide'] } }
- },
+ { p: 0.6, delay: [5000,30000], vs: { pose: ['side'] } },
  { p: 0.2, delay: [5000,30000], vs: { pose: ['hip'] },
- 'M': { delay: [5000,30000], vs: { pose: ['wide'] } }
+ 'M': { delay: [5000,30000], vs: { pose: ['side'] } }
  },
  { p: 0.1, delay: [5000,30000], vs: { pose: ['straight'] } },
  { delay: [5000,10000], vs: { pose: ['wide'] } },
@@ -473,12 +469,8 @@ class TalkingHead {
  },
  speaking: {
  alt: [
- { p: 0.4, delay: [5000,30000], vs: { pose: ['side'] },
- 'M': { delay: [5000,30000], vs: { pose: ['wide'] } }
- },
- { p: 0.4, delay: [5000,30000], vs: { pose: ['straight'] },
- 'M': { delay: [5000,30000], vs: { pose: ['wide'] } }
- },
+ { p: 0.4, delay: [5000,30000], vs: { pose: ['side'] } },
+ { p: 0.4, delay: [5000,30000], vs: { pose: ['straight'] } },
  { delay: [5000,20000], vs: { pose: ['hip'] },
  'M': { delay: [5000,30000], vs: { pose: ['wide'] } }
  },
@@ -773,6 +765,10 @@ class TalkingHead {
  this.animClips = [];
  this.animPoses = [];

+ // Animate
+ this.animate = this.animate.bind(this);
+ this._raf = null;
+
  // Clock
  this.animFrameDur = 1000/ this.opt.modelFPS;
  this.animClock = 0;
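This hunk pre-binds `animate` and stores the `requestAnimationFrame` handle in `this._raf`, which makes the render loop cancelable. The matching `stop()` is not shown in this diff; a hedged sketch of the pattern these two fields enable:

```js
// Hedged sketch of the start/stop pattern the two new fields enable; the
// package's actual start()/stop() are outside this diff.
class Loop {
  constructor() {
    this.animate = this.animate.bind(this); // one stable callback reference
    this._raf = null;
  }
  animate(t) {
    this._raf = requestAnimationFrame(this.animate); // reschedule next frame
    // ... advance the model here ...
  }
  start() {
    if (this._raf === null) this._raf = requestAnimationFrame(this.animate);
  }
  stop() {
    if (this._raf !== null) cancelAnimationFrame(this._raf); // cancel pending frame
    this._raf = null;
  }
}
```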
@@ -780,12 +776,9 @@ class TalkingHead {
  this.animTimeLast = 0;
  this.easing = this.sigmoidFactory(5); // Ease in and out

- // Lip-sync extensions, import statically
+ // Lip-sync extensions, import dynamically
  this.lipsync = {};
- this.opt.lipsyncModules.forEach( x => {
- // Load synchronously using statically imported modules
- this.lipsyncGetProcessor(x);
- });
+ this.opt.lipsyncModules.forEach( x => this.lipsyncGetProcessor(x) );
  this.visemeNames = [
  'aa', 'E', 'I', 'O', 'U', 'PP', 'SS', 'TH', 'DD', 'FF', 'kk',
  'nn', 'RR', 'CH', 'sil'
@@ -960,9 +953,6 @@ class TalkingHead {
  this.audioAnalyzerNode.smoothingTimeConstant = 0.1;
  this.audioAnalyzerNode.minDecibels = -70;
  this.audioAnalyzerNode.maxDecibels = -10;
-
- // Audio analyzer for precise lip-sync
- this.audioAnalyzer = new AudioAnalyzer(this.audioCtx);
  this.audioReverbNode = this.audioCtx.createConvolver();

  // Connect nodes
@@ -1122,20 +1112,24 @@ class TalkingHead {
  * Clear 3D object.
  * @param {Object} obj Object
  */
- clearThree(obj){
- while( obj.children.length ){
+ clearThree(obj) {
+ while (obj.children.length) {
  this.clearThree(obj.children[0]);
  obj.remove(obj.children[0]);
  }
- if ( obj.geometry ) obj.geometry.dispose();

- if ( obj.material ) {
- Object.keys(obj.material).forEach( x => {
- if ( obj.material[x] && obj.material[x] !== null && typeof obj.material[x].dispose === 'function' ) {
- obj.material[x].dispose();
- }
- });
- obj.material.dispose();
+ if (obj.geometry) obj.geometry.dispose();
+
+ if (obj.material) {
+ if (Array.isArray(obj.material)) {
+ obj.material.forEach(m => {
+ if (m.map) m.map.dispose();
+ m.dispose();
+ });
+ } else {
+ if (obj.material.map) obj.material.map.dispose();
+ obj.material.dispose();
+ }
  }
  }

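The rewritten `clearThree` handles array materials (used by three.js for multi-material meshes) and disposes each material's base color texture. Materials can hold other GPU-backed textures as well; a hedged sketch of a broader helper, in case those slots also need freeing (`disposeMaterial` is illustrative, not part of the package):

```js
// Hedged sketch: dispose every texture slot a material may hold, not only
// `map`. The slot names are standard three.js material properties.
function disposeMaterial(material) {
  for (const key of ['map', 'normalMap', 'roughnessMap', 'metalnessMap',
                     'aoMap', 'emissiveMap', 'alphaMap', 'envMap']) {
    if (material[key] && typeof material[key].dispose === 'function') {
      material[key].dispose();
    }
  }
  material.dispose();
}

// Usage inside clearThree would normalize both cases to one code path:
// (Array.isArray(obj.material) ? obj.material : [obj.material]).forEach(disposeMaterial);
```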
@@ -1244,19 +1238,27 @@ class TalkingHead {
  this.stop();
  this.avatar = avatar;

- // Initialize body movement properties
+ // Initialize custom properties
  this.bodyMovement = avatar.bodyMovement || 'idle';
  this.movementIntensity = avatar.movementIntensity || 0.5;
- this.showFullAvatar = avatar.showFullAvatar || false;
-
+ this.lockedPosition = null;
+ this.originalPosition = null;
+ this.positionWasLocked = false;
+
  // Initialize FBX animation loader
  this.fbxAnimationLoader = null;

  // Dispose Dynamic Bones
  this.dynamicbones.dispose();

- // Clear previous scene, if avatar was previously loaded
- this.mixer = null;
+ // Clear previous mixer/scene, if avatar was previously loaded
+ if (this.mixer) {
+ this.mixer.removeEventListener('finished', this._mixerHandler);
+ this.mixer.stopAllAction();
+ this.mixer.uncacheRoot(this.armature);
+ this.mixer = null;
+ this._mixerHandler = null;
+ }
  if ( this.isAvatarOnly ) {
  if ( this.armature ) {
  this.clearThree( this.armature );
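Mixer teardown now detaches the stored `finished` listener before dropping the reference, which avoids leaking listeners and cached bindings across avatar loads. The registration side of `_mixerHandler` is outside this diff; a hedged sketch of what it presumably looks like (`createMixer` and the handler body are assumptions):

```js
import * as THREE from 'three';

// Hedged sketch: the listener that the cleanup above removes has to be stored
// when the mixer is created. Hypothetical registration site:
function createMixer(head) {
  head.mixer = new THREE.AnimationMixer(head.armature);
  head._mixerHandler = (event) => {
    event.action.stop(); // e.g. release a one-shot clip once it finishes
  };
  head.mixer.addEventListener('finished', head._mixerHandler);
}
```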
@@ -1570,7 +1572,7 @@ class TalkingHead {
  * Render scene.
  */
  render() {
- if ( this.isRunning && !this.isAvatarOnly && this.renderer ) {
+ if ( this.isRunning && !this.isAvatarOnly ) {
  this.renderer.render( this.scene, this.camera );
  }
  }
@@ -1579,7 +1581,7 @@ class TalkingHead {
  * Resize avatar.
  */
  onResize() {
- if ( !this.isAvatarOnly && this.renderer ) {
+ if ( !this.isAvatarOnly ) {
  this.camera.aspect = this.nodeAvatar.clientWidth / this.nodeAvatar.clientHeight;
  this.camera.updateProjectionMatrix();
  this.renderer.setSize( this.nodeAvatar.clientWidth, this.nodeAvatar.clientHeight );
@@ -1612,6 +1614,23 @@ class TalkingHead {
  // Apply shoulder adjustment to lower shoulders
  this.applyShoulderAdjustment();
  }
+
+ /**
+ * Update avatar pose deltas
+ */
+ updatePoseDelta() {
+ for( const [key,d] of Object.entries(this.poseDelta.props) ) {
+ if ( d.x === 0 && d.y === 0 && d.z === 0 ) continue;
+ e.set(d.x,d.y,d.z);
+ const o = this.poseAvatar.props[key];
+ if ( o.isQuaternion ) {
+ q.setFromEuler(e);
+ o.multiply(q);
+ } else if ( o.isVector3 ) {
+ o.add( e );
+ }
+ }
+ }

  /**
  * Apply shoulder adjustment to lower shoulders to a more natural position
@@ -1675,23 +1694,6 @@ class TalkingHead {
  }
  }

- /**
- * Update avatar pose deltas
- */
- updatePoseDelta() {
- for( const [key,d] of Object.entries(this.poseDelta.props) ) {
- if ( d.x === 0 && d.y === 0 && d.z === 0 ) continue;
- e.set(d.x,d.y,d.z);
- const o = this.poseAvatar.props[key];
- if ( o.isQuaternion ) {
- q.setFromEuler(e);
- o.multiply(q);
- } else if ( o.isVector3 ) {
- o.add( e );
- }
- }
- }
-
  /**
  * Update morph target values.
  * @param {number} dt Delta time in ms.
@@ -2156,523 +2158,87 @@ class TalkingHead {

  }

+
  /**
- * Initialize FBX animation loader
- */
- async initializeFBXAnimationLoader() {
- try {
- // Dynamic import to avoid loading issues
- const { FBXAnimationLoader } = await import('./fbxAnimationLoader.js');
- this.fbxAnimationLoader = new FBXAnimationLoader(this.armature);
- console.log('FBX Animation Loader initialized');
- } catch (error) {
- console.warn('FBX Animation Loader not available:', error);
- this.fbxAnimationLoader = null;
- }
+ * Get morph target names.
+ * @return {string[]} Morph target names.
+ */
+ getMorphTargetNames() {
+ return [ 'eyesRotateX', 'eyesRotateY', ...Object.keys(this.mtAvatar)].sort();
  }

  /**
- * Set body movement type.
- * @param {string} movement Movement type (idle, walking, prancing, gesturing, dancing, excited).
- */
- setBodyMovement(movement) {
- this.bodyMovement = movement;
-
- // Only set avatar property if avatar exists
- if (this.avatar) {
- this.avatar.bodyMovement = movement;
- }
-
- console.log('Body movement set to:', movement);
-
- // Respect the current showFullAvatar setting instead of forcing it to true
- // Only unlock position when returning to idle
- if (movement === 'idle') {
- // Unlock position when returning to idle
- this.unlockAvatarPosition();
+ * Get baseline value for the morph target.
+ * @param {string} mt Morph target name
+ * @return {number} Value, null if not in baseline
+ */
+ getBaselineValue( mt ) {
+ if ( mt === 'eyesRotateY' ) {
+ const ll = this.getBaselineValue('eyeLookOutLeft');
+ if ( ll === undefined ) return undefined;
+ const lr = this.getBaselineValue('eyeLookInLeft');
+ if ( lr === undefined ) return undefined;
+ const rl = this.getBaselineValue('eyeLookOutRight');
+ if ( rl === undefined ) return undefined;
+ const rr = this.getBaselineValue('eyeLookInRight');
+ if ( rr === undefined ) return undefined;
+ return ll - lr;
+ } else if ( mt === 'eyesRotateX' ) {
+ const d = this.getBaselineValue('eyesLookDown');
+ if ( d === undefined ) return undefined;
+ const u = this.getBaselineValue('eyesLookUp');
+ if ( u === undefined ) return undefined;
+ return d - u;
+ } else {
+ return this.mtAvatar[mt]?.baseline;
  }
- // Note: We no longer force showFullAvatar to true for body movements
- // The avatar will use whatever showFullAvatar value was set by the user
-
- // Apply body movement animation
- this.applyBodyMovementAnimation();
  }

  /**
- * Apply body movement animation based on current movement type.
- */
- async applyBodyMovementAnimation() {
- // Check if avatar is ready
- if (!this.armature || !this.animQueue) {
- console.log('Avatar not ready for body movement animations');
- return;
- }
-
- console.log('Avatar is running:', this.isRunning);
- console.log('Animation queue exists:', !!this.animQueue);
-
- // Remove existing body movement animations
- const beforeLength = this.animQueue.length;
- this.animQueue = this.animQueue.filter(anim => !anim.template.name.startsWith('bodyMovement'));
- const afterLength = this.animQueue.length;
- console.log(`Filtered animation queue: ${beforeLength} -> ${afterLength} animations`);
-
- if (this.bodyMovement === 'idle') {
- // Stop FBX animations if any
- if (this.fbxAnimationLoader) {
- this.fbxAnimationLoader.stopCurrentAnimation();
- }
- return; // No body movement for idle
- }
-
- // Try to use FBX animations first
- if (this.fbxAnimationLoader) {
- try {
- await this.fbxAnimationLoader.playGestureAnimation(this.bodyMovement, this.movementIntensity);
- console.log('Applied FBX body movement animation:', this.bodyMovement);
- return; // Successfully applied FBX animation
- } catch (error) {
- console.warn('FBX animation failed, falling back to code animation:', error);
- }
- }
-
- // Fallback to code-based animations
- const movementAnim = this.createBodyMovementAnimation(this.bodyMovement);
- console.log('Created movement animation:', movementAnim);
- if (movementAnim) {
- try {
- // Use animFactory to create proper animation object
- const animObj = this.animFactory(movementAnim, true); // true for looping
-
- // Validate the animation object before adding
- if (animObj && animObj.ts && animObj.ts.length > 0) {
- this.animQueue.push(animObj);
- console.log('Applied code-based body movement animation:', this.bodyMovement);
- console.log('Animation queue length:', this.animQueue.length);
- console.log('Animation object:', animObj);
- } else {
- console.error('Invalid animation object created for:', this.bodyMovement);
- console.error('Animation object:', animObj);
- }
- } catch (error) {
- console.error('Error creating body movement animation:', error);
+ * Set baseline for morph target.
+ * @param {string} mt Morph target name
+ * @param {number} val Value, null if to be removed from baseline
+ */
+ setBaselineValue( mt, val ) {
+ if ( mt === 'eyesRotateY' ) {
+ this.setBaselineValue('eyeLookOutLeft', (val === null) ? null : (val>0 ? val : 0) );
+ this.setBaselineValue('eyeLookInLeft', (val === null) ? null : (val>0 ? 0 : -val) );
+ this.setBaselineValue('eyeLookOutRight', (val === null) ? null : (val>0 ? 0 : -val) );
+ this.setBaselineValue('eyeLookInRight', (val === null) ? null : (val>0 ? val : 0) );
+ } else if ( mt === 'eyesRotateX' ) {
+ this.setBaselineValue('eyesLookDown', (val === null) ? null : (val>0 ? val : 0) );
+ this.setBaselineValue('eyesLookUp', (val === null) ? null : (val>0 ? 0 : -val) );
+ } else {
+ if ( this.mtAvatar.hasOwnProperty(mt) ) {
+ Object.assign(this.mtAvatar[mt],{ base: null, baseline: val, needsUpdate: true });
  }
  }
  }

  /**
- * Lock avatar position to prevent movement during animations.
- */
- lockAvatarPosition() {
- if (!this.armature) {
- console.warn('Cannot lock position: armature not available');
- return;
- }
-
- // Store the original position if not already stored
- if (!this.originalPosition) {
- this.originalPosition = {
- x: this.armature.position.x,
- y: this.armature.position.y,
- z: this.armature.position.z
- };
- console.log('Original position stored:', this.originalPosition);
- }
-
- // Lock the avatar at its CURRENT position (don't move it)
- this.lockedPosition = {
- x: this.armature.position.x,
- y: this.armature.position.y,
- z: this.armature.position.z
- };
-
- console.log('Avatar position locked at current position:', this.lockedPosition);
- }
-
- /**
- * Unlock avatar position and restore original position.
- */
- unlockAvatarPosition() {
- if (this.armature && this.originalPosition) {
- // Restore avatar to its original position before locking
- this.armature.position.set(
- this.originalPosition.x,
- this.originalPosition.y,
- this.originalPosition.z
- );
- console.log('Avatar position restored to original:', this.originalPosition);
- } else if (this.armature) {
- // Fallback: reset to center if no original position was stored
- this.armature.position.set(0, 0, 0);
- console.log('Avatar position reset to center (0,0,0)');
- }
- this.lockedPosition = null;
- this.originalPosition = null; // Clear original position after unlock
- console.log('Avatar position unlocked');
- }
-
- /**
- * Ensure avatar stays at locked position.
- */
- maintainLockedPosition() {
- if (this.lockedPosition && this.armature) {
- // Enforce the locked position - keep avatar exactly where it was locked
- // This prevents FBX animations from moving the avatar
- this.armature.position.set(
- this.lockedPosition.x,
- this.lockedPosition.y,
- this.lockedPosition.z
- );
- }
- }
-
- /**
- * Create body movement animation.
- * @param {string} movementType Movement type.
- * @returns {Object} Animation object.
- */
- createBodyMovementAnimation(movementType) {
- const intensity = this.movementIntensity || 0.5;
-
- const movementAnimations = {
- walking: {
- name: 'bodyMovement_walking',
- delay: [500, 2000],
- dt: [800, 1200],
- vs: {
- bodyRotateY: [-0.1 * intensity, 0.1 * intensity, 0],
- bodyRotateZ: [-0.05 * intensity, 0.05 * intensity, 0],
- bodyRotateX: [-0.02 * intensity, 0.02 * intensity, 0]
- }
- },
- prancing: {
- name: 'bodyMovement_prancing',
- delay: [300, 1000],
- dt: [400, 800],
- vs: {
- bodyRotateY: [-0.15 * intensity, 0.15 * intensity, 0],
- bodyRotateZ: [-0.08 * intensity, 0.08 * intensity, 0],
- bodyRotateX: [-0.05 * intensity, 0.05 * intensity, 0]
- }
- },
- gesturing: {
- name: 'bodyMovement_gesturing',
- delay: [400, 1500],
- dt: [600, 1000],
- vs: {
- bodyRotateY: [-0.08 * intensity, 0.08 * intensity, 0],
- bodyRotateZ: [-0.03 * intensity, 0.03 * intensity, 0]
- }
- },
- dancing: {
- name: 'bodyMovement_dancing',
- delay: [200, 600],
- dt: [400, 800],
- vs: {
- bodyRotateY: [-0.25 * intensity, 0.25 * intensity, 0],
- bodyRotateZ: [-0.15 * intensity, 0.15 * intensity, 0],
- bodyRotateX: [-0.1 * intensity, 0.1 * intensity, 0]
- }
- },
- dancing2: {
- name: 'bodyMovement_dancing2',
- delay: [150, 500],
- dt: [300, 700],
- vs: {
- bodyRotateY: [-0.3 * intensity, 0.3 * intensity, 0],
- bodyRotateZ: [-0.2 * intensity, 0.2 * intensity, 0],
- bodyRotateX: [-0.12 * intensity, 0.12 * intensity, 0]
- }
- },
- dancing3: {
- name: 'bodyMovement_dancing3',
- delay: [100, 400],
- dt: [200, 600],
- vs: {
- bodyRotateY: [-0.35 * intensity, 0.35 * intensity, 0],
- bodyRotateZ: [-0.25 * intensity, 0.25 * intensity, 0],
- bodyRotateX: [-0.15 * intensity, 0.15 * intensity, 0]
- }
- },
- excited: {
- name: 'bodyMovement_excited',
- delay: [200, 600],
- dt: [300, 700],
- vs: {
- bodyRotateY: [-0.12 * intensity, 0.12 * intensity, 0],
- bodyRotateZ: [-0.06 * intensity, 0.06 * intensity, 0],
- bodyRotateX: [-0.04 * intensity, 0.04 * intensity, 0]
- }
- },
- happy: {
- name: 'bodyMovement_happy',
- delay: [300, 800],
- dt: [500, 1000],
- vs: {
- bodyRotateY: [-0.08 * intensity, 0.08 * intensity, 0],
- bodyRotateZ: [-0.04 * intensity, 0.04 * intensity, 0],
- bodyRotateX: [-0.02 * intensity, 0.02 * intensity, 0]
- }
- },
- surprised: {
- name: 'bodyMovement_surprised',
- delay: [100, 300],
- dt: [200, 500],
- vs: {
- bodyRotateY: [-0.05 * intensity, 0.05 * intensity, 0],
- bodyRotateZ: [-0.03 * intensity, 0.03 * intensity, 0],
- bodyRotateX: [-0.01 * intensity, 0.01 * intensity, 0]
- }
- },
- thinking: {
- name: 'bodyMovement_thinking',
- delay: [800, 2000],
- dt: [1000, 1500],
- vs: {
- bodyRotateY: [-0.06 * intensity, 0.06 * intensity, 0],
- bodyRotateZ: [-0.03 * intensity, 0.03 * intensity, 0],
- bodyRotateX: [-0.02 * intensity, 0.02 * intensity, 0]
- }
- },
- nodding: {
- name: 'bodyMovement_nodding',
- delay: [400, 800],
- dt: [300, 600],
- vs: {
- bodyRotateX: [-0.1 * intensity, 0.1 * intensity, 0],
- bodyRotateY: [-0.02 * intensity, 0.02 * intensity, 0]
- }
- },
- shaking: {
- name: 'bodyMovement_shaking',
- delay: [200, 400],
- dt: [150, 300],
- vs: {
- bodyRotateY: [-0.15 * intensity, 0.15 * intensity, 0],
- bodyRotateZ: [-0.05 * intensity, 0.05 * intensity, 0]
- }
- },
- celebration: {
- name: 'bodyMovement_celebration',
- delay: [100, 300],
- dt: [200, 500],
- vs: {
- bodyRotateY: [-0.2 * intensity, 0.2 * intensity, 0],
- bodyRotateZ: [-0.1 * intensity, 0.1 * intensity, 0],
- bodyRotateX: [-0.08 * intensity, 0.08 * intensity, 0]
- }
- },
- energetic: {
- name: 'bodyMovement_energetic',
- delay: [150, 400],
- dt: [250, 500],
- vs: {
- bodyRotateY: [-0.18 * intensity, 0.18 * intensity, 0],
- bodyRotateZ: [-0.12 * intensity, 0.12 * intensity, 0],
- bodyRotateX: [-0.08 * intensity, 0.08 * intensity, 0]
- }
- },
- swaying: {
- name: 'bodyMovement_swaying',
- delay: [600, 1200],
- dt: [800, 1000],
- vs: {
- bodyRotateY: [-0.1 * intensity, 0.1 * intensity, 0],
- bodyRotateZ: [-0.05 * intensity, 0.05 * intensity, 0]
- }
- },
- bouncing: {
- name: 'bodyMovement_bouncing',
- delay: [300, 600],
- dt: [400, 700],
- vs: {
- bodyRotateY: [-0.05 * intensity, 0.05 * intensity, 0]
- }
- }
- };
-
- // Handle dance variations
- if (movementType === 'dancing') {
- const danceVariations = ['dancing', 'dancing2', 'dancing3'];
- const randomDance = danceVariations[Math.floor(Math.random() * danceVariations.length)];
- return movementAnimations[randomDance] || movementAnimations['dancing'];
- }
-
- return movementAnimations[movementType] || null;
- }
-
- /**
- * Play a random dance animation
- */
- playRandomDance() {
- const danceVariations = ['dancing', 'dancing2', 'dancing3'];
- const randomDance = danceVariations[Math.floor(Math.random() * danceVariations.length)];
- this.setBodyMovement(randomDance);
- }
-
- /**
- * Play a reaction animation
- * @param {string} reactionType - Type of reaction (happy, surprised, thinking, etc.)
- */
- playReaction(reactionType) {
- const validReactions = ['happy', 'surprised', 'thinking', 'nodding', 'shaking', 'celebration', 'energetic', 'swaying', 'bouncing'];
- if (validReactions.includes(reactionType)) {
- this.setBodyMovement(reactionType);
-
- // Auto-return to idle after a delay for non-looping reactions
- const nonLoopingReactions = ['surprised', 'nodding', 'shaking', 'celebration'];
- if (nonLoopingReactions.includes(reactionType)) {
- setTimeout(() => {
- this.setBodyMovement('idle');
- }, 3000); // Return to idle after 3 seconds
- }
- } else {
- console.warn('Invalid reaction type:', reactionType);
- }
- }
-
- /**
- * Play a celebration sequence
- */
- playCelebration() {
- this.playReaction('celebration');
-
- // After celebration, play a random dance
- setTimeout(() => {
- this.playRandomDance();
- }, 2000);
- }
-
- /**
- * Set movement intensity.
- * @param {number} intensity Movement intensity (0-1).
- */
- setMovementIntensity(intensity) {
- this.movementIntensity = Math.max(0, Math.min(1, intensity));
-
- // Only set avatar property if avatar exists
- if (this.avatar) {
- this.avatar.movementIntensity = this.movementIntensity;
- }
-
- console.log('Movement intensity set to:', this.movementIntensity);
-
- // Update FBX animation intensity if available
- if (this.fbxAnimationLoader) {
- this.fbxAnimationLoader.setIntensity(this.movementIntensity);
- }
-
- // Reapply body movement animation with new intensity
- this.applyBodyMovementAnimation();
- }
-
- /**
- * Set show full avatar.
- * @param {boolean} show Whether to show full avatar.
- */
- setShowFullAvatar(show) {
- this.showFullAvatar = show;
-
- // Only set avatar property if avatar exists
- if (this.avatar) {
- this.avatar.showFullAvatar = show;
- }
-
- console.log('Show full avatar set to:', show);
-
- // Only change camera view if it's not already set to the desired view
- // This prevents the avatar from sliding down when starting animations
- if (show && this.viewName !== 'full') {
- console.log('Changing camera view to full');
- this.setView('full');
- } else if (!show && this.viewName !== 'upper') {
- console.log('Changing camera view to upper');
- this.setView('upper');
- } else {
- console.log('Camera view already set to:', this.viewName);
- }
- }
-
- /**
- * Get morph target names.
- * @return {string[]} Morph target names.
- */
- getMorphTargetNames() {
- return [ 'eyesRotateX', 'eyesRotateY', ...Object.keys(this.mtAvatar)].sort();
- }
-
- /**
- * Get baseline value for the morph target.
- * @param {string} mt Morph target name
- * @return {number} Value, null if not in baseline
- */
- getBaselineValue( mt ) {
- if ( mt === 'eyesRotateY' ) {
- const ll = this.getBaselineValue('eyeLookOutLeft');
- if ( ll === undefined ) return undefined;
- const lr = this.getBaselineValue('eyeLookInLeft');
- if ( lr === undefined ) return undefined;
- const rl = this.getBaselineValue('eyeLookOutRight');
- if ( rl === undefined ) return undefined;
- const rr = this.getBaselineValue('eyeLookInRight');
- if ( rr === undefined ) return undefined;
- return ll - lr;
- } else if ( mt === 'eyesRotateX' ) {
- const d = this.getBaselineValue('eyesLookDown');
- if ( d === undefined ) return undefined;
- const u = this.getBaselineValue('eyesLookUp');
- if ( u === undefined ) return undefined;
- return d - u;
- } else {
- return this.mtAvatar[mt]?.baseline;
- }
- }
-
- /**
- * Set baseline for morph target.
- * @param {string} mt Morph target name
- * @param {number} val Value, null if to be removed from baseline
- */
- setBaselineValue( mt, val ) {
- if ( mt === 'eyesRotateY' ) {
- this.setBaselineValue('eyeLookOutLeft', (val === null) ? null : (val>0 ? val : 0) );
- this.setBaselineValue('eyeLookInLeft', (val === null) ? null : (val>0 ? 0 : -val) );
- this.setBaselineValue('eyeLookOutRight', (val === null) ? null : (val>0 ? 0 : -val) );
- this.setBaselineValue('eyeLookInRight', (val === null) ? null : (val>0 ? val : 0) );
- } else if ( mt === 'eyesRotateX' ) {
- this.setBaselineValue('eyesLookDown', (val === null) ? null : (val>0 ? val : 0) );
- this.setBaselineValue('eyesLookUp', (val === null) ? null : (val>0 ? 0 : -val) );
- } else {
- if ( this.mtAvatar.hasOwnProperty(mt) ) {
- Object.assign(this.mtAvatar[mt],{ base: null, baseline: val, needsUpdate: true });
- }
- }
- }
-
- /**
- * Get fixed value for the morph target.
- * @param {string} mt Morph target name
- * @return {number} Value, null if not fixed
- */
- getFixedValue( mt ) {
- if ( mt === 'eyesRotateY' ) {
- const ll = this.getFixedValue('eyeLookOutLeft');
- if ( ll === null ) return null;
- const lr = this.getFixedValue('eyeLookInLeft');
- if ( lr === null ) return null;
- const rl = this.getFixedValue('eyeLookOutRight');
- if ( rl === null ) return null;
- const rr = this.getFixedValue('eyeLookInRight');
- if ( rr === null ) return null;
- return ll - lr;
- } else if ( mt === 'eyesRotateX' ) {
- const d = this.getFixedValue('eyesLookDown');
- if ( d === null ) return null;
- const u = this.getFixedValue('eyesLookUp');
- if ( u === null ) return null;
- return d - u;
- } else {
- return this.mtAvatar[mt]?.fixed;
+ * Get fixed value for the morph target.
+ * @param {string} mt Morph target name
+ * @return {number} Value, null if not fixed
+ */
+ getFixedValue( mt ) {
+ if ( mt === 'eyesRotateY' ) {
+ const ll = this.getFixedValue('eyeLookOutLeft');
+ if ( ll === null ) return null;
+ const lr = this.getFixedValue('eyeLookInLeft');
+ if ( lr === null ) return null;
+ const rl = this.getFixedValue('eyeLookOutRight');
+ if ( rl === null ) return null;
+ const rr = this.getFixedValue('eyeLookInRight');
+ if ( rr === null ) return null;
+ return ll - lr;
+ } else if ( mt === 'eyesRotateX' ) {
+ const d = this.getFixedValue('eyesLookDown');
+ if ( d === null ) return null;
+ const u = this.getFixedValue('eyesLookUp');
+ if ( u === null ) return null;
+ return d - u;
+ } else {
+ return this.mtAvatar[mt]?.fixed;
  }
  }

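The surviving morph-target accessors treat `eyesRotateX`/`eyesRotateY` as composites over the four ARKit-style eye-look blendshapes. A worked example of the decomposition, derived directly from the branches above (`head` stands for a TalkingHead instance):

```js
// Worked example of the composite decomposition above; the values follow
// directly from the setBaselineValue branches shown in this hunk.
head.setBaselineValue('eyesRotateY', 0.3);
// val > 0, so:
//   eyeLookOutLeft  = 0.3   eyeLookInLeft  = 0
//   eyeLookOutRight = 0     eyeLookInRight = 0.3
// getBaselineValue('eyesRotateY') then returns ll - lr = 0.3 - 0 = 0.3.

head.setBaselineValue('eyesRotateX', -0.2);
// val < 0, so eyesLookDown = 0 and eyesLookUp = 0.2;
// getBaselineValue('eyesRotateX') returns d - u = 0 - 0.2 = -0.2.
```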
@@ -2714,10 +2280,6 @@ class TalkingHead {
  let a = t;
  while(1) {
  if ( a.hasOwnProperty(this.stateName) ) {
- // Debug: Log state selection
- if (this.stateName === 'speaking' || this.stateName === 'idle') {
- console.log('Selected state:', this.stateName, 'for avatar body:', this.avatar?.body);
- }
  a = a[this.stateName];
  } else if ( a.hasOwnProperty(this.moodName) ) {
  a = a[this.moodName];
@@ -2725,9 +2287,7 @@ class TalkingHead {
  a = a[this.poseName];
  } else if ( a.hasOwnProperty(this.viewName) ) {
  a = a[this.viewName];
- } else if ( this.avatar && this.avatar.body && a.hasOwnProperty(this.avatar.body) ) {
- // Debug: Log gender-specific override
- console.log('Applying gender-specific override:', this.avatar.body, 'for state:', this.stateName, 'keys:', Object.keys(a));
+ } else if ( this.avatar.body && a.hasOwnProperty(this.avatar.body) ) {
  a = a[this.avatar.body];
  } else if ( a.hasOwnProperty('alt') ) {

@@ -2747,12 +2307,6 @@ class TalkingHead {
  }
  }
  a = b;
- // Debug: Log selected alternative and check for gender override
- if (this.avatar && this.avatar.body && a.hasOwnProperty(this.avatar.body)) {
- console.log('Found gender override in selected alternative:', this.avatar.body, 'keys:', Object.keys(a));
- }
- // Continue loop to check for gender-specific override after selecting alternative
- continue;

  } else {
  break;
@@ -2783,10 +2337,6 @@ class TalkingHead {
  }

  // Values
- // Debug: Log pose selection
- if (a.vs && a.vs.pose) {
- console.log('Pose being selected from vs.pose:', a.vs.pose, 'for avatar body:', this.avatar?.body);
- }
  for( let [mt,vs] of Object.entries(a.vs) ) {
  const base = this.getBaselineValue(mt);
  const vals = vs.map( x => {
@@ -2796,11 +2346,6 @@ class TalkingHead {
  } else if ( typeof x === 'function' ) {
  return x;
  } else if ( typeof x === 'string' || x instanceof String ) {
- // Intercept pose values and override 'hip' and 'side' to 'wide' for male avatars
- if (mt === 'pose' && this.avatar && this.avatar.body === 'M' && (x === 'hip' || x === 'side')) {
- console.log('Intercepting pose', x, 'in animation factory, overriding to wide for male avatar');
- return 'wide'; // Always use 'wide' for male avatars, never 'side' or 'hip'
- }
  return x.slice();
  } else if ( Array.isArray(x) ) {
  if ( mt === 'gesture' ) {
@@ -2913,7 +2458,7 @@ class TalkingHead {
  if ( this.isAvatarOnly ) {
  dt = t;
  } else {
- requestAnimationFrame( this.animate.bind(this) );
+ this._raf = requestAnimationFrame( this.animate );
  dt = t - this.animTimeLast;
  if ( dt < this.animFrameDur ) return;
  this.animTimeLast = t;
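Storing the handle assigned on each reschedule means `this._raf` always refers to the next pending frame. Together with the `dt < this.animFrameDur` early return visible in the context lines, this gives a browser-rate loop whose model updates are capped at `modelFPS`. A hedged standalone sketch of that capping pattern (`makeCappedLoop` is illustrative, not package code):

```js
// Hedged sketch of the frame-capping pattern used above.
function makeCappedLoop(update, modelFPS) {
  const frameDur = 1000 / modelFPS;
  let last = 0;
  let raf = null;
  const tick = (t) => {
    raf = requestAnimationFrame(tick); // always reschedule at browser rate
    if (t - last < frameDur) return;   // skip until modelFPS worth of time passed
    last = t;
    update(t);
  };
  return {
    start: () => { raf = requestAnimationFrame(tick); },
    stop: () => { if (raf !== null) cancelAnimationFrame(raf); raf = null; },
  };
}
```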
@@ -2997,7 +2542,7 @@ class TalkingHead {
  const tasks = [];
  for( i=0, l=this.animQueue.length; i<l; i++ ) {
  const x = this.animQueue[i];
- if ( !x || !x.ts || !x.ts.length || this.animClock < x.ts[0] ) continue;
+ if ( this.animClock < x.ts[0] ) continue;

  for( j = x.ndx || 0, k = x.ts.length; j<k; j++ ) {
  if ( this.animClock < x.ts[j] ) break;
@@ -3091,18 +2636,7 @@ class TalkingHead {
  break;

  case 'pose':
- // Ensure gender-appropriate pose for male avatars - always use 'wide', never 'side' or 'hip'
- if (this.avatar && this.avatar.body === 'M') {
- if (j === 'hip' || j === 'side') {
- // Always override 'hip' and 'side' to 'wide' for male avatars
- if (this.poseTemplates['wide']) {
- j = 'wide';
- console.log('Overriding pose', j === 'hip' ? 'hip' : 'side', 'to wide for male avatar');
- }
- }
- }
  this.poseName = j;
- console.log('Setting pose to:', this.poseName, 'for avatar body:', this.avatar?.body, 'state:', this.stateName);
  this.setPoseFromTemplate( this.poseTemplates[ this.poseName ] );
  break;

@@ -3334,28 +2868,17 @@ class TalkingHead {
  }

  /**
- * Get lip-sync processor based on language. Uses statically imported modules.
+ * Get lip-sync processor based on language. Import module dynamically.
  * @param {string} lang Language
- * @param {string} [path="./"] Module path (ignored, kept for compatibility)
+ * @param {string} [path="./"] Module path
  */
  lipsyncGetProcessor(lang, path="./") {
  if ( !this.lipsync.hasOwnProperty(lang) ) {
- const langLower = lang.toLowerCase();
+ const moduleName = path + 'lipsync-' + lang.toLowerCase() + '.mjs';
  const className = 'Lipsync' + lang.charAt(0).toUpperCase() + lang.slice(1);
-
- try {
- // Use statically imported module
- const module = LIPSYNC_MODULES[langLower];
-
- if (module && module[className]) {
+ import(moduleName).then( module => {
  this.lipsync[lang] = new module[className];
- console.log(`Loaded lip-sync module for ${lang}`);
- } else {
- console.warn(`Lip-sync module for ${lang} not found. Available modules:`, Object.keys(LIPSYNC_MODULES));
- }
- } catch (error) {
- console.warn(`Failed to load lip-sync module for ${lang}:`, error);
- }
+ });
  }
  }

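`import()` returns a promise, so `lipsyncGetProcessor` now kicks off the load and returns before the processor exists; speech queued immediately after construction could race the module load. A hedged sketch of an awaitable variant, should a caller need to guarantee readiness (`lipsyncEnsureProcessor` is not part of the package API; the module-name pattern mirrors the code above):

```js
// Hedged sketch: an awaitable variant of the loader above.
async function lipsyncEnsureProcessor(head, lang, path = './') {
  if (!head.lipsync[lang]) {
    const moduleName = path + 'lipsync-' + lang.toLowerCase() + '.mjs';
    const className = 'Lipsync' + lang.charAt(0).toUpperCase() + lang.slice(1);
    const module = await import(moduleName);
    head.lipsync[lang] = new module[className]();
  }
  return head.lipsync[lang];
}

// await lipsyncEnsureProcessor(head, 'en') before queueing speech that depends
// on text-to-viseme conversion.
```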
@@ -3744,589 +3267,148 @@ class TalkingHead {
  }

  if ( onsubtitles ) {
- o.onSubtitles = onsubtitles;
- }
-
- if ( opt.isRaw ) {
- o.isRaw = true;
- }
-
- if ( Object.keys(o).length ) {
- this.speechQueue.push(o);
- if ( !o.isRaw ) {
- this.speechQueue.push( { break: 300 } );
- }
- this.startSpeaking();
- }
-
- }
-
- /**
- * Play audio playlist using Web Audio API.
- * @param {boolean} [force=false] If true, forces to proceed
- */
- async playAudio(force=false) {
- if ( !this.armature || (this.isAudioPlaying && !force) ) return;
- this.isAudioPlaying = true;
- if ( this.audioPlaylist.length ) {
- const item = this.audioPlaylist.shift();
-
- // If Web Audio API is suspended, try to resume it
- if ( this.audioCtx.state === "suspended" || this.audioCtx.state === "interrupted" ) {
- const resume = this.audioCtx.resume();
- const timeout = new Promise((_r, rej) => setTimeout(() => rej("p2"), 1000));
- try {
- await Promise.race([resume, timeout]);
- } catch(e) {
- console.log("Can't play audio. Web Audio API suspended. This is often due to calling some speak method before the first user action, which is typically prevented by the browser.");
- this.playAudio(true);
- return;
- }
- }
-
- // AudioBuffer
- let audio;
- if ( Array.isArray(item.audio) ) {
- // Convert from PCM samples
- let buf = this.concatArrayBuffers( item.audio );
- audio = this.pcmToAudioBuffer(buf);
- } else {
- audio = item.audio;
- }
-
- // Create audio source
- this.audioSpeechSource = this.audioCtx.createBufferSource();
- this.audioSpeechSource.buffer = audio;
- this.audioSpeechSource.playbackRate.value = 1 / this.animSlowdownRate;
- this.audioSpeechSource.connect(this.audioAnalyzerNode);
- this.audioSpeechSource.addEventListener('ended', () => {
- this.audioSpeechSource.disconnect();
- this.playAudio(true);
- }, { once: true });
-
- // Rescale lipsync and push to queue
- let delay = 0;
- if ( item.anim ) {
- // Find the lowest negative time point, if any
- if ( !item.isRaw ) {
- delay = Math.abs(Math.min(0, ...item.anim.map( x => Math.min(...x.ts) ) ) );
- }
- item.anim.forEach( x => {
- for(let i=0; i<x.ts.length; i++) {
- x.ts[i] = this.animClock + x.ts[i] + delay;
- }
- this.animQueue.push(x);
- });
- }
-
- // Play, dealy in seconds so pre-animations can be played
- this.audioSpeechSource.start(delay/1000);
-
- } else {
- this.isAudioPlaying = false;
- this.startSpeaking(true);
- }
- }
-
- /**
- * Synthesize speech using browser's built-in Speech Synthesis API
- * @param {Object} line Speech line object
- */
- async synthesizeWithBrowserTTS(line) {
- return new Promise((resolve, reject) => {
- // Get the text from the line
- const text = line.text.map(x => x.word).join(' ');
-
- // Create speech synthesis utterance
- const utterance = new SpeechSynthesisUtterance(text);
-
- // Set voice properties
- const lang = line.lang || this.avatar.ttsLang || this.opt.ttsLang || 'en-US';
- const rate = (line.rate || this.avatar.ttsRate || this.opt.ttsRate || 1) + this.mood.speech.deltaRate;
- const pitch = (line.pitch || this.avatar.ttsPitch || this.opt.ttsPitch || 1) + this.mood.speech.deltaPitch;
- const volume = (line.volume || this.avatar.ttsVolume || this.opt.ttsVolume || 1) + this.mood.speech.deltaVolume;
-
- utterance.lang = lang;
- utterance.rate = Math.max(0.1, Math.min(10, rate));
- utterance.pitch = Math.max(0, Math.min(2, pitch));
- utterance.volume = Math.max(0, Math.min(1, volume));
-
- // Try to find a matching voice
- const voices = speechSynthesis.getVoices();
- const targetVoice = line.voice || this.avatar.ttsVoice || this.opt.ttsVoice;
- if (targetVoice && voices.length > 0) {
- const voice = voices.find(v => v.name.includes(targetVoice) || v.lang === lang);
- if (voice) {
- utterance.voice = voice;
- }
- }
-
- // Estimate duration based on text length and speech rate
- const estimatedDuration = (text.length * 100) / utterance.rate; // Adjust for speech rate
-
- // Create audio buffer for the estimated duration
- const audioBuffer = this.audioCtx.createBuffer(1, this.audioCtx.sampleRate * (estimatedDuration / 1000), this.audioCtx.sampleRate);
-
- // Generate lip-sync data from text using the existing lip-sync modules
- const lipsyncLang = this.avatar.lipsyncLang || this.opt.lipsyncLang || 'en';
- const processedText = this.lipsyncPreProcessText(text, lipsyncLang);
- const lipsyncData = this.lipsyncWordsToVisemes(processedText, lipsyncLang);
-
- console.log('Browser TTS Lip-sync Debug:', {
- text,
- lipsyncLang,
- processedText,
- lipsyncData,
- hasVisemes: lipsyncData && lipsyncData.visemes && lipsyncData.visemes.length > 0,
- estimatedDuration
- });
-
- // Generate lip-sync animation from the viseme data
- const lipsyncAnim = [];
- if (lipsyncData && lipsyncData.visemes && lipsyncData.visemes.length > 0) {
- const totalDuration = lipsyncData.times[lipsyncData.visemes.length - 1] + lipsyncData.durations[lipsyncData.visemes.length - 1];
-
- for (let i = 0; i < lipsyncData.visemes.length; i++) {
- const viseme = lipsyncData.visemes[i];
- const relativeTime = lipsyncData.times[i] / totalDuration;
- const relativeDuration = lipsyncData.durations[i] / totalDuration;
-
- const time = relativeTime * estimatedDuration;
- const duration = relativeDuration * estimatedDuration;
-
- lipsyncAnim.push({
- template: { name: 'viseme' },
- ts: [time - Math.min(60, 2 * duration / 3), time + Math.min(25, duration / 2), time + duration + Math.min(60, duration / 2)],
- vs: {
- ['viseme_' + viseme]: [null, (viseme === 'PP' || viseme === 'FF') ? 0.9 : 0.6, 0]
- }
- });
- }
- }
-
- // Combine original animation with lip-sync animation
- const combinedAnim = [...line.anim, ...lipsyncAnim];
-
- // Add to playlist
- this.audioPlaylist.push({ anim: combinedAnim, audio: audioBuffer });
- this.onSubtitles = line.onSubtitles || null;
- this.resetLips();
- if (line.mood) this.setMood(line.mood);
- this.playAudio();
-
- // Handle speech synthesis events
- utterance.onend = () => {
- resolve();
- };
-
- utterance.onerror = (event) => {
- console.error('Speech synthesis error:', event.error);
- reject(event.error);
- };
-
- // Start speaking
- speechSynthesis.speak(utterance);
- });
- }
-
- /**
- * Synthesize speech using ElevenLabs TTS
- * @param {Object} line Speech line object
- */
- async synthesizeWithElevenLabsTTS(line) {
- // Get the text from the line
- const text = line.text.map(x => x.word).join(' ');
-
- // ElevenLabs API request
- const voiceId = line.voice || this.avatar.ttsVoice || this.opt.ttsVoice || "21m00Tcm4TlvDq8ikWAM"; // Default to Rachel
-
- const requestBody = {
- text: text,
- model_id: "eleven_monolingual_v1",
- voice_settings: {
- stability: 0.5,
- similarity_boost: 0.5,
- style: 0.0,
- use_speaker_boost: true
- }
- };
-
- const response = await fetch(`${this.opt.ttsEndpoint}/${voiceId}`, {
- method: 'POST',
- headers: {
- 'Accept': 'audio/mpeg',
- 'Content-Type': 'application/json',
- 'xi-api-key': this.opt.ttsApikey
- },
- body: JSON.stringify(requestBody)
- });
-
- if (!response.ok) {
- throw new Error(`ElevenLabs TTS error: ${response.status} ${response.statusText}`);
- }
-
- // Get audio data
- const audioArrayBuffer = await response.arrayBuffer();
- const audioBuffer = await this.audioCtx.decodeAudioData(audioArrayBuffer);
-
- // Use text-based lip-sync with proper error handling
- console.log('Using text-based lip-sync for debugging...');
- const lipsyncLang = this.avatar.lipsyncLang || this.opt.lipsyncLang || 'en';
-
- let audioAnalysis;
- try {
- console.log('Lip-sync modules available:', {
- hasLipsync: !!this.lipsync,
- lipsyncKeys: this.lipsync ? Object.keys(this.lipsync) : [],
- lipsyncLang: lipsyncLang
- });
-
- const processedText = this.lipsyncPreProcessText(text, lipsyncLang);
- const lipsyncData = this.lipsyncWordsToVisemes(processedText, lipsyncLang);
-
- console.log('Lip-sync data:', {
- processedText,
- lipsyncData,
- hasVisemes: lipsyncData && lipsyncData.visemes && lipsyncData.visemes.length > 0
- });
-
- if (lipsyncData && lipsyncData.visemes && lipsyncData.visemes.length > 0) {
- // Create audio analysis structure for compatibility
- audioAnalysis = {
- visemes: lipsyncData.visemes.map((viseme, i) => ({
- viseme: viseme,
- startTime: (i * audioBuffer.duration) / lipsyncData.visemes.length,
- endTime: ((i + 1) * audioBuffer.duration) / lipsyncData.visemes.length,
- duration: audioBuffer.duration / lipsyncData.visemes.length,
- intensity: 0.7
- })),
- words: [],
- duration: audioBuffer.duration,
- features: { onsets: [], boundaries: [] }
- };
- } else {
- throw new Error('No visemes generated from text');
- }
- } catch (error) {
- console.error('Text-based lip-sync failed, using fallback:', error);
- // Fallback: create simple visemes from text
- const words = text.toLowerCase().split(/\s+/);
- const simpleVisemes = [];
-
- for (const word of words) {
- // Simple phonetic mapping
- for (const char of word) {
- let viseme = 'aa'; // default
- if ('aeiou'.includes(char)) viseme = 'aa';
- else if ('bp'.includes(char)) viseme = 'PP';
- else if ('fv'.includes(char)) viseme = 'FF';
- else if ('st'.includes(char)) viseme = 'SS';
- else if ('dln'.includes(char)) viseme = 'DD';
- else if ('kg'.includes(char)) viseme = 'kk';
- else if ('rw'.includes(char)) viseme = 'RR';
-
- simpleVisemes.push(viseme);
- }
- }
-
- audioAnalysis = {
- visemes: simpleVisemes.map((viseme, i) => ({
- viseme: viseme,
- startTime: (i * audioBuffer.duration) / simpleVisemes.length,
- endTime: ((i + 1) * audioBuffer.duration) / simpleVisemes.length,
- duration: audioBuffer.duration / simpleVisemes.length,
- intensity: 0.6
- })),
- words: [],
- duration: audioBuffer.duration,
- features: { onsets: [], boundaries: [] }
- };
+ o.onSubtitles = onsubtitles;
  }
-
- console.log('ElevenLabs TTS Audio Analysis:', {
- text,
- audioDuration: audioBuffer.duration,
- visemeCount: audioAnalysis.visemes ? audioAnalysis.visemes.length : 0,
- wordCount: audioAnalysis.words ? audioAnalysis.words.length : 0,
- features: {
- onsets: audioAnalysis.features && audioAnalysis.features.onsets ? audioAnalysis.features.onsets.length : 0,
- boundaries: audioAnalysis.features && audioAnalysis.features.phonemeBoundaries ? audioAnalysis.features.phonemeBoundaries.length : 0
- },
- visemes: audioAnalysis.visemes ? audioAnalysis.visemes.slice(0, 3) : [] // Show first 3 visemes for debugging
- });
-
- // Generate precise lip-sync animation from audio analysis
- const lipsyncAnim = [];
- if (audioAnalysis.visemes && audioAnalysis.visemes.length > 0) {
- console.log('ElevenLabs: Generating lip-sync animation from', audioAnalysis.visemes.length, 'visemes');
- for (let i = 0; i < audioAnalysis.visemes.length; i++) {
- const visemeData = audioAnalysis.visemes[i];
- const time = visemeData.startTime * 1000; // Convert to milliseconds
- const duration = visemeData.duration * 1000;
- const intensity = visemeData.intensity;
-
- lipsyncAnim.push({
- template: { name: 'viseme' },
- ts: [time - Math.min(60, 2 * duration / 3), time + Math.min(25, duration / 2), time + duration + Math.min(60, duration / 2)],
- vs: {
- ['viseme_' + visemeData.viseme]: [null, intensity, 0]
- }
- });
+
+ if ( opt.isRaw ) {
+ o.isRaw = true;
  }
- console.log('ElevenLabs: Generated', lipsyncAnim.length, 'lip-sync animation frames');
- } else {
- console.warn('ElevenLabs: No visemes available for lip-sync animation');
+
+ if ( Object.keys(o).length ) {
+ this.speechQueue.push(o);
+ if ( !o.isRaw ) {
+ this.speechQueue.push( { break: 300 } );
+ }
+ this.startSpeaking();
  }
-
- // Combine original animation with lip-sync animation
- const combinedAnim = [...line.anim, ...lipsyncAnim];
- console.log('ElevenLabs: Combined animation frames:', combinedAnim.length, '(original:', line.anim.length, '+ lipsync:', lipsyncAnim.length, ')');
-
- // Add to playlist
- this.audioPlaylist.push({ anim: combinedAnim, audio: audioBuffer });
- this.onSubtitles = line.onSubtitles || null;
- this.resetLips();
- if (line.mood) this.setMood(line.mood);
- this.playAudio();
+
  }

  /**
- * Synthesize speech using Deepgram Aura-2 TTS
- * @param {Object} line Speech line object
- */
- async synthesizeWithDeepgramTTS(line) {
- // Get the text from the line
- const text = line.text.map(x => x.word).join(' ');
-
- // Deepgram API request
- const voiceModel = line.voice || this.avatar.ttsVoice || this.opt.ttsVoice || "aura-2-thalia-en"; // Default to Thalia
-
- // Build URL with model as query parameter
- const url = `${this.opt.ttsEndpoint}?model=${voiceModel}`;
-
- const response = await fetch(url, {
- method: 'POST',
- headers: {
- 'Authorization': `Token ${this.opt.ttsApikey}`,
- 'Content-Type': 'text/plain',
- 'Accept': 'audio/mpeg'
- },
- body: text
- });
+ * Play audio playlist using Web Audio API.
+ * @param {boolean} [force=false] If true, forces to proceed
+ */
+ async playAudio(force=false) {
+ if ( !this.armature || (this.isAudioPlaying && !force) ) return;
+ this.isAudioPlaying = true;
+ if ( this.audioPlaylist.length ) {
+ const item = this.audioPlaylist.shift();

- if (!response.ok) {
- throw new Error(`Deepgram TTS error: ${response.status} ${response.statusText}`);
- }
+ // If Web Audio API is suspended, try to resume it
+ if ( this.audioCtx.state === "suspended" || this.audioCtx.state === "interrupted" ) {
+ const resume = this.audioCtx.resume();
+ const timeout = new Promise((_r, rej) => setTimeout(() => rej("p2"), 1000));
+ try {
+ await Promise.race([resume, timeout]);
+ } catch(e) {
+ console.log("Can't play audio. Web Audio API suspended. This is often due to calling some speak method before the first user action, which is typically prevented by the browser.");
+ this.playAudio(true);
+ return;
+ }
+ }

- // Get audio data
- const audioArrayBuffer = await response.arrayBuffer();
- const audioBuffer = await this.audioCtx.decodeAudioData(audioArrayBuffer);
-
- // Use text-based lip-sync with proper error handling
- console.log('Using text-based lip-sync for Deepgram...');
- const lipsyncLang = this.avatar.lipsyncLang || this.opt.lipsyncLang || 'en';
-
- let audioAnalysis;
- try {
- console.log('Lip-sync modules available:', {
- hasLipsync: !!this.lipsync,
- lipsyncKeys: this.lipsync ? Object.keys(this.lipsync) : [],
- lipsyncLang: lipsyncLang
- });
-
- const processedText = this.lipsyncPreProcessText(text, lipsyncLang);
- const lipsyncData = this.lipsyncWordsToVisemes(processedText, lipsyncLang);
-
- console.log('Lip-sync data:', {
- processedText,
- lipsyncData,
- hasVisemes: lipsyncData && lipsyncData.visemes && lipsyncData.visemes.length > 0
- });
-
- if (lipsyncData && lipsyncData.visemes && lipsyncData.visemes.length > 0) {
- // Create audio analysis structure for compatibility
- audioAnalysis = {
- visemes: lipsyncData.visemes.map((viseme, i) => ({
- viseme: viseme,
- startTime: (i * audioBuffer.duration) / lipsyncData.visemes.length,
- endTime: ((i + 1) * audioBuffer.duration) / lipsyncData.visemes.length,
- duration: audioBuffer.duration / lipsyncData.visemes.length,
- intensity: 0.7
- })),
- words: [],
- duration: audioBuffer.duration,
- features: { onsets: [], boundaries: [] }
- };
+ // AudioBuffer
+ let audio;
+ if ( Array.isArray(item.audio) ) {
+ // Convert from PCM samples
+ let buf = this.concatArrayBuffers( item.audio );
+ audio = this.pcmToAudioBuffer(buf);
  } else {
- throw new Error('No visemes generated from text');
+ audio = item.audio;
  }
- } catch (error) {
- console.error('Text-based lip-sync failed, using fallback:', error);
- // Fallback: create simple visemes from text
- const words = text.toLowerCase().split(/\s+/);
- const simpleVisemes = [];
-
- for (const word of words) {
- // Simple phonetic mapping
- for (const char of word) {
- let viseme = 'aa'; // default
- if ('aeiou'.includes(char)) viseme = 'aa';
- else if ('bp'.includes(char)) viseme = 'PP';
- else if ('fv'.includes(char)) viseme = 'FF';
- else if ('st'.includes(char)) viseme = 'SS';
- else if ('dln'.includes(char)) viseme = 'DD';
- else if ('kg'.includes(char)) viseme = 'kk';
- else if ('rw'.includes(char)) viseme = 'RR';
-
- simpleVisemes.push(viseme);
+
+ // Make sure previous audio source is cleared
+ if (this.audioSpeechSource) {
+ try { this.audioSpeechSource.stop?.() } catch(error) {};
+ this.audioSpeechSource.disconnect();
+ this.audioSpeechSource.onended = null;
+ this.audioSpeechSource = null;
  }
-
- audioAnalysis = {
- visemes: simpleVisemes.map((viseme, i) => ({
- viseme: viseme,
- startTime: (i * audioBuffer.duration) / simpleVisemes.length,
- endTime: ((i + 1) * audioBuffer.duration) / simpleVisemes.length,
- duration: audioBuffer.duration / simpleVisemes.length,
- intensity: 0.6
- })),
- words: [],
- duration: audioBuffer.duration,
- features: { onsets: [], boundaries: [] }
+
+ // Create audio source
+ const source = this.audioCtx.createBufferSource();
+ this.audioSpeechSource = source;
+ source.buffer = audio;
+ source.playbackRate.value = 1 / this.animSlowdownRate;
+ source.connect(this.audioAnalyzerNode);
+ source.onended = () => {
+ source.disconnect();
+ source.onended = null;
+ if ( this.audioSpeechSource === source ) {
+ this.audioSpeechSource = null;
+ }
+ this.playAudio(true);
  };
- }
-
- console.log('Deepgram TTS Audio Analysis:', {
- text,
- audioDuration: audioBuffer.duration,
- visemeCount: audioAnalysis.visemes ? audioAnalysis.visemes.length : 0,
- wordCount: audioAnalysis.words ? audioAnalysis.words.length : 0,
- features: {
- onsets: audioAnalysis.features && audioAnalysis.features.onsets ? audioAnalysis.features.onsets.length : 0,
- boundaries: audioAnalysis.features && audioAnalysis.features.phonemeBoundaries ? audioAnalysis.features.phonemeBoundaries.length : 0
- },
- visemes: audioAnalysis.visemes ? audioAnalysis.visemes.slice(0, 3) : [] // Show first 3 visemes for debugging
- });
-
- // Generate precise lip-sync animation from audio analysis
- const lipsyncAnim = [];
- if (audioAnalysis.visemes && audioAnalysis.visemes.length > 0) {
- console.log('Deepgram: Generating lip-sync animation from', audioAnalysis.visemes.length, 'visemes');
- for (let i = 0; i < audioAnalysis.visemes.length; i++) {
- const visemeData = audioAnalysis.visemes[i];
- const time = visemeData.startTime * 1000; // Convert to milliseconds
- const duration = visemeData.duration * 1000;
- const intensity = visemeData.intensity;
-
- lipsyncAnim.push({
- template: { name: 'viseme' },
- ts: [time - Math.min(60, 2 * duration / 3), time + Math.min(25, duration / 2), time + duration + Math.min(60, duration / 2)],
- vs: {
- ['viseme_' + visemeData.viseme]: [null, intensity, 0]
+
+ // Rescale lipsync and push to queue
+ let delay = 0;
+ if ( item.anim ) {
+ // Find the lowest negative time point, if any
+ if ( !item.isRaw ) {
+ delay = Math.abs(Math.min(0, ...item.anim.map( x => Math.min(...x.ts) ) ) );
  }
- });
- }
- console.log('Deepgram: Generated', lipsyncAnim.length, 'lip-sync animation frames');
+ item.anim.forEach( x => {
+ for(let i=0; i<x.ts.length; i++) {
+ x.ts[i] = this.animClock + x.ts[i] + delay;
+ }
+ this.animQueue.push(x);
+ });
+ }
+
+ // Play, delay in seconds so pre-animations can be played
+ source.start( this.audioCtx.currentTime + delay/1000);
+
  } else {
- console.warn('Deepgram: No visemes available for lip-sync animation');
+ this.isAudioPlaying = false;
+ this.startSpeaking(true);
  }
-
- // Combine original animation with lip-sync animation
- const combinedAnim = [...line.anim, ...lipsyncAnim];
- console.log('Deepgram: Combined animation frames:', combinedAnim.length, '(original:', line.anim.length, '+ lipsync:', lipsyncAnim.length, ')');
-
- // Add to playlist
- this.audioPlaylist.push({ anim: combinedAnim, audio: audioBuffer });
- this.onSubtitles = line.onSubtitles || null;
- this.resetLips();
- if (line.mood) this.setMood(line.mood);
- this.playAudio();
  }

  /**
- * Synthesize speech using Azure TTS
- * @param {Object} line Speech line object
- */
- async synthesizeWithAzureTTS(line) {
- // Get the text from the line
- const text = line.text.map(x => x.word).join(' ');
-
- // Azure TTS SSML
- const voiceName = line.voice || this.avatar.ttsVoice || this.opt.ttsVoice || "en-US-AriaNeural";
- const ssml = `
- <speak version="1.0" xmlns="http://www.w3.org/2001/10/synthesis" xml:lang="en-US">
- <voice name="${voiceName}">
- ${text}
- </voice>
- </speak>
- `;
-
- const response = await fetch(this.opt.ttsEndpoint, {
- method: 'POST',
- headers: {
- 'Ocp-Apim-Subscription-Key': this.opt.ttsApikey,
- 'Content-Type': 'application/ssml+xml',
- 'X-Microsoft-OutputFormat': 'audio-16khz-128kbitrate-mono-mp3'
- },
- body: ssml
- });
+ * Take the next queue item from the speech queue, convert it to text, and
+ * load the audio file.
+ * @param {boolean} [force=false] If true, forces to proceed (e.g. after break)
+ */
+ async startSpeaking( force = false ) {
+ if ( !this.armature || (this.isSpeaking && !force) ) return;
+ this.stateName = 'speaking';
+ this.isSpeaking = true;
+ if ( this.speechQueue.length ) {
+ let line = this.speechQueue.shift();
+ if ( line.emoji ) {

- if (!response.ok) {
- throw new Error(`Azure TTS error: ${response.status} ${response.statusText}`);
- }
+ // Look at the camera
+ this.lookAtCamera(500);

- // Get audio data
- const audioArrayBuffer = await response.arrayBuffer();
- const audioBuffer = await this.audioCtx.decodeAudioData(audioArrayBuffer);
4281
-
4282
- // Analyze audio for precise lip-sync timing
4283
- console.log('Analyzing audio for precise lip-sync...');
4284
- const audioAnalysis = await this.audioAnalyzer.analyzeAudio(audioBuffer, text);
4285
-
4286
- console.log('Azure TTS Audio Analysis:', {
4287
- text,
4288
- audioDuration: audioBuffer.duration,
4289
- visemeCount: audioAnalysis.visemes.length,
4290
- wordCount: audioAnalysis.words.length,
4291
- features: {
4292
- onsets: audioAnalysis.features.onsets.length,
4293
- boundaries: audioAnalysis.features.phonemeBoundaries.length
4294
- }
4295
- });
4296
-
4297
- // Generate precise lip-sync animation from audio analysis
4298
- const lipsyncAnim = [];
4299
- for (let i = 0; i < audioAnalysis.visemes.length; i++) {
4300
- const visemeData = audioAnalysis.visemes[i];
4301
- const time = visemeData.startTime * 1000; // Convert to milliseconds
4302
- const duration = visemeData.duration * 1000;
4303
- const intensity = visemeData.intensity;
4304
-
4305
- lipsyncAnim.push({
4306
- template: { name: 'viseme' },
4307
- ts: [time - Math.min(60, 2 * duration / 3), time + Math.min(25, duration / 2), time + duration + Math.min(60, duration / 2)],
4308
- vs: {
4309
- ['viseme_' + visemeData.viseme]: [null, intensity, 0]
3383
+ // Only emoji
3384
+ let duration = line.emoji.dt.reduce((a,b) => a+b,0);
3385
+ this.animQueue.push( this.animFactory( line.emoji ) );
3386
+ setTimeout( this.startSpeaking.bind(this), duration, true );
3387
+ } else if ( line.break ) {
3388
+ // Break
3389
+ setTimeout( this.startSpeaking.bind(this), line.break, true );
3390
+ } else if ( line.audio ) {
3391
+
3392
+ // Look at the camera
3393
+ if ( !line.isRaw ) {
3394
+ this.lookAtCamera(500);
3395
+ this.speakWithHands();
3396
+ this.resetLips();
4310
3397
  }
4311
- });
4312
- }
4313
-
4314
- // Combine original animation with lip-sync animation
4315
- const combinedAnim = [...line.anim, ...lipsyncAnim];
4316
-
4317
- // Add to playlist
4318
- this.audioPlaylist.push({ anim: combinedAnim, audio: audioBuffer });
4319
- this.onSubtitles = line.onSubtitles || null;
4320
- this.resetLips();
4321
- if (line.mood) this.setMood(line.mood);
4322
- this.playAudio();
4323
- }
4324
3398
 
4325
- /**
4326
- * Synthesize speech using external TTS service (Google Cloud, etc.)
4327
- * @param {Object} line Speech line object
4328
- */
4329
- async synthesizeWithExternalTTS(line) {
3399
+ // Make a playlist
3400
+ this.audioPlaylist.push({ anim: line.anim, audio: line.audio, isRaw: line.isRaw });
3401
+ this.onSubtitles = line.onSubtitles || null;
3402
+ if ( line.mood ) this.setMood( line.mood );
3403
+ this.playAudio();
3404
+
3405
+ } else if ( line.text ) {
3406
+
3407
+ // Look at the camera
3408
+ this.lookAtCamera(500);
3409
+
3410
+ // Spoken text
3411
+ try {
4330
3412
  // Convert text to SSML
4331
3413
  let ssml = "<speak>";
4332
3414
  line.text.forEach( (x,i) => {
@@ -4346,6 +3428,7 @@ class TalkingHead {
  });
  ssml += "</speak>";
 
+
  const o = {
  method: "POST",
  headers: {
@@ -4433,70 +3516,6 @@ class TalkingHead {
 
  } else {
  this.startSpeaking(true);
- }
- }
-
- /**
- * Take the next queue item from the speech queue, convert it to text, and
- * load the audio file.
- * @param {boolean} [force=false] If true, forces to proceed (e.g. after break)
- */
- async startSpeaking( force = false ) {
- if ( !this.armature || (this.isSpeaking && !force) ) return;
- this.stateName = 'speaking';
- this.isSpeaking = true;
- if ( this.speechQueue.length ) {
- let line = this.speechQueue.shift();
- if ( line.emoji ) {
-
- // Look at the camera
- this.lookAtCamera(500);
-
- // Only emoji
- let duration = line.emoji.dt.reduce((a,b) => a+b,0);
- this.animQueue.push( this.animFactory( line.emoji ) );
- setTimeout( this.startSpeaking.bind(this), duration, true );
- } else if ( line.break ) {
- // Break
- setTimeout( this.startSpeaking.bind(this), line.break, true );
- } else if ( line.audio ) {
-
- // Look at the camera
- if ( !line.isRaw ) {
- this.lookAtCamera(500);
- this.speakWithHands();
- this.resetLips();
- }
-
- // Make a playlist
- this.audioPlaylist.push({ anim: line.anim, audio: line.audio, isRaw: line.isRaw });
- this.onSubtitles = line.onSubtitles || null;
- if ( line.mood ) this.setMood( line.mood );
- this.playAudio();
-
- } else if ( line.text ) {
-
- // Look at the camera
- this.lookAtCamera(500);
-
- // Spoken text
- try {
- // Check which TTS service to use
- if (!this.opt.ttsEndpoint || this.opt.ttsEndpoint === "") {
- // Use browser's built-in speech synthesis
- await this.synthesizeWithBrowserTTS(line);
- } else if (this.opt.ttsService === "elevenlabs") {
- // Use ElevenLabs TTS
- await this.synthesizeWithElevenLabsTTS(line);
- } else if (this.opt.ttsService === "deepgram") {
- // Use Deepgram Aura-2 TTS
- await this.synthesizeWithDeepgramTTS(line);
- } else if (this.opt.ttsService === "azure") {
- // Use Azure TTS
- await this.synthesizeWithAzureTTS(line);
- } else {
- // Use external TTS service (Google Cloud, etc.)
- await this.synthesizeWithExternalTTS(line);
  }
  } catch (error) {
  console.error("Error:", error);
@@ -4532,7 +3551,7 @@ class TalkingHead {
  * Pause speaking.
  */
  pauseSpeaking() {
- try { this.audioSpeechSource.stop(); } catch(error) {}
+ try { this.audioSpeechSource?.stop(); } catch(error) {}
  this.audioPlaylist.length = 0;
  this.stateName = 'idle';
  this.isSpeaking = false;
@@ -4548,7 +3567,7 @@ class TalkingHead {
  * Stop speaking and clear the speech queue.
  */
  stopSpeaking() {
- try { this.audioSpeechSource.stop(); } catch(error) {}
+ try { this.audioSpeechSource?.stop(); } catch(error) {}
  this.audioPlaylist.length = 0;
  this.speechQueue.length = 0;
  this.animQueue = this.animQueue.filter( x => x.template.name !== 'viseme' && x.template.name !== 'subtitles' && x.template.name !== 'blendshapes' );
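
[Editor's note] Both pauseSpeaking() and stopSpeaking() now guard stop() with optional chaining: the new onended handler shown earlier nulls out audioSpeechSource once playback finishes, so an unconditional stop() could throw on a null reference. A minimal standalone sketch of the defensive teardown pattern this release converges on (not package code):

    function teardownSource(node) {
      if (!node) return;                  // reference may already be cleared
      try { node.stop?.(); } catch (e) {} // stop() throws if never started
      node.disconnect();                  // detach from the WebAudio graph
      node.onended = null;                // drop closure references
    }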
@@ -5306,8 +4325,12 @@ class TalkingHead {
  */
  setSlowdownRate(k) {
  this.animSlowdownRate = k;
- this.audioSpeechSource.playbackRate.value = 1 / this.animSlowdownRate;
- this.audioBackgroundSource.playbackRate.value = 1 / this.animSlowdownRate;
+ if ( this.audioSpeechSource ) {
+ this.audioSpeechSource.playbackRate.value = 1 / this.animSlowdownRate;
+ }
+ if ( this.audioBackgroundSource ) {
+ this.audioBackgroundSource.playbackRate.value = 1 / this.animSlowdownRate;
+ }
  }
 
  /**
@@ -5336,7 +4359,7 @@ class TalkingHead {
  this.animTimeLast = performance.now();
  this.isRunning = true;
  if ( !this.isAvatarOnly ) {
- requestAnimationFrame( this.animate.bind(this) );
+ this._raf = requestAnimationFrame( this.animate );
  }
  }
  }
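
[Editor's note] animate is now bound once at construction time and the requestAnimationFrame handle is kept in _raf, which dispose() (further down in this diff) cancels. A minimal sketch of the lifecycle, with field names assumed to match the diff:

    class Loop {
      constructor() {
        this.animate = this.animate.bind(this); // bind once, not per frame
        this._raf = null;
      }
      start() { this._raf = requestAnimationFrame(this.animate); }
      animate(t) { /* render */ this._raf = requestAnimationFrame(this.animate); }
      dispose() {
        if (this._raf !== null) { cancelAnimationFrame(this._raf); this._raf = null; }
      }
    }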
@@ -5392,7 +4415,6 @@ class TalkingHead {
  * @param {number} [ndx=0] Index of the clip
  * @param {number} [scale=0.01] Position scale factor
  */
-
  async playAnimation(url, onprogress=null, dur=10, ndx=0, scale=0.01, disablePositionLock=false) {
  if ( !this.armature ) return;
 
@@ -5446,97 +4468,26 @@ class TalkingHead {
  action.fadeIn(0.5).play();
  console.log('FBX animation started successfully:', url);
  } catch (error) {
- console.warn('FBX animation failed to start:', error);
- // Stop the animation and unlock position on error
- this.stopAnimation();
- return;
- }
-
- // Check if the animation actually has valid tracks
- if (action.getClip().tracks.length === 0) {
- console.warn('FBX animation has no valid tracks, stopping');
- this.stopAnimation();
- return;
- }
-
- } else {
-
- // Validate file extension
- const fileExtension = url.split('.').pop().toLowerCase();
- if (fileExtension !== 'fbx') {
- console.error(`Invalid file type for FBX animation: ${url}. Expected .fbx file.`);
- return;
- }
-
- // Check if file exists before attempting to load
- let fileExists = false;
- try {
- const response = await fetch(url, { method: 'HEAD' });
- fileExists = response.ok;
- if (!fileExists) {
- console.error(`FBX file not found at ${url}. Status: ${response.status}`);
- console.error('Please check:');
- console.error('1. File path is correct (note: path is case-sensitive)');
- console.error('2. File exists in your public folder');
- console.error('3. File is accessible (not blocked by server)');
- return;
- }
- } catch (fetchError) {
- console.warn(`Could not verify file existence for ${url}, attempting to load anyway:`, fetchError);
- }
-
- // Load animation with error handling
- const loader = new FBXLoader();
- let fbx;
-
- try {
- fbx = await loader.loadAsync( url, onprogress );
- } catch (error) {
- console.error(`Failed to load FBX animation from ${url}:`, error);
- console.error('Error details:', {
- message: error.message,
- url: url,
- suggestion: 'Make sure the file is a valid FBX file and the path is correct'
- });
-
- // Try to provide helpful error message
- if (error.message && error.message.includes('version number')) {
- console.error('FBX Loader Error: Cannot find version number');
- console.error('This error usually means:');
- console.error('1. The file is not a valid FBX file (might be GLB, corrupted, or wrong format)');
- console.error('2. The file might be corrupted');
- console.error('3. The file path might be incorrect');
- console.error('4. The server returned an HTML error page instead of the FBX file');
- console.error('5. The file might not exist at that path');
- console.error('');
- console.error('Solution: Please verify:');
- console.error(` - File exists at: ${url}`);
- console.error(' - File is a valid FBX binary file');
- console.error(' - File path matches your public folder structure');
- console.error(' - File is not corrupted');
- }
-
- // Try to fetch and check what we actually got
- try {
- const response = await fetch(url);
- const contentType = response.headers.get('content-type');
- const text = await response.text();
- console.error(`Response details:`, {
- status: response.status,
- contentType: contentType,
- firstBytes: text.substring(0, 100),
- isHTML: text.trim().startsWith('<!DOCTYPE') || text.trim().startsWith('<html')
- });
- if (text.trim().startsWith('<!DOCTYPE') || text.trim().startsWith('<html')) {
- console.error('The server returned an HTML page instead of an FBX file. The file path is likely incorrect.');
- }
- } catch (fetchError) {
- console.error('Could not fetch file for debugging:', fetchError);
- }
-
+ console.warn('FBX animation failed to start:', error);
+ // Stop the animation and unlock position on error
+ this.stopAnimation();
+ return;
+ }
+
+ // Check if the animation actually has valid tracks
+ if (action.getClip().tracks.length === 0) {
+ console.warn('FBX animation has no valid tracks, stopping');
+ this.stopAnimation();
  return;
  }
 
+ } else {
+
+ // Load animation
+ const loader = new FBXLoader();
+
+ let fbx = await loader.loadAsync( url, onprogress );
+
  if ( fbx && fbx.animations && fbx.animations[ndx] ) {
  let anim = fbx.animations[ndx];
 
@@ -5688,31 +4639,6 @@ class TalkingHead {
  return null; // No mapping found
  };
 
- // Debug: Log FBX bone names and avatar bone names for comparison
- const fbxBoneNames = new Set();
- anim.tracks.forEach(track => {
- const trackParts = track.name.split('.');
- fbxBoneNames.add(trackParts[0]);
- });
-
- console.log('=== Ready Player Me Animation Bone Analysis ===');
- console.log('FBX bone names:', Array.from(fbxBoneNames).sort().join(', '));
- console.log('Avatar skeleton bone names:', Array.from(availableBones).sort().join(', '));
-
- // Check for arm bones specifically
- const fbxArmBones = Array.from(fbxBoneNames).filter(b =>
- b.toLowerCase().includes('arm') ||
- b.toLowerCase().includes('hand') ||
- b.toLowerCase().includes('shoulder')
- );
- const avatarArmBones = Array.from(availableBones).filter(b =>
- b.includes('Arm') ||
- b.includes('Hand') ||
- b.includes('Shoulder')
- );
- console.log('FBX arm/hand/shoulder bones:', fbxArmBones.sort().join(', '));
- console.log('Avatar arm/hand/shoulder bones:', avatarArmBones.sort().join(', '));
-
  // Filter and map animation tracks
  const mappedTracks = [];
  const unmappedBones = new Set();
@@ -5731,11 +4657,6 @@ class TalkingHead {
  const newTrackName = `${mappedBoneName}.${property}`;
  const newTrack = track.clone();
  newTrack.name = newTrackName;
-
- // Note: Rotation corrections removed - they were causing issues with both arms
- // If left arm still has issues, it's likely a bone mapping problem, not rotation
- // Focus on getting bone names mapped correctly first
-
  mappedTracks.push(newTrack);
 
  // Store mapping for logging
@@ -5744,12 +4665,6 @@ class TalkingHead {
  }
  } else {
  unmappedBones.add(fbxBoneName);
- // Log unmapped bones (especially arm bones)
- if (fbxBoneName.toLowerCase().includes('arm') ||
- fbxBoneName.toLowerCase().includes('hand') ||
- fbxBoneName.toLowerCase().includes('shoulder')) {
- console.warn(`⚠️ Arm bone "${fbxBoneName}" could not be mapped to avatar skeleton`);
- }
  }
  });
 
@@ -5765,24 +4680,11 @@ class TalkingHead {
  console.log(`✓ Mapped ${boneNameMap.size} bone(s):`,
  Array.from(boneNameMap.entries()).map(([from, to]) => `${from}→${to}`).join(', '));
  }
-
- // Check if arm bones were mapped
- const mappedArmBones = Array.from(boneNameMap.values()).filter(b =>
- b.includes('Arm') || b.includes('Hand') || b.includes('Shoulder')
- );
- if (mappedArmBones.length > 0) {
- console.log(`✓ Arm bones mapped: ${mappedArmBones.join(', ')}`);
- } else {
- console.warn('⚠️ No arm bones were mapped! This may cause arm rigging issues.');
- }
- } else {
- console.error('❌ No tracks could be mapped! Animation may not work correctly.');
  }
 
  // Rename and scale Mixamo tracks, create a pose
  const props = {};
  anim.tracks.forEach( t => {
- t.name = t.name.replaceAll('mixamorig','');
  const ids = t.name.split('.');
  if ( ids[1] === 'position' ) {
  for(let i=0; i<t.values.length; i++ ) {
@@ -5818,13 +4720,6 @@ class TalkingHead {
  } else {
  const msg = 'Animation ' + url + ' (ndx=' + ndx + ') not found';
  console.error(msg);
- if (fbx && fbx.animations) {
- console.error(`FBX file loaded but has ${fbx.animations.length} animation(s), requested index ${ndx}`);
- } else if (fbx) {
- console.error('FBX file loaded but contains no animations');
- } else {
- console.error('FBX file failed to load or is invalid');
- }
  }
  }
  }
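
[Editor's note] 1.4.0 drops the pre-flight checks (extension test, HEAD request, content-type probe) around FBX loading, so a bad URL now surfaces as a rejected promise from playAnimation rather than as console guidance. Callers who relied on the old diagnostics can guard the call themselves; a hypothetical app-side wrapper (instance and path are examples, not package code):

    try {
      await head.playAnimation('/animations/Walking.fbx');
    } catch (err) {
      console.error('FBX load/play failed:', err);
    }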
@@ -5834,21 +4729,16 @@ class TalkingHead {
  */
  stopAnimation() {
 
- // Stop only the current FBX action, preserve mixer for morph targets
- if (this.currentFBXAction) {
- this.currentFBXAction.stop();
- this.currentFBXAction = null;
- console.log('FBX animation action stopped, mixer preserved for lip-sync');
- }
-
- // Only destroy mixer if no other animations are running
- // This allows morph target animations (lip-sync) to continue
- if (this.mixer && this.mixer._actions.length === 0) {
+ // Stop mixer
+ if (this.mixer) {
+ this.mixer.removeEventListener('finished', this._mixerHandler);
+ this.mixer.stopAllAction();
+ this.mixer.uncacheRoot(this.armature);
  this.mixer = null;
- console.log('Mixer destroyed as no actions remain');
+ this._mixerHandler = null;
  }
-
- // Unlock position when animation stops (only if it was locked)
+
+ // Unlock position if it was locked
  if (this.positionWasLocked) {
  this.unlockAvatarPosition();
  console.log('Position unlocked after FBX animation stopped');
@@ -5856,347 +4746,723 @@ class TalkingHead {
  console.log('Position was not locked, no unlock needed');
  }
 
- // Restart gesture
+ // Restart gesture
+ if ( this.gesture ) {
+ for( let [p,v] of Object.entries(this.gesture) ) {
+ v.t = this.animClock;
+ v.d = 1000;
+ if ( this.poseTarget.props.hasOwnProperty(p) ) {
+ this.poseTarget.props[p].copy(v);
+ this.poseTarget.props[p].t = this.animClock;
+ this.poseTarget.props[p].d = 1000;
+ }
+ }
+ }
+
+ // Restart pose animation
+ let anim = this.animQueue.find( x => x.template.name === 'pose' );
+ if ( anim ) {
+ anim.ts[0] = this.animClock;
+ }
+ this.setPoseFromTemplate( null );
+
+ }
+
+
+ /**
+ * Play RPM/Mixamo pose.
+ * @param {string|Object} url Pose name | URL to FBX
+ * @param {progressfn} [onprogress=null] Callback for progress
+ * @param {number} [dur=5] Duration of the pose in seconds
+ * @param {number} [ndx=0] Index of the clip
+ * @param {number} [scale=0.01] Position scale factor
+ */
+ async playPose(url, onprogress=null, dur=5, ndx=0, scale=0.01) {
+
+ if ( !this.armature ) return;
+
+ // Check if we already have the pose template ready
+ let pose = this.poseTemplates[url];
+ if ( !pose ) {
+ const item = this.animPoses.find( x => x.url === url+'-'+ndx );
+ if ( item ) {
+ pose = item.pose;
+ }
+ }
+
+ // If we have the template, use it, otherwise try to load it
+ if ( pose ) {
+
+ this.poseName = url;
+
+ if (this.mixer) {
+ this.mixer.removeEventListener('finished', this._mixerHandler);
+ this.mixer.stopAllAction();
+ this.mixer.uncacheRoot(this.armature);
+ this.mixer = null;
+ this._mixerHandler = null;
+ }
+ let anim = this.animQueue.find( x => x.template.name === 'pose' );
+ if ( anim ) {
+ anim.ts[0] = this.animClock + (dur * 1000) + 2000;
+ }
+ this.setPoseFromTemplate( pose );
+
+ } else {
+
+ // Load animation
+ const loader = new FBXLoader();
+
+ let fbx = await loader.loadAsync( url, onprogress );
+
+ if ( fbx && fbx.animations && fbx.animations[ndx] ) {
+ let anim = fbx.animations[ndx];
+
+ // Create a pose
+ const props = {};
+ anim.tracks.forEach( t => {
+
+ // Rename and scale Mixamo tracks
+ t.name = t.name.replaceAll('mixamorig','');
+ const ids = t.name.split('.');
+ if ( ids[1] === 'position' ) {
+ props[t.name] = new THREE.Vector3( t.values[0] * scale, t.values[1] * scale, t.values[2] * scale);
+ } else if ( ids[1] === 'quaternion' ) {
+ props[t.name] = new THREE.Quaternion( t.values[0], t.values[1], t.values[2], t.values[3] );
+ } else if ( ids[1] === 'rotation' ) {
+ props[ids[0]+".quaternion"] = new THREE.Quaternion().setFromEuler(new THREE.Euler( t.values[0], t.values[1], t.values[2],'XYZ' )).normalize();
+ }
+ });
+
+ // Add to pose
+ const newPose = { props: props };
+ if ( props['Hips.position'] ) {
+ if ( props['Hips.position'].y < 0.5 ) {
+ newPose.lying = true;
+ } else {
+ newPose.standing = true;
+ }
+ }
+ this.animPoses.push({
+ url: url+'-'+ndx,
+ pose: newPose
+ });
+
+ // Play
+ this.playPose(url, onprogress, dur, ndx, scale);
+
+ } else {
+ const msg = 'Pose ' + url + ' (ndx=' + ndx + ') not found';
+ console.error(msg);
+ }
+ }
+ }
+
+ /**
+ * Stop the pose. (Functionality is the same as in stopAnimation.)
+ */
+ stopPose() {
+ this.stopAnimation();
+ }
+
+ /**
+ * Play a gesture, which is either a hand gesture, an emoji animation or their
+ * combination.
+ * @param {string} name Gesture name
+ * @param {number} [dur=3] Duration of the gesture in seconds
+ * @param {boolean} [mirror=false] Mirror gesture
+ * @param {number} [ms=1000] Transition time in milliseconds
+ */
+ playGesture(name, dur=3, mirror=false, ms=1000) {
+
+ if ( !this.armature ) return;
+
+ // Hand gesture, if any
+ let g = this.gestureTemplates[name];
+ if ( g ) {
+
+ // New gesture always overrides the existing one
+ if ( this.gestureTimeout ) {
+ clearTimeout( this.gestureTimeout );
+ this.gestureTimeout = null;
+ }
+
+ // Stop talking hands animation
+ let ndx = this.animQueue.findIndex( y => y.template.name === "talkinghands" );
+ if ( ndx !== -1 ) {
+ this.animQueue[ndx].ts = this.animQueue[ndx].ts.map( x => 0 );
+ }
+
+ // Set gesture
+ this.gesture = this.propsToThreeObjects( g );
+ if ( mirror ) {
+ this.gesture = this.mirrorPose( this.gesture );
+ }
+ if ( name === "namaste" && this.avatar.body === 'M' ) {
+ // Work-a-round for male model so that the hands meet
+ this.gesture["RightArm.quaternion"].rotateTowards( new THREE.Quaternion(0,1,0,0), -0.25);
+ this.gesture["LeftArm.quaternion"].rotateTowards( new THREE.Quaternion(0,1,0,0), -0.25);
+ }
+
+ // Apply to target
+ for( let [p,val] of Object.entries(this.gesture) ) {
+ val.t = this.animClock;
+ val.d = ms;
+ if ( this.poseTarget.props.hasOwnProperty(p) ) {
+ this.poseTarget.props[p].copy(val);
+ this.poseTarget.props[p].t = this.animClock;
+ this.poseTarget.props[p].d = ms;
+ }
+ }
+
+ // Timer
+ if ( dur && Number.isFinite(dur) ) {
+ this.gestureTimeout = setTimeout( this.stopGesture.bind(this,ms), 1000 * dur);
+ }
+ }
+
+ // Animated emoji, if any
+ let em = this.animEmojis[name];
+ if ( em ) {
+
+ // Follow link
+ if ( em && em.link ) {
+ em = this.animEmojis[em.link];
+ }
+
+ if ( em ) {
+ // Look at the camera for 500 ms
+ this.lookAtCamera(500);
+
+ // Create animation and tag as gesture
+ const anim = this.animFactory( em );
+ anim.gesture = true;
+
+ // Rescale duration
+ if ( dur && Number.isFinite(dur) ) {
+ const first = anim.ts[0];
+ const last = anim.ts[ anim.ts.length -1 ];
+ const total = last - first;
+ const excess = (dur * 1000) - total;
+
+ // If longer, increase longer parts; if shorter, scale everything
+ if ( excess > 0 ) {
+ const dt = [];
+ for( let i=1; i<anim.ts.length; i++ ) dt.push( anim.ts[i] - anim.ts[i-1] );
+ const rescale = em.template?.rescale || dt.map( x => x / total );
+ const excess = dur * 1000 - total;
+ anim.ts = anim.ts.map( (x,i,arr) => {
+ return (i===0) ? first : (arr[i-1] + dt[i-1] + rescale[i-1] * excess);
+ });
+ } else {
+ const scale = (dur * 1000) / total;
+ anim.ts = anim.ts.map( x => first + scale * (x - first) );
+ }
+ }
+
+ this.animQueue.push( anim );
+ }
+ }
+
+ }
+
+ /**
+ * Stop the gesture.
+ * @param {number} [ms=1000] Transition time in milliseconds
+ */
+ stopGesture(ms=1000) {
+
+ // Stop gesture timer
+ if ( this.gestureTimeout ) {
+ clearTimeout( this.gestureTimeout );
+ this.gestureTimeout = null;
+ }
+
+ // Stop hand gesture, if any
  if ( this.gesture ) {
- for( let [p,v] of Object.entries(this.gesture) ) {
- v.t = this.animClock;
- v.d = 1000;
+ const gs = Object.entries(this.gesture);
+ this.gesture = null;
+ for( const [p,val] of gs ) {
  if ( this.poseTarget.props.hasOwnProperty(p) ) {
- this.poseTarget.props[p].copy(v);
+ this.poseTarget.props[p].copy( this.getPoseTemplateProp(p) );
  this.poseTarget.props[p].t = this.animClock;
- this.poseTarget.props[p].d = 1000;
+ this.poseTarget.props[p].d = ms;
  }
  }
  }
 
- // Restart pose animation
- let anim = this.animQueue.find( x => x.template.name === 'pose' );
- if ( anim ) {
- anim.ts[0] = this.animClock;
+ // Stop animated emoji gesture, if any
+ let i = this.animQueue.findIndex( y => y.gesture );
+ if ( i !== -1 ) {
+ this.animQueue.splice(i, 1);
  }
- this.setPoseFromTemplate( null );
 
  }
 
-
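
[Editor's note] The duration rescaling in playGesture (logic unchanged in 1.4.0, only relocated) is easiest to verify with concrete numbers. With keyframe times ts = [0, 500, 1500] and dur = 3 seconds:

    // total   = 1500 - 0    = 1500 ms
    // excess  = 3000 - 1500 = 1500 ms  (> 0, so segments are stretched)
    // dt      = [500, 1000]; rescale = dt / total = [1/3, 2/3]
    // ts[1]   = 0    + 500  + (1/3) * 1500 = 1000
    // ts[2]   = 1000 + 1000 + (2/3) * 1500 = 3000

Each segment grows in proportion to its share of the original span, and the animation ends exactly at dur*1000 ms. When dur*1000 < total, every timestamp is instead scaled linearly by (dur*1000)/total.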
  /**
- * Play RPM/Mixamo pose.
- * @param {string|Object} url Pose name | URL to FBX
- * @param {progressfn} [onprogress=null] Callback for progress
- * @param {number} [dur=5] Duration of the pose in seconds
- * @param {number} [ndx=0] Index of the clip
- * @param {number} [scale=0.01] Position scale factor
+ * Cyclic Coordinate Descent (CCD) Inverse Kinematic (IK) algorithm.
+ * Adapted from:
+ * https://github.com/mrdoob/three.js/blob/master/examples/jsm/animation/CCDIKSolver.js
+ * @param {Object} ik IK configuration object
+ * @param {Vector3} [target=null] Target coordinate, if null return to template
+ * @param {Boolean} [relative=false] If true, target is relative to root
+ * @param {numeric} [d=null] If set, apply in d milliseconds
  */
- async playPose(url, onprogress=null, dur=5, ndx=0, scale=0.01) {
-
- if ( !this.armature ) return;
+ ikSolve(ik, target=null, relative=false, d=null) {
+ const targetVec = new THREE.Vector3();
+ const effectorPos = new THREE.Vector3();
+ const effectorVec = new THREE.Vector3();
+ const linkPos = new THREE.Vector3();
+ const invLinkQ = new THREE.Quaternion();
+ const linkScale = new THREE.Vector3();
+ const axis = new THREE.Vector3();
+ const vector = new THREE.Vector3();
 
- // Check if we already have the pose template ready
- let pose = this.poseTemplates[url];
- if ( !pose ) {
- const item = this.animPoses.find( x => x.url === url+'-'+ndx );
- if ( item ) {
- pose = item.pose;
- }
+ // Reset IK setup positions and rotations
+ const root = this.ikMesh.getObjectByName(ik.root);
+ root.position.setFromMatrixPosition( this.armature.getObjectByName(ik.root).matrixWorld );
+ root.quaternion.setFromRotationMatrix( this.armature.getObjectByName(ik.root).matrixWorld );
+ if ( target && relative ) {
+ target.applyQuaternion(this.armature.quaternion).add( root.position );
  }
+ const effector = this.ikMesh.getObjectByName(ik.effector);
+ const links = ik.links;
+ links.forEach( x => {
+ x.bone = this.ikMesh.getObjectByName(x.link);
+ x.bone.quaternion.copy( this.getPoseTemplateProp(x.link+'.quaternion') );
+ });
+ root.updateMatrixWorld(true);
+ const iterations = ik.iterations || 10;
 
- // If we have the template, use it, otherwise try to load it
- if ( pose ) {
+ // Iterate
+ if ( target ) {
+ for ( let i = 0; i < iterations; i ++ ) {
+ let rotated = false;
+ for ( let j = 0, jl = links.length; j < jl; j++ ) {
+ const bone = links[j].bone;
+ bone.matrixWorld.decompose( linkPos, invLinkQ, linkScale );
+ invLinkQ.invert();
+ effectorPos.setFromMatrixPosition( effector.matrixWorld );
+ effectorVec.subVectors( effectorPos, linkPos );
+ effectorVec.applyQuaternion( invLinkQ );
+ effectorVec.normalize();
+ targetVec.subVectors( target, linkPos );
+ targetVec.applyQuaternion( invLinkQ );
+ targetVec.normalize();
+ let angle = targetVec.dot( effectorVec );
+ if ( angle > 1.0 ) {
+ angle = 1.0;
+ } else if ( angle < - 1.0 ) {
+ angle = - 1.0;
+ }
+ angle = Math.acos( angle );
+ if ( angle < 1e-5 ) continue;
+ if ( links[j].minAngle !== undefined && angle < links[j].minAngle ) {
+ angle = links[j].minAngle;
+ }
+ if ( links[j].maxAngle !== undefined && angle > links[j].maxAngle ) {
+ angle = links[j].maxAngle;
+ }
+ axis.crossVectors( effectorVec, targetVec );
+ axis.normalize();
+ q.setFromAxisAngle( axis, angle );
+ bone.quaternion.multiply( q );
 
- this.poseName = url;
+ // Constraints
+ bone.rotation.setFromVector3( vector.setFromEuler( bone.rotation ).clamp( new THREE.Vector3(
+ links[j].minx !== undefined ? links[j].minx : -Infinity,
+ links[j].miny !== undefined ? links[j].miny : -Infinity,
+ links[j].minz !== undefined ? links[j].minz : -Infinity
+ ), new THREE.Vector3(
+ links[j].maxx !== undefined ? links[j].maxx : Infinity,
+ links[j].maxy !== undefined ? links[j].maxy : Infinity,
+ links[j].maxz !== undefined ? links[j].maxz : Infinity
+ )) );
 
- this.mixer = null;
- let anim = this.animQueue.find( x => x.template.name === 'pose' );
- if ( anim ) {
- anim.ts[0] = this.animClock + (dur * 1000) + 2000;
+ bone.updateMatrixWorld( true );
+ rotated = true;
+ }
+ if ( !rotated ) break;
  }
- this.setPoseFromTemplate( pose );
-
- } else {
-
- // Load animation
- const loader = new FBXLoader();
-
- let fbx = await loader.loadAsync( url, onprogress );
+ }
 
- if ( fbx && fbx.animations && fbx.animations[ndx] ) {
- let anim = fbx.animations[ndx];
+ // Apply
+ if ( d ) {
+ links.forEach( x => {
+ this.poseTarget.props[x.link+".quaternion"].copy( x.bone.quaternion );
+ this.poseTarget.props[x.link+".quaternion"].t = this.animClock;
+ this.poseTarget.props[x.link+".quaternion"].d = d;
+ });
+ }
+ }
 
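[Editor's note] ikSolve mutates the quaternions of the bones named in ik.links and, when d is given, eases them into poseTarget over d milliseconds. It also relies on a module-scope scratch quaternion q declared elsewhere in this file. A sketch of driving it; the chain configuration is hypothetical and the bone names must exist in this.ikMesh (Mixamo-style names assumed, head standing for a TalkingHead instance):

    const ikConfig = {
      root: 'Hips',
      effector: 'RightHand',
      links: [ { link: 'RightForeArm' }, { link: 'RightArm', minAngle: 0.1 } ],
      iterations: 20
    };
    // Reach toward a point ~30 cm up and 40 cm forward of the hips,
    // easing the solved rotations in over 800 ms:
    head.ikSolve(ikConfig, new THREE.Vector3(0, 0.3, 0.4), true, 800);
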
- // Create a pose
- const props = {};
- anim.tracks.forEach( t => {
+ /**
+ * Initialize FBX animation loader
+ */
+ async initializeFBXAnimationLoader() {
+ try {
+ // Dynamic import to avoid loading issues
+ const { FBXAnimationLoader } = await import('./fbxAnimationLoader.js');
+ this.fbxAnimationLoader = new FBXAnimationLoader(this.armature);
+ console.log('FBX Animation Loader initialized');
+ } catch (error) {
+ console.warn('FBX Animation Loader not available:', error);
+ this.fbxAnimationLoader = null;
+ }
+ }
 
- // Rename and scale Mixamo tracks
- t.name = t.name.replaceAll('mixamorig','');
- const ids = t.name.split('.');
- if ( ids[1] === 'position' ) {
- props[t.name] = new THREE.Vector3( t.values[0] * scale, t.values[1] * scale, t.values[2] * scale);
- } else if ( ids[1] === 'quaternion' ) {
- props[t.name] = new THREE.Quaternion( t.values[0], t.values[1], t.values[2], t.values[3] );
- } else if ( ids[1] === 'rotation' ) {
- props[ids[0]+".quaternion"] = new THREE.Quaternion().setFromEuler(new THREE.Euler( t.values[0], t.values[1], t.values[2],'XYZ' )).normalize();
- }
- });
+ /**
+ * Set body movement type.
+ * @param {string} movement Movement type (idle, walking, prancing, gesturing, dancing, excited).
+ */
+ setBodyMovement(movement) {
+ this.bodyMovement = movement;
+
+ // Only set avatar property if avatar exists
+ if (this.avatar) {
+ this.avatar.bodyMovement = movement;
+ }
+
+ console.log('Body movement set to:', movement);
+
+ // Respect the current showFullAvatar setting instead of forcing it to true
+ // Only unlock position when returning to idle
+ if (movement === 'idle') {
+ // Unlock position when returning to idle
+ this.unlockAvatarPosition();
+ }
+ // Note: We no longer force showFullAvatar to true for body movements
+ // The avatar will use whatever showFullAvatar value was set by the user
+
+ // Apply body movement animation
+ this.applyBodyMovementAnimation();
+ }
 
- // Add to pose
- const newPose = { props: props };
- if ( props['Hips.position'] ) {
- if ( props['Hips.position'].y < 0.5 ) {
- newPose.lying = true;
- } else {
- newPose.standing = true;
- }
+ /**
+ * Apply body movement animation based on current movement type.
+ */
+ async applyBodyMovementAnimation() {
+ // Check if avatar is ready
+ if (!this.armature || !this.animQueue) {
+ console.log('Avatar not ready for body movement animations');
+ return;
+ }
+
+ console.log('Avatar is running:', this.isRunning);
+ console.log('Animation queue exists:', !!this.animQueue);
+
+ // Remove existing body movement animations
+ const beforeLength = this.animQueue.length;
+ this.animQueue = this.animQueue.filter(anim => !anim.template.name.startsWith('bodyMovement'));
+ const afterLength = this.animQueue.length;
+ console.log(`Filtered animation queue: ${beforeLength} -> ${afterLength} animations`);
+
+ if (this.bodyMovement === 'idle') {
+ // Stop FBX animations if any
+ if (this.fbxAnimationLoader) {
+ this.fbxAnimationLoader.stopCurrentAnimation();
+ }
+ return; // No body movement for idle
+ }
+
+ // Try to use FBX animations first
+ if (this.fbxAnimationLoader) {
+ try {
+ await this.fbxAnimationLoader.playGestureAnimation(this.bodyMovement, this.movementIntensity);
+ console.log('Applied FBX body movement animation:', this.bodyMovement);
+ return; // Successfully applied FBX animation
+ } catch (error) {
+ console.warn('FBX animation failed, falling back to code animation:', error);
+ }
+ }
+
+ // Fallback to code-based animations
+ const movementAnim = this.createBodyMovementAnimation(this.bodyMovement);
+ console.log('Created movement animation:', movementAnim);
+ if (movementAnim) {
+ try {
+ // Use animFactory to create proper animation object
+ const animObj = this.animFactory(movementAnim, true); // true for looping
+
+ // Validate the animation object before adding
+ if (animObj && animObj.ts && animObj.ts.length > 0) {
+ this.animQueue.push(animObj);
+ console.log('Applied code-based body movement animation:', this.bodyMovement);
+ console.log('Animation queue length:', this.animQueue.length);
+ console.log('Animation object:', animObj);
+ } else {
+ console.error('Invalid animation object created for:', this.bodyMovement);
+ console.error('Animation object:', animObj);
  }
- this.animPoses.push({
- url: url+'-'+ndx,
- pose: newPose
- });
-
- // Play
- this.playPose(url, onprogress, dur, ndx, scale);
-
- } else {
- const msg = 'Pose ' + url + ' (ndx=' + ndx + ') not found';
- console.error(msg);
+ } catch (error) {
+ console.error('Error creating body movement animation:', error);
  }
  }
  }
 
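[Editor's note] Minimal usage of the body-movement API added in this release, where head stands for a TalkingHead instance. setBodyMovement() prefers FBX clips via the dynamically imported FBXAnimationLoader and falls back to the procedural bodyRotate* templates defined below:

    head.setMovementIntensity(0.7);   // clamped to [0, 1]
    head.setBodyMovement('dancing');  // randomly picks dancing/dancing2/dancing3
    // ...later, return to rest (also restores a locked avatar position):
    head.setBodyMovement('idle');
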
  /**
- * Stop the pose. (Functionality is the same as in stopAnimation.)
- */
- stopPose() {
- this.stopAnimation();
+ * Lock avatar position to prevent movement during animations.
+ */
+ lockAvatarPosition() {
+ if (!this.armature) {
+ console.warn('Cannot lock position: armature not available');
+ return;
+ }
+
+ // Store the original position if not already stored
+ if (!this.originalPosition) {
+ this.originalPosition = {
+ x: this.armature.position.x,
+ y: this.armature.position.y,
+ z: this.armature.position.z
+ };
+ console.log('Original position stored:', this.originalPosition);
+ }
+
+ // Lock the avatar at its CURRENT position (don't move it)
+ this.lockedPosition = {
+ x: this.armature.position.x,
+ y: this.armature.position.y,
+ z: this.armature.position.z
+ };
+
+ console.log('Avatar position locked at current position:', this.lockedPosition);
  }
 
  /**
- * Play a gesture, which is either a hand gesture, an emoji animation or their
- * combination.
- * @param {string} name Gesture name
- * @param {number} [dur=3] Duration of the gesture in seconds
- * @param {boolean} [mirror=false] Mirror gesture
- * @param {number} [ms=1000] Transition time in milliseconds
- */
- playGesture(name, dur=3, mirror=false, ms=1000) {
-
- if ( !this.armature ) return;
-
- // Hand gesture, if any
- let g = this.gestureTemplates[name];
- if ( g ) {
-
- // New gesture always overrides the existing one
- if ( this.gestureTimeout ) {
- clearTimeout( this.gestureTimeout );
- this.gestureTimeout = null;
- }
-
- // Stop talking hands animation
- let ndx = this.animQueue.findIndex( y => y.template.name === "talkinghands" );
- if ( ndx !== -1 ) {
- this.animQueue[ndx].ts = this.animQueue[ndx].ts.map( x => 0 );
- }
+ * Unlock avatar position and restore original position.
+ */
+ unlockAvatarPosition() {
+ if (this.armature && this.originalPosition) {
+ // Restore avatar to its original position before locking
+ this.armature.position.set(
+ this.originalPosition.x,
+ this.originalPosition.y,
+ this.originalPosition.z
+ );
+ console.log('Avatar position restored to original:', this.originalPosition);
+ } else if (this.armature) {
+ // Fallback: reset to center if no original position was stored
+ this.armature.position.set(0, 0, 0);
+ console.log('Avatar position reset to center (0,0,0)');
+ }
+ this.lockedPosition = null;
+ this.originalPosition = null; // Clear original position after unlock
+ console.log('Avatar position unlocked');
+ }
 
- // Set gesture
- this.gesture = this.propsToThreeObjects( g );
- if ( mirror ) {
- this.gesture = this.mirrorPose( this.gesture );
- }
- if ( name === "namaste" && this.avatar.body === 'M' ) {
- // Work-a-round for male model so that the hands meet
- this.gesture["RightArm.quaternion"].rotateTowards( new THREE.Quaternion(0,1,0,0), -0.25);
- this.gesture["LeftArm.quaternion"].rotateTowards( new THREE.Quaternion(0,1,0,0), -0.25);
- }
+ /**
+ * Ensure avatar stays at locked position.
+ */
+ maintainLockedPosition() {
+ if (this.lockedPosition && this.armature) {
+ // Enforce the locked position - keep avatar exactly where it was locked
+ // This prevents FBX animations from moving the avatar
+ this.armature.position.set(
+ this.lockedPosition.x,
+ this.lockedPosition.y,
+ this.lockedPosition.z
+ );
+ }
+ }
 
- // Apply to target
- for( let [p,val] of Object.entries(this.gesture) ) {
- val.t = this.animClock;
- val.d = ms;
- if ( this.poseTarget.props.hasOwnProperty(p) ) {
- this.poseTarget.props[p].copy(val);
- this.poseTarget.props[p].t = this.animClock;
- this.poseTarget.props[p].d = ms;
+ /**
+ * Create body movement animation.
+ * @param {string} movementType Movement type.
+ * @returns {Object} Animation object.
+ */
+ createBodyMovementAnimation(movementType) {
+ const intensity = this.movementIntensity || 0.5;
+
+ const movementAnimations = {
+ walking: {
+ name: 'bodyMovement_walking',
+ delay: [500, 2000],
+ dt: [800, 1200],
+ vs: {
+ bodyRotateY: [-0.1 * intensity, 0.1 * intensity, 0],
+ bodyRotateZ: [-0.05 * intensity, 0.05 * intensity, 0],
+ bodyRotateX: [-0.02 * intensity, 0.02 * intensity, 0]
+ }
+ },
+ prancing: {
+ name: 'bodyMovement_prancing',
+ delay: [300, 1000],
+ dt: [400, 800],
+ vs: {
+ bodyRotateY: [-0.15 * intensity, 0.15 * intensity, 0],
+ bodyRotateZ: [-0.08 * intensity, 0.08 * intensity, 0],
+ bodyRotateX: [-0.05 * intensity, 0.05 * intensity, 0]
+ }
+ },
+ gesturing: {
+ name: 'bodyMovement_gesturing',
+ delay: [400, 1500],
+ dt: [600, 1000],
+ vs: {
+ bodyRotateY: [-0.08 * intensity, 0.08 * intensity, 0],
+ bodyRotateZ: [-0.03 * intensity, 0.03 * intensity, 0]
+ }
+ },
+ dancing: {
+ name: 'bodyMovement_dancing',
+ delay: [200, 600],
+ dt: [400, 800],
+ vs: {
+ bodyRotateY: [-0.25 * intensity, 0.25 * intensity, 0],
+ bodyRotateZ: [-0.15 * intensity, 0.15 * intensity, 0],
+ bodyRotateX: [-0.1 * intensity, 0.1 * intensity, 0]
+ }
+ },
+ dancing2: {
+ name: 'bodyMovement_dancing2',
+ delay: [150, 500],
+ dt: [300, 700],
+ vs: {
+ bodyRotateY: [-0.3 * intensity, 0.3 * intensity, 0],
+ bodyRotateZ: [-0.2 * intensity, 0.2 * intensity, 0],
+ bodyRotateX: [-0.12 * intensity, 0.12 * intensity, 0]
+ }
+ },
+ dancing3: {
+ name: 'bodyMovement_dancing3',
+ delay: [100, 400],
+ dt: [200, 600],
+ vs: {
+ bodyRotateY: [-0.35 * intensity, 0.35 * intensity, 0],
+ bodyRotateZ: [-0.25 * intensity, 0.25 * intensity, 0],
+ bodyRotateX: [-0.15 * intensity, 0.15 * intensity, 0]
+ }
+ },
+ excited: {
+ name: 'bodyMovement_excited',
+ delay: [200, 600],
+ dt: [300, 700],
+ vs: {
+ bodyRotateY: [-0.12 * intensity, 0.12 * intensity, 0],
+ bodyRotateZ: [-0.06 * intensity, 0.06 * intensity, 0],
+ bodyRotateX: [-0.04 * intensity, 0.04 * intensity, 0]
+ }
+ },
+ happy: {
+ name: 'bodyMovement_happy',
+ delay: [300, 800],
+ dt: [500, 1000],
+ vs: {
+ bodyRotateY: [-0.08 * intensity, 0.08 * intensity, 0],
+ bodyRotateZ: [-0.04 * intensity, 0.04 * intensity, 0],
+ bodyRotateX: [-0.02 * intensity, 0.02 * intensity, 0]
+ }
+ },
+ surprised: {
+ name: 'bodyMovement_surprised',
+ delay: [100, 300],
+ dt: [200, 500],
+ vs: {
+ bodyRotateY: [-0.05 * intensity, 0.05 * intensity, 0],
+ bodyRotateZ: [-0.03 * intensity, 0.03 * intensity, 0],
+ bodyRotateX: [-0.01 * intensity, 0.01 * intensity, 0]
+ }
+ },
+ thinking: {
+ name: 'bodyMovement_thinking',
+ delay: [800, 2000],
+ dt: [1000, 1500],
+ vs: {
+ bodyRotateY: [-0.06 * intensity, 0.06 * intensity, 0],
+ bodyRotateZ: [-0.03 * intensity, 0.03 * intensity, 0],
+ bodyRotateX: [-0.02 * intensity, 0.02 * intensity, 0]
+ }
+ },
+ nodding: {
+ name: 'bodyMovement_nodding',
+ delay: [400, 800],
+ dt: [300, 600],
+ vs: {
+ bodyRotateX: [-0.1 * intensity, 0.1 * intensity, 0],
+ bodyRotateY: [-0.02 * intensity, 0.02 * intensity, 0]
+ }
+ },
+ shaking: {
+ name: 'bodyMovement_shaking',
+ delay: [200, 400],
+ dt: [150, 300],
+ vs: {
+ bodyRotateY: [-0.15 * intensity, 0.15 * intensity, 0],
+ bodyRotateZ: [-0.05 * intensity, 0.05 * intensity, 0]
  }
- }
-
- // Timer
- if ( dur && Number.isFinite(dur) ) {
- this.gestureTimeout = setTimeout( this.stopGesture.bind(this,ms), 1000 * dur);
- }
- }
-
- // Animated emoji, if any
- let em = this.animEmojis[name];
- if ( em ) {
-
- // Follow link
- if ( em && em.link ) {
- em = this.animEmojis[em.link];
- }
-
- if ( em ) {
- // Look at the camera for 500 ms
- this.lookAtCamera(500);
-
- // Create animation and tag as gesture
- const anim = this.animFactory( em );
- anim.gesture = true;
-
- // Rescale duration
- if ( dur && Number.isFinite(dur) ) {
- const first = anim.ts[0];
- const last = anim.ts[ anim.ts.length -1 ];
- const total = last - first;
- const excess = (dur * 1000) - total;
-
- // If longer, increase longer parts; if shorter, scale everything
- if ( excess > 0 ) {
- const dt = [];
- for( let i=1; i<anim.ts.length; i++ ) dt.push( anim.ts[i] - anim.ts[i-1] );
- const rescale = em.template?.rescale || dt.map( x => x / total );
- const excess = dur * 1000 - total;
- anim.ts = anim.ts.map( (x,i,arr) => {
- return (i===0) ? first : (arr[i-1] + dt[i-1] + rescale[i-1] * excess);
- });
- } else {
- const scale = (dur * 1000) / total;
- anim.ts = anim.ts.map( x => first + scale * (x - first) );
- }
+ },
+ celebration: {
+ name: 'bodyMovement_celebration',
+ delay: [100, 300],
+ dt: [200, 500],
+ vs: {
+ bodyRotateY: [-0.2 * intensity, 0.2 * intensity, 0],
+ bodyRotateZ: [-0.1 * intensity, 0.1 * intensity, 0],
+ bodyRotateX: [-0.08 * intensity, 0.08 * intensity, 0]
  }
-
- this.animQueue.push( anim );
- }
- }
-
- }
-
- /**
- * Stop the gesture.
- * @param {number} [ms=1000] Transition time in milliseconds
- */
- stopGesture(ms=1000) {
-
- // Stop gesture timer
- if ( this.gestureTimeout ) {
- clearTimeout( this.gestureTimeout );
- this.gestureTimeout = null;
- }
-
- // Stop hand gesture, if any
- if ( this.gesture ) {
- const gs = Object.entries(this.gesture);
- this.gesture = null;
- for( const [p,val] of gs ) {
- if ( this.poseTarget.props.hasOwnProperty(p) ) {
- this.poseTarget.props[p].copy( this.getPoseTemplateProp(p) );
- this.poseTarget.props[p].t = this.animClock;
- this.poseTarget.props[p].d = ms;
+ },
+ energetic: {
+ name: 'bodyMovement_energetic',
+ delay: [150, 400],
+ dt: [250, 500],
+ vs: {
+ bodyRotateY: [-0.18 * intensity, 0.18 * intensity, 0],
+ bodyRotateZ: [-0.12 * intensity, 0.12 * intensity, 0],
+ bodyRotateX: [-0.08 * intensity, 0.08 * intensity, 0]
+ }
+ },
+ swaying: {
+ name: 'bodyMovement_swaying',
+ delay: [600, 1200],
+ dt: [800, 1000],
+ vs: {
+ bodyRotateY: [-0.1 * intensity, 0.1 * intensity, 0],
+ bodyRotateZ: [-0.05 * intensity, 0.05 * intensity, 0]
+ }
+ },
+ bouncing: {
+ name: 'bodyMovement_bouncing',
+ delay: [300, 600],
+ dt: [400, 700],
+ vs: {
+ bodyRotateY: [-0.05 * intensity, 0.05 * intensity, 0]
  }
  }
+ };
+
+ // Handle dance variations
+ if (movementType === 'dancing') {
+ const danceVariations = ['dancing', 'dancing2', 'dancing3'];
+ const randomDance = danceVariations[Math.floor(Math.random() * danceVariations.length)];
+ return movementAnimations[randomDance] || movementAnimations['dancing'];
  }
-
- // Stop animated emoji gesture, if any
- let i = this.animQueue.findIndex( y => y.gesture );
- if ( i !== -1 ) {
- this.animQueue.splice(i, 1);
- }
-
+
+ return movementAnimations[movementType] || null;
  }
 
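[Editor's note] The movement templates above use the same shape as the mood animations (a delay range in milliseconds, per-segment durations dt, and value tracks vs), so animFactory(template, true) turns one into a looping queue entry. At the default intensity 0.5, 'walking' resolves to:

    // delay: picked from [500, 2000] ms between loops
    // dt:    two segments, 800 ms and 1200 ms
    // vs.bodyRotateY: [-0.05, 0.05, 0]   (0.1 * 0.5 each way, then back)

so the torso oscillates gently around its rest pose instead of drifting.
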
  /**
- * Cyclic Coordinate Descent (CCD) Inverse Kinematic (IK) algorithm.
- * Adapted from:
- * https://github.com/mrdoob/three.js/blob/master/examples/jsm/animation/CCDIKSolver.js
- * @param {Object} ik IK configuration object
- * @param {Vector3} [target=null] Target coordinate, if null return to template
- * @param {Boolean} [relative=false] If true, target is relative to root
- * @param {numeric} [d=null] If set, apply in d milliseconds
- */
- ikSolve(ik, target=null, relative=false, d=null) {
- const targetVec = new THREE.Vector3();
- const effectorPos = new THREE.Vector3();
- const effectorVec = new THREE.Vector3();
- const linkPos = new THREE.Vector3();
- const invLinkQ = new THREE.Quaternion();
- const linkScale = new THREE.Vector3();
- const axis = new THREE.Vector3();
- const vector = new THREE.Vector3();
-
- // Reset IK setup positions and rotations
- const root = this.ikMesh.getObjectByName(ik.root);
- root.position.setFromMatrixPosition( this.armature.getObjectByName(ik.root).matrixWorld );
- root.quaternion.setFromRotationMatrix( this.armature.getObjectByName(ik.root).matrixWorld );
- if ( target && relative ) {
- target.applyQuaternion(this.armature.quaternion).add( root.position );
+ * Set movement intensity.
+ * @param {number} intensity Movement intensity (0-1).
+ */
+ setMovementIntensity(intensity) {
+ this.movementIntensity = Math.max(0, Math.min(1, intensity));
+
+ // Only set avatar property if avatar exists
+ if (this.avatar) {
+ this.avatar.movementIntensity = this.movementIntensity;
  }
- const effector = this.ikMesh.getObjectByName(ik.effector);
- const links = ik.links;
- links.forEach( x => {
- x.bone = this.ikMesh.getObjectByName(x.link);
- x.bone.quaternion.copy( this.getPoseTemplateProp(x.link+'.quaternion') );
- });
- root.updateMatrixWorld(true);
- const iterations = ik.iterations || 10;
-
- // Iterate
- if ( target ) {
- for ( let i = 0; i < iterations; i ++ ) {
- let rotated = false;
- for ( let j = 0, jl = links.length; j < jl; j++ ) {
- const bone = links[j].bone;
- bone.matrixWorld.decompose( linkPos, invLinkQ, linkScale );
- invLinkQ.invert();
- effectorPos.setFromMatrixPosition( effector.matrixWorld );
- effectorVec.subVectors( effectorPos, linkPos );
- effectorVec.applyQuaternion( invLinkQ );
- effectorVec.normalize();
- targetVec.subVectors( target, linkPos );
- targetVec.applyQuaternion( invLinkQ );
- targetVec.normalize();
- let angle = targetVec.dot( effectorVec );
- if ( angle > 1.0 ) {
- angle = 1.0;
- } else if ( angle < - 1.0 ) {
- angle = - 1.0;
- }
- angle = Math.acos( angle );
- if ( angle < 1e-5 ) continue;
- if ( links[j].minAngle !== undefined && angle < links[j].minAngle ) {
- angle = links[j].minAngle;
- }
- if ( links[j].maxAngle !== undefined && angle > links[j].maxAngle ) {
- angle = links[j].maxAngle;
- }
- axis.crossVectors( effectorVec, targetVec );
- axis.normalize();
- q.setFromAxisAngle( axis, angle );
- bone.quaternion.multiply( q );
-
- // Constraints
- bone.rotation.setFromVector3( vector.setFromEuler( bone.rotation ).clamp( new THREE.Vector3(
- links[j].minx !== undefined ? links[j].minx : -Infinity,
- links[j].miny !== undefined ? links[j].miny : -Infinity,
- links[j].minz !== undefined ? links[j].minz : -Infinity
- ), new THREE.Vector3(
- links[j].maxx !== undefined ? links[j].maxx : Infinity,
- links[j].maxy !== undefined ? links[j].maxy : Infinity,
- links[j].maxz !== undefined ? links[j].maxz : Infinity
- )) );
-
- bone.updateMatrixWorld( true );
- rotated = true;
- }
- if ( !rotated ) break;
- }
+
+ console.log('Movement intensity set to:', this.movementIntensity);
+
+ // Update FBX animation intensity if available
+ if (this.fbxAnimationLoader) {
+ this.fbxAnimationLoader.setIntensity(this.movementIntensity);
  }
-
- // Apply
- if ( d ) {
- links.forEach( x => {
- this.poseTarget.props[x.link+".quaternion"].copy( x.bone.quaternion );
- this.poseTarget.props[x.link+".quaternion"].t = this.animClock;
- this.poseTarget.props[x.link+".quaternion"].d = d;
- });
+
+ // Reapply body movement animation with new intensity
+ if (this.bodyMovement && this.bodyMovement !== 'idle') {
+ this.applyBodyMovementAnimation();
  }
  }
 
@@ -6205,11 +5471,36 @@ class TalkingHead {
  */
  dispose() {
 
- // Stop animation first to prevent render calls on disposed renderer
- this.isRunning = false;
+ // Stop animation, clear speech queue, stop stream
  this.stop();
  this.stopSpeaking();
  this.streamStop();
+ this.stopAnimation();
+
+ // Cancel animation frame to prevent potential memory leak
+ if (this._raf !== null) {
+ cancelAnimationFrame(this._raf);
+ this._raf = null;
+ }
+
+ // Stop & disconnect buffer sources
+ ['audioSpeechSource', 'audioBackgroundSource'].forEach(key => {
+ const node = this[key];
+ if (node) {
+ try { node.stop?.() } catch(error) {};
+ node.disconnect();
+ node.onended = null; // remove closure references
+ }
+ });
+
+ // Disconnect gain nodes & analyser
+ ['audioBackgroundGainNode', 'audioSpeechGainNode',
+ 'audioStreamGainNode', 'audioAnalyzerNode'].forEach(key => {
+ const node = this[key];
+ if (node) {
+ node.disconnect();
+ }
+ });
 
  // Dispose Three.JS objects
  if ( this.isAvatarOnly ) {
@@ -6222,18 +5513,34 @@ class TalkingHead {
  } else {
  this.clearThree(this.scene);
  this.resizeobserver.disconnect();
-
- // Dispose WebGL renderer
+ this.resizeobserver = null;
+
  if ( this.renderer ) {
  this.renderer.dispose();
- if ( this.renderer.domElement && this.renderer.domElement.parentNode ) {
- this.renderer.domElement.parentNode.removeChild(this.renderer.domElement);
- }
+ const gl = this.renderer.getContext();
+ gl.getExtension('WEBGL_lose_context')?.loseContext();
+ this.renderer.domElement?.remove();
+ this.renderer.domElement = null;
  this.renderer = null;
  }
+
+ if ( this.controls ) {
+ this.controls.dispose();
+ this.controls = null;
+ }
  }
+
  this.clearThree( this.ikMesh );
  this.dynamicbones.dispose();
+
+ // Clean up FBX animation loader
+ if (this.fbxAnimationLoader) {
+ this.fbxAnimationLoader.stopCurrentAnimation();
+ this.fbxAnimationLoader = null;
+ }
+
+ // DOM
+ this.nodeAvatar = null;
 
  }
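
[Editor's note] The reworked dispose() is more thorough than 1.3.8: it cancels the pending animation frame, stops and disconnects every WebAudio node it created, and explicitly loses the WebGL context. Browsers cap the number of live WebGL contexts per page, so WEBGL_lose_context frees the slot immediately instead of waiting for garbage collection. The renderer teardown in isolation (a sketch, assuming a THREE.WebGLRenderer):

    renderer.dispose();
    renderer.getContext().getExtension('WEBGL_lose_context')?.loseContext();
    renderer.domElement.remove();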