@sage-rsc/talking-head-react 1.3.7 → 1.4.0

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
@@ -349,8 +349,7 @@ class TalkingHead {
  this.posePropNames = [...names];

  // Use "side" as the first pose, weight on left leg
- // Note: This will be overridden by gender-specific selection when avatar loads
- this.poseName = "side"; // First pose (default, will be gender-adjusted on avatar load)
+ this.poseName = "side"; // First pose
  this.poseWeightOnLeft = true; // Initial weight on left leg
  this.gesture = null; // Values that override pose properties
  this.poseCurrentTemplate = this.poseTemplates[this.poseName];
@@ -375,7 +374,6 @@ class TalkingHead {
  // 1. State (idle, speaking, listening)
  // 2. Mood (moodX, moodY)
  // 3. Pose (poseX, poseY)
- // 4. Body Movement (walking, prancing, gesturing, dancing, excited)
  // 5. View (full, upper, head)
  // 6. Body form ('M','F')
  // 7. Alt (sequence of objects with propabilities p. If p is not
@@ -453,18 +451,16 @@ class TalkingHead {
  ]
  },
  'happy' : {
- baseline: { mouthSmile: 0.2, eyesLookDown: 0 },
+ baseline: { mouthSmile: 0.2, eyesLookDown: 0.1 },
  speech: { deltaRate: 0, deltaPitch: 0.1, deltaVolume: 0 },
  anims: [
  { name: 'breathing', delay: 1500, dt: [ 1200,500,1000 ], vs: { chestInhale: [0.5,0.5,0] } },
  { name: 'pose',
  idle: {
  alt: [
- { p: 0.6, delay: [5000,30000], vs: { pose: ['side'] },
- 'M': { delay: [5000,30000], vs: { pose: ['wide'] } }
- },
+ { p: 0.6, delay: [5000,30000], vs: { pose: ['side'] } },
  { p: 0.2, delay: [5000,30000], vs: { pose: ['hip'] },
- 'M': { delay: [5000,30000], vs: { pose: ['wide'] } }
+ 'M': { delay: [5000,30000], vs: { pose: ['side'] } }
  },
  { p: 0.1, delay: [5000,30000], vs: { pose: ['straight'] } },
  { delay: [5000,10000], vs: { pose: ['wide'] } },
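Note on the template above: entries in an `alt` list carry a probability `p`, and the final entry without `p` serves as the remainder. A minimal sketch of how such a list could be sampled (illustrative only; names and logic are assumptions, not the package's internal implementation):

    // Pick one alternative; the entry without `p` absorbs the leftover probability.
    function pickAlt(alts) {
      let r = Math.random();
      for (const a of alts) {
        if (a.p === undefined) return a; // fallback entry
        if (r < a.p) return a;           // chosen with probability p
        r -= a.p;
      }
      return alts[alts.length - 1];
    }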
@@ -473,12 +469,8 @@ class TalkingHead {
  },
  speaking: {
  alt: [
- { p: 0.4, delay: [5000,30000], vs: { pose: ['side'] },
- 'M': { delay: [5000,30000], vs: { pose: ['wide'] } }
- },
- { p: 0.4, delay: [5000,30000], vs: { pose: ['straight'] },
- 'M': { delay: [5000,30000], vs: { pose: ['wide'] } }
- },
+ { p: 0.4, delay: [5000,30000], vs: { pose: ['side'] } },
+ { p: 0.4, delay: [5000,30000], vs: { pose: ['straight'] } },
  { delay: [5000,20000], vs: { pose: ['hip'] },
  'M': { delay: [5000,30000], vs: { pose: ['wide'] } }
  },
@@ -773,6 +765,10 @@ class TalkingHead {
  this.animClips = [];
  this.animPoses = [];

+ // Animate
+ this.animate = this.animate.bind(this);
+ this._raf = null;
+
  // Clock
  this.animFrameDur = 1000/ this.opt.modelFPS;
  this.animClock = 0;
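Binding `animate` once and keeping the `requestAnimationFrame` handle in `this._raf` gives the render loop a stable callback reference that can be cancelled later. A minimal sketch of the pattern (the `stop` body is an assumption for illustration, not the package's actual method):

    class Loop {
      constructor() {
        this.animate = this.animate.bind(this); // bind once, reuse the same reference
        this._raf = null;
      }
      animate(t) {
        this._raf = requestAnimationFrame(this.animate); // reschedule every frame
        // ... per-frame work ...
      }
      stop() {
        if (this._raf !== null) {
          cancelAnimationFrame(this._raf); // possible only because the handle was stored
          this._raf = null;
        }
      }
    }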
@@ -780,12 +776,9 @@ class TalkingHead {
  this.animTimeLast = 0;
  this.easing = this.sigmoidFactory(5); // Ease in and out

- // Lip-sync extensions, import statically
+ // Lip-sync extensions, import dynamically
  this.lipsync = {};
- this.opt.lipsyncModules.forEach( x => {
- // Load synchronously using statically imported modules
- this.lipsyncGetProcessor(x);
- });
+ this.opt.lipsyncModules.forEach( x => this.lipsyncGetProcessor(x) );
  this.visemeNames = [
  'aa', 'E', 'I', 'O', 'U', 'PP', 'SS', 'TH', 'DD', 'FF', 'kk',
  'nn', 'RR', 'CH', 'sil'
@@ -960,9 +953,6 @@ class TalkingHead {
  this.audioAnalyzerNode.smoothingTimeConstant = 0.1;
  this.audioAnalyzerNode.minDecibels = -70;
  this.audioAnalyzerNode.maxDecibels = -10;
-
- // Audio analyzer for precise lip-sync
- this.audioAnalyzer = new AudioAnalyzer(this.audioCtx);
  this.audioReverbNode = this.audioCtx.createConvolver();

  // Connect nodes
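With the separate AudioAnalyzer removed, speech still flows through the AnalyserNode and ConvolverNode created here. A minimal sketch of that kind of Web Audio chain (the wiring is illustrative, not the package's exact node graph):

    const ctx = new AudioContext();
    const analyser = ctx.createAnalyser();   // samples speech energy for lip-sync
    analyser.smoothingTimeConstant = 0.1;
    analyser.minDecibels = -70;
    analyser.maxDecibels = -10;
    const reverb = ctx.createConvolver();    // optional room ambience
    analyser.connect(reverb);
    reverb.connect(ctx.destination);
    // an AudioBufferSourceNode would then connect into `analyser`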
@@ -1122,20 +1112,24 @@ class TalkingHead {
  * Clear 3D object.
  * @param {Object} obj Object
  */
- clearThree(obj){
- while( obj.children.length ){
+ clearThree(obj) {
+ while (obj.children.length) {
  this.clearThree(obj.children[0]);
  obj.remove(obj.children[0]);
  }
- if ( obj.geometry ) obj.geometry.dispose();

- if ( obj.material ) {
- Object.keys(obj.material).forEach( x => {
- if ( obj.material[x] && obj.material[x] !== null && typeof obj.material[x].dispose === 'function' ) {
- obj.material[x].dispose();
- }
- });
- obj.material.dispose();
+ if (obj.geometry) obj.geometry.dispose();
+
+ if (obj.material) {
+ if (Array.isArray(obj.material)) {
+ obj.material.forEach(m => {
+ if (m.map) m.map.dispose();
+ m.dispose();
+ });
+ } else {
+ if (obj.material.map) obj.material.map.dispose();
+ obj.material.dispose();
+ }
  }
  }

@@ -1244,19 +1238,27 @@ class TalkingHead {
  this.stop();
  this.avatar = avatar;

- // Initialize body movement properties
+ // Initialize custom properties
  this.bodyMovement = avatar.bodyMovement || 'idle';
  this.movementIntensity = avatar.movementIntensity || 0.5;
- this.showFullAvatar = avatar.showFullAvatar || false;
-
+ this.lockedPosition = null;
+ this.originalPosition = null;
+ this.positionWasLocked = false;
+
  // Initialize FBX animation loader
  this.fbxAnimationLoader = null;

  // Dispose Dynamic Bones
  this.dynamicbones.dispose();

- // Clear previous scene, if avatar was previously loaded
- this.mixer = null;
+ // Clear previous mixer/scene, if avatar was previously loaded
+ if (this.mixer) {
+ this.mixer.removeEventListener('finished', this._mixerHandler);
+ this.mixer.stopAllAction();
+ this.mixer.uncacheRoot(this.armature);
+ this.mixer = null;
+ this._mixerHandler = null;
+ }
  if ( this.isAvatarOnly ) {
  if ( this.armature ) {
  this.clearThree( this.armature );
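Tearing down the previous `THREE.AnimationMixer` this way avoids leaking actions and cached property bindings across avatar loads. A minimal sketch of the paired setup and teardown (assuming `_mixerHandler` is the listener registered at setup; the setup side is not shown in this diff):

    // Setup: keep a reference to the handler so it can be detached later.
    this._mixerHandler = (e) => { /* react to finished clips */ };
    this.mixer = new THREE.AnimationMixer(this.armature);
    this.mixer.addEventListener('finished', this._mixerHandler);

    // Teardown, mirroring the setup:
    this.mixer.removeEventListener('finished', this._mixerHandler);
    this.mixer.stopAllAction();            // stop and reset running actions
    this.mixer.uncacheRoot(this.armature); // drop cached bindings for the old rig
    this.mixer = null;
    this._mixerHandler = null;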
@@ -1570,7 +1572,7 @@ class TalkingHead {
  * Render scene.
  */
  render() {
- if ( this.isRunning && !this.isAvatarOnly && this.renderer ) {
+ if ( this.isRunning && !this.isAvatarOnly ) {
  this.renderer.render( this.scene, this.camera );
  }
  }
@@ -1579,7 +1581,7 @@ class TalkingHead {
  * Resize avatar.
  */
  onResize() {
- if ( !this.isAvatarOnly && this.renderer ) {
+ if ( !this.isAvatarOnly ) {
  this.camera.aspect = this.nodeAvatar.clientWidth / this.nodeAvatar.clientHeight;
  this.camera.updateProjectionMatrix();
  this.renderer.setSize( this.nodeAvatar.clientWidth, this.nodeAvatar.clientHeight );
@@ -1612,9 +1614,27 @@ class TalkingHead {
  // Apply shoulder adjustment to lower shoulders
  this.applyShoulderAdjustment();
  }
+
+ /**
+ * Update avatar pose deltas
+ */
+ updatePoseDelta() {
+ for( const [key,d] of Object.entries(this.poseDelta.props) ) {
+ if ( d.x === 0 && d.y === 0 && d.z === 0 ) continue;
+ e.set(d.x,d.y,d.z);
+ const o = this.poseAvatar.props[key];
+ if ( o.isQuaternion ) {
+ q.setFromEuler(e);
+ o.multiply(q);
+ } else if ( o.isVector3 ) {
+ o.add( e );
+ }
+ }
+ }

  /**
  * Apply shoulder adjustment to lower shoulders to a more natural position
+ * This is called from updatePoseBase for pose-based animations
  */
  applyShoulderAdjustment() {
  // Shoulder adjustment: reduce X-axis rotation by ~0.6 radians (34 degrees) to lower shoulders to a relaxed position
@@ -1637,21 +1657,40 @@ class TalkingHead {
  rightShoulder.setFromEuler(tempEuler, 'XYZ');
  }
  }
-
+
  /**
- * Update avatar pose deltas
- */
- updatePoseDelta() {
- for( const [key,d] of Object.entries(this.poseDelta.props) ) {
- if ( d.x === 0 && d.y === 0 && d.z === 0 ) continue;
- e.set(d.x,d.y,d.z);
- const o = this.poseAvatar.props[key];
- if ( o.isQuaternion ) {
- q.setFromEuler(e);
- o.multiply(q);
- } else if ( o.isVector3 ) {
- o.add( e );
- }
+ * Apply shoulder adjustment directly to bone objects
+ * This is called AFTER FBX animations update to ensure shoulders stay relaxed
+ * regardless of what the animation sets
+ */
+ applyShoulderAdjustmentToBones() {
+ if (!this.armature) return;
+
+ // Shoulder adjustment: reduce X-axis rotation by ~0.6 radians (34 degrees) to lower shoulders
+ const shoulderAdjustment = -0.6; // Negative to lower shoulders
+ const tempEuler = new THREE.Euler();
+ const tempQuaternion = new THREE.Quaternion();
+
+ // Get shoulder bones directly from armature
+ const leftShoulderBone = this.armature.getObjectByName('LeftShoulder');
+ const rightShoulderBone = this.armature.getObjectByName('RightShoulder');
+
+ // Adjust left shoulder bone directly
+ if (leftShoulderBone && leftShoulderBone.quaternion) {
+ tempEuler.setFromQuaternion(leftShoulderBone.quaternion, 'XYZ');
+ tempEuler.x += shoulderAdjustment; // Reduce X rotation to lower shoulder
+ tempQuaternion.setFromEuler(tempEuler, 'XYZ');
+ leftShoulderBone.quaternion.copy(tempQuaternion);
+ leftShoulderBone.updateMatrixWorld(true);
+ }
+
+ // Adjust right shoulder bone directly
+ if (rightShoulderBone && rightShoulderBone.quaternion) {
+ tempEuler.setFromQuaternion(rightShoulderBone.quaternion, 'XYZ');
+ tempEuler.x += shoulderAdjustment; // Reduce X rotation to lower shoulder
+ tempQuaternion.setFromEuler(tempEuler, 'XYZ');
+ rightShoulderBone.quaternion.copy(tempQuaternion);
+ rightShoulderBone.updateMatrixWorld(true);
  }
  }

@@ -2119,523 +2158,87 @@ class TalkingHead {

  }

+
  /**
- * Initialize FBX animation loader
- */
- async initializeFBXAnimationLoader() {
- try {
- // Dynamic import to avoid loading issues
- const { FBXAnimationLoader } = await import('./fbxAnimationLoader.js');
- this.fbxAnimationLoader = new FBXAnimationLoader(this.armature);
- console.log('FBX Animation Loader initialized');
- } catch (error) {
- console.warn('FBX Animation Loader not available:', error);
- this.fbxAnimationLoader = null;
- }
+ * Get morph target names.
+ * @return {string[]} Morph target names.
+ */
+ getMorphTargetNames() {
+ return [ 'eyesRotateX', 'eyesRotateY', ...Object.keys(this.mtAvatar)].sort();
  }

  /**
- * Set body movement type.
- * @param {string} movement Movement type (idle, walking, prancing, gesturing, dancing, excited).
- */
- setBodyMovement(movement) {
- this.bodyMovement = movement;
-
- // Only set avatar property if avatar exists
- if (this.avatar) {
- this.avatar.bodyMovement = movement;
- }
-
- console.log('Body movement set to:', movement);
-
- // Respect the current showFullAvatar setting instead of forcing it to true
- // Only unlock position when returning to idle
- if (movement === 'idle') {
- // Unlock position when returning to idle
- this.unlockAvatarPosition();
+ * Get baseline value for the morph target.
+ * @param {string} mt Morph target name
+ * @return {number} Value, null if not in baseline
+ */
+ getBaselineValue( mt ) {
+ if ( mt === 'eyesRotateY' ) {
+ const ll = this.getBaselineValue('eyeLookOutLeft');
+ if ( ll === undefined ) return undefined;
+ const lr = this.getBaselineValue('eyeLookInLeft');
+ if ( lr === undefined ) return undefined;
+ const rl = this.getBaselineValue('eyeLookOutRight');
+ if ( rl === undefined ) return undefined;
+ const rr = this.getBaselineValue('eyeLookInRight');
+ if ( rr === undefined ) return undefined;
+ return ll - lr;
+ } else if ( mt === 'eyesRotateX' ) {
+ const d = this.getBaselineValue('eyesLookDown');
+ if ( d === undefined ) return undefined;
+ const u = this.getBaselineValue('eyesLookUp');
+ if ( u === undefined ) return undefined;
+ return d - u;
+ } else {
+ return this.mtAvatar[mt]?.baseline;
  }
- // Note: We no longer force showFullAvatar to true for body movements
- // The avatar will use whatever showFullAvatar value was set by the user
-
- // Apply body movement animation
- this.applyBodyMovementAnimation();
  }

  /**
- * Apply body movement animation based on current movement type.
- */
- async applyBodyMovementAnimation() {
- // Check if avatar is ready
- if (!this.armature || !this.animQueue) {
- console.log('Avatar not ready for body movement animations');
- return;
- }
-
- console.log('Avatar is running:', this.isRunning);
- console.log('Animation queue exists:', !!this.animQueue);
-
- // Remove existing body movement animations
- const beforeLength = this.animQueue.length;
- this.animQueue = this.animQueue.filter(anim => !anim.template.name.startsWith('bodyMovement'));
- const afterLength = this.animQueue.length;
- console.log(`Filtered animation queue: ${beforeLength} -> ${afterLength} animations`);
-
- if (this.bodyMovement === 'idle') {
- // Stop FBX animations if any
- if (this.fbxAnimationLoader) {
- this.fbxAnimationLoader.stopCurrentAnimation();
- }
- return; // No body movement for idle
- }
-
- // Try to use FBX animations first
- if (this.fbxAnimationLoader) {
- try {
- await this.fbxAnimationLoader.playGestureAnimation(this.bodyMovement, this.movementIntensity);
- console.log('Applied FBX body movement animation:', this.bodyMovement);
- return; // Successfully applied FBX animation
- } catch (error) {
- console.warn('FBX animation failed, falling back to code animation:', error);
- }
- }
-
- // Fallback to code-based animations
- const movementAnim = this.createBodyMovementAnimation(this.bodyMovement);
- console.log('Created movement animation:', movementAnim);
- if (movementAnim) {
- try {
- // Use animFactory to create proper animation object
- const animObj = this.animFactory(movementAnim, true); // true for looping
-
- // Validate the animation object before adding
- if (animObj && animObj.ts && animObj.ts.length > 0) {
- this.animQueue.push(animObj);
- console.log('Applied code-based body movement animation:', this.bodyMovement);
- console.log('Animation queue length:', this.animQueue.length);
- console.log('Animation object:', animObj);
- } else {
- console.error('Invalid animation object created for:', this.bodyMovement);
- console.error('Animation object:', animObj);
- }
- } catch (error) {
- console.error('Error creating body movement animation:', error);
+ * Set baseline for morph target.
+ * @param {string} mt Morph target name
+ * @param {number} val Value, null if to be removed from baseline
+ */
+ setBaselineValue( mt, val ) {
+ if ( mt === 'eyesRotateY' ) {
+ this.setBaselineValue('eyeLookOutLeft', (val === null) ? null : (val>0 ? val : 0) );
+ this.setBaselineValue('eyeLookInLeft', (val === null) ? null : (val>0 ? 0 : -val) );
+ this.setBaselineValue('eyeLookOutRight', (val === null) ? null : (val>0 ? 0 : -val) );
+ this.setBaselineValue('eyeLookInRight', (val === null) ? null : (val>0 ? val : 0) );
+ } else if ( mt === 'eyesRotateX' ) {
+ this.setBaselineValue('eyesLookDown', (val === null) ? null : (val>0 ? val : 0) );
+ this.setBaselineValue('eyesLookUp', (val === null) ? null : (val>0 ? 0 : -val) );
+ } else {
+ if ( this.mtAvatar.hasOwnProperty(mt) ) {
+ Object.assign(this.mtAvatar[mt],{ base: null, baseline: val, needsUpdate: true });
  }
  }
  }

  /**
- * Lock avatar position to prevent movement during animations.
- */
- lockAvatarPosition() {
- if (!this.armature) {
- console.warn('Cannot lock position: armature not available');
- return;
- }
-
- // Store the original position if not already stored
- if (!this.originalPosition) {
- this.originalPosition = {
- x: this.armature.position.x,
- y: this.armature.position.y,
- z: this.armature.position.z
- };
- console.log('Original position stored:', this.originalPosition);
- }
-
- // Lock the avatar at its CURRENT position (don't move it)
- this.lockedPosition = {
- x: this.armature.position.x,
- y: this.armature.position.y,
- z: this.armature.position.z
- };
-
- console.log('Avatar position locked at current position:', this.lockedPosition);
- }
-
- /**
- * Unlock avatar position and restore original position.
- */
- unlockAvatarPosition() {
- if (this.armature && this.originalPosition) {
- // Restore avatar to its original position before locking
- this.armature.position.set(
- this.originalPosition.x,
- this.originalPosition.y,
- this.originalPosition.z
- );
- console.log('Avatar position restored to original:', this.originalPosition);
- } else if (this.armature) {
- // Fallback: reset to center if no original position was stored
- this.armature.position.set(0, 0, 0);
- console.log('Avatar position reset to center (0,0,0)');
- }
- this.lockedPosition = null;
- this.originalPosition = null; // Clear original position after unlock
- console.log('Avatar position unlocked');
- }
-
- /**
- * Ensure avatar stays at locked position.
- */
- maintainLockedPosition() {
- if (this.lockedPosition && this.armature) {
- // Enforce the locked position - keep avatar exactly where it was locked
- // This prevents FBX animations from moving the avatar
- this.armature.position.set(
- this.lockedPosition.x,
- this.lockedPosition.y,
- this.lockedPosition.z
- );
- }
- }
-
- /**
- * Create body movement animation.
- * @param {string} movementType Movement type.
- * @returns {Object} Animation object.
- */
- createBodyMovementAnimation(movementType) {
- const intensity = this.movementIntensity || 0.5;
-
- const movementAnimations = {
- walking: {
- name: 'bodyMovement_walking',
- delay: [500, 2000],
- dt: [800, 1200],
- vs: {
- bodyRotateY: [-0.1 * intensity, 0.1 * intensity, 0],
- bodyRotateZ: [-0.05 * intensity, 0.05 * intensity, 0],
- bodyRotateX: [-0.02 * intensity, 0.02 * intensity, 0]
- }
- },
- prancing: {
- name: 'bodyMovement_prancing',
- delay: [300, 1000],
- dt: [400, 800],
- vs: {
- bodyRotateY: [-0.15 * intensity, 0.15 * intensity, 0],
- bodyRotateZ: [-0.08 * intensity, 0.08 * intensity, 0],
- bodyRotateX: [-0.05 * intensity, 0.05 * intensity, 0]
- }
- },
- gesturing: {
- name: 'bodyMovement_gesturing',
- delay: [400, 1500],
- dt: [600, 1000],
- vs: {
- bodyRotateY: [-0.08 * intensity, 0.08 * intensity, 0],
- bodyRotateZ: [-0.03 * intensity, 0.03 * intensity, 0]
- }
- },
- dancing: {
- name: 'bodyMovement_dancing',
- delay: [200, 600],
- dt: [400, 800],
- vs: {
- bodyRotateY: [-0.25 * intensity, 0.25 * intensity, 0],
- bodyRotateZ: [-0.15 * intensity, 0.15 * intensity, 0],
- bodyRotateX: [-0.1 * intensity, 0.1 * intensity, 0]
- }
- },
- dancing2: {
- name: 'bodyMovement_dancing2',
- delay: [150, 500],
- dt: [300, 700],
- vs: {
- bodyRotateY: [-0.3 * intensity, 0.3 * intensity, 0],
- bodyRotateZ: [-0.2 * intensity, 0.2 * intensity, 0],
- bodyRotateX: [-0.12 * intensity, 0.12 * intensity, 0]
- }
- },
- dancing3: {
- name: 'bodyMovement_dancing3',
- delay: [100, 400],
- dt: [200, 600],
- vs: {
- bodyRotateY: [-0.35 * intensity, 0.35 * intensity, 0],
- bodyRotateZ: [-0.25 * intensity, 0.25 * intensity, 0],
- bodyRotateX: [-0.15 * intensity, 0.15 * intensity, 0]
- }
- },
- excited: {
- name: 'bodyMovement_excited',
- delay: [200, 600],
- dt: [300, 700],
- vs: {
- bodyRotateY: [-0.12 * intensity, 0.12 * intensity, 0],
- bodyRotateZ: [-0.06 * intensity, 0.06 * intensity, 0],
- bodyRotateX: [-0.04 * intensity, 0.04 * intensity, 0]
- }
- },
- happy: {
- name: 'bodyMovement_happy',
- delay: [300, 800],
- dt: [500, 1000],
- vs: {
- bodyRotateY: [-0.08 * intensity, 0.08 * intensity, 0],
- bodyRotateZ: [-0.04 * intensity, 0.04 * intensity, 0],
- bodyRotateX: [-0.02 * intensity, 0.02 * intensity, 0]
- }
- },
- surprised: {
- name: 'bodyMovement_surprised',
- delay: [100, 300],
- dt: [200, 500],
- vs: {
- bodyRotateY: [-0.05 * intensity, 0.05 * intensity, 0],
- bodyRotateZ: [-0.03 * intensity, 0.03 * intensity, 0],
- bodyRotateX: [-0.01 * intensity, 0.01 * intensity, 0]
- }
- },
- thinking: {
- name: 'bodyMovement_thinking',
- delay: [800, 2000],
- dt: [1000, 1500],
- vs: {
- bodyRotateY: [-0.06 * intensity, 0.06 * intensity, 0],
- bodyRotateZ: [-0.03 * intensity, 0.03 * intensity, 0],
- bodyRotateX: [-0.02 * intensity, 0.02 * intensity, 0]
- }
- },
- nodding: {
- name: 'bodyMovement_nodding',
- delay: [400, 800],
- dt: [300, 600],
- vs: {
- bodyRotateX: [-0.1 * intensity, 0.1 * intensity, 0],
- bodyRotateY: [-0.02 * intensity, 0.02 * intensity, 0]
- }
- },
- shaking: {
- name: 'bodyMovement_shaking',
- delay: [200, 400],
- dt: [150, 300],
- vs: {
- bodyRotateY: [-0.15 * intensity, 0.15 * intensity, 0],
- bodyRotateZ: [-0.05 * intensity, 0.05 * intensity, 0]
- }
- },
- celebration: {
- name: 'bodyMovement_celebration',
- delay: [100, 300],
- dt: [200, 500],
- vs: {
- bodyRotateY: [-0.2 * intensity, 0.2 * intensity, 0],
- bodyRotateZ: [-0.1 * intensity, 0.1 * intensity, 0],
- bodyRotateX: [-0.08 * intensity, 0.08 * intensity, 0]
- }
- },
- energetic: {
- name: 'bodyMovement_energetic',
- delay: [150, 400],
- dt: [250, 500],
- vs: {
- bodyRotateY: [-0.18 * intensity, 0.18 * intensity, 0],
- bodyRotateZ: [-0.12 * intensity, 0.12 * intensity, 0],
- bodyRotateX: [-0.08 * intensity, 0.08 * intensity, 0]
- }
- },
- swaying: {
- name: 'bodyMovement_swaying',
- delay: [600, 1200],
- dt: [800, 1000],
- vs: {
- bodyRotateY: [-0.1 * intensity, 0.1 * intensity, 0],
- bodyRotateZ: [-0.05 * intensity, 0.05 * intensity, 0]
- }
- },
- bouncing: {
- name: 'bodyMovement_bouncing',
- delay: [300, 600],
- dt: [400, 700],
- vs: {
- bodyRotateY: [-0.05 * intensity, 0.05 * intensity, 0]
- }
- }
- };
-
- // Handle dance variations
- if (movementType === 'dancing') {
- const danceVariations = ['dancing', 'dancing2', 'dancing3'];
- const randomDance = danceVariations[Math.floor(Math.random() * danceVariations.length)];
- return movementAnimations[randomDance] || movementAnimations['dancing'];
- }
-
- return movementAnimations[movementType] || null;
- }
-
- /**
- * Play a random dance animation
- */
- playRandomDance() {
- const danceVariations = ['dancing', 'dancing2', 'dancing3'];
- const randomDance = danceVariations[Math.floor(Math.random() * danceVariations.length)];
- this.setBodyMovement(randomDance);
- }
-
- /**
- * Play a reaction animation
- * @param {string} reactionType - Type of reaction (happy, surprised, thinking, etc.)
- */
- playReaction(reactionType) {
- const validReactions = ['happy', 'surprised', 'thinking', 'nodding', 'shaking', 'celebration', 'energetic', 'swaying', 'bouncing'];
- if (validReactions.includes(reactionType)) {
- this.setBodyMovement(reactionType);
-
- // Auto-return to idle after a delay for non-looping reactions
- const nonLoopingReactions = ['surprised', 'nodding', 'shaking', 'celebration'];
- if (nonLoopingReactions.includes(reactionType)) {
- setTimeout(() => {
- this.setBodyMovement('idle');
- }, 3000); // Return to idle after 3 seconds
- }
- } else {
- console.warn('Invalid reaction type:', reactionType);
- }
- }
-
- /**
- * Play a celebration sequence
- */
- playCelebration() {
- this.playReaction('celebration');
-
- // After celebration, play a random dance
- setTimeout(() => {
- this.playRandomDance();
- }, 2000);
- }
-
- /**
- * Set movement intensity.
- * @param {number} intensity Movement intensity (0-1).
- */
- setMovementIntensity(intensity) {
- this.movementIntensity = Math.max(0, Math.min(1, intensity));
-
- // Only set avatar property if avatar exists
- if (this.avatar) {
- this.avatar.movementIntensity = this.movementIntensity;
- }
-
- console.log('Movement intensity set to:', this.movementIntensity);
-
- // Update FBX animation intensity if available
- if (this.fbxAnimationLoader) {
- this.fbxAnimationLoader.setIntensity(this.movementIntensity);
- }
-
- // Reapply body movement animation with new intensity
- this.applyBodyMovementAnimation();
- }
-
- /**
- * Set show full avatar.
- * @param {boolean} show Whether to show full avatar.
- */
- setShowFullAvatar(show) {
- this.showFullAvatar = show;
-
- // Only set avatar property if avatar exists
- if (this.avatar) {
- this.avatar.showFullAvatar = show;
- }
-
- console.log('Show full avatar set to:', show);
-
- // Only change camera view if it's not already set to the desired view
- // This prevents the avatar from sliding down when starting animations
- if (show && this.viewName !== 'full') {
- console.log('Changing camera view to full');
- this.setView('full');
- } else if (!show && this.viewName !== 'upper') {
- console.log('Changing camera view to upper');
- this.setView('upper');
- } else {
- console.log('Camera view already set to:', this.viewName);
- }
- }
-
- /**
- * Get morph target names.
- * @return {string[]} Morph target names.
- */
- getMorphTargetNames() {
- return [ 'eyesRotateX', 'eyesRotateY', ...Object.keys(this.mtAvatar)].sort();
- }
-
- /**
- * Get baseline value for the morph target.
- * @param {string} mt Morph target name
- * @return {number} Value, null if not in baseline
- */
- getBaselineValue( mt ) {
- if ( mt === 'eyesRotateY' ) {
- const ll = this.getBaselineValue('eyeLookOutLeft');
- if ( ll === undefined ) return undefined;
- const lr = this.getBaselineValue('eyeLookInLeft');
- if ( lr === undefined ) return undefined;
- const rl = this.getBaselineValue('eyeLookOutRight');
- if ( rl === undefined ) return undefined;
- const rr = this.getBaselineValue('eyeLookInRight');
- if ( rr === undefined ) return undefined;
- return ll - lr;
- } else if ( mt === 'eyesRotateX' ) {
- const d = this.getBaselineValue('eyesLookDown');
- if ( d === undefined ) return undefined;
- const u = this.getBaselineValue('eyesLookUp');
- if ( u === undefined ) return undefined;
- return d - u;
- } else {
- return this.mtAvatar[mt]?.baseline;
- }
- }
-
- /**
- * Set baseline for morph target.
- * @param {string} mt Morph target name
- * @param {number} val Value, null if to be removed from baseline
- */
- setBaselineValue( mt, val ) {
- if ( mt === 'eyesRotateY' ) {
- this.setBaselineValue('eyeLookOutLeft', (val === null) ? null : (val>0 ? val : 0) );
- this.setBaselineValue('eyeLookInLeft', (val === null) ? null : (val>0 ? 0 : -val) );
- this.setBaselineValue('eyeLookOutRight', (val === null) ? null : (val>0 ? 0 : -val) );
- this.setBaselineValue('eyeLookInRight', (val === null) ? null : (val>0 ? val : 0) );
- } else if ( mt === 'eyesRotateX' ) {
- this.setBaselineValue('eyesLookDown', (val === null) ? null : (val>0 ? val : 0) );
- this.setBaselineValue('eyesLookUp', (val === null) ? null : (val>0 ? 0 : -val) );
- } else {
- if ( this.mtAvatar.hasOwnProperty(mt) ) {
- Object.assign(this.mtAvatar[mt],{ base: null, baseline: val, needsUpdate: true });
- }
- }
- }
-
- /**
- * Get fixed value for the morph target.
- * @param {string} mt Morph target name
- * @return {number} Value, null if not fixed
- */
- getFixedValue( mt ) {
- if ( mt === 'eyesRotateY' ) {
- const ll = this.getFixedValue('eyeLookOutLeft');
- if ( ll === null ) return null;
- const lr = this.getFixedValue('eyeLookInLeft');
- if ( lr === null ) return null;
- const rl = this.getFixedValue('eyeLookOutRight');
- if ( rl === null ) return null;
- const rr = this.getFixedValue('eyeLookInRight');
- if ( rr === null ) return null;
- return ll - lr;
- } else if ( mt === 'eyesRotateX' ) {
- const d = this.getFixedValue('eyesLookDown');
- if ( d === null ) return null;
- const u = this.getFixedValue('eyesLookUp');
- if ( u === null ) return null;
- return d - u;
- } else {
- return this.mtAvatar[mt]?.fixed;
+ * Get fixed value for the morph target.
+ * @param {string} mt Morph target name
+ * @return {number} Value, null if not fixed
+ */
+ getFixedValue( mt ) {
+ if ( mt === 'eyesRotateY' ) {
+ const ll = this.getFixedValue('eyeLookOutLeft');
+ if ( ll === null ) return null;
+ const lr = this.getFixedValue('eyeLookInLeft');
+ if ( lr === null ) return null;
+ const rl = this.getFixedValue('eyeLookOutRight');
+ if ( rl === null ) return null;
+ const rr = this.getFixedValue('eyeLookInRight');
+ if ( rr === null ) return null;
+ return ll - lr;
+ } else if ( mt === 'eyesRotateX' ) {
+ const d = this.getFixedValue('eyesLookDown');
+ if ( d === null ) return null;
+ const u = this.getFixedValue('eyesLookUp');
+ if ( u === null ) return null;
+ return d - u;
+ } else {
+ return this.mtAvatar[mt]?.fixed;
  }
  }

@@ -2677,10 +2280,6 @@ class TalkingHead {
  let a = t;
  while(1) {
  if ( a.hasOwnProperty(this.stateName) ) {
- // Debug: Log state selection
- if (this.stateName === 'speaking' || this.stateName === 'idle') {
- console.log('Selected state:', this.stateName, 'for avatar body:', this.avatar?.body);
- }
  a = a[this.stateName];
  } else if ( a.hasOwnProperty(this.moodName) ) {
  a = a[this.moodName];
@@ -2688,9 +2287,7 @@ class TalkingHead {
  a = a[this.poseName];
  } else if ( a.hasOwnProperty(this.viewName) ) {
  a = a[this.viewName];
- } else if ( this.avatar && this.avatar.body && a.hasOwnProperty(this.avatar.body) ) {
- // Debug: Log gender-specific override
- console.log('Applying gender-specific override:', this.avatar.body, 'for state:', this.stateName, 'keys:', Object.keys(a));
+ } else if ( this.avatar.body && a.hasOwnProperty(this.avatar.body) ) {
  a = a[this.avatar.body];
  } else if ( a.hasOwnProperty('alt') ) {

@@ -2710,12 +2307,6 @@ class TalkingHead {
  }
  }
  a = b;
- // Debug: Log selected alternative and check for gender override
- if (this.avatar && this.avatar.body && a.hasOwnProperty(this.avatar.body)) {
- console.log('Found gender override in selected alternative:', this.avatar.body, 'keys:', Object.keys(a));
- }
- // Continue loop to check for gender-specific override after selecting alternative
- continue;

  } else {
  break;
@@ -2746,10 +2337,6 @@ class TalkingHead {
  }

  // Values
- // Debug: Log pose selection
- if (a.vs && a.vs.pose) {
- console.log('Pose being selected from vs.pose:', a.vs.pose, 'for avatar body:', this.avatar?.body);
- }
  for( let [mt,vs] of Object.entries(a.vs) ) {
  const base = this.getBaselineValue(mt);
  const vals = vs.map( x => {
@@ -2759,11 +2346,6 @@ class TalkingHead {
  } else if ( typeof x === 'function' ) {
  return x;
  } else if ( typeof x === 'string' || x instanceof String ) {
- // Intercept pose values and override 'hip' and 'side' to 'wide' for male avatars
- if (mt === 'pose' && this.avatar && this.avatar.body === 'M' && (x === 'hip' || x === 'side')) {
- console.log('Intercepting pose', x, 'in animation factory, overriding to wide for male avatar');
- return 'wide'; // Always use 'wide' for male avatars, never 'side' or 'hip'
- }
  return x.slice();
  } else if ( Array.isArray(x) ) {
  if ( mt === 'gesture' ) {
@@ -2876,7 +2458,7 @@ class TalkingHead {
  if ( this.isAvatarOnly ) {
  dt = t;
  } else {
- requestAnimationFrame( this.animate.bind(this) );
+ this._raf = requestAnimationFrame( this.animate );
  dt = t - this.animTimeLast;
  if ( dt < this.animFrameDur ) return;
  this.animTimeLast = t;
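The early return when `dt < this.animFrameDur` throttles model updates to `opt.modelFPS` while the loop itself keeps rescheduling at display rate. A minimal sketch of the same throttle in isolation:

    const frameDur = 1000 / 30; // e.g. modelFPS = 30
    let last = 0;
    let raf = null;
    function animate(t) {
      raf = requestAnimationFrame(animate); // stay alive at display refresh rate
      const dt = t - last;
      if (dt < frameDur) return;            // skip work until the frame budget elapses
      last = t;
      // ... update model state with dt ...
    }
    raf = requestAnimationFrame(animate);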
@@ -2960,7 +2542,7 @@ class TalkingHead {
  const tasks = [];
  for( i=0, l=this.animQueue.length; i<l; i++ ) {
  const x = this.animQueue[i];
- if ( !x || !x.ts || !x.ts.length || this.animClock < x.ts[0] ) continue;
+ if ( this.animClock < x.ts[0] ) continue;

  for( j = x.ndx || 0, k = x.ts.length; j<k; j++ ) {
  if ( this.animClock < x.ts[j] ) break;
@@ -3054,18 +2636,7 @@ class TalkingHead {
  break;

  case 'pose':
- // Ensure gender-appropriate pose for male avatars - always use 'wide', never 'side' or 'hip'
- if (this.avatar && this.avatar.body === 'M') {
- if (j === 'hip' || j === 'side') {
- // Always override 'hip' and 'side' to 'wide' for male avatars
- if (this.poseTemplates['wide']) {
- j = 'wide';
- console.log('Overriding pose', j === 'hip' ? 'hip' : 'side', 'to wide for male avatar');
- }
- }
- }
  this.poseName = j;
- console.log('Setting pose to:', this.poseName, 'for avatar body:', this.avatar?.body, 'state:', this.stateName);
  this.setPoseFromTemplate( this.poseTemplates[ this.poseName ] );
  break;

@@ -3221,6 +2792,10 @@ class TalkingHead {
  if (this.fbxAnimationLoader) {
  this.fbxAnimationLoader.update();
  }
+
+ // Apply shoulder adjustment AFTER FBX animations to ensure relaxed shoulders
+ // This overrides any shoulder positions set by animations
+ this.applyShoulderAdjustmentToBones();

  // Custom update
  if ( this.opt.update ) {
@@ -3293,28 +2868,17 @@ class TalkingHead {
  }

  /**
- * Get lip-sync processor based on language. Uses statically imported modules.
+ * Get lip-sync processor based on language. Import module dynamically.
  * @param {string} lang Language
- * @param {string} [path="./"] Module path (ignored, kept for compatibility)
+ * @param {string} [path="./"] Module path
  */
  lipsyncGetProcessor(lang, path="./") {
  if ( !this.lipsync.hasOwnProperty(lang) ) {
- const langLower = lang.toLowerCase();
+ const moduleName = path + 'lipsync-' + lang.toLowerCase() + '.mjs';
  const className = 'Lipsync' + lang.charAt(0).toUpperCase() + lang.slice(1);
-
- try {
- // Use statically imported module
- const module = LIPSYNC_MODULES[langLower];
-
- if (module && module[className]) {
+ import(moduleName).then( module => {
  this.lipsync[lang] = new module[className];
- console.log(`Loaded lip-sync module for ${lang}`);
- } else {
- console.warn(`Lip-sync module for ${lang} not found. Available modules:`, Object.keys(LIPSYNC_MODULES));
- }
- } catch (error) {
- console.warn(`Failed to load lip-sync module for ${lang}:`, error);
- }
+ });
  }
  }

@@ -3703,589 +3267,148 @@ class TalkingHead {
3703
3267
  }
3704
3268
 
3705
3269
  if ( onsubtitles ) {
3706
- o.onSubtitles = onsubtitles;
3707
- }
3708
-
3709
- if ( opt.isRaw ) {
3710
- o.isRaw = true;
3711
- }
3712
-
3713
- if ( Object.keys(o).length ) {
3714
- this.speechQueue.push(o);
3715
- if ( !o.isRaw ) {
3716
- this.speechQueue.push( { break: 300 } );
3717
- }
3718
- this.startSpeaking();
3719
- }
3720
-
3721
- }
3722
-
3723
- /**
3724
- * Play audio playlist using Web Audio API.
3725
- * @param {boolean} [force=false] If true, forces to proceed
3726
- */
3727
- async playAudio(force=false) {
3728
- if ( !this.armature || (this.isAudioPlaying && !force) ) return;
3729
- this.isAudioPlaying = true;
3730
- if ( this.audioPlaylist.length ) {
3731
- const item = this.audioPlaylist.shift();
3732
-
3733
- // If Web Audio API is suspended, try to resume it
3734
- if ( this.audioCtx.state === "suspended" || this.audioCtx.state === "interrupted" ) {
3735
- const resume = this.audioCtx.resume();
3736
- const timeout = new Promise((_r, rej) => setTimeout(() => rej("p2"), 1000));
3737
- try {
3738
- await Promise.race([resume, timeout]);
3739
- } catch(e) {
3740
- console.log("Can't play audio. Web Audio API suspended. This is often due to calling some speak method before the first user action, which is typically prevented by the browser.");
3741
- this.playAudio(true);
3742
- return;
3743
- }
3744
- }
3745
-
3746
- // AudioBuffer
3747
- let audio;
3748
- if ( Array.isArray(item.audio) ) {
3749
- // Convert from PCM samples
3750
- let buf = this.concatArrayBuffers( item.audio );
3751
- audio = this.pcmToAudioBuffer(buf);
3752
- } else {
3753
- audio = item.audio;
3754
- }
3755
-
3756
- // Create audio source
3757
- this.audioSpeechSource = this.audioCtx.createBufferSource();
3758
- this.audioSpeechSource.buffer = audio;
3759
- this.audioSpeechSource.playbackRate.value = 1 / this.animSlowdownRate;
3760
- this.audioSpeechSource.connect(this.audioAnalyzerNode);
3761
- this.audioSpeechSource.addEventListener('ended', () => {
3762
- this.audioSpeechSource.disconnect();
3763
- this.playAudio(true);
3764
- }, { once: true });
3765
-
3766
- // Rescale lipsync and push to queue
3767
- let delay = 0;
3768
- if ( item.anim ) {
3769
- // Find the lowest negative time point, if any
3770
- if ( !item.isRaw ) {
3771
- delay = Math.abs(Math.min(0, ...item.anim.map( x => Math.min(...x.ts) ) ) );
3772
- }
3773
- item.anim.forEach( x => {
3774
- for(let i=0; i<x.ts.length; i++) {
3775
- x.ts[i] = this.animClock + x.ts[i] + delay;
3776
- }
3777
- this.animQueue.push(x);
3778
- });
3779
- }
3780
-
3781
- // Play, dealy in seconds so pre-animations can be played
3782
- this.audioSpeechSource.start(delay/1000);
3783
-
3784
- } else {
3785
- this.isAudioPlaying = false;
3786
- this.startSpeaking(true);
3787
- }
3788
- }
3789
-
3790
- /**
3791
- * Synthesize speech using browser's built-in Speech Synthesis API
3792
- * @param {Object} line Speech line object
3793
- */
3794
- async synthesizeWithBrowserTTS(line) {
3795
- return new Promise((resolve, reject) => {
3796
- // Get the text from the line
3797
- const text = line.text.map(x => x.word).join(' ');
3798
-
3799
- // Create speech synthesis utterance
3800
- const utterance = new SpeechSynthesisUtterance(text);
3801
-
3802
- // Set voice properties
3803
- const lang = line.lang || this.avatar.ttsLang || this.opt.ttsLang || 'en-US';
3804
- const rate = (line.rate || this.avatar.ttsRate || this.opt.ttsRate || 1) + this.mood.speech.deltaRate;
3805
- const pitch = (line.pitch || this.avatar.ttsPitch || this.opt.ttsPitch || 1) + this.mood.speech.deltaPitch;
3806
- const volume = (line.volume || this.avatar.ttsVolume || this.opt.ttsVolume || 1) + this.mood.speech.deltaVolume;
3807
-
3808
- utterance.lang = lang;
3809
- utterance.rate = Math.max(0.1, Math.min(10, rate));
3810
- utterance.pitch = Math.max(0, Math.min(2, pitch));
3811
- utterance.volume = Math.max(0, Math.min(1, volume));
3812
-
3813
- // Try to find a matching voice
3814
- const voices = speechSynthesis.getVoices();
3815
- const targetVoice = line.voice || this.avatar.ttsVoice || this.opt.ttsVoice;
3816
- if (targetVoice && voices.length > 0) {
3817
- const voice = voices.find(v => v.name.includes(targetVoice) || v.lang === lang);
3818
- if (voice) {
3819
- utterance.voice = voice;
3820
- }
3821
- }
3822
-
3823
- // Estimate duration based on text length and speech rate
3824
- const estimatedDuration = (text.length * 100) / utterance.rate; // Adjust for speech rate
3825
-
3826
- // Create audio buffer for the estimated duration
3827
- const audioBuffer = this.audioCtx.createBuffer(1, this.audioCtx.sampleRate * (estimatedDuration / 1000), this.audioCtx.sampleRate);
3828
-
3829
- // Generate lip-sync data from text using the existing lip-sync modules
3830
- const lipsyncLang = this.avatar.lipsyncLang || this.opt.lipsyncLang || 'en';
3831
- const processedText = this.lipsyncPreProcessText(text, lipsyncLang);
3832
- const lipsyncData = this.lipsyncWordsToVisemes(processedText, lipsyncLang);
3833
-
3834
- console.log('Browser TTS Lip-sync Debug:', {
3835
- text,
3836
- lipsyncLang,
3837
- processedText,
3838
- lipsyncData,
3839
- hasVisemes: lipsyncData && lipsyncData.visemes && lipsyncData.visemes.length > 0,
3840
- estimatedDuration
3841
- });
3842
-
3843
- // Generate lip-sync animation from the viseme data
3844
- const lipsyncAnim = [];
3845
- if (lipsyncData && lipsyncData.visemes && lipsyncData.visemes.length > 0) {
3846
- const totalDuration = lipsyncData.times[lipsyncData.visemes.length - 1] + lipsyncData.durations[lipsyncData.visemes.length - 1];
3847
-
3848
- for (let i = 0; i < lipsyncData.visemes.length; i++) {
3849
- const viseme = lipsyncData.visemes[i];
3850
- const relativeTime = lipsyncData.times[i] / totalDuration;
3851
- const relativeDuration = lipsyncData.durations[i] / totalDuration;
3852
-
3853
- const time = relativeTime * estimatedDuration;
3854
- const duration = relativeDuration * estimatedDuration;
3855
-
3856
- lipsyncAnim.push({
3857
- template: { name: 'viseme' },
3858
- ts: [time - Math.min(60, 2 * duration / 3), time + Math.min(25, duration / 2), time + duration + Math.min(60, duration / 2)],
3859
- vs: {
3860
- ['viseme_' + viseme]: [null, (viseme === 'PP' || viseme === 'FF') ? 0.9 : 0.6, 0]
3861
- }
3862
- });
3863
- }
3864
- }
3865
-
3866
- // Combine original animation with lip-sync animation
3867
- const combinedAnim = [...line.anim, ...lipsyncAnim];
3868
-
3869
- // Add to playlist
3870
- this.audioPlaylist.push({ anim: combinedAnim, audio: audioBuffer });
3871
- this.onSubtitles = line.onSubtitles || null;
3872
- this.resetLips();
3873
- if (line.mood) this.setMood(line.mood);
3874
- this.playAudio();
3875
-
3876
- // Handle speech synthesis events
3877
- utterance.onend = () => {
3878
- resolve();
3879
- };
3880
-
3881
- utterance.onerror = (event) => {
3882
- console.error('Speech synthesis error:', event.error);
3883
- reject(event.error);
3884
- };
3885
-
3886
- // Start speaking
3887
- speechSynthesis.speak(utterance);
3888
- });
3889
- }
3890
-
3891
- /**
3892
- * Synthesize speech using ElevenLabs TTS
3893
- * @param {Object} line Speech line object
3894
- */
3895
- async synthesizeWithElevenLabsTTS(line) {
3896
- // Get the text from the line
3897
- const text = line.text.map(x => x.word).join(' ');
3898
-
3899
- // ElevenLabs API request
3900
- const voiceId = line.voice || this.avatar.ttsVoice || this.opt.ttsVoice || "21m00Tcm4TlvDq8ikWAM"; // Default to Rachel
3901
-
3902
- const requestBody = {
3903
- text: text,
3904
- model_id: "eleven_monolingual_v1",
3905
- voice_settings: {
3906
- stability: 0.5,
3907
- similarity_boost: 0.5,
3908
- style: 0.0,
3909
- use_speaker_boost: true
3910
- }
3911
- };
3912
-
3913
- const response = await fetch(`${this.opt.ttsEndpoint}/${voiceId}`, {
3914
- method: 'POST',
3915
- headers: {
3916
- 'Accept': 'audio/mpeg',
3917
- 'Content-Type': 'application/json',
3918
- 'xi-api-key': this.opt.ttsApikey
3919
- },
3920
- body: JSON.stringify(requestBody)
3921
- });
3922
-
3923
- if (!response.ok) {
3924
- throw new Error(`ElevenLabs TTS error: ${response.status} ${response.statusText}`);
3925
- }
3926
-
3927
- // Get audio data
3928
- const audioArrayBuffer = await response.arrayBuffer();
3929
- const audioBuffer = await this.audioCtx.decodeAudioData(audioArrayBuffer);
3930
-
3931
- // Use text-based lip-sync with proper error handling
3932
- console.log('Using text-based lip-sync for debugging...');
3933
- const lipsyncLang = this.avatar.lipsyncLang || this.opt.lipsyncLang || 'en';
3934
-
3935
- let audioAnalysis;
3936
- try {
3937
- console.log('Lip-sync modules available:', {
3938
- hasLipsync: !!this.lipsync,
3939
- lipsyncKeys: this.lipsync ? Object.keys(this.lipsync) : [],
3940
- lipsyncLang: lipsyncLang
3941
- });
3942
-
3943
- const processedText = this.lipsyncPreProcessText(text, lipsyncLang);
3944
- const lipsyncData = this.lipsyncWordsToVisemes(processedText, lipsyncLang);
3945
-
3946
- console.log('Lip-sync data:', {
3947
- processedText,
3948
- lipsyncData,
3949
- hasVisemes: lipsyncData && lipsyncData.visemes && lipsyncData.visemes.length > 0
3950
- });
3951
-
3952
- if (lipsyncData && lipsyncData.visemes && lipsyncData.visemes.length > 0) {
3953
- // Create audio analysis structure for compatibility
3954
- audioAnalysis = {
3955
- visemes: lipsyncData.visemes.map((viseme, i) => ({
3956
- viseme: viseme,
3957
- startTime: (i * audioBuffer.duration) / lipsyncData.visemes.length,
3958
- endTime: ((i + 1) * audioBuffer.duration) / lipsyncData.visemes.length,
3959
- duration: audioBuffer.duration / lipsyncData.visemes.length,
3960
- intensity: 0.7
3961
- })),
3962
- words: [],
3963
- duration: audioBuffer.duration,
3964
- features: { onsets: [], boundaries: [] }
3965
- };
3966
- } else {
3967
- throw new Error('No visemes generated from text');
3968
- }
3969
- } catch (error) {
3970
- console.error('Text-based lip-sync failed, using fallback:', error);
3971
- // Fallback: create simple visemes from text
3972
- const words = text.toLowerCase().split(/\s+/);
3973
- const simpleVisemes = [];
3974
-
3975
- for (const word of words) {
3976
- // Simple phonetic mapping
3977
- for (const char of word) {
3978
- let viseme = 'aa'; // default
3979
- if ('aeiou'.includes(char)) viseme = 'aa';
3980
- else if ('bp'.includes(char)) viseme = 'PP';
3981
- else if ('fv'.includes(char)) viseme = 'FF';
3982
- else if ('st'.includes(char)) viseme = 'SS';
3983
- else if ('dln'.includes(char)) viseme = 'DD';
3984
- else if ('kg'.includes(char)) viseme = 'kk';
3985
- else if ('rw'.includes(char)) viseme = 'RR';
3986
-
3987
- simpleVisemes.push(viseme);
3988
- }
3989
- }
3990
-
3991
- audioAnalysis = {
3992
- visemes: simpleVisemes.map((viseme, i) => ({
3993
- viseme: viseme,
3994
- startTime: (i * audioBuffer.duration) / simpleVisemes.length,
3995
- endTime: ((i + 1) * audioBuffer.duration) / simpleVisemes.length,
3996
- duration: audioBuffer.duration / simpleVisemes.length,
3997
- intensity: 0.6
3998
- })),
3999
- words: [],
4000
- duration: audioBuffer.duration,
4001
- features: { onsets: [], boundaries: [] }
4002
- };
3270
+ o.onSubtitles = onsubtitles;
4003
3271
  }
4004
-
4005
- console.log('ElevenLabs TTS Audio Analysis:', {
4006
- text,
4007
- audioDuration: audioBuffer.duration,
4008
- visemeCount: audioAnalysis.visemes ? audioAnalysis.visemes.length : 0,
4009
- wordCount: audioAnalysis.words ? audioAnalysis.words.length : 0,
4010
- features: {
4011
- onsets: audioAnalysis.features && audioAnalysis.features.onsets ? audioAnalysis.features.onsets.length : 0,
4012
- boundaries: audioAnalysis.features && audioAnalysis.features.phonemeBoundaries ? audioAnalysis.features.phonemeBoundaries.length : 0
4013
- },
4014
- visemes: audioAnalysis.visemes ? audioAnalysis.visemes.slice(0, 3) : [] // Show first 3 visemes for debugging
4015
- });
4016
-
4017
- // Generate precise lip-sync animation from audio analysis
4018
- const lipsyncAnim = [];
4019
- if (audioAnalysis.visemes && audioAnalysis.visemes.length > 0) {
4020
- console.log('ElevenLabs: Generating lip-sync animation from', audioAnalysis.visemes.length, 'visemes');
4021
- for (let i = 0; i < audioAnalysis.visemes.length; i++) {
4022
- const visemeData = audioAnalysis.visemes[i];
4023
- const time = visemeData.startTime * 1000; // Convert to milliseconds
4024
- const duration = visemeData.duration * 1000;
4025
- const intensity = visemeData.intensity;
4026
-
4027
- lipsyncAnim.push({
4028
- template: { name: 'viseme' },
4029
- ts: [time - Math.min(60, 2 * duration / 3), time + Math.min(25, duration / 2), time + duration + Math.min(60, duration / 2)],
4030
- vs: {
4031
- ['viseme_' + visemeData.viseme]: [null, intensity, 0]
4032
- }
4033
- });
3272
+
3273
+ if ( opt.isRaw ) {
3274
+ o.isRaw = true;
4034
3275
  }
4035
- console.log('ElevenLabs: Generated', lipsyncAnim.length, 'lip-sync animation frames');
4036
- } else {
4037
- console.warn('ElevenLabs: No visemes available for lip-sync animation');
3276
+
3277
+ if ( Object.keys(o).length ) {
3278
+ this.speechQueue.push(o);
3279
+ if ( !o.isRaw ) {
3280
+ this.speechQueue.push( { break: 300 } );
3281
+ }
3282
+ this.startSpeaking();
4038
3283
  }
4039
-
4040
- // Combine original animation with lip-sync animation
4041
- const combinedAnim = [...line.anim, ...lipsyncAnim];
4042
- console.log('ElevenLabs: Combined animation frames:', combinedAnim.length, '(original:', line.anim.length, '+ lipsync:', lipsyncAnim.length, ')');
4043
-
4044
- // Add to playlist
4045
- this.audioPlaylist.push({ anim: combinedAnim, audio: audioBuffer });
4046
- this.onSubtitles = line.onSubtitles || null;
4047
- this.resetLips();
4048
- if (line.mood) this.setMood(line.mood);
4049
- this.playAudio();
3284
+
4050
3285
  }
4051
3286
 
4052
3287
  /**
4053
- * Synthesize speech using Deepgram Aura-2 TTS
4054
- * @param {Object} line Speech line object
4055
- */
4056
- async synthesizeWithDeepgramTTS(line) {
4057
- // Get the text from the line
4058
- const text = line.text.map(x => x.word).join(' ');
4059
-
4060
- // Deepgram API request
4061
- const voiceModel = line.voice || this.avatar.ttsVoice || this.opt.ttsVoice || "aura-2-thalia-en"; // Default to Thalia
4062
-
4063
- // Build URL with model as query parameter
4064
- const url = `${this.opt.ttsEndpoint}?model=${voiceModel}`;
4065
-
4066
- const response = await fetch(url, {
4067
- method: 'POST',
4068
- headers: {
4069
- 'Authorization': `Token ${this.opt.ttsApikey}`,
4070
- 'Content-Type': 'text/plain',
4071
- 'Accept': 'audio/mpeg'
4072
- },
4073
- body: text
4074
- });
3288
+ * Play audio playlist using Web Audio API.
3289
+ * @param {boolean} [force=false] If true, forces to proceed
3290
+ */
3291
+ async playAudio(force=false) {
3292
+ if ( !this.armature || (this.isAudioPlaying && !force) ) return;
3293
+ this.isAudioPlaying = true;
3294
+ if ( this.audioPlaylist.length ) {
3295
+ const item = this.audioPlaylist.shift();
4075
3296
 
4076
- if (!response.ok) {
4077
- throw new Error(`Deepgram TTS error: ${response.status} ${response.statusText}`);
4078
- }
3297
+ // If Web Audio API is suspended, try to resume it
3298
+ if ( this.audioCtx.state === "suspended" || this.audioCtx.state === "interrupted" ) {
3299
+ const resume = this.audioCtx.resume();
3300
+ const timeout = new Promise((_r, rej) => setTimeout(() => rej("p2"), 1000));
3301
+ try {
3302
+ await Promise.race([resume, timeout]);
3303
+ } catch(e) {
3304
+ console.log("Can't play audio. Web Audio API suspended. This is often due to calling some speak method before the first user action, which is typically prevented by the browser.");
3305
+ this.playAudio(true);
3306
+ return;
3307
+ }
3308
+ }
4079
3309
 
4080
- // Get audio data
4081
- const audioArrayBuffer = await response.arrayBuffer();
4082
- const audioBuffer = await this.audioCtx.decodeAudioData(audioArrayBuffer);
4083
-
4084
- // Use text-based lip-sync with proper error handling
4085
- console.log('Using text-based lip-sync for Deepgram...');
4086
- const lipsyncLang = this.avatar.lipsyncLang || this.opt.lipsyncLang || 'en';
4087
-
4088
- let audioAnalysis;
4089
- try {
4090
- console.log('Lip-sync modules available:', {
4091
- hasLipsync: !!this.lipsync,
4092
- lipsyncKeys: this.lipsync ? Object.keys(this.lipsync) : [],
4093
- lipsyncLang: lipsyncLang
4094
- });
4095
-
4096
- const processedText = this.lipsyncPreProcessText(text, lipsyncLang);
4097
- const lipsyncData = this.lipsyncWordsToVisemes(processedText, lipsyncLang);
4098
-
4099
- console.log('Lip-sync data:', {
4100
- processedText,
4101
- lipsyncData,
4102
- hasVisemes: lipsyncData && lipsyncData.visemes && lipsyncData.visemes.length > 0
4103
- });
4104
-
4105
- if (lipsyncData && lipsyncData.visemes && lipsyncData.visemes.length > 0) {
4106
- // Create audio analysis structure for compatibility
4107
- audioAnalysis = {
4108
- visemes: lipsyncData.visemes.map((viseme, i) => ({
4109
- viseme: viseme,
4110
- startTime: (i * audioBuffer.duration) / lipsyncData.visemes.length,
4111
- endTime: ((i + 1) * audioBuffer.duration) / lipsyncData.visemes.length,
4112
- duration: audioBuffer.duration / lipsyncData.visemes.length,
4113
- intensity: 0.7
4114
- })),
4115
- words: [],
4116
- duration: audioBuffer.duration,
4117
- features: { onsets: [], boundaries: [] }
4118
- };
3310
+ // AudioBuffer
3311
+ let audio;
3312
+ if ( Array.isArray(item.audio) ) {
3313
+ // Convert from PCM samples
3314
+ let buf = this.concatArrayBuffers( item.audio );
3315
+ audio = this.pcmToAudioBuffer(buf);
4119
3316
  } else {
4120
- throw new Error('No visemes generated from text');
3317
+ audio = item.audio;
4121
3318
  }
4122
- } catch (error) {
4123
- console.error('Text-based lip-sync failed, using fallback:', error);
4124
- // Fallback: create simple visemes from text
4125
- const words = text.toLowerCase().split(/\s+/);
4126
- const simpleVisemes = [];
4127
-
4128
- for (const word of words) {
4129
- // Simple phonetic mapping
4130
- for (const char of word) {
4131
- let viseme = 'aa'; // default
4132
- if ('aeiou'.includes(char)) viseme = 'aa';
4133
- else if ('bp'.includes(char)) viseme = 'PP';
4134
- else if ('fv'.includes(char)) viseme = 'FF';
4135
- else if ('st'.includes(char)) viseme = 'SS';
4136
- else if ('dln'.includes(char)) viseme = 'DD';
4137
- else if ('kg'.includes(char)) viseme = 'kk';
4138
- else if ('rw'.includes(char)) viseme = 'RR';
4139
-
4140
- simpleVisemes.push(viseme);
4141
- }
3319
+
3320
+ // Make sure previous audio source is cleared
3321
+ if (this.audioSpeechSource) {
3322
+ try { this.audioSpeechSource.stop?.() } catch(error) {};
3323
+ this.audioSpeechSource.disconnect();
3324
+ this.audioSpeechSource.onended = null;
3325
+ this.audioSpeechSource = null;
4142
3326
  }
4143
-
4144
- audioAnalysis = {
4145
- visemes: simpleVisemes.map((viseme, i) => ({
4146
- viseme: viseme,
4147
- startTime: (i * audioBuffer.duration) / simpleVisemes.length,
4148
- endTime: ((i + 1) * audioBuffer.duration) / simpleVisemes.length,
4149
- duration: audioBuffer.duration / simpleVisemes.length,
4150
- intensity: 0.6
4151
- })),
4152
- words: [],
4153
- duration: audioBuffer.duration,
4154
- features: { onsets: [], boundaries: [] }
3327
+
3328
+ // Create audio source
3329
+ const source = this.audioCtx.createBufferSource();
3330
+ this.audioSpeechSource = source;
3331
+ source.buffer = audio;
3332
+ source.playbackRate.value = 1 / this.animSlowdownRate;
3333
+ source.connect(this.audioAnalyzerNode);
3334
+ source.onended = () => {
3335
+ source.disconnect();
3336
+ source.onended = null;
3337
+ if ( this.audioSpeechSource === source ) {
3338
+ this.audioSpeechSource = null;
3339
+ }
3340
+ this.playAudio(true);
4155
3341
  };
4156
- }
4157
-
4158
- console.log('Deepgram TTS Audio Analysis:', {
4159
- text,
4160
- audioDuration: audioBuffer.duration,
4161
- visemeCount: audioAnalysis.visemes ? audioAnalysis.visemes.length : 0,
4162
- wordCount: audioAnalysis.words ? audioAnalysis.words.length : 0,
4163
- features: {
4164
- onsets: audioAnalysis.features && audioAnalysis.features.onsets ? audioAnalysis.features.onsets.length : 0,
4165
- boundaries: audioAnalysis.features && audioAnalysis.features.phonemeBoundaries ? audioAnalysis.features.phonemeBoundaries.length : 0
4166
- },
4167
- visemes: audioAnalysis.visemes ? audioAnalysis.visemes.slice(0, 3) : [] // Show first 3 visemes for debugging
4168
- });
4169
-
4170
- // Generate precise lip-sync animation from audio analysis
4171
- const lipsyncAnim = [];
4172
- if (audioAnalysis.visemes && audioAnalysis.visemes.length > 0) {
4173
- console.log('Deepgram: Generating lip-sync animation from', audioAnalysis.visemes.length, 'visemes');
4174
- for (let i = 0; i < audioAnalysis.visemes.length; i++) {
4175
- const visemeData = audioAnalysis.visemes[i];
4176
- const time = visemeData.startTime * 1000; // Convert to milliseconds
4177
- const duration = visemeData.duration * 1000;
4178
- const intensity = visemeData.intensity;
4179
-
4180
- lipsyncAnim.push({
4181
- template: { name: 'viseme' },
4182
- ts: [time - Math.min(60, 2 * duration / 3), time + Math.min(25, duration / 2), time + duration + Math.min(60, duration / 2)],
4183
- vs: {
4184
- ['viseme_' + visemeData.viseme]: [null, intensity, 0]
3342
+
3343
+ // Rescale lipsync and push to queue
3344
+ let delay = 0;
3345
+ if ( item.anim ) {
3346
+ // Find the lowest negative time point, if any
3347
+ if ( !item.isRaw ) {
3348
+ delay = Math.abs(Math.min(0, ...item.anim.map( x => Math.min(...x.ts) ) ) );
4185
3349
  }
4186
- });
4187
- }
4188
- console.log('Deepgram: Generated', lipsyncAnim.length, 'lip-sync animation frames');
3350
+ item.anim.forEach( x => {
3351
+ for(let i=0; i<x.ts.length; i++) {
3352
+ x.ts[i] = this.animClock + x.ts[i] + delay;
3353
+ }
3354
+ this.animQueue.push(x);
3355
+ });
3356
+ }
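
A worked example of the delay computation, with hypothetical numbers: viseme ramps may start before t=0 so the mouth is already moving when the audio begins.

    const anim = [ { ts: [-60, 25, 110] }, { ts: [40, 90, 150] } ];
    const delay = Math.abs(Math.min(0, ...anim.map(x => Math.min(...x.ts)))); // 60
    // Each timestamp becomes animClock + ts + 60, and source.start() below
    // is deferred by 60/1000 s, so the t = -60 keyframe fires 60 ms
    // before the first audio sample.
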
3357
+
3358
+ // Play; the delay is converted to seconds so that pre-animations can run first
3359
+ source.start( this.audioCtx.currentTime + delay/1000);
3360
+
4189
3361
  } else {
4190
- console.warn('Deepgram: No visemes available for lip-sync animation');
3362
+ this.isAudioPlaying = false;
3363
+ this.startSpeaking(true);
4191
3364
  }
4192
-
4193
- // Combine original animation with lip-sync animation
4194
- const combinedAnim = [...line.anim, ...lipsyncAnim];
4195
- console.log('Deepgram: Combined animation frames:', combinedAnim.length, '(original:', line.anim.length, '+ lipsync:', lipsyncAnim.length, ')');
4196
-
4197
- // Add to playlist
4198
- this.audioPlaylist.push({ anim: combinedAnim, audio: audioBuffer });
4199
- this.onSubtitles = line.onSubtitles || null;
4200
- this.resetLips();
4201
- if (line.mood) this.setMood(line.mood);
4202
- this.playAudio();
4203
3365
  }
4204
3366
 
4205
3367
  /**
4206
- * Synthesize speech using Azure TTS
4207
- * @param {Object} line Speech line object
4208
- */
4209
- async synthesizeWithAzureTTS(line) {
4210
- // Get the text from the line
4211
- const text = line.text.map(x => x.word).join(' ');
4212
-
4213
- // Azure TTS SSML
4214
- const voiceName = line.voice || this.avatar.ttsVoice || this.opt.ttsVoice || "en-US-AriaNeural";
4215
- const ssml = `
4216
- <speak version="1.0" xmlns="http://www.w3.org/2001/10/synthesis" xml:lang="en-US">
4217
- <voice name="${voiceName}">
4218
- ${text}
4219
- </voice>
4220
- </speak>
4221
- `;
4222
-
4223
- const response = await fetch(this.opt.ttsEndpoint, {
4224
- method: 'POST',
4225
- headers: {
4226
- 'Ocp-Apim-Subscription-Key': this.opt.ttsApikey,
4227
- 'Content-Type': 'application/ssml+xml',
4228
- 'X-Microsoft-OutputFormat': 'audio-16khz-128kbitrate-mono-mp3'
4229
- },
4230
- body: ssml
4231
- });
3368
+ * Take the next item from the speech queue and process it: synthesize
3369
+ * text into speech, play ready-made audio, or handle an emoji or break.
3370
+ * @param {boolean} [force=false] If true, forces to proceed (e.g. after break)
3371
+ */
3372
+ async startSpeaking( force = false ) {
3373
+ if ( !this.armature || (this.isSpeaking && !force) ) return;
3374
+ this.stateName = 'speaking';
3375
+ this.isSpeaking = true;
3376
+ if ( this.speechQueue.length ) {
3377
+ let line = this.speechQueue.shift();
3378
+ if ( line.emoji ) {
4232
3379
 
4233
- if (!response.ok) {
4234
- throw new Error(`Azure TTS error: ${response.status} ${response.statusText}`);
4235
- }
3380
+ // Look at the camera
3381
+ this.lookAtCamera(500);
4236
3382
 
4237
- // Get audio data
4238
- const audioArrayBuffer = await response.arrayBuffer();
4239
- const audioBuffer = await this.audioCtx.decodeAudioData(audioArrayBuffer);
4240
-
4241
- // Analyze audio for precise lip-sync timing
4242
- console.log('Analyzing audio for precise lip-sync...');
4243
- const audioAnalysis = await this.audioAnalyzer.analyzeAudio(audioBuffer, text);
4244
-
4245
- console.log('Azure TTS Audio Analysis:', {
4246
- text,
4247
- audioDuration: audioBuffer.duration,
4248
- visemeCount: audioAnalysis.visemes.length,
4249
- wordCount: audioAnalysis.words.length,
4250
- features: {
4251
- onsets: audioAnalysis.features.onsets.length,
4252
- boundaries: audioAnalysis.features.phonemeBoundaries.length
4253
- }
4254
- });
4255
-
4256
- // Generate precise lip-sync animation from audio analysis
4257
- const lipsyncAnim = [];
4258
- for (let i = 0; i < audioAnalysis.visemes.length; i++) {
4259
- const visemeData = audioAnalysis.visemes[i];
4260
- const time = visemeData.startTime * 1000; // Convert to milliseconds
4261
- const duration = visemeData.duration * 1000;
4262
- const intensity = visemeData.intensity;
4263
-
4264
- lipsyncAnim.push({
4265
- template: { name: 'viseme' },
4266
- ts: [time - Math.min(60, 2 * duration / 3), time + Math.min(25, duration / 2), time + duration + Math.min(60, duration / 2)],
4267
- vs: {
4268
- ['viseme_' + visemeData.viseme]: [null, intensity, 0]
3383
+ // Only emoji
3384
+ let duration = line.emoji.dt.reduce((a,b) => a+b,0);
3385
+ this.animQueue.push( this.animFactory( line.emoji ) );
3386
+ setTimeout( this.startSpeaking.bind(this), duration, true );
3387
+ } else if ( line.break ) {
3388
+ // Break
3389
+ setTimeout( this.startSpeaking.bind(this), line.break, true );
3390
+ } else if ( line.audio ) {
3391
+
3392
+ // Look at the camera
3393
+ if ( !line.isRaw ) {
3394
+ this.lookAtCamera(500);
3395
+ this.speakWithHands();
3396
+ this.resetLips();
4269
3397
  }
4270
- });
4271
- }
4272
-
4273
- // Combine original animation with lip-sync animation
4274
- const combinedAnim = [...line.anim, ...lipsyncAnim];
4275
-
4276
- // Add to playlist
4277
- this.audioPlaylist.push({ anim: combinedAnim, audio: audioBuffer });
4278
- this.onSubtitles = line.onSubtitles || null;
4279
- this.resetLips();
4280
- if (line.mood) this.setMood(line.mood);
4281
- this.playAudio();
4282
- }
4283
3398
 
4284
- /**
4285
- * Synthesize speech using external TTS service (Google Cloud, etc.)
4286
- * @param {Object} line Speech line object
4287
- */
4288
- async synthesizeWithExternalTTS(line) {
3399
+ // Make a playlist
3400
+ this.audioPlaylist.push({ anim: line.anim, audio: line.audio, isRaw: line.isRaw });
3401
+ this.onSubtitles = line.onSubtitles || null;
3402
+ if ( line.mood ) this.setMood( line.mood );
3403
+ this.playAudio();
3404
+
3405
+ } else if ( line.text ) {
3406
+
3407
+ // Look at the camera
3408
+ this.lookAtCamera(500);
3409
+
3410
+ // Spoken text
3411
+ try {
4289
3412
  // Convert text to SSML
4290
3413
  let ssml = "<speak>";
4291
3414
  line.text.forEach( (x,i) => {
@@ -4305,6 +3428,7 @@ class TalkingHead {
4305
3428
  });
4306
3429
  ssml += "</speak>";
4307
3430
 
3431
+
4308
3432
  const o = {
4309
3433
  method: "POST",
4310
3434
  headers: {
@@ -4392,70 +3516,6 @@ class TalkingHead {
4392
3516
 
4393
3517
  } else {
4394
3518
  this.startSpeaking(true);
4395
- }
4396
- }
4397
-
4398
- /**
4399
- * Take the next queue item from the speech queue, convert it to text, and
4400
- * load the audio file.
4401
- * @param {boolean} [force=false] If true, forces to proceed (e.g. after break)
4402
- */
4403
- async startSpeaking( force = false ) {
4404
- if ( !this.armature || (this.isSpeaking && !force) ) return;
4405
- this.stateName = 'speaking';
4406
- this.isSpeaking = true;
4407
- if ( this.speechQueue.length ) {
4408
- let line = this.speechQueue.shift();
4409
- if ( line.emoji ) {
4410
-
4411
- // Look at the camera
4412
- this.lookAtCamera(500);
4413
-
4414
- // Only emoji
4415
- let duration = line.emoji.dt.reduce((a,b) => a+b,0);
4416
- this.animQueue.push( this.animFactory( line.emoji ) );
4417
- setTimeout( this.startSpeaking.bind(this), duration, true );
4418
- } else if ( line.break ) {
4419
- // Break
4420
- setTimeout( this.startSpeaking.bind(this), line.break, true );
4421
- } else if ( line.audio ) {
4422
-
4423
- // Look at the camera
4424
- if ( !line.isRaw ) {
4425
- this.lookAtCamera(500);
4426
- this.speakWithHands();
4427
- this.resetLips();
4428
- }
4429
-
4430
- // Make a playlist
4431
- this.audioPlaylist.push({ anim: line.anim, audio: line.audio, isRaw: line.isRaw });
4432
- this.onSubtitles = line.onSubtitles || null;
4433
- if ( line.mood ) this.setMood( line.mood );
4434
- this.playAudio();
4435
-
4436
- } else if ( line.text ) {
4437
-
4438
- // Look at the camera
4439
- this.lookAtCamera(500);
4440
-
4441
- // Spoken text
4442
- try {
4443
- // Check which TTS service to use
4444
- if (!this.opt.ttsEndpoint || this.opt.ttsEndpoint === "") {
4445
- // Use browser's built-in speech synthesis
4446
- await this.synthesizeWithBrowserTTS(line);
4447
- } else if (this.opt.ttsService === "elevenlabs") {
4448
- // Use ElevenLabs TTS
4449
- await this.synthesizeWithElevenLabsTTS(line);
4450
- } else if (this.opt.ttsService === "deepgram") {
4451
- // Use Deepgram Aura-2 TTS
4452
- await this.synthesizeWithDeepgramTTS(line);
4453
- } else if (this.opt.ttsService === "azure") {
4454
- // Use Azure TTS
4455
- await this.synthesizeWithAzureTTS(line);
4456
- } else {
4457
- // Use external TTS service (Google Cloud, etc.)
4458
- await this.synthesizeWithExternalTTS(line);
4459
3519
  }
4460
3520
  } catch (error) {
4461
3521
  console.error("Error:", error);
@@ -4491,7 +3551,7 @@ class TalkingHead {
4491
3551
  * Pause speaking.
4492
3552
  */
4493
3553
  pauseSpeaking() {
4494
- try { this.audioSpeechSource.stop(); } catch(error) {}
3554
+ try { this.audioSpeechSource?.stop(); } catch(error) {}
4495
3555
  this.audioPlaylist.length = 0;
4496
3556
  this.stateName = 'idle';
4497
3557
  this.isSpeaking = false;
@@ -4507,7 +3567,7 @@ class TalkingHead {
4507
3567
  * Stop speaking and clear the speech queue.
4508
3568
  */
4509
3569
  stopSpeaking() {
4510
- try { this.audioSpeechSource.stop(); } catch(error) {}
3570
+ try { this.audioSpeechSource?.stop(); } catch(error) {}
4511
3571
  this.audioPlaylist.length = 0;
4512
3572
  this.speechQueue.length = 0;
4513
3573
  this.animQueue = this.animQueue.filter( x => x.template.name !== 'viseme' && x.template.name !== 'subtitles' && x.template.name !== 'blendshapes' );
@@ -5265,8 +4325,12 @@ class TalkingHead {
5265
4325
  */
5266
4326
  setSlowdownRate(k) {
5267
4327
  this.animSlowdownRate = k;
5268
- this.audioSpeechSource.playbackRate.value = 1 / this.animSlowdownRate;
5269
- this.audioBackgroundSource.playbackRate.value = 1 / this.animSlowdownRate;
4328
+ if ( this.audioSpeechSource ) {
4329
+ this.audioSpeechSource.playbackRate.value = 1 / this.animSlowdownRate;
4330
+ }
4331
+ if ( this.audioBackgroundSource ) {
4332
+ this.audioBackgroundSource.playbackRate.value = 1 / this.animSlowdownRate;
4333
+ }
5270
4334
  }
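
A usage sketch, assuming head is a TalkingHead instance: the rate k scales the animation clock, and dividing audio playbackRate by k keeps lips and sound in sync. The null guards above matter because either source may be absent between utterances.

    head.setSlowdownRate(2); // half speed; buffer-source audio also drops in pitch
    head.setSlowdownRate(1); // back to normal
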
5271
4335
 
5272
4336
  /**
@@ -5295,7 +4359,7 @@ class TalkingHead {
5295
4359
  this.animTimeLast = performance.now();
5296
4360
  this.isRunning = true;
5297
4361
  if ( !this.isAvatarOnly ) {
5298
- requestAnimationFrame( this.animate.bind(this) );
4362
+ this._raf = requestAnimationFrame( this.animate );
5299
4363
  }
5300
4364
  }
5301
4365
  }
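
Storing the handle works together with the constructor change earlier in this diff (this.animate = this.animate.bind(this)): binding once gives a stable callback whose pending frame can be cancelled in dispose(). The pattern in isolation, as a minimal sketch:

    class Loop {
      constructor() {
        this.animate = this.animate.bind(this); // one stable reference
        this._raf = null;
      }
      animate(t) {
        // ...update and render...
        this._raf = requestAnimationFrame(this.animate);
      }
      start() { this._raf = requestAnimationFrame(this.animate); }
      stop() {
        if (this._raf !== null) {
          cancelAnimationFrame(this._raf); // no leaked pending callback
          this._raf = null;
        }
      }
    }
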
@@ -5351,7 +4415,6 @@ class TalkingHead {
5351
4415
  * @param {number} [ndx=0] Index of the clip
5352
4416
  * @param {number} [scale=0.01] Position scale factor
5353
4417
  */
5354
-
5355
4418
  async playAnimation(url, onprogress=null, dur=10, ndx=0, scale=0.01, disablePositionLock=false) {
5356
4419
  if ( !this.armature ) return;
5357
4420
 
@@ -5405,97 +4468,26 @@ class TalkingHead {
5405
4468
  action.fadeIn(0.5).play();
5406
4469
  console.log('FBX animation started successfully:', url);
5407
4470
  } catch (error) {
5408
- console.warn('FBX animation failed to start:', error);
5409
- // Stop the animation and unlock position on error
5410
- this.stopAnimation();
5411
- return;
5412
- }
5413
-
5414
- // Check if the animation actually has valid tracks
5415
- if (action.getClip().tracks.length === 0) {
5416
- console.warn('FBX animation has no valid tracks, stopping');
5417
- this.stopAnimation();
5418
- return;
5419
- }
5420
-
5421
- } else {
5422
-
5423
- // Validate file extension
5424
- const fileExtension = url.split('.').pop().toLowerCase();
5425
- if (fileExtension !== 'fbx') {
5426
- console.error(`Invalid file type for FBX animation: ${url}. Expected .fbx file.`);
5427
- return;
5428
- }
5429
-
5430
- // Check if file exists before attempting to load
5431
- let fileExists = false;
5432
- try {
5433
- const response = await fetch(url, { method: 'HEAD' });
5434
- fileExists = response.ok;
5435
- if (!fileExists) {
5436
- console.error(`FBX file not found at ${url}. Status: ${response.status}`);
5437
- console.error('Please check:');
5438
- console.error('1. File path is correct (note: path is case-sensitive)');
5439
- console.error('2. File exists in your public folder');
5440
- console.error('3. File is accessible (not blocked by server)');
5441
- return;
5442
- }
5443
- } catch (fetchError) {
5444
- console.warn(`Could not verify file existence for ${url}, attempting to load anyway:`, fetchError);
5445
- }
5446
-
5447
- // Load animation with error handling
5448
- const loader = new FBXLoader();
5449
- let fbx;
5450
-
5451
- try {
5452
- fbx = await loader.loadAsync( url, onprogress );
5453
- } catch (error) {
5454
- console.error(`Failed to load FBX animation from ${url}:`, error);
5455
- console.error('Error details:', {
5456
- message: error.message,
5457
- url: url,
5458
- suggestion: 'Make sure the file is a valid FBX file and the path is correct'
5459
- });
5460
-
5461
- // Try to provide helpful error message
5462
- if (error.message && error.message.includes('version number')) {
5463
- console.error('FBX Loader Error: Cannot find version number');
5464
- console.error('This error usually means:');
5465
- console.error('1. The file is not a valid FBX file (might be GLB, corrupted, or wrong format)');
5466
- console.error('2. The file might be corrupted');
5467
- console.error('3. The file path might be incorrect');
5468
- console.error('4. The server returned an HTML error page instead of the FBX file');
5469
- console.error('5. The file might not exist at that path');
5470
- console.error('');
5471
- console.error('Solution: Please verify:');
5472
- console.error(` - File exists at: ${url}`);
5473
- console.error(' - File is a valid FBX binary file');
5474
- console.error(' - File path matches your public folder structure');
5475
- console.error(' - File is not corrupted');
5476
- }
5477
-
5478
- // Try to fetch and check what we actually got
5479
- try {
5480
- const response = await fetch(url);
5481
- const contentType = response.headers.get('content-type');
5482
- const text = await response.text();
5483
- console.error(`Response details:`, {
5484
- status: response.status,
5485
- contentType: contentType,
5486
- firstBytes: text.substring(0, 100),
5487
- isHTML: text.trim().startsWith('<!DOCTYPE') || text.trim().startsWith('<html')
5488
- });
5489
- if (text.trim().startsWith('<!DOCTYPE') || text.trim().startsWith('<html')) {
5490
- console.error('The server returned an HTML page instead of an FBX file. The file path is likely incorrect.');
5491
- }
5492
- } catch (fetchError) {
5493
- console.error('Could not fetch file for debugging:', fetchError);
5494
- }
5495
-
4471
+ console.warn('FBX animation failed to start:', error);
4472
+ // Stop the animation and unlock position on error
4473
+ this.stopAnimation();
4474
+ return;
4475
+ }
4476
+
4477
+ // Check if the animation actually has valid tracks
4478
+ if (action.getClip().tracks.length === 0) {
4479
+ console.warn('FBX animation has no valid tracks, stopping');
4480
+ this.stopAnimation();
5496
4481
  return;
5497
4482
  }
5498
4483
 
4484
+ } else {
4485
+
4486
+ // Load animation
4487
+ const loader = new FBXLoader();
4488
+
4489
+ let fbx = await loader.loadAsync( url, onprogress );
4490
+
5499
4491
  if ( fbx && fbx.animations && fbx.animations[ndx] ) {
5500
4492
  let anim = fbx.animations[ndx];
5501
4493
 
@@ -5647,31 +4639,6 @@ class TalkingHead {
5647
4639
  return null; // No mapping found
5648
4640
  };
5649
4641
 
5650
- // Debug: Log FBX bone names and avatar bone names for comparison
5651
- const fbxBoneNames = new Set();
5652
- anim.tracks.forEach(track => {
5653
- const trackParts = track.name.split('.');
5654
- fbxBoneNames.add(trackParts[0]);
5655
- });
5656
-
5657
- console.log('=== Ready Player Me Animation Bone Analysis ===');
5658
- console.log('FBX bone names:', Array.from(fbxBoneNames).sort().join(', '));
5659
- console.log('Avatar skeleton bone names:', Array.from(availableBones).sort().join(', '));
5660
-
5661
- // Check for arm bones specifically
5662
- const fbxArmBones = Array.from(fbxBoneNames).filter(b =>
5663
- b.toLowerCase().includes('arm') ||
5664
- b.toLowerCase().includes('hand') ||
5665
- b.toLowerCase().includes('shoulder')
5666
- );
5667
- const avatarArmBones = Array.from(availableBones).filter(b =>
5668
- b.includes('Arm') ||
5669
- b.includes('Hand') ||
5670
- b.includes('Shoulder')
5671
- );
5672
- console.log('FBX arm/hand/shoulder bones:', fbxArmBones.sort().join(', '));
5673
- console.log('Avatar arm/hand/shoulder bones:', avatarArmBones.sort().join(', '));
5674
-
5675
4642
  // Filter and map animation tracks
5676
4643
  const mappedTracks = [];
5677
4644
  const unmappedBones = new Set();
@@ -5690,11 +4657,6 @@ class TalkingHead {
5690
4657
  const newTrackName = `${mappedBoneName}.${property}`;
5691
4658
  const newTrack = track.clone();
5692
4659
  newTrack.name = newTrackName;
5693
-
5694
- // Note: Rotation corrections removed - they were causing issues with both arms
5695
- // If left arm still has issues, it's likely a bone mapping problem, not rotation
5696
- // Focus on getting bone names mapped correctly first
5697
-
5698
4660
  mappedTracks.push(newTrack);
5699
4661
 
5700
4662
  // Store mapping for logging
@@ -5703,12 +4665,6 @@ class TalkingHead {
5703
4665
  }
5704
4666
  } else {
5705
4667
  unmappedBones.add(fbxBoneName);
5706
- // Log unmapped bones (especially arm bones)
5707
- if (fbxBoneName.toLowerCase().includes('arm') ||
5708
- fbxBoneName.toLowerCase().includes('hand') ||
5709
- fbxBoneName.toLowerCase().includes('shoulder')) {
5710
- console.warn(`⚠️ Arm bone "${fbxBoneName}" could not be mapped to avatar skeleton`);
5711
- }
5712
4668
  }
5713
4669
  });
5714
4670
 
@@ -5724,24 +4680,11 @@ class TalkingHead {
5724
4680
  console.log(`✓ Mapped ${boneNameMap.size} bone(s):`,
5725
4681
  Array.from(boneNameMap.entries()).map(([from, to]) => `${from}→${to}`).join(', '));
5726
4682
  }
5727
-
5728
- // Check if arm bones were mapped
5729
- const mappedArmBones = Array.from(boneNameMap.values()).filter(b =>
5730
- b.includes('Arm') || b.includes('Hand') || b.includes('Shoulder')
5731
- );
5732
- if (mappedArmBones.length > 0) {
5733
- console.log(`✓ Arm bones mapped: ${mappedArmBones.join(', ')}`);
5734
- } else {
5735
- console.warn('⚠️ No arm bones were mapped! This may cause arm rigging issues.');
5736
- }
5737
- } else {
5738
- console.error('❌ No tracks could be mapped! Animation may not work correctly.');
5739
4683
  }
5740
4684
 
5741
4685
  // Rename and scale Mixamo tracks, create a pose
5742
4686
  const props = {};
5743
4687
  anim.tracks.forEach( t => {
5744
- t.name = t.name.replaceAll('mixamorig','');
5745
4688
  const ids = t.name.split('.');
5746
4689
  if ( ids[1] === 'position' ) {
5747
4690
  for(let i=0; i<t.values.length; i++ ) {
@@ -5777,13 +4720,6 @@ class TalkingHead {
5777
4720
  } else {
5778
4721
  const msg = 'Animation ' + url + ' (ndx=' + ndx + ') not found';
5779
4722
  console.error(msg);
5780
- if (fbx && fbx.animations) {
5781
- console.error(`FBX file loaded but has ${fbx.animations.length} animation(s), requested index ${ndx}`);
5782
- } else if (fbx) {
5783
- console.error('FBX file loaded but contains no animations');
5784
- } else {
5785
- console.error('FBX file failed to load or is invalid');
5786
- }
5787
4723
  }
5788
4724
  }
5789
4725
  }
@@ -5793,21 +4729,16 @@ class TalkingHead {
5793
4729
  */
5794
4730
  stopAnimation() {
5795
4731
 
5796
- // Stop only the current FBX action, preserve mixer for morph targets
5797
- if (this.currentFBXAction) {
5798
- this.currentFBXAction.stop();
5799
- this.currentFBXAction = null;
5800
- console.log('FBX animation action stopped, mixer preserved for lip-sync');
5801
- }
5802
-
5803
- // Only destroy mixer if no other animations are running
5804
- // This allows morph target animations (lip-sync) to continue
5805
- if (this.mixer && this.mixer._actions.length === 0) {
4732
+ // Stop mixer
4733
+ if (this.mixer) {
4734
+ this.mixer.removeEventListener('finished', this._mixerHandler);
4735
+ this.mixer.stopAllAction();
4736
+ this.mixer.uncacheRoot(this.armature);
5806
4737
  this.mixer = null;
5807
- console.log('Mixer destroyed as no actions remain');
4738
+ this._mixerHandler = null;
5808
4739
  }
5809
-
5810
- // Unlock position when animation stops (only if it was locked)
4740
+
4741
+ // Unlock position if it was locked
5811
4742
  if (this.positionWasLocked) {
5812
4743
  this.unlockAvatarPosition();
5813
4744
  console.log('Position unlocked after FBX animation stopped');
@@ -5815,347 +4746,723 @@ class TalkingHead {
5815
4746
  console.log('Position was not locked, no unlock needed');
5816
4747
  }
5817
4748
 
5818
- // Restart gesture
4749
+ // Restart gesture
4750
+ if ( this.gesture ) {
4751
+ for( let [p,v] of Object.entries(this.gesture) ) {
4752
+ v.t = this.animClock;
4753
+ v.d = 1000;
4754
+ if ( this.poseTarget.props.hasOwnProperty(p) ) {
4755
+ this.poseTarget.props[p].copy(v);
4756
+ this.poseTarget.props[p].t = this.animClock;
4757
+ this.poseTarget.props[p].d = 1000;
4758
+ }
4759
+ }
4760
+ }
4761
+
4762
+ // Restart pose animation
4763
+ let anim = this.animQueue.find( x => x.template.name === 'pose' );
4764
+ if ( anim ) {
4765
+ anim.ts[0] = this.animClock;
4766
+ }
4767
+ this.setPoseFromTemplate( null );
4768
+
4769
+ }
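
The mixer teardown at the top of stopAnimation replaces the old conditional cleanup. The three three.js calls, isolated as a sketch (handler stands in for the stored _mixerHandler 'finished' listener):

    // mixer: THREE.AnimationMixer, root: the armature it was created on.
    function teardownMixer(mixer, root, handler) {
      if (!mixer) return;
      if (handler) mixer.removeEventListener('finished', handler);
      mixer.stopAllAction();   // halt every running AnimationAction
      mixer.uncacheRoot(root); // drop cached property bindings for the rig
    }
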
4770
+
4771
+
4772
+ /**
4773
+ * Play RPM/Mixamo pose.
4774
+ * @param {string|Object} url Pose name | URL to FBX
4775
+ * @param {progressfn} [onprogress=null] Callback for progress
4776
+ * @param {number} [dur=5] Duration of the pose in seconds
4777
+ * @param {number} [ndx=0] Index of the clip
4778
+ * @param {number} [scale=0.01] Position scale factor
4779
+ */
4780
+ async playPose(url, onprogress=null, dur=5, ndx=0, scale=0.01) {
4781
+
4782
+ if ( !this.armature ) return;
4783
+
4784
+ // Check if we already have the pose template ready
4785
+ let pose = this.poseTemplates[url];
4786
+ if ( !pose ) {
4787
+ const item = this.animPoses.find( x => x.url === url+'-'+ndx );
4788
+ if ( item ) {
4789
+ pose = item.pose;
4790
+ }
4791
+ }
4792
+
4793
+ // If we have the template, use it, otherwise try to load it
4794
+ if ( pose ) {
4795
+
4796
+ this.poseName = url;
4797
+
4798
+ if (this.mixer) {
4799
+ this.mixer.removeEventListener('finished', this._mixerHandler);
4800
+ this.mixer.stopAllAction();
4801
+ this.mixer.uncacheRoot(this.armature);
4802
+ this.mixer = null;
4803
+ this._mixerHandler = null;
4804
+ }
4805
+ let anim = this.animQueue.find( x => x.template.name === 'pose' );
4806
+ if ( anim ) {
4807
+ anim.ts[0] = this.animClock + (dur * 1000) + 2000;
4808
+ }
4809
+ this.setPoseFromTemplate( pose );
4810
+
4811
+ } else {
4812
+
4813
+ // Load animation
4814
+ const loader = new FBXLoader();
4815
+
4816
+ let fbx = await loader.loadAsync( url, onprogress );
4817
+
4818
+ if ( fbx && fbx.animations && fbx.animations[ndx] ) {
4819
+ let anim = fbx.animations[ndx];
4820
+
4821
+ // Create a pose
4822
+ const props = {};
4823
+ anim.tracks.forEach( t => {
4824
+
4825
+ // Rename and scale Mixamo tracks
4826
+ t.name = t.name.replaceAll('mixamorig','');
4827
+ const ids = t.name.split('.');
4828
+ if ( ids[1] === 'position' ) {
4829
+ props[t.name] = new THREE.Vector3( t.values[0] * scale, t.values[1] * scale, t.values[2] * scale);
4830
+ } else if ( ids[1] === 'quaternion' ) {
4831
+ props[t.name] = new THREE.Quaternion( t.values[0], t.values[1], t.values[2], t.values[3] );
4832
+ } else if ( ids[1] === 'rotation' ) {
4833
+ props[ids[0]+".quaternion"] = new THREE.Quaternion().setFromEuler(new THREE.Euler( t.values[0], t.values[1], t.values[2],'XYZ' )).normalize();
4834
+ }
4835
+ });
4836
+
4837
+ // Add to pose
4838
+ const newPose = { props: props };
4839
+ if ( props['Hips.position'] ) {
4840
+ if ( props['Hips.position'].y < 0.5 ) {
4841
+ newPose.lying = true;
4842
+ } else {
4843
+ newPose.standing = true;
4844
+ }
4845
+ }
4846
+ this.animPoses.push({
4847
+ url: url+'-'+ndx,
4848
+ pose: newPose
4849
+ });
4850
+
4851
+ // Play
4852
+ this.playPose(url, onprogress, dur, ndx, scale);
4853
+
4854
+ } else {
4855
+ const msg = 'Pose ' + url + ' (ndx=' + ndx + ') not found';
4856
+ console.error(msg);
4857
+ }
4858
+ }
4859
+ }
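
A usage sketch, assuming head is a TalkingHead instance and the path is illustrative. The first call converts the FBX clip into a pose template and caches it in animPoses under url + '-' + ndx; the recursive playPose call then takes the fast path, as do later calls.

    await head.playPose('/poses/sitting.fbx', null, 5, 0, 0.01); // loads, caches, plays
    await head.playPose('/poses/sitting.fbx');                   // served from the cache
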
4860
+
4861
+ /**
4862
+ * Stop the pose. (Functionality is the same as in stopAnimation.)
4863
+ */
4864
+ stopPose() {
4865
+ this.stopAnimation();
4866
+ }
4867
+
4868
+ /**
4869
+ * Play a gesture, which is either a hand gesture, an emoji animation or their
4870
+ * combination.
4871
+ * @param {string} name Gesture name
4872
+ * @param {number} [dur=3] Duration of the gesture in seconds
4873
+ * @param {boolean} [mirror=false] Mirror gesture
4874
+ * @param {number} [ms=1000] Transition time in milliseconds
4875
+ */
4876
+ playGesture(name, dur=3, mirror=false, ms=1000) {
4877
+
4878
+ if ( !this.armature ) return;
4879
+
4880
+ // Hand gesture, if any
4881
+ let g = this.gestureTemplates[name];
4882
+ if ( g ) {
4883
+
4884
+ // New gesture always overrides the existing one
4885
+ if ( this.gestureTimeout ) {
4886
+ clearTimeout( this.gestureTimeout );
4887
+ this.gestureTimeout = null;
4888
+ }
4889
+
4890
+ // Stop talking hands animation
4891
+ let ndx = this.animQueue.findIndex( y => y.template.name === "talkinghands" );
4892
+ if ( ndx !== -1 ) {
4893
+ this.animQueue[ndx].ts = this.animQueue[ndx].ts.map( x => 0 );
4894
+ }
4895
+
4896
+ // Set gesture
4897
+ this.gesture = this.propsToThreeObjects( g );
4898
+ if ( mirror ) {
4899
+ this.gesture = this.mirrorPose( this.gesture );
4900
+ }
4901
+ if ( name === "namaste" && this.avatar.body === 'M' ) {
4902
+ // Workaround for the male model so that the hands meet
4903
+ this.gesture["RightArm.quaternion"].rotateTowards( new THREE.Quaternion(0,1,0,0), -0.25);
4904
+ this.gesture["LeftArm.quaternion"].rotateTowards( new THREE.Quaternion(0,1,0,0), -0.25);
4905
+ }
4906
+
4907
+ // Apply to target
4908
+ for( let [p,val] of Object.entries(this.gesture) ) {
4909
+ val.t = this.animClock;
4910
+ val.d = ms;
4911
+ if ( this.poseTarget.props.hasOwnProperty(p) ) {
4912
+ this.poseTarget.props[p].copy(val);
4913
+ this.poseTarget.props[p].t = this.animClock;
4914
+ this.poseTarget.props[p].d = ms;
4915
+ }
4916
+ }
4917
+
4918
+ // Timer
4919
+ if ( dur && Number.isFinite(dur) ) {
4920
+ this.gestureTimeout = setTimeout( this.stopGesture.bind(this,ms), 1000 * dur);
4921
+ }
4922
+ }
4923
+
4924
+ // Animated emoji, if any
4925
+ let em = this.animEmojis[name];
4926
+ if ( em ) {
4927
+
4928
+ // Follow link
4929
+ if ( em && em.link ) {
4930
+ em = this.animEmojis[em.link];
4931
+ }
4932
+
4933
+ if ( em ) {
4934
+ // Look at the camera for 500 ms
4935
+ this.lookAtCamera(500);
4936
+
4937
+ // Create animation and tag as gesture
4938
+ const anim = this.animFactory( em );
4939
+ anim.gesture = true;
4940
+
4941
+ // Rescale duration
4942
+ if ( dur && Number.isFinite(dur) ) {
4943
+ const first = anim.ts[0];
4944
+ const last = anim.ts[ anim.ts.length -1 ];
4945
+ const total = last - first;
4946
+ const excess = (dur * 1000) - total;
4947
+
4948
+ // If longer, increase longer parts; if shorter, scale everything
4949
+ if ( excess > 0 ) {
4950
+ const dt = [];
4951
+ for( let i=1; i<anim.ts.length; i++ ) dt.push( anim.ts[i] - anim.ts[i-1] );
4952
+ const rescale = em.template?.rescale || dt.map( x => x / total );
4954
+ anim.ts = anim.ts.map( (x,i,arr) => {
4955
+ return (i===0) ? first : (arr[i-1] + dt[i-1] + rescale[i-1] * excess);
4956
+ });
4957
+ } else {
4958
+ const scale = (dur * 1000) / total;
4959
+ anim.ts = anim.ts.map( x => first + scale * (x - first) );
4960
+ }
4961
+ }
4962
+
4963
+ this.animQueue.push( anim );
4964
+ }
4965
+ }
4966
+
4967
+ }
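
The duration rescale above is easiest to see with numbers. A worked sketch with hypothetical timestamps: an emoji clip spanning 800 ms is stretched to a requested 2 s, and the excess is distributed in proportion to each segment's share of the original span.

    const ts = [0, 200, 800];
    const dur = 2; // requested seconds
    const total = ts[ts.length - 1] - ts[0];         // 800
    const dt = ts.slice(1).map((x, i) => x - ts[i]); // [200, 600]
    const excess = dur * 1000 - total;               // 1200
    const rescale = dt.map(x => x / total);          // [0.25, 0.75]
    const out = [ts[0]];
    for (let i = 0; i < dt.length; i++) {
      out.push(out[i] + dt[i] + rescale[i] * excess);
    }
    // out === [0, 500, 2000]: the longer 600 ms leg absorbs 900 ms of excess
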
4968
+
4969
+ /**
4970
+ * Stop the gesture.
4971
+ * @param {number} [ms=1000] Transition time in milliseconds
4972
+ */
4973
+ stopGesture(ms=1000) {
4974
+
4975
+ // Stop gesture timer
4976
+ if ( this.gestureTimeout ) {
4977
+ clearTimeout( this.gestureTimeout );
4978
+ this.gestureTimeout = null;
4979
+ }
4980
+
4981
+ // Stop hand gesture, if any
5819
4982
  if ( this.gesture ) {
5820
- for( let [p,v] of Object.entries(this.gesture) ) {
5821
- v.t = this.animClock;
5822
- v.d = 1000;
4983
+ const gs = Object.entries(this.gesture);
4984
+ this.gesture = null;
4985
+ for( const [p,val] of gs ) {
5823
4986
  if ( this.poseTarget.props.hasOwnProperty(p) ) {
5824
- this.poseTarget.props[p].copy(v);
4987
+ this.poseTarget.props[p].copy( this.getPoseTemplateProp(p) );
5825
4988
  this.poseTarget.props[p].t = this.animClock;
5826
- this.poseTarget.props[p].d = 1000;
4989
+ this.poseTarget.props[p].d = ms;
5827
4990
  }
5828
4991
  }
5829
4992
  }
5830
4993
 
5831
- // Restart pose animation
5832
- let anim = this.animQueue.find( x => x.template.name === 'pose' );
5833
- if ( anim ) {
5834
- anim.ts[0] = this.animClock;
4994
+ // Stop animated emoji gesture, if any
4995
+ let i = this.animQueue.findIndex( y => y.gesture );
4996
+ if ( i !== -1 ) {
4997
+ this.animQueue.splice(i, 1);
5835
4998
  }
5836
- this.setPoseFromTemplate( null );
5837
4999
 
5838
5000
  }
5839
5001
 
5840
-
5841
5002
  /**
5842
- * Play RPM/Mixamo pose.
5843
- * @param {string|Object} url Pose name | URL to FBX
5844
- * @param {progressfn} [onprogress=null] Callback for progress
5845
- * @param {number} [dur=5] Duration of the pose in seconds
5846
- * @param {number} [ndx=0] Index of the clip
5847
- * @param {number} [scale=0.01] Position scale factor
5003
+ * Cyclic Coordinate Descent (CCD) Inverse Kinematic (IK) algorithm.
5004
+ * Adapted from:
5005
+ * https://github.com/mrdoob/three.js/blob/master/examples/jsm/animation/CCDIKSolver.js
5006
+ * @param {Object} ik IK configuration object
5007
+ * @param {Vector3} [target=null] Target coordinate, if null return to template
5008
+ * @param {Boolean} [relative=false] If true, target is relative to root
5009
+ * @param {numeric} [d=null] If set, apply in d milliseconds
5848
5010
  */
5849
- async playPose(url, onprogress=null, dur=5, ndx=0, scale=0.01) {
5850
-
5851
- if ( !this.armature ) return;
5011
+ ikSolve(ik, target=null, relative=false, d=null) {
5012
+ const targetVec = new THREE.Vector3();
5013
+ const effectorPos = new THREE.Vector3();
5014
+ const effectorVec = new THREE.Vector3();
5015
+ const linkPos = new THREE.Vector3();
5016
+ const invLinkQ = new THREE.Quaternion();
5017
+ const linkScale = new THREE.Vector3();
5018
+ const axis = new THREE.Vector3();
5019
+ const vector = new THREE.Vector3();
5852
5020
 
5853
- // Check if we already have the pose template ready
5854
- let pose = this.poseTemplates[url];
5855
- if ( !pose ) {
5856
- const item = this.animPoses.find( x => x.url === url+'-'+ndx );
5857
- if ( item ) {
5858
- pose = item.pose;
5859
- }
5021
+ // Reset IK setup positions and rotations
5022
+ const root = this.ikMesh.getObjectByName(ik.root);
5023
+ root.position.setFromMatrixPosition( this.armature.getObjectByName(ik.root).matrixWorld );
5024
+ root.quaternion.setFromRotationMatrix( this.armature.getObjectByName(ik.root).matrixWorld );
5025
+ if ( target && relative ) {
5026
+ target.applyQuaternion(this.armature.quaternion).add( root.position );
5860
5027
  }
5028
+ const effector = this.ikMesh.getObjectByName(ik.effector);
5029
+ const links = ik.links;
5030
+ links.forEach( x => {
5031
+ x.bone = this.ikMesh.getObjectByName(x.link);
5032
+ x.bone.quaternion.copy( this.getPoseTemplateProp(x.link+'.quaternion') );
5033
+ });
5034
+ root.updateMatrixWorld(true);
5035
+ const iterations = ik.iterations || 10;
5861
5036
 
5862
- // If we have the template, use it, otherwise try to load it
5863
- if ( pose ) {
5037
+ // Iterate
5038
+ if ( target ) {
5039
+ for ( let i = 0; i < iterations; i ++ ) {
5040
+ let rotated = false;
5041
+ for ( let j = 0, jl = links.length; j < jl; j++ ) {
5042
+ const bone = links[j].bone;
5043
+ bone.matrixWorld.decompose( linkPos, invLinkQ, linkScale );
5044
+ invLinkQ.invert();
5045
+ effectorPos.setFromMatrixPosition( effector.matrixWorld );
5046
+ effectorVec.subVectors( effectorPos, linkPos );
5047
+ effectorVec.applyQuaternion( invLinkQ );
5048
+ effectorVec.normalize();
5049
+ targetVec.subVectors( target, linkPos );
5050
+ targetVec.applyQuaternion( invLinkQ );
5051
+ targetVec.normalize();
5052
+ let angle = targetVec.dot( effectorVec );
5053
+ if ( angle > 1.0 ) {
5054
+ angle = 1.0;
5055
+ } else if ( angle < - 1.0 ) {
5056
+ angle = - 1.0;
5057
+ }
5058
+ angle = Math.acos( angle );
5059
+ if ( angle < 1e-5 ) continue;
5060
+ if ( links[j].minAngle !== undefined && angle < links[j].minAngle ) {
5061
+ angle = links[j].minAngle;
5062
+ }
5063
+ if ( links[j].maxAngle !== undefined && angle > links[j].maxAngle ) {
5064
+ angle = links[j].maxAngle;
5065
+ }
5066
+ axis.crossVectors( effectorVec, targetVec );
5067
+ axis.normalize();
5068
+ q.setFromAxisAngle( axis, angle );
5069
+ bone.quaternion.multiply( q );
5864
5070
 
5865
- this.poseName = url;
5071
+ // Constraints
5072
+ bone.rotation.setFromVector3( vector.setFromEuler( bone.rotation ).clamp( new THREE.Vector3(
5073
+ links[j].minx !== undefined ? links[j].minx : -Infinity,
5074
+ links[j].miny !== undefined ? links[j].miny : -Infinity,
5075
+ links[j].minz !== undefined ? links[j].minz : -Infinity
5076
+ ), new THREE.Vector3(
5077
+ links[j].maxx !== undefined ? links[j].maxx : Infinity,
5078
+ links[j].maxy !== undefined ? links[j].maxy : Infinity,
5079
+ links[j].maxz !== undefined ? links[j].maxz : Infinity
5080
+ )) );
5866
5081
 
5867
- this.mixer = null;
5868
- let anim = this.animQueue.find( x => x.template.name === 'pose' );
5869
- if ( anim ) {
5870
- anim.ts[0] = this.animClock + (dur * 1000) + 2000;
5082
+ bone.updateMatrixWorld( true );
5083
+ rotated = true;
5084
+ }
5085
+ if ( !rotated ) break;
5871
5086
  }
5872
- this.setPoseFromTemplate( pose );
5873
-
5874
- } else {
5875
-
5876
- // Load animation
5877
- const loader = new FBXLoader();
5878
-
5879
- let fbx = await loader.loadAsync( url, onprogress );
5087
+ }
5880
5088
 
5881
- if ( fbx && fbx.animations && fbx.animations[ndx] ) {
5882
- let anim = fbx.animations[ndx];
5089
+ // Apply
5090
+ if ( d ) {
5091
+ links.forEach( x => {
5092
+ this.poseTarget.props[x.link+".quaternion"].copy( x.bone.quaternion );
5093
+ this.poseTarget.props[x.link+".quaternion"].t = this.animClock;
5094
+ this.poseTarget.props[x.link+".quaternion"].d = d;
5095
+ });
5096
+ }
5097
+ }
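
Each CCD iteration walks the link chain and rotates one bone at a time so the effector swings toward the target. The per-link step, condensed into a standalone sketch (the min/max constraint clamping is omitted here):

    import * as THREE from 'three';

    function ccdStep(bone, effectorWorld, targetWorld) {
      const linkPos = new THREE.Vector3();
      const invLinkQ = new THREE.Quaternion();
      const scale = new THREE.Vector3();
      bone.matrixWorld.decompose(linkPos, invLinkQ, scale);
      invLinkQ.invert();
      // Directions to effector and target, expressed in the bone's local frame
      const ev = effectorWorld.clone().sub(linkPos).applyQuaternion(invLinkQ).normalize();
      const tv = targetWorld.clone().sub(linkPos).applyQuaternion(invLinkQ).normalize();
      const angle = Math.acos(THREE.MathUtils.clamp(ev.dot(tv), -1, 1));
      if (angle < 1e-5) return false; // already aligned
      const axis = new THREE.Vector3().crossVectors(ev, tv).normalize();
      bone.quaternion.multiply(new THREE.Quaternion().setFromAxisAngle(axis, angle));
      bone.updateMatrixWorld(true);
      return true;
    }
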
5883
5098
 
5884
- // Create a pose
5885
- const props = {};
5886
- anim.tracks.forEach( t => {
5099
+ /**
5100
+ * Initialize FBX animation loader
5101
+ */
5102
+ async initializeFBXAnimationLoader() {
5103
+ try {
5104
+ // Dynamic import to avoid loading issues
5105
+ const { FBXAnimationLoader } = await import('./fbxAnimationLoader.js');
5106
+ this.fbxAnimationLoader = new FBXAnimationLoader(this.armature);
5107
+ console.log('FBX Animation Loader initialized');
5108
+ } catch (error) {
5109
+ console.warn('FBX Animation Loader not available:', error);
5110
+ this.fbxAnimationLoader = null;
5111
+ }
5112
+ }
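
The guarded dynamic import keeps the optional loader out of the main bundle and degrades gracefully when the module is missing. The same pattern, generalized as a sketch:

    // Hypothetical helper; callers must handle the null fallback.
    async function loadOptionalModule(path) {
      try {
        return await import(path); // resolved relative to this module
      } catch (e) {
        console.warn('Optional module not available:', path, e);
        return null;
      }
    }
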
5887
5113
 
5888
- // Rename and scale Mixamo tracks
5889
- t.name = t.name.replaceAll('mixamorig','');
5890
- const ids = t.name.split('.');
5891
- if ( ids[1] === 'position' ) {
5892
- props[t.name] = new THREE.Vector3( t.values[0] * scale, t.values[1] * scale, t.values[2] * scale);
5893
- } else if ( ids[1] === 'quaternion' ) {
5894
- props[t.name] = new THREE.Quaternion( t.values[0], t.values[1], t.values[2], t.values[3] );
5895
- } else if ( ids[1] === 'rotation' ) {
5896
- props[ids[0]+".quaternion"] = new THREE.Quaternion().setFromEuler(new THREE.Euler( t.values[0], t.values[1], t.values[2],'XYZ' )).normalize();
5897
- }
5898
- });
5114
+ /**
5115
+ * Set body movement type.
5116
+ * @param {string} movement Movement type (idle, walking, prancing, gesturing, dancing, excited).
5117
+ */
5118
+ setBodyMovement(movement) {
5119
+ this.bodyMovement = movement;
5120
+
5121
+ // Only set avatar property if avatar exists
5122
+ if (this.avatar) {
5123
+ this.avatar.bodyMovement = movement;
5124
+ }
5125
+
5126
+ console.log('Body movement set to:', movement);
5127
+
5128
+ // Respect the current showFullAvatar setting instead of forcing it to true
5129
+ // Only unlock position when returning to idle
5130
+ if (movement === 'idle') {
5131
+ // Unlock position when returning to idle
5132
+ this.unlockAvatarPosition();
5133
+ }
5134
+ // Note: We no longer force showFullAvatar to true for body movements
5135
+ // The avatar will use whatever showFullAvatar value was set by the user
5136
+
5137
+ // Apply body movement animation
5138
+ this.applyBodyMovementAnimation();
5139
+ }
5899
5140
 
5900
- // Add to pose
5901
- const newPose = { props: props };
5902
- if ( props['Hips.position'] ) {
5903
- if ( props['Hips.position'].y < 0.5 ) {
5904
- newPose.lying = true;
5905
- } else {
5906
- newPose.standing = true;
5907
- }
5141
+ /**
5142
+ * Apply body movement animation based on current movement type.
5143
+ */
5144
+ async applyBodyMovementAnimation() {
5145
+ // Check if avatar is ready
5146
+ if (!this.armature || !this.animQueue) {
5147
+ console.log('Avatar not ready for body movement animations');
5148
+ return;
5149
+ }
5150
+
5151
+ console.log('Avatar is running:', this.isRunning);
5152
+ console.log('Animation queue exists:', !!this.animQueue);
5153
+
5154
+ // Remove existing body movement animations
5155
+ const beforeLength = this.animQueue.length;
5156
+ this.animQueue = this.animQueue.filter(anim => !anim.template.name.startsWith('bodyMovement'));
5157
+ const afterLength = this.animQueue.length;
5158
+ console.log(`Filtered animation queue: ${beforeLength} -> ${afterLength} animations`);
5159
+
5160
+ if (this.bodyMovement === 'idle') {
5161
+ // Stop FBX animations if any
5162
+ if (this.fbxAnimationLoader) {
5163
+ this.fbxAnimationLoader.stopCurrentAnimation();
5164
+ }
5165
+ return; // No body movement for idle
5166
+ }
5167
+
5168
+ // Try to use FBX animations first
5169
+ if (this.fbxAnimationLoader) {
5170
+ try {
5171
+ await this.fbxAnimationLoader.playGestureAnimation(this.bodyMovement, this.movementIntensity);
5172
+ console.log('Applied FBX body movement animation:', this.bodyMovement);
5173
+ return; // Successfully applied FBX animation
5174
+ } catch (error) {
5175
+ console.warn('FBX animation failed, falling back to code animation:', error);
5176
+ }
5177
+ }
5178
+
5179
+ // Fallback to code-based animations
5180
+ const movementAnim = this.createBodyMovementAnimation(this.bodyMovement);
5181
+ console.log('Created movement animation:', movementAnim);
5182
+ if (movementAnim) {
5183
+ try {
5184
+ // Use animFactory to create proper animation object
5185
+ const animObj = this.animFactory(movementAnim, true); // true for looping
5186
+
5187
+ // Validate the animation object before adding
5188
+ if (animObj && animObj.ts && animObj.ts.length > 0) {
5189
+ this.animQueue.push(animObj);
5190
+ console.log('Applied code-based body movement animation:', this.bodyMovement);
5191
+ console.log('Animation queue length:', this.animQueue.length);
5192
+ console.log('Animation object:', animObj);
5193
+ } else {
5194
+ console.error('Invalid animation object created for:', this.bodyMovement);
5195
+ console.error('Animation object:', animObj);
5908
5196
  }
5909
- this.animPoses.push({
5910
- url: url+'-'+ndx,
5911
- pose: newPose
5912
- });
5913
-
5914
- // Play
5915
- this.playPose(url, onprogress, dur, ndx, scale);
5916
-
5917
- } else {
5918
- const msg = 'Pose ' + url + ' (ndx=' + ndx + ') not found';
5919
- console.error(msg);
5197
+ } catch (error) {
5198
+ console.error('Error creating body movement animation:', error);
5920
5199
  }
5921
5200
  }
5922
5201
  }
5923
5202
 
5924
5203
  /**
5925
- * Stop the pose. (Functionality is the same as in stopAnimation.)
5926
- */
5927
- stopPose() {
5928
- this.stopAnimation();
5204
+ * Lock avatar position to prevent movement during animations.
5205
+ */
5206
+ lockAvatarPosition() {
5207
+ if (!this.armature) {
5208
+ console.warn('Cannot lock position: armature not available');
5209
+ return;
5210
+ }
5211
+
5212
+ // Store the original position if not already stored
5213
+ if (!this.originalPosition) {
5214
+ this.originalPosition = {
5215
+ x: this.armature.position.x,
5216
+ y: this.armature.position.y,
5217
+ z: this.armature.position.z
5218
+ };
5219
+ console.log('Original position stored:', this.originalPosition);
5220
+ }
5221
+
5222
+ // Lock the avatar at its CURRENT position (don't move it)
5223
+ this.lockedPosition = {
5224
+ x: this.armature.position.x,
5225
+ y: this.armature.position.y,
5226
+ z: this.armature.position.z
5227
+ };
5228
+
5229
+ console.log('Avatar position locked at current position:', this.lockedPosition);
5929
5230
  }
5930
5231
 
5931
5232
  /**
5932
- * Play a gesture, which is either a hand gesture, an emoji animation or their
5933
- * combination.
5934
- * @param {string} name Gesture name
5935
- * @param {number} [dur=3] Duration of the gesture in seconds
5936
- * @param {boolean} [mirror=false] Mirror gesture
5937
- * @param {number} [ms=1000] Transition time in milliseconds
5938
- */
5939
- playGesture(name, dur=3, mirror=false, ms=1000) {
5940
-
5941
- if ( !this.armature ) return;
5942
-
5943
- // Hand gesture, if any
5944
- let g = this.gestureTemplates[name];
5945
- if ( g ) {
5946
-
5947
- // New gesture always overrides the existing one
5948
- if ( this.gestureTimeout ) {
5949
- clearTimeout( this.gestureTimeout );
5950
- this.gestureTimeout = null;
5951
- }
5952
-
5953
- // Stop talking hands animation
5954
- let ndx = this.animQueue.findIndex( y => y.template.name === "talkinghands" );
5955
- if ( ndx !== -1 ) {
5956
- this.animQueue[ndx].ts = this.animQueue[ndx].ts.map( x => 0 );
5957
- }
5233
+ * Unlock avatar position and restore original position.
5234
+ */
5235
+ unlockAvatarPosition() {
5236
+ if (this.armature && this.originalPosition) {
5237
+ // Restore avatar to its original position before locking
5238
+ this.armature.position.set(
5239
+ this.originalPosition.x,
5240
+ this.originalPosition.y,
5241
+ this.originalPosition.z
5242
+ );
5243
+ console.log('Avatar position restored to original:', this.originalPosition);
5244
+ } else if (this.armature) {
5245
+ // Fallback: reset to center if no original position was stored
5246
+ this.armature.position.set(0, 0, 0);
5247
+ console.log('Avatar position reset to center (0,0,0)');
5248
+ }
5249
+ this.lockedPosition = null;
5250
+ this.originalPosition = null; // Clear original position after unlock
5251
+ console.log('Avatar position unlocked');
5252
+ }
5958
5253
 
5959
- // Set gesture
5960
- this.gesture = this.propsToThreeObjects( g );
5961
- if ( mirror ) {
5962
- this.gesture = this.mirrorPose( this.gesture );
5963
- }
5964
- if ( name === "namaste" && this.avatar.body === 'M' ) {
5965
- // Work-a-round for male model so that the hands meet
5966
- this.gesture["RightArm.quaternion"].rotateTowards( new THREE.Quaternion(0,1,0,0), -0.25);
5967
- this.gesture["LeftArm.quaternion"].rotateTowards( new THREE.Quaternion(0,1,0,0), -0.25);
5968
- }
5254
+ /**
5255
+ * Ensure avatar stays at locked position.
5256
+ */
5257
+ maintainLockedPosition() {
5258
+ if (this.lockedPosition && this.armature) {
5259
+ // Enforce the locked position - keep avatar exactly where it was locked
5260
+ // This prevents FBX animations from moving the avatar
5261
+ this.armature.position.set(
5262
+ this.lockedPosition.x,
5263
+ this.lockedPosition.y,
5264
+ this.lockedPosition.z
5265
+ );
5266
+ }
5267
+ }
5969
5268
 
5970
- // Apply to target
5971
- for( let [p,val] of Object.entries(this.gesture) ) {
5972
- val.t = this.animClock;
5973
- val.d = ms;
5974
- if ( this.poseTarget.props.hasOwnProperty(p) ) {
5975
- this.poseTarget.props[p].copy(val);
5976
- this.poseTarget.props[p].t = this.animClock;
5977
- this.poseTarget.props[p].d = ms;
5269
+ /**
5270
+ * Create body movement animation.
5271
+ * @param {string} movementType Movement type.
5272
+ * @returns {Object} Animation object.
5273
+ */
5274
+ createBodyMovementAnimation(movementType) {
5275
+ const intensity = this.movementIntensity || 0.5;
5276
+
5277
+ const movementAnimations = {
5278
+ walking: {
5279
+ name: 'bodyMovement_walking',
5280
+ delay: [500, 2000],
5281
+ dt: [800, 1200],
5282
+ vs: {
5283
+ bodyRotateY: [-0.1 * intensity, 0.1 * intensity, 0],
5284
+ bodyRotateZ: [-0.05 * intensity, 0.05 * intensity, 0],
5285
+ bodyRotateX: [-0.02 * intensity, 0.02 * intensity, 0]
5286
+ }
5287
+ },
5288
+ prancing: {
5289
+ name: 'bodyMovement_prancing',
5290
+ delay: [300, 1000],
5291
+ dt: [400, 800],
5292
+ vs: {
5293
+ bodyRotateY: [-0.15 * intensity, 0.15 * intensity, 0],
5294
+ bodyRotateZ: [-0.08 * intensity, 0.08 * intensity, 0],
5295
+ bodyRotateX: [-0.05 * intensity, 0.05 * intensity, 0]
5296
+ }
5297
+ },
5298
+ gesturing: {
5299
+ name: 'bodyMovement_gesturing',
5300
+ delay: [400, 1500],
5301
+ dt: [600, 1000],
5302
+ vs: {
5303
+ bodyRotateY: [-0.08 * intensity, 0.08 * intensity, 0],
5304
+ bodyRotateZ: [-0.03 * intensity, 0.03 * intensity, 0]
5305
+ }
5306
+ },
5307
+ dancing: {
5308
+ name: 'bodyMovement_dancing',
5309
+ delay: [200, 600],
5310
+ dt: [400, 800],
5311
+ vs: {
5312
+ bodyRotateY: [-0.25 * intensity, 0.25 * intensity, 0],
5313
+ bodyRotateZ: [-0.15 * intensity, 0.15 * intensity, 0],
5314
+ bodyRotateX: [-0.1 * intensity, 0.1 * intensity, 0]
5315
+ }
5316
+ },
5317
+ dancing2: {
5318
+ name: 'bodyMovement_dancing2',
5319
+ delay: [150, 500],
5320
+ dt: [300, 700],
5321
+ vs: {
5322
+ bodyRotateY: [-0.3 * intensity, 0.3 * intensity, 0],
5323
+ bodyRotateZ: [-0.2 * intensity, 0.2 * intensity, 0],
5324
+ bodyRotateX: [-0.12 * intensity, 0.12 * intensity, 0]
5325
+ }
5326
+ },
5327
+ dancing3: {
5328
+ name: 'bodyMovement_dancing3',
5329
+ delay: [100, 400],
5330
+ dt: [200, 600],
5331
+ vs: {
5332
+ bodyRotateY: [-0.35 * intensity, 0.35 * intensity, 0],
5333
+ bodyRotateZ: [-0.25 * intensity, 0.25 * intensity, 0],
5334
+ bodyRotateX: [-0.15 * intensity, 0.15 * intensity, 0]
5335
+ }
5336
+ },
5337
+ excited: {
5338
+ name: 'bodyMovement_excited',
5339
+ delay: [200, 600],
5340
+ dt: [300, 700],
5341
+ vs: {
5342
+ bodyRotateY: [-0.12 * intensity, 0.12 * intensity, 0],
5343
+ bodyRotateZ: [-0.06 * intensity, 0.06 * intensity, 0],
5344
+ bodyRotateX: [-0.04 * intensity, 0.04 * intensity, 0]
5345
+ }
5346
+ },
5347
+ happy: {
5348
+ name: 'bodyMovement_happy',
5349
+ delay: [300, 800],
5350
+ dt: [500, 1000],
5351
+ vs: {
5352
+ bodyRotateY: [-0.08 * intensity, 0.08 * intensity, 0],
5353
+ bodyRotateZ: [-0.04 * intensity, 0.04 * intensity, 0],
5354
+ bodyRotateX: [-0.02 * intensity, 0.02 * intensity, 0]
5355
+ }
5356
+ },
5357
+ surprised: {
5358
+ name: 'bodyMovement_surprised',
5359
+ delay: [100, 300],
5360
+ dt: [200, 500],
5361
+ vs: {
5362
+ bodyRotateY: [-0.05 * intensity, 0.05 * intensity, 0],
5363
+ bodyRotateZ: [-0.03 * intensity, 0.03 * intensity, 0],
5364
+ bodyRotateX: [-0.01 * intensity, 0.01 * intensity, 0]
5365
+ }
5366
+ },
5367
+ thinking: {
5368
+ name: 'bodyMovement_thinking',
5369
+ delay: [800, 2000],
5370
+ dt: [1000, 1500],
5371
+ vs: {
5372
+ bodyRotateY: [-0.06 * intensity, 0.06 * intensity, 0],
5373
+ bodyRotateZ: [-0.03 * intensity, 0.03 * intensity, 0],
5374
+ bodyRotateX: [-0.02 * intensity, 0.02 * intensity, 0]
5375
+ }
5376
+ },
5377
+ nodding: {
5378
+ name: 'bodyMovement_nodding',
5379
+ delay: [400, 800],
5380
+ dt: [300, 600],
5381
+ vs: {
5382
+ bodyRotateX: [-0.1 * intensity, 0.1 * intensity, 0],
5383
+ bodyRotateY: [-0.02 * intensity, 0.02 * intensity, 0]
5384
+ }
5385
+ },
5386
+ shaking: {
5387
+ name: 'bodyMovement_shaking',
5388
+ delay: [200, 400],
5389
+ dt: [150, 300],
5390
+ vs: {
5391
+ bodyRotateY: [-0.15 * intensity, 0.15 * intensity, 0],
5392
+ bodyRotateZ: [-0.05 * intensity, 0.05 * intensity, 0]
5978
5393
  }
5979
- }
5980
-
5981
- // Timer
5982
- if ( dur && Number.isFinite(dur) ) {
5983
- this.gestureTimeout = setTimeout( this.stopGesture.bind(this,ms), 1000 * dur);
5984
- }
5985
- }
5986
-
5987
- // Animated emoji, if any
5988
- let em = this.animEmojis[name];
5989
- if ( em ) {
5990
-
5991
- // Follow link
5992
- if ( em && em.link ) {
5993
- em = this.animEmojis[em.link];
5994
- }
5995
-
5996
- if ( em ) {
5997
- // Look at the camera for 500 ms
5998
- this.lookAtCamera(500);
5999
-
6000
- // Create animation and tag as gesture
6001
- const anim = this.animFactory( em );
6002
- anim.gesture = true;
6003
-
6004
- // Rescale duration
6005
- if ( dur && Number.isFinite(dur) ) {
6006
- const first = anim.ts[0];
6007
- const last = anim.ts[ anim.ts.length -1 ];
6008
- const total = last - first;
6009
- const excess = (dur * 1000) - total;
6010
-
6011
- // If longer, increase longer parts; if shorter, scale everything
6012
- if ( excess > 0 ) {
6013
- const dt = [];
6014
- for( let i=1; i<anim.ts.length; i++ ) dt.push( anim.ts[i] - anim.ts[i-1] );
6015
- const rescale = em.template?.rescale || dt.map( x => x / total );
6016
- const excess = dur * 1000 - total;
6017
- anim.ts = anim.ts.map( (x,i,arr) => {
6018
- return (i===0) ? first : (arr[i-1] + dt[i-1] + rescale[i-1] * excess);
6019
- });
6020
- } else {
6021
- const scale = (dur * 1000) / total;
6022
- anim.ts = anim.ts.map( x => first + scale * (x - first) );
6023
- }
5394
+ },
5395
+ celebration: {
5396
+ name: 'bodyMovement_celebration',
5397
+ delay: [100, 300],
5398
+ dt: [200, 500],
5399
+ vs: {
5400
+ bodyRotateY: [-0.2 * intensity, 0.2 * intensity, 0],
5401
+ bodyRotateZ: [-0.1 * intensity, 0.1 * intensity, 0],
5402
+ bodyRotateX: [-0.08 * intensity, 0.08 * intensity, 0]
6024
5403
  }
6025
-
6026
- this.animQueue.push( anim );
6027
- }
6028
- }
6029
-
6030
- }
6031
-
6032
- /**
6033
- * Stop the gesture.
6034
- * @param {number} [ms=1000] Transition time in milliseconds
6035
- */
6036
- stopGesture(ms=1000) {
6037
-
6038
- // Stop gesture timer
6039
- if ( this.gestureTimeout ) {
6040
- clearTimeout( this.gestureTimeout );
6041
- this.gestureTimeout = null;
6042
- }
6043
-
6044
- // Stop hand gesture, if any
6045
- if ( this.gesture ) {
6046
- const gs = Object.entries(this.gesture);
6047
- this.gesture = null;
6048
- for( const [p,val] of gs ) {
6049
- if ( this.poseTarget.props.hasOwnProperty(p) ) {
6050
- this.poseTarget.props[p].copy( this.getPoseTemplateProp(p) );
6051
- this.poseTarget.props[p].t = this.animClock;
6052
- this.poseTarget.props[p].d = ms;
5404
+ },
5405
+ energetic: {
5406
+ name: 'bodyMovement_energetic',
5407
+ delay: [150, 400],
5408
+ dt: [250, 500],
5409
+ vs: {
5410
+ bodyRotateY: [-0.18 * intensity, 0.18 * intensity, 0],
5411
+ bodyRotateZ: [-0.12 * intensity, 0.12 * intensity, 0],
5412
+ bodyRotateX: [-0.08 * intensity, 0.08 * intensity, 0]
5413
+ }
5414
+ },
5415
+ swaying: {
5416
+ name: 'bodyMovement_swaying',
5417
+ delay: [600, 1200],
5418
+ dt: [800, 1000],
5419
+ vs: {
5420
+ bodyRotateY: [-0.1 * intensity, 0.1 * intensity, 0],
5421
+ bodyRotateZ: [-0.05 * intensity, 0.05 * intensity, 0]
5422
+ }
5423
+ },
5424
+ bouncing: {
5425
+ name: 'bodyMovement_bouncing',
5426
+ delay: [300, 600],
5427
+ dt: [400, 700],
5428
+ vs: {
5429
+ bodyRotateY: [-0.05 * intensity, 0.05 * intensity, 0]
6053
5430
  }
6054
5431
  }
5432
+ };
5433
+
5434
+ // Handle dance variations
5435
+ if (movementType === 'dancing') {
5436
+ const danceVariations = ['dancing', 'dancing2', 'dancing3'];
5437
+ const randomDance = danceVariations[Math.floor(Math.random() * danceVariations.length)];
5438
+ return movementAnimations[randomDance] || movementAnimations['dancing'];
6055
5439
  }
6056
-
6057
- // Stop animated emoji gesture, if any
6058
- let i = this.animQueue.findIndex( y => y.gesture );
6059
- if ( i !== -1 ) {
6060
- this.animQueue.splice(i, 1);
6061
- }
6062
-
5440
+
5441
+ return movementAnimations[movementType] || null;
6063
5442
  }
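
A usage sketch tying these methods together, assuming head is a TalkingHead instance:

    head.setMovementIntensity(0.8);  // clamped to [0, 1], rescales the amplitudes
    head.setBodyMovement('dancing'); // picks one of the three dancing variations
    head.setBodyMovement('idle');    // stops movement and unlocks the position
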
6064
5443
 
6065
5444
  /**
6066
- * Cyclic Coordinate Descent (CCD) Inverse Kinematic (IK) algorithm.
6067
- * Adapted from:
6068
- * https://github.com/mrdoob/three.js/blob/master/examples/jsm/animation/CCDIKSolver.js
6069
- * @param {Object} ik IK configuration object
6070
- * @param {Vector3} [target=null] Target coordinate, if null return to template
6071
- * @param {Boolean} [relative=false] If true, target is relative to root
6072
- * @param {numeric} [d=null] If set, apply in d milliseconds
6073
- */
6074
- ikSolve(ik, target=null, relative=false, d=null) {
6075
- const targetVec = new THREE.Vector3();
6076
- const effectorPos = new THREE.Vector3();
6077
- const effectorVec = new THREE.Vector3();
6078
- const linkPos = new THREE.Vector3();
6079
- const invLinkQ = new THREE.Quaternion();
6080
- const linkScale = new THREE.Vector3();
6081
- const axis = new THREE.Vector3();
6082
- const vector = new THREE.Vector3();
6083
-
6084
- // Reset IK setup positions and rotations
6085
- const root = this.ikMesh.getObjectByName(ik.root);
6086
- root.position.setFromMatrixPosition( this.armature.getObjectByName(ik.root).matrixWorld );
6087
- root.quaternion.setFromRotationMatrix( this.armature.getObjectByName(ik.root).matrixWorld );
6088
- if ( target && relative ) {
6089
- target.applyQuaternion(this.armature.quaternion).add( root.position );
5445
+ * Set movement intensity.
5446
+ * @param {number} intensity Movement intensity (0-1).
5447
+ */
5448
+ setMovementIntensity(intensity) {
5449
+ this.movementIntensity = Math.max(0, Math.min(1, intensity));
5450
+
5451
+ // Only set avatar property if avatar exists
5452
+ if (this.avatar) {
5453
+ this.avatar.movementIntensity = this.movementIntensity;
6090
5454
  }
6091
- const effector = this.ikMesh.getObjectByName(ik.effector);
6092
- const links = ik.links;
6093
- links.forEach( x => {
6094
- x.bone = this.ikMesh.getObjectByName(x.link);
6095
- x.bone.quaternion.copy( this.getPoseTemplateProp(x.link+'.quaternion') );
6096
- });
6097
- root.updateMatrixWorld(true);
6098
- const iterations = ik.iterations || 10;
6099
-
6100
- // Iterate
6101
- if ( target ) {
6102
- for ( let i = 0; i < iterations; i ++ ) {
6103
- let rotated = false;
6104
- for ( let j = 0, jl = links.length; j < jl; j++ ) {
6105
- const bone = links[j].bone;
6106
- bone.matrixWorld.decompose( linkPos, invLinkQ, linkScale );
6107
- invLinkQ.invert();
6108
- effectorPos.setFromMatrixPosition( effector.matrixWorld );
6109
- effectorVec.subVectors( effectorPos, linkPos );
6110
- effectorVec.applyQuaternion( invLinkQ );
6111
- effectorVec.normalize();
6112
- targetVec.subVectors( target, linkPos );
6113
- targetVec.applyQuaternion( invLinkQ );
6114
- targetVec.normalize();
6115
- let angle = targetVec.dot( effectorVec );
6116
- if ( angle > 1.0 ) {
6117
- angle = 1.0;
6118
- } else if ( angle < - 1.0 ) {
6119
- angle = - 1.0;
6120
- }
6121
- angle = Math.acos( angle );
6122
- if ( angle < 1e-5 ) continue;
6123
- if ( links[j].minAngle !== undefined && angle < links[j].minAngle ) {
6124
- angle = links[j].minAngle;
6125
- }
6126
- if ( links[j].maxAngle !== undefined && angle > links[j].maxAngle ) {
6127
- angle = links[j].maxAngle;
6128
- }
6129
- axis.crossVectors( effectorVec, targetVec );
6130
- axis.normalize();
6131
- q.setFromAxisAngle( axis, angle );
6132
- bone.quaternion.multiply( q );
6133
-
6134
- // Constraints
6135
- bone.rotation.setFromVector3( vector.setFromEuler( bone.rotation ).clamp( new THREE.Vector3(
6136
- links[j].minx !== undefined ? links[j].minx : -Infinity,
6137
- links[j].miny !== undefined ? links[j].miny : -Infinity,
6138
- links[j].minz !== undefined ? links[j].minz : -Infinity
6139
- ), new THREE.Vector3(
6140
- links[j].maxx !== undefined ? links[j].maxx : Infinity,
6141
- links[j].maxy !== undefined ? links[j].maxy : Infinity,
6142
- links[j].maxz !== undefined ? links[j].maxz : Infinity
6143
- )) );
6144
-
6145
- bone.updateMatrixWorld( true );
6146
- rotated = true;
6147
- }
6148
- if ( !rotated ) break;
6149
- }
5455
+
5456
+ console.log('Movement intensity set to:', this.movementIntensity);
5457
+
5458
+ // Update FBX animation intensity if available
5459
+ if (this.fbxAnimationLoader) {
5460
+ this.fbxAnimationLoader.setIntensity(this.movementIntensity);
6150
5461
  }
6151
-
6152
- // Apply
6153
- if ( d ) {
6154
- links.forEach( x => {
6155
- this.poseTarget.props[x.link+".quaternion"].copy( x.bone.quaternion );
6156
- this.poseTarget.props[x.link+".quaternion"].t = this.animClock;
6157
- this.poseTarget.props[x.link+".quaternion"].d = d;
6158
- });
5462
+
5463
+ // Reapply body movement animation with new intensity
5464
+ if (this.bodyMovement && this.bodyMovement !== 'idle') {
5465
+ this.applyBodyMovementAnimation();
6159
5466
  }
6160
5467
  }
6161
5468
 
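The movement templates added above all share one shape: a `name`, randomized `delay`/`dt` timing windows in milliseconds, and a `vs` map of body-rotation channels whose amplitudes scale linearly with `intensity`; `dancing` additionally picks one of three variations uniformly at random. A minimal caller-side sketch for the companion `setMovementIntensity()` method, assuming an already-constructed instance named `head` (the instance name and the range-input element are illustrative, not part of this package):

    // Illustrative wiring: a range input drives the new intensity API.
    const slider = document.getElementById('movement-intensity'); // hypothetical element
    slider.addEventListener('input', (event) => {
      // setMovementIntensity() clamps to [0,1], mirrors the value onto the
      // avatar and the FBX animation loader, and reapplies the active body
      // movement so the new amplitude takes effect immediately.
      head.setMovementIntensity(Number(event.target.value));
    });
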
@@ -6164,11 +5471,36 @@ class TalkingHead {
    */
   dispose() {
 
-    // Stop animation first to prevent render calls on disposed renderer
-    this.isRunning = false;
+    // Stop animation, clear speech queue, stop stream
     this.stop();
     this.stopSpeaking();
     this.streamStop();
+    this.stopAnimation();
+
+    // Cancel animation frame to prevent potential memory leak
+    if (this._raf !== null) {
+      cancelAnimationFrame(this._raf);
+      this._raf = null;
+    }
+
+    // Stop & disconnect buffer sources
+    ['audioSpeechSource', 'audioBackgroundSource'].forEach(key => {
+      const node = this[key];
+      if (node) {
+        try { node.stop?.() } catch(error) {};
+        node.disconnect();
+        node.onended = null; // remove closure references
+      }
+    });
+
+    // Disconnect gain nodes & analyser
+    ['audioBackgroundGainNode', 'audioSpeechGainNode',
+     'audioStreamGainNode', 'audioAnalyzerNode'].forEach(key => {
+      const node = this[key];
+      if (node) {
+        node.disconnect();
+      }
+    });
 
     // Dispose Three.JS objects
     if ( this.isAvatarOnly ) {
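The audio cleanup added to dispose() follows the usual Web Audio teardown order: stop a source if it is playing, disconnect it from the graph, then null its `onended` handler so closures can be garbage-collected. The same pattern as a stand-alone sketch (the helper name is illustrative, not an API of this package):

    // Safely release an AudioBufferSourceNode; stop() throws if the node
    // was never started, hence the try/catch.
    function releaseSource(node) {
      if (!node) return;
      try { node.stop?.(); } catch (e) { /* never started */ }
      node.disconnect();   // detach from the audio graph
      node.onended = null; // drop closure references
    }
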
@@ -6181,18 +5513,34 @@ class TalkingHead {
     } else {
       this.clearThree(this.scene);
       this.resizeobserver.disconnect();
-
-      // Dispose WebGL renderer
+      this.resizeobserver = null;
+
       if ( this.renderer ) {
         this.renderer.dispose();
-        if ( this.renderer.domElement && this.renderer.domElement.parentNode ) {
-          this.renderer.domElement.parentNode.removeChild(this.renderer.domElement);
-        }
+        const gl = this.renderer.getContext();
+        gl.getExtension('WEBGL_lose_context')?.loseContext();
+        this.renderer.domElement?.remove();
+        this.renderer.domElement = null;
         this.renderer = null;
       }
+
+      if ( this.controls ) {
+        this.controls.dispose();
+        this.controls = null;
+      }
     }
+
     this.clearThree( this.ikMesh );
     this.dynamicbones.dispose();
+
+    // Clean up FBX animation loader
+    if (this.fbxAnimationLoader) {
+      this.fbxAnimationLoader.stopCurrentAnimation();
+      this.fbxAnimationLoader = null;
+    }
+
+    // DOM
+    this.nodeAvatar = null;
 
   }
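The renderer teardown now goes one step beyond three.js dispose(): browsers keep only a small pool of live WebGL contexts, so invoking the `WEBGL_lose_context` extension releases the context immediately rather than waiting for garbage collection, which matters when avatars are created and destroyed repeatedly. The sequence in isolation (`renderer` stands in for any three.js WebGLRenderer instance):

    renderer.dispose();                                   // free GPU-side objects
    renderer.getContext()
      .getExtension('WEBGL_lose_context')?.loseContext(); // drop the GL context now
    renderer.domElement?.remove();                        // detach the canvas from the DOM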