@sage-rsc/talking-head-react 1.3.8 → 1.4.1

This diff compares the published contents of two versions of a package as they appear in a supported public registry. It is provided for informational purposes only.
@@ -349,8 +349,7 @@ class TalkingHead {
  this.posePropNames = [...names];
 
  // Use "side" as the first pose, weight on left leg
- // Note: This will be overridden by gender-specific selection when avatar loads
- this.poseName = "side"; // First pose (default, will be gender-adjusted on avatar load)
+ this.poseName = "side"; // First pose
  this.poseWeightOnLeft = true; // Initial weight on left leg
  this.gesture = null; // Values that override pose properties
  this.poseCurrentTemplate = this.poseTemplates[this.poseName];
@@ -375,7 +374,6 @@ class TalkingHead {
  // 1. State (idle, speaking, listening)
  // 2. Mood (moodX, moodY)
  // 3. Pose (poseX, poseY)
- // 4. Body Movement (walking, prancing, gesturing, dancing, excited)
  // 5. View (full, upper, head)
  // 6. Body form ('M','F')
  // 7. Alt (sequence of objects with propabilities p. If p is not
@@ -453,18 +451,16 @@ class TalkingHead {
  ]
  },
  'happy' : {
- baseline: { mouthSmile: 0.2, eyesLookDown: 0 },
+ baseline: { mouthSmile: 0.2, eyesLookDown: 0.1 },
  speech: { deltaRate: 0, deltaPitch: 0.1, deltaVolume: 0 },
  anims: [
  { name: 'breathing', delay: 1500, dt: [ 1200,500,1000 ], vs: { chestInhale: [0.5,0.5,0] } },
  { name: 'pose',
  idle: {
  alt: [
- { p: 0.6, delay: [5000,30000], vs: { pose: ['side'] },
- 'M': { delay: [5000,30000], vs: { pose: ['wide'] } }
- },
+ { p: 0.6, delay: [5000,30000], vs: { pose: ['side'] } },
  { p: 0.2, delay: [5000,30000], vs: { pose: ['hip'] },
- 'M': { delay: [5000,30000], vs: { pose: ['wide'] } }
+ 'M': { delay: [5000,30000], vs: { pose: ['side'] } }
  },
  { p: 0.1, delay: [5000,30000], vs: { pose: ['straight'] } },
  { delay: [5000,10000], vs: { pose: ['wide'] } },
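Note: the `alt` arrays above are weighted alternatives; per the option list earlier in this class ("sequence of objects with propabilities p"), an entry may declare an explicit probability `p`, and entries without one (like the final `wide` pose) receive the leftover mass. A minimal sketch of that selection rule, assuming the remainder is split evenly among unweighted entries — the package's exact tie-breaking is not visible in this diff:

```js
// Pick one alternative: entries may declare `p`; leftover probability is
// assumed to be shared by entries that omit it. Illustrative sketch only.
function pickAlt(alts) {
  const explicit = alts.reduce((sum, a) => sum + (a.p ?? 0), 0);
  const unweighted = alts.filter(a => a.p === undefined).length;
  const rest = Math.max(0, 1 - explicit) / (unweighted || 1);
  let r = Math.random();
  for (const a of alts) {
    r -= a.p ?? rest;
    if (r <= 0) return a;
  }
  return alts[alts.length - 1]; // guard against floating-point leftovers
}

// With the 'happy' idle list above, 'side' is picked ~60% of the time,
// 'hip' ~20%, 'straight' ~10%, and 'wide' gets the remaining ~10%.
```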
@@ -473,12 +469,8 @@ class TalkingHead {
  },
  speaking: {
  alt: [
- { p: 0.4, delay: [5000,30000], vs: { pose: ['side'] },
- 'M': { delay: [5000,30000], vs: { pose: ['wide'] } }
- },
- { p: 0.4, delay: [5000,30000], vs: { pose: ['straight'] },
- 'M': { delay: [5000,30000], vs: { pose: ['wide'] } }
- },
+ { p: 0.4, delay: [5000,30000], vs: { pose: ['side'] } },
+ { p: 0.4, delay: [5000,30000], vs: { pose: ['straight'] } },
  { delay: [5000,20000], vs: { pose: ['hip'] },
  'M': { delay: [5000,30000], vs: { pose: ['wide'] } }
  },
@@ -773,6 +765,10 @@ class TalkingHead {
  this.animClips = [];
  this.animPoses = [];
 
+ // Animate
+ this.animate = this.animate.bind(this);
+ this._raf = null;
+
  // Clock
  this.animFrameDur = 1000/ this.opt.modelFPS;
  this.animClock = 0;
@@ -780,12 +776,9 @@ class TalkingHead {
  this.animTimeLast = 0;
  this.easing = this.sigmoidFactory(5); // Ease in and out
 
- // Lip-sync extensions, import statically
+ // Lip-sync extensions, import dynamically
  this.lipsync = {};
- this.opt.lipsyncModules.forEach( x => {
- // Load synchronously using statically imported modules
- this.lipsyncGetProcessor(x);
- });
+ this.opt.lipsyncModules.forEach( x => this.lipsyncGetProcessor(x) );
  this.visemeNames = [
  'aa', 'E', 'I', 'O', 'U', 'PP', 'SS', 'TH', 'DD', 'FF', 'kk',
  'nn', 'RR', 'CH', 'sil'
@@ -960,9 +953,6 @@ class TalkingHead {
  this.audioAnalyzerNode.smoothingTimeConstant = 0.1;
  this.audioAnalyzerNode.minDecibels = -70;
  this.audioAnalyzerNode.maxDecibels = -10;
-
- // Audio analyzer for precise lip-sync
- this.audioAnalyzer = new AudioAnalyzer(this.audioCtx);
  this.audioReverbNode = this.audioCtx.createConvolver();
 
  // Connect nodes
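Note: 1.4.1 drops the separate `AudioAnalyzer` helper; the stock Web Audio `AnalyserNode` configured above remains the only analysis path. For readers unfamiliar with the API, a sketch of pulling a coarse loudness value from such a node — the mapping to a mouth morph is illustrative, not the package's algorithm:

```js
// Read a rough 0..1 loudness from an AnalyserNode like audioAnalyzerNode
// (the smoothing and decibel ranges mirror the values set above).
function roughLoudness(analyser) {
  const bins = new Uint8Array(analyser.frequencyBinCount);
  analyser.getByteFrequencyData(bins); // 0..255 per frequency bin
  const avg = bins.reduce((sum, v) => sum + v, 0) / bins.length;
  return Math.min(1, avg / 128);
}
```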
@@ -1122,20 +1112,24 @@ class TalkingHead {
  * Clear 3D object.
  * @param {Object} obj Object
  */
- clearThree(obj){
- while( obj.children.length ){
+ clearThree(obj) {
+ while (obj.children.length) {
  this.clearThree(obj.children[0]);
  obj.remove(obj.children[0]);
  }
- if ( obj.geometry ) obj.geometry.dispose();
 
- if ( obj.material ) {
- Object.keys(obj.material).forEach( x => {
- if ( obj.material[x] && obj.material[x] !== null && typeof obj.material[x].dispose === 'function' ) {
- obj.material[x].dispose();
- }
- });
+ if (obj.geometry) obj.geometry.dispose();
+
+ if (obj.material) {
+ if (Array.isArray(obj.material)) {
+ obj.material.forEach(m => {
+ if (m.map) m.map.dispose();
+ m.dispose();
+ });
+ } else {
+ if (obj.material.map) obj.material.map.dispose();
  obj.material.dispose();
+ }
  }
  }
@@ -1244,10 +1238,12 @@ class TalkingHead {
  this.stop();
  this.avatar = avatar;
 
- // Initialize body movement properties
+ // Initialize custom properties
  this.bodyMovement = avatar.bodyMovement || 'idle';
  this.movementIntensity = avatar.movementIntensity || 0.5;
- this.showFullAvatar = avatar.showFullAvatar || false;
+ this.lockedPosition = null;
+ this.originalPosition = null;
+ this.positionWasLocked = false;
 
  // Initialize FBX animation loader
  this.fbxAnimationLoader = null;
@@ -1255,8 +1251,14 @@ class TalkingHead {
  // Dispose Dynamic Bones
  this.dynamicbones.dispose();
 
- // Clear previous scene, if avatar was previously loaded
+ // Clear previous mixer/scene, if avatar was previously loaded
+ if (this.mixer) {
+ this.mixer.removeEventListener('finished', this._mixerHandler);
+ this.mixer.stopAllAction();
+ this.mixer.uncacheRoot(this.armature);
  this.mixer = null;
+ this._mixerHandler = null;
+ }
  if ( this.isAvatarOnly ) {
  if ( this.armature ) {
  this.clearThree( this.armature );
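Note: the new mixer teardown only works because the 'finished' handler was stored on the instance (`this._mixerHandler`); an inline arrow function could never be detached again. A sketch of the lifecycle this hunk implies — names such as `root` and `onFinished` are illustrative, not from the package:

```js
import { AnimationMixer } from 'three';

// Create: keep a stable reference to the listener.
function createMixer(root, onFinished) {
  const mixer = new AnimationMixer(root);
  mixer.addEventListener('finished', onFinished);
  return mixer;
}

// Destroy: detach the stored listener, stop actions, release cached
// bindings for this skeleton root, then drop the reference.
function destroyMixer(mixer, root, onFinished) {
  mixer.removeEventListener('finished', onFinished);
  mixer.stopAllAction();
  mixer.uncacheRoot(root);
}
```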
@@ -1570,7 +1572,7 @@ class TalkingHead {
  * Render scene.
  */
  render() {
- if ( this.isRunning && !this.isAvatarOnly && this.renderer ) {
+ if ( this.isRunning && !this.isAvatarOnly ) {
  this.renderer.render( this.scene, this.camera );
  }
  }
@@ -1579,7 +1581,7 @@ class TalkingHead {
  * Resize avatar.
  */
  onResize() {
- if ( !this.isAvatarOnly && this.renderer ) {
+ if ( !this.isAvatarOnly ) {
  this.camera.aspect = this.nodeAvatar.clientWidth / this.nodeAvatar.clientHeight;
  this.camera.updateProjectionMatrix();
  this.renderer.setSize( this.nodeAvatar.clientWidth, this.nodeAvatar.clientHeight );
@@ -1612,6 +1614,23 @@ class TalkingHead {
  // Apply shoulder adjustment to lower shoulders
  this.applyShoulderAdjustment();
  }
+
+ /**
+ * Update avatar pose deltas
+ */
+ updatePoseDelta() {
+ for( const [key,d] of Object.entries(this.poseDelta.props) ) {
+ if ( d.x === 0 && d.y === 0 && d.z === 0 ) continue;
+ e.set(d.x,d.y,d.z);
+ const o = this.poseAvatar.props[key];
+ if ( o.isQuaternion ) {
+ q.setFromEuler(e);
+ o.multiply(q);
+ } else if ( o.isVector3 ) {
+ o.add( e );
+ }
+ }
+ }
 
  /**
  * Apply shoulder adjustment to lower shoulders to a more natural position
@@ -1675,23 +1694,6 @@ class TalkingHead {
  }
  }
 
- /**
- * Update avatar pose deltas
- */
- updatePoseDelta() {
- for( const [key,d] of Object.entries(this.poseDelta.props) ) {
- if ( d.x === 0 && d.y === 0 && d.z === 0 ) continue;
- e.set(d.x,d.y,d.z);
- const o = this.poseAvatar.props[key];
- if ( o.isQuaternion ) {
- q.setFromEuler(e);
- o.multiply(q);
- } else if ( o.isVector3 ) {
- o.add( e );
- }
- }
- }
-
  /**
  * Update morph target values.
  * @param {number} dt Delta time in ms.
@@ -2038,6 +2040,11 @@ class TalkingHead {
  */
  setPoseFromTemplate(template, ms=2000) {
 
+ // Guard against disposal: check if required objects exist
+ if (!this.poseFactory || !this.poseTemplates) {
+ return;
+ }
+
  // Special cases
  const isIntermediate = template && this.poseTarget && this.poseTarget.template && ((this.poseTarget.template.standing && template.lying) || (this.poseTarget.template.lying && template.standing));
  const isSameTemplate = template && (template === this.poseCurrentTemplate);
@@ -2051,13 +2058,18 @@ class TalkingHead {
  this.setPoseFromTemplate(template,ms);
  }, duration);
  } else {
- this.poseCurrentTemplate = template || this.poseCurrentTemplate;
+ this.poseCurrentTemplate = template || this.poseCurrentTemplate;
  }
 
  // Set target
  this.poseTarget = this.poseFactory(this.poseCurrentTemplate, duration);
  this.poseWeightOnLeft = true;
 
+ // Guard: ensure poseTarget was created successfully
+ if (!this.poseTarget || !this.poseTarget.props) {
+ return;
+ }
+
  // Mirror properties, if necessary
  if ( (!isSameTemplate && !isWeightOnLeft) || (isSameTemplate && isWeightOnLeft ) ) {
  this.poseTarget.props = this.mirrorPose(this.poseTarget.props);
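Note: the guards added to `setPoseFromTemplate` in this and the adjacent hunks all defend against the same race: the method can defer itself with `setTimeout` (the intermediate-pose path above), and by the time the timer fires the instance may already have been torn down. A reduced sketch of that race, with illustrative names:

```js
// Why the guards matter: a pending timer can outlive the instance.
class PoseHolder {
  constructor() {
    this.poseFactory = (template, ms) => ({ props: {}, template });
  }
  dispose() {
    this.poseFactory = null; // torn down while a timer may be pending
  }
  setPose(template, deferred = false) {
    if (!this.poseFactory) return; // 1.4.1-style guard
    if (!deferred) {
      // intermediate pose: finish later, like the duration-based retry above
      setTimeout(() => this.setPose(template, true), 1000);
      return;
    }
    this.poseTarget = this.poseFactory(template, 1000);
  }
}

const h = new PoseHolder();
h.setPose({ standing: true });
h.dispose(); // the deferred call now hits the guard instead of throwing
```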
@@ -2076,13 +2088,16 @@ class TalkingHead {
  }
 
  // Make sure deltas are included in the target
+ // Guard against disposal: check if poseBase and its props exist
+ if (this.poseBase && this.poseBase.props && this.poseDelta && this.poseDelta.props) {
  Object.keys(this.poseDelta.props).forEach( key => {
- if ( !this.poseTarget.props.hasOwnProperty(key) ) {
+ if ( !this.poseTarget.props.hasOwnProperty(key) && this.poseBase.props[key] ) {
  this.poseTarget.props[key] = this.poseBase.props[key].clone();
  this.poseTarget.props[key].t = this.animClock;
  this.poseTarget.props[key].d = duration;
  }
  });
+ }
 
  }
 
@@ -2156,523 +2171,87 @@ class TalkingHead {
 
  }
 
+
  /**
- * Initialize FBX animation loader
- */
- async initializeFBXAnimationLoader() {
- try {
- // Dynamic import to avoid loading issues
- const { FBXAnimationLoader } = await import('./fbxAnimationLoader.js');
- this.fbxAnimationLoader = new FBXAnimationLoader(this.armature);
- console.log('FBX Animation Loader initialized');
- } catch (error) {
- console.warn('FBX Animation Loader not available:', error);
- this.fbxAnimationLoader = null;
- }
+ * Get morph target names.
+ * @return {string[]} Morph target names.
+ */
+ getMorphTargetNames() {
+ return [ 'eyesRotateX', 'eyesRotateY', ...Object.keys(this.mtAvatar)].sort();
  }
 
  /**
- * Set body movement type.
- * @param {string} movement Movement type (idle, walking, prancing, gesturing, dancing, excited).
- */
- setBodyMovement(movement) {
- this.bodyMovement = movement;
-
- // Only set avatar property if avatar exists
- if (this.avatar) {
- this.avatar.bodyMovement = movement;
- }
-
- console.log('Body movement set to:', movement);
-
- // Respect the current showFullAvatar setting instead of forcing it to true
- // Only unlock position when returning to idle
- if (movement === 'idle') {
- // Unlock position when returning to idle
- this.unlockAvatarPosition();
+ * Get baseline value for the morph target.
+ * @param {string} mt Morph target name
+ * @return {number} Value, null if not in baseline
+ */
+ getBaselineValue( mt ) {
+ if ( mt === 'eyesRotateY' ) {
+ const ll = this.getBaselineValue('eyeLookOutLeft');
+ if ( ll === undefined ) return undefined;
+ const lr = this.getBaselineValue('eyeLookInLeft');
+ if ( lr === undefined ) return undefined;
+ const rl = this.getBaselineValue('eyeLookOutRight');
+ if ( rl === undefined ) return undefined;
+ const rr = this.getBaselineValue('eyeLookInRight');
+ if ( rr === undefined ) return undefined;
+ return ll - lr;
+ } else if ( mt === 'eyesRotateX' ) {
+ const d = this.getBaselineValue('eyesLookDown');
+ if ( d === undefined ) return undefined;
+ const u = this.getBaselineValue('eyesLookUp');
+ if ( u === undefined ) return undefined;
+ return d - u;
+ } else {
+ return this.mtAvatar[mt]?.baseline;
  }
- // Note: We no longer force showFullAvatar to true for body movements
- // The avatar will use whatever showFullAvatar value was set by the user
-
- // Apply body movement animation
- this.applyBodyMovementAnimation();
  }
 
  /**
- * Apply body movement animation based on current movement type.
- */
- async applyBodyMovementAnimation() {
- // Check if avatar is ready
- if (!this.armature || !this.animQueue) {
- console.log('Avatar not ready for body movement animations');
- return;
- }
-
- console.log('Avatar is running:', this.isRunning);
- console.log('Animation queue exists:', !!this.animQueue);
-
- // Remove existing body movement animations
- const beforeLength = this.animQueue.length;
- this.animQueue = this.animQueue.filter(anim => !anim.template.name.startsWith('bodyMovement'));
- const afterLength = this.animQueue.length;
- console.log(`Filtered animation queue: ${beforeLength} -> ${afterLength} animations`);
-
- if (this.bodyMovement === 'idle') {
- // Stop FBX animations if any
- if (this.fbxAnimationLoader) {
- this.fbxAnimationLoader.stopCurrentAnimation();
- }
- return; // No body movement for idle
- }
-
- // Try to use FBX animations first
- if (this.fbxAnimationLoader) {
- try {
- await this.fbxAnimationLoader.playGestureAnimation(this.bodyMovement, this.movementIntensity);
- console.log('Applied FBX body movement animation:', this.bodyMovement);
- return; // Successfully applied FBX animation
- } catch (error) {
- console.warn('FBX animation failed, falling back to code animation:', error);
- }
- }
-
- // Fallback to code-based animations
- const movementAnim = this.createBodyMovementAnimation(this.bodyMovement);
- console.log('Created movement animation:', movementAnim);
- if (movementAnim) {
- try {
- // Use animFactory to create proper animation object
- const animObj = this.animFactory(movementAnim, true); // true for looping
-
- // Validate the animation object before adding
- if (animObj && animObj.ts && animObj.ts.length > 0) {
- this.animQueue.push(animObj);
- console.log('Applied code-based body movement animation:', this.bodyMovement);
- console.log('Animation queue length:', this.animQueue.length);
- console.log('Animation object:', animObj);
- } else {
- console.error('Invalid animation object created for:', this.bodyMovement);
- console.error('Animation object:', animObj);
- }
- } catch (error) {
- console.error('Error creating body movement animation:', error);
+ * Set baseline for morph target.
+ * @param {string} mt Morph target name
+ * @param {number} val Value, null if to be removed from baseline
+ */
+ setBaselineValue( mt, val ) {
+ if ( mt === 'eyesRotateY' ) {
+ this.setBaselineValue('eyeLookOutLeft', (val === null) ? null : (val>0 ? val : 0) );
+ this.setBaselineValue('eyeLookInLeft', (val === null) ? null : (val>0 ? 0 : -val) );
+ this.setBaselineValue('eyeLookOutRight', (val === null) ? null : (val>0 ? 0 : -val) );
+ this.setBaselineValue('eyeLookInRight', (val === null) ? null : (val>0 ? val : 0) );
+ } else if ( mt === 'eyesRotateX' ) {
+ this.setBaselineValue('eyesLookDown', (val === null) ? null : (val>0 ? val : 0) );
+ this.setBaselineValue('eyesLookUp', (val === null) ? null : (val>0 ? 0 : -val) );
+ } else {
+ if ( this.mtAvatar.hasOwnProperty(mt) ) {
+ Object.assign(this.mtAvatar[mt],{ base: null, baseline: val, needsUpdate: true });
  }
  }
  }
 
  /**
- * Lock avatar position to prevent movement during animations.
- */
- lockAvatarPosition() {
- if (!this.armature) {
- console.warn('Cannot lock position: armature not available');
- return;
- }
-
- // Store the original position if not already stored
- if (!this.originalPosition) {
- this.originalPosition = {
- x: this.armature.position.x,
- y: this.armature.position.y,
- z: this.armature.position.z
- };
- console.log('Original position stored:', this.originalPosition);
- }
-
- // Lock the avatar at its CURRENT position (don't move it)
- this.lockedPosition = {
- x: this.armature.position.x,
- y: this.armature.position.y,
- z: this.armature.position.z
- };
-
- console.log('Avatar position locked at current position:', this.lockedPosition);
- }
-
- /**
- * Unlock avatar position and restore original position.
- */
- unlockAvatarPosition() {
- if (this.armature && this.originalPosition) {
- // Restore avatar to its original position before locking
- this.armature.position.set(
- this.originalPosition.x,
- this.originalPosition.y,
- this.originalPosition.z
- );
- console.log('Avatar position restored to original:', this.originalPosition);
- } else if (this.armature) {
- // Fallback: reset to center if no original position was stored
- this.armature.position.set(0, 0, 0);
- console.log('Avatar position reset to center (0,0,0)');
- }
- this.lockedPosition = null;
- this.originalPosition = null; // Clear original position after unlock
- console.log('Avatar position unlocked');
- }
-
- /**
- * Ensure avatar stays at locked position.
- */
- maintainLockedPosition() {
- if (this.lockedPosition && this.armature) {
- // Enforce the locked position - keep avatar exactly where it was locked
- // This prevents FBX animations from moving the avatar
- this.armature.position.set(
- this.lockedPosition.x,
- this.lockedPosition.y,
- this.lockedPosition.z
- );
- }
- }
-
- /**
- * Create body movement animation.
- * @param {string} movementType Movement type.
- * @returns {Object} Animation object.
- */
- createBodyMovementAnimation(movementType) {
- const intensity = this.movementIntensity || 0.5;
-
- const movementAnimations = {
- walking: {
- name: 'bodyMovement_walking',
- delay: [500, 2000],
- dt: [800, 1200],
- vs: {
- bodyRotateY: [-0.1 * intensity, 0.1 * intensity, 0],
- bodyRotateZ: [-0.05 * intensity, 0.05 * intensity, 0],
- bodyRotateX: [-0.02 * intensity, 0.02 * intensity, 0]
- }
- },
- prancing: {
- name: 'bodyMovement_prancing',
- delay: [300, 1000],
- dt: [400, 800],
- vs: {
- bodyRotateY: [-0.15 * intensity, 0.15 * intensity, 0],
- bodyRotateZ: [-0.08 * intensity, 0.08 * intensity, 0],
- bodyRotateX: [-0.05 * intensity, 0.05 * intensity, 0]
- }
- },
- gesturing: {
- name: 'bodyMovement_gesturing',
- delay: [400, 1500],
- dt: [600, 1000],
- vs: {
- bodyRotateY: [-0.08 * intensity, 0.08 * intensity, 0],
- bodyRotateZ: [-0.03 * intensity, 0.03 * intensity, 0]
- }
- },
- dancing: {
- name: 'bodyMovement_dancing',
- delay: [200, 600],
- dt: [400, 800],
- vs: {
- bodyRotateY: [-0.25 * intensity, 0.25 * intensity, 0],
- bodyRotateZ: [-0.15 * intensity, 0.15 * intensity, 0],
- bodyRotateX: [-0.1 * intensity, 0.1 * intensity, 0]
- }
- },
- dancing2: {
- name: 'bodyMovement_dancing2',
- delay: [150, 500],
- dt: [300, 700],
- vs: {
- bodyRotateY: [-0.3 * intensity, 0.3 * intensity, 0],
- bodyRotateZ: [-0.2 * intensity, 0.2 * intensity, 0],
- bodyRotateX: [-0.12 * intensity, 0.12 * intensity, 0]
- }
- },
- dancing3: {
- name: 'bodyMovement_dancing3',
- delay: [100, 400],
- dt: [200, 600],
- vs: {
- bodyRotateY: [-0.35 * intensity, 0.35 * intensity, 0],
- bodyRotateZ: [-0.25 * intensity, 0.25 * intensity, 0],
- bodyRotateX: [-0.15 * intensity, 0.15 * intensity, 0]
- }
- },
- excited: {
- name: 'bodyMovement_excited',
- delay: [200, 600],
- dt: [300, 700],
- vs: {
- bodyRotateY: [-0.12 * intensity, 0.12 * intensity, 0],
- bodyRotateZ: [-0.06 * intensity, 0.06 * intensity, 0],
- bodyRotateX: [-0.04 * intensity, 0.04 * intensity, 0]
- }
- },
- happy: {
- name: 'bodyMovement_happy',
- delay: [300, 800],
- dt: [500, 1000],
- vs: {
- bodyRotateY: [-0.08 * intensity, 0.08 * intensity, 0],
- bodyRotateZ: [-0.04 * intensity, 0.04 * intensity, 0],
- bodyRotateX: [-0.02 * intensity, 0.02 * intensity, 0]
- }
- },
- surprised: {
- name: 'bodyMovement_surprised',
- delay: [100, 300],
- dt: [200, 500],
- vs: {
- bodyRotateY: [-0.05 * intensity, 0.05 * intensity, 0],
- bodyRotateZ: [-0.03 * intensity, 0.03 * intensity, 0],
- bodyRotateX: [-0.01 * intensity, 0.01 * intensity, 0]
- }
- },
- thinking: {
- name: 'bodyMovement_thinking',
- delay: [800, 2000],
- dt: [1000, 1500],
- vs: {
- bodyRotateY: [-0.06 * intensity, 0.06 * intensity, 0],
- bodyRotateZ: [-0.03 * intensity, 0.03 * intensity, 0],
- bodyRotateX: [-0.02 * intensity, 0.02 * intensity, 0]
- }
- },
- nodding: {
- name: 'bodyMovement_nodding',
- delay: [400, 800],
- dt: [300, 600],
- vs: {
- bodyRotateX: [-0.1 * intensity, 0.1 * intensity, 0],
- bodyRotateY: [-0.02 * intensity, 0.02 * intensity, 0]
- }
- },
- shaking: {
- name: 'bodyMovement_shaking',
- delay: [200, 400],
- dt: [150, 300],
- vs: {
- bodyRotateY: [-0.15 * intensity, 0.15 * intensity, 0],
- bodyRotateZ: [-0.05 * intensity, 0.05 * intensity, 0]
- }
- },
- celebration: {
- name: 'bodyMovement_celebration',
- delay: [100, 300],
- dt: [200, 500],
- vs: {
- bodyRotateY: [-0.2 * intensity, 0.2 * intensity, 0],
- bodyRotateZ: [-0.1 * intensity, 0.1 * intensity, 0],
- bodyRotateX: [-0.08 * intensity, 0.08 * intensity, 0]
- }
- },
- energetic: {
- name: 'bodyMovement_energetic',
- delay: [150, 400],
- dt: [250, 500],
- vs: {
- bodyRotateY: [-0.18 * intensity, 0.18 * intensity, 0],
- bodyRotateZ: [-0.12 * intensity, 0.12 * intensity, 0],
- bodyRotateX: [-0.08 * intensity, 0.08 * intensity, 0]
- }
- },
- swaying: {
- name: 'bodyMovement_swaying',
- delay: [600, 1200],
- dt: [800, 1000],
- vs: {
- bodyRotateY: [-0.1 * intensity, 0.1 * intensity, 0],
- bodyRotateZ: [-0.05 * intensity, 0.05 * intensity, 0]
- }
- },
- bouncing: {
- name: 'bodyMovement_bouncing',
- delay: [300, 600],
- dt: [400, 700],
- vs: {
- bodyRotateY: [-0.05 * intensity, 0.05 * intensity, 0]
- }
- }
- };
-
- // Handle dance variations
- if (movementType === 'dancing') {
- const danceVariations = ['dancing', 'dancing2', 'dancing3'];
- const randomDance = danceVariations[Math.floor(Math.random() * danceVariations.length)];
- return movementAnimations[randomDance] || movementAnimations['dancing'];
- }
-
- return movementAnimations[movementType] || null;
- }
-
- /**
- * Play a random dance animation
- */
- playRandomDance() {
- const danceVariations = ['dancing', 'dancing2', 'dancing3'];
- const randomDance = danceVariations[Math.floor(Math.random() * danceVariations.length)];
- this.setBodyMovement(randomDance);
- }
-
- /**
- * Play a reaction animation
- * @param {string} reactionType - Type of reaction (happy, surprised, thinking, etc.)
- */
- playReaction(reactionType) {
- const validReactions = ['happy', 'surprised', 'thinking', 'nodding', 'shaking', 'celebration', 'energetic', 'swaying', 'bouncing'];
- if (validReactions.includes(reactionType)) {
- this.setBodyMovement(reactionType);
-
- // Auto-return to idle after a delay for non-looping reactions
- const nonLoopingReactions = ['surprised', 'nodding', 'shaking', 'celebration'];
- if (nonLoopingReactions.includes(reactionType)) {
- setTimeout(() => {
- this.setBodyMovement('idle');
- }, 3000); // Return to idle after 3 seconds
- }
- } else {
- console.warn('Invalid reaction type:', reactionType);
- }
- }
-
- /**
- * Play a celebration sequence
- */
- playCelebration() {
- this.playReaction('celebration');
-
- // After celebration, play a random dance
- setTimeout(() => {
- this.playRandomDance();
- }, 2000);
- }
-
- /**
- * Set movement intensity.
- * @param {number} intensity Movement intensity (0-1).
- */
- setMovementIntensity(intensity) {
- this.movementIntensity = Math.max(0, Math.min(1, intensity));
-
- // Only set avatar property if avatar exists
- if (this.avatar) {
- this.avatar.movementIntensity = this.movementIntensity;
- }
-
- console.log('Movement intensity set to:', this.movementIntensity);
-
- // Update FBX animation intensity if available
- if (this.fbxAnimationLoader) {
- this.fbxAnimationLoader.setIntensity(this.movementIntensity);
- }
-
- // Reapply body movement animation with new intensity
- this.applyBodyMovementAnimation();
- }
-
- /**
- * Set show full avatar.
- * @param {boolean} show Whether to show full avatar.
- */
- setShowFullAvatar(show) {
- this.showFullAvatar = show;
-
- // Only set avatar property if avatar exists
- if (this.avatar) {
- this.avatar.showFullAvatar = show;
- }
-
- console.log('Show full avatar set to:', show);
-
- // Only change camera view if it's not already set to the desired view
- // This prevents the avatar from sliding down when starting animations
- if (show && this.viewName !== 'full') {
- console.log('Changing camera view to full');
- this.setView('full');
- } else if (!show && this.viewName !== 'upper') {
- console.log('Changing camera view to upper');
- this.setView('upper');
- } else {
- console.log('Camera view already set to:', this.viewName);
- }
- }
-
- /**
- * Get morph target names.
- * @return {string[]} Morph target names.
- */
- getMorphTargetNames() {
- return [ 'eyesRotateX', 'eyesRotateY', ...Object.keys(this.mtAvatar)].sort();
- }
-
- /**
- * Get baseline value for the morph target.
- * @param {string} mt Morph target name
- * @return {number} Value, null if not in baseline
- */
- getBaselineValue( mt ) {
- if ( mt === 'eyesRotateY' ) {
- const ll = this.getBaselineValue('eyeLookOutLeft');
- if ( ll === undefined ) return undefined;
- const lr = this.getBaselineValue('eyeLookInLeft');
- if ( lr === undefined ) return undefined;
- const rl = this.getBaselineValue('eyeLookOutRight');
- if ( rl === undefined ) return undefined;
- const rr = this.getBaselineValue('eyeLookInRight');
- if ( rr === undefined ) return undefined;
- return ll - lr;
- } else if ( mt === 'eyesRotateX' ) {
- const d = this.getBaselineValue('eyesLookDown');
- if ( d === undefined ) return undefined;
- const u = this.getBaselineValue('eyesLookUp');
- if ( u === undefined ) return undefined;
- return d - u;
- } else {
- return this.mtAvatar[mt]?.baseline;
- }
- }
-
- /**
- * Set baseline for morph target.
- * @param {string} mt Morph target name
- * @param {number} val Value, null if to be removed from baseline
- */
- setBaselineValue( mt, val ) {
- if ( mt === 'eyesRotateY' ) {
- this.setBaselineValue('eyeLookOutLeft', (val === null) ? null : (val>0 ? val : 0) );
- this.setBaselineValue('eyeLookInLeft', (val === null) ? null : (val>0 ? 0 : -val) );
- this.setBaselineValue('eyeLookOutRight', (val === null) ? null : (val>0 ? 0 : -val) );
- this.setBaselineValue('eyeLookInRight', (val === null) ? null : (val>0 ? val : 0) );
- } else if ( mt === 'eyesRotateX' ) {
- this.setBaselineValue('eyesLookDown', (val === null) ? null : (val>0 ? val : 0) );
- this.setBaselineValue('eyesLookUp', (val === null) ? null : (val>0 ? 0 : -val) );
- } else {
- if ( this.mtAvatar.hasOwnProperty(mt) ) {
- Object.assign(this.mtAvatar[mt],{ base: null, baseline: val, needsUpdate: true });
- }
- }
- }
-
- /**
- * Get fixed value for the morph target.
- * @param {string} mt Morph target name
- * @return {number} Value, null if not fixed
- */
- getFixedValue( mt ) {
- if ( mt === 'eyesRotateY' ) {
- const ll = this.getFixedValue('eyeLookOutLeft');
- if ( ll === null ) return null;
- const lr = this.getFixedValue('eyeLookInLeft');
- if ( lr === null ) return null;
- const rl = this.getFixedValue('eyeLookOutRight');
- if ( rl === null ) return null;
- const rr = this.getFixedValue('eyeLookInRight');
- if ( rr === null ) return null;
- return ll - lr;
- } else if ( mt === 'eyesRotateX' ) {
- const d = this.getFixedValue('eyesLookDown');
- if ( d === null ) return null;
- const u = this.getFixedValue('eyesLookUp');
- if ( u === null ) return null;
- return d - u;
- } else {
- return this.mtAvatar[mt]?.fixed;
+ * Get fixed value for the morph target.
+ * @param {string} mt Morph target name
+ * @return {number} Value, null if not fixed
+ */
+ getFixedValue( mt ) {
+ if ( mt === 'eyesRotateY' ) {
+ const ll = this.getFixedValue('eyeLookOutLeft');
+ if ( ll === null ) return null;
+ const lr = this.getFixedValue('eyeLookInLeft');
+ if ( lr === null ) return null;
+ const rl = this.getFixedValue('eyeLookOutRight');
+ if ( rl === null ) return null;
+ const rr = this.getFixedValue('eyeLookInRight');
+ if ( rr === null ) return null;
+ return ll - lr;
+ } else if ( mt === 'eyesRotateX' ) {
+ const d = this.getFixedValue('eyesLookDown');
+ if ( d === null ) return null;
+ const u = this.getFixedValue('eyesLookUp');
+ if ( u === null ) return null;
+ return d - u;
+ } else {
+ return this.mtAvatar[mt]?.fixed;
  }
  }
 
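Note: `eyesRotateX` and `eyesRotateY` are virtual morph targets: the setters fan a signed value out to the underlying ARKit-style look blendshapes, and the getters recover it as a difference (`ll - lr`, `d - u`). A worked sketch of the `eyesRotateY` decomposition used by `setBaselineValue` above:

```js
// Fan a signed horizontal eye rotation out to the four look blendshapes,
// mirroring setBaselineValue(): positive drives out-left/in-right,
// negative drives the opposite pair.
function decomposeEyesRotateY(val) {
  return {
    eyeLookOutLeft:  val > 0 ? val : 0,
    eyeLookInLeft:   val > 0 ? 0 : -val,
    eyeLookOutRight: val > 0 ? 0 : -val,
    eyeLookInRight:  val > 0 ? val : 0,
  };
}

// getBaselineValue() inverts this as ll - lr:
// decomposeEyesRotateY(0.3)  -> ll=0.3, lr=0   -> 0.3
// decomposeEyesRotateY(-0.3) -> ll=0,   lr=0.3 -> -0.3
```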
@@ -2714,10 +2293,6 @@ class TalkingHead {
  let a = t;
  while(1) {
  if ( a.hasOwnProperty(this.stateName) ) {
- // Debug: Log state selection
- if (this.stateName === 'speaking' || this.stateName === 'idle') {
- console.log('Selected state:', this.stateName, 'for avatar body:', this.avatar?.body);
- }
  a = a[this.stateName];
  } else if ( a.hasOwnProperty(this.moodName) ) {
  a = a[this.moodName];
@@ -2725,9 +2300,7 @@ class TalkingHead {
  a = a[this.poseName];
  } else if ( a.hasOwnProperty(this.viewName) ) {
  a = a[this.viewName];
- } else if ( this.avatar && this.avatar.body && a.hasOwnProperty(this.avatar.body) ) {
- // Debug: Log gender-specific override
- console.log('Applying gender-specific override:', this.avatar.body, 'for state:', this.stateName, 'keys:', Object.keys(a));
+ } else if ( this.avatar.body && a.hasOwnProperty(this.avatar.body) ) {
  a = a[this.avatar.body];
  } else if ( a.hasOwnProperty('alt') ) {
 
@@ -2747,12 +2320,6 @@ class TalkingHead {
  }
  }
  a = b;
- // Debug: Log selected alternative and check for gender override
- if (this.avatar && this.avatar.body && a.hasOwnProperty(this.avatar.body)) {
- console.log('Found gender override in selected alternative:', this.avatar.body, 'keys:', Object.keys(a));
- }
- // Continue loop to check for gender-specific override after selecting alternative
- continue;
 
  } else {
  break;
@@ -2783,10 +2350,6 @@ class TalkingHead {
  }
 
  // Values
- // Debug: Log pose selection
- if (a.vs && a.vs.pose) {
- console.log('Pose being selected from vs.pose:', a.vs.pose, 'for avatar body:', this.avatar?.body);
- }
  for( let [mt,vs] of Object.entries(a.vs) ) {
  const base = this.getBaselineValue(mt);
  const vals = vs.map( x => {
@@ -2796,11 +2359,6 @@ class TalkingHead {
  } else if ( typeof x === 'function' ) {
  return x;
  } else if ( typeof x === 'string' || x instanceof String ) {
- // Intercept pose values and override 'hip' and 'side' to 'wide' for male avatars
- if (mt === 'pose' && this.avatar && this.avatar.body === 'M' && (x === 'hip' || x === 'side')) {
- console.log('Intercepting pose', x, 'in animation factory, overriding to wide for male avatar');
- return 'wide'; // Always use 'wide' for male avatars, never 'side' or 'hip'
- }
  return x.slice();
  } else if ( Array.isArray(x) ) {
  if ( mt === 'gesture' ) {
@@ -2913,7 +2471,7 @@ class TalkingHead {
  if ( this.isAvatarOnly ) {
  dt = t;
  } else {
- requestAnimationFrame( this.animate.bind(this) );
+ this._raf = requestAnimationFrame( this.animate );
  dt = t - this.animTimeLast;
  if ( dt < this.animFrameDur ) return;
  this.animTimeLast = t;
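Note: the old code passed a fresh `this.animate.bind(this)` on every frame, which leaves no stable handle; binding once in the constructor and storing the request id in `this._raf` (see the constructor hunk earlier) presumably lets a stop/dispose path call `cancelAnimationFrame`, though that call site is outside this diff. The pattern in isolation:

```js
// Bind once, keep the handle, cancel on stop. A per-frame .bind(this)
// returns a brand-new function each time, so nothing can cancel it later.
class Loop {
  constructor() {
    this.tick = this.tick.bind(this); // one stable reference
    this._raf = null;
  }
  start() { this._raf = requestAnimationFrame(this.tick); }
  tick(t) {
    this._raf = requestAnimationFrame(this.tick);
    // ...per-frame work, throttled by frame duration as in animate()...
  }
  stop() {
    if (this._raf !== null) cancelAnimationFrame(this._raf);
    this._raf = null;
  }
}
```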
@@ -2997,7 +2555,7 @@ class TalkingHead {
  const tasks = [];
  for( i=0, l=this.animQueue.length; i<l; i++ ) {
  const x = this.animQueue[i];
- if ( !x || !x.ts || !x.ts.length || this.animClock < x.ts[0] ) continue;
+ if ( this.animClock < x.ts[0] ) continue;
 
  for( j = x.ndx || 0, k = x.ts.length; j<k; j++ ) {
  if ( this.animClock < x.ts[j] ) break;
@@ -3091,18 +2649,7 @@ class TalkingHead {
  break;
 
  case 'pose':
- // Ensure gender-appropriate pose for male avatars - always use 'wide', never 'side' or 'hip'
- if (this.avatar && this.avatar.body === 'M') {
- if (j === 'hip' || j === 'side') {
- // Always override 'hip' and 'side' to 'wide' for male avatars
- if (this.poseTemplates['wide']) {
- j = 'wide';
- console.log('Overriding pose', j === 'hip' ? 'hip' : 'side', 'to wide for male avatar');
- }
- }
- }
  this.poseName = j;
- console.log('Setting pose to:', this.poseName, 'for avatar body:', this.avatar?.body, 'state:', this.stateName);
  this.setPoseFromTemplate( this.poseTemplates[ this.poseName ] );
  break;
 
@@ -3334,27 +2881,23 @@ class TalkingHead {
  }
 
  /**
- * Get lip-sync processor based on language. Uses statically imported modules.
+ * Get lip-sync processor based on language. Use statically imported modules.
  * @param {string} lang Language
- * @param {string} [path="./"] Module path (ignored, kept for compatibility)
+ * @param {string} [path="./"] Module path (ignored, using static imports)
  */
  lipsyncGetProcessor(lang, path="./") {
  if ( !this.lipsync.hasOwnProperty(lang) ) {
  const langLower = lang.toLowerCase();
- const className = 'Lipsync' + lang.charAt(0).toUpperCase() + lang.slice(1);
-
- try {
- // Use statically imported module
- const module = LIPSYNC_MODULES[langLower];
-
- if (module && module[className]) {
- this.lipsync[lang] = new module[className];
- console.log(`Loaded lip-sync module for ${lang}`);
+ // Use statically imported modules from LIPSYNC_MODULES
+ if (LIPSYNC_MODULES[langLower]) {
+ const className = 'Lipsync' + lang.charAt(0).toUpperCase() + lang.slice(1);
+ if (LIPSYNC_MODULES[langLower][className]) {
+ this.lipsync[lang] = new LIPSYNC_MODULES[langLower][className];
  } else {
- console.warn(`Lip-sync module for ${lang} not found. Available modules:`, Object.keys(LIPSYNC_MODULES));
+ console.warn(`Lipsync class ${className} not found in module for language ${lang}`);
  }
- } catch (error) {
- console.warn(`Failed to load lip-sync module for ${lang}:`, error);
+ } else {
+ console.warn(`Lipsync module for language ${lang} not found in static imports`);
  }
  }
  }
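Note: the lookup `LIPSYNC_MODULES[langLower][className]` implies a static registry keyed by lower-case language code, with each entry holding a module's exports. Its definition is outside this hunk; a sketch of the implied shape, with hypothetical import paths:

```js
// Implied shape of the static registry (paths are hypothetical; only the
// lookup pattern is visible in this diff).
import { LipsyncEn } from './lipsync-en.mjs';
import { LipsyncFi } from './lipsync-fi.mjs';

const LIPSYNC_MODULES = {
  en: { LipsyncEn },
  fi: { LipsyncFi },
};

// lipsyncGetProcessor('en') then resolves to:
// new LIPSYNC_MODULES['en']['LipsyncEn']()
```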
@@ -3743,590 +3286,149 @@ class TalkingHead {
3743
3286
  }
3744
3287
  }
3745
3288
 
3746
- if ( onsubtitles ) {
3747
- o.onSubtitles = onsubtitles;
3748
- }
3749
-
3750
- if ( opt.isRaw ) {
3751
- o.isRaw = true;
3752
- }
3753
-
3754
- if ( Object.keys(o).length ) {
3755
- this.speechQueue.push(o);
3756
- if ( !o.isRaw ) {
3757
- this.speechQueue.push( { break: 300 } );
3758
- }
3759
- this.startSpeaking();
3760
- }
3761
-
3762
- }
3763
-
3764
- /**
3765
- * Play audio playlist using Web Audio API.
3766
- * @param {boolean} [force=false] If true, forces to proceed
3767
- */
3768
- async playAudio(force=false) {
3769
- if ( !this.armature || (this.isAudioPlaying && !force) ) return;
3770
- this.isAudioPlaying = true;
3771
- if ( this.audioPlaylist.length ) {
3772
- const item = this.audioPlaylist.shift();
3773
-
3774
- // If Web Audio API is suspended, try to resume it
3775
- if ( this.audioCtx.state === "suspended" || this.audioCtx.state === "interrupted" ) {
3776
- const resume = this.audioCtx.resume();
3777
- const timeout = new Promise((_r, rej) => setTimeout(() => rej("p2"), 1000));
3778
- try {
3779
- await Promise.race([resume, timeout]);
3780
- } catch(e) {
3781
- console.log("Can't play audio. Web Audio API suspended. This is often due to calling some speak method before the first user action, which is typically prevented by the browser.");
3782
- this.playAudio(true);
3783
- return;
3784
- }
3785
- }
3786
-
3787
- // AudioBuffer
3788
- let audio;
3789
- if ( Array.isArray(item.audio) ) {
3790
- // Convert from PCM samples
3791
- let buf = this.concatArrayBuffers( item.audio );
3792
- audio = this.pcmToAudioBuffer(buf);
3793
- } else {
3794
- audio = item.audio;
3795
- }
3796
-
3797
- // Create audio source
3798
- this.audioSpeechSource = this.audioCtx.createBufferSource();
3799
- this.audioSpeechSource.buffer = audio;
3800
- this.audioSpeechSource.playbackRate.value = 1 / this.animSlowdownRate;
3801
- this.audioSpeechSource.connect(this.audioAnalyzerNode);
3802
- this.audioSpeechSource.addEventListener('ended', () => {
3803
- this.audioSpeechSource.disconnect();
3804
- this.playAudio(true);
3805
- }, { once: true });
3806
-
3807
- // Rescale lipsync and push to queue
3808
- let delay = 0;
3809
- if ( item.anim ) {
3810
- // Find the lowest negative time point, if any
3811
- if ( !item.isRaw ) {
3812
- delay = Math.abs(Math.min(0, ...item.anim.map( x => Math.min(...x.ts) ) ) );
3813
- }
3814
- item.anim.forEach( x => {
3815
- for(let i=0; i<x.ts.length; i++) {
3816
- x.ts[i] = this.animClock + x.ts[i] + delay;
3817
- }
3818
- this.animQueue.push(x);
3819
- });
3820
- }
3821
-
3822
- // Play, dealy in seconds so pre-animations can be played
3823
- this.audioSpeechSource.start(delay/1000);
3824
-
3825
- } else {
3826
- this.isAudioPlaying = false;
3827
- this.startSpeaking(true);
3828
- }
3829
- }
3830
-
3831
- /**
3832
- * Synthesize speech using browser's built-in Speech Synthesis API
3833
- * @param {Object} line Speech line object
3834
- */
3835
- async synthesizeWithBrowserTTS(line) {
3836
- return new Promise((resolve, reject) => {
3837
- // Get the text from the line
3838
- const text = line.text.map(x => x.word).join(' ');
3839
-
3840
- // Create speech synthesis utterance
3841
- const utterance = new SpeechSynthesisUtterance(text);
3842
-
3843
- // Set voice properties
3844
- const lang = line.lang || this.avatar.ttsLang || this.opt.ttsLang || 'en-US';
3845
- const rate = (line.rate || this.avatar.ttsRate || this.opt.ttsRate || 1) + this.mood.speech.deltaRate;
3846
- const pitch = (line.pitch || this.avatar.ttsPitch || this.opt.ttsPitch || 1) + this.mood.speech.deltaPitch;
3847
- const volume = (line.volume || this.avatar.ttsVolume || this.opt.ttsVolume || 1) + this.mood.speech.deltaVolume;
3848
-
3849
- utterance.lang = lang;
3850
- utterance.rate = Math.max(0.1, Math.min(10, rate));
3851
- utterance.pitch = Math.max(0, Math.min(2, pitch));
3852
- utterance.volume = Math.max(0, Math.min(1, volume));
3853
-
3854
- // Try to find a matching voice
3855
- const voices = speechSynthesis.getVoices();
3856
- const targetVoice = line.voice || this.avatar.ttsVoice || this.opt.ttsVoice;
3857
- if (targetVoice && voices.length > 0) {
3858
- const voice = voices.find(v => v.name.includes(targetVoice) || v.lang === lang);
3859
- if (voice) {
3860
- utterance.voice = voice;
3861
- }
3862
- }
3863
-
3864
- // Estimate duration based on text length and speech rate
3865
- const estimatedDuration = (text.length * 100) / utterance.rate; // Adjust for speech rate
3866
-
3867
- // Create audio buffer for the estimated duration
3868
- const audioBuffer = this.audioCtx.createBuffer(1, this.audioCtx.sampleRate * (estimatedDuration / 1000), this.audioCtx.sampleRate);
3869
-
3870
- // Generate lip-sync data from text using the existing lip-sync modules
3871
- const lipsyncLang = this.avatar.lipsyncLang || this.opt.lipsyncLang || 'en';
3872
- const processedText = this.lipsyncPreProcessText(text, lipsyncLang);
3873
- const lipsyncData = this.lipsyncWordsToVisemes(processedText, lipsyncLang);
3874
-
3875
- console.log('Browser TTS Lip-sync Debug:', {
3876
- text,
3877
- lipsyncLang,
3878
- processedText,
3879
- lipsyncData,
3880
- hasVisemes: lipsyncData && lipsyncData.visemes && lipsyncData.visemes.length > 0,
3881
- estimatedDuration
3882
- });
3883
-
3884
- // Generate lip-sync animation from the viseme data
3885
- const lipsyncAnim = [];
3886
- if (lipsyncData && lipsyncData.visemes && lipsyncData.visemes.length > 0) {
3887
- const totalDuration = lipsyncData.times[lipsyncData.visemes.length - 1] + lipsyncData.durations[lipsyncData.visemes.length - 1];
3888
-
3889
- for (let i = 0; i < lipsyncData.visemes.length; i++) {
3890
- const viseme = lipsyncData.visemes[i];
3891
- const relativeTime = lipsyncData.times[i] / totalDuration;
3892
- const relativeDuration = lipsyncData.durations[i] / totalDuration;
3893
-
3894
- const time = relativeTime * estimatedDuration;
3895
- const duration = relativeDuration * estimatedDuration;
3896
-
3897
- lipsyncAnim.push({
3898
- template: { name: 'viseme' },
3899
- ts: [time - Math.min(60, 2 * duration / 3), time + Math.min(25, duration / 2), time + duration + Math.min(60, duration / 2)],
3900
- vs: {
3901
- ['viseme_' + viseme]: [null, (viseme === 'PP' || viseme === 'FF') ? 0.9 : 0.6, 0]
3902
- }
3903
- });
3904
- }
3905
- }
3906
-
3907
- // Combine original animation with lip-sync animation
3908
- const combinedAnim = [...line.anim, ...lipsyncAnim];
3909
-
3910
- // Add to playlist
3911
- this.audioPlaylist.push({ anim: combinedAnim, audio: audioBuffer });
3912
- this.onSubtitles = line.onSubtitles || null;
3913
- this.resetLips();
3914
- if (line.mood) this.setMood(line.mood);
3915
- this.playAudio();
3916
-
3917
- // Handle speech synthesis events
3918
- utterance.onend = () => {
3919
- resolve();
3920
- };
3921
-
3922
- utterance.onerror = (event) => {
3923
- console.error('Speech synthesis error:', event.error);
3924
- reject(event.error);
3925
- };
3926
-
3927
- // Start speaking
3928
- speechSynthesis.speak(utterance);
3929
- });
3930
- }
3931
-
3932
- /**
3933
- * Synthesize speech using ElevenLabs TTS
3934
- * @param {Object} line Speech line object
3935
- */
3936
- async synthesizeWithElevenLabsTTS(line) {
3937
- // Get the text from the line
3938
- const text = line.text.map(x => x.word).join(' ');
3939
-
3940
- // ElevenLabs API request
3941
- const voiceId = line.voice || this.avatar.ttsVoice || this.opt.ttsVoice || "21m00Tcm4TlvDq8ikWAM"; // Default to Rachel
3942
-
3943
- const requestBody = {
3944
- text: text,
3945
- model_id: "eleven_monolingual_v1",
3946
- voice_settings: {
3947
- stability: 0.5,
3948
- similarity_boost: 0.5,
3949
- style: 0.0,
3950
- use_speaker_boost: true
3951
- }
3952
- };
3953
-
3954
- const response = await fetch(`${this.opt.ttsEndpoint}/${voiceId}`, {
3955
- method: 'POST',
3956
- headers: {
3957
- 'Accept': 'audio/mpeg',
3958
- 'Content-Type': 'application/json',
3959
- 'xi-api-key': this.opt.ttsApikey
3960
- },
3961
- body: JSON.stringify(requestBody)
3962
- });
3963
-
3964
- if (!response.ok) {
3965
- throw new Error(`ElevenLabs TTS error: ${response.status} ${response.statusText}`);
3966
- }
3967
-
3968
- // Get audio data
3969
- const audioArrayBuffer = await response.arrayBuffer();
3970
- const audioBuffer = await this.audioCtx.decodeAudioData(audioArrayBuffer);
3971
-
3972
- // Use text-based lip-sync with proper error handling
3973
- console.log('Using text-based lip-sync for debugging...');
3974
- const lipsyncLang = this.avatar.lipsyncLang || this.opt.lipsyncLang || 'en';
3975
-
3976
- let audioAnalysis;
3977
- try {
3978
- console.log('Lip-sync modules available:', {
3979
- hasLipsync: !!this.lipsync,
3980
- lipsyncKeys: this.lipsync ? Object.keys(this.lipsync) : [],
3981
- lipsyncLang: lipsyncLang
3982
- });
3983
-
3984
- const processedText = this.lipsyncPreProcessText(text, lipsyncLang);
3985
- const lipsyncData = this.lipsyncWordsToVisemes(processedText, lipsyncLang);
3986
-
3987
- console.log('Lip-sync data:', {
3988
- processedText,
3989
- lipsyncData,
3990
- hasVisemes: lipsyncData && lipsyncData.visemes && lipsyncData.visemes.length > 0
3991
- });
3992
-
3993
- if (lipsyncData && lipsyncData.visemes && lipsyncData.visemes.length > 0) {
3994
- // Create audio analysis structure for compatibility
3995
- audioAnalysis = {
3996
- visemes: lipsyncData.visemes.map((viseme, i) => ({
3997
- viseme: viseme,
3998
- startTime: (i * audioBuffer.duration) / lipsyncData.visemes.length,
3999
- endTime: ((i + 1) * audioBuffer.duration) / lipsyncData.visemes.length,
4000
- duration: audioBuffer.duration / lipsyncData.visemes.length,
4001
- intensity: 0.7
4002
- })),
4003
- words: [],
4004
- duration: audioBuffer.duration,
4005
- features: { onsets: [], boundaries: [] }
4006
- };
4007
- } else {
4008
- throw new Error('No visemes generated from text');
4009
- }
4010
- } catch (error) {
4011
- console.error('Text-based lip-sync failed, using fallback:', error);
4012
- // Fallback: create simple visemes from text
4013
- const words = text.toLowerCase().split(/\s+/);
4014
- const simpleVisemes = [];
4015
-
4016
- for (const word of words) {
4017
- // Simple phonetic mapping
4018
- for (const char of word) {
4019
- let viseme = 'aa'; // default
4020
- if ('aeiou'.includes(char)) viseme = 'aa';
4021
- else if ('bp'.includes(char)) viseme = 'PP';
4022
- else if ('fv'.includes(char)) viseme = 'FF';
4023
- else if ('st'.includes(char)) viseme = 'SS';
4024
- else if ('dln'.includes(char)) viseme = 'DD';
4025
- else if ('kg'.includes(char)) viseme = 'kk';
4026
- else if ('rw'.includes(char)) viseme = 'RR';
4027
-
4028
- simpleVisemes.push(viseme);
4029
- }
4030
- }
4031
-
4032
- audioAnalysis = {
4033
- visemes: simpleVisemes.map((viseme, i) => ({
4034
- viseme: viseme,
4035
- startTime: (i * audioBuffer.duration) / simpleVisemes.length,
4036
- endTime: ((i + 1) * audioBuffer.duration) / simpleVisemes.length,
4037
- duration: audioBuffer.duration / simpleVisemes.length,
4038
- intensity: 0.6
4039
- })),
4040
- words: [],
4041
- duration: audioBuffer.duration,
4042
- features: { onsets: [], boundaries: [] }
4043
- };
3289
+ if ( onsubtitles ) {
3290
+ o.onSubtitles = onsubtitles;
4044
3291
  }
4045
-
4046
- console.log('ElevenLabs TTS Audio Analysis:', {
4047
- text,
4048
- audioDuration: audioBuffer.duration,
4049
- visemeCount: audioAnalysis.visemes ? audioAnalysis.visemes.length : 0,
4050
- wordCount: audioAnalysis.words ? audioAnalysis.words.length : 0,
4051
- features: {
4052
- onsets: audioAnalysis.features && audioAnalysis.features.onsets ? audioAnalysis.features.onsets.length : 0,
4053
- boundaries: audioAnalysis.features && audioAnalysis.features.phonemeBoundaries ? audioAnalysis.features.phonemeBoundaries.length : 0
4054
- },
4055
- visemes: audioAnalysis.visemes ? audioAnalysis.visemes.slice(0, 3) : [] // Show first 3 visemes for debugging
4056
- });
4057
-
4058
- // Generate precise lip-sync animation from audio analysis
4059
- const lipsyncAnim = [];
4060
- if (audioAnalysis.visemes && audioAnalysis.visemes.length > 0) {
4061
- console.log('ElevenLabs: Generating lip-sync animation from', audioAnalysis.visemes.length, 'visemes');
4062
- for (let i = 0; i < audioAnalysis.visemes.length; i++) {
4063
- const visemeData = audioAnalysis.visemes[i];
4064
- const time = visemeData.startTime * 1000; // Convert to milliseconds
4065
- const duration = visemeData.duration * 1000;
4066
- const intensity = visemeData.intensity;
4067
-
4068
- lipsyncAnim.push({
4069
- template: { name: 'viseme' },
4070
- ts: [time - Math.min(60, 2 * duration / 3), time + Math.min(25, duration / 2), time + duration + Math.min(60, duration / 2)],
4071
- vs: {
4072
- ['viseme_' + visemeData.viseme]: [null, intensity, 0]
4073
- }
4074
- });
3292
+
3293
+ if ( opt.isRaw ) {
3294
+ o.isRaw = true;
4075
3295
  }
4076
- console.log('ElevenLabs: Generated', lipsyncAnim.length, 'lip-sync animation frames');
4077
- } else {
4078
- console.warn('ElevenLabs: No visemes available for lip-sync animation');
3296
+
3297
+ if ( Object.keys(o).length ) {
3298
+ this.speechQueue.push(o);
3299
+ if ( !o.isRaw ) {
3300
+ this.speechQueue.push( { break: 300 } );
3301
+ }
3302
+ this.startSpeaking();
4079
3303
  }
4080
-
4081
- // Combine original animation with lip-sync animation
4082
- const combinedAnim = [...line.anim, ...lipsyncAnim];
4083
- console.log('ElevenLabs: Combined animation frames:', combinedAnim.length, '(original:', line.anim.length, '+ lipsync:', lipsyncAnim.length, ')');
4084
-
4085
- // Add to playlist
4086
- this.audioPlaylist.push({ anim: combinedAnim, audio: audioBuffer });
4087
- this.onSubtitles = line.onSubtitles || null;
4088
- this.resetLips();
4089
- if (line.mood) this.setMood(line.mood);
4090
- this.playAudio();
3304
+
4091
3305
  }
4092
3306
 
4093
3307
  /**
4094
- * Synthesize speech using Deepgram Aura-2 TTS
4095
- * @param {Object} line Speech line object
4096
- */
4097
- async synthesizeWithDeepgramTTS(line) {
4098
- // Get the text from the line
4099
- const text = line.text.map(x => x.word).join(' ');
4100
-
4101
- // Deepgram API request
4102
- const voiceModel = line.voice || this.avatar.ttsVoice || this.opt.ttsVoice || "aura-2-thalia-en"; // Default to Thalia
4103
-
4104
- // Build URL with model as query parameter
4105
- const url = `${this.opt.ttsEndpoint}?model=${voiceModel}`;
4106
-
4107
- const response = await fetch(url, {
4108
- method: 'POST',
4109
- headers: {
4110
- 'Authorization': `Token ${this.opt.ttsApikey}`,
4111
- 'Content-Type': 'text/plain',
4112
- 'Accept': 'audio/mpeg'
4113
- },
4114
- body: text
4115
- });
3308
+ * Play audio playlist using Web Audio API.
3309
+ * @param {boolean} [force=false] If true, forces to proceed
3310
+ */
3311
+ async playAudio(force=false) {
3312
+ if ( !this.armature || (this.isAudioPlaying && !force) ) return;
3313
+ this.isAudioPlaying = true;
3314
+ if ( this.audioPlaylist.length ) {
3315
+ const item = this.audioPlaylist.shift();
4116
3316
 
4117
- if (!response.ok) {
4118
- throw new Error(`Deepgram TTS error: ${response.status} ${response.statusText}`);
4119
- }
3317
+ // If Web Audio API is suspended, try to resume it
3318
+ if ( this.audioCtx.state === "suspended" || this.audioCtx.state === "interrupted" ) {
3319
+ const resume = this.audioCtx.resume();
3320
+ const timeout = new Promise((_r, rej) => setTimeout(() => rej("p2"), 1000));
3321
+ try {
3322
+ await Promise.race([resume, timeout]);
3323
+ } catch(e) {
3324
+ console.log("Can't play audio. Web Audio API suspended. This is often due to calling some speak method before the first user action, which is typically prevented by the browser.");
3325
+ this.playAudio(true);
3326
+ return;
3327
+ }
3328
+ }
4120
3329
 
4121
- // Get audio data
4122
- const audioArrayBuffer = await response.arrayBuffer();
4123
- const audioBuffer = await this.audioCtx.decodeAudioData(audioArrayBuffer);
4124
-
4125
- // Use text-based lip-sync with proper error handling
4126
- console.log('Using text-based lip-sync for Deepgram...');
4127
- const lipsyncLang = this.avatar.lipsyncLang || this.opt.lipsyncLang || 'en';
4128
-
4129
- let audioAnalysis;
4130
- try {
4131
- console.log('Lip-sync modules available:', {
4132
- hasLipsync: !!this.lipsync,
4133
- lipsyncKeys: this.lipsync ? Object.keys(this.lipsync) : [],
4134
- lipsyncLang: lipsyncLang
4135
- });
4136
-
4137
- const processedText = this.lipsyncPreProcessText(text, lipsyncLang);
4138
- const lipsyncData = this.lipsyncWordsToVisemes(processedText, lipsyncLang);
4139
-
4140
- console.log('Lip-sync data:', {
4141
- processedText,
4142
- lipsyncData,
4143
- hasVisemes: lipsyncData && lipsyncData.visemes && lipsyncData.visemes.length > 0
4144
- });
4145
-
4146
- if (lipsyncData && lipsyncData.visemes && lipsyncData.visemes.length > 0) {
4147
- // Create audio analysis structure for compatibility
4148
- audioAnalysis = {
4149
- visemes: lipsyncData.visemes.map((viseme, i) => ({
4150
- viseme: viseme,
4151
- startTime: (i * audioBuffer.duration) / lipsyncData.visemes.length,
4152
- endTime: ((i + 1) * audioBuffer.duration) / lipsyncData.visemes.length,
4153
- duration: audioBuffer.duration / lipsyncData.visemes.length,
4154
- intensity: 0.7
4155
- })),
4156
- words: [],
4157
- duration: audioBuffer.duration,
4158
- features: { onsets: [], boundaries: [] }
4159
- };
3330
+ // AudioBuffer
3331
+ let audio;
3332
+ if ( Array.isArray(item.audio) ) {
3333
+ // Convert from PCM samples
3334
+ let buf = this.concatArrayBuffers( item.audio );
3335
+ audio = this.pcmToAudioBuffer(buf);
4160
3336
  } else {
4161
- throw new Error('No visemes generated from text');
3337
+ audio = item.audio;
4162
3338
  }
4163
- } catch (error) {
4164
- console.error('Text-based lip-sync failed, using fallback:', error);
4165
- // Fallback: create simple visemes from text
4166
- const words = text.toLowerCase().split(/\s+/);
4167
- const simpleVisemes = [];
4168
-
4169
- for (const word of words) {
4170
- // Simple phonetic mapping
4171
- for (const char of word) {
4172
- let viseme = 'aa'; // default
4173
- if ('aeiou'.includes(char)) viseme = 'aa';
4174
- else if ('bp'.includes(char)) viseme = 'PP';
4175
- else if ('fv'.includes(char)) viseme = 'FF';
4176
- else if ('st'.includes(char)) viseme = 'SS';
4177
- else if ('dln'.includes(char)) viseme = 'DD';
4178
- else if ('kg'.includes(char)) viseme = 'kk';
4179
- else if ('rw'.includes(char)) viseme = 'RR';
4180
-
4181
- simpleVisemes.push(viseme);
4182
- }
3339
+
3340
+ // Make sure previous audio source is cleared
3341
+ if (this.audioSpeechSource) {
3342
+ try { this.audioSpeechSource.stop?.() } catch(error) {};
3343
+ this.audioSpeechSource.disconnect();
3344
+ this.audioSpeechSource.onended = null;
3345
+ this.audioSpeechSource = null;
4183
3346
  }
4184
-
4185
- audioAnalysis = {
4186
- visemes: simpleVisemes.map((viseme, i) => ({
4187
- viseme: viseme,
4188
- startTime: (i * audioBuffer.duration) / simpleVisemes.length,
4189
- endTime: ((i + 1) * audioBuffer.duration) / simpleVisemes.length,
4190
- duration: audioBuffer.duration / simpleVisemes.length,
4191
- intensity: 0.6
4192
- })),
4193
- words: [],
4194
- duration: audioBuffer.duration,
4195
- features: { onsets: [], boundaries: [] }
3347
+
3348
+ // Create audio source
3349
+ const source = this.audioCtx.createBufferSource();
3350
+ this.audioSpeechSource = source;
3351
+ source.buffer = audio;
3352
+ source.playbackRate.value = 1 / this.animSlowdownRate;
3353
+ source.connect(this.audioAnalyzerNode);
3354
+ source.onended = () => {
3355
+ source.disconnect();
3356
+ source.onended = null;
3357
+ if ( this.audioSpeechSource === source ) {
3358
+ this.audioSpeechSource = null;
3359
+ }
3360
+ this.playAudio(true);
4196
3361
  };
4197
- }
4198
-
4199
- console.log('Deepgram TTS Audio Analysis:', {
4200
- text,
4201
- audioDuration: audioBuffer.duration,
4202
- visemeCount: audioAnalysis.visemes ? audioAnalysis.visemes.length : 0,
4203
- wordCount: audioAnalysis.words ? audioAnalysis.words.length : 0,
4204
- features: {
4205
- onsets: audioAnalysis.features && audioAnalysis.features.onsets ? audioAnalysis.features.onsets.length : 0,
4206
- boundaries: audioAnalysis.features && audioAnalysis.features.phonemeBoundaries ? audioAnalysis.features.phonemeBoundaries.length : 0
4207
- },
4208
- visemes: audioAnalysis.visemes ? audioAnalysis.visemes.slice(0, 3) : [] // Show first 3 visemes for debugging
4209
- });
4210
-
4211
- // Generate precise lip-sync animation from audio analysis
4212
- const lipsyncAnim = [];
4213
- if (audioAnalysis.visemes && audioAnalysis.visemes.length > 0) {
4214
- console.log('Deepgram: Generating lip-sync animation from', audioAnalysis.visemes.length, 'visemes');
4215
- for (let i = 0; i < audioAnalysis.visemes.length; i++) {
4216
- const visemeData = audioAnalysis.visemes[i];
4217
- const time = visemeData.startTime * 1000; // Convert to milliseconds
4218
- const duration = visemeData.duration * 1000;
4219
- const intensity = visemeData.intensity;
4220
-
4221
- lipsyncAnim.push({
4222
- template: { name: 'viseme' },
4223
- ts: [time - Math.min(60, 2 * duration / 3), time + Math.min(25, duration / 2), time + duration + Math.min(60, duration / 2)],
4224
- vs: {
4225
- ['viseme_' + visemeData.viseme]: [null, intensity, 0]
3362
+
3363
+ // Rescale lipsync and push to queue
3364
+ let delay = 0;
3365
+ if ( item.anim ) {
3366
+ // Find the lowest negative time point, if any
3367
+ if ( !item.isRaw ) {
3368
+ delay = Math.abs(Math.min(0, ...item.anim.map( x => Math.min(...x.ts) ) ) );
4226
3369
  }
4227
- });
4228
- }
4229
- console.log('Deepgram: Generated', lipsyncAnim.length, 'lip-sync animation frames');
3370
+ item.anim.forEach( x => {
3371
+ for(let i=0; i<x.ts.length; i++) {
3372
+ x.ts[i] = this.animClock + x.ts[i] + delay;
3373
+ }
3374
+ this.animQueue.push(x);
3375
+ });
3376
+ }
3377
+
3378
+ // Play; the delay (converted to seconds) lets pre-animations run first
3379
+ source.start( this.audioCtx.currentTime + delay/1000);
3380
+
4230
3381
  } else {
4231
- console.warn('Deepgram: No visemes available for lip-sync animation');
3382
+ this.isAudioPlaying = false;
3383
+ this.startSpeaking(true);
4232
3384
  }
4233
-
4234
- // Combine original animation with lip-sync animation
4235
- const combinedAnim = [...line.anim, ...lipsyncAnim];
4236
- console.log('Deepgram: Combined animation frames:', combinedAnim.length, '(original:', line.anim.length, '+ lipsync:', lipsyncAnim.length, ')');
4237
-
4238
- // Add to playlist
4239
- this.audioPlaylist.push({ anim: combinedAnim, audio: audioBuffer });
4240
- this.onSubtitles = line.onSubtitles || null;
4241
- this.resetLips();
4242
- if (line.mood) this.setMood(line.mood);
4243
- this.playAudio();
4244
3385
  }
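For reference, a minimal standalone sketch of the resume-with-timeout pattern used in playAudio above; the helper name and the 1-second budget are illustrative, not part of the package:

// Race AudioContext.resume() against a timeout: on some browsers the
// returned promise never settles while the context stays suspended.
async function resumeWithTimeout(audioCtx, ms = 1000) {
  if (audioCtx.state !== "suspended" && audioCtx.state !== "interrupted") return true;
  const timeout = new Promise((_res, rej) =>
    setTimeout(() => rej(new Error("resume timeout")), ms));
  try {
    await Promise.race([audioCtx.resume(), timeout]);
    return true;  // context is running again
  } catch (e) {
    return false; // still suspended; wait for a user gesture
  }
}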
4245
3386
 
4246
3387
  /**
4247
- * Synthesize speech using Azure TTS
4248
- * @param {Object} line Speech line object
4249
- */
4250
- async synthesizeWithAzureTTS(line) {
4251
- // Get the text from the line
4252
- const text = line.text.map(x => x.word).join(' ');
4253
-
4254
- // Azure TTS SSML
4255
- const voiceName = line.voice || this.avatar.ttsVoice || this.opt.ttsVoice || "en-US-AriaNeural";
4256
- const ssml = `
4257
- <speak version="1.0" xmlns="http://www.w3.org/2001/10/synthesis" xml:lang="en-US">
4258
- <voice name="${voiceName}">
4259
- ${text}
4260
- </voice>
4261
- </speak>
4262
- `;
4263
-
4264
- const response = await fetch(this.opt.ttsEndpoint, {
4265
- method: 'POST',
4266
- headers: {
4267
- 'Ocp-Apim-Subscription-Key': this.opt.ttsApikey,
4268
- 'Content-Type': 'application/ssml+xml',
4269
- 'X-Microsoft-OutputFormat': 'audio-16khz-128kbitrate-mono-mp3'
4270
- },
4271
- body: ssml
4272
- });
3388
+ * Take the next item from the speech queue, convert it to speech, and
3389
+ * load the audio file.
3390
+ * @param {boolean} [force=false] If true, force the queue to proceed (e.g. after a break)
3391
+ */
3392
+ async startSpeaking( force = false ) {
3393
+ if ( !this.armature || (this.isSpeaking && !force) ) return;
3394
+ this.stateName = 'speaking';
3395
+ this.isSpeaking = true;
3396
+ if ( this.speechQueue.length ) {
3397
+ let line = this.speechQueue.shift();
3398
+ if ( line.emoji ) {
4273
3399
 
4274
- if (!response.ok) {
4275
- throw new Error(`Azure TTS error: ${response.status} ${response.statusText}`);
4276
- }
3400
+ // Look at the camera
3401
+ this.lookAtCamera(500);
4277
3402
 
4278
- // Get audio data
4279
- const audioArrayBuffer = await response.arrayBuffer();
4280
- const audioBuffer = await this.audioCtx.decodeAudioData(audioArrayBuffer);
4281
-
4282
- // Analyze audio for precise lip-sync timing
4283
- console.log('Analyzing audio for precise lip-sync...');
4284
- const audioAnalysis = await this.audioAnalyzer.analyzeAudio(audioBuffer, text);
4285
-
4286
- console.log('Azure TTS Audio Analysis:', {
4287
- text,
4288
- audioDuration: audioBuffer.duration,
4289
- visemeCount: audioAnalysis.visemes.length,
4290
- wordCount: audioAnalysis.words.length,
4291
- features: {
4292
- onsets: audioAnalysis.features.onsets.length,
4293
- boundaries: audioAnalysis.features.phonemeBoundaries.length
4294
- }
4295
- });
4296
-
4297
- // Generate precise lip-sync animation from audio analysis
4298
- const lipsyncAnim = [];
4299
- for (let i = 0; i < audioAnalysis.visemes.length; i++) {
4300
- const visemeData = audioAnalysis.visemes[i];
4301
- const time = visemeData.startTime * 1000; // Convert to milliseconds
4302
- const duration = visemeData.duration * 1000;
4303
- const intensity = visemeData.intensity;
4304
-
4305
- lipsyncAnim.push({
4306
- template: { name: 'viseme' },
4307
- ts: [time - Math.min(60, 2 * duration / 3), time + Math.min(25, duration / 2), time + duration + Math.min(60, duration / 2)],
4308
- vs: {
4309
- ['viseme_' + visemeData.viseme]: [null, intensity, 0]
3403
+ // Only emoji
3404
+ let duration = line.emoji.dt.reduce((a,b) => a+b,0);
3405
+ this.animQueue.push( this.animFactory( line.emoji ) );
3406
+ setTimeout( this.startSpeaking.bind(this), duration, true );
3407
+ } else if ( line.break ) {
3408
+ // Break
3409
+ setTimeout( this.startSpeaking.bind(this), line.break, true );
3410
+ } else if ( line.audio ) {
3411
+
3412
+ // Look at the camera
3413
+ if ( !line.isRaw ) {
3414
+ this.lookAtCamera(500);
3415
+ this.speakWithHands();
3416
+ this.resetLips();
4310
3417
  }
4311
- });
4312
- }
4313
-
4314
- // Combine original animation with lip-sync animation
4315
- const combinedAnim = [...line.anim, ...lipsyncAnim];
4316
-
4317
- // Add to playlist
4318
- this.audioPlaylist.push({ anim: combinedAnim, audio: audioBuffer });
4319
- this.onSubtitles = line.onSubtitles || null;
4320
- this.resetLips();
4321
- if (line.mood) this.setMood(line.mood);
4322
- this.playAudio();
4323
- }
4324
3418
 
4325
- /**
4326
- * Synthesize speech using external TTS service (Google Cloud, etc.)
4327
- * @param {Object} line Speech line object
4328
- */
4329
- async synthesizeWithExternalTTS(line) {
3419
+ // Make a playlist
3420
+ this.audioPlaylist.push({ anim: line.anim, audio: line.audio, isRaw: line.isRaw });
3421
+ this.onSubtitles = line.onSubtitles || null;
3422
+ if ( line.mood ) this.setMood( line.mood );
3423
+ this.playAudio();
3424
+
3425
+ } else if ( line.text ) {
3426
+
3427
+ // Look at the camera
3428
+ this.lookAtCamera(500);
3429
+
3430
+ // Spoken text
3431
+ try {
4330
3432
  // Convert text to SSML
4331
3433
  let ssml = "<speak>";
4332
3434
  line.text.forEach( (x,i) => {
@@ -4346,6 +3448,7 @@ class TalkingHead {
4346
3448
  });
4347
3449
  ssml += "</speak>";
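The SSML built above interpolates raw words; a hypothetical helper (not in the package) showing the XML escaping such a builder generally needs before embedding user text:

// Escape the five XML special characters so <, & etc. in spoken text
// cannot break the SSML document sent to the TTS endpoint.
function escapeSsml(text) {
  return text
    .replace(/&/g, "&amp;")
    .replace(/</g, "&lt;")
    .replace(/>/g, "&gt;")
    .replace(/"/g, "&quot;")
    .replace(/'/g, "&apos;");
}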
4348
3450
 
3451
+
4349
3452
  const o = {
4350
3453
  method: "POST",
4351
3454
  headers: {
@@ -4433,70 +3536,6 @@ class TalkingHead {
4433
3536
 
4434
3537
  } else {
4435
3538
  this.startSpeaking(true);
4436
- }
4437
- }
4438
-
4439
- /**
4440
- * Take the next queue item from the speech queue, convert it to text, and
4441
- * load the audio file.
4442
- * @param {boolean} [force=false] If true, forces to proceed (e.g. after break)
4443
- */
4444
- async startSpeaking( force = false ) {
4445
- if ( !this.armature || (this.isSpeaking && !force) ) return;
4446
- this.stateName = 'speaking';
4447
- this.isSpeaking = true;
4448
- if ( this.speechQueue.length ) {
4449
- let line = this.speechQueue.shift();
4450
- if ( line.emoji ) {
4451
-
4452
- // Look at the camera
4453
- this.lookAtCamera(500);
4454
-
4455
- // Only emoji
4456
- let duration = line.emoji.dt.reduce((a,b) => a+b,0);
4457
- this.animQueue.push( this.animFactory( line.emoji ) );
4458
- setTimeout( this.startSpeaking.bind(this), duration, true );
4459
- } else if ( line.break ) {
4460
- // Break
4461
- setTimeout( this.startSpeaking.bind(this), line.break, true );
4462
- } else if ( line.audio ) {
4463
-
4464
- // Look at the camera
4465
- if ( !line.isRaw ) {
4466
- this.lookAtCamera(500);
4467
- this.speakWithHands();
4468
- this.resetLips();
4469
- }
4470
-
4471
- // Make a playlist
4472
- this.audioPlaylist.push({ anim: line.anim, audio: line.audio, isRaw: line.isRaw });
4473
- this.onSubtitles = line.onSubtitles || null;
4474
- if ( line.mood ) this.setMood( line.mood );
4475
- this.playAudio();
4476
-
4477
- } else if ( line.text ) {
4478
-
4479
- // Look at the camera
4480
- this.lookAtCamera(500);
4481
-
4482
- // Spoken text
4483
- try {
4484
- // Check which TTS service to use
4485
- if (!this.opt.ttsEndpoint || this.opt.ttsEndpoint === "") {
4486
- // Use browser's built-in speech synthesis
4487
- await this.synthesizeWithBrowserTTS(line);
4488
- } else if (this.opt.ttsService === "elevenlabs") {
4489
- // Use ElevenLabs TTS
4490
- await this.synthesizeWithElevenLabsTTS(line);
4491
- } else if (this.opt.ttsService === "deepgram") {
4492
- // Use Deepgram Aura-2 TTS
4493
- await this.synthesizeWithDeepgramTTS(line);
4494
- } else if (this.opt.ttsService === "azure") {
4495
- // Use Azure TTS
4496
- await this.synthesizeWithAzureTTS(line);
4497
- } else {
4498
- // Use external TTS service (Google Cloud, etc.)
4499
- await this.synthesizeWithExternalTTS(line);
4500
3539
  }
4501
3540
  } catch (error) {
4502
3541
  console.error("Error:", error);
@@ -4532,7 +3571,7 @@ class TalkingHead {
4532
3571
  * Pause speaking.
4533
3572
  */
4534
3573
  pauseSpeaking() {
4535
- try { this.audioSpeechSource.stop(); } catch(error) {}
3574
+ try { this.audioSpeechSource?.stop(); } catch(error) {}
4536
3575
  this.audioPlaylist.length = 0;
4537
3576
  this.stateName = 'idle';
4538
3577
  this.isSpeaking = false;
@@ -4548,7 +3587,7 @@ class TalkingHead {
4548
3587
  * Stop speaking and clear the speech queue.
4549
3588
  */
4550
3589
  stopSpeaking() {
4551
- try { this.audioSpeechSource.stop(); } catch(error) {}
3590
+ try { this.audioSpeechSource?.stop(); } catch(error) {}
4552
3591
  this.audioPlaylist.length = 0;
4553
3592
  this.speechQueue.length = 0;
4554
3593
  this.animQueue = this.animQueue.filter( x => x.template.name !== 'viseme' && x.template.name !== 'subtitles' && x.template.name !== 'blendshapes' );
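Both pauseSpeaking and stopSpeaking now use the same guarded-stop idiom; a minimal sketch of why both the optional chaining and the try/catch are needed (helper name illustrative):

// audioSpeechSource may already be null (cleared in onended), and stop()
// throws InvalidStateError if the node was never started, so both guards apply.
function safeStop(source) {
  try { source?.stop(); } catch (e) { /* never started or already stopped */ }
}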
@@ -5306,8 +4345,12 @@ class TalkingHead {
5306
4345
  */
5307
4346
  setSlowdownRate(k) {
5308
4347
  this.animSlowdownRate = k;
4348
+ if ( this.audioSpeechSource ) {
5309
4349
  this.audioSpeechSource.playbackRate.value = 1 / this.animSlowdownRate;
4350
+ }
4351
+ if ( this.audioBackgroundSource ) {
5310
4352
  this.audioBackgroundSource.playbackRate.value = 1 / this.animSlowdownRate;
4353
+ }
5311
4354
  }
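A usage sketch for the now null-guarded setter, assuming a constructed instance `head`:

// Safe to call even when no speech or background source is active.
head.setSlowdownRate(2);  // live sources play at rate 1/2
head.setSlowdownRate(1);  // back to normal speed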
5312
4355
 
5313
4356
  /**
@@ -5336,7 +4379,7 @@ class TalkingHead {
5336
4379
  this.animTimeLast = performance.now();
5337
4380
  this.isRunning = true;
5338
4381
  if ( !this.isAvatarOnly ) {
5339
- requestAnimationFrame( this.animate.bind(this) );
4382
+ this._raf = requestAnimationFrame( this.animate );
5340
4383
  }
5341
4384
  }
5342
4385
  }
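Keeping the requestAnimationFrame id in this._raf pairs with the cancelAnimationFrame added to dispose(); a generic sketch of the pattern (class and names illustrative):

// Store the latest rAF id so the self-rearming loop can be cancelled.
class RenderLoop {
  constructor(render) {
    this.render = render;
    this._raf = null;
    this.tick = this.tick.bind(this); // bind once, not once per frame
  }
  tick(t) { this.render(t); this._raf = requestAnimationFrame(this.tick); }
  start() { if (this._raf === null) this._raf = requestAnimationFrame(this.tick); }
  stop() { if (this._raf !== null) { cancelAnimationFrame(this._raf); this._raf = null; } }
}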
@@ -5392,7 +4435,6 @@ class TalkingHead {
5392
4435
  * @param {number} [ndx=0] Index of the clip
5393
4436
  * @param {number} [scale=0.01] Position scale factor
5394
4437
  */
5395
-
5396
4438
  async playAnimation(url, onprogress=null, dur=10, ndx=0, scale=0.01, disablePositionLock=false) {
5397
4439
  if ( !this.armature ) return;
5398
4440
 
@@ -5445,98 +4487,27 @@ class TalkingHead {
5445
4487
  try {
5446
4488
  action.fadeIn(0.5).play();
5447
4489
  console.log('FBX animation started successfully:', url);
5448
- } catch (error) {
5449
- console.warn('FBX animation failed to start:', error);
5450
- // Stop the animation and unlock position on error
5451
- this.stopAnimation();
5452
- return;
5453
- }
5454
-
5455
- // Check if the animation actually has valid tracks
5456
- if (action.getClip().tracks.length === 0) {
5457
- console.warn('FBX animation has no valid tracks, stopping');
5458
- this.stopAnimation();
5459
- return;
5460
- }
5461
-
5462
- } else {
5463
-
5464
- // Validate file extension
5465
- const fileExtension = url.split('.').pop().toLowerCase();
5466
- if (fileExtension !== 'fbx') {
5467
- console.error(`Invalid file type for FBX animation: ${url}. Expected .fbx file.`);
5468
- return;
5469
- }
5470
-
5471
- // Check if file exists before attempting to load
5472
- let fileExists = false;
5473
- try {
5474
- const response = await fetch(url, { method: 'HEAD' });
5475
- fileExists = response.ok;
5476
- if (!fileExists) {
5477
- console.error(`FBX file not found at ${url}. Status: ${response.status}`);
5478
- console.error('Please check:');
5479
- console.error('1. File path is correct (note: path is case-sensitive)');
5480
- console.error('2. File exists in your public folder');
5481
- console.error('3. File is accessible (not blocked by server)');
5482
- return;
5483
- }
5484
- } catch (fetchError) {
5485
- console.warn(`Could not verify file existence for ${url}, attempting to load anyway:`, fetchError);
5486
- }
5487
-
5488
- // Load animation with error handling
5489
- const loader = new FBXLoader();
5490
- let fbx;
5491
-
5492
- try {
5493
- fbx = await loader.loadAsync( url, onprogress );
5494
- } catch (error) {
5495
- console.error(`Failed to load FBX animation from ${url}:`, error);
5496
- console.error('Error details:', {
5497
- message: error.message,
5498
- url: url,
5499
- suggestion: 'Make sure the file is a valid FBX file and the path is correct'
5500
- });
5501
-
5502
- // Try to provide helpful error message
5503
- if (error.message && error.message.includes('version number')) {
5504
- console.error('FBX Loader Error: Cannot find version number');
5505
- console.error('This error usually means:');
5506
- console.error('1. The file is not a valid FBX file (might be GLB, corrupted, or wrong format)');
5507
- console.error('2. The file might be corrupted');
5508
- console.error('3. The file path might be incorrect');
5509
- console.error('4. The server returned an HTML error page instead of the FBX file');
5510
- console.error('5. The file might not exist at that path');
5511
- console.error('');
5512
- console.error('Solution: Please verify:');
5513
- console.error(` - File exists at: ${url}`);
5514
- console.error(' - File is a valid FBX binary file');
5515
- console.error(' - File path matches your public folder structure');
5516
- console.error(' - File is not corrupted');
5517
- }
5518
-
5519
- // Try to fetch and check what we actually got
5520
- try {
5521
- const response = await fetch(url);
5522
- const contentType = response.headers.get('content-type');
5523
- const text = await response.text();
5524
- console.error(`Response details:`, {
5525
- status: response.status,
5526
- contentType: contentType,
5527
- firstBytes: text.substring(0, 100),
5528
- isHTML: text.trim().startsWith('<!DOCTYPE') || text.trim().startsWith('<html')
5529
- });
5530
- if (text.trim().startsWith('<!DOCTYPE') || text.trim().startsWith('<html')) {
5531
- console.error('The server returned an HTML page instead of an FBX file. The file path is likely incorrect.');
5532
- }
5533
- } catch (fetchError) {
5534
- console.error('Could not fetch file for debugging:', fetchError);
5535
- }
5536
-
4490
+ } catch (error) {
4491
+ console.warn('FBX animation failed to start:', error);
4492
+ // Stop the animation and unlock position on error
4493
+ this.stopAnimation();
4494
+ return;
4495
+ }
4496
+
4497
+ // Check if the animation actually has valid tracks
4498
+ if (action.getClip().tracks.length === 0) {
4499
+ console.warn('FBX animation has no valid tracks, stopping');
4500
+ this.stopAnimation();
5537
4501
  return;
5538
4502
  }
5539
4503
 
4504
+ } else {
4505
+
4506
+ // Load animation
4507
+ const loader = new FBXLoader();
4508
+
4509
+ let fbx = await loader.loadAsync( url, onprogress );
4510
+
5540
4511
  if ( fbx && fbx.animations && fbx.animations[ndx] ) {
5541
4512
  let anim = fbx.animations[ndx];
5542
4513
 
@@ -5688,31 +4659,6 @@ class TalkingHead {
5688
4659
  return null; // No mapping found
5689
4660
  };
5690
4661
 
5691
- // Debug: Log FBX bone names and avatar bone names for comparison
5692
- const fbxBoneNames = new Set();
5693
- anim.tracks.forEach(track => {
5694
- const trackParts = track.name.split('.');
5695
- fbxBoneNames.add(trackParts[0]);
5696
- });
5697
-
5698
- console.log('=== Ready Player Me Animation Bone Analysis ===');
5699
- console.log('FBX bone names:', Array.from(fbxBoneNames).sort().join(', '));
5700
- console.log('Avatar skeleton bone names:', Array.from(availableBones).sort().join(', '));
5701
-
5702
- // Check for arm bones specifically
5703
- const fbxArmBones = Array.from(fbxBoneNames).filter(b =>
5704
- b.toLowerCase().includes('arm') ||
5705
- b.toLowerCase().includes('hand') ||
5706
- b.toLowerCase().includes('shoulder')
5707
- );
5708
- const avatarArmBones = Array.from(availableBones).filter(b =>
5709
- b.includes('Arm') ||
5710
- b.includes('Hand') ||
5711
- b.includes('Shoulder')
5712
- );
5713
- console.log('FBX arm/hand/shoulder bones:', fbxArmBones.sort().join(', '));
5714
- console.log('Avatar arm/hand/shoulder bones:', avatarArmBones.sort().join(', '));
5715
-
5716
4662
  // Filter and map animation tracks
5717
4663
  const mappedTracks = [];
5718
4664
  const unmappedBones = new Set();
@@ -5731,11 +4677,6 @@ class TalkingHead {
5731
4677
  const newTrackName = `${mappedBoneName}.${property}`;
5732
4678
  const newTrack = track.clone();
5733
4679
  newTrack.name = newTrackName;
5734
-
5735
- // Note: Rotation corrections removed - they were causing issues with both arms
5736
- // If left arm still has issues, it's likely a bone mapping problem, not rotation
5737
- // Focus on getting bone names mapped correctly first
5738
-
5739
4680
  mappedTracks.push(newTrack);
5740
4681
 
5741
4682
  // Store mapping for logging
@@ -5744,12 +4685,6 @@ class TalkingHead {
5744
4685
  }
5745
4686
  } else {
5746
4687
  unmappedBones.add(fbxBoneName);
5747
- // Log unmapped bones (especially arm bones)
5748
- if (fbxBoneName.toLowerCase().includes('arm') ||
5749
- fbxBoneName.toLowerCase().includes('hand') ||
5750
- fbxBoneName.toLowerCase().includes('shoulder')) {
5751
- console.warn(`⚠️ Arm bone "${fbxBoneName}" could not be mapped to avatar skeleton`);
5752
- }
5753
4688
  }
5754
4689
  });
5755
4690
 
@@ -5765,24 +4700,11 @@ class TalkingHead {
5765
4700
  console.log(`✓ Mapped ${boneNameMap.size} bone(s):`,
5766
4701
  Array.from(boneNameMap.entries()).map(([from, to]) => `${from}→${to}`).join(', '));
5767
4702
  }
5768
-
5769
- // Check if arm bones were mapped
5770
- const mappedArmBones = Array.from(boneNameMap.values()).filter(b =>
5771
- b.includes('Arm') || b.includes('Hand') || b.includes('Shoulder')
5772
- );
5773
- if (mappedArmBones.length > 0) {
5774
- console.log(`✓ Arm bones mapped: ${mappedArmBones.join(', ')}`);
5775
- } else {
5776
- console.warn('⚠️ No arm bones were mapped! This may cause arm rigging issues.');
5777
- }
5778
- } else {
5779
- console.error('❌ No tracks could be mapped! Animation may not work correctly.');
5780
4703
  }
5781
4704
 
5782
4705
  // Rename and scale Mixamo tracks, create a pose
5783
4706
  const props = {};
5784
4707
  anim.tracks.forEach( t => {
5785
- t.name = t.name.replaceAll('mixamorig','');
5786
4708
  const ids = t.name.split('.');
5787
4709
  if ( ids[1] === 'position' ) {
5788
4710
  for(let i=0; i<t.values.length; i++ ) {
@@ -5818,13 +4740,6 @@ class TalkingHead {
5818
4740
  } else {
5819
4741
  const msg = 'Animation ' + url + ' (ndx=' + ndx + ') not found';
5820
4742
  console.error(msg);
5821
- if (fbx && fbx.animations) {
5822
- console.error(`FBX file loaded but has ${fbx.animations.length} animation(s), requested index ${ndx}`);
5823
- } else if (fbx) {
5824
- console.error('FBX file loaded but contains no animations');
5825
- } else {
5826
- console.error('FBX file failed to load or is invalid');
5827
- }
5828
4743
  }
5829
4744
  }
5830
4745
  }
@@ -5834,21 +4749,16 @@ class TalkingHead {
5834
4749
  */
5835
4750
  stopAnimation() {
5836
4751
 
5837
- // Stop only the current FBX action, preserve mixer for morph targets
5838
- if (this.currentFBXAction) {
5839
- this.currentFBXAction.stop();
5840
- this.currentFBXAction = null;
5841
- console.log('FBX animation action stopped, mixer preserved for lip-sync');
5842
- }
5843
-
5844
- // Only destroy mixer if no other animations are running
5845
- // This allows morph target animations (lip-sync) to continue
5846
- if (this.mixer && this.mixer._actions.length === 0) {
4752
+ // Stop mixer
4753
+ if (this.mixer) {
4754
+ this.mixer.removeEventListener('finished', this._mixerHandler);
4755
+ this.mixer.stopAllAction();
4756
+ this.mixer.uncacheRoot(this.armature);
5847
4757
  this.mixer = null;
5848
- console.log('Mixer destroyed as no actions remain');
4758
+ this._mixerHandler = null;
5849
4759
  }
5850
4760
 
5851
- // Unlock position when animation stops (only if it was locked)
4761
+ // Unlock position if it was locked
5852
4762
  if (this.positionWasLocked) {
5853
4763
  this.unlockAvatarPosition();
5854
4764
  console.log('Position unlocked after FBX animation stopped');
@@ -5856,347 +4766,729 @@ class TalkingHead {
5856
4766
  console.log('Position was not locked, no unlock needed');
5857
4767
  }
5858
4768
 
5859
- // Restart gesture
4769
+ // Restart gesture
4770
+ if ( this.gesture && this.poseTarget && this.poseTarget.props ) {
4771
+ for( let [p,v] of Object.entries(this.gesture) ) {
4772
+ v.t = this.animClock;
4773
+ v.d = 1000;
4774
+ if ( this.poseTarget.props.hasOwnProperty(p) ) {
4775
+ this.poseTarget.props[p].copy(v);
4776
+ this.poseTarget.props[p].t = this.animClock;
4777
+ this.poseTarget.props[p].d = 1000;
4778
+ }
4779
+ }
4780
+ }
4781
+
4782
+ // Restart pose animation
4783
+ if ( this.animQueue ) {
4784
+ let anim = this.animQueue.find( x => x.template.name === 'pose' );
4785
+ if ( anim ) {
4786
+ anim.ts[0] = this.animClock;
4787
+ }
4788
+ }
4789
+
4790
+ // Only call setPoseFromTemplate if poseFactory exists (not disposed)
4791
+ if ( this.poseFactory ) {
4792
+ this.setPoseFromTemplate( null );
4793
+ }
4794
+
4795
+ }
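A condensed sketch of the three-step THREE.AnimationMixer teardown now performed here; the wrapper function is illustrative:

// 1) detach the 'finished' listener, 2) stop all running actions,
// 3) release cached bindings so the armature's clips can be GC'd.
function disposeMixer(mixer, root, onFinished) {
  if (!mixer) return null;
  if (onFinished) mixer.removeEventListener('finished', onFinished);
  mixer.stopAllAction();
  mixer.uncacheRoot(root);
  return null; // caller assigns the result back: this.mixer = disposeMixer(...)
}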
4796
+
4797
+
4798
+ /**
4799
+ * Play RPM/Mixamo pose.
4800
+ * @param {string|Object} url Pose name | URL to FBX
4801
+ * @param {progressfn} [onprogress=null] Callback for progress
4802
+ * @param {number} [dur=5] Duration of the pose in seconds
4803
+ * @param {number} [ndx=0] Index of the clip
4804
+ * @param {number} [scale=0.01] Position scale factor
4805
+ */
4806
+ async playPose(url, onprogress=null, dur=5, ndx=0, scale=0.01) {
4807
+
4808
+ if ( !this.armature ) return;
4809
+
4810
+ // Check if we already have the pose template ready
4811
+ let pose = this.poseTemplates[url];
4812
+ if ( !pose ) {
4813
+ const item = this.animPoses.find( x => x.url === url+'-'+ndx );
4814
+ if ( item ) {
4815
+ pose = item.pose;
4816
+ }
4817
+ }
4818
+
4819
+ // If we have the template, use it, otherwise try to load it
4820
+ if ( pose ) {
4821
+
4822
+ this.poseName = url;
4823
+
4824
+ if (this.mixer) {
4825
+ this.mixer.removeEventListener('finished', this._mixerHandler);
4826
+ this.mixer.stopAllAction();
4827
+ this.mixer.uncacheRoot(this.armature);
4828
+ this.mixer = null;
4829
+ this._mixerHandler = null;
4830
+ }
4831
+ let anim = this.animQueue.find( x => x.template.name === 'pose' );
4832
+ if ( anim ) {
4833
+ anim.ts[0] = this.animClock + (dur * 1000) + 2000;
4834
+ }
4835
+ this.setPoseFromTemplate( pose );
4836
+
4837
+ } else {
4838
+
4839
+ // Load animation
4840
+ const loader = new FBXLoader();
4841
+
4842
+ let fbx = await loader.loadAsync( url, onprogress );
4843
+
4844
+ if ( fbx && fbx.animations && fbx.animations[ndx] ) {
4845
+ let anim = fbx.animations[ndx];
4846
+
4847
+ // Create a pose
4848
+ const props = {};
4849
+ anim.tracks.forEach( t => {
4850
+
4851
+ // Rename and scale Mixamo tracks
4852
+ t.name = t.name.replaceAll('mixamorig','');
4853
+ const ids = t.name.split('.');
4854
+ if ( ids[1] === 'position' ) {
4855
+ props[t.name] = new THREE.Vector3( t.values[0] * scale, t.values[1] * scale, t.values[2] * scale);
4856
+ } else if ( ids[1] === 'quaternion' ) {
4857
+ props[t.name] = new THREE.Quaternion( t.values[0], t.values[1], t.values[2], t.values[3] );
4858
+ } else if ( ids[1] === 'rotation' ) {
4859
+ props[ids[0]+".quaternion"] = new THREE.Quaternion().setFromEuler(new THREE.Euler( t.values[0], t.values[1], t.values[2],'XYZ' )).normalize();
4860
+ }
4861
+ });
4862
+
4863
+ // Add to pose
4864
+ const newPose = { props: props };
4865
+ if ( props['Hips.position'] ) {
4866
+ if ( props['Hips.position'].y < 0.5 ) {
4867
+ newPose.lying = true;
4868
+ } else {
4869
+ newPose.standing = true;
4870
+ }
4871
+ }
4872
+ this.animPoses.push({
4873
+ url: url+'-'+ndx,
4874
+ pose: newPose
4875
+ });
4876
+
4877
+ // Play
4878
+ this.playPose(url, onprogress, dur, ndx, scale);
4879
+
4880
+ } else {
4881
+ const msg = 'Pose ' + url + ' (ndx=' + ndx + ') not found';
4882
+ console.error(msg);
4883
+ }
4884
+ }
4885
+ }
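A self-contained sketch of the Mixamo track-to-pose conversion used by playPose above (assumes three.js; the 0.01 scale converts Mixamo centimeters to meters):

import * as THREE from 'three';

// Turn one animation track's first keyframe into a static pose property.
// Mixamo prefixes bone names with 'mixamorig', which is stripped first.
function trackToPoseProp(track, scale = 0.01) {
  const name = track.name.replaceAll('mixamorig', '');
  const [bone, prop] = name.split('.');
  const v = track.values;
  if (prop === 'position') {
    return [name, new THREE.Vector3(v[0] * scale, v[1] * scale, v[2] * scale)];
  } else if (prop === 'quaternion') {
    return [name, new THREE.Quaternion(v[0], v[1], v[2], v[3])];
  } else if (prop === 'rotation') {
    return [bone + '.quaternion', new THREE.Quaternion()
      .setFromEuler(new THREE.Euler(v[0], v[1], v[2], 'XYZ')).normalize()];
  }
  return null; // unsupported track type
}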
4886
+
4887
+ /**
4888
+ * Stop the pose. (Functionality is the same as in stopAnimation.)
4889
+ */
4890
+ stopPose() {
4891
+ this.stopAnimation();
4892
+ }
4893
+
4894
+ /**
4895
+ * Play a gesture, which is either a hand gesture, an emoji animation or their
4896
+ * combination.
4897
+ * @param {string} name Gesture name
4898
+ * @param {number} [dur=3] Duration of the gesture in seconds
4899
+ * @param {boolean} [mirror=false] Mirror gesture
4900
+ * @param {number} [ms=1000] Transition time in milliseconds
4901
+ */
4902
+ playGesture(name, dur=3, mirror=false, ms=1000) {
4903
+
4904
+ if ( !this.armature ) return;
4905
+
4906
+ // Hand gesture, if any
4907
+ let g = this.gestureTemplates[name];
4908
+ if ( g ) {
4909
+
4910
+ // New gesture always overrides the existing one
4911
+ if ( this.gestureTimeout ) {
4912
+ clearTimeout( this.gestureTimeout );
4913
+ this.gestureTimeout = null;
4914
+ }
4915
+
4916
+ // Stop talking hands animation
4917
+ let ndx = this.animQueue.findIndex( y => y.template.name === "talkinghands" );
4918
+ if ( ndx !== -1 ) {
4919
+ this.animQueue[ndx].ts = this.animQueue[ndx].ts.map( x => 0 );
4920
+ }
4921
+
4922
+ // Set gesture
4923
+ this.gesture = this.propsToThreeObjects( g );
4924
+ if ( mirror ) {
4925
+ this.gesture = this.mirrorPose( this.gesture );
4926
+ }
4927
+ if ( name === "namaste" && this.avatar.body === 'M' ) {
4928
+ // Workaround for the male model so that the hands meet
4929
+ this.gesture["RightArm.quaternion"].rotateTowards( new THREE.Quaternion(0,1,0,0), -0.25);
4930
+ this.gesture["LeftArm.quaternion"].rotateTowards( new THREE.Quaternion(0,1,0,0), -0.25);
4931
+ }
4932
+
4933
+ // Apply to target
4934
+ for( let [p,val] of Object.entries(this.gesture) ) {
4935
+ val.t = this.animClock;
4936
+ val.d = ms;
4937
+ if ( this.poseTarget.props.hasOwnProperty(p) ) {
4938
+ this.poseTarget.props[p].copy(val);
4939
+ this.poseTarget.props[p].t = this.animClock;
4940
+ this.poseTarget.props[p].d = ms;
4941
+ }
4942
+ }
4943
+
4944
+ // Timer
4945
+ if ( dur && Number.isFinite(dur) ) {
4946
+ this.gestureTimeout = setTimeout( this.stopGesture.bind(this,ms), 1000 * dur);
4947
+ }
4948
+ }
4949
+
4950
+ // Animated emoji, if any
4951
+ let em = this.animEmojis[name];
4952
+ if ( em ) {
4953
+
4954
+ // Follow link
4955
+ if ( em && em.link ) {
4956
+ em = this.animEmojis[em.link];
4957
+ }
4958
+
4959
+ if ( em ) {
4960
+ // Look at the camera for 500 ms
4961
+ this.lookAtCamera(500);
4962
+
4963
+ // Create animation and tag as gesture
4964
+ const anim = this.animFactory( em );
4965
+ anim.gesture = true;
4966
+
4967
+ // Rescale duration
4968
+ if ( dur && Number.isFinite(dur) ) {
4969
+ const first = anim.ts[0];
4970
+ const last = anim.ts[ anim.ts.length -1 ];
4971
+ const total = last - first;
4972
+ const excess = (dur * 1000) - total;
4973
+
4974
+ // If longer, increase longer parts; if shorter, scale everything
4975
+ if ( excess > 0 ) {
4976
+ const dt = [];
4977
+ for( let i=1; i<anim.ts.length; i++ ) dt.push( anim.ts[i] - anim.ts[i-1] );
4978
+ const rescale = em.template?.rescale || dt.map( x => x / total );
4980
+ anim.ts = anim.ts.map( (x,i,arr) => {
4981
+ return (i===0) ? first : (arr[i-1] + dt[i-1] + rescale[i-1] * excess);
4982
+ });
4983
+ } else {
4984
+ const scale = (dur * 1000) / total;
4985
+ anim.ts = anim.ts.map( x => first + scale * (x - first) );
4986
+ }
4987
+ }
4988
+
4989
+ this.animQueue.push( anim );
4990
+ }
4991
+ }
4992
+
4993
+ }
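The emoji-duration rescaling above has two branches; a standalone sketch with a worked example (function name illustrative):

// Given keyframe times ts and a requested duration in ms: if the request
// is longer, the surplus is shared across segments by their weight; if it
// is shorter, everything is scaled uniformly toward the first keyframe.
function rescaleTimestamps(ts, durMs) {
  const first = ts[0], total = ts[ts.length - 1] - first;
  const excess = durMs - total;
  if (excess > 0) {
    const dt = ts.slice(1).map((t, i) => t - ts[i]); // segment lengths
    const weights = dt.map(x => x / total);
    const out = [first];
    for (let i = 0; i < dt.length; i++) {
      out.push(out[i] + dt[i] + weights[i] * excess);
    }
    return out;
  }
  const k = durMs / total;
  return ts.map(t => first + k * (t - first));
}

// Example: rescaleTimestamps([0, 100, 400], 600) -> [0, 150, 600]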
4994
+
4995
+ /**
4996
+ * Stop the gesture.
4997
+ * @param {number} [ms=1000] Transition time in milliseconds
4998
+ */
4999
+ stopGesture(ms=1000) {
5000
+
5001
+ // Stop gesture timer
5002
+ if ( this.gestureTimeout ) {
5003
+ clearTimeout( this.gestureTimeout );
5004
+ this.gestureTimeout = null;
5005
+ }
5006
+
5007
+ // Stop hand gesture, if any
5860
5008
  if ( this.gesture ) {
5861
- for( let [p,v] of Object.entries(this.gesture) ) {
5862
- v.t = this.animClock;
5863
- v.d = 1000;
5009
+ const gs = Object.entries(this.gesture);
5010
+ this.gesture = null;
5011
+ for( const [p,val] of gs ) {
5864
5012
  if ( this.poseTarget.props.hasOwnProperty(p) ) {
5865
- this.poseTarget.props[p].copy(v);
5013
+ this.poseTarget.props[p].copy( this.getPoseTemplateProp(p) );
5866
5014
  this.poseTarget.props[p].t = this.animClock;
5867
- this.poseTarget.props[p].d = 1000;
5015
+ this.poseTarget.props[p].d = ms;
5868
5016
  }
5869
5017
  }
5870
5018
  }
5871
5019
 
5872
- // Restart pose animation
5873
- let anim = this.animQueue.find( x => x.template.name === 'pose' );
5874
- if ( anim ) {
5875
- anim.ts[0] = this.animClock;
5020
+ // Stop animated emoji gesture, if any
5021
+ let i = this.animQueue.findIndex( y => y.gesture );
5022
+ if ( i !== -1 ) {
5023
+ this.animQueue.splice(i, 1);
5876
5024
  }
5877
- this.setPoseFromTemplate( null );
5878
5025
 
5879
5026
  }
5880
5027
 
5881
-
5882
5028
  /**
5883
- * Play RPM/Mixamo pose.
5884
- * @param {string|Object} url Pose name | URL to FBX
5885
- * @param {progressfn} [onprogress=null] Callback for progress
5886
- * @param {number} [dur=5] Duration of the pose in seconds
5887
- * @param {number} [ndx=0] Index of the clip
5888
- * @param {number} [scale=0.01] Position scale factor
5029
+ * Cyclic Coordinate Descent (CCD) Inverse Kinematic (IK) algorithm.
5030
+ * Adapted from:
5031
+ * https://github.com/mrdoob/three.js/blob/master/examples/jsm/animation/CCDIKSolver.js
5032
+ * @param {Object} ik IK configuration object
5033
+ * @param {Vector3} [target=null] Target coordinate, if null return to template
5034
+ * @param {Boolean} [relative=false] If true, target is relative to root
5035
+ * @param {number} [d=null] If set, apply in d milliseconds
5889
5036
  */
5890
- async playPose(url, onprogress=null, dur=5, ndx=0, scale=0.01) {
5891
-
5892
- if ( !this.armature ) return;
5037
+ ikSolve(ik, target=null, relative=false, d=null) {
5038
+ const targetVec = new THREE.Vector3();
5039
+ const effectorPos = new THREE.Vector3();
5040
+ const effectorVec = new THREE.Vector3();
5041
+ const linkPos = new THREE.Vector3();
5042
+ const invLinkQ = new THREE.Quaternion();
5043
+ const linkScale = new THREE.Vector3();
5044
+ const axis = new THREE.Vector3();
5045
+ const vector = new THREE.Vector3();
5893
5046
 
5894
- // Check if we already have the pose template ready
5895
- let pose = this.poseTemplates[url];
5896
- if ( !pose ) {
5897
- const item = this.animPoses.find( x => x.url === url+'-'+ndx );
5898
- if ( item ) {
5899
- pose = item.pose;
5900
- }
5047
+ // Reset IK setup positions and rotations
5048
+ const root = this.ikMesh.getObjectByName(ik.root);
5049
+ root.position.setFromMatrixPosition( this.armature.getObjectByName(ik.root).matrixWorld );
5050
+ root.quaternion.setFromRotationMatrix( this.armature.getObjectByName(ik.root).matrixWorld );
5051
+ if ( target && relative ) {
5052
+ target.applyQuaternion(this.armature.quaternion).add( root.position );
5901
5053
  }
5054
+ const effector = this.ikMesh.getObjectByName(ik.effector);
5055
+ const links = ik.links;
5056
+ links.forEach( x => {
5057
+ x.bone = this.ikMesh.getObjectByName(x.link);
5058
+ x.bone.quaternion.copy( this.getPoseTemplateProp(x.link+'.quaternion') );
5059
+ });
5060
+ root.updateMatrixWorld(true);
5061
+ const iterations = ik.iterations || 10;
5902
5062
 
5903
- // If we have the template, use it, otherwise try to load it
5904
- if ( pose ) {
5063
+ // Iterate
5064
+ if ( target ) {
5065
+ for ( let i = 0; i < iterations; i ++ ) {
5066
+ let rotated = false;
5067
+ for ( let j = 0, jl = links.length; j < jl; j++ ) {
5068
+ const bone = links[j].bone;
5069
+ bone.matrixWorld.decompose( linkPos, invLinkQ, linkScale );
5070
+ invLinkQ.invert();
5071
+ effectorPos.setFromMatrixPosition( effector.matrixWorld );
5072
+ effectorVec.subVectors( effectorPos, linkPos );
5073
+ effectorVec.applyQuaternion( invLinkQ );
5074
+ effectorVec.normalize();
5075
+ targetVec.subVectors( target, linkPos );
5076
+ targetVec.applyQuaternion( invLinkQ );
5077
+ targetVec.normalize();
5078
+ let angle = targetVec.dot( effectorVec );
5079
+ if ( angle > 1.0 ) {
5080
+ angle = 1.0;
5081
+ } else if ( angle < - 1.0 ) {
5082
+ angle = - 1.0;
5083
+ }
5084
+ angle = Math.acos( angle );
5085
+ if ( angle < 1e-5 ) continue;
5086
+ if ( links[j].minAngle !== undefined && angle < links[j].minAngle ) {
5087
+ angle = links[j].minAngle;
5088
+ }
5089
+ if ( links[j].maxAngle !== undefined && angle > links[j].maxAngle ) {
5090
+ angle = links[j].maxAngle;
5091
+ }
5092
+ axis.crossVectors( effectorVec, targetVec );
5093
+ axis.normalize();
5094
+ q.setFromAxisAngle( axis, angle );
5095
+ bone.quaternion.multiply( q );
5905
5096
 
5906
- this.poseName = url;
5097
+ // Constraints
5098
+ bone.rotation.setFromVector3( vector.setFromEuler( bone.rotation ).clamp( new THREE.Vector3(
5099
+ links[j].minx !== undefined ? links[j].minx : -Infinity,
5100
+ links[j].miny !== undefined ? links[j].miny : -Infinity,
5101
+ links[j].minz !== undefined ? links[j].minz : -Infinity
5102
+ ), new THREE.Vector3(
5103
+ links[j].maxx !== undefined ? links[j].maxx : Infinity,
5104
+ links[j].maxy !== undefined ? links[j].maxy : Infinity,
5105
+ links[j].maxz !== undefined ? links[j].maxz : Infinity
5106
+ )) );
5907
5107
 
5908
- this.mixer = null;
5909
- let anim = this.animQueue.find( x => x.template.name === 'pose' );
5910
- if ( anim ) {
5911
- anim.ts[0] = this.animClock + (dur * 1000) + 2000;
5108
+ bone.updateMatrixWorld( true );
5109
+ rotated = true;
5110
+ }
5111
+ if ( !rotated ) break;
5912
5112
  }
5913
- this.setPoseFromTemplate( pose );
5914
-
5915
- } else {
5916
-
5917
- // Load animation
5918
- const loader = new FBXLoader();
5919
-
5920
- let fbx = await loader.loadAsync( url, onprogress );
5113
+ }
5921
5114
 
5922
- if ( fbx && fbx.animations && fbx.animations[ndx] ) {
5923
- let anim = fbx.animations[ndx];
5115
+ // Apply
5116
+ if ( d ) {
5117
+ links.forEach( x => {
5118
+ this.poseTarget.props[x.link+".quaternion"].copy( x.bone.quaternion );
5119
+ this.poseTarget.props[x.link+".quaternion"].t = this.animClock;
5120
+ this.poseTarget.props[x.link+".quaternion"].d = d;
5121
+ });
5122
+ }
5123
+ }
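One CCD iteration in isolation, as a sketch of what the inner loop above does per link (assumes three.js; the per-link angle and axis constraints are omitted):

import * as THREE from 'three';

// One Cyclic Coordinate Descent step: rotate `bone` so the effector
// swings toward the target, working in the bone's local frame.
function ccdStep(bone, effectorWorldPos, targetWorldPos) {
  const linkPos = new THREE.Vector3();
  const invLinkQ = new THREE.Quaternion();
  const linkScale = new THREE.Vector3();
  bone.matrixWorld.decompose(linkPos, invLinkQ, linkScale);
  invLinkQ.invert();

  // Directions bone->effector and bone->target, expressed in local space
  const toEffector = effectorWorldPos.clone().sub(linkPos).applyQuaternion(invLinkQ).normalize();
  const toTarget = targetWorldPos.clone().sub(linkPos).applyQuaternion(invLinkQ).normalize();

  // angle = acos(dot), with the dot clamped into [-1,1] to survive float error
  const angle = Math.acos(THREE.MathUtils.clamp(toTarget.dot(toEffector), -1, 1));
  if (angle < 1e-5) return false; // already aligned

  const axis = new THREE.Vector3().crossVectors(toEffector, toTarget).normalize();
  bone.quaternion.multiply(new THREE.Quaternion().setFromAxisAngle(axis, angle));
  bone.updateMatrixWorld(true);
  return true;
}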
5924
5124
 
5925
- // Create a pose
5926
- const props = {};
5927
- anim.tracks.forEach( t => {
5125
+ /**
5126
+ * Initialize FBX animation loader
5127
+ */
5128
+ async initializeFBXAnimationLoader() {
5129
+ try {
5130
+ // Dynamic import to avoid loading issues
5131
+ const { FBXAnimationLoader } = await import('./fbxAnimationLoader.js');
5132
+ this.fbxAnimationLoader = new FBXAnimationLoader(this.armature);
5133
+ console.log('FBX Animation Loader initialized');
5134
+ } catch (error) {
5135
+ console.warn('FBX Animation Loader not available:', error);
5136
+ this.fbxAnimationLoader = null;
5137
+ }
5138
+ }
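A generic sketch of the optional-dependency pattern used here: a dynamic import with a null fallback so callers can feature-detect (helper name illustrative):

// Dynamic import keeps the module out of the main bundle and lets the
// feature degrade gracefully when the file is missing.
async function loadOptionalModule(path) {
  try {
    return await import(path);
  } catch (e) {
    console.warn(`Optional module ${path} not available:`, e);
    return null; // callers must null-check before use
  }
}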
5928
5139
 
5929
- // Rename and scale Mixamo tracks
5930
- t.name = t.name.replaceAll('mixamorig','');
5931
- const ids = t.name.split('.');
5932
- if ( ids[1] === 'position' ) {
5933
- props[t.name] = new THREE.Vector3( t.values[0] * scale, t.values[1] * scale, t.values[2] * scale);
5934
- } else if ( ids[1] === 'quaternion' ) {
5935
- props[t.name] = new THREE.Quaternion( t.values[0], t.values[1], t.values[2], t.values[3] );
5936
- } else if ( ids[1] === 'rotation' ) {
5937
- props[ids[0]+".quaternion"] = new THREE.Quaternion().setFromEuler(new THREE.Euler( t.values[0], t.values[1], t.values[2],'XYZ' )).normalize();
5938
- }
5939
- });
5140
+ /**
5141
+ * Set body movement type.
5142
+ * @param {string} movement Movement type (e.g. idle, walking, prancing, gesturing, dancing, excited).
5143
+ */
5144
+ setBodyMovement(movement) {
5145
+ this.bodyMovement = movement;
5146
+
5147
+ // Only set avatar property if avatar exists
5148
+ if (this.avatar) {
5149
+ this.avatar.bodyMovement = movement;
5150
+ }
5151
+
5152
+ console.log('Body movement set to:', movement);
5153
+
5154
+ // Respect the current showFullAvatar setting instead of forcing it to true
5155
+ // Only unlock position when returning to idle
5156
+ if (movement === 'idle') {
5157
+ // Unlock position when returning to idle
5158
+ this.unlockAvatarPosition();
5159
+ }
5160
+ // Note: We no longer force showFullAvatar to true for body movements
5161
+ // The avatar will use whatever showFullAvatar value was set by the user
5162
+
5163
+ // Apply body movement animation
5164
+ this.applyBodyMovementAnimation();
5165
+ }
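Usage sketch, assuming a TalkingHead instance `head` with a loaded avatar:

// Start a dancing movement, then return to idle; per the code above,
// returning to idle also unlocks the avatar position.
head.setBodyMovement('dancing');
setTimeout(() => head.setBodyMovement('idle'), 5000);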
5940
5166
 
5941
- // Add to pose
5942
- const newPose = { props: props };
5943
- if ( props['Hips.position'] ) {
5944
- if ( props['Hips.position'].y < 0.5 ) {
5945
- newPose.lying = true;
5946
- } else {
5947
- newPose.standing = true;
5948
- }
5167
+ /**
5168
+ * Apply body movement animation based on current movement type.
5169
+ */
5170
+ async applyBodyMovementAnimation() {
5171
+ // Check if avatar is ready
5172
+ if (!this.armature || !this.animQueue) {
5173
+ console.log('Avatar not ready for body movement animations');
5174
+ return;
5175
+ }
5176
+
5177
+ console.log('Avatar is running:', this.isRunning);
5178
+ console.log('Animation queue exists:', !!this.animQueue);
5179
+
5180
+ // Remove existing body movement animations
5181
+ const beforeLength = this.animQueue.length;
5182
+ this.animQueue = this.animQueue.filter(anim => !anim.template.name.startsWith('bodyMovement'));
5183
+ const afterLength = this.animQueue.length;
5184
+ console.log(`Filtered animation queue: ${beforeLength} -> ${afterLength} animations`);
5185
+
5186
+ if (this.bodyMovement === 'idle') {
5187
+ // Stop FBX animations if any
5188
+ if (this.fbxAnimationLoader) {
5189
+ this.fbxAnimationLoader.stopCurrentAnimation();
5190
+ }
5191
+ return; // No body movement for idle
5192
+ }
5193
+
5194
+ // Try to use FBX animations first
5195
+ if (this.fbxAnimationLoader) {
5196
+ try {
5197
+ await this.fbxAnimationLoader.playGestureAnimation(this.bodyMovement, this.movementIntensity);
5198
+ console.log('Applied FBX body movement animation:', this.bodyMovement);
5199
+ return; // Successfully applied FBX animation
5200
+ } catch (error) {
5201
+ console.warn('FBX animation failed, falling back to code animation:', error);
5202
+ }
5203
+ }
5204
+
5205
+ // Fallback to code-based animations
5206
+ const movementAnim = this.createBodyMovementAnimation(this.bodyMovement);
5207
+ console.log('Created movement animation:', movementAnim);
5208
+ if (movementAnim) {
5209
+ try {
5210
+ // Use animFactory to create proper animation object
5211
+ const animObj = this.animFactory(movementAnim, true); // true for looping
5212
+
5213
+ // Validate the animation object before adding
5214
+ if (animObj && animObj.ts && animObj.ts.length > 0) {
5215
+ this.animQueue.push(animObj);
5216
+ console.log('Applied code-based body movement animation:', this.bodyMovement);
5217
+ console.log('Animation queue length:', this.animQueue.length);
5218
+ console.log('Animation object:', animObj);
5219
+ } else {
5220
+ console.error('Invalid animation object created for:', this.bodyMovement);
5221
+ console.error('Animation object:', animObj);
5949
5222
  }
5950
- this.animPoses.push({
5951
- url: url+'-'+ndx,
5952
- pose: newPose
5953
- });
5954
-
5955
- // Play
5956
- this.playPose(url, onprogress, dur, ndx, scale);
5957
-
5958
- } else {
5959
- const msg = 'Pose ' + url + ' (ndx=' + ndx + ') not found';
5960
- console.error(msg);
5223
+ } catch (error) {
5224
+ console.error('Error creating body movement animation:', error);
5961
5225
  }
5962
5226
  }
5963
5227
  }
5964
5228
 
5965
5229
  /**
5966
- * Stop the pose. (Functionality is the same as in stopAnimation.)
5967
- */
5968
- stopPose() {
5969
- this.stopAnimation();
5230
+ * Lock avatar position to prevent movement during animations.
5231
+ */
5232
+ lockAvatarPosition() {
5233
+ if (!this.armature) {
5234
+ console.warn('Cannot lock position: armature not available');
5235
+ return;
5236
+ }
5237
+
5238
+ // Store the original position if not already stored
5239
+ if (!this.originalPosition) {
5240
+ this.originalPosition = {
5241
+ x: this.armature.position.x,
5242
+ y: this.armature.position.y,
5243
+ z: this.armature.position.z
5244
+ };
5245
+ console.log('Original position stored:', this.originalPosition);
5246
+ }
5247
+
5248
+ // Lock the avatar at its CURRENT position (don't move it)
5249
+ this.lockedPosition = {
5250
+ x: this.armature.position.x,
5251
+ y: this.armature.position.y,
5252
+ z: this.armature.position.z
5253
+ };
5254
+
5255
+ console.log('Avatar position locked at current position:', this.lockedPosition);
5970
5256
  }
5971
5257
 
5972
5258
  /**
5973
- * Play a gesture, which is either a hand gesture, an emoji animation or their
5974
- * combination.
5975
- * @param {string} name Gesture name
5976
- * @param {number} [dur=3] Duration of the gesture in seconds
5977
- * @param {boolean} [mirror=false] Mirror gesture
5978
- * @param {number} [ms=1000] Transition time in milliseconds
5979
- */
5980
- playGesture(name, dur=3, mirror=false, ms=1000) {
5981
-
5982
- if ( !this.armature ) return;
5983
-
5984
- // Hand gesture, if any
5985
- let g = this.gestureTemplates[name];
5986
- if ( g ) {
5987
-
5988
- // New gesture always overrides the existing one
5989
- if ( this.gestureTimeout ) {
5990
- clearTimeout( this.gestureTimeout );
5991
- this.gestureTimeout = null;
5992
- }
5993
-
5994
- // Stop talking hands animation
5995
- let ndx = this.animQueue.findIndex( y => y.template.name === "talkinghands" );
5996
- if ( ndx !== -1 ) {
5997
- this.animQueue[ndx].ts = this.animQueue[ndx].ts.map( x => 0 );
5998
- }
5259
+ * Unlock avatar position and restore original position.
5260
+ */
5261
+ unlockAvatarPosition() {
5262
+ if (this.armature && this.originalPosition) {
5263
+ // Restore avatar to its original position before locking
5264
+ this.armature.position.set(
5265
+ this.originalPosition.x,
5266
+ this.originalPosition.y,
5267
+ this.originalPosition.z
5268
+ );
5269
+ console.log('Avatar position restored to original:', this.originalPosition);
5270
+ } else if (this.armature) {
5271
+ // Fallback: reset to center if no original position was stored
5272
+ this.armature.position.set(0, 0, 0);
5273
+ console.log('Avatar position reset to center (0,0,0)');
5274
+ }
5275
+ this.lockedPosition = null;
5276
+ this.originalPosition = null; // Clear original position after unlock
5277
+ console.log('Avatar position unlocked');
5278
+ }
5999
5279
 
6000
- // Set gesture
6001
- this.gesture = this.propsToThreeObjects( g );
6002
- if ( mirror ) {
6003
- this.gesture = this.mirrorPose( this.gesture );
6004
- }
6005
- if ( name === "namaste" && this.avatar.body === 'M' ) {
6006
- // Work-a-round for male model so that the hands meet
6007
- this.gesture["RightArm.quaternion"].rotateTowards( new THREE.Quaternion(0,1,0,0), -0.25);
6008
- this.gesture["LeftArm.quaternion"].rotateTowards( new THREE.Quaternion(0,1,0,0), -0.25);
6009
- }
5280
+ /**
5281
+ * Ensure avatar stays at locked position.
5282
+ */
5283
+ maintainLockedPosition() {
5284
+ if (this.lockedPosition && this.armature) {
5285
+ // Enforce the locked position - keep avatar exactly where it was locked
5286
+ // This prevents FBX animations from moving the avatar
5287
+ this.armature.position.set(
5288
+ this.lockedPosition.x,
5289
+ this.lockedPosition.y,
5290
+ this.lockedPosition.z
5291
+ );
5292
+ }
5293
+ }
6010
5294
 
6011
- // Apply to target
6012
- for( let [p,val] of Object.entries(this.gesture) ) {
6013
- val.t = this.animClock;
6014
- val.d = ms;
6015
- if ( this.poseTarget.props.hasOwnProperty(p) ) {
6016
- this.poseTarget.props[p].copy(val);
6017
- this.poseTarget.props[p].t = this.animClock;
6018
- this.poseTarget.props[p].d = ms;
5295
+ /**
5296
+ * Create body movement animation.
5297
+ * @param {string} movementType Movement type.
5298
+ * @returns {Object} Animation object.
5299
+ */
5300
+ createBodyMovementAnimation(movementType) {
5301
+ const intensity = this.movementIntensity || 0.5;
5302
+
5303
+ const movementAnimations = {
5304
+ walking: {
5305
+ name: 'bodyMovement_walking',
5306
+ delay: [500, 2000],
5307
+ dt: [800, 1200],
5308
+ vs: {
5309
+ bodyRotateY: [-0.1 * intensity, 0.1 * intensity, 0],
5310
+ bodyRotateZ: [-0.05 * intensity, 0.05 * intensity, 0],
5311
+ bodyRotateX: [-0.02 * intensity, 0.02 * intensity, 0]
5312
+ }
5313
+ },
5314
+ prancing: {
5315
+ name: 'bodyMovement_prancing',
5316
+ delay: [300, 1000],
5317
+ dt: [400, 800],
5318
+ vs: {
5319
+ bodyRotateY: [-0.15 * intensity, 0.15 * intensity, 0],
5320
+ bodyRotateZ: [-0.08 * intensity, 0.08 * intensity, 0],
5321
+ bodyRotateX: [-0.05 * intensity, 0.05 * intensity, 0]
5322
+ }
5323
+ },
5324
+ gesturing: {
5325
+ name: 'bodyMovement_gesturing',
5326
+ delay: [400, 1500],
5327
+ dt: [600, 1000],
5328
+ vs: {
5329
+ bodyRotateY: [-0.08 * intensity, 0.08 * intensity, 0],
5330
+ bodyRotateZ: [-0.03 * intensity, 0.03 * intensity, 0]
5331
+ }
5332
+ },
5333
+ dancing: {
5334
+ name: 'bodyMovement_dancing',
5335
+ delay: [200, 600],
5336
+ dt: [400, 800],
5337
+ vs: {
5338
+ bodyRotateY: [-0.25 * intensity, 0.25 * intensity, 0],
5339
+ bodyRotateZ: [-0.15 * intensity, 0.15 * intensity, 0],
5340
+ bodyRotateX: [-0.1 * intensity, 0.1 * intensity, 0]
5341
+ }
5342
+ },
5343
+ dancing2: {
5344
+ name: 'bodyMovement_dancing2',
5345
+ delay: [150, 500],
5346
+ dt: [300, 700],
5347
+ vs: {
5348
+ bodyRotateY: [-0.3 * intensity, 0.3 * intensity, 0],
5349
+ bodyRotateZ: [-0.2 * intensity, 0.2 * intensity, 0],
5350
+ bodyRotateX: [-0.12 * intensity, 0.12 * intensity, 0]
5351
+ }
5352
+ },
5353
+ dancing3: {
5354
+ name: 'bodyMovement_dancing3',
5355
+ delay: [100, 400],
5356
+ dt: [200, 600],
5357
+ vs: {
5358
+ bodyRotateY: [-0.35 * intensity, 0.35 * intensity, 0],
5359
+ bodyRotateZ: [-0.25 * intensity, 0.25 * intensity, 0],
5360
+ bodyRotateX: [-0.15 * intensity, 0.15 * intensity, 0]
5361
+ }
5362
+ },
5363
+ excited: {
5364
+ name: 'bodyMovement_excited',
5365
+ delay: [200, 600],
5366
+ dt: [300, 700],
5367
+ vs: {
5368
+ bodyRotateY: [-0.12 * intensity, 0.12 * intensity, 0],
5369
+ bodyRotateZ: [-0.06 * intensity, 0.06 * intensity, 0],
5370
+ bodyRotateX: [-0.04 * intensity, 0.04 * intensity, 0]
5371
+ }
5372
+ },
5373
+ happy: {
5374
+ name: 'bodyMovement_happy',
5375
+ delay: [300, 800],
5376
+ dt: [500, 1000],
5377
+ vs: {
5378
+ bodyRotateY: [-0.08 * intensity, 0.08 * intensity, 0],
5379
+ bodyRotateZ: [-0.04 * intensity, 0.04 * intensity, 0],
5380
+ bodyRotateX: [-0.02 * intensity, 0.02 * intensity, 0]
5381
+ }
5382
+ },
5383
+ surprised: {
5384
+ name: 'bodyMovement_surprised',
5385
+ delay: [100, 300],
5386
+ dt: [200, 500],
5387
+ vs: {
5388
+ bodyRotateY: [-0.05 * intensity, 0.05 * intensity, 0],
5389
+ bodyRotateZ: [-0.03 * intensity, 0.03 * intensity, 0],
5390
+ bodyRotateX: [-0.01 * intensity, 0.01 * intensity, 0]
5391
+ }
5392
+ },
5393
+ thinking: {
5394
+ name: 'bodyMovement_thinking',
5395
+ delay: [800, 2000],
5396
+ dt: [1000, 1500],
5397
+ vs: {
5398
+ bodyRotateY: [-0.06 * intensity, 0.06 * intensity, 0],
5399
+ bodyRotateZ: [-0.03 * intensity, 0.03 * intensity, 0],
5400
+ bodyRotateX: [-0.02 * intensity, 0.02 * intensity, 0]
5401
+ }
5402
+ },
5403
+ nodding: {
5404
+ name: 'bodyMovement_nodding',
5405
+ delay: [400, 800],
5406
+ dt: [300, 600],
5407
+ vs: {
5408
+ bodyRotateX: [-0.1 * intensity, 0.1 * intensity, 0],
5409
+ bodyRotateY: [-0.02 * intensity, 0.02 * intensity, 0]
5410
+ }
5411
+ },
5412
+ shaking: {
5413
+ name: 'bodyMovement_shaking',
5414
+ delay: [200, 400],
5415
+ dt: [150, 300],
5416
+ vs: {
5417
+ bodyRotateY: [-0.15 * intensity, 0.15 * intensity, 0],
5418
+ bodyRotateZ: [-0.05 * intensity, 0.05 * intensity, 0]
6019
5419
  }
6020
- }
6021
-
6022
- // Timer
6023
- if ( dur && Number.isFinite(dur) ) {
6024
- this.gestureTimeout = setTimeout( this.stopGesture.bind(this,ms), 1000 * dur);
6025
- }
6026
- }
6027
-
6028
- // Animated emoji, if any
6029
- let em = this.animEmojis[name];
6030
- if ( em ) {
6031
-
6032
- // Follow link
6033
- if ( em && em.link ) {
6034
- em = this.animEmojis[em.link];
6035
- }
6036
-
6037
- if ( em ) {
6038
- // Look at the camera for 500 ms
6039
- this.lookAtCamera(500);
6040
-
6041
- // Create animation and tag as gesture
6042
- const anim = this.animFactory( em );
6043
- anim.gesture = true;
6044
-
6045
- // Rescale duration
6046
- if ( dur && Number.isFinite(dur) ) {
6047
- const first = anim.ts[0];
6048
- const last = anim.ts[ anim.ts.length -1 ];
6049
- const total = last - first;
6050
- const excess = (dur * 1000) - total;
6051
-
6052
- // If longer, increase longer parts; if shorter, scale everything
6053
- if ( excess > 0 ) {
6054
- const dt = [];
6055
- for( let i=1; i<anim.ts.length; i++ ) dt.push( anim.ts[i] - anim.ts[i-1] );
6056
- const rescale = em.template?.rescale || dt.map( x => x / total );
6057
- const excess = dur * 1000 - total;
6058
- anim.ts = anim.ts.map( (x,i,arr) => {
6059
- return (i===0) ? first : (arr[i-1] + dt[i-1] + rescale[i-1] * excess);
6060
- });
6061
- } else {
6062
- const scale = (dur * 1000) / total;
6063
- anim.ts = anim.ts.map( x => first + scale * (x - first) );
6064
- }
5420
+ },
5421
+ celebration: {
5422
+ name: 'bodyMovement_celebration',
5423
+ delay: [100, 300],
5424
+ dt: [200, 500],
5425
+ vs: {
5426
+ bodyRotateY: [-0.2 * intensity, 0.2 * intensity, 0],
5427
+ bodyRotateZ: [-0.1 * intensity, 0.1 * intensity, 0],
5428
+ bodyRotateX: [-0.08 * intensity, 0.08 * intensity, 0]
6065
5429
  }
6066
-
6067
- this.animQueue.push( anim );
6068
- }
6069
- }
6070
-
6071
- }
6072
-
6073
- /**
6074
- * Stop the gesture.
6075
- * @param {number} [ms=1000] Transition time in milliseconds
6076
- */
6077
- stopGesture(ms=1000) {
6078
-
6079
- // Stop gesture timer
6080
- if ( this.gestureTimeout ) {
6081
- clearTimeout( this.gestureTimeout );
6082
- this.gestureTimeout = null;
6083
- }
6084
-
6085
- // Stop hand gesture, if any
6086
- if ( this.gesture ) {
6087
- const gs = Object.entries(this.gesture);
6088
- this.gesture = null;
6089
- for( const [p,val] of gs ) {
6090
- if ( this.poseTarget.props.hasOwnProperty(p) ) {
6091
- this.poseTarget.props[p].copy( this.getPoseTemplateProp(p) );
6092
- this.poseTarget.props[p].t = this.animClock;
6093
- this.poseTarget.props[p].d = ms;
5430
+ },
5431
+ energetic: {
5432
+ name: 'bodyMovement_energetic',
5433
+ delay: [150, 400],
5434
+ dt: [250, 500],
5435
+ vs: {
5436
+ bodyRotateY: [-0.18 * intensity, 0.18 * intensity, 0],
5437
+ bodyRotateZ: [-0.12 * intensity, 0.12 * intensity, 0],
5438
+ bodyRotateX: [-0.08 * intensity, 0.08 * intensity, 0]
5439
+ }
5440
+ },
5441
+ swaying: {
5442
+ name: 'bodyMovement_swaying',
5443
+ delay: [600, 1200],
5444
+ dt: [800, 1000],
5445
+ vs: {
5446
+ bodyRotateY: [-0.1 * intensity, 0.1 * intensity, 0],
5447
+ bodyRotateZ: [-0.05 * intensity, 0.05 * intensity, 0]
5448
+ }
5449
+ },
5450
+ bouncing: {
5451
+ name: 'bodyMovement_bouncing',
5452
+ delay: [300, 600],
5453
+ dt: [400, 700],
5454
+ vs: {
5455
+ bodyRotateY: [-0.05 * intensity, 0.05 * intensity, 0]
6094
5456
  }
6095
5457
  }
5458
+ };
5459
+
5460
+ // Handle dance variations
5461
+ if (movementType === 'dancing') {
5462
+ const danceVariations = ['dancing', 'dancing2', 'dancing3'];
5463
+ const randomDance = danceVariations[Math.floor(Math.random() * danceVariations.length)];
5464
+ return movementAnimations[randomDance] || movementAnimations['dancing'];
6096
5465
  }
6097
-
6098
- // Stop animated emoji gesture, if any
6099
- let i = this.animQueue.findIndex( y => y.gesture );
6100
- if ( i !== -1 ) {
6101
- this.animQueue.splice(i, 1);
6102
- }
6103
-
5466
+
5467
+ return movementAnimations[movementType] || null;
6104
5468
  }
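A sketch of how one of these templates is consumed downstream (animFactory and animQueue are the internal members used earlier in this diff; the values are illustrative):

// A movement template is just delay/dt ranges plus oscillation targets;
// animFactory turns it into a timed animation object for the queue.
const walkingTemplate = {
  name: 'bodyMovement_walking',
  delay: [500, 2000],                   // ms range before each cycle
  dt: [800, 1200],                      // ms range per keyframe step
  vs: { bodyRotateY: [-0.05, 0.05, 0] } // radians, already scaled by intensity
};
const animObj = head.animFactory(walkingTemplate, true); // true = looping
if (animObj && animObj.ts && animObj.ts.length > 0) {
  head.animQueue.push(animObj);
}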
6105
5469
 
6106
5470
  /**
6107
- * Cyclic Coordinate Descent (CCD) Inverse Kinematic (IK) algorithm.
6108
- * Adapted from:
6109
- * https://github.com/mrdoob/three.js/blob/master/examples/jsm/animation/CCDIKSolver.js
6110
- * @param {Object} ik IK configuration object
6111
- * @param {Vector3} [target=null] Target coordinate, if null return to template
6112
- * @param {Boolean} [relative=false] If true, target is relative to root
6113
- * @param {numeric} [d=null] If set, apply in d milliseconds
6114
- */
6115
- ikSolve(ik, target=null, relative=false, d=null) {
6116
- const targetVec = new THREE.Vector3();
6117
- const effectorPos = new THREE.Vector3();
6118
- const effectorVec = new THREE.Vector3();
6119
- const linkPos = new THREE.Vector3();
6120
- const invLinkQ = new THREE.Quaternion();
6121
- const linkScale = new THREE.Vector3();
6122
- const axis = new THREE.Vector3();
6123
- const vector = new THREE.Vector3();
6124
-
6125
- // Reset IK setup positions and rotations
6126
- const root = this.ikMesh.getObjectByName(ik.root);
6127
- root.position.setFromMatrixPosition( this.armature.getObjectByName(ik.root).matrixWorld );
6128
- root.quaternion.setFromRotationMatrix( this.armature.getObjectByName(ik.root).matrixWorld );
6129
- if ( target && relative ) {
6130
- target.applyQuaternion(this.armature.quaternion).add( root.position );
5471
+ * Set movement intensity.
5472
+ * @param {number} intensity Movement intensity (0-1).
5473
+ */
5474
+ setMovementIntensity(intensity) {
5475
+ this.movementIntensity = Math.max(0, Math.min(1, intensity));
5476
+
5477
+ // Only set avatar property if avatar exists
5478
+ if (this.avatar) {
5479
+ this.avatar.movementIntensity = this.movementIntensity;
6131
5480
  }
6132
- const effector = this.ikMesh.getObjectByName(ik.effector);
6133
- const links = ik.links;
6134
- links.forEach( x => {
6135
- x.bone = this.ikMesh.getObjectByName(x.link);
6136
- x.bone.quaternion.copy( this.getPoseTemplateProp(x.link+'.quaternion') );
6137
- });
6138
- root.updateMatrixWorld(true);
6139
- const iterations = ik.iterations || 10;
6140
-
6141
- // Iterate
6142
- if ( target ) {
6143
- for ( let i = 0; i < iterations; i ++ ) {
6144
- let rotated = false;
6145
- for ( let j = 0, jl = links.length; j < jl; j++ ) {
6146
- const bone = links[j].bone;
6147
- bone.matrixWorld.decompose( linkPos, invLinkQ, linkScale );
6148
- invLinkQ.invert();
6149
- effectorPos.setFromMatrixPosition( effector.matrixWorld );
6150
- effectorVec.subVectors( effectorPos, linkPos );
6151
- effectorVec.applyQuaternion( invLinkQ );
6152
- effectorVec.normalize();
6153
- targetVec.subVectors( target, linkPos );
6154
- targetVec.applyQuaternion( invLinkQ );
6155
- targetVec.normalize();
6156
- let angle = targetVec.dot( effectorVec );
6157
- if ( angle > 1.0 ) {
6158
- angle = 1.0;
6159
- } else if ( angle < - 1.0 ) {
6160
- angle = - 1.0;
6161
- }
6162
- angle = Math.acos( angle );
6163
- if ( angle < 1e-5 ) continue;
6164
- if ( links[j].minAngle !== undefined && angle < links[j].minAngle ) {
6165
- angle = links[j].minAngle;
6166
- }
6167
- if ( links[j].maxAngle !== undefined && angle > links[j].maxAngle ) {
6168
- angle = links[j].maxAngle;
6169
- }
6170
- axis.crossVectors( effectorVec, targetVec );
6171
- axis.normalize();
6172
- q.setFromAxisAngle( axis, angle );
6173
- bone.quaternion.multiply( q );
6174
-
6175
- // Constraints
6176
- bone.rotation.setFromVector3( vector.setFromEuler( bone.rotation ).clamp( new THREE.Vector3(
6177
- links[j].minx !== undefined ? links[j].minx : -Infinity,
6178
- links[j].miny !== undefined ? links[j].miny : -Infinity,
6179
- links[j].minz !== undefined ? links[j].minz : -Infinity
6180
- ), new THREE.Vector3(
6181
- links[j].maxx !== undefined ? links[j].maxx : Infinity,
6182
- links[j].maxy !== undefined ? links[j].maxy : Infinity,
6183
- links[j].maxz !== undefined ? links[j].maxz : Infinity
6184
- )) );
6185
-
6186
- bone.updateMatrixWorld( true );
6187
- rotated = true;
6188
- }
6189
- if ( !rotated ) break;
6190
- }
5481
+
5482
+ console.log('Movement intensity set to:', this.movementIntensity);
5483
+
5484
+ // Update FBX animation intensity if available
5485
+ if (this.fbxAnimationLoader) {
5486
+ this.fbxAnimationLoader.setIntensity(this.movementIntensity);
6191
5487
  }
6192
-
6193
- // Apply
6194
- if ( d ) {
6195
- links.forEach( x => {
6196
- this.poseTarget.props[x.link+".quaternion"].copy( x.bone.quaternion );
6197
- this.poseTarget.props[x.link+".quaternion"].t = this.animClock;
6198
- this.poseTarget.props[x.link+".quaternion"].d = d;
6199
- });
5488
+
5489
+ // Reapply body movement animation with new intensity
5490
+ if (this.bodyMovement && this.bodyMovement !== 'idle') {
5491
+ this.applyBodyMovementAnimation();
6200
5492
  }
6201
5493
  }
6202
5494
 
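Taken together, the hunk above replaces the removed CCD IK solver with the new body-movement system: a movementAnimations map whose energetic, swaying, and bouncing templates scale their bodyRotate keyframe amplitudes by the surrounding intensity value, a random pick among the dancing/dancing2/dancing3 variations, and a setMovementIntensity() setter that clamps its argument to [0, 1], mirrors it onto the avatar and the FBX loader, and reapplies the active movement. Below is a minimal sketch of that selection pattern, not the package's code; the enclosing function name is an assumption, since the hunk shows only its body, and the map is reduced to two entries:

    // Minimal sketch of the template-selection pattern in the hunk above.
    // "getBodyMovementAnimation" is an assumed name; "dancing" here is a
    // hypothetical base entry standing in for the full map.
    function getBodyMovementAnimation(movementType, intensity = 0.5) {
      // delay/dt are millisecond ranges; vs holds keyframe triples whose
      // amplitudes are baked in, scaled by the current intensity.
      const movementAnimations = {
        energetic: {
          name: 'bodyMovement_energetic',
          delay: [150, 400],
          dt: [250, 500],
          vs: {
            bodyRotateY: [-0.18 * intensity, 0.18 * intensity, 0],
            bodyRotateZ: [-0.12 * intensity, 0.12 * intensity, 0]
          }
        },
        dancing: {
          name: 'bodyMovement_dancing',
          delay: [300, 600],
          dt: [400, 700],
          vs: { bodyRotateY: [-0.1 * intensity, 0.1 * intensity, 0] }
        }
      };

      // 'dancing' picks a random variation; in this reduced map only the
      // base template exists, so the || fallback always lands on it.
      if (movementType === 'dancing') {
        const variations = ['dancing', 'dancing2', 'dancing3'];
        const pick = variations[Math.floor(Math.random() * variations.length)];
        return movementAnimations[pick] || movementAnimations['dancing'];
      }
      return movementAnimations[movementType] || null;
    }

    const anim = getBodyMovementAnimation('energetic', 0.5);
    console.log(anim.vs.bodyRotateY); // [-0.09, 0.09, 0]

Because the amplitudes are baked in when the template is built, changing the intensity later requires rebuilding it, which is why setMovementIntensity() calls applyBodyMovementAnimation() whenever a movement other than 'idle' is active.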
@@ -6205,11 +5497,36 @@ class TalkingHead {
6205
5497
  */
6206
5498
  dispose() {
6207
5499
 
6208
- // Stop animation first to prevent render calls on disposed renderer
6209
- this.isRunning = false;
5500
+ // Stop animation, clear speech queue, stop stream
6210
5501
  this.stop();
6211
5502
  this.stopSpeaking();
6212
5503
  this.streamStop();
5504
+ this.stopAnimation();
5505
+
5506
+ // Cancel animation frame to prevent potential memory leak
5507
+ if (this._raf !== null) {
5508
+ cancelAnimationFrame(this._raf);
5509
+ this._raf = null;
5510
+ }
5511
+
5512
+ // Stop & disconnect buffer sources
5513
+ ['audioSpeechSource', 'audioBackgroundSource'].forEach(key => {
5514
+ const node = this[key];
5515
+ if (node) {
5516
+ try { node.stop?.(); } catch(error) {}
5517
+ node.disconnect();
5518
+ node.onended = null; // remove closure references
5519
+ }
5520
+ });
5521
+
5522
+ // Disconnect gain nodes & analyser
5523
+ ['audioBackgroundGainNode', 'audioSpeechGainNode',
5524
+ 'audioStreamGainNode', 'audioAnalyzerNode'].forEach(key => {
5525
+ const node = this[key];
5526
+ if (node) {
5527
+ node.disconnect();
5528
+ }
5529
+ });
6213
5530
 
6214
5531
  // Dispose Three.JS objects
6215
5532
  if ( this.isAvatarOnly ) {
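Two notes on the widened dispose() above. Cancelling the frame id stored in this._raf guarantees that no queued render callback fires against a disposed renderer. The audio teardown then follows the standard Web Audio order: AudioBufferSourceNode.stop() throws an InvalidStateError if the source was never started (hence the try/catch), disconnect() detaches each node from the graph, and nulling onended releases closure references that would otherwise keep the node reachable. A condensed sketch of the same pattern; unlike the hunk, it also clears the owner's properties explicitly:

    // Sketch of the Web Audio teardown order used in dispose() above.
    function disposeAudioNodes(owner) {
      // Buffer sources: stop() throws if the source never started.
      for (const key of ['audioSpeechSource', 'audioBackgroundSource']) {
        const node = owner[key];
        if (node) {
          try { node.stop?.(); } catch (error) { /* never started: ignore */ }
          node.disconnect();
          node.onended = null; // drop closure references
          owner[key] = null;   // extra step, not in the hunk
        }
      }
      // Gain and analyser nodes hold no buffers; detaching is enough.
      for (const key of ['audioBackgroundGainNode', 'audioSpeechGainNode',
                         'audioStreamGainNode', 'audioAnalyzerNode']) {
        owner[key]?.disconnect();
        owner[key] = null;
      }
    }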
@@ -6222,19 +5539,35 @@ class TalkingHead {
6222
5539
  } else {
6223
5540
  this.clearThree(this.scene);
6224
5541
  this.resizeobserver.disconnect();
5542
+ this.resizeobserver = null;
6225
5543
 
6226
- // Dispose WebGL renderer
6227
5544
  if ( this.renderer ) {
6228
5545
  this.renderer.dispose();
6229
- if ( this.renderer.domElement && this.renderer.domElement.parentNode ) {
6230
- this.renderer.domElement.parentNode.removeChild(this.renderer.domElement);
6231
- }
5546
+ const gl = this.renderer.getContext();
5547
+ gl.getExtension('WEBGL_lose_context')?.loseContext();
5548
+ this.renderer.domElement?.remove();
5549
+ this.renderer.domElement = null;
6232
5550
  this.renderer = null;
6233
5551
  }
5552
+
5553
+ if ( this.controls ) {
5554
+ this.controls.dispose();
5555
+ this.controls = null;
5556
+ }
6234
5557
  }
5558
+
6235
5559
  this.clearThree( this.ikMesh );
6236
5560
  this.dynamicbones.dispose();
6237
5561
 
5562
+ // Clean up FBX animation loader
5563
+ if (this.fbxAnimationLoader) {
5564
+ this.fbxAnimationLoader.stopCurrentAnimation();
5565
+ this.fbxAnimationLoader = null;
5566
+ }
5567
+
5568
+ // DOM
5569
+ this.nodeAvatar = null;
5570
+
6238
5571
  }
6239
5572
 
6240
5573
  }
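A final note on the renderer teardown in the last hunk: WebGLRenderer.dispose() releases the GPU resources three.js tracks, but the underlying WebGL context stays alive until the canvas is garbage-collected, and browsers cap the number of live contexts per page, so forcing release through the WEBGL_lose_context extension keeps an app that repeatedly creates and destroys avatars under that cap; controls.dispose() likewise removes the event listeners the camera controls attached. A small sketch showing how the forced release can be observed; renderer stands in for any three.js WebGLRenderer:

    // Observe the context release performed by the dispose() code above.
    const canvas = renderer.domElement;
    canvas.addEventListener('webglcontextlost', () => {
      console.log('WebGL context released');
    });

    renderer.dispose(); // free three.js-managed GPU objects
    const gl = renderer.getContext();
    // The extension may be unavailable, hence the optional chaining,
    // matching the hunk above; loseContext() fires webglcontextlost.
    gl.getExtension('WEBGL_lose_context')?.loseContext();
    canvas.remove(); // detach the canvas from the DOM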