@sage-rsc/talking-head-react 1.0.49 → 1.0.51

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@sage-rsc/talking-head-react",
-  "version": "1.0.49",
+  "version": "1.0.51",
   "description": "A reusable React component for 3D talking avatars with lip-sync and text-to-speech",
   "main": "./dist/index.cjs",
   "module": "./dist/index.js",
@@ -289,120 +289,71 @@ const TalkingHeadAvatar = forwardRef(({
   const talkingHead = talkingHeadRef.current;
 
   // Set up a polling mechanism to detect when speech finishes
-  // Wait for audio to actually start playing before checking if it's finished
   let checkInterval = null;
   let checkCount = 0;
-  let audioStarted = false;
   const maxChecks = 1200; // 60 seconds max (50ms intervals)
-  const maxWaitForAudioStart = 10000; // 10 seconds max to wait for audio to start
+  let callbackFired = false;
 
-  // First, wait for audio to actually start playing (API call completes and audio is added to playlist)
-  let waitForAudioStartCount = 0;
-  const waitForAudioStart = setInterval(() => {
-    waitForAudioStartCount++;
-
-    // Check if audio has started playing (audioPlaylist has items OR isAudioPlaying is true)
-    // Also check if isSpeaking is true (indicating API call has started processing)
-    if (talkingHead && talkingHead.isSpeaking && (
-      (talkingHead.audioPlaylist && talkingHead.audioPlaylist.length > 0) ||
-      (talkingHead.isAudioPlaying === true)
-    )) {
-      audioStarted = true;
-      clearInterval(waitForAudioStart);
-
-      // Now start checking if speech has finished
-      checkInterval = setInterval(checkSpeechFinished, 50);
-    }
-
-    // Also check if speech queue is empty and not speaking (meaning all sentences processed)
-    // This handles the case where text was split into sentences but all are processed
-    const speechQueueEmpty = !talkingHead.speechQueue || talkingHead.speechQueue.length === 0;
-
-    if (talkingHead && !talkingHead.isSpeaking && speechQueueEmpty &&
-        (!talkingHead.audioPlaylist || talkingHead.audioPlaylist.length === 0) &&
-        (!talkingHead.isAudioPlaying || talkingHead.isAudioPlaying === false)) {
-      // All speech has finished (all sentences processed and audio finished)
-      clearInterval(waitForAudioStart);
-      try {
-        options.onSpeechEnd();
-      } catch (e) {
-        console.error('Error in onSpeechEnd callback:', e);
-      }
-      return;
-    }
-
-    // Timeout if audio doesn't start within reasonable time
-    if (waitForAudioStartCount * 50 > maxWaitForAudioStart) {
-      clearInterval(waitForAudioStart);
-      // Check if speech has actually started (isSpeaking should be true)
-      // If isSpeaking is false, the speech might have failed or completed very quickly
-      if (talkingHead && talkingHead.isSpeaking) {
-        // Still waiting for API, but assume it will start soon
-        audioStarted = true;
-        checkInterval = setInterval(checkSpeechFinished, 50);
-      } else if (talkingHead && !talkingHead.isSpeaking && speechQueueEmpty &&
-          (!talkingHead.audioPlaylist || talkingHead.audioPlaylist.length === 0) &&
-          (!talkingHead.isAudioPlaying || talkingHead.isAudioPlaying === false)) {
-        // Speech never started or finished immediately, call callback
-        try {
-          options.onSpeechEnd();
-        } catch (e) {
-          console.error('Error in onSpeechEnd callback:', e);
-        }
-      }
-    }
-  }, 50);
-
-  const checkSpeechFinished = () => {
+  // Start checking immediately - don't wait for audio to start
+  checkInterval = setInterval(() => {
     checkCount++;
+
+    // Safety timeout - call callback anyway after max time
     if (checkCount > maxChecks) {
-      // Timeout - call callback anyway
       if (checkInterval) {
         clearInterval(checkInterval);
         checkInterval = null;
       }
-      try {
-        options.onSpeechEnd();
-      } catch (e) {
-        console.error('Error in onSpeechEnd callback:', e);
+      if (!callbackFired) {
+        callbackFired = true;
+        try {
+          options.onSpeechEnd();
+        } catch (e) {
+          console.error('Error in onSpeechEnd callback (timeout):', e);
+        }
       }
       return;
     }
 
-    // Only check if audio has started playing
-    if (!audioStarted) {
-      return;
-    }
-
     // Check if speech has finished:
-    // 1. Not speaking OR speech queue is empty
-    // 2. Audio playlist is empty (no more audio to play)
-    // 3. Not currently playing audio
-    // 4. Speech queue is empty (all sentences have been processed)
+    // 1. isSpeaking is false (not currently processing speech)
+    // 2. speechQueue is empty (all sentences processed)
+    // 3. audioPlaylist is empty (no more audio to play)
+    // 4. isAudioPlaying is false (not currently playing audio)
     const speechQueueEmpty = !talkingHead.speechQueue || talkingHead.speechQueue.length === 0;
+    const audioPlaylistEmpty = !talkingHead.audioPlaylist || talkingHead.audioPlaylist.length === 0;
 
     const isFinished = talkingHead &&
-      (!talkingHead.isSpeaking || talkingHead.isSpeaking === false) &&
-      (!talkingHead.audioPlaylist || talkingHead.audioPlaylist.length === 0) &&
-      (!talkingHead.isAudioPlaying || talkingHead.isAudioPlaying === false) &&
-      speechQueueEmpty;
+      talkingHead.isSpeaking === false &&
+      speechQueueEmpty &&
+      audioPlaylistEmpty &&
+      talkingHead.isAudioPlaying === false;
 
-    if (isFinished) {
-      if (checkInterval) {
-        clearInterval(checkInterval);
-        checkInterval = null;
-      }
-
-      // Small delay to ensure everything is settled
+    if (isFinished && !callbackFired) {
+      // Double-check after a small delay to ensure it's really finished
       setTimeout(() => {
-        try {
-          options.onSpeechEnd();
-        } catch (e) {
-          console.error('Error in onSpeechEnd callback:', e);
+        // Re-check one more time to be sure
+        const finalCheck = talkingHead &&
+          talkingHead.isSpeaking === false &&
+          (!talkingHead.speechQueue || talkingHead.speechQueue.length === 0) &&
+          (!talkingHead.audioPlaylist || talkingHead.audioPlaylist.length === 0) &&
+          talkingHead.isAudioPlaying === false;
+
+        if (finalCheck && !callbackFired) {
+          callbackFired = true;
+          if (checkInterval) {
+            clearInterval(checkInterval);
+            checkInterval = null;
+          }
+          try {
+            options.onSpeechEnd();
+          } catch (e) {
+            console.error('Error in onSpeechEnd callback:', e);
+          }
        }
-      }, 50);
+      }, 100);
     }
-  };
+  }, 100); // Check every 100ms for better reliability
 }
 
 if (talkingHeadRef.current.lipsync && Object.keys(talkingHeadRef.current.lipsync).length > 0) {
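In 1.0.49 the component first waited for audio to start before polling; 1.0.51 drops that stage and polls immediately every 100 ms, firing onSpeechEnd exactly once (guarded by callbackFired) when isSpeaking, speechQueue, audioPlaylist and isAudioPlaying are all clear, re-checking after a short delay before committing. Note that maxChecks stays at 1200 while the interval doubles to 100 ms, so the safety timeout is now roughly 120 seconds, not the 60 seconds its comment still claims. A minimal, self-contained sketch of that pattern (the watchSpeechEnd helper and the head object are illustrative, not part of the package's API):

```js
// Illustrative sketch only: `head` stands in for a TalkingHead-like object
// exposing isSpeaking, speechQueue, audioPlaylist and isAudioPlaying.
function watchSpeechEnd(head, onSpeechEnd, { intervalMs = 100, maxChecks = 1200 } = {}) {
  let checks = 0;
  let fired = false;

  const isIdle = () =>
    head.isSpeaking === false &&
    (!head.speechQueue || head.speechQueue.length === 0) &&
    (!head.audioPlaylist || head.audioPlaylist.length === 0) &&
    head.isAudioPlaying === false;

  const fire = () => {
    if (fired) return;            // guard: the callback fires at most once
    fired = true;
    clearInterval(timer);
    try {
      onSpeechEnd();
    } catch (e) {
      console.error('Error in onSpeechEnd callback:', e);
    }
  };

  const timer = setInterval(() => {
    checks++;
    if (checks > maxChecks) {     // safety timeout: fire anyway
      fire();
      return;
    }
    if (isIdle()) {
      // Double-check after one more interval so a brief idle gap between
      // queued sentences does not end the speech early.
      setTimeout(() => { if (isIdle()) fire(); }, intervalMs);
    }
  }, intervalMs);

  return () => clearInterval(timer); // cancel handle for the caller
}
```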
@@ -180,9 +180,9 @@ class TalkingHead {
   lightSpotDispersion: 1,
   avatarMood: "neutral",
   avatarMute: false,
-  avatarIdleEyeContact: 0.2,
+  avatarIdleEyeContact: 0.6,
   avatarIdleHeadMove: 0.5,
-  avatarSpeakingEyeContact: 0.5,
+  avatarSpeakingEyeContact: 0.8,
   avatarSpeakingHeadMove: 0.5,
   avatarIgnoreCamera: false,
   listeningSilenceThresholdLevel: 40,
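These are constructor defaults, so existing apps pick up the stronger eye contact (0.2 → 0.6 idle, 0.5 → 0.8 speaking) without code changes. If the old behaviour is preferred, the values can presumably be overridden wherever the component forwards its configuration to the TalkingHead constructor; the prop name (options) and the named export in the sketch below are assumptions, so check the package README for the actual API:

```jsx
// Hypothetical override restoring the 1.0.49 eye-contact defaults.
// Both the `options` prop and the named export are assumptions.
import React from 'react';
import { TalkingHeadAvatar } from '@sage-rsc/talking-head-react';

export function Avatar() {
  return (
    <TalkingHeadAvatar
      options={{
        avatarIdleEyeContact: 0.2,     // 1.0.51 default is 0.6
        avatarSpeakingEyeContact: 0.5, // 1.0.51 default is 0.8
      }}
    />
  );
}
```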
@@ -426,7 +426,7 @@ class TalkingHead {
 
   this.animMoods = {
     'neutral' : {
-      baseline: { eyesLookDown: 0.1 },
+      baseline: { eyesLookDown: 0 },
       speech: { deltaRate: 0, deltaPitch: 0, deltaVolume: 0 },
       anims: [
         { name: 'breathing', delay: 1500, dt: [ 1200,500,1000 ], vs: { chestInhale: [0.5,0.5,0] } },
@@ -448,7 +448,7 @@ class TalkingHead {
       ]
     },
     'happy' : {
-      baseline: { mouthSmile: 0.2, eyesLookDown: 0.1 },
+      baseline: { mouthSmile: 0.2, eyesLookDown: 0 },
       speech: { deltaRate: 0, deltaPitch: 0.1, deltaVolume: 0 },
       anims: [
         { name: 'breathing', delay: 1500, dt: [ 1200,500,1000 ], vs: { chestInhale: [0.5,0.5,0] } },
@@ -2100,14 +2100,14 @@ class TalkingHead {
 
   console.log('Body movement set to:', movement);
 
-  // Ensure avatar is in full body mode for body movements
-  if (movement !== 'idle') {
-    this.setShowFullAvatar(true);
-    // Don't lock position for code-based animations - they should be stable by design
-  } else {
+  // Respect the current showFullAvatar setting instead of forcing it to true
+  // Only unlock position when returning to idle
+  if (movement === 'idle') {
     // Unlock position when returning to idle
     this.unlockAvatarPosition();
   }
+  // Note: We no longer force showFullAvatar to true for body movements
+  // The avatar will use whatever showFullAvatar value was set by the user
 
   // Apply body movement animation
   this.applyBodyMovementAnimation();
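Behaviour change to note: body movements no longer switch the avatar into full-body view as a side effect, and position is only unlocked when returning to idle. Callers that relied on the implicit switch now have to make it explicitly. A hedged sketch of that caller-side adjustment, where setBodyMovement is a guessed name for the method whose body appears in this hunk (only setShowFullAvatar and unlockAvatarPosition appear verbatim in the diff):

```js
// Hypothetical caller-side adjustment for 1.0.51.
// `head` is a TalkingHead instance; `setBodyMovement` is a guessed method name.
function startWave(head) {
  head.setShowFullAvatar(true);  // no longer forced implicitly by body movements
  head.setBodyMovement('wave');  // movement name is illustrative
}

function stopWave(head) {
  head.setBodyMovement('idle');  // position is unlocked on returning to idle
  head.setShowFullAvatar(false); // optional: restore the previous framing
}
```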