@sage-rsc/talking-head-react 1.0.56 → 1.0.57
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +2 -2
- package/dist/index.js +522 -502
- package/package.json +1 -1
- package/src/components/CurriculumLearning.jsx +3 -0
- package/src/components/TalkingHeadAvatar.jsx +48 -0
package/package.json
CHANGED
|
@@ -992,6 +992,9 @@ const CurriculumLearning = forwardRef(({
|
|
|
992
992
|
}
|
|
993
993
|
},
|
|
994
994
|
stopSpeaking: () => avatarRef.current?.stopSpeaking(),
|
|
995
|
+
pauseSpeaking: () => avatarRef.current?.pauseSpeaking(),
|
|
996
|
+
resumeSpeaking: async () => await avatarRef.current?.resumeSpeaking(),
|
|
997
|
+
isPaused: () => avatarRef.current?.isPaused || false,
|
|
995
998
|
setMood: (mood) => avatarRef.current?.setMood(mood),
|
|
996
999
|
playAnimation: (animationName, disablePositionLock) => avatarRef.current?.playAnimation(animationName, disablePositionLock),
|
|
997
1000
|
setBodyMovement: (movement) => avatarRef.current?.setBodyMovement(movement),
|
|
@@ -50,9 +50,11 @@ const TalkingHeadAvatar = forwardRef(({
|
|
|
50
50
|
const containerRef = useRef(null);
|
|
51
51
|
const talkingHeadRef = useRef(null);
|
|
52
52
|
const showFullAvatarRef = useRef(showFullAvatar);
|
|
53
|
+
const pausedSpeechRef = useRef(null); // Track paused speech for resume
|
|
53
54
|
const [isLoading, setIsLoading] = useState(true);
|
|
54
55
|
const [error, setError] = useState(null);
|
|
55
56
|
const [isReady, setIsReady] = useState(false);
|
|
57
|
+
const [isPaused, setIsPaused] = useState(false);
|
|
56
58
|
|
|
57
59
|
// Update ref when prop changes
|
|
58
60
|
useEffect(() => {
|
|
@@ -275,6 +277,10 @@ const TalkingHeadAvatar = forwardRef(({
|
|
|
275
277
|
const speakText = useCallback(async (textToSpeak, options = {}) => {
|
|
276
278
|
if (talkingHeadRef.current && isReady) {
|
|
277
279
|
try {
|
|
280
|
+
// Store this speech so pauseSpeaking/resumeSpeaking can replay it later
|
|
281
|
+
pausedSpeechRef.current = { text: textToSpeak, options };
|
|
282
|
+
setIsPaused(false);
|
|
283
|
+
|
|
278
284
|
// Always resume audio context first (required for user interaction)
|
|
279
285
|
await resumeAudioContext();
|
|
280
286
|
|
|
@@ -385,9 +391,48 @@ const TalkingHeadAvatar = forwardRef(({
|
|
|
385
391
|
if (talkingHeadRef.current.setSlowdownRate) {
|
|
386
392
|
talkingHeadRef.current.setSlowdownRate(1.0);
|
|
387
393
|
}
|
|
394
|
+
// Clear paused speech when stopping
|
|
395
|
+
pausedSpeechRef.current = null;
|
|
396
|
+
setIsPaused(false);
|
|
397
|
+
}
|
|
398
|
+
}, []);
|
|
399
|
+
|
|
400
|
+
// Pause the avatar's current speech. Only takes effect when the underlying
// TalkingHead instance supports pausing AND we have speech stored by
// speakText (i.e. something was actually started through this component).
const pauseSpeaking = useCallback(() => {
  const head = talkingHeadRef.current;
  // Underlying library may not expose pauseSpeaking at all.
  if (!head?.pauseSpeaking) return;
  // speakText stores { text, options } before speaking; no stored text
  // means nothing was started through us, so there is nothing to pause.
  const stored = pausedSpeechRef.current;
  if (!stored?.text) return;
  head.pauseSpeaking();
  setIsPaused(true);
}, []);
|
|
390
409
|
|
|
410
|
+
// Resume speech previously interrupted by pauseSpeaking(). There is no
// native "resume" in the underlying TalkingHead API, so we re-speak the
// stored text from the beginning after re-enabling the audio context.
const resumeSpeaking = useCallback(async () => {
  const head = talkingHeadRef.current;
  const pausedSpeech = pausedSpeechRef.current;
  // Nothing to resume unless the avatar exists and speech was stored.
  if (!head || !pausedSpeech?.text) return;

  setIsPaused(false);

  // Audio context must be resumed first (browsers suspend it until a
  // user gesture re-activates it).
  await resumeAudioContext();

  // Only re-speak once lipsync modules are loaded. FIX: the previous
  // version cleared pausedSpeechRef before this check, so when lipsync
  // was not ready the stored speech was silently lost and a later resume
  // attempt could never succeed. Now the ref is cleared only when we
  // actually hand the text back to the avatar.
  if (head.lipsync && Object.keys(head.lipsync).length > 0) {
    pausedSpeechRef.current = null;

    const speakOptions = {
      ...pausedSpeech.options,
      // NOTE(review): assumes defaultAvatarConfig is stable across renders —
      // confirm; otherwise it belongs in the dependency array below.
      lipsyncLang: pausedSpeech.options?.lipsyncLang || defaultAvatarConfig.lipsyncLang || 'en'
    };

    // Same slowdown rate speakText applies when lipsync is active.
    if (head.setSlowdownRate) {
      head.setSlowdownRate(1.05);
    }
    head.speakText(pausedSpeech.text, speakOptions);
  }
}, [resumeAudioContext]);
|
|
435
|
+
|
|
391
436
|
const setMood = useCallback((mood) => {
|
|
392
437
|
if (talkingHeadRef.current) {
|
|
393
438
|
talkingHeadRef.current.setMood(mood);
|
|
@@ -465,11 +510,14 @@ const TalkingHeadAvatar = forwardRef(({
|
|
|
465
510
|
useImperativeHandle(ref, () => ({
|
|
466
511
|
speakText,
|
|
467
512
|
stopSpeaking,
|
|
513
|
+
pauseSpeaking,
|
|
514
|
+
resumeSpeaking,
|
|
468
515
|
resumeAudioContext,
|
|
469
516
|
setMood,
|
|
470
517
|
setTimingAdjustment,
|
|
471
518
|
playAnimation,
|
|
472
519
|
isReady,
|
|
520
|
+
isPaused,
|
|
473
521
|
talkingHead: talkingHeadRef.current,
|
|
474
522
|
handleResize,
|
|
475
523
|
setBodyMovement: (movement) => {
|