ui-soxo-bootstrap-core 2.6.26 → 2.6.28
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/core/lib/components/global-header/global-header.js +3 -4
- package/core/lib/components/sidemenu/sidemenu.scss +1 -1
- package/core/lib/models/process/components/process-dashboard/process-dashboard.js +469 -3
- package/core/lib/models/process/components/process-dashboard/process-dashboard.scss +4 -0
- package/core/modules/steps/action-buttons.js +57 -47
- package/core/modules/steps/action-buttons.scss +45 -34
- package/core/modules/steps/chat-assistant.js +141 -0
- package/core/modules/steps/openai-realtime.js +275 -0
- package/core/modules/steps/readme.md +167 -0
- package/core/modules/steps/steps.js +1030 -89
- package/core/modules/steps/steps.scss +546 -285
- package/core/modules/steps/voice-navigation.js +709 -0
- package/package.json +1 -1
|
@@ -4,25 +4,272 @@
|
|
|
4
4
|
* - Manages a multi-step, time-tracked process workflow.
|
|
5
5
|
* - Dynamically renders step-specific components based on configuration.
|
|
6
6
|
* - Tracks step and process durations with local persistence support.
|
|
7
|
-
* - Supports step navigation (next, previous, skip,
|
|
7
|
+
* - Supports step navigation (next, previous, skip, breadcrumb, keyboard).
|
|
8
8
|
* - Handles process submission and optional chaining to the next process.
|
|
9
|
-
* -
|
|
9
|
+
* - Renders a single active step view with compact breadcrumb controls.
|
|
10
10
|
*/
|
|
11
|
-
import
|
|
12
|
-
import {
|
|
13
|
-
import { Card } from './../../lib';
|
|
14
|
-
import * as genericComponents from './../../lib';
|
|
11
|
+
import { ArrowLeftOutlined, ArrowRightOutlined, CompressOutlined, ExpandOutlined, SoundOutlined } from '@ant-design/icons';
|
|
12
|
+
import { Empty, Select, Spin, message } from 'antd';
|
|
15
13
|
import moment from 'moment';
|
|
16
|
-
import {
|
|
17
|
-
import
|
|
14
|
+
import { useEffect, useRef, useState } from 'react';
|
|
15
|
+
import { ExternalWindow } from '../../components';
|
|
18
16
|
import { Dashboard } from '../../models';
|
|
17
|
+
import * as genericComponents from './../../lib';
|
|
18
|
+
import { Button, Card, Location } from './../../lib';
|
|
19
|
+
import { createOpenAIRealtimeSession, hasOpenAIRealtimeCredentials } from './openai-realtime';
|
|
19
20
|
import './steps.scss';
|
|
20
|
-
import TimelinePanel from './timeline';
|
|
21
|
-
import { ExternalWindow } from '../../components';
|
|
22
21
|
|
|
23
|
-
|
|
22
|
+
const STEP_WELCOME_LINES = [
|
|
23
|
+
'Welcome to your AI Automated Consultation process.',
|
|
24
|
+
'You are in the right place for a smooth and guided health journey.',
|
|
25
|
+
'This experience is designed to keep your consultation easy and stress-free.',
|
|
26
|
+
];
|
|
27
|
+
|
|
28
|
+
const STEP_INTRO_LINES = [
|
|
29
|
+
'In this process, we will walk you through a seamless and friendly AI interaction experience.',
|
|
30
|
+
'Each step is simple, guided, and focused on helping you feel prepared.',
|
|
31
|
+
'We will guide you through each stage so you always know what happens next.',
|
|
32
|
+
];
|
|
33
|
+
|
|
34
|
+
const STEP_EXPECTATION_LINES = [
|
|
35
|
+
'A care specialist may ask quick clarification questions to ensure your details are accurate.',
|
|
36
|
+
'This step focuses on collecting clear inputs so the care team can support you faster.',
|
|
37
|
+
'You can expect a guided workflow with minimal waiting and clear instructions.',
|
|
38
|
+
'The goal in this step is to keep your consultation organized and easy to follow.',
|
|
39
|
+
];
|
|
40
|
+
|
|
41
|
+
const STEP_COMFORT_LINES = [
|
|
42
|
+
'Take your time. There is no rush and assistance is always available nearby.',
|
|
43
|
+
'If anything feels unclear, the next prompt will guide you before you continue.',
|
|
44
|
+
'You can pause and review information before moving to the next stage.',
|
|
45
|
+
'Your responses here help personalize the rest of your consultation flow.',
|
|
46
|
+
];
|
|
47
|
+
|
|
48
|
+
const STEP_TIP_LINES = [
|
|
49
|
+
'Tip: Keep your previous reports or test details ready for quicker progress.',
|
|
50
|
+
'Tip: Follow on-screen prompts one at a time for the smoothest experience.',
|
|
51
|
+
'Tip: If you are unsure about a question, answer what you know and continue.',
|
|
52
|
+
'Tip: Stay relaxed; this process is built to be simple and patient-friendly.',
|
|
53
|
+
];
|
|
54
|
+
|
|
55
|
+
const VOICE_PROVIDER_OPTIONS = [
|
|
56
|
+
{ label: 'Gemini', value: 'gemini' },
|
|
57
|
+
{ label: 'ElevenLabs', value: 'elevenlabs' },
|
|
58
|
+
{ label: 'OpenAI', value: 'openai' },
|
|
59
|
+
];
|
|
60
|
+
|
|
61
|
+
const GEMINI_VOICE_OPTIONS = [{ label: 'Kore', value: 'Kore' }];
|
|
62
|
+
const DEFAULT_GEMINI_TTS_VOICE = process.env.GEMINI_TTS_VOICE || process.env.REACT_APP_GEMINI_TTS_VOICE || GEMINI_VOICE_OPTIONS[0].value;
|
|
63
|
+
const OPENAI_TTS_VOICE_OPTIONS = [
|
|
64
|
+
{ label: 'Alloy', value: 'alloy' },
|
|
65
|
+
{ label: 'Ash', value: 'ash' },
|
|
66
|
+
{ label: 'Coral', value: 'coral' },
|
|
67
|
+
{ label: 'Echo', value: 'echo' },
|
|
68
|
+
{ label: 'Fable', value: 'fable' },
|
|
69
|
+
{ label: 'Nova', value: 'nova' },
|
|
70
|
+
{ label: 'Onyx', value: 'onyx' },
|
|
71
|
+
{ label: 'Sage', value: 'sage' },
|
|
72
|
+
{ label: 'Shimmer', value: 'shimmer' },
|
|
73
|
+
];
|
|
74
|
+
const DEFAULT_OPENAI_TTS_VOICE =
|
|
75
|
+
process.env.OPENAI_TTS_VOICE ||
|
|
76
|
+
process.env.REACT_APP_OPENAI_TTS_VOICE ||
|
|
77
|
+
process.env.OPENAI_REALTIME_VOICE ||
|
|
78
|
+
process.env.REACT_APP_OPENAI_REALTIME_VOICE ||
|
|
79
|
+
OPENAI_TTS_VOICE_OPTIONS[0].value;
|
|
80
|
+
|
|
81
|
+
const ELEVENLABS_VOICE_OPTIONS = [
|
|
82
|
+
{ label: 'Rachel', value: '21m00Tcm4TlvDq8ikWAM' },
|
|
83
|
+
{ label: 'Adam', value: 'pNInz6obpgDQGcFmaJgB' },
|
|
84
|
+
{ label: 'Bella', value: 'EXAVITQu4vr4xnSDxMaL' },
|
|
85
|
+
{ label: 'Antoni', value: 'ErXwobaYiN019PkySvjV' },
|
|
86
|
+
{ label: 'Josh', value: 'TxGEqnHWrfWFTfGW9XjX' },
|
|
87
|
+
];
|
|
88
|
+
const DEFAULT_ELEVENLABS_VOICE_ID =
|
|
89
|
+
process.env.ELEVENLABS_VOICE_ID ||
|
|
90
|
+
process.env.ELEVEN_LABS_VOICE_ID ||
|
|
91
|
+
process.env.REACT_APP_ELEVENLABS_VOICE_ID ||
|
|
92
|
+
ELEVENLABS_VOICE_OPTIONS[0].value;
|
|
93
|
+
|
|
94
|
+
const SARVAM_VOICE_OPTIONS = [
|
|
95
|
+
{ label: 'Anushka', value: 'anushka' },
|
|
96
|
+
{ label: 'Manisha', value: 'manisha' },
|
|
97
|
+
{ label: 'Vidya', value: 'vidya' },
|
|
98
|
+
{ label: 'Arya', value: 'arya' },
|
|
99
|
+
{ label: 'Karun', value: 'karun' },
|
|
100
|
+
{ label: 'Hitesh', value: 'hitesh' },
|
|
101
|
+
];
|
|
102
|
+
|
|
103
|
+
const ELEVENLABS_TTS_API_BASE_URL =
|
|
104
|
+
process.env.ELEVENLABS_TTS_API_BASE_URL || process.env.REACT_APP_ELEVENLABS_TTS_API_BASE_URL || 'https://api.elevenlabs.io/v1/text-to-speech';
|
|
105
|
+
const ELEVENLABS_MODEL_ID = process.env.ELEVENLABS_MODEL_ID || process.env.REACT_APP_ELEVENLABS_MODEL_ID || 'eleven_multilingual_v2';
|
|
106
|
+
const ELEVENLABS_OUTPUT_FORMAT = process.env.ELEVENLABS_OUTPUT_FORMAT || process.env.REACT_APP_ELEVENLABS_OUTPUT_FORMAT || 'mp3_44100_128';
|
|
107
|
+
const GEMINI_TTS_MODEL = process.env.GEMINI_TTS_MODEL || process.env.REACT_APP_GEMINI_TTS_MODEL || 'gemini-2.5-flash-preview-tts';
|
|
108
|
+
const GEMINI_TTS_API_BASE_URL =
|
|
109
|
+
process.env.GEMINI_API_BASE_URL || process.env.REACT_APP_GEMINI_API_BASE_URL || 'https://generativelanguage.googleapis.com/v1beta';
|
|
110
|
+
const OPENAI_TTS_ENDPOINT = process.env.OPENAI_TTS_ENDPOINT || process.env.REACT_APP_OPENAI_TTS_ENDPOINT || 'https://api.openai.com/v1/audio/speech';
|
|
111
|
+
const OPENAI_TTS_MODEL = process.env.OPENAI_TTS_MODEL || process.env.REACT_APP_OPENAI_TTS_MODEL || 'gpt-4o-mini-tts';
|
|
112
|
+
const OPENAI_TTS_FORMAT = process.env.OPENAI_TTS_FORMAT || process.env.REACT_APP_OPENAI_TTS_FORMAT || 'mp3';
|
|
113
|
+
const SARVAM_TTS_ENDPOINT = process.env.SARVAM_TTS_ENDPOINT || process.env.REACT_APP_SARVAM_TTS_ENDPOINT || 'https://api.sarvam.ai/text-to-speech';
|
|
114
|
+
const SARVAM_TTS_MODEL = process.env.SARVAM_TTS_MODEL || process.env.REACT_APP_SARVAM_TTS_MODEL || 'bulbul:v2';
|
|
115
|
+
const SARVAM_TARGET_LANGUAGE_CODE = process.env.SARVAM_TARGET_LANGUAGE_CODE || process.env.REACT_APP_SARVAM_TARGET_LANGUAGE_CODE || 'en-IN';
|
|
116
|
+
const SARVAM_OUTPUT_AUDIO_CODEC = process.env.SARVAM_OUTPUT_AUDIO_CODEC || process.env.REACT_APP_SARVAM_OUTPUT_AUDIO_CODEC || 'wav';
|
|
117
|
+
const NARRATION_CONTROLS_ENABLED = false;
|
|
118
|
+
|
|
119
|
+
function getFromStorage(storageKey) {
|
|
120
|
+
if (typeof window === 'undefined' || !window.localStorage) {
|
|
121
|
+
return null;
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
try {
|
|
125
|
+
return window.localStorage.getItem(storageKey);
|
|
126
|
+
} catch (error) {
|
|
127
|
+
return null;
|
|
128
|
+
}
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
function getElevenLabsApiKey() {
|
|
132
|
+
return (
|
|
133
|
+
process.env.ELEVEN_LABS_KEY ||
|
|
134
|
+
process.env.ELEVENLABS_API_KEY ||
|
|
135
|
+
process.env.REACT_APP_ELEVEN_LABS_KEY ||
|
|
136
|
+
process.env.REACT_APP_ELEVENLABS_API_KEY ||
|
|
137
|
+
getFromStorage('eleven_labs_key') ||
|
|
138
|
+
getFromStorage('elevenlabs_api_key') ||
|
|
139
|
+
getFromStorage('ELEVEN_LABS_KEY') ||
|
|
140
|
+
getFromStorage('REACT_APP_ELEVEN_LABS_KEY') ||
|
|
141
|
+
getFromStorage('REACT_APP_ELEVENLABS_API_KEY')
|
|
142
|
+
);
|
|
143
|
+
}
|
|
144
|
+
|
|
145
|
+
function getGeminiApiKey() {
|
|
146
|
+
return (
|
|
147
|
+
process.env.GEMINI_API_KEY ||
|
|
148
|
+
process.env.REACT_APP_GEMINI_API_KEY ||
|
|
149
|
+
getFromStorage('gemini_api_key') ||
|
|
150
|
+
getFromStorage('GEMINI_API_KEY') ||
|
|
151
|
+
getFromStorage('REACT_APP_GEMINI_API_KEY')
|
|
152
|
+
);
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
function getOpenAIApiKey() {
|
|
156
|
+
return (
|
|
157
|
+
process.env.OPEN_AI_KEY ||
|
|
158
|
+
process.env.OPENAI_API_KEY ||
|
|
159
|
+
process.env.REACT_APP_OPEN_AI_KEY ||
|
|
160
|
+
process.env.REACT_APP_OPENAI_API_KEY ||
|
|
161
|
+
getFromStorage('open_ai_key') ||
|
|
162
|
+
getFromStorage('openai_api_key') ||
|
|
163
|
+
getFromStorage('OPEN_AI_KEY') ||
|
|
164
|
+
getFromStorage('OPENAI_API_KEY') ||
|
|
165
|
+
getFromStorage('REACT_APP_OPEN_AI_KEY') ||
|
|
166
|
+
getFromStorage('REACT_APP_OPENAI_API_KEY')
|
|
167
|
+
);
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
function getSarvamApiKey() {
|
|
171
|
+
return (
|
|
172
|
+
process.env.SARVAM_API_KEY ||
|
|
173
|
+
process.env.REACT_APP_SARVAM_API_KEY ||
|
|
174
|
+
getFromStorage('sarvam_api_key') ||
|
|
175
|
+
getFromStorage('REACT_APP_SARVAM_API_KEY')
|
|
176
|
+
);
|
|
177
|
+
}
|
|
178
|
+
|
|
179
|
+
function base64AudioToBlob(base64Audio = '', mimeType = 'audio/wav') {
|
|
180
|
+
const cleanedBase64 = base64Audio.includes(',') ? base64Audio.split(',').pop() : base64Audio;
|
|
181
|
+
const binaryString = typeof window !== 'undefined' && window.atob ? window.atob(cleanedBase64) : atob(cleanedBase64);
|
|
182
|
+
const bytes = new Uint8Array(binaryString.length);
|
|
183
|
+
|
|
184
|
+
for (let index = 0; index < binaryString.length; index += 1) {
|
|
185
|
+
bytes[index] = binaryString.charCodeAt(index);
|
|
186
|
+
}
|
|
187
|
+
|
|
188
|
+
return new Blob([bytes], { type: mimeType });
|
|
189
|
+
}
|
|
190
|
+
|
|
191
|
+
function extractGeminiAudio(payload) {
|
|
192
|
+
const candidates = payload && payload.candidates ? payload.candidates : [];
|
|
24
193
|
|
|
194
|
+
for (const candidate of candidates) {
|
|
195
|
+
const parts = candidate && candidate.content && candidate.content.parts ? candidate.content.parts : [];
|
|
196
|
+
|
|
197
|
+
for (const part of parts) {
|
|
198
|
+
const inlineData = part.inlineData || part.inline_data || part.audio;
|
|
199
|
+
|
|
200
|
+
if (inlineData && inlineData.data) {
|
|
201
|
+
return {
|
|
202
|
+
mimeType: inlineData.mimeType || inlineData.mime_type || 'audio/wav',
|
|
203
|
+
data: inlineData.data,
|
|
204
|
+
};
|
|
205
|
+
}
|
|
206
|
+
}
|
|
207
|
+
}
|
|
208
|
+
|
|
209
|
+
return null;
|
|
210
|
+
}
|
|
211
|
+
|
|
212
|
+
function hashText(value = '') {
|
|
213
|
+
let hash = 0;
|
|
214
|
+
|
|
215
|
+
for (let index = 0; index < value.length; index += 1) {
|
|
216
|
+
hash = (hash << 5) - hash + value.charCodeAt(index);
|
|
217
|
+
hash |= 0;
|
|
218
|
+
}
|
|
219
|
+
|
|
220
|
+
return Math.abs(hash);
|
|
221
|
+
}
|
|
222
|
+
|
|
223
|
+
function pickBySeed(items = [], seed = 0) {
|
|
224
|
+
if (!items.length) {
|
|
225
|
+
return '';
|
|
226
|
+
}
|
|
227
|
+
|
|
228
|
+
return items[seed % items.length];
|
|
229
|
+
}
|
|
230
|
+
|
|
231
|
+
function buildGuestStepGuide(step, index, total) {
|
|
232
|
+
if (!step) {
|
|
233
|
+
return {
|
|
234
|
+
welcome: STEP_WELCOME_LINES[0],
|
|
235
|
+
intro: STEP_INTRO_LINES[0],
|
|
236
|
+
expectation: 'We are preparing your consultation journey.',
|
|
237
|
+
comfort: STEP_COMFORT_LINES[0],
|
|
238
|
+
tip: STEP_TIP_LINES[0],
|
|
239
|
+
narration: '',
|
|
240
|
+
};
|
|
241
|
+
}
|
|
242
|
+
|
|
243
|
+
const stepName = step.step_name || `Step ${index + 1}`;
|
|
244
|
+
const seedSource = `${step.step_id || step.id || index}-${stepName}`;
|
|
245
|
+
const seed = hashText(seedSource);
|
|
246
|
+
|
|
247
|
+
const welcome = index === 0 ? STEP_WELCOME_LINES[0] : `Now entering ${stepName}.`;
|
|
248
|
+
const intro = index === 0 ? STEP_INTRO_LINES[0] : pickBySeed(STEP_INTRO_LINES, seed + 1);
|
|
249
|
+
const expectation = step.step_description || pickBySeed(STEP_EXPECTATION_LINES, seed + 2);
|
|
250
|
+
const comfort = pickBySeed(STEP_COMFORT_LINES, seed + 3);
|
|
251
|
+
const tip = pickBySeed(STEP_TIP_LINES, seed + 4);
|
|
252
|
+
|
|
253
|
+
return {
|
|
254
|
+
welcome,
|
|
255
|
+
intro,
|
|
256
|
+
expectation,
|
|
257
|
+
comfort,
|
|
258
|
+
tip,
|
|
259
|
+
narration: [
|
|
260
|
+
`Step ${index + 1} of ${total}. ${stepName}.`,
|
|
261
|
+
welcome,
|
|
262
|
+
intro,
|
|
263
|
+
`What to expect: ${expectation}`,
|
|
264
|
+
`Comfort note: ${comfort}`,
|
|
265
|
+
`Helpful tip: ${tip}`,
|
|
266
|
+
].join(' '),
|
|
267
|
+
};
|
|
268
|
+
}
|
|
269
|
+
|
|
270
|
+
export default function ProcessStepsPage({ match, CustomComponents = {}, ...props }) {
|
|
25
271
|
const allComponents = { ...genericComponents, ...CustomComponents };
|
|
272
|
+
const GuestInfoComponent = allComponents.EntryInfo;
|
|
26
273
|
|
|
27
274
|
const [loading, setLoading] = useState(false);
|
|
28
275
|
const [steps, setSteps] = useState([]);
|
|
@@ -33,11 +280,33 @@ export default function ProcessStepsPage({ match, CustomComponents = {}, ...prop
|
|
|
33
280
|
const [stepStartTime, setStepStartTime] = useState(null);
|
|
34
281
|
const [processStartTime, setProcessStartTime] = useState(null);
|
|
35
282
|
const [processTimings, setProcessTimings] = useState([]);
|
|
36
|
-
const [timelineCollapsed, setTimelineCollapsed] = useState(false);
|
|
37
283
|
const [showExternalWindow, setShowExternalWindow] = useState(false);
|
|
38
284
|
const [externalWin, setExternalWin] = useState(null);
|
|
39
|
-
const [
|
|
40
|
-
const
|
|
285
|
+
const [autoNarration, setAutoNarration] = useState(NARRATION_CONTROLS_ENABLED);
|
|
286
|
+
const [voiceProvider, setVoiceProvider] = useState(
|
|
287
|
+
process.env.REACT_APP_STEP_TTS_PROVIDER && process.env.REACT_APP_STEP_TTS_PROVIDER !== 'browser'
|
|
288
|
+
? process.env.REACT_APP_STEP_TTS_PROVIDER
|
|
289
|
+
: 'gemini'
|
|
290
|
+
);
|
|
291
|
+
const [browserVoiceOptions, setBrowserVoiceOptions] = useState([]);
|
|
292
|
+
const [voiceSelections, setVoiceSelections] = useState({
|
|
293
|
+
browser: process.env.REACT_APP_STEP_BROWSER_VOICE || process.env.REACT_APP_STEP_TTS_VOICE || '',
|
|
294
|
+
gemini: DEFAULT_GEMINI_TTS_VOICE,
|
|
295
|
+
elevenlabs: DEFAULT_ELEVENLABS_VOICE_ID,
|
|
296
|
+
openai: DEFAULT_OPENAI_TTS_VOICE,
|
|
297
|
+
sarvam: process.env.REACT_APP_SARVAM_SPEAKER || SARVAM_VOICE_OPTIONS[0].value,
|
|
298
|
+
});
|
|
299
|
+
const [stepSlideDirection, setStepSlideDirection] = useState('forward');
|
|
300
|
+
const [showNextProcessAction, setShowNextProcessAction] = useState(false);
|
|
301
|
+
const [isStepFullscreen, setIsStepFullscreen] = useState(false);
|
|
302
|
+
const [realtimeStatus, setRealtimeStatus] = useState('idle');
|
|
303
|
+
|
|
304
|
+
const narrationUtteranceRef = useRef(null);
|
|
305
|
+
const narrationAudioRef = useRef(null);
|
|
306
|
+
const narrationAudioUrlRef = useRef(null);
|
|
307
|
+
const narrationFallbackNoticeRef = useRef(false);
|
|
308
|
+
const realtimeSessionRef = useRef(null);
|
|
309
|
+
const fullscreenViewportRef = useRef(null);
|
|
41
310
|
|
|
42
311
|
const urlParams = Location.search();
|
|
43
312
|
const isConsultationMode = String(urlParams?.consultation).toLowerCase() === 'true';
|
|
@@ -47,11 +316,24 @@ export default function ProcessStepsPage({ match, CustomComponents = {}, ...prop
|
|
|
47
316
|
useEffect(() => {
|
|
48
317
|
loadProcess(currentProcessId);
|
|
49
318
|
|
|
50
|
-
|
|
51
|
-
|
|
319
|
+
let savedTimings = [];
|
|
320
|
+
try {
|
|
321
|
+
const saved = localStorage.getItem(`processTimings_${currentProcessId}`);
|
|
322
|
+
if (saved) {
|
|
323
|
+
const parsed = JSON.parse(saved);
|
|
324
|
+
if (Array.isArray(parsed)) {
|
|
325
|
+
savedTimings = parsed;
|
|
326
|
+
}
|
|
327
|
+
}
|
|
328
|
+
} catch (error) {
|
|
329
|
+
console.warn('Unable to restore process timings from local storage.', error);
|
|
330
|
+
}
|
|
331
|
+
|
|
332
|
+
setProcessTimings(savedTimings);
|
|
52
333
|
|
|
53
334
|
setProcessStartTime(Date.now());
|
|
54
335
|
setStepStartTime(Date.now());
|
|
336
|
+
setShowNextProcessAction(false);
|
|
55
337
|
}, [currentProcessId]);
|
|
56
338
|
|
|
57
339
|
//// Reset step start time whenever the active step changes
|
|
@@ -67,10 +349,110 @@ export default function ProcessStepsPage({ match, CustomComponents = {}, ...prop
|
|
|
67
349
|
}
|
|
68
350
|
}, [activeStep, steps]);
|
|
69
351
|
|
|
352
|
+
useEffect(() => {
|
|
353
|
+
if (steps[activeStep]?.order_seqtype !== 'E') {
|
|
354
|
+
setShowNextProcessAction(false);
|
|
355
|
+
}
|
|
356
|
+
}, [activeStep, steps]);
|
|
357
|
+
|
|
358
|
+
useEffect(() => {
|
|
359
|
+
if (typeof window === 'undefined' || !window.speechSynthesis) {
|
|
360
|
+
return undefined;
|
|
361
|
+
}
|
|
362
|
+
|
|
363
|
+
const updateBrowserVoices = () => {
|
|
364
|
+
const voices = window.speechSynthesis
|
|
365
|
+
.getVoices()
|
|
366
|
+
.map((voice) => ({
|
|
367
|
+
label: `${voice.name} (${voice.lang})`,
|
|
368
|
+
value: voice.voiceURI || voice.name,
|
|
369
|
+
}))
|
|
370
|
+
.sort((voiceA, voiceB) => voiceA.label.localeCompare(voiceB.label));
|
|
371
|
+
|
|
372
|
+
setBrowserVoiceOptions(voices);
|
|
373
|
+
|
|
374
|
+
if (voices.length) {
|
|
375
|
+
setVoiceSelections((oldSelections) => {
|
|
376
|
+
if (oldSelections.browser) {
|
|
377
|
+
return oldSelections;
|
|
378
|
+
}
|
|
379
|
+
|
|
380
|
+
return {
|
|
381
|
+
...oldSelections,
|
|
382
|
+
browser: voices[0].value,
|
|
383
|
+
};
|
|
384
|
+
});
|
|
385
|
+
}
|
|
386
|
+
};
|
|
387
|
+
|
|
388
|
+
updateBrowserVoices();
|
|
389
|
+
if (typeof window.speechSynthesis.addEventListener === 'function') {
|
|
390
|
+
window.speechSynthesis.addEventListener('voiceschanged', updateBrowserVoices);
|
|
391
|
+
} else {
|
|
392
|
+
window.speechSynthesis.onvoiceschanged = updateBrowserVoices;
|
|
393
|
+
}
|
|
394
|
+
|
|
395
|
+
return () => {
|
|
396
|
+
if (typeof window.speechSynthesis.removeEventListener === 'function') {
|
|
397
|
+
window.speechSynthesis.removeEventListener('voiceschanged', updateBrowserVoices);
|
|
398
|
+
} else if (window.speechSynthesis.onvoiceschanged === updateBrowserVoices) {
|
|
399
|
+
window.speechSynthesis.onvoiceschanged = null;
|
|
400
|
+
}
|
|
401
|
+
};
|
|
402
|
+
}, []);
|
|
403
|
+
|
|
404
|
+
useEffect(() => {
|
|
405
|
+
narrationFallbackNoticeRef.current = false;
|
|
406
|
+
}, [voiceProvider]);
|
|
407
|
+
|
|
408
|
+
useEffect(() => {
|
|
409
|
+
const isSupportedProvider = VOICE_PROVIDER_OPTIONS.some((option) => option.value === voiceProvider);
|
|
410
|
+
|
|
411
|
+
if (!isSupportedProvider) {
|
|
412
|
+
setVoiceProvider('gemini');
|
|
413
|
+
}
|
|
414
|
+
}, [voiceProvider]);
|
|
415
|
+
|
|
416
|
+
useEffect(() => {
|
|
417
|
+
stopNarration();
|
|
418
|
+
}, [voiceProvider, voiceSelections.browser, voiceSelections.gemini, voiceSelections.elevenlabs, voiceSelections.openai, voiceSelections.sarvam]);
|
|
419
|
+
|
|
420
|
+
useEffect(() => {
|
|
421
|
+
const providerVoices =
|
|
422
|
+
voiceProvider === 'gemini'
|
|
423
|
+
? GEMINI_VOICE_OPTIONS
|
|
424
|
+
: voiceProvider === 'elevenlabs'
|
|
425
|
+
? ELEVENLABS_VOICE_OPTIONS
|
|
426
|
+
: voiceProvider === 'openai'
|
|
427
|
+
? OPENAI_TTS_VOICE_OPTIONS
|
|
428
|
+
: SARVAM_VOICE_OPTIONS;
|
|
429
|
+
|
|
430
|
+
if (!providerVoices.length) {
|
|
431
|
+
return;
|
|
432
|
+
}
|
|
433
|
+
|
|
434
|
+
setVoiceSelections((oldSelections) => {
|
|
435
|
+
if (oldSelections[voiceProvider]) {
|
|
436
|
+
return oldSelections;
|
|
437
|
+
}
|
|
438
|
+
|
|
439
|
+
return {
|
|
440
|
+
...oldSelections,
|
|
441
|
+
[voiceProvider]: providerVoices[0].value,
|
|
442
|
+
};
|
|
443
|
+
});
|
|
444
|
+
}, [voiceProvider, browserVoiceOptions]);
|
|
445
|
+
|
|
70
446
|
// Save updated process timings to state and localStorage
|
|
71
447
|
const saveTimings = (updated) => {
|
|
72
|
-
|
|
73
|
-
|
|
448
|
+
const safeTimings = Array.isArray(updated) ? updated : [];
|
|
449
|
+
setProcessTimings(safeTimings);
|
|
450
|
+
|
|
451
|
+
try {
|
|
452
|
+
localStorage.setItem(`processTimings_${currentProcessId}`, JSON.stringify(safeTimings));
|
|
453
|
+
} catch (error) {
|
|
454
|
+
console.warn('Unable to persist process timings to local storage.', error);
|
|
455
|
+
}
|
|
74
456
|
};
|
|
75
457
|
// Record time spent on the current step
|
|
76
458
|
|
|
@@ -85,7 +467,8 @@ export default function ProcessStepsPage({ match, CustomComponents = {}, ...prop
|
|
|
85
467
|
const stepId = steps[activeStep].step_id;
|
|
86
468
|
// Clone existing timings
|
|
87
469
|
|
|
88
|
-
const
|
|
470
|
+
const previousTimings = Array.isArray(processTimings) ? processTimings : [];
|
|
471
|
+
const updated = [...previousTimings];
|
|
89
472
|
const index = updated.findIndex((t) => t.step_id === stepId);
|
|
90
473
|
// Create timing entry for the step
|
|
91
474
|
|
|
@@ -157,7 +540,11 @@ export default function ProcessStepsPage({ match, CustomComponents = {}, ...prop
|
|
|
157
540
|
const response = await Dashboard.processLog(payload);
|
|
158
541
|
|
|
159
542
|
if (response.success) {
|
|
160
|
-
|
|
543
|
+
try {
|
|
544
|
+
localStorage.removeItem(`processTimings_${currentProcessId}`);
|
|
545
|
+
} catch (error) {
|
|
546
|
+
console.warn('Unable to clear process timings from local storage.', error);
|
|
547
|
+
}
|
|
161
548
|
setProcessTimings([]);
|
|
162
549
|
return true;
|
|
163
550
|
}
|
|
@@ -177,9 +564,21 @@ export default function ProcessStepsPage({ match, CustomComponents = {}, ...prop
|
|
|
177
564
|
* - Navigates to the specified step index.
|
|
178
565
|
*/
|
|
179
566
|
const gotoStep = (index, status = 'completed') => {
|
|
567
|
+
if (!steps.length) {
|
|
568
|
+
return;
|
|
569
|
+
}
|
|
570
|
+
|
|
571
|
+
const nextIndex = Math.max(0, Math.min(index, steps.length - 1));
|
|
572
|
+
|
|
573
|
+
if (nextIndex === activeStep) {
|
|
574
|
+
return;
|
|
575
|
+
}
|
|
576
|
+
|
|
577
|
+
setStepSlideDirection(nextIndex > activeStep ? 'forward' : 'backward');
|
|
578
|
+
|
|
180
579
|
const updated = recordStepTime(status);
|
|
181
580
|
saveTimings(updated);
|
|
182
|
-
setActiveStep(
|
|
581
|
+
setActiveStep(nextIndex);
|
|
183
582
|
};
|
|
184
583
|
/**
|
|
185
584
|
* Navigate to the next step
|
|
@@ -198,7 +597,7 @@ export default function ProcessStepsPage({ match, CustomComponents = {}, ...prop
|
|
|
198
597
|
*/
|
|
199
598
|
const handleSkip = () => gotoStep(activeStep + 1, 'skipped');
|
|
200
599
|
/**
|
|
201
|
-
*
|
|
600
|
+
* Breadcrumb Navigation
|
|
202
601
|
* - Navigates directly to the selected step.
|
|
203
602
|
* - Records timing data for the current step.
|
|
204
603
|
*/
|
|
@@ -230,6 +629,398 @@ export default function ProcessStepsPage({ match, CustomComponents = {}, ...prop
|
|
|
230
629
|
setShowExternalWindow(true);
|
|
231
630
|
}
|
|
232
631
|
};
|
|
632
|
+
|
|
633
|
+
function clearNarrationAudio() {
|
|
634
|
+
if (narrationAudioRef.current) {
|
|
635
|
+
narrationAudioRef.current.pause();
|
|
636
|
+
narrationAudioRef.current.src = '';
|
|
637
|
+
narrationAudioRef.current = null;
|
|
638
|
+
}
|
|
639
|
+
|
|
640
|
+
if (narrationAudioUrlRef.current && typeof window !== 'undefined' && window.URL) {
|
|
641
|
+
window.URL.revokeObjectURL(narrationAudioUrlRef.current);
|
|
642
|
+
narrationAudioUrlRef.current = null;
|
|
643
|
+
}
|
|
644
|
+
}
|
|
645
|
+
|
|
646
|
+
function stopNarration() {
|
|
647
|
+
clearNarrationAudio();
|
|
648
|
+
|
|
649
|
+
if (typeof window !== 'undefined' && window.speechSynthesis) {
|
|
650
|
+
window.speechSynthesis.cancel();
|
|
651
|
+
}
|
|
652
|
+
|
|
653
|
+
narrationUtteranceRef.current = null;
|
|
654
|
+
}
|
|
655
|
+
|
|
656
|
+
function buildRealtimeInstructions() {
|
|
657
|
+
const step = steps[activeStep];
|
|
658
|
+
const stepName = step?.step_name || `Step ${activeStep + 1}`;
|
|
659
|
+
const stepDescription = step?.step_description || 'No additional description.';
|
|
660
|
+
|
|
661
|
+
return [
|
|
662
|
+
'You are a warm, concise healthcare concierge assisting a guest during a guided process.',
|
|
663
|
+
`Current step: ${stepName}.`,
|
|
664
|
+
`Step description: ${stepDescription}.`,
|
|
665
|
+
'Answer in short, helpful sentences and keep the guest calm and informed.',
|
|
666
|
+
'Avoid medical diagnosis or treatment advice.',
|
|
667
|
+
].join(' ');
|
|
668
|
+
}
|
|
669
|
+
|
|
670
|
+
async function startRealtimeConversation() {
|
|
671
|
+
if (realtimeSessionRef.current) {
|
|
672
|
+
return;
|
|
673
|
+
}
|
|
674
|
+
|
|
675
|
+
const session = createOpenAIRealtimeSession({
|
|
676
|
+
instructions: buildRealtimeInstructions(),
|
|
677
|
+
onStatus: (status) => {
|
|
678
|
+
setRealtimeStatus(status);
|
|
679
|
+
},
|
|
680
|
+
onError: (error) => {
|
|
681
|
+
console.error('OpenAI Realtime error:', error);
|
|
682
|
+
message.error(error?.message || 'OpenAI Realtime connection failed.');
|
|
683
|
+
},
|
|
684
|
+
});
|
|
685
|
+
|
|
686
|
+
realtimeSessionRef.current = session;
|
|
687
|
+
try {
|
|
688
|
+
await session.connect();
|
|
689
|
+
} catch (error) {
|
|
690
|
+
realtimeSessionRef.current = null;
|
|
691
|
+
}
|
|
692
|
+
}
|
|
693
|
+
|
|
694
|
+
function stopRealtimeConversation() {
|
|
695
|
+
if (realtimeSessionRef.current) {
|
|
696
|
+
realtimeSessionRef.current.disconnect();
|
|
697
|
+
realtimeSessionRef.current = null;
|
|
698
|
+
}
|
|
699
|
+
setRealtimeStatus('idle');
|
|
700
|
+
}
|
|
701
|
+
|
|
702
|
+
function playAudioBlob(audioBlob) {
|
|
703
|
+
return new Promise((resolve, reject) => {
|
|
704
|
+
if (typeof window === 'undefined' || !window.Audio || !window.URL) {
|
|
705
|
+
reject(new Error('Audio playback is not available.'));
|
|
706
|
+
return;
|
|
707
|
+
}
|
|
708
|
+
|
|
709
|
+
const audioUrl = window.URL.createObjectURL(audioBlob);
|
|
710
|
+
const audio = new window.Audio(audioUrl);
|
|
711
|
+
|
|
712
|
+
narrationAudioRef.current = audio;
|
|
713
|
+
narrationAudioUrlRef.current = audioUrl;
|
|
714
|
+
|
|
715
|
+
const cleanup = () => {
|
|
716
|
+
if (narrationAudioRef.current === audio) {
|
|
717
|
+
narrationAudioRef.current = null;
|
|
718
|
+
}
|
|
719
|
+
|
|
720
|
+
if (narrationAudioUrlRef.current === audioUrl) {
|
|
721
|
+
window.URL.revokeObjectURL(audioUrl);
|
|
722
|
+
narrationAudioUrlRef.current = null;
|
|
723
|
+
}
|
|
724
|
+
};
|
|
725
|
+
|
|
726
|
+
audio.onended = () => {
|
|
727
|
+
cleanup();
|
|
728
|
+
resolve();
|
|
729
|
+
};
|
|
730
|
+
|
|
731
|
+
audio.onpause = () => {
|
|
732
|
+
cleanup();
|
|
733
|
+
resolve();
|
|
734
|
+
};
|
|
735
|
+
|
|
736
|
+
audio.onerror = () => {
|
|
737
|
+
cleanup();
|
|
738
|
+
reject(new Error('Audio playback failed.'));
|
|
739
|
+
};
|
|
740
|
+
|
|
741
|
+
audio.play().catch((error) => {
|
|
742
|
+
cleanup();
|
|
743
|
+
reject(error);
|
|
744
|
+
});
|
|
745
|
+
});
|
|
746
|
+
}
|
|
747
|
+
|
|
748
|
+
function speakWithBrowser(text) {
|
|
749
|
+
return new Promise((resolve, reject) => {
|
|
750
|
+
if (typeof window === 'undefined' || !window.speechSynthesis || !window.SpeechSynthesisUtterance) {
|
|
751
|
+
reject(new Error('Speech synthesis is not available.'));
|
|
752
|
+
return;
|
|
753
|
+
}
|
|
754
|
+
|
|
755
|
+
const utterance = new window.SpeechSynthesisUtterance(text);
|
|
756
|
+
utterance.lang = process.env.REACT_APP_STEP_TTS_LANG || 'en-US';
|
|
757
|
+
|
|
758
|
+
const rate = Number(process.env.REACT_APP_STEP_TTS_RATE || 1);
|
|
759
|
+
const pitch = Number(process.env.REACT_APP_STEP_TTS_PITCH || 1);
|
|
760
|
+
|
|
761
|
+
utterance.rate = Number.isFinite(rate) ? rate : 1;
|
|
762
|
+
utterance.pitch = Number.isFinite(pitch) ? pitch : 1;
|
|
763
|
+
|
|
764
|
+
const selectedBrowserVoice = voiceSelections.browser;
|
|
765
|
+
if (selectedBrowserVoice) {
|
|
766
|
+
const browserVoice = window.speechSynthesis.getVoices().find((voice) => (voice.voiceURI || voice.name) === selectedBrowserVoice);
|
|
767
|
+
|
|
768
|
+
if (browserVoice) {
|
|
769
|
+
utterance.voice = browserVoice;
|
|
770
|
+
}
|
|
771
|
+
}
|
|
772
|
+
|
|
773
|
+
utterance.onend = () => {
|
|
774
|
+
if (narrationUtteranceRef.current === utterance) {
|
|
775
|
+
narrationUtteranceRef.current = null;
|
|
776
|
+
}
|
|
777
|
+
resolve();
|
|
778
|
+
};
|
|
779
|
+
|
|
780
|
+
utterance.onerror = () => {
|
|
781
|
+
if (narrationUtteranceRef.current === utterance) {
|
|
782
|
+
narrationUtteranceRef.current = null;
|
|
783
|
+
}
|
|
784
|
+
reject(new Error('Browser narration failed.'));
|
|
785
|
+
};
|
|
786
|
+
|
|
787
|
+
narrationUtteranceRef.current = utterance;
|
|
788
|
+
window.speechSynthesis.speak(utterance);
|
|
789
|
+
});
|
|
790
|
+
}
|
|
791
|
+
|
|
792
|
+
async function synthesizeGeminiAudio(text) {
|
|
793
|
+
const apiKey = getGeminiApiKey();
|
|
794
|
+
|
|
795
|
+
if (!apiKey) {
|
|
796
|
+
throw new Error('Gemini API key is missing.');
|
|
797
|
+
}
|
|
798
|
+
|
|
799
|
+
const selectedVoiceName = voiceSelections.gemini || DEFAULT_GEMINI_TTS_VOICE;
|
|
800
|
+
const endpoint = `${GEMINI_TTS_API_BASE_URL}/models/${GEMINI_TTS_MODEL}:generateContent?key=${encodeURIComponent(apiKey)}`;
|
|
801
|
+
const response = await fetch(endpoint, {
|
|
802
|
+
method: 'POST',
|
|
803
|
+
headers: {
|
|
804
|
+
'Content-Type': 'application/json',
|
|
805
|
+
},
|
|
806
|
+
body: JSON.stringify({
|
|
807
|
+
contents: [
|
|
808
|
+
{
|
|
809
|
+
role: 'user',
|
|
810
|
+
parts: [{ text }],
|
|
811
|
+
},
|
|
812
|
+
],
|
|
813
|
+
generationConfig: {
|
|
814
|
+
responseModalities: ['AUDIO'],
|
|
815
|
+
speechConfig: {
|
|
816
|
+
voiceConfig: {
|
|
817
|
+
prebuiltVoiceConfig: {
|
|
818
|
+
voiceName: selectedVoiceName,
|
|
819
|
+
},
|
|
820
|
+
},
|
|
821
|
+
},
|
|
822
|
+
},
|
|
823
|
+
}),
|
|
824
|
+
});
|
|
825
|
+
|
|
826
|
+
if (!response.ok) {
|
|
827
|
+
throw new Error(`Gemini TTS request failed with status ${response.status}.`);
|
|
828
|
+
}
|
|
829
|
+
|
|
830
|
+
const payload = await response.json();
|
|
831
|
+
const audio = extractGeminiAudio(payload);
|
|
832
|
+
|
|
833
|
+
if (!audio || !audio.data) {
|
|
834
|
+
throw new Error('Gemini did not return audio data.');
|
|
835
|
+
}
|
|
836
|
+
|
|
837
|
+
return base64AudioToBlob(audio.data, audio.mimeType || 'audio/wav');
|
|
838
|
+
}
|
|
839
|
+
|
|
840
|
+
/**
 * Calls the OpenAI text-to-speech endpoint with the selected OpenAI voice
 * (or the default voice) and resolves to the resulting audio Blob.
 * Throws when no API key is configured or the request is rejected.
 */
async function synthesizeOpenAIAudio(text) {
  const apiKey = getOpenAIApiKey();
  if (!apiKey) {
    throw new Error('OpenAI API key is missing.');
  }

  const payload = {
    model: OPENAI_TTS_MODEL,
    voice: voiceSelections.openai || DEFAULT_OPENAI_TTS_VOICE,
    input: text,
    response_format: OPENAI_TTS_FORMAT,
  };

  const response = await fetch(OPENAI_TTS_ENDPOINT, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${apiKey}`,
    },
    body: JSON.stringify(payload),
  });

  if (!response.ok) {
    throw new Error(`OpenAI TTS request failed with status ${response.status}.`);
  }

  return response.blob();
}
|
|
868
|
+
|
|
869
|
+
/**
 * Streams synthesized speech for `text` from ElevenLabs using the selected
 * voice id (or the default voice id) and resolves to the audio Blob.
 * Throws when the API key is missing or the HTTP request fails.
 */
async function synthesizeElevenLabsAudio(text) {
  const apiKey = getElevenLabsApiKey();
  if (!apiKey) {
    throw new Error('ElevenLabs API key is missing.');
  }

  const voiceId = voiceSelections.elevenlabs || DEFAULT_ELEVENLABS_VOICE_ID;
  const url = `${ELEVENLABS_TTS_API_BASE_URL}/${encodeURIComponent(voiceId)}/stream?output_format=${encodeURIComponent(ELEVENLABS_OUTPUT_FORMAT)}`;

  const response = await fetch(url, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Accept: 'audio/mpeg',
      'xi-api-key': apiKey,
    },
    body: JSON.stringify({ text, model_id: ELEVENLABS_MODEL_ID }),
  });

  if (!response.ok) {
    throw new Error(`ElevenLabs TTS request failed with status ${response.status}.`);
  }

  return response.blob();
}
|
|
899
|
+
|
|
900
|
+
/**
 * Sends `text` to the Sarvam TTS endpoint using the selected speaker
 * (defaulting to the first configured Sarvam voice) and returns the first
 * returned audio clip as a Blob.
 *
 * @param {string} text - Plain text to synthesize.
 * @returns {Promise<Blob>} Decoded audio (`audio/mpeg` for mp3 codec, otherwise `audio/wav`).
 * @throws {Error} When the API key is missing, the request fails, or no audio is returned.
 */
async function synthesizeSarvamAudio(text) {
  const apiKey = getSarvamApiKey();

  if (!apiKey) {
    throw new Error('Sarvam API key is missing.');
  }

  const selectedSpeaker = voiceSelections.sarvam || SARVAM_VOICE_OPTIONS[0].value;
  const response = await fetch(SARVAM_TTS_ENDPOINT, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'api-subscription-key': apiKey,
    },
    body: JSON.stringify({
      text,
      target_language_code: SARVAM_TARGET_LANGUAGE_CODE,
      model: SARVAM_TTS_MODEL,
      speaker: selectedSpeaker,
      output_audio_codec: SARVAM_OUTPUT_AUDIO_CODEC,
    }),
  });

  // The body is parsed even on failure so the API's own error detail can be
  // surfaced instead of a bare status code (previously parsed but unused).
  const payload = await response.json().catch(() => null);

  if (!response.ok) {
    // NOTE(review): error field names assumed from common REST conventions — confirm against Sarvam docs.
    const apiMessage = payload?.error?.message || payload?.message;
    throw new Error(apiMessage || `Sarvam TTS request failed with status ${response.status}.`);
  }

  const audioBase64 = payload?.audios?.[0];
  if (!audioBase64) {
    throw new Error('Sarvam did not return any audio data.');
  }

  const codec = (SARVAM_OUTPUT_AUDIO_CODEC || '').toLowerCase();
  const mimeType = codec === 'mp3' ? 'audio/mpeg' : 'audio/wav';

  return base64AudioToBlob(audioBase64, mimeType);
}
|
|
939
|
+
|
|
940
|
+
/**
 * Speaks `text` with the currently selected TTS provider.
 * Any in-flight narration is stopped first; the provider-specific synthesizer
 * produces an audio Blob which is then played.
 *
 * @param {string} text - Narration text; no-op when falsy or outside a browser.
 * @throws {Error} When the active provider is unsupported or synthesis/playback fails.
 */
async function speakText(text) {
  if (!text || typeof window === 'undefined') {
    return;
  }

  stopNarration();

  // Dispatch table mapping the active provider to its synthesizer.
  const synthesizers = {
    gemini: synthesizeGeminiAudio,
    elevenlabs: synthesizeElevenLabsAudio,
    openai: synthesizeOpenAIAudio,
    sarvam: synthesizeSarvamAudio,
  };
  const synthesize = synthesizers[voiceProvider];

  if (!synthesize) {
    // Browser speechSynthesis narration is intentionally not offered here.
    // (Message previously omitted Sarvam even though it is supported above.)
    throw new Error('Browser narration is disabled. Use Gemini, ElevenLabs, OpenAI, or Sarvam.');
  }

  const audioBlob = await synthesize(text);
  await playAudioBlob(audioBlob);
}
|
|
973
|
+
|
|
974
|
+
/**
 * Narrates the currently active step: builds the guest guide text for the
 * step and plays it through the selected voice provider.
 * On failure, shows a one-time provider-specific warning toast plus the
 * error message; never throws.
 */
async function speakCurrentStep() {
  const step = steps[activeStep];
  const guide = buildGuestStepGuide(step, activeStep, steps.length);

  try {
    await speakText(guide.narration);
  } catch (error) {
    if (!narrationFallbackNoticeRef.current) {
      // Map the provider key to a display label for the warning toast.
      // ('sarvam' was previously missing and fell through to the generic label.)
      const providerLabels = {
        gemini: 'Gemini',
        elevenlabs: 'ElevenLabs',
        openai: 'OpenAI',
        sarvam: 'Sarvam',
      };
      const providerLabel = providerLabels[voiceProvider] || 'Selected provider';
      message.warning(`${providerLabel} narration failed.`);
      narrationFallbackNoticeRef.current = true;
    }

    message.error(error?.message || 'Unable to play narration for this step.');
  }
}
|
|
997
|
+
|
|
998
|
+
/**
 * Toggles browser fullscreen on the step viewport element.
 * If the viewport already owns fullscreen, it is exited; otherwise any other
 * fullscreen element is released first and fullscreen is requested on the
 * viewport. Errors are logged rather than thrown.
 */
async function toggleStepFullscreen() {
  if (typeof document === 'undefined') {
    return;
  }

  const viewport = fullscreenViewportRef.current;
  if (!viewport || !viewport.requestFullscreen) {
    return;
  }

  try {
    const active = document.fullscreenElement;

    if (active === viewport) {
      // The viewport is already fullscreen — toggle it off.
      await document.exitFullscreen();
      return;
    }

    if (active) {
      // Another element holds fullscreen; release it before claiming it.
      await document.exitFullscreen();
    }

    await viewport.requestFullscreen();
  } catch (error) {
    console.error('Failed to toggle step fullscreen mode:', error);
  }
}
|
|
233
1024
|
/**
|
|
234
1025
|
* Dynamic Step Renderer
|
|
235
1026
|
* - Resolves and renders step-specific components dynamically.
|
|
@@ -275,88 +1066,238 @@ export default function ProcessStepsPage({ match, CustomComponents = {}, ...prop
|
|
|
275
1066
|
}, [activeStep, steps, externalWin]);
|
|
276
1067
|
|
|
277
1068
|
useEffect(() => {
  // SSR guard: `document` only exists in the browser.
  if (typeof document === 'undefined') {
    return undefined;
  }

  // Mirror the browser fullscreen state into component state.
  const syncFullscreenState = () => {
    setIsStepFullscreen(document.fullscreenElement === fullscreenViewportRef.current);
  };

  document.addEventListener('fullscreenchange', syncFullscreenState);
  // Initialize state for whatever fullscreen status exists on mount.
  syncFullscreenState();

  return () => {
    document.removeEventListener('fullscreenchange', syncFullscreenState);
  };
}, []);
|
|
289
1084
|
|
|
290
|
-
|
|
291
|
-
|
|
292
|
-
|
|
1085
|
+
useEffect(() => {
  // Auto-play narration for the active step whenever the step or the voice
  // configuration changes, but only when the feature flag and toggle are on.
  if (!NARRATION_CONTROLS_ENABLED || !autoNarration) {
    return;
  }

  // Skip while steps are loading or the active index is out of range.
  if (loading || !steps.length || !steps[activeStep]) {
    return;
  }

  speakCurrentStep();
}, [
  activeStep,
  steps,
  loading,
  autoNarration,
  voiceProvider,
  voiceSelections.browser,
  voiceSelections.gemini, // was missing: Gemini voice changes did not retrigger narration
  voiceSelections.elevenlabs,
  voiceSelections.openai,
  voiceSelections.sarvam,
]);
|
|
1106
|
+
|
|
1107
|
+
useEffect(() => {
  // Keep a connected OpenAI realtime session briefed on the current step.
  const session = realtimeSessionRef.current;

  if (session?.status !== 'connected') {
    return;
  }

  session.sendEvent({
    type: 'session.update',
    session: { instructions: buildRealtimeInstructions() },
  });
}, [activeStep, steps]);
|
|
1120
|
+
|
|
1121
|
+
// Unmount cleanup: halt narration playback and close any realtime session.
useEffect(
  () => () => {
    stopNarration();
    stopRealtimeConversation();
  },
  []
);
|
|
309
1127
|
|
|
310
1128
|
/**
|
|
311
|
-
* Renders the main process UI including
|
|
1129
|
+
* Renders the main process UI including breadcrumb, step details,
|
|
312
1130
|
* and action buttons. This content is reused in both normal view
|
|
313
1131
|
* and external window view.
|
|
314
1132
|
*/
|
|
315
|
-
const renderContent = (
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
|
|
324
|
-
|
|
325
|
-
|
|
326
|
-
|
|
327
|
-
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
<
|
|
334
|
-
{/*
|
|
335
|
-
<
|
|
336
|
-
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
|
|
349
|
-
|
|
350
|
-
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
|
|
356
|
-
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
|
|
1133
|
+
/**
 * Renders the full process UI for the active step: the breadcrumb strip,
 * navigation actions (fullscreen / back / finish / next), the dynamically
 * resolved step component, and — when NARRATION_CONTROLS_ENABLED — the
 * narration control bar. This JSX is reused for both the in-page view and
 * the external-window view.
 */
const renderContent = () => {
  const currentStep = steps[activeStep];
  // 'E' marks the terminal step of the process sequence.
  const isFinalStep = currentStep?.order_seqtype === 'E';
  // Voice option list tracks the active TTS provider.
  const currentVoiceOptions =
    voiceProvider === 'gemini'
      ? GEMINI_VOICE_OPTIONS
      : voiceProvider === 'elevenlabs'
      ? ELEVENLABS_VOICE_OPTIONS
      : voiceProvider === 'openai'
      ? OPENAI_TTS_VOICE_OPTIONS
      : SARVAM_VOICE_OPTIONS;
  const currentVoiceValue = voiceSelections[voiceProvider] || undefined;
  // Realtime conversation requires a token endpoint from the environment.
  const openAiTokenEndpoint = process.env.OPENAI_REALTIME_TOKEN_ENDPOINT || process.env.REACT_APP_OPENAI_REALTIME_TOKEN_ENDPOINT;
  const canStartRealtime = hasOpenAIRealtimeCredentials(openAiTokenEndpoint);

  return (
    <div className="process-steps-page">
      <div ref={fullscreenViewportRef} className="steps-viewport">
        <Card className="steps-main-card">
          {/* {activeStep > 0 && GuestInfoComponent && (
            <div className="steps-patient-bar">
              <GuestInfoComponent params={urlParams} />
            </div>
          )} */}

          {/* Top bar: clickable breadcrumb per step plus navigation actions. */}
          <div className="steps-top-bar">
            <div className="steps-breadcrumb-strip">
              {steps.length ? (
                steps.map((stepItem, stepIndex) => {
                  const isActiveBreadcrumb = stepIndex === activeStep;
                  const isCompletedBreadcrumb = stepIndex < activeStep;

                  return (
                    <button
                      key={stepItem.step_id || `${stepItem.step_name || 'step'}_${stepIndex}`}
                      type="button"
                      className={`steps-breadcrumb-item${isActiveBreadcrumb ? ' active' : ''}${isCompletedBreadcrumb ? ' completed' : ''}`}
                      onClick={() => handleTimelineClick(stepIndex)}
                    >
                      <span className="steps-breadcrumb-index">{stepIndex + 1}</span>
                      <span className="steps-breadcrumb-label">{stepItem.step_name || `Step ${stepIndex + 1}`}</span>
                    </button>
                  );
                })
              ) : (
                <span className="steps-breadcrumb-empty">No steps loaded</span>
              )}
            </div>

            <div className="steps-nav-actions">
              <Button icon={isStepFullscreen ? <CompressOutlined /> : <ExpandOutlined />} onClick={toggleStepFullscreen}>
                {isStepFullscreen ? 'Exit Full Screen' : 'Full Screen'}
              </Button>

              {activeStep > 0 && (
                <Button icon={<ArrowLeftOutlined />} onClick={handlePrevious}>
                  Back
                </Button>
              )}

              {/* {activeStep > 0 && !isFinalStep && (
                <Button type="default" onClick={handleSkip}>
                  Skip
                </Button>
              )} */}

              {/* Final step shows Finish (then optionally "Start next process"); other steps show Next. */}
              {isFinalStep ? (
                <>
                  {!showNextProcessAction && (
                    <Button
                      type="primary"
                      onClick={async () => {
                        const success = await handleFinish();
                        if (success && nextProcessId?.next_process_id) {
                          setShowNextProcessAction(true);
                        }
                      }}
                    >
                      Finish
                    </Button>
                  )}
                  {showNextProcessAction && nextProcessId?.next_process_id && (
                    <Button type="primary" onClick={handleStartNextProcess}>
                      Start {nextProcessId.next_process_name} <ArrowRightOutlined />
                    </Button>
                  )}
                </>
              ) : (
                <Button type="primary" disabled={!isStepCompleted} onClick={handleNext}>
                  {activeStep === 0 ? 'Start Consultation' : 'Next'} <ArrowRightOutlined />
                </Button>
              )}
            </div>
          </div>

          {/* Stage: the active step's component, re-keyed per process/step so it remounts with a slide animation. */}
          <div className={`steps-content-panel${isStepFullscreen ? ' is-fullscreen' : ''}`}>
            <div className="steps-stage-body">
              <div
                key={`${currentProcessId}_${activeStep}`}
                className={`steps-chat-step-card ${stepSlideDirection === 'backward' ? 'slide-backward' : 'slide-forward'}`}
              >
                {/* <div className="steps-chat-step-top">
                  <span className="steps-index-pill">
                    Step {Math.min(activeStep + 1, steps.length || 1)} of {steps.length || 1}
                  </span>
                  <h2 className="steps-title">{currentStep?.step_name || 'No step selected'}</h2>
                  {currentStep?.step_description ? <p className="steps-description">{currentStep.step_description}</p> : null}
                </div> */}

                <div className="steps-chat-step-component">
                  {loading ? (
                    <div className="steps-chat-loading">
                      <Spin />
                    </div>
                  ) : null}
                  {!loading ? <DynamicComponent /> : null}
                </div>
              </div>
            </div>
          </div>

          {/* Narration bar: provider/voice pickers, read-aloud, realtime conversation, auto-narration toggle. */}
          {NARRATION_CONTROLS_ENABLED ? (
            <div className="steps-bottom-nav steps-narration-bar">
              <Select
                className="steps-voice-provider-select"
                value={voiceProvider}
                options={VOICE_PROVIDER_OPTIONS}
                onChange={(value) => setVoiceProvider(value)}
              />
              <Select
                className="steps-voice-select"
                value={currentVoiceValue}
                options={currentVoiceOptions}
                onChange={(value) =>
                  setVoiceSelections((oldSelections) => ({
                    ...oldSelections,
                    [voiceProvider]: value,
                  }))
                }
                placeholder="Select Voice"
                optionFilterProp="label"
                showSearch
                disabled={!currentVoiceOptions.length}
              />
              <Button icon={<SoundOutlined />} onClick={speakCurrentStep} disabled={!currentStep}>
                Read Step
              </Button>
              <Button
                type={realtimeStatus === 'connected' ? 'default' : 'primary'}
                disabled={!canStartRealtime}
                onClick={() => {
                  if (realtimeStatus === 'connected' || realtimeStatus === 'connecting') {
                    stopRealtimeConversation();
                    return;
                  }
                  startRealtimeConversation();
                }}
              >
                {realtimeStatus === 'connected' ? 'Stop Conversation' : realtimeStatus === 'connecting' ? 'Connecting...' : 'Start Conversation'}
              </Button>
              <Button onClick={() => setAutoNarration((oldValue) => !oldValue)}>Auto Narration: {autoNarration ? 'On' : 'Off'}</Button>
            </div>
          ) : null}
        </Card>
      </div>
    </div>
  );
};
|
|
360
1301
|
/**
|
|
361
1302
|
* Renders content in both the main window and an external window
|
|
362
1303
|
* when external window mode is enabled.
|