@designcrowd/fe-shared-lib 1.6.9 → 1.6.10-voiceText
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +2 -0
- package/package.json +1 -1
- package/src/atoms/components/Icon/Icon.vue +2 -0
- package/src/atoms/components/Icon/icons/microphone.vue +5 -0
- package/src/atoms/components/Modal/Modal.vue +0 -9
- package/src/atoms/components/VoiceToTextButton/VoiceToTextButton.stories.js +107 -0
- package/src/atoms/components/VoiceToTextButton/VoiceToTextButton.vue +84 -0
- package/src/useVoiceToText.js +174 -0
package/index.js
CHANGED
|
@@ -22,6 +22,7 @@ export { default as WebsiteContextualUpgradeModal } from './src/experiences/comp
|
|
|
22
22
|
export { WEBSITE_UPGRADE_CONTEXT_TYPES } from './src/experiences/models/websiteContextualModel';
|
|
23
23
|
|
|
24
24
|
export { setSharedLibLocaleAsync, tr } from './src/useSharedLibTranslate';
|
|
25
|
+
export { useVoiceToText } from './src/useVoiceToText';
|
|
25
26
|
|
|
26
27
|
export { default as Button } from './src/atoms/components/Button/Button.vue';
|
|
27
28
|
export { default as ButtonGroup } from './src/atoms/components/ButtonGroup/ButtonGroup.vue';
|
|
@@ -59,4 +60,5 @@ export { default as Select } from './src/atoms/components/Select/Select.vue';
|
|
|
59
60
|
export { default as NumberStepper } from './src/atoms/components/NumberStepper/NumberStepper.vue';
|
|
60
61
|
export { default as CopyToClipboardText } from './src/atoms/components/CopyToClipboardText/CopyToClipboardText.vue';
|
|
61
62
|
export { default as SparkleIcon } from './src/atoms/components/SparkleIcon/SparkleIcon.vue';
|
|
63
|
+
export { default as VoiceToTextButton } from './src/atoms/components/VoiceToTextButton/VoiceToTextButton.vue';
|
|
62
64
|
export { default as SideNavigationPanel } from './src/experiences/components/SideNavigationPanel/SideNavigationPanel.vue';
|
package/src/atoms/components/Icon/Icon.vue
CHANGED
|
@@ -237,6 +237,7 @@ import IconLinkInBioFilled from './icons/link-in-bio-filled.vue';
|
|
|
237
237
|
import IconMedia from './icons/media.vue';
|
|
238
238
|
import IconMinusCircleLight from './icons/minus-circle-light.vue';
|
|
239
239
|
import IconMinus from './icons/minus.vue';
|
|
240
|
+
import IconMicrophone from './icons/microphone.vue';
|
|
240
241
|
import IconMobile from './icons/mobile.vue';
|
|
241
242
|
import IconOther from './icons/other.vue';
|
|
242
243
|
import IconPageButtons from './icons/page-buttons.vue';
|
|
@@ -636,6 +637,7 @@ export default {
|
|
|
636
637
|
IconContactMessage,
|
|
637
638
|
IconMinus,
|
|
638
639
|
IconMinusCircleLight,
|
|
640
|
+
IconMicrophone,
|
|
639
641
|
IconMobile,
|
|
640
642
|
IconMug,
|
|
641
643
|
IconOther,
|
|
@@ -1,10 +1,7 @@
|
|
|
1
1
|
<template>
|
|
2
2
|
<div
|
|
3
3
|
v-show="visible"
|
|
4
|
-
role="dialog"
|
|
5
|
-
aria-modal="true"
|
|
6
4
|
:aria-hidden="visible ? 'false' : 'true'"
|
|
7
|
-
:aria-label="ariaLabel"
|
|
8
5
|
class="tw-bg-black tw-flex tw-items-center tw-justify-center tw-fixed tw-w-full tw-h-full tw-top-0 tw-left-0 tw-z-50"
|
|
9
6
|
:class="{
|
|
10
7
|
'tw-px-4': fullScreenBreakpoint === undefined && !isImageMode,
|
|
@@ -50,7 +47,6 @@
|
|
|
50
47
|
v-if="!mandatory"
|
|
51
48
|
class="tw-absolute tw-right-0 tw-top-0 tw-border-none tw-text-secondary-500 tw-appearance-none tw-bg-transparent tw-p-0 tw-w-8 tw-h-8 tw-ml-auto tw-cursor-pointer"
|
|
52
49
|
data-test-modal-close-btn
|
|
53
|
-
aria-label="Close"
|
|
54
50
|
@click="close($event)"
|
|
55
51
|
>
|
|
56
52
|
<span class="tw-sr-only">Close modal</span>
|
|
@@ -165,11 +161,6 @@ export default {
|
|
|
165
161
|
required: false,
|
|
166
162
|
default: false,
|
|
167
163
|
},
|
|
168
|
-
ariaLabel: {
|
|
169
|
-
type: String,
|
|
170
|
-
required: false,
|
|
171
|
-
default: undefined,
|
|
172
|
-
},
|
|
173
164
|
},
|
|
174
165
|
computed: {
|
|
175
166
|
isImageMode() {
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
// Storybook stories (Component Story Format) for VoiceToTextButton.
import VoiceToTextButton from './VoiceToTextButton.vue';
import TextInput from '../TextInput/TextInput.vue';

export default {
  title: 'Components/VoiceToTextButton',
  component: VoiceToTextButton,
};

// Bare button with default props (en-US, enabled).
export const Default = () => ({
  components: { VoiceToTextButton },
  template: '<VoiceToTextButton />',
});

// End-to-end demo: wires the button's events into a TextInput model and a
// status line. Interim and final transcripts both overwrite `transcript`;
// `on-start`/`on-stop`/`on-error` drive the status messaging below the input.
export const WithTranscriptDisplay = () => ({
  components: { VoiceToTextButton, TextInput },
  data() {
    return {
      transcript: '',
      isListening: false,
      error: null,
    };
  },
  template: `
    <div class="tw-flex tw-flex-col tw-gap-4 tw-max-w-md">
      <div class="tw-flex tw-gap-2 tw-items-end">
        <TextInput
          v-model="transcript"
          label="Search"
          placeholder="Click the mic and speak..."
          class="tw-flex-1"
        />
        <VoiceToTextButton
          @on-transcript="transcript = $event"
          @on-interim-transcript="transcript = $event"
          @on-start="isListening = true; error = null"
          @on-stop="isListening = false"
          @on-error="error = $event"
        />
      </div>
      <div class="tw-text-sm tw-text-grayscale-600">
        <p v-if="isListening" class="tw-text-primary-500 tw-font-medium">
          Listening... speak now
        </p>
        <p v-else-if="error" class="tw-text-error-500">
          {{ error }}
        </p>
        <p v-else>
          Click the microphone button to start voice input
        </p>
      </div>
    </div>
  `,
});

WithTranscriptDisplay.story = {
  name: 'Interactive Demo',
};

// Side-by-side comparison of enabled vs. disabled states.
export const Disabled = () => ({
  components: { VoiceToTextButton },
  template: `
    <div class="tw-flex tw-gap-4 tw-items-center">
      <div class="tw-text-center">
        <VoiceToTextButton />
        <p class="tw-text-xs tw-mt-2 tw-text-grayscale-600">Enabled</p>
      </div>
      <div class="tw-text-center">
        <VoiceToTextButton :disabled="true" />
        <p class="tw-text-xs tw-mt-2 tw-text-grayscale-600">Disabled</p>
      </div>
    </div>
  `,
});

Disabled.story = {
  name: 'Disabled State',
};

// One button per language tag. NOTE(review): the composable behind the button
// is a shared singleton, so these buttons share one recognizer and the
// last-rendered `lang` wins — confirm this is the intended demo behavior.
export const LanguageVariants = () => ({
  components: { VoiceToTextButton },
  data() {
    return {
      languages: [
        { code: 'en-US', name: 'English (US)' },
        { code: 'de-DE', name: 'German' },
        { code: 'fr-FR', name: 'French' },
        { code: 'es-ES', name: 'Spanish' },
      ],
    };
  },
  template: `
    <div class="tw-flex tw-flex-wrap tw-gap-4">
      <div
        v-for="lang in languages"
        :key="lang.code"
        class="tw-text-center tw-p-2 tw-border tw-border-grayscale-300 tw-rounded"
      >
        <VoiceToTextButton :lang="lang.code" />
        <p class="tw-text-xs tw-mt-2">{{ lang.name }}</p>
      </div>
    </div>
  `,
});

LanguageVariants.story = {
  name: 'Language Variants',
};
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
<template>
  <!-- Rendered only when the browser exposes the Web Speech API
       (isSupported); otherwise the component renders nothing. -->
  <Button
    v-if="isSupported"
    icon="microphone"
    :disabled="disabled"
    variant="outline"
    :class="{ 'voice-listening': isListening }"
    :aria-label="isListening ? 'Stop voice input' : 'Start voice input'"
    data-test="voice-to-text-button"
    @on-click="toggle"
  />
</template>

<script setup>
// Microphone toggle button: bridges the useVoiceToText composable's reactive
// state to component events (on-transcript, on-interim-transcript, on-start,
// on-stop, on-error).
import { watch, toRef } from 'vue';
import Button from '../Button/Button.vue';
import { useVoiceToText } from '../../../useVoiceToText';

const props = defineProps({
  // BCP 47 language tag forwarded to the speech recognizer.
  lang: {
    type: String,
    default: 'en-US',
  },
  // Disables the underlying Button while true.
  disabled: {
    type: Boolean,
    default: false,
  },
});

const emit = defineEmits(['on-transcript', 'on-interim-transcript', 'on-start', 'on-stop', 'on-error']);

// NOTE(review): useVoiceToText is a module-level singleton — multiple mounted
// buttons share one recognizer, and the most recent `lang` applied wins.
const { isSupported, isListening, transcript, isFinal, error, toggle, setLang } = useVoiceToText({
  lang: props.lang,
});

// Keep recognition language in sync with prop
watch(toRef(props, 'lang'), setLang);

// Watch for transcript changes and emit appropriate events:
// final text -> on-transcript, partial text -> on-interim-transcript.
// Empty transcripts emit nothing.
watch(
  [transcript, isFinal],
  ([newTranscript, newIsFinal]) => {
    if (newIsFinal && newTranscript) {
      emit('on-transcript', newTranscript);
    } else if (newTranscript) {
      emit('on-interim-transcript', newTranscript);
    }
  },
  // Sync flush: fire on every change immediately so rapid interim updates
  // are not coalesced into a single batched callback.
  { flush: 'sync' },
);

// Watch for listening state changes and translate edges into events.
watch(isListening, (newVal, oldVal) => {
  if (newVal && !oldVal) {
    emit('on-start');
  }
  if (!newVal && oldVal) {
    emit('on-stop');
  }
});

// Watch for errors; only non-null values are emitted (the composable
// auto-clears errors back to null, which is intentionally not re-emitted).
watch(error, (newError) => {
  if (newError) {
    emit('on-error', newError);
  }
});
</script>

<style scoped>
/* Pulsing halo while the recognizer is actively listening. */
.voice-listening {
  animation: voice-pulse 1.5s ease-in-out infinite;
}

@keyframes voice-pulse {
  0%,
  100% {
    box-shadow: 0 0 0 0 var(--color-primary, rgba(242, 27, 63, 0.4));
  }
  50% {
    box-shadow: 0 0 0 8px transparent;
  }
}
</style>
|
|
@@ -0,0 +1,174 @@
|
|
|
1
|
+
import { ref, computed, readonly } from 'vue';

// Module-level singleton: one SpeechRecognition instance and one reactive
// state bag are shared by every caller of useVoiceToText() (lazily created).
let recognition = null;
let isInitialized = false;
let errorClearTimeout = null;
let state = null;

// Human-readable messages for the SpeechRecognitionErrorEvent codes surfaced
// to callers; unknown codes fall back to a generic message in onerror.
const ERROR_MESSAGES = {
  'not-allowed': 'Microphone permission was denied. Please allow access.',
  'language-not-supported': 'This language is not supported.',
  network: 'A network error occurred. Please check your connection.',
  'audio-capture': 'No microphone was found or microphone is not working.',
};

// How long (ms) an error message stays set before being auto-cleared.
const ERROR_CLEAR_DELAY = 5000;

// Lazily allocate the shared reactive refs on first use.
function getState() {
  if (!state) {
    state = {
      isListening: ref(false),
      transcript: ref(''),
      isFinal: ref(false),
      error: ref(null),
    };
  }
  return state;
}

/**
 * Singleton composable that wraps the Web Speech API (SpeechRecognition).
 * All calls to useVoiceToText() return the same shared instance.
 *
 * @param {Object} options
 * @param {string} options.lang - BCP 47 language tag (default: 'en-US')
 * @returns {Object} Voice-to-text state and controls
 */
export function useVoiceToText(options = {}) {
  const { lang = 'en-US' } = options;

  // Get or create shared state
  const { isListening, transcript, isFinal, error } = getState();

  // Check for browser support. Chrome/Safari expose the webkit-prefixed
  // constructor; the typeof guard keeps this safe under SSR (no `window`).
  const SpeechRecognition =
    typeof window !== 'undefined' ? window.SpeechRecognition || window.webkitSpeechRecognition : null;

  const isSupported = computed(() => !!SpeechRecognition);

  // Initialize singleton once
  if (!isInitialized && SpeechRecognition) {
    recognition = new SpeechRecognition();
    recognition.continuous = true; // keep listening across pauses in speech
    recognition.interimResults = true; // surface partial transcripts while speaking

    recognition.onresult = (event) => {
      let interimTranscript = '';
      let finalTranscript = '';

      // Walk only the results added/changed since the last event
      // (event.resultIndex), concatenating the top alternative of each.
      for (let i = event.resultIndex; i < event.results.length; i += 1) {
        const result = event.results[i];
        if (result.isFinal) {
          finalTranscript += result[0].transcript;
        } else {
          interimTranscript += result[0].transcript;
        }
      }

      // Final text takes precedence; otherwise publish the interim text.
      if (finalTranscript) {
        transcript.value = finalTranscript;
        isFinal.value = true;
      } else {
        transcript.value = interimTranscript;
        isFinal.value = false;
      }
    };

    recognition.onerror = (event) => {
      // Suppress no-speech and aborted errors per spec
      if (event.error === 'no-speech' || event.error === 'aborted') {
        return;
      }

      const message = ERROR_MESSAGES[event.error] || 'An error occurred with speech recognition.';
      error.value = message;

      // eslint-disable-next-line no-console
      console.warn('[useVoiceToText]', event.error, message);

      // Auto-clear error after timeout (replacing any pending clear).
      if (errorClearTimeout) {
        clearTimeout(errorClearTimeout);
      }
      errorClearTimeout = setTimeout(() => {
        error.value = null;
      }, ERROR_CLEAR_DELAY);
    };

    // Fires whenever recognition winds down (explicit stop(), an error, or
    // the browser ending the session) — single place isListening is reset.
    recognition.onend = () => {
      isListening.value = false;
    };

    isInitialized = true;
  }

  // Update language on existing instance. NOTE(review): the instance is
  // shared, so the most recent caller's `lang` applies to all consumers.
  if (recognition) {
    recognition.lang = lang;
  }

  // Begin listening. No-op when unsupported or already listening.
  const start = () => {
    if (!recognition || isListening.value) return;

    // Clear previous state
    transcript.value = '';
    isFinal.value = false;
    error.value = null;
    if (errorClearTimeout) {
      clearTimeout(errorClearTimeout);
      errorClearTimeout = null;
    }

    try {
      recognition.start();
      isListening.value = true;
    } catch (e) {
      // Handle case where recognition is already started
      // eslint-disable-next-line no-console
      console.warn('[useVoiceToText] Failed to start:', e.message);
    }
  };

  // Request a stop; isListening is reset by the `onend` handler above.
  const stop = () => {
    if (!recognition || !isListening.value) return;

    try {
      recognition.stop();
    } catch (e) {
      // Handle case where recognition is already stopped
      // eslint-disable-next-line no-console
      console.warn('[useVoiceToText] Failed to stop:', e.message);
    }
  };

  // Flip between start/stop based on the current listening state.
  const toggle = () => {
    if (isListening.value) {
      stop();
    } else {
      start();
    }
  };

  // Change the recognition language at runtime (affects the shared instance).
  const setLang = (newLang) => {
    if (recognition) {
      recognition.lang = newLang;
    }
  };

  return {
    // State (reactive, read-only)
    isSupported,
    isListening: readonly(isListening),
    transcript: readonly(transcript),
    isFinal: readonly(isFinal),
    error: readonly(error),

    // Actions
    start,
    stop,
    toggle,
    setLang,
  };
}
|