@modochats/widget 0.1.0 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/src/models/conversation.js +1 -1
- package/dist/src/types/app.js +1 -0
- package/package.json +1 -1
- package/.vscode/settings.json +0 -3
- package/.yarn/install-state.gz +0 -0
- package/cdn-dist/README.md +0 -42
- package/cdn-dist/modo-web-component.js +0 -1344
- package/cdn-dist/modo-web-component.min.js +0 -1
- package/cdn-dist/package.json +0 -27
- package/dist/types/src/app.d.ts +0 -30
- package/dist/types/src/app.d.ts.map +0 -1
- package/dist/types/src/constants/index.d.ts +0 -10
- package/dist/types/src/constants/index.d.ts.map +0 -1
- package/dist/types/src/constants/regex.d.ts +0 -3
- package/dist/types/src/constants/regex.d.ts.map +0 -1
- package/dist/types/src/index.d.ts +0 -10
- package/dist/types/src/index.d.ts.map +0 -1
- package/dist/types/src/models/chatbot.d.ts +0 -24
- package/dist/types/src/models/chatbot.d.ts.map +0 -1
- package/dist/types/src/models/conversation.d.ts +0 -23
- package/dist/types/src/models/conversation.d.ts.map +0 -1
- package/dist/types/src/models/customer-data.d.ts +0 -32
- package/dist/types/src/models/customer-data.d.ts.map +0 -1
- package/dist/types/src/models/message-utils.d.ts +0 -13
- package/dist/types/src/models/message-utils.d.ts.map +0 -1
- package/dist/types/src/services/chat/conversation.d.ts +0 -23
- package/dist/types/src/services/chat/conversation.d.ts.map +0 -1
- package/dist/types/src/services/chat/message-utils.d.ts +0 -13
- package/dist/types/src/services/chat/message-utils.d.ts.map +0 -1
- package/dist/types/src/services/chat/model.d.ts +0 -28
- package/dist/types/src/services/chat/model.d.ts.map +0 -1
- package/dist/types/src/services/chatbot/chatbot.d.ts +0 -24
- package/dist/types/src/services/chatbot/chatbot.d.ts.map +0 -1
- package/dist/types/src/services/checker.d.ts +0 -4
- package/dist/types/src/services/checker.d.ts.map +0 -1
- package/dist/types/src/services/listeners/adders.d.ts +0 -4
- package/dist/types/src/services/listeners/adders.d.ts.map +0 -1
- package/dist/types/src/services/listeners/fn.d.ts +0 -4
- package/dist/types/src/services/listeners/fn.d.ts.map +0 -1
- package/dist/types/src/services/socket/utils.d.ts +0 -3
- package/dist/types/src/services/socket/utils.d.ts.map +0 -1
- package/dist/types/src/services/ui/fn.d.ts +0 -14
- package/dist/types/src/services/ui/fn.d.ts.map +0 -1
- package/dist/types/src/services/ui/html.d.ts +0 -4
- package/dist/types/src/services/ui/html.d.ts.map +0 -1
- package/dist/types/src/services/user/customer-data.d.ts +0 -32
- package/dist/types/src/services/user/customer-data.d.ts.map +0 -1
- package/dist/types/src/services/voice-chat/model.d.ts +0 -13
- package/dist/types/src/services/voice-chat/model.d.ts.map +0 -1
- package/dist/types/src/services/voice-chat/utils.d.ts +0 -10
- package/dist/types/src/services/voice-chat/utils.d.ts.map +0 -1
- package/dist/types/src/tools/fetch.d.ts +0 -3
- package/dist/types/src/tools/fetch.d.ts.map +0 -1
- package/dist/types/src/types/app.d.ts +0 -18
- package/dist/types/src/types/app.d.ts.map +0 -1
- package/dist/types/src/types/conversation.d.ts +0 -15
- package/dist/types/src/types/conversation.d.ts.map +0 -1
- package/dist/types/src/types/socket.d.ts +0 -7
- package/dist/types/src/types/socket.d.ts.map +0 -1
- package/dist/types/src/types/window.d.ts +0 -10
- package/dist/types/src/types/window.d.ts.map +0 -1
- package/dist/types/src/utils/audio.d.ts +0 -4
- package/dist/types/src/utils/audio.d.ts.map +0 -1
- package/dist/types/src/utils/browser.d.ts +0 -3
- package/dist/types/src/utils/browser.d.ts.map +0 -1
- package/dist/types/src/utils/fetch.d.ts +0 -19
- package/dist/types/src/utils/fetch.d.ts.map +0 -1
- package/dist/types/src/utils/uuid.d.ts +0 -7
- package/dist/types/src/utils/uuid.d.ts.map +0 -1
- package/rollup.config.js +0 -18
- package/rollup.dev.config.js +0 -22
- package/scripts/create-umd-bundle.js +0 -213
- package/scripts/terser-minify.js +0 -112
- package/src/app.ts +0 -117
- package/src/constants/index.ts +0 -21
- package/src/constants/regex.ts +0 -2
- package/src/index.ts +0 -16
- package/src/services/chat/conversation.ts +0 -135
- package/src/services/chat/message-utils.ts +0 -221
- package/src/services/chat/model.ts +0 -139
- package/src/services/chatbot/chatbot.ts +0 -66
- package/src/services/checker.ts +0 -10
- package/src/services/listeners/adders.ts +0 -178
- package/src/services/listeners/fn.ts +0 -77
- package/src/services/socket/utils.ts +0 -9
- package/src/services/ui/fn.ts +0 -254
- package/src/services/ui/html.ts +0 -192
- package/src/services/user/customer-data.ts +0 -78
- package/src/services/voice-chat/model.ts +0 -79
- package/src/services/voice-chat/utils.ts +0 -137
- package/src/tools/fetch.ts +0 -7
- package/src/types/app.ts +0 -17
- package/src/types/conversation.ts +0 -14
- package/src/types/socket.ts +0 -7
- package/src/types/window.ts +0 -12
- package/src/utils/audio.ts +0 -67
- package/src/utils/browser.ts +0 -4
- package/src/utils/fetch.ts +0 -98
- package/src/utils/uuid.ts +0 -13
- package/temp/audio/new-message.mp3 +0 -0
- package/temp/audio/on-hold.mp3 +0 -0
- package/temp/audio-processor.js +0 -261
- package/temp/css/index.css +0 -2283
- package/temp/dev.html +0 -87
- package/temp/index.html +0 -16
- package/tsconfig.json +0 -119

@@ -1,137 +0,0 @@
-function toggleVoiceChatLayout() {
-  const widget = window.getMWidget?.();
-  const voiceOverlay = widget?.container?.querySelector(".mw-voice-agent-overlay");
-
-  if (voiceOverlay) {
-    voiceOverlay.classList.toggle("mw-active");
-    voiceOverlay.classList.toggle("mw-hidden");
-  }
-}
-
-function initVoiceChatLayout() {
-  const widget = window.getMWidget?.();
-  const voiceOverlay = widget?.container?.querySelector(".mw-voice-agent-overlay");
-  const voiceCloseBtn = voiceOverlay?.querySelector(".mw-voice-close-btn");
-  const voiceDisconnectBtn = voiceOverlay?.querySelector(".mw-voice-disconnect-btn");
-  const voiceCallBtn = widget?.container?.querySelector(".mw-voice-call-btn");
-
-  // Show voice call button
-  if (voiceCallBtn) {
-    voiceCallBtn.classList.remove("mw-hidden");
-    voiceCallBtn.classList.add("mw-visible");
-  }
-
-  // Set logo from chatbot data
-  const logoImg = voiceOverlay?.querySelector(".mw-voice-agent-logo") as HTMLImageElement;
-  if (logoImg && widget?.chatbot?.image) {
-    logoImg.src = widget.chatbot.image;
-    logoImg.alt = widget.chatbot.name || "چت بات";
-  }
-
-  // Set title
-  const titleEl = voiceOverlay?.querySelector(".mw-voice-agent-title") as HTMLElement;
-  if (titleEl) {
-    titleEl.textContent = widget?.chatbot?.name || "تماس صوتی";
-  }
-
-  // Call button click handler
-  voiceCallBtn?.addEventListener("click", () => {
-    if (voiceOverlay) {
-      voiceOverlay.classList.remove("mw-hidden");
-      voiceOverlay.classList.add("mw-active");
-      // Connect to voice instance
-      widget?.voiceChat?.connect();
-    }
-  });
-
-  // Close button click handler
-  voiceCloseBtn?.addEventListener("click", () => {
-    if (voiceOverlay) {
-      voiceOverlay.classList.remove("mw-active");
-      voiceOverlay.classList.add("mw-hidden");
-      // Disconnect from voice instance
-      widget?.voiceChat?.disconnect();
-    }
-  });
-
-  // Disconnect button click handler
-  voiceDisconnectBtn?.addEventListener("click", () => {
-    if (voiceOverlay) {
-      voiceOverlay.classList.remove("mw-active");
-      voiceOverlay.classList.add("mw-hidden");
-      // Disconnect from voice instance
-      widget?.voiceChat?.disconnect();
-    }
-  });
-}
-
-function updateVoiceChatStatus(status: string, color?: string) {
-  const widget = window.getMWidget?.();
-  const statusEl = widget?.container?.querySelector(".mw-voice-agent-status") as HTMLElement;
-
-  if (statusEl) {
-    statusEl.textContent = status;
-    if (color) {
-      statusEl.style.color = color;
-    }
-  }
-}
-
-function handleVoiceConnected() {
-  const widget = window.getMWidget?.();
-  const logoEl = widget?.container?.querySelector(".mw-voice-agent-logo") as HTMLElement;
-  const statusEl = widget?.container?.querySelector(".mw-voice-agent-status") as HTMLElement;
-
-  // Add animation classes when connected
-  if (logoEl) {
-    logoEl.style.animation = "mw-voice-pulse 2s ease-in-out infinite";
-  }
-  if (statusEl) {
-    statusEl.style.animation = "mw-pulse 1.5s ease-in-out infinite";
-  }
-
-  updateVoiceChatStatus("متصل ✓", "#68d391"); // Green
-}
-
-function handleVoiceDisconnected(reason?: string) {
-  const widget = window.getMWidget?.();
-  const logoEl = widget?.container?.querySelector(".mw-voice-agent-logo") as HTMLElement;
-  const statusEl = widget?.container?.querySelector(".mw-voice-agent-status") as HTMLElement;
-
-  // Remove animations when disconnected
-  if (logoEl) {
-    logoEl.style.animation = "none";
-  }
-  if (statusEl) {
-    statusEl.style.animation = "none";
-  }
-
-  const statusText = reason ? `قطع شد: ${reason}` : "قطع شد";
-  updateVoiceChatStatus(statusText, "#fc8181"); // Red
-}
-
-function handleVoiceConnectionError(message: string) {
-  updateVoiceChatStatus(`خطا: ${message}`, "#fbb040"); // Warning/Orange
-
-  // Also show error in console with better visibility
-  console.error("🔴 Voice Connection Error:", message);
-}
-
-function handleMicrophonePaused() {
-  updateVoiceChatStatus("⏸ میکروفن متوقف شد", "#fbb040"); // Orange
-}
-
-function handleMicrophoneResumed() {
-  updateVoiceChatStatus("🎤 میکروفن فعال", "#68d391"); // Green
-}
-
-export {
-  toggleVoiceChatLayout,
-  initVoiceChatLayout,
-  updateVoiceChatStatus,
-  handleVoiceConnected,
-  handleVoiceDisconnected,
-  handleVoiceConnectionError,
-  handleMicrophonePaused,
-  handleMicrophoneResumed
-};
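For context, a minimal wiring sketch for the removed voice-chat UI helpers above. It assumes the widget markup (the .mw-voice-* elements) is already mounted and that window.getMWidget is defined; the import specifier and the events that would trigger the handlers are assumptions, not confirmed package API.

import {initVoiceChatLayout, handleVoiceConnected, handleVoiceDisconnected} from "#src/services/voice-chat/utils.js";

// Bind the call/close/disconnect buttons once the widget DOM exists.
initVoiceChatLayout();

// Elsewhere, the voice-chat service would call these on its own events (assumed):
handleVoiceConnected();                    // e.g. after a successful connect
handleVoiceDisconnected("network error");  // e.g. when the session drops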
package/src/tools/fetch.ts
DELETED
package/src/types/app.ts
DELETED

@@ -1,17 +0,0 @@
-interface WidgetOptions {
-  position: "left" | "right";
-  theme: "dark" | "light";
-  primaryColor: string;
-  title: string;
-  foregroundColor: string;
-  userData?: Record<string, any>;
-  autoInit?: boolean;
-  fullScreen: boolean;
-}
-interface FetchPaginationRes<T = any> {
-  results: T[];
-  next: string | null;
-  prev: string | null;
-  count: number;
-}
-export {WidgetOptions, FetchPaginationRes};
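For reference, a hypothetical configuration object that satisfies the removed WidgetOptions interface; the field values are placeholders for illustration, not defaults shipped by the package.

const options: WidgetOptions = {
  position: "right",
  theme: "light",
  primaryColor: "#4f46e5",
  title: "Support",
  foregroundColor: "#ffffff",
  userData: {plan: "free"}, // optional
  autoInit: true,           // optional
  fullScreen: false
};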
package/src/types/socket.ts
DELETED
package/src/types/window.ts
DELETED
package/src/utils/audio.ts
DELETED

@@ -1,67 +0,0 @@
-// Cache for preloaded audio elements
-const audioCache = new Map<string, HTMLAudioElement>();
-
-const preloadAudio = (audioPath: string): Promise<HTMLAudioElement> => {
-  return new Promise((resolve, reject) => {
-    if (audioCache.has(audioPath)) {
-      resolve(audioCache.get(audioPath)!);
-      return;
-    }
-
-    const audioElement = new Audio(audioPath);
-    audioElement.volume = 0.5;
-    audioElement.preload = "auto";
-
-    audioElement.addEventListener("canplaythrough", () => {
-      audioCache.set(audioPath, audioElement);
-      resolve(audioElement);
-    });
-
-    audioElement.addEventListener("error", error => {
-      reject(error);
-    });
-
-    // Start loading
-    audioElement.load();
-  });
-};
-
-const playAudio = async (audioPath: string) => {
-  try {
-    // Try to get preloaded audio or create new one
-    let audioElement = audioCache.get(audioPath);
-
-    if (!audioElement) {
-      audioElement = new Audio(audioPath);
-      audioElement.volume = 0.5;
-      audioElement.preload = "auto";
-    }
-
-    // Play the audio
-    await audioElement.play();
-  } catch (error) {
-    console.warn("Failed to play audio:", error);
-    // Fallback: try to play a simple beep sound using Web Audio API
-    try {
-      const audioContext = new (window.AudioContext || (window as any).webkitAudioContext)();
-      const oscillator = audioContext.createOscillator();
-      const gainNode = audioContext.createGain();
-
-      oscillator.connect(gainNode);
-      gainNode.connect(audioContext.destination);
-
-      oscillator.frequency.setValueAtTime(800, audioContext.currentTime);
-      oscillator.type = "sine";
-
-      gainNode.gain.setValueAtTime(0.3, audioContext.currentTime);
-      gainNode.gain.exponentialRampToValueAtTime(0.01, audioContext.currentTime + 0.5);
-
-      oscillator.start(audioContext.currentTime);
-      oscillator.stop(audioContext.currentTime + 0.5);
-    } catch (fallbackError) {
-      console.warn("Audio fallback also failed:", fallbackError);
-    }
-  }
-};
-
-export {playAudio, preloadAudio};
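A short usage sketch for the removed audio helpers. The URL is a placeholder (the package ships its sounds under temp/audio/, but the serving path is not shown in this diff); error handling is covered by playAudio's built-in Web Audio beep fallback.

import {preloadAudio, playAudio} from "#src/utils/audio.js";

// Warm the cache during startup so the first notification plays without delay,
// then play on demand (falls back to an 800 Hz beep if playback fails).
await preloadAudio("/audio/new-message.mp3");
await playAudio("/audio/new-message.mp3");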
package/src/utils/browser.ts
DELETED
package/src/utils/fetch.ts
DELETED

@@ -1,98 +0,0 @@
-import {$fetch} from "#src/tools/fetch.js";
-import {FetchPaginationRes} from "#src/types/app.js";
-
-const fetchChatbot = async (publicKey: string) => {
-  return await $fetch<Record<string, any>>(`/v1/chatbot/public/${publicKey}`);
-};
-
-const fetchSendMessage = async (
-  chatbotId: number,
-  content: string,
-  uniqueId: string,
-  conversationUuid?: string,
-  phoneNumber?: string,
-  options?: {
-    file?: File;
-    replyTo?: number;
-  }
-) => {
-  const formData = new FormData();
-  formData.append("chatbot_id", chatbotId.toString());
-  formData.append("content", content);
-  formData.append("message_type", "0");
-  formData.append("unique_id", uniqueId);
-  if (conversationUuid) formData.append("conversation_id", conversationUuid);
-  formData.append("url", window?.location?.href || "");
-  formData.append("title", document?.title || "");
-  if (phoneNumber && phoneNumber !== "no phone number") formData.append("phone_number", phoneNumber);
-  if (options?.file) formData.append("file", options.file);
-  if (options?.replyTo) formData.append("response_to", options.replyTo.toString());
-
-  return await $fetch("/v2/conversations/website/send-message/", {
-    method: "POST",
-    body: formData
-  });
-};
-const fetchGetAccessTokenForSocket = async (chatbotId: string, conversationUuid: string, uniqueId: string) => {
-  return await $fetch<{access_token: string; conversation_uuid: string; expires_in: number}>("/v2/conversations/websocket/auth/", {
-    method: "POST",
-    body: {
-      chatbot_id: chatbotId,
-      conversation_uuid: conversationUuid,
-      unique_id: uniqueId
-    }
-  });
-};
-const fetchConversationMessages = async (conversationUuid: string, chatbotUuid: string) => {
-  return await $fetch<Record<string, any>>(`/v2/conversations/website/conversations/${conversationUuid}/chatbot/${chatbotUuid}/messages/`);
-};
-const fetchUpdateUserData = async (chatbotUuid: string, uniqueId: string, userData: Record<string, any>) => {
-  return await $fetch("/v1/chatbot/customners/set-user-data", {
-    method: "POST",
-    body: {
-      chatbot_uuid: chatbotUuid,
-      unique_id: uniqueId,
-      user_data: userData
-    }
-  });
-};
-const fetchReadMessage = async (id: number) => {
-  return await $fetch(`/v2/conversations/messages/${id}/`, {
-    method: "POST"
-  });
-};
-const fetchMarkConversationAsRead = async (conversationUuid: string, uniqueId: string) => {
-  return await $fetch(`/v2/conversations/website/conversations/${conversationUuid}/messages/seen`, {
-    method: "POST",
-    body: {
-      unique_id: uniqueId
-    }
-  });
-};
-
-const fetchMessageFeedback = async (id: number, uniqueId: string, conversationUuid: string, liked: boolean) => {
-  return await $fetch(`/v2/conversations/website/conversations/messages/feedback`, {
-    method: "POST",
-    body: {
-      unique_id: uniqueId,
-      feedback: liked ? 1 : 0,
-      message_id: id,
-      conversation_uuid: conversationUuid
-    }
-  });
-};
-
-const fetchConversations = async (conversationUuid: string, uniqueId: string) => {
-  return await $fetch<FetchPaginationRes>(`/v2/conversations/website/conversations/${conversationUuid}/customer/${uniqueId}`);
-};
-export {
-  fetchChatbot,
-  fetchSendMessage,
-  fetchGetAccessTokenForSocket,
-  fetchConversationMessages,
-  fetchUpdateUserData,
-  fetchReadMessage,
-  fetchMarkConversationAsRead,
-  fetchMessageFeedback,
-  fetchConversations
-};
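A hedged sketch of how the removed fetch helpers compose. The public key is a placeholder, and $fetch (defined in tools/fetch.ts, which is not expanded in this diff) is assumed to resolve the relative paths against the chat backend.

import {fetchChatbot, fetchSendMessage} from "#src/utils/fetch.js";
import {generateUUID} from "#src/utils/uuid.js";

const uniqueId = generateUUID();
const chatbot = await fetchChatbot("PUBLIC_KEY_PLACEHOLDER");

// Omitting conversationUuid lets the backend open a new conversation for this visitor.
await fetchSendMessage(chatbot.id, "Hello!", uniqueId);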
package/src/utils/uuid.ts
DELETED

@@ -1,13 +0,0 @@
-/**
- * Generates a random UUID v4
- * @returns A random UUID string
- */
-const generateUUID = (): string => {
-  return "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g, function (c) {
-    const r = (Math.random() * 16) | 0;
-    const v = c === "x" ? r : (r & 0x3) | 0x8;
-    return v.toString(16);
-  });
-};
-
-export {generateUUID};
package/temp/audio/new-message.mp3
DELETED
Binary file
package/temp/audio/on-hold.mp3
DELETED
Binary file
package/temp/audio-processor.js
DELETED

@@ -1,261 +0,0 @@
-class AudioProcessor extends AudioWorkletProcessor {
-  constructor() {
-    super();
-    this.bufferSize = 4096;
-    this.buffer = new Float32Array(this.bufferSize);
-    this.bufferIndex = 0;
-
-    this.noiseThreshold = 0.01;
-    this.voiceThreshold = 0.08;
-    this.silenceFrames = 0;
-    this.maxSilenceFrames = 8;
-    this.isVoiceActive = false;
-    this.isPaused = false;
-
-    this.noiseProfile = new Float32Array(128).fill(0);
-    this.noiseProfileIndex = 0;
-    this.noiseProfileReady = false;
-
-    this.previousSamples = new Float32Array(this.bufferSize).fill(0);
-
-    this.preRollBuffer = [];
-    this.maxPreRollBuffers = 5;
-
-    this.tailBuffer = [];
-    this.maxTailBuffers = 3;
-    this.sendingTail = false;
-
-    this.justResumed = false;
-    this.resumedFrameCount = 0;
-    this.resumedFrameThreshold = 150;
-
-    this.port.onmessage = (event) => {
-      if (event.data.type === 'set-voice-threshold') {
-        this.voiceThreshold = event.data.value;
-      } else if (event.data.type === 'set-noise-threshold') {
-        this.noiseThreshold = event.data.value;
-      } else if (event.data.type === 'pause') {
-        this.isPaused = true;
-        console.log('AudioProcessor: Input paused');
-      } else if (event.data.type === 'resume') {
-        this.isPaused = false;
-        // Reset VAD state when resuming but KEEP pre-roll buffer
-        this.isVoiceActive = false;
-        this.silenceFrames = 0;
-        // Reset noise profile to quickly adapt to new environment
-        this.noiseProfile.fill(0);
-        this.noiseProfileIndex = 0;
-        this.noiseProfileReady = false;
-        // Be extra sensitive for the first ~1 second after resume
-        this.justResumed = true;
-        this.resumedFrameCount = 0;
-        // Don't clear preRollBuffer - it may contain the start of speech
-        console.log('AudioProcessor: Input resumed - VAD and noise profile reset, sensitivity boosted');
-      }
-    };
-  }
-
-  calculateRMS(samples) {
-    let sum = 0;
-    for (let i = 0; i < samples.length; i++) {
-      sum += samples[i] * samples[i];
-    }
-    return Math.sqrt(sum / samples.length);
-  }
-
-  calculateDB(rms) {
-    if (rms <= 0 || isNaN(rms)) return -Infinity;
-    const db = 20 * Math.log10(rms);
-    return isNaN(db) ? -Infinity : db;
-  }
-
-  updateNoiseProfile(samples) {
-    const rms = this.calculateRMS(samples);
-    this.noiseProfile[this.noiseProfileIndex] = rms;
-    this.noiseProfileIndex = (this.noiseProfileIndex + 1) % this.noiseProfile.length;
-
-    if (this.noiseProfileIndex === 0) {
-      this.noiseProfileReady = true;
-    }
-  }
-
-  getNoiseFloor() {
-    if (!this.noiseProfileReady) {
-      return this.noiseThreshold;
-    }
-
-    const sorted = Array.from(this.noiseProfile).sort((a, b) => a - b);
-    return sorted[Math.floor(sorted.length * 0.25)];
-  }
-
-  applySpectralSubtraction(samples) {
-    const output = new Float32Array(samples.length);
-    const noiseFloor = this.getNoiseFloor();
-
-    for (let i = 0; i < samples.length; i++) {
-      const signal = Math.abs(samples[i]);
-
-      if (signal < noiseFloor * 2) {
-        output[i] = 0;
-      } else {
-        const cleaned = signal - noiseFloor;
-        output[i] = samples[i] >= 0 ? cleaned : -cleaned;
-      }
-    }
-
-    return output;
-  }
-
-  applyHighPassFilter(samples) {
-    const output = new Float32Array(samples.length);
-    const alpha = 0.95;
-
-    output[0] = samples[0];
-    for (let i = 1; i < samples.length; i++) {
-      output[i] = alpha * (output[i-1] + samples[i] - this.previousSamples[i]);
-    }
-
-    this.previousSamples.set(samples);
-    return output;
-  }
-
-  applyNoiseGate(samples, rms) {
-    const noiseFloor = this.getNoiseFloor();
-    let adaptiveThreshold = Math.max(this.noiseThreshold, noiseFloor * 1.8);
-
-    if (this.justResumed) {
-      adaptiveThreshold = adaptiveThreshold * 0.3;
-    }
-
-    if (rms < adaptiveThreshold) {
-      return new Float32Array(samples.length);
-    }
-
-    return samples;
-  }
-
-  detectVoice(rms) {
-    const noiseFloor = this.getNoiseFloor();
-
-    let adaptiveVoiceThreshold;
-    if (this.justResumed) {
-      this.resumedFrameCount++;
-
-      const boostProgress = Math.min(this.resumedFrameCount / this.resumedFrameThreshold, 1.0);
-      const boostMultiplier = 0.2 + (0.8 * boostProgress);
-
-      adaptiveVoiceThreshold = this.voiceThreshold * boostMultiplier;
-
-      if (this.resumedFrameCount % 20 === 0) {
-        console.log(`AudioProcessor: Boosted threshold: ${adaptiveVoiceThreshold.toFixed(4)} (${(boostMultiplier * 100).toFixed(0)}% of normal, RMS: ${rms.toFixed(4)})`);
-      }
-
-      if (this.resumedFrameCount > this.resumedFrameThreshold) {
-        this.justResumed = false;
-        console.log('AudioProcessor: Sensitivity boost ended, returning to adaptive threshold');
-      }
-    } else {
-      const multiplier = 1.8;
-      adaptiveVoiceThreshold = Math.max(this.voiceThreshold, noiseFloor * multiplier);
-    }
-
-    const wasActive = this.isVoiceActive;
-
-    if (rms > adaptiveVoiceThreshold) {
-      if (!this.isVoiceActive) {
-        this.isVoiceActive = true;
-        if (this.justResumed) {
-          console.log('AudioProcessor: Voice detected with boost!');
-        }
-        return { justActivated: true, isActive: true };
-      }
-      this.isVoiceActive = true;
-      this.silenceFrames = 0;
-    } else if (this.isVoiceActive) {
-      this.silenceFrames++;
-      if (this.silenceFrames > this.maxSilenceFrames) {
-        this.isVoiceActive = false;
-        this.sendingTail = true;
-        this.preRollBuffer = [];
-        return { justActivated: false, isActive: false, justDeactivated: true };
-      }
-    } else {
-      this.updateNoiseProfile(this.buffer);
-    }
-
-    return { justActivated: false, isActive: this.isVoiceActive, justDeactivated: false };
-  }
-
-  process(inputs, outputs, parameters) {
-    const input = inputs[0];
-    if (input.length > 0) {
-      const inputChannel = input[0];
-
-      for (let i = 0; i < inputChannel.length; i++) {
-        this.buffer[this.bufferIndex] = inputChannel[i];
-        this.bufferIndex++;
-
-        if (this.bufferIndex >= this.bufferSize) {
-          const rms = this.calculateRMS(this.buffer);
-          const db = this.calculateDB(rms);
-          const voiceStatus = this.detectVoice(rms);
-
-          this.port.postMessage({
-            type: 'voice-level',
-            rms: rms,
-            db: db,
-            isActive: voiceStatus.isActive,
-            noiseFloor: this.getNoiseFloor(),
-            isPaused: this.isPaused
-          });
-
-          if (!this.isPaused) {
-            let processedBuffer = this.buffer;
-
-            const int16Buffer = new Int16Array(this.bufferSize);
-            for (let j = 0; j < this.bufferSize; j++) {
-              const s = Math.max(-1, Math.min(1, processedBuffer[j]));
-              int16Buffer[j] = s < 0 ? s * 0x8000 : s * 0x7FFF;
-            }
-
-            if (voiceStatus.justActivated && this.preRollBuffer.length > 0) {
-              for (const preRollBuffer of this.preRollBuffer) {
-                this.port.postMessage(preRollBuffer);
-              }
-              this.preRollBuffer = [];
-              this.tailBuffer = [];
-              this.sendingTail = false;
-            }
-
-            if (voiceStatus.isActive) {
-              this.port.postMessage(int16Buffer.buffer);
-              this.tailBuffer = [];
-              this.sendingTail = false;
-            } else if (this.sendingTail) {
-              this.tailBuffer.push(int16Buffer.buffer);
-              if (this.tailBuffer.length >= this.maxTailBuffers) {
-                for (const tailBuffer of this.tailBuffer) {
-                  this.port.postMessage(tailBuffer);
-                }
-                this.port.postMessage({ type: 'voice-ended' });
-                this.tailBuffer = [];
-                this.sendingTail = false;
-              }
-            } else {
-              this.preRollBuffer.push(int16Buffer.buffer);
-              if (this.preRollBuffer.length > this.maxPreRollBuffers) {
-                this.preRollBuffer.shift();
-              }
-            }
-          }
-
-          this.bufferIndex = 0;
-        }
-      }
-    }
-
-    return true;
-  }
-}
-
-registerProcessor('audio-processor', AudioProcessor);