@phonghq/go-chat 1.0.13 → 1.0.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,32 +3,41 @@ import { dataProfile } from '../utils/chat/auth';
  let socket = null;
  let dataCallBack = [];
  let callId = '';
- const BASE_URL = 'wss://web-socket.dev01.dtsmart.dev/web-stream';
- // const BASE_URL = 'wss://web-socket-test.dev01.dtsmart.dev/web-stream'
+ // const BASE_URL = 'wss://web-socket.dev01.dtsmart.dev/web-stream'
+ const BASE_URL = 'wss://web-socket-test.dev01.dtsmart.dev/web-stream';
  const BARE_URL_INBOUND = 'https://web-socket.dev01.dtsmart.dev';
  export function initWebSocket() {
-   if (socket && socket.readyState === 1) {
-     console.log("WebSocket đã kết nối, bỏ qua connect.");
-     return;
-   }
-   // socket = new WebSocket('wss://' + callId)
-   socket = new WebSocket(BASE_URL);
-   socket.binaryType = 'arraybuffer';
-   socket.onopen = (event) => {
-     console.log('Connected!');
-   };
-   socket.onmessage = (event) => {
-     const data = tryParseJson(event.data);
-     dataCallBack.forEach((e) => {
-       e.callBack(data);
-     });
-   };
-   socket.onclose = (event) => {
-     console.log('Disconnected:', event.reason);
-   };
-   socket.onerror = (event) => {
-     console.error('Socket error:', event);
-   };
+   return new Promise((resolve, reject) => {
+     if (socket && socket.readyState === 1) {
+       console.log("WebSocket đã kết nối, bỏ qua connect.");
+       resolve('Connected');
+       return;
+     }
+     const timer = setTimeout(() => {
+       socket?.close?.();
+       reject(new Error("WebSocket connection timeout"));
+     }, 5000);
+     // socket = new WebSocket('wss://' + callId)
+     socket = new WebSocket(BASE_URL);
+     socket.binaryType = 'arraybuffer';
+     socket.onopen = (event) => {
+       clearTimeout(timer);
+       resolve('Connected');
+     };
+     socket.onmessage = (event) => {
+       const data = tryParseJson(event.data);
+       dataCallBack.forEach((e) => {
+         e.callBack(data);
+       });
+     };
+     socket.onclose = (event) => {
+       console.log('Disconnected:', event.reason);
+     };
+     socket.onerror = (event) => {
+       console.error('Socket error:', event);
+       reject(event);
+     };
+   });
  }
  export const socketSend = (data) => {
    try {
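In 1.0.14, initWebSocket returns a Promise that resolves once the socket is open ('Connected') and rejects on a socket error or after the 5-second connection timeout, so callers can await it before calling socketSend. A minimal consumer sketch, assuming both functions are imported from the package's socket module; the wrapper name and payload are illustrative:

  // illustrative wrapper, not part of the package
  async function openChatStream() {
    try {
      await initWebSocket();          // resolves 'Connected', rejects on error or 5 s timeout
      socketSend({ type: 'ping' });   // hypothetical payload; the socket is open at this point
    } catch (err) {
      console.error('web-stream socket unavailable:', err);
    }
  }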
@@ -1 +1,10 @@
- export {};
+ export var PLIVO_CALL_STATUS;
+ (function (PLIVO_CALL_STATUS) {
+   PLIVO_CALL_STATUS["CONNECTING"] = "Connecting...";
+   PLIVO_CALL_STATUS["CALLING"] = "calling";
+   PLIVO_CALL_STATUS["RINGING"] = "ringing";
+   PLIVO_CALL_STATUS["CONNECT_FAILED"] = "failed";
+   PLIVO_CALL_STATUS["CALL_START"] = "in-progress";
+   PLIVO_CALL_STATUS["CALL_END"] = "completed";
+   PLIVO_CALL_STATUS["NO_ANSWER"] = "no-answer";
+ })(PLIVO_CALL_STATUS || (PLIVO_CALL_STATUS = {}));
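This runtime enum maps the package's UI call phases to the status strings Plivo reports; the matching declare const enum appears in the type definitions further down. A hedged sketch of how a consumer might branch on these values (the helper is illustrative, not part of the package):

  // illustrative helper, not part of the package
  function callStatusLabel(status) {
    switch (status) {
      case PLIVO_CALL_STATUS.RINGING:
        return 'Ringing';
      case PLIVO_CALL_STATUS.CALL_START:
        return 'In call';
      case PLIVO_CALL_STATUS.CALL_END:
      case PLIVO_CALL_STATUS.NO_ANSWER:
        return 'Call ended';
      default:
        return status;   // e.g. 'Connecting...', 'calling', 'failed'
    }
  }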
@@ -1,7 +1,7 @@
  import axios from '../../plugins/axios';
  import { dataProfile } from '../../utils/chat/auth';
- const BARE_WEBSOCKET_URL = 'https://web-socket.dev01.dtsmart.dev';
- // const BARE_WEBSOCKET_URL = 'https://web-socket-test.dev01.dtsmart.dev'
+ // const BARE_WEBSOCKET_URL = 'https://web-socket.dev01.dtsmart.dev'
+ const BARE_WEBSOCKET_URL = 'https://web-socket-test.dev01.dtsmart.dev';
  // const BARE_WEBSOCKET_URL = 'http://192.168.1.173:3000'
  export const getIceService = async () => {
    const res = await axios.post('/api/v1/message/call/ice-servers', {
@@ -77,9 +77,7 @@ export const plivoCall = async (user) => {
    if (!response.ok) {
      throw new Error(`Response status: ${response.status}`);
    }
-   console.log(response);
    const result = await response.json();
-   console.log(result);
    return result;
  };
  export const plivoEndCall = async (uuid) => {
@@ -120,6 +118,21 @@ export const downloadRecord = async (url_pub) => {
    a.download = "recording.mp3";
    a.click();
    a.remove();
-   // const result = await response.json()
-   console.log(a);
+ };
+ export const getPlivoAccessToken = async () => {
+   const url = BARE_WEBSOCKET_URL + '/ws/generate-token';
+   const response = await fetch(url, {
+     method: 'POST',
+     body: JSON.stringify({
+       clientId: dataProfile.value?.tenant_id,
+     }),
+     headers: {
+       ['Content-Type']: 'application/json'
+     }
+   });
+   if (!response.ok) {
+     throw new Error(`Response status: ${response.status}`);
+   }
+   const result = await response.json();
+   return result?.accessToken;
  };
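The new getPlivoAccessToken posts the tenant id from dataProfile to the websocket service's /ws/generate-token endpoint and returns the accessToken field of the JSON response (undefined if the field is missing). A usage sketch, assuming the function is imported from this module; the login step is left as a comment because the exact plivo-browser-sdk call is not shown in this diff:

  // illustrative wrapper, not part of the package
  async function fetchPlivoToken() {
    const token = await getPlivoAccessToken();   // POSTs { clientId: tenant_id } to /ws/generate-token
    if (!token) {
      throw new Error('No accessToken returned for this tenant');
    }
    // hand `token` to plivo-browser-sdk's token-based login (see the SDK docs for the exact call)
    return token;
  }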
@@ -2,3 +2,12 @@ export type IResCall = {
  from: string;
  to: string;
  };
+ export declare const enum PLIVO_CALL_STATUS {
+   CONNECTING = "Connecting...",
+   CALLING = "calling",
+   RINGING = "ringing",
+   CONNECT_FAILED = "failed",
+   CALL_START = "in-progress",
+   CALL_END = "completed",
+   NO_ANSWER = "no-answer"
+ }
@@ -8,3 +8,4 @@ export declare const callOutBound: (user: IResUser) => Promise<void>;
  export declare const plivoCall: (user: IResUser) => Promise<any>;
  export declare const plivoEndCall: (uuid: string) => Promise<void>;
  export declare const downloadRecord: (url_pub: string) => Promise<void>;
+ export declare const getPlivoAccessToken: () => Promise<any>;
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@phonghq/go-chat",
-   "version": "1.0.13",
+   "version": "1.0.14",
    "private": false,
    "files": [
      "dist"
@@ -34,6 +34,7 @@
      "lucide-vue-next": "^0.536.0",
      "mqtt": "^4.3.7",
      "pinia": "^2.1.7",
+     "plivo-browser-sdk": "^2.2.21",
      "prettier": "3.3.3",
      "reka-ui": "^2.6.0",
      "tailwind-merge": "^3.3.1",
@@ -1,64 +0,0 @@
- export declare function useAudioStream(wsUrl: string): {
-   ws: import("vue").Ref<{
-     binaryType: BinaryType;
-     readonly bufferedAmount: number;
-     readonly extensions: string;
-     onclose: ((this: WebSocket, ev: CloseEvent) => any) | null;
-     onerror: ((this: WebSocket, ev: Event) => any) | null;
-     onmessage: ((this: WebSocket, ev: MessageEvent<any>) => any) | null;
-     onopen: ((this: WebSocket, ev: Event) => any) | null;
-     readonly protocol: string;
-     readonly readyState: number;
-     readonly url: string;
-     close: (code?: number | undefined, reason?: string | undefined) => void;
-     send: (data: string | ArrayBufferView | Blob | ArrayBufferLike) => void;
-     readonly CONNECTING: 0;
-     readonly OPEN: 1;
-     readonly CLOSING: 2;
-     readonly CLOSED: 3;
-     addEventListener: {
-       <K extends keyof WebSocketEventMap>(type: K, listener: (this: WebSocket, ev: WebSocketEventMap[K]) => any, options?: boolean | AddEventListenerOptions | undefined): void;
-       (type: string, listener: EventListenerOrEventListenerObject, options?: boolean | AddEventListenerOptions | undefined): void;
-     };
-     removeEventListener: {
-       <K_1 extends keyof WebSocketEventMap>(type: K_1, listener: (this: WebSocket, ev: WebSocketEventMap[K_1]) => any, options?: boolean | EventListenerOptions | undefined): void;
-       (type: string, listener: EventListenerOrEventListenerObject, options?: boolean | EventListenerOptions | undefined): void;
-     };
-     dispatchEvent: (event: Event) => boolean;
-   } | null, WebSocket | {
-     binaryType: BinaryType;
-     readonly bufferedAmount: number;
-     readonly extensions: string;
-     onclose: ((this: WebSocket, ev: CloseEvent) => any) | null;
-     onerror: ((this: WebSocket, ev: Event) => any) | null;
-     onmessage: ((this: WebSocket, ev: MessageEvent<any>) => any) | null;
-     onopen: ((this: WebSocket, ev: Event) => any) | null;
-     readonly protocol: string;
-     readonly readyState: number;
-     readonly url: string;
-     close: (code?: number | undefined, reason?: string | undefined) => void;
-     send: (data: string | ArrayBufferView | Blob | ArrayBufferLike) => void;
-     readonly CONNECTING: 0;
-     readonly OPEN: 1;
-     readonly CLOSING: 2;
-     readonly CLOSED: 3;
-     addEventListener: {
-       <K extends keyof WebSocketEventMap>(type: K, listener: (this: WebSocket, ev: WebSocketEventMap[K]) => any, options?: boolean | AddEventListenerOptions | undefined): void;
-       (type: string, listener: EventListenerOrEventListenerObject, options?: boolean | AddEventListenerOptions | undefined): void;
-     };
-     removeEventListener: {
-       <K_1 extends keyof WebSocketEventMap>(type: K_1, listener: (this: WebSocket, ev: WebSocketEventMap[K_1]) => any, options?: boolean | EventListenerOptions | undefined): void;
-       (type: string, listener: EventListenerOrEventListenerObject, options?: boolean | EventListenerOptions | undefined): void;
-     };
-     dispatchEvent: (event: Event) => boolean;
-   } | null>;
-   statusText: import("vue").Ref<string, string>;
-   micLevel: import("vue").Ref<number, number>;
-   recording: import("vue").Ref<boolean, boolean>;
-   connect: () => void;
-   disconnect: () => void;
-   resumeAudio: () => void;
-   enqueueSpeakerChunk: (arrayBuffer: ArrayBuffer) => Promise<void>;
-   processSpeakerQueue: () => void;
-   startRecording: () => Promise<void>;
- };
@@ -1,196 +0,0 @@
- import { ref, onBeforeUnmount } from 'vue';
- export function useAudioStream(wsUrl) {
-   const SAMPLE_RATE = 24000;
-   const CHUNK_SIZE = 480;
-   const PREBUFFER_SEC = 0.4;
-   let audioCtx;
-   let processor;
-   let input;
-   let stream;
-   const ws = ref(null);
-   const statusText = ref('Tap to Speak with Vico');
-   // Speaker queue
-   let speakerQueue = [];
-   let nextPlayTime = 0;
-   // UI state
-   const recording = ref(false);
-   const micLevel = ref(0);
-   // Status management
-   const STATUS = {
-     IDLE: 'Tap to Speak with Vico',
-     CONNECTING: 'Connecting...',
-     LISTENING: 'Listening...',
-     SPEAKING: 'Speaking...'
-   };
-   let currentStatus = STATUS.IDLE;
-   function setStatus(newStatus) {
-     if (currentStatus !== newStatus) {
-       currentStatus = newStatus;
-       statusText.value = newStatus;
-     }
-   }
-   // 🎤 Float32 → PCM16
-   function floatTo16BitPCM(float32Array) {
-     const buffer = new ArrayBuffer(float32Array.length * 2);
-     const view = new DataView(buffer);
-     for (let i = 0; i < float32Array.length; i++) {
-       let s = Math.max(-1, Math.min(1, float32Array[i]));
-       view.setInt16(i * 2, s < 0 ? s * 0x8000 : s * 0x7fff, true);
-     }
-     return buffer;
-   }
-   // 🔊 PCM16 → Float32
-   function int16ToFloat32(int16Array) {
-     const float32 = new Float32Array(int16Array.length);
-     for (let i = 0; i < int16Array.length; i++) {
-       float32[i] = int16Array[i] / 32768;
-     }
-     return float32;
-   }
-   // 📥 enqueue speaker chunk
-   async function enqueueSpeakerChunk(arrayBuffer) {
-     const int16View = new Int16Array(arrayBuffer);
-     const float32Data = int16ToFloat32(int16View);
-     speakerQueue.push(float32Data);
-   }
-   // 🔊 process queue
-   function processSpeakerQueue() {
-     try {
-       if (speakerQueue.length > 0) {
-         const chunk = speakerQueue.shift();
-         if (chunk) {
-           const audioBuffer = audioCtx.createBuffer(1, chunk.length, SAMPLE_RATE);
-           audioBuffer.getChannelData(0).set(chunk);
-           const source = audioCtx.createBufferSource();
-           source.buffer = audioBuffer;
-           source.connect(audioCtx.destination);
-           if (nextPlayTime < audioCtx.currentTime + 0.05) {
-             nextPlayTime = audioCtx.currentTime + PREBUFFER_SEC;
-           }
-           source.start();
-           nextPlayTime += audioBuffer.duration;
-           setStatus(STATUS.SPEAKING);
-         }
-       }
-       else if (recording.value) {
-         setStatus(STATUS.LISTENING);
-       }
-     }
-     catch (e) {
-       console.log(e);
-     }
-     requestAnimationFrame(processSpeakerQueue);
-   }
-   // 🎤 start mic
-   async function startRecording() {
-     audioCtx = new AudioContext({ sampleRate: SAMPLE_RATE });
-     return;
-     stream = await navigator.mediaDevices.getUserMedia({ audio: true });
-     input = audioCtx.createMediaStreamSource(stream);
-     processor = audioCtx.createScriptProcessor(1024, 1, 1);
-     processor.onaudioprocess = (e) => {
-       if (!ws.value || ws.value.readyState !== WebSocket.OPEN)
-         return;
-       const inputData = e.inputBuffer.getChannelData(0);
-       // calculate mic level
-       let sum = 0;
-       for (let i = 0; i < inputData.length; i++)
-         sum += inputData[i] ** 2;
-       micLevel.value = Math.sqrt(sum / inputData.length);
-       // chunking & send
-       for (let i = 0; i < inputData.length; i += CHUNK_SIZE) {
-         const slice = inputData.slice(i, i + CHUNK_SIZE);
-         const binaryChunk = floatTo16BitPCM(slice);
-         ws.value.send(binaryChunk);
-       }
-     };
-     input.connect(processor);
-     processor.connect(audioCtx.destination);
-     recording.value = true;
-     setStatus(STATUS.LISTENING);
-   }
-   // ⏹ stop mic
-   function stopRecording() {
-     recording.value = false;
-     processor?.disconnect();
-     input?.disconnect();
-     stream?.getTracks().forEach((t) => t.stop());
-     if (audioCtx && audioCtx.state !== 'closed') {
-       audioCtx
-         .close()
-         .then(() => console.log('AudioContext closed successfully.'))
-         .catch((err) => console.error('Error closing AudioContext:', err))
-         .finally(() => (micLevel.value = 0));
-     }
-     setStatus(STATUS.IDLE);
-   }
-   const getAudioContext = () => {
-     if (!audioCtx || audioCtx.state === 'closed') {
-       audioCtx = new AudioContext({ sampleRate: SAMPLE_RATE });
-     }
-     return audioCtx;
-   };
-   const safeResumeAudio = () => {
-     const ctx = getAudioContext();
-     if (ctx.state === 'suspended') {
-       ctx.resume().catch((err) => console.error('Error resuming AudioContext:', err));
-     }
-   };
-   function connect() {
-     if (ws.value && ws.value.readyState === WebSocket.OPEN)
-       return;
-     setStatus(STATUS.CONNECTING);
-     ws.value = new WebSocket(wsUrl);
-     ws.value.binaryType = 'arraybuffer';
-     ws.value.onopen = () => {
-       console.log('✅ WS connected');
-       startRecording();
-       processSpeakerQueue();
-     };
-     ws.value.onmessage = (event) => {
-       if (event.data instanceof ArrayBuffer) {
-         enqueueSpeakerChunk(event.data);
-         return;
-       }
-       if (typeof event.data === 'string') {
-         try {
-           const msg = JSON.parse(event.data);
-           if (msg.type === 'AudioStop' || msg.code === 'UserStartedSpeaking') {
-             // speakerQueue.length = 0
-             nextPlayTime = 0;
-             setStatus(STATUS.LISTENING);
-             return;
-           }
-         }
-         catch (err) {
-           console.warn('⚠️ Parse JSON error:', err, event.data);
-         }
-       }
-       console.log('⚠️ Unknown WS message, closing...');
-       disconnect();
-     };
-     ws.value.onclose = () => {
-       console.log('❌ WS closed');
-       stopRecording();
-     };
-   }
-   function disconnect() {
-     ws.value?.close();
-     stopRecording();
-   }
-   onBeforeUnmount(() => {
-     disconnect();
-   });
-   return {
-     ws,
-     statusText,
-     micLevel,
-     recording,
-     connect,
-     disconnect,
-     resumeAudio: safeResumeAudio,
-     enqueueSpeakerChunk,
-     processSpeakerQueue,
-     startRecording
-   };
- }