@phonghq/go-chat 1.0.9 → 1.0.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,7 +3,7 @@ import IconPlan from '../../../assets/icons/IconPlan.vue';
  import EmojiPicker from 'vue3-emoji-picker';
  import 'vue3-emoji-picker/css';
  import dayjs from 'dayjs';
- import { DATE_FORMATS } from '../../../constant/datetime';
+ import { DATE_FORMATS, TIME_ZONE_UTC } from '../../../constant/datetime';
  import { dataProfile } from '../../../utils/chat/auth';
  import { ref } from 'vue';
  import PopoverBase from '../../../components/chat/common/popover/PopoverBase.vue';
@@ -35,9 +35,10 @@ const handleSendMessage = async (type) => {
  keyword.value = '';
  chatId++;
  const id = props.data?.id.toString() + '-' + chatId;
+ console.log(dayjs().tz(TIME_ZONE_UTC).format(DATE_FORMATS['DATE_FORMAT_FULL']));
  const data = {
  conversation_id: 135,
- created_at: dayjs().format(DATE_FORMATS['DATE_FORMAT_FULL']),
+ created_at: dayjs().tz(TIME_ZONE_UTC).format(DATE_FORMATS['DATE_FORMAT_FULL']),
  id,
  message: messageContent,
  receiver_id: props.data?.id || 0,
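
The hunk above switches the outgoing message timestamp from the caller's local time to a fixed zone via dayjs().tz(TIME_ZONE_UTC). For .tz() to exist, the package has to register dayjs's utc and timezone plugins somewhere outside this hunk. A minimal sketch of the assumed call chain (the TIME_ZONE_UTC and DATE_FORMAT_FULL values below are illustrative, not taken from this diff):

    import dayjs from 'dayjs';
    import utc from 'dayjs/plugin/utc';
    import timezone from 'dayjs/plugin/timezone';

    dayjs.extend(utc);        // .utc() support
    dayjs.extend(timezone);   // adds .tz(); requires the utc plugin

    const TIME_ZONE_UTC = 'UTC';                     // assumed value of the imported constant
    const DATE_FORMAT_FULL = 'YYYY-MM-DD HH:mm:ss';  // assumed value of DATE_FORMATS['DATE_FORMAT_FULL']

    // created_at is now rendered in UTC regardless of the client's local zone
    const createdAt = dayjs().tz(TIME_ZONE_UTC).format(DATE_FORMAT_FULL);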
@@ -1,15 +1,17 @@
  /// <reference types="C:/phonghq/go-chat-v2/node_modules/.vue-global-types/vue_3.5_0.d.ts" />
- import { ref, onMounted, onUnmounted, watch } from 'vue';
- import DrawerBase from '../../../components/common/drawer/DrawerBase.vue';
+ import { ref, onMounted, onUnmounted, watch, computed } from 'vue';
  import IconPhone from '../../../assets/icons/call/IconPhone.vue';
  import { dataProfile } from '../../../utils/chat/auth.js';
- import { addHandleMqttMessage, connectMqtt, removeHandleMqttMessage, subscribeToTopic, unsubscribeFromTopic } from '../../../plugins/mqtt.js';
- import { TOPIC_DETAIL_CALL } from '../../../constant/mqtt.js';
  import { useCallHelper } from '../../../composable/useCallHelper';
  import IconPhoneCancel from '../../../assets/icons/call/IconPhoneCancel.vue';
  import { addHandleWebSK, removeHandleWebSK } from '../../../plugins/websocket';
- import { callClient, callOutBound } from '../../../utils/chat/call';
- const { handleOffer, handleOfferResponse, call, end, endPc, addIceCandidate, sendOfferOk } = useCallHelper();
+ import { callClient } from '../../../utils/chat/call';
+ import { useAudioStream } from '../../../composable/TestSound';
+ import DrawerBaseCustom from '../../../components/common/drawer/DrawerBaseCustom.vue';
+ import Avatar from '../../../components/chat/customer/Avatar.vue';
+ const { handleOffer, handleOfferResponse, call, end, userRemoter, handleMedia, startIncomingCall } = useCallHelper();
+ const { enqueueSpeakerChunk, processSpeakerQueue } = useAudioStream('');
+ const props = withDefaults(defineProps(), {});
  const STATUS = {
  CONNECTING: 'Connecting...',
  CALLING: 'calling',
@@ -18,18 +20,22 @@ const STATUS = {
  CALL_START: 'in-progress',
  CALL_END: 'completed'
  };
- const STATUS_LABEL = {
- [STATUS.CONNECTING]: 'Connecting...',
- [STATUS.CALLING]: 'Calling...',
- [STATUS.RINGING]: 'Ringing...',
- [STATUS.CONNECT_FAILED]: 'Connect Error',
- [STATUS.CALL_START]: '',
- [STATUS.CALL_END]: 'Call Ended'
- };
+ const STATUS_LABEL = computed(() => {
+ return {
+ [STATUS.CONNECTING]: 'Connecting...',
+ [STATUS.CALLING]: 'Calling...',
+ [STATUS.RINGING]: 'Ringing...',
+ [STATUS.CONNECT_FAILED]: errorMessage.value || 'Connect Error',
+ [STATUS.CALL_START]: '',
+ [STATUS.CALL_END]: 'Call Ended'
+ };
+ });
  const callStatus = ref(STATUS.CONNECTING);
  const duration = ref('00:00');
  const drawerVisible = ref(false);
  const disable = ref(false);
+ const errorMessage = ref('');
+ const drawerVisibleRef = ref(null);
  let timer = null;
  let timeOut = null;
  onMounted(() => {
@@ -43,6 +49,7 @@ onUnmounted(() => {
  clearInterval(timer);
  if (timeOut)
  clearTimeout(timeOut);
+ errorMessage.value = '';
  });
  let dataWebSK = null;
  function startTimer() {
@@ -66,21 +73,28 @@ function endCall() {
  clearInterval(timer);
  disable.value = false;
  drawerVisible.value = false;
+ drawerVisibleRef.value?.close();
+ if (timer)
+ clearInterval(timer);
+ if (timeOut)
+ clearTimeout(timeOut);
+ errorMessage.value = '';
  }
  const open = () => {
  drawerVisible.value = true;
  disable.value = true;
+ drawerVisibleRef.value?.open();
  };
  const startCall = async (data) => {
  try {
  callStatus.value = STATUS.CONNECTING;
  open();
- await callOutBound(data);
- // await call(data)
+ await call(data);
  callStatus.value = STATUS.CALLING;
  }
  catch (e) {
  console.log(e);
+ errorMessage.value = e?.message || JSON.stringify(e);
  callStatus.value = STATUS.CONNECT_FAILED;
  }
  };
@@ -90,6 +104,7 @@ const answer = async () => {
  // sendOfferOk()
  };
  const handleMqttMessage = async (message) => {
+ let da = true;
  if (message?.data?.status == STATUS.RINGING) {
  if (message?.data?.to_number == dataProfile.value?.phone) {
  callStatus.value = STATUS.RINGING;
@@ -101,89 +116,104 @@ const handleMqttMessage = async (message) => {
  }
  }
  else if (message?.data?.status == STATUS.CALL_START) {
- callStatus.value = STATUS.CALL_START;
+ try {
+ await startIncomingCall();
+ startTimer();
+ callStatus.value = STATUS.CALL_START;
+ }
+ catch (e) {
+ console.log(e);
+ endCall();
+ }
  }
  else if (message?.data?.status == STATUS.CALL_END) {
  endCall();
  }
+ else {
+ da = false;
+ handleMedia(message);
+ }
+ if (da)
+ console.log(message);
  };
- // const handleMqttMessage = async (topic: string, message: any) => {
- // if (topic == TOPIC_DETAIL_CALL + dataProfile.value?.id) {
- // console.log(message)
- // if (message?.type === 'offer') {
- // const status = await handleOffer(message)
- // if (status) {
- // callStatus.value = STATUS.CALLING
- // open()
- // }
- // } else if (message?.type === 'offer-response') {
- // await handleOfferResponse(message)
- // callStatus.value = STATUS.CALL_START
- // } else if (message?.type === 'candidate') {
- // addIceCandidate(message)
- // } else if (message?.type === 'end-call') {
- // endPc(message?.link)
- // callStatus.value = STATUS.CALL_END
- // }
- // }
- // }
  const __VLS_exposed = { startCall, endCall };
  defineExpose(__VLS_exposed);
  watch(dataProfile, async (newValue, oldValue) => {
  if (oldValue?.id && oldValue?.id != newValue?.id) {
- unsubscribeFromTopic(TOPIC_DETAIL_CALL + oldValue?.id);
- removeHandleMqttMessage('call-message');
  removeHandleWebSK('call-message');
  }
  if (newValue?.id && oldValue?.id != newValue?.id) {
- await connectMqtt();
- subscribeToTopic(TOPIC_DETAIL_CALL + newValue?.id);
- addHandleMqttMessage('call-message', TOPIC_DETAIL_CALL + newValue?.id, handleMqttMessage);
+ addHandleWebSK('call-message', handleMqttMessage);
  addHandleWebSK('call-message', handleMqttMessage);
  }
  }, { immediate: true });
  debugger; /* PartiallyEnd: #3632/scriptSetup.vue */
+ const __VLS_defaults = {};
  const __VLS_ctx = {
  ...{},
  ...{},
+ ...{},
+ ...{},
  };
  let __VLS_elements;
  let __VLS_components;
  let __VLS_directives;
- /** @type {[typeof DrawerBase, typeof DrawerBase, ]} */ ;
+ /** @type {[typeof DrawerBaseCustom, typeof DrawerBaseCustom, ]} */ ;
  // @ts-ignore
- const __VLS_0 = __VLS_asFunctionalComponent(DrawerBase, new DrawerBase({
- open: (__VLS_ctx.drawerVisible),
+ const __VLS_0 = __VLS_asFunctionalComponent(DrawerBaseCustom, new DrawerBaseCustom({
+ ref: "drawerVisibleRef",
  width: (500),
- disableClose: (__VLS_ctx.disable),
+ disabledClose: (__VLS_ctx.disable),
+ responsive: (__VLS_ctx.responsive),
  }));
  const __VLS_1 = __VLS_0({
- open: (__VLS_ctx.drawerVisible),
+ ref: "drawerVisibleRef",
  width: (500),
- disableClose: (__VLS_ctx.disable),
+ disabledClose: (__VLS_ctx.disable),
+ responsive: (__VLS_ctx.responsive),
  }, ...__VLS_functionalComponentArgsRest(__VLS_0));
- const { default: __VLS_3 } = __VLS_2.slots;
+ /** @type {typeof __VLS_ctx.drawerVisibleRef} */ ;
+ var __VLS_3 = {};
+ const { default: __VLS_5 } = __VLS_2.slots;
  // @ts-ignore
- [drawerVisible, disable,];
+ [disable, responsive, drawerVisibleRef,];
  {
- const { content: __VLS_4 } = __VLS_2.slots;
+ const { content: __VLS_6 } = __VLS_2.slots;
  __VLS_asFunctionalElement(__VLS_elements.div, __VLS_elements.div)({
- ...{ class: "flex flex-col items-center justify-center h-screen bg-chat-haze-300 text-white relative" },
+ ...{ class: "flex flex-col items-center justify-center h-full bg-chat-haze-300 text-white relative" },
  });
  __VLS_asFunctionalElement(__VLS_elements.div, __VLS_elements.div)({
- ...{ class: "w-32 h-32 rounded-full overflow-hidden mb-4 border-4 border-gray-700" },
- });
- __VLS_asFunctionalElement(__VLS_elements.img)({
- src: "https://i.pravatar.cc/300",
- alt: "avatar",
- ...{ class: "w-full h-full object-cover" },
+ ...{ class: "h-40 w-40 rounded-full overflow-hidden mb-4 border-4 border-gray-700 flex-center" },
  });
+ /** @type {[typeof Avatar, ]} */ ;
+ // @ts-ignore
+ const __VLS_7 = __VLS_asFunctionalComponent(Avatar, new Avatar({
+ ...{ class: "" },
+ src: (__VLS_ctx.userRemoter?.avatar ?? ''),
+ id: (__VLS_ctx.userRemoter?.id ?? ''),
+ color: (__VLS_ctx.userRemoter?.color),
+ name: (__VLS_ctx.userRemoter?.username),
+ size: "xxl",
+ }));
+ const __VLS_8 = __VLS_7({
+ ...{ class: "" },
+ src: (__VLS_ctx.userRemoter?.avatar ?? ''),
+ id: (__VLS_ctx.userRemoter?.id ?? ''),
+ color: (__VLS_ctx.userRemoter?.color),
+ name: (__VLS_ctx.userRemoter?.username),
+ size: "xxl",
+ }, ...__VLS_functionalComponentArgsRest(__VLS_7));
+ // @ts-ignore
+ [userRemoter, userRemoter, userRemoter, userRemoter,];
  __VLS_asFunctionalElement(__VLS_elements.div, __VLS_elements.div)({
  ...{ class: "text-center" },
  });
  __VLS_asFunctionalElement(__VLS_elements.h2, __VLS_elements.h2)({
  ...{ class: "text-2xl font-semibold" },
  });
+ (__VLS_ctx.userRemoter?.username);
+ // @ts-ignore
+ [userRemoter,];
  __VLS_asFunctionalElement(__VLS_elements.p, __VLS_elements.p)({
  ...{ class: "text-gray-400 mt-1" },
  });
204
234
  [answer,];
205
235
  /** @type {[typeof IconPhone, ]} */ ;
206
236
  // @ts-ignore
207
- const __VLS_5 = __VLS_asFunctionalComponent(IconPhone, new IconPhone({}));
208
- const __VLS_6 = __VLS_5({}, ...__VLS_functionalComponentArgsRest(__VLS_5));
237
+ const __VLS_11 = __VLS_asFunctionalComponent(IconPhone, new IconPhone({}));
238
+ const __VLS_12 = __VLS_11({}, ...__VLS_functionalComponentArgsRest(__VLS_11));
209
239
  }
210
240
  __VLS_asFunctionalElement(__VLS_elements.button, __VLS_elements.button)({
211
241
  ...{ onClick: (__VLS_ctx.endCall) },
@@ -215,8 +245,8 @@ const { default: __VLS_3 } = __VLS_2.slots;
215
245
  [endCall,];
216
246
  /** @type {[typeof IconPhoneCancel, ]} */ ;
217
247
  // @ts-ignore
218
- const __VLS_9 = __VLS_asFunctionalComponent(IconPhoneCancel, new IconPhoneCancel({}));
219
- const __VLS_10 = __VLS_9({}, ...__VLS_functionalComponentArgsRest(__VLS_9));
248
+ const __VLS_15 = __VLS_asFunctionalComponent(IconPhoneCancel, new IconPhoneCancel({}));
249
+ const __VLS_16 = __VLS_15({}, ...__VLS_functionalComponentArgsRest(__VLS_15));
220
250
  __VLS_asFunctionalElement(__VLS_elements.p, __VLS_elements.p)({
221
251
  ...{ class: "absolute bottom-6 text-gray-500 text-sm" },
222
252
  });
@@ -228,30 +258,28 @@ var __VLS_2;
  __VLS_asFunctionalElement(__VLS_elements.audio, __VLS_elements.audio)({
  id: "go-chat-local-audio",
  autoplay: true,
- muted: true,
  });
  __VLS_asFunctionalElement(__VLS_elements.audio, __VLS_elements.audio)({
  id: "go-chat-remote-audio",
  autoplay: true,
  });
+ /** @type {__VLS_StyleScopedClasses['']} */ ;
  /** @type {__VLS_StyleScopedClasses['flex']} */ ;
  /** @type {__VLS_StyleScopedClasses['flex-col']} */ ;
  /** @type {__VLS_StyleScopedClasses['items-center']} */ ;
  /** @type {__VLS_StyleScopedClasses['justify-center']} */ ;
- /** @type {__VLS_StyleScopedClasses['h-screen']} */ ;
+ /** @type {__VLS_StyleScopedClasses['h-full']} */ ;
  /** @type {__VLS_StyleScopedClasses['bg-chat-haze-300']} */ ;
  /** @type {__VLS_StyleScopedClasses['text-white']} */ ;
  /** @type {__VLS_StyleScopedClasses['relative']} */ ;
- /** @type {__VLS_StyleScopedClasses['w-32']} */ ;
- /** @type {__VLS_StyleScopedClasses['h-32']} */ ;
+ /** @type {__VLS_StyleScopedClasses['h-40']} */ ;
+ /** @type {__VLS_StyleScopedClasses['w-40']} */ ;
  /** @type {__VLS_StyleScopedClasses['rounded-full']} */ ;
  /** @type {__VLS_StyleScopedClasses['overflow-hidden']} */ ;
  /** @type {__VLS_StyleScopedClasses['mb-4']} */ ;
  /** @type {__VLS_StyleScopedClasses['border-4']} */ ;
  /** @type {__VLS_StyleScopedClasses['border-gray-700']} */ ;
- /** @type {__VLS_StyleScopedClasses['w-full']} */ ;
- /** @type {__VLS_StyleScopedClasses['h-full']} */ ;
- /** @type {__VLS_StyleScopedClasses['object-cover']} */ ;
+ /** @type {__VLS_StyleScopedClasses['flex-center']} */ ;
  /** @type {__VLS_StyleScopedClasses['text-center']} */ ;
  /** @type {__VLS_StyleScopedClasses['text-2xl']} */ ;
  /** @type {__VLS_StyleScopedClasses['font-semibold']} */ ;
@@ -285,7 +313,11 @@ __VLS_asFunctionalElement(__VLS_elements.audio, __VLS_elements.audio)({
  /** @type {__VLS_StyleScopedClasses['bottom-6']} */ ;
  /** @type {__VLS_StyleScopedClasses['text-gray-500']} */ ;
  /** @type {__VLS_StyleScopedClasses['text-sm']} */ ;
+ // @ts-ignore
+ var __VLS_4 = __VLS_3;
  const __VLS_export = (await import('vue')).defineComponent({
  setup: () => (__VLS_exposed),
+ __typeProps: {},
+ props: {},
  });
  export default {};
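
The compiled output above reflects two source-level changes in the call view: the drawer is now driven imperatively through a template ref on DrawerBaseCustom (whose exposed open()/close() replace the old open/disableClose prop pair, with a responsive prop forwarded as well), and connect failures now surface their message through the computed STATUS_LABEL. A rough <script setup> sketch of that pattern, reconstructed from the generated code rather than the published source:

    import { ref, computed } from 'vue';

    // bound to <DrawerBaseCustom ref="drawerVisibleRef" ...> in the template
    const drawerVisibleRef = ref(null);
    const disable = ref(false);
    const errorMessage = ref('');

    const open = () => {
      disable.value = true;
      drawerVisibleRef.value?.open();   // method exposed by the drawer via defineExpose
    };

    const endCall = () => {
      disable.value = false;
      drawerVisibleRef.value?.close();
      errorMessage.value = '';
    };

    // the failure label now carries the last error, falling back to a generic string
    const STATUS_LABEL = computed(() => ({
      CONNECT_FAILED: errorMessage.value || 'Connect Error',
      // ...other statuses unchanged
    }));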
@@ -0,0 +1,128 @@
+ /// <reference types="C:/phonghq/go-chat-v2/node_modules/.vue-global-types/vue_3.5_0.d.ts" />
+ import { computed, ref } from 'vue';
+ const props = withDefaults(defineProps(), {
+ position: 'right',
+ showTitle: true,
+ width: 550
+ });
+ const emits = defineEmits();
+ const show = ref(false);
+ const boxStyles = computed(() => {
+ let transform = '';
+ if (show.value) {
+ transform = 'translateX(0)';
+ }
+ else if (props.position === 'right') {
+ transform = 'translateX(100%)';
+ }
+ else if (props.position === 'left') {
+ transform = 'translateX(-100%)';
+ }
+ return {
+ width: props.responsive == 'mobile' ? '100%' : props.width + 'px',
+ [props.position]: '0',
+ transform
+ };
+ });
+ // CREATE FUNCTION
+ const open = () => {
+ show.value = true;
+ };
+ const close = async () => {
+ await emits('afterClose');
+ show.value = false;
+ };
+ const __VLS_exposed = { close, open };
+ defineExpose(__VLS_exposed);
+ debugger; /* PartiallyEnd: #3632/scriptSetup.vue */
+ const __VLS_defaults = {
+ position: 'right',
+ showTitle: true,
+ width: 550
+ };
+ const __VLS_ctx = {
+ ...{},
+ ...{},
+ ...{},
+ ...{},
+ ...{},
+ };
+ let __VLS_elements;
+ let __VLS_components;
+ let __VLS_directives;
+ /** @type {__VLS_StyleScopedClasses['drawer-box']} */ ;
+ __VLS_asFunctionalElement(__VLS_elements.div, __VLS_elements.div)({
+ ...{ class: "absolute top-0 bottom-0 right-0 left-0 z-[1500] drawer" },
+ ...{ class: (__VLS_ctx.show ? 'opacity-1' : 'pointer-events-none opacity-0 delay-300') },
+ });
+ // @ts-ignore
+ [show,];
+ if (!__VLS_ctx.disabledClose) {
+ // @ts-ignore
+ [disabledClose,];
+ __VLS_asFunctionalElement(__VLS_elements.div, __VLS_elements.div)({
+ ...{ onClick: (__VLS_ctx.close) },
+ ...{ onTouchstart: (__VLS_ctx.close) },
+ ...{ class: "absolute w-full h-full z-[1500]" },
+ });
+ // @ts-ignore
+ [close, close,];
+ }
+ __VLS_asFunctionalElement(__VLS_elements.div, __VLS_elements.div)({
+ ...{ class: "relative w-full h-full" },
+ });
+ __VLS_asFunctionalElement(__VLS_elements.div, __VLS_elements.div)({
+ ...{ class: "duration-300 ease-out absolute top-0 bg-white flex flex-col drawer-box" },
+ ...{ style: (__VLS_ctx.boxStyles) },
+ });
+ // @ts-ignore
+ [boxStyles,];
+ __VLS_asFunctionalElement(__VLS_elements.div, __VLS_elements.div)({
+ ...{ class: "edit-scrollbar grow overflow-auto w-full min-w-full drawer-content" },
+ });
+ var __VLS_0 = {};
+ __VLS_asFunctionalElement(__VLS_elements.div, __VLS_elements.div)({
+ ...{ class: "flex p-4 content" },
+ });
+ __VLS_asFunctionalElement(__VLS_elements.p, __VLS_elements.p)({});
+ /** @type {__VLS_StyleScopedClasses['absolute']} */ ;
+ /** @type {__VLS_StyleScopedClasses['top-0']} */ ;
+ /** @type {__VLS_StyleScopedClasses['bottom-0']} */ ;
+ /** @type {__VLS_StyleScopedClasses['right-0']} */ ;
+ /** @type {__VLS_StyleScopedClasses['left-0']} */ ;
+ /** @type {__VLS_StyleScopedClasses['z-[1500]']} */ ;
+ /** @type {__VLS_StyleScopedClasses['drawer']} */ ;
+ /** @type {__VLS_StyleScopedClasses['absolute']} */ ;
+ /** @type {__VLS_StyleScopedClasses['w-full']} */ ;
+ /** @type {__VLS_StyleScopedClasses['h-full']} */ ;
+ /** @type {__VLS_StyleScopedClasses['z-[1500]']} */ ;
+ /** @type {__VLS_StyleScopedClasses['relative']} */ ;
+ /** @type {__VLS_StyleScopedClasses['w-full']} */ ;
+ /** @type {__VLS_StyleScopedClasses['h-full']} */ ;
+ /** @type {__VLS_StyleScopedClasses['duration-300']} */ ;
+ /** @type {__VLS_StyleScopedClasses['ease-out']} */ ;
+ /** @type {__VLS_StyleScopedClasses['absolute']} */ ;
+ /** @type {__VLS_StyleScopedClasses['top-0']} */ ;
+ /** @type {__VLS_StyleScopedClasses['bg-white']} */ ;
+ /** @type {__VLS_StyleScopedClasses['flex']} */ ;
+ /** @type {__VLS_StyleScopedClasses['flex-col']} */ ;
+ /** @type {__VLS_StyleScopedClasses['drawer-box']} */ ;
+ /** @type {__VLS_StyleScopedClasses['edit-scrollbar']} */ ;
+ /** @type {__VLS_StyleScopedClasses['grow']} */ ;
+ /** @type {__VLS_StyleScopedClasses['overflow-auto']} */ ;
+ /** @type {__VLS_StyleScopedClasses['w-full']} */ ;
+ /** @type {__VLS_StyleScopedClasses['min-w-full']} */ ;
+ /** @type {__VLS_StyleScopedClasses['drawer-content']} */ ;
+ /** @type {__VLS_StyleScopedClasses['flex']} */ ;
+ /** @type {__VLS_StyleScopedClasses['p-4']} */ ;
+ /** @type {__VLS_StyleScopedClasses['content']} */ ;
+ // @ts-ignore
+ var __VLS_1 = __VLS_0;
+ const __VLS_base = (await import('vue')).defineComponent({
+ setup: () => (__VLS_exposed),
+ __typeEmits: {},
+ __typeProps: {},
+ props: {},
+ });
+ const __VLS_export = {};
+ export default {};
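
The new DrawerBaseCustom above is controlled entirely through its exposed methods: it keeps a local show ref, exposes open()/close() via defineExpose, awaits an 'afterClose' emit before hiding, renders its click/touch backdrop only while disabledClose is falsy, and computes the panel's width and translateX from the position, width, and responsive props (full width when responsive is 'mobile'). A hypothetical parent-side sketch of that contract, inferred from this compiled output rather than the published SFC source:

    import { ref } from 'vue';

    // template (illustrative):
    // <DrawerBaseCustom ref="drawer" :width="500" position="right" :disabled-close="busy">
    //   <template #content> ...drawer body... </template>
    // </DrawerBaseCustom>
    const drawer = ref(null);
    const busy = ref(false);

    const show = () => drawer.value?.open();    // flips the drawer's internal `show` ref to true
    const hide = () => drawer.value?.close();   // emits 'afterClose', then hides the panel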
@@ -21,11 +21,11 @@ DrawerOverlay;
  // @ts-ignore
  const __VLS_1 = __VLS_asFunctionalComponent(__VLS_0, new __VLS_0({
  ...(__VLS_ctx.delegatedProps),
- ...{ class: (__VLS_ctx.cn('fixed inset-0 z-50 bg-black/60', props.class)) },
+ ...{ class: (__VLS_ctx.cn('absolute inset-0 z-50 bg-black/60', props.class)) },
  }));
  const __VLS_2 = __VLS_1({
  ...(__VLS_ctx.delegatedProps),
- ...{ class: (__VLS_ctx.cn('fixed inset-0 z-50 bg-black/60', props.class)) },
+ ...{ class: (__VLS_ctx.cn('absolute inset-0 z-50 bg-black/60', props.class)) },
  }, ...__VLS_functionalComponentArgsRest(__VLS_1));
  var __VLS_4 = {};
  // @ts-ignore
@@ -0,0 +1,196 @@
+ import { ref, onBeforeUnmount } from 'vue';
+ export function useAudioStream(wsUrl) {
+ const SAMPLE_RATE = 24000;
+ const CHUNK_SIZE = 480;
+ const PREBUFFER_SEC = 0.4;
+ let audioCtx;
+ let processor;
+ let input;
+ let stream;
+ const ws = ref(null);
+ const statusText = ref('Tap to Speak with Vico');
+ // Speaker queue
+ let speakerQueue = [];
+ let nextPlayTime = 0;
+ // UI state
+ const recording = ref(false);
+ const micLevel = ref(0);
+ // Status management
+ const STATUS = {
+ IDLE: 'Tap to Speak with Vico',
+ CONNECTING: 'Connecting...',
+ LISTENING: 'Listening...',
+ SPEAKING: 'Speaking...'
+ };
+ let currentStatus = STATUS.IDLE;
+ function setStatus(newStatus) {
+ if (currentStatus !== newStatus) {
+ currentStatus = newStatus;
+ statusText.value = newStatus;
+ }
+ }
+ // 🎤 Float32 → PCM16
+ function floatTo16BitPCM(float32Array) {
+ const buffer = new ArrayBuffer(float32Array.length * 2);
+ const view = new DataView(buffer);
+ for (let i = 0; i < float32Array.length; i++) {
+ let s = Math.max(-1, Math.min(1, float32Array[i]));
+ view.setInt16(i * 2, s < 0 ? s * 0x8000 : s * 0x7fff, true);
+ }
+ return buffer;
+ }
+ // 🔊 PCM16 → Float32
+ function int16ToFloat32(int16Array) {
+ const float32 = new Float32Array(int16Array.length);
+ for (let i = 0; i < int16Array.length; i++) {
+ float32[i] = int16Array[i] / 32768;
+ }
+ return float32;
+ }
+ // 📥 enqueue speaker chunk
+ async function enqueueSpeakerChunk(arrayBuffer) {
+ const int16View = new Int16Array(arrayBuffer);
+ const float32Data = int16ToFloat32(int16View);
+ speakerQueue.push(float32Data);
+ }
+ // 🔊 process queue
+ function processSpeakerQueue() {
+ try {
+ if (speakerQueue.length > 0) {
+ const chunk = speakerQueue.shift();
+ if (chunk) {
+ const audioBuffer = audioCtx.createBuffer(1, chunk.length, SAMPLE_RATE);
+ audioBuffer.getChannelData(0).set(chunk);
+ const source = audioCtx.createBufferSource();
+ source.buffer = audioBuffer;
+ source.connect(audioCtx.destination);
+ if (nextPlayTime < audioCtx.currentTime + 0.05) {
+ nextPlayTime = audioCtx.currentTime + PREBUFFER_SEC;
+ }
+ source.start();
+ nextPlayTime += audioBuffer.duration;
+ setStatus(STATUS.SPEAKING);
+ }
+ }
+ else if (recording.value) {
+ setStatus(STATUS.LISTENING);
+ }
+ }
+ catch (e) {
+ console.log(e);
+ }
+ requestAnimationFrame(processSpeakerQueue);
+ }
+ // 🎤 start mic
+ async function startRecording() {
+ audioCtx = new AudioContext({ sampleRate: SAMPLE_RATE });
+ return;
+ stream = await navigator.mediaDevices.getUserMedia({ audio: true });
+ input = audioCtx.createMediaStreamSource(stream);
+ processor = audioCtx.createScriptProcessor(1024, 1, 1);
+ processor.onaudioprocess = (e) => {
+ if (!ws.value || ws.value.readyState !== WebSocket.OPEN)
+ return;
+ const inputData = e.inputBuffer.getChannelData(0);
+ // calculate mic level
+ let sum = 0;
+ for (let i = 0; i < inputData.length; i++)
+ sum += inputData[i] ** 2;
+ micLevel.value = Math.sqrt(sum / inputData.length);
+ // chunking & send
+ for (let i = 0; i < inputData.length; i += CHUNK_SIZE) {
+ const slice = inputData.slice(i, i + CHUNK_SIZE);
+ const binaryChunk = floatTo16BitPCM(slice);
+ ws.value.send(binaryChunk);
+ }
+ };
+ input.connect(processor);
+ processor.connect(audioCtx.destination);
+ recording.value = true;
+ setStatus(STATUS.LISTENING);
+ }
+ // ⏹ stop mic
+ function stopRecording() {
+ recording.value = false;
+ processor?.disconnect();
+ input?.disconnect();
+ stream?.getTracks().forEach((t) => t.stop());
+ if (audioCtx && audioCtx.state !== 'closed') {
+ audioCtx
+ .close()
+ .then(() => console.log('AudioContext closed successfully.'))
+ .catch((err) => console.error('Error closing AudioContext:', err))
+ .finally(() => (micLevel.value = 0));
+ }
+ setStatus(STATUS.IDLE);
+ }
+ const getAudioContext = () => {
+ if (!audioCtx || audioCtx.state === 'closed') {
+ audioCtx = new AudioContext({ sampleRate: SAMPLE_RATE });
+ }
+ return audioCtx;
+ };
+ const safeResumeAudio = () => {
+ const ctx = getAudioContext();
+ if (ctx.state === 'suspended') {
+ ctx.resume().catch((err) => console.error('Error resuming AudioContext:', err));
+ }
+ };
+ function connect() {
+ if (ws.value && ws.value.readyState === WebSocket.OPEN)
+ return;
+ setStatus(STATUS.CONNECTING);
+ ws.value = new WebSocket(wsUrl);
+ ws.value.binaryType = 'arraybuffer';
+ ws.value.onopen = () => {
+ console.log('✅ WS connected');
+ startRecording();
+ processSpeakerQueue();
+ };
+ ws.value.onmessage = (event) => {
+ if (event.data instanceof ArrayBuffer) {
+ enqueueSpeakerChunk(event.data);
+ return;
+ }
+ if (typeof event.data === 'string') {
+ try {
+ const msg = JSON.parse(event.data);
+ if (msg.type === 'AudioStop' || msg.code === 'UserStartedSpeaking') {
+ // speakerQueue.length = 0
+ nextPlayTime = 0;
+ setStatus(STATUS.LISTENING);
+ return;
+ }
+ }
+ catch (err) {
+ console.warn('⚠️ Parse JSON error:', err, event.data);
+ }
+ }
+ console.log('⚠️ Unknown WS message, closing...');
+ disconnect();
+ };
+ ws.value.onclose = () => {
+ console.log('❌ WS closed');
+ stopRecording();
+ };
+ }
+ function disconnect() {
+ ws.value?.close();
+ stopRecording();
+ }
+ onBeforeUnmount(() => {
+ disconnect();
+ });
+ return {
+ ws,
+ statusText,
+ micLevel,
+ recording,
+ connect,
+ disconnect,
+ resumeAudio: safeResumeAudio,
+ enqueueSpeakerChunk,
+ processSpeakerQueue,
+ startRecording
+ };
+ }
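
The useAudioStream composable above streams microphone audio to a WebSocket as 24 kHz PCM16 (480-sample chunks) and plays incoming PCM16 frames back through a small prebuffered queue driven by requestAnimationFrame. Note that, as published, startRecording() returns immediately after creating the AudioContext, so the capture path is effectively stubbed out, and the call component constructs the composable with an empty URL and only uses enqueueSpeakerChunk/processSpeakerQueue. A hypothetical standalone usage, with a placeholder endpoint:

    import { useAudioStream } from '../../../composable/TestSound'; // path as imported by the call component in this diff

    // 'wss://example.invalid/audio' is a placeholder endpoint, not one defined by the package
    const { connect, disconnect, statusText, micLevel, recording } =
      useAudioStream('wss://example.invalid/audio');

    connect();     // opens the socket; on open it starts the (currently stubbed) mic path and the playback loop
    // ...later
    disconnect();  // closes the socket and tears down the recorder/AudioContext via stopRecording()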