vue_zhongyou 1.0.22 → 1.0.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "vue_zhongyou",
3
- "version": "1.0.22",
3
+ "version": "1.0.23",
4
4
  "description": "",
5
5
  "main": "index.js",
6
6
  "keywords": [],
@@ -0,0 +1,591 @@
1
+ template
2
+ <template>
3
+ <div class="input-area">
4
+ <!-- 输入区域 -->
5
+ <div v-if="!isReadyRecording" class="input-wrapper">
6
+ <van-field
7
+ v-model="inputText"
8
+ type="textarea"
9
+ rows="1"
10
+ autosize
11
+ placeholder="输入您的问题..."
12
+ :disabled="isLoading"
13
+ @keydown.enter.exact.prevent="handleEnter"
14
+ @keydown.shift.enter.exact="handleShiftEnter"
15
+ @focus="handleInputFocus"
16
+ class="chat-input"
17
+ />
18
+ <img v-if="inputText.length === 0" @click="handleClickBtn('recording')" src="@/assets/svg/voice.svg" alt="清除图标" class="clear-icon" />
19
+ <img v-else @click="handleEnter" src="@/assets/svg/send.svg" alt="发送图标" class="send-icon" />
20
+ </div>
21
+ <!-- 录音区域 -->
22
+ <div v-else
23
+ class="voice-container"
24
+ :class="{
25
+ recording: isRecording
26
+ }"
27
+ @touchstart.prevent="startRecord" @touchend="handleTouchEnd"
28
+ @touchmove="handleTouchMove"
29
+ @touchcancel="cancelRecord"
30
+ @mousedown.prevent="startRecord"
31
+ @mouseup="handleMouseUp"
32
+ @mouseleave="handleMouseLeave"
33
+ >
34
+ <div class="btn-tip" v-show="isRecording">{{ isTouchInside ? '松手发送,上移取消' : '松手取消' }}</div>
35
+ <div v-if="isRecording" class="waveform-container" ref="waveformRef"></div>
36
+ <div v-show="!isRecording" class="input-wrapper" style="display: flex; align-items: center;max-width: 100%;gap: 8px;">
37
+ <div class="hold-say">按住说话</div>
38
+ <img src="@/assets/svg/sendText.svg" alt="录音图标" class="voice-icon"
39
+ @mousedown.stop @touchstart.stop @click.stop="handleClickBtn('cancel')"
40
+ />
41
+ </div>
42
+ </div>
43
+ </div>
44
+ </template>
45
+
46
+ <script setup>
47
+ import { ref, onMounted, onUnmounted,nextTick } from 'vue'
48
+ import WaveSurfer from 'wavesurfer.js'
49
+
50
// Component props: parent signals when a chat reply is in flight.
const props = defineProps({
  isLoading: {
    type: Boolean,
    default: false
  }
})

// Emitted when the user submits text.
const emit = defineEmits(['sendMessage'])

const inputText = ref('')          // current textarea content
const isReadyRecording = ref(false) // true = hold-to-talk UI is shown
61
+
62
+
63
+
64
// Enter (or send icon): emit the text if it is non-blank, then clear the field.
const handleEnter = () => {
  const text = inputText.value
  if (!text.trim()) return
  emit('sendMessage', text)
  inputText.value = ''
}
70
+
71
// Shift+Enter inserts a newline instead of sending.
const handleShiftEnter = () => {
  inputText.value = `${inputText.value}\n`
}
74
+
75
// While a reply is loading, focusing the field clears it shortly after
// (the 500 ms delay lets the keyboard/focus animation settle first).
const handleInputFocus = () => {
  if (!props.isLoading) return
  setTimeout(() => {
    inputText.value = ''
  }, 500)
}
82
// Toggle between the text-input UI and the hold-to-talk UI.
const handleClickBtn = (type) => {
  switch (type) {
    case 'recording':
      isReadyRecording.value = true
      break
    case 'cancel':
      isReadyRecording.value = false
      break
  }
}
89
+
90
// Let the parent component read/write the input text directly.
defineExpose({ inputText })
93
+
94
+
95
+
96
+ // 录音逻辑
97
+
98
// --- Recording state ---
const isRecording = ref(false)        // a take is currently in progress
const audioUrl = ref('')              // object URL of the finished WAV
const recordTip = ref('')             // user-facing status / error text
const isTouchInside = ref(true)       // finger currently over the record button
const voiceButtonRef = ref(null)
const waveformRef = ref(null)         // container for the live waveform canvas
let mediaRecorder = null              // MediaRecorder instance
let stream = null                     // microphone MediaStream
let recordTimer = null                // 1-minute cutoff timer handle
const audioChunks = ref([])           // raw recorded Blob chunks
const waveSurferInstance = ref(null)  // WaveSurfer handle (destroyed before canvas drawing)
const audioContext = ref(null)        // Web Audio context for the waveform
const analyser = ref(null)            // AnalyserNode feeding the bars
const source = ref(null)              // MediaStreamAudioSourceNode
const animationFrame = ref(null)      // requestAnimationFrame id
const dataArray = ref(null)           // frequency byte buffer
114
+
115
// Component teardown: abort any in-flight recording, then make sure every
// audio/animation resource is released.
onUnmounted(() => {
  cancelRecord()

  const frame = animationFrame.value
  if (frame) {
    cancelAnimationFrame(frame)
  }

  const surfer = waveSurferInstance.value
  if (surfer) {
    surfer.destroy()
    waveSurferInstance.value = null
  }

  const ctx = audioContext.value
  if (ctx) {
    ctx.close()
    audioContext.value = null
  }
})
130
+ // 开始录音(使用原生MediaRecorder)
131
// Start recording from the microphone with MediaRecorder, show the live
// waveform, and auto-stop after one minute. Errors surface via `recordTip`.
const startRecord = async () => {
  try {
    // Bail out on browsers without the recording API.
    if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
      recordTip.value = '当前浏览器不支持录音功能'
      return
    }

    // Ask for microphone permission and open the audio stream.
    stream = await navigator.mediaDevices.getUserMedia({ audio: true })

    // Pick the best container/codec the browser supports.
    let options = { mimeType: 'audio/webm' }
    if (MediaRecorder.isTypeSupported('audio/webm;codecs=opus')) {
      options = { mimeType: 'audio/webm;codecs=opus' }
    } else if (MediaRecorder.isTypeSupported('audio/webm')) {
      options = { mimeType: 'audio/webm' }
    } else if (MediaRecorder.isTypeSupported('audio/ogg;codecs=opus')) {
      options = { mimeType: 'audio/ogg;codecs=opus' }
    } else if (MediaRecorder.isTypeSupported('audio/mp4')) {
      options = { mimeType: 'audio/mp4' }
    }

    mediaRecorder = new MediaRecorder(stream, options)
    audioChunks.value = []
    recordTip.value = ''

    // Collect recorded data as it becomes available.
    mediaRecorder.ondataavailable = (e) => {
      audioChunks.value.push(e.data)
    }

    mediaRecorder.start()
    isRecording.value = true
    recordTip.value = '正在录音...'

    // Hard limit: stop automatically after 60 seconds.
    recordTimer = setTimeout(() => {
      stopRecord();
      recordTip.value = '录音已超时(最长1分钟)';
    }, 60 * 1000);

    // Spin up the live waveform visualisation.
    await createRealTimeWaveform();
  } catch (err) {
    console.error('录音开启失败:', err)
    const errMap = {
      'NotAllowedError': '麦克风权限被拒绝!请开启',
      'NotFoundError': '未检测到麦克风',
      'SecurityError': '请在HTTPS环境下使用'
    };
    recordTip.value = errMap[err.name] || `录音失败:${err.message}`;
    isRecording.value = false
    // FIX: if getUserMedia succeeded but a later step threw, the original
    // left the microphone open (browser recording indicator stayed on).
    // Release the tracks so a failed start does not leak the stream.
    if (stream) {
      stream.getTracks().forEach(track => track.stop())
      stream = null
    }
  }
}
187
+
188
+ // 停止录音
189
// Stop an in-progress recording: release the microphone, convert the
// captured audio to a 16-bit PCM WAV Blob, and expose it via `audioUrl`.
// No-op if nothing is being recorded.
const stopRecord = () => {
  if (!isRecording.value || !mediaRecorder) return
  if (mediaRecorder.state !== 'recording') return

  // Clear the 1-minute safety timer.
  if (recordTimer) {
    clearTimeout(recordTimer);
    recordTimer = null;
  }

  // Helper: write an ASCII string into the WAV header.
  const writeString = (view, offset, string) => {
    for (let i = 0; i < string.length; i++) {
      view.setUint8(offset + i, string.charCodeAt(i))
    }
  }

  // Helper: convert 32-bit float samples in [-1, 1] to little-endian
  // 16-bit PCM starting at `offset`.
  const floatTo16BitPCM = (view, offset, buffer) => {
    for (let i = 0; i < buffer.length; i++, offset += 2) {
      const s = Math.max(-1, Math.min(1, buffer[i]))
      view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true)
    }
  }

  // Convert the recorded Blob (webm/ogg/mp4) into a canonical WAV Blob.
  // Plain async function — the original wrapped this in the
  // `new Promise(async …)` anti-pattern for no benefit.
  const convertToWav = async (blob) => {
    // Decode at 16 kHz, the sample rate most speech-to-text services expect.
    const ctx = new (window.AudioContext || window.webkitAudioContext)({
      sampleRate: 16000
    })
    try {
      const arrayBuffer = await blob.arrayBuffer()
      const audioBuffer = await ctx.decodeAudioData(arrayBuffer)

      const numOfChan = audioBuffer.numberOfChannels
      const length = audioBuffer.length * numOfChan * 2
      const buffer = new ArrayBuffer(44 + length)
      const view = new DataView(buffer)

      // --- 44-byte RIFF/WAVE PCM header ---
      writeString(view, 0, 'RIFF')
      // FIX: RIFF chunk size is (total file size - 8) = 36 + data length;
      // the original wrote 32 + length.
      view.setUint32(4, 36 + length, true)
      writeString(view, 8, 'WAVE')
      writeString(view, 12, 'fmt ')
      view.setUint32(16, 16, true)                                     // fmt chunk size
      view.setUint16(20, 1, true)                                      // PCM format
      view.setUint16(22, numOfChan, true)                              // channel count
      view.setUint32(24, audioBuffer.sampleRate, true)                 // sample rate
      view.setUint32(28, audioBuffer.sampleRate * numOfChan * 2, true) // byte rate
      view.setUint16(32, numOfChan * 2, true)                          // block align
      view.setUint16(34, 16, true)                                     // bits per sample
      writeString(view, 36, 'data')
      view.setUint32(40, length, true)                                 // data length

      if (numOfChan === 1) {
        floatTo16BitPCM(view, 44, audioBuffer.getChannelData(0))
      } else {
        // FIX: multi-channel WAV data must interleave samples per frame
        // (L R L R …). The original wrote channel 0 as one block followed
        // by channel 1, producing corrupt stereo audio.
        const channels = []
        for (let c = 0; c < numOfChan; c++) {
          channels.push(audioBuffer.getChannelData(c))
        }
        let offset = 44
        for (let i = 0; i < audioBuffer.length; i++) {
          for (let c = 0; c < numOfChan; c++) {
            const s = Math.max(-1, Math.min(1, channels[c][i]))
            view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true)
            offset += 2
          }
        }
      }

      return new Blob([view], { type: 'audio/wav' })
    } finally {
      // FIX: the original leaked one AudioContext per recording; browsers
      // cap the number of live contexts.
      ctx.close()
    }
  }

  // Attach the completion handler BEFORE stopping, so it is guaranteed to
  // be in place when the 'stop' event fires.
  mediaRecorder.onstop = async () => {
    try {
      console.log(audioChunks.value);

      // Use the MIME type the recorder actually produced.
      const audioBlob = new Blob(audioChunks.value, { type: mediaRecorder.mimeType })

      const wavBlob = await convertToWav(audioBlob)

      audioUrl.value = URL.createObjectURL(wavBlob)
      recordTip.value = '录音完成,已自动播放'

      // Hand wavBlob to the backend for speech-to-text, e.g.
      // new FormData().append('file', wavBlob, 'voice.wav')
      console.log('原始录音文件Blob对象:', audioBlob)
      console.log('原始录音文件MIME类型:', mediaRecorder.mimeType)
      console.log('原始录音文件大小:', audioBlob.size)
      console.log('WAV格式录音文件Blob对象:', wavBlob)
      console.log('WAV格式录音文件MIME类型:', wavBlob.type)
      console.log('WAV格式录音文件大小:', wavBlob.size)
    } catch (error) {
      console.error('音频转换失败:', error)
      recordTip.value = '音频转换失败'
    }
  }

  // Stop recording and release the microphone.
  mediaRecorder.stop()
  stream.getTracks().forEach(track => track.stop())
  isRecording.value = false

  // Tear down the live waveform.
  stopRealTimeWaveform();
}
313
+
314
+ // 检查触摸点是否在元素内
315
// True when the event's touch point lies inside `element`'s bounding rect
// (used on release to decide between send and cancel). Falls back to
// changedTouches for touchend, where `touches` is empty.
const isTouchWithinElement = (event, element) => {
  const point = event.touches[0] || event.changedTouches[0];
  const { left, right, top, bottom } = element.getBoundingClientRect();
  const withinX = point.clientX >= left && point.clientX <= right;
  const withinY = point.clientY >= top && point.clientY <= bottom;
  return withinX && withinY;
};
325
+
326
+ // 检查鼠标是否在元素内
327
// True when the mouse event's coordinates lie inside `element`'s bounding
// rect (desktop counterpart of isTouchWithinElement).
const isMouseWithinElement = (event, element) => {
  const { left, right, top, bottom } = element.getBoundingClientRect();
  const withinX = event.clientX >= left && event.clientX <= right;
  const withinY = event.clientY >= top && event.clientY <= bottom;
  return withinX && withinY;
};
336
+
337
+ // 触摸移动事件处理
338
// Track whether the moving finger is still over the record button, which
// drives the "release to send / slide up to cancel" hint.
const handleTouchMove = (event) => {
  isTouchInside.value = isTouchWithinElement(event, event.currentTarget);
};
342
+
343
+ // 触摸结束事件处理
344
// Touch release: send if the finger is still on the button, cancel otherwise.
const handleTouchEnd = (event) => {
  console.log('触摸结束事件触发');
  if (!isRecording.value) return;

  if (isTouchWithinElement(event, event.currentTarget)) {
    stopRecord();   // released inside the button — finish and send
  } else {
    cancelRecord(); // released outside — discard the take
  }
};
359
+
360
+ // 鼠标抬起事件处理
361
// Mouse release: send if the cursor is still on the button, cancel otherwise.
const handleMouseUp = (event) => {
  console.log('鼠标抬起事件触发');
  if (!isRecording.value) return;

  if (isMouseWithinElement(event, event.currentTarget)) {
    stopRecord();   // released inside the button — finish and send
  } else {
    cancelRecord(); // released outside — discard the take
  }
};
376
+
377
+ // 鼠标离开事件处理
378
// Dragging the mouse off the button while recording aborts the take.
const handleMouseLeave = (event) => {
  console.log('鼠标离开按钮区域');
  if (isRecording.value) {
    cancelRecord();
  }
};
386
+
387
+ // 创建实时波形图
388
// Render a live frequency-bar visualisation of the mic stream on a <canvas>
// inside `waveformRef`. The animation loop stops itself once `isRecording`
// flips false.
const createRealTimeWaveform = async () => {
  if (!mediaRecorder) return

  // Wait for v-if to render the waveform container.
  await nextTick();

  // Drop any stale WaveSurfer instance.
  if (waveSurferInstance.value) {
    waveSurferInstance.value.destroy();
    waveSurferInstance.value = null;
  }

  // FIX: check the container BEFORE creating audio nodes — the original
  // returned after constructing the AudioContext/analyser/source, leaking
  // a live AudioContext whenever the container was missing.
  const container = waveformRef.value;
  if (!container) return;

  // Wire microphone stream -> analyser.
  audioContext.value = new (window.AudioContext || window.webkitAudioContext)();
  analyser.value = audioContext.value.createAnalyser();
  source.value = audioContext.value.createMediaStreamSource(stream);
  source.value.connect(analyser.value);

  analyser.value.fftSize = 256;
  const bufferLength = analyser.value.frequencyBinCount;
  dataArray.value = new Uint8Array(bufferLength);

  // Start from a clean container, then mount the canvas.
  container.innerHTML = '';
  const canvas = document.createElement('canvas');
  canvas.width = container.clientWidth || 160;
  canvas.height = container.clientHeight || 50;
  canvas.style.width = '100%';
  canvas.style.height = '100%';
  container.appendChild(canvas);

  const ctx = canvas.getContext('2d');

  // Per-frame draw loop driven by requestAnimationFrame.
  const draw = () => {
    if (!isRecording.value || !analyser.value || !ctx) {
      if (animationFrame.value) {
        cancelAnimationFrame(animationFrame.value);
        animationFrame.value = null;
      }
      return;
    }

    analyser.value.getByteFrequencyData(dataArray.value);

    ctx.clearRect(0, 0, canvas.width, canvas.height);
    ctx.fillStyle = '#fff';

    const barWidth = 2;
    const barSpacing = 2;
    const totalBarWidth = barWidth + barSpacing;

    for (let i = 0; i < dataArray.value.length; i++) {
      const percent = dataArray.value[i] / 255; // normalise byte to 0..1
      // Clamp bar height between 10% and 60% of the canvas height and
      // centre it vertically.
      const minHeight = canvas.height * 0.1;
      const maxHeight = canvas.height * 0.6;
      const height = Math.max(minHeight, Math.min(canvas.height * percent, maxHeight));
      const offset = (canvas.height - height) / 2;
      ctx.fillRect(i * totalBarWidth, offset, barWidth, height);
    }

    animationFrame.value = requestAnimationFrame(draw);
  };

  draw();
};
476
+
477
+ // 停止实时波形图
478
// Halt the waveform animation and release all Web Audio resources.
const stopRealTimeWaveform = () => {
  const frame = animationFrame.value
  if (frame) {
    cancelAnimationFrame(frame)
    animationFrame.value = null
  }

  const ctx = audioContext.value
  if (ctx) {
    ctx.close()
    audioContext.value = null
  }

  // Drop the node references so they can be garbage-collected.
  analyser.value = null
  source.value = null

  // Empty out the canvas container.
  const container = waveformRef.value
  if (container) {
    container.innerHTML = ''
  }
};
503
+
504
+ // 取消录音(手指滑出按钮、页面销毁时触发)
505
// Abort the current recording and discard its data (finger slid off the
// button, mouse left, or the component is being unmounted).
const cancelRecord = () => {
  if (isRecording.value && mediaRecorder) {
    // Clear the 1-minute safety timer.
    if (recordTimer) {
      clearTimeout(recordTimer);
      recordTimer = null;
    }
    // FIX: guard on recorder state (as stopRecord already does) —
    // MediaRecorder.stop() throws InvalidStateError on an inactive
    // recorder, e.g. if cancel races with the timeout's stopRecord().
    if (mediaRecorder.state !== 'inactive') {
      mediaRecorder.stop()
    }
    stream?.getTracks().forEach(track => track.stop())
    audioChunks.value = []
    isRecording.value = false
    recordTip.value = '已取消录音'
  }
  // Tear down the waveform even if no recorder was active.
  stopRealTimeWaveform();
  // Reset the touch-position flag for the next press.
  isTouchInside.value = true;
}
523
+
524
+ </script>
525
+
526
+ <style scoped>
527
+ .input-area {
528
+ position: fixed;
529
+ bottom: 0;
530
+ left: 0;
531
+ right: 0;
532
+ background: #fff;
533
+ border-top: 1px solid #ebedf0;
534
+ /* padding: 8px 12px;
535
+ padding-bottom: calc(8px + env(safe-area-inset-bottom)); */
536
+ z-index: 100;
537
+ }
538
+
539
+ .input-wrapper {
540
+ display: flex;
541
+ align-items: center;
542
+ padding: 8px 12px;
543
+ }
544
+
545
+ .chat-input {
546
+ flex: 1;
547
+ margin-right: 10px;
548
+ }
549
+
550
+ .send-btn {
551
+ height: 32px;
552
+ }
553
+
554
+ .btn-tip{
555
+ position: absolute;
556
+ top: 0px;
557
+ left: 50%;
558
+ transform: translate(-50%,-100%);
559
+ text-align: center;
560
+ color: #848080;
561
+ }
562
+ .recording{
563
+ background: #2217eb;
564
+ }
565
+
566
+ .voice-container{
567
+ width: 100%;
568
+ position: relative;
569
+ .waveform-container {
570
+ position: absolute;
571
+ z-index: 200;
572
+ height: 100px;
573
+ /* height: 100%; */
574
+ /* height: 80px; */
575
+ display: flex;
576
+ align-items: center;
577
+ justify-content: center;
578
+ position: relative;
579
+ }
580
+
581
+ .hold-say{
582
+ flex: 1;
583
+ height: 44px;
584
+ text-align: center;
585
+ line-height: 44px;
586
+ color: #848080;
587
+ font-weight: bold;
588
+ }
589
+ }
590
+
591
+ </style>