@siact/sime-x-vue 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1033 @@
1
+ import { inject, defineComponent, shallowRef, provide, renderSlot, computed, openBlock, createBlock, Transition, withCtx, createElementBlock, normalizeClass, createElementVNode, toDisplayString, createVNode, createCommentVNode, ref, onBeforeUnmount, reactive, watch, normalizeStyle, withModifiers, nextTick } from 'vue';
2
+ import { HostBridge } from '@siact/sime-bridge';
3
+ import { WakeWordDetectorStandalone, SpeechTranscriberStandalone } from 'web-voice-kit';
4
+
5
// Injection key under which <sime-provider> publishes the chatbot context.
const AiChatbotXKey = Symbol("sime-x");
/**
 * Strict variant of Vue's inject(): resolves `key` from the component tree
 * and throws instead of silently returning `undefined` when nothing was
 * provided and no default applies.
 *
 * @param {symbol|string} key - injection key (usually AiChatbotXKey).
 * @param {*} [defaultValue] - fallback when nothing is provided; when
 *   `treatDefaultAsFactory` is true it is invoked as a factory.
 * @param {boolean} [treatDefaultAsFactory] - forwarded to Vue's inject().
 * @returns {*} the resolved value (never `undefined`).
 * @throws {Error} when the key resolves to `undefined`.
 */
function injectStrict(key, defaultValue, treatDefaultAsFactory) {
  let result;
  if (defaultValue === void 0) {
    result = inject(key);
  } else if (treatDefaultAsFactory === true) {
    result = inject(key, defaultValue, true);
  } else {
    result = inject(key, defaultValue, false);
  }
  // Only `undefined` means "not resolved". The previous truthiness check
  // (`!result`) incorrectly rejected legitimately provided falsy values
  // such as 0, "" or false.
  if (result === void 0) {
    // String(key) keeps the message useful for string keys, where
    // `key.description` does not exist.
    throw new Error(`Could not resolve ${typeof key === "symbol" ? key.description : String(key)}`);
  }
  return result;
}
20
+
21
// Identifiers for commands the host page can execute inside the embedded
// chatbot iframe via the HostBridge. (Shape mirrors a compiled TS enum:
// the IIFE augments any pre-existing binding, then re-exports it.)
var clientCommandKey = /* @__PURE__ */ (() => {
  const keys = clientCommandKey || {};
  Object.assign(keys, {
    SET_THEME: "SiMeAgent_setTheme",
    APPEND_MESSAGE: "SiMeAgent_appendMessage",
    WAKE: "SiMeAgent_wake",
    TRANSITION: "SiMeAgent_transition",
    TRANSITION_END: "SiMeAgent_transition_end",
    START_NEW_CONVERSATION: "SiMeAgent_startNewConversation",
    RECOGNITION: "SiMeAgent_recognition"
  });
  return keys;
})();
31
+
32
/**
 * <sime-provider> — root provider component (compiled SFC output).
 *
 * Creates a HostBridge instance and exposes the chatbot context to all
 * descendants via provide(AiChatbotXKey): configuration accessors, voice /
 * dialog controls (registered later by a mounted <sime-x>), and bridge
 * command helpers. Renders only its default slot.
 */
const _sfc_main$3 = /* @__PURE__ */ defineComponent({
  __name: "sime-provider",
  props: {
    project: {},
    description: {},
    debug: { type: Boolean },
    chatbotUrl: {},
    appId: {},
    appToken: {},
    voiceConfig: {}
  },
  setup(__props) {
    const props = __props;
    const hostBridge = shallowRef(new HostBridge());
    // Voice/dialog controls start as async no-ops; a mounted <sime-x>
    // instance replaces them through registerVoiceMethods().
    const startListeningRef = shallowRef(async () => {
    });
    const stopListeningRef = shallowRef(async () => {
    });
    const toggleCollapseRef = shallowRef(async () => {
    });
    const openDialogRef = shallowRef(async () => {
    });
    const closeDialogRef = shallowRef(async () => {
    });
    provide(AiChatbotXKey, {
      // Config accessors are functions so consumers always read current props.
      chatbotUrl: () => props.chatbotUrl,
      appId: () => props.appId,
      appToken: () => props.appToken,
      voiceConfig: () => props.voiceConfig,
      startListening: () => startListeningRef.value(),
      stopListening: () => stopListeningRef.value(),
      toggleCollapse: () => toggleCollapseRef.value(),
      openDialog: () => openDialogRef.value(),
      closeDialog: () => closeDialogRef.value(),
      registerVoiceMethods: (methods) => {
        startListeningRef.value = methods.start;
        stopListeningRef.value = methods.stop;
        toggleCollapseRef.value = methods.toggleCollapse;
        openDialogRef.value = methods.openDialog;
        closeDialogRef.value = methods.closeDialog;
      },
      clientCommand: () => hostBridge.value.clientCommands(),
      // "hostCommads" is a published typo kept for backward compatibility;
      // new code should call the correctly spelled hostCommands().
      hostCommads: () => hostBridge.value.hostCommands(),
      hostCommands: () => hostBridge.value.hostCommands(),
      registerCommand: (cmd) => {
        hostBridge.value.registerCommand(cmd);
      },
      unregisterCommand: (name) => {
        hostBridge.value.unregisterCommand(name);
      },
      async appendMessage(message) {
        await hostBridge.value.executeClientCommand(clientCommandKey.APPEND_MESSAGE, [message]);
      },
      async setTheme(theme) {
        await hostBridge.value.executeClientCommand(clientCommandKey.SET_THEME, [theme]);
      },
      // "weak" is a published typo for "wake"; both spellings are kept so
      // existing callers keep working.
      async weak() {
        await hostBridge.value.executeClientCommand(clientCommandKey.WAKE);
      },
      async wake() {
        await hostBridge.value.executeClientCommand(clientCommandKey.WAKE);
      },
      async startNewConversation() {
        await hostBridge.value.executeClientCommand(clientCommandKey.START_NEW_CONVERSATION);
      },
      setIframeElement(iframe) {
        hostBridge.value.setIframe(iframe);
      },
      async recognition(message, commands) {
        return await hostBridge.value.executeClientCommand(clientCommandKey.RECOGNITION, [message, commands]);
      },
      async executeCommand(commandName, args = []) {
        return await hostBridge.value.executeCommand(commandName, args);
      }
    });
    return (_ctx, _cache) => {
      return renderSlot(_ctx.$slots, "default");
    };
  }
});
108
+
109
// Hoisted static vnode props for the voice-status component (compiler output).
const _hoisted_1$2 = { class: "content-container" };
const _hoisted_2$2 = { class: "status-header" };
const _hoisted_3$1 = { class: "status-text" };
const _hoisted_4$1 = {
  key: 0,
  class: "transcription-content"
};
const _hoisted_5$1 = {
  key: 1,
  class: "placeholder-text"
};
/**
 * voice-status — floating panel shown while the assistant is awake or
 * actively transcribing (compiled SFC render output). Displays an animated
 * microphone indicator, a status label and the live transcription text.
 */
const _sfc_main$2 = /* @__PURE__ */ defineComponent({
  __name: "voice-status",
  props: {
    status: {},
    transcriptionText: {},
    isTranscribing: { type: Boolean }
  },
  setup(__props) {
    const props = __props;
    // CSS modifier class driving the indicator animation state.
    const currentMode = computed(() => {
      if (props.isTranscribing) return "mode-transcribing";
      if (props.status === "wake") return "mode-wake";
      return "mode-standby";
    });
    // Human-readable status line (Chinese UI copy).
    const statusLabel = computed(() => {
      if (props.isTranscribing) return "正在聆听您的问题...";
      if (props.status === "wake") return "您好,有什么可以帮助您的吗?";
      return "Standby";
    });
    return (_ctx, _cache) => {
      return openBlock(), createBlock(Transition, { name: "voice-panel" }, {
        default: withCtx(() => [
          // Panel is only rendered when awake or transcribing.
          __props.status === "wake" || __props.isTranscribing ? (openBlock(), createElementBlock("div", {
            key: 0,
            class: normalizeClass(["voice-status-wrapper", currentMode.value])
          }, [
            // Static microphone indicator subtree, cached across renders.
            _cache[0] || (_cache[0] = createElementVNode("div", { class: "indicator-container" }, [
              createElementVNode("div", { class: "ambient-glow" }),
              createElementVNode("div", { class: "ripple-layer" }, [
                createElementVNode("div", { class: "ripple delay-1" }),
                createElementVNode("div", { class: "ripple delay-2" }),
                createElementVNode("div", { class: "ripple delay-3" })
              ]),
              createElementVNode("div", { class: "icon-core" }, [
                createElementVNode("svg", {
                  class: "mic-icon",
                  viewBox: "0 0 24 24",
                  fill: "none",
                  stroke: "currentColor",
                  "stroke-width": "2",
                  "stroke-linecap": "round",
                  "stroke-linejoin": "round"
                }, [
                  createElementVNode("rect", {
                    x: "9",
                    y: "2",
                    width: "6",
                    height: "12",
                    rx: "3",
                    class: "mic-capsule"
                  }),
                  createElementVNode("path", {
                    d: "M5 10C5 13.866 8.13401 17 12 17C15.866 17 19 13.866 19 10",
                    class: "mic-stand"
                  }),
                  createElementVNode("path", {
                    d: "M12 17V21M8 21H16",
                    class: "mic-base"
                  })
                ])
              ])
            ], -1)),
            // Dynamic text area: status label plus transcription/placeholder.
            createElementVNode("div", _hoisted_1$2, [
              createElementVNode("div", _hoisted_2$2, [
                createElementVNode("span", _hoisted_3$1, toDisplayString(statusLabel.value), 1)
              ]),
              createElementVNode("div", {
                class: normalizeClass(["text-window", { "has-text": !!__props.transcriptionText }])
              }, [
                createVNode(Transition, {
                  name: "fade-slide",
                  mode: "out-in"
                }, {
                  default: withCtx(() => [
                    // Transcript text wins; otherwise show "Listening..." while awake.
                    __props.transcriptionText ? (openBlock(), createElementBlock("p", _hoisted_4$1, toDisplayString(__props.transcriptionText), 1)) : __props.status === "wake" ? (openBlock(), createElementBlock("p", _hoisted_5$1, "Listening...")) : createCommentVNode("", true)
                  ]),
                  _: 1
                })
              ], 2)
            ])
          ], 2)) : createCommentVNode("", true)
        ]),
        _: 1
      });
    };
  }
});
207
+
208
/**
 * Attach compiler-generated metadata pairs (e.g. [["__scopeId", "data-v-…"]])
 * to a component definition. If the component wraps its options in
 * `__vccOpts`, the metadata is applied to (and returned as) that object.
 */
const _export_sfc = (sfc, props) => {
  const target = sfc.__vccOpts || sfc;
  props.forEach(([key, val]) => {
    target[key] = val;
  });
  return target;
};
215
+
216
// Scoped-style registration for the voice-status component.
const VoiceStatus = /* @__PURE__ */ _export_sfc(_sfc_main$2, [["__scopeId", "data-v-c9fa6caf"]]);
217
+
218
// Hoisted static vnode props for the execution-status component.
const _hoisted_1$1 = {
  key: 0,
  class: "execution-bubble"
};
const _hoisted_2$1 = { class: "exec-text" };
/**
 * execution-status — small bubble with animated dots shown while a
 * recognized voice command is being executed (compiled SFC render output).
 * Falls back to the label "执行中" ("executing") when no text is given.
 */
const _sfc_main$1 = /* @__PURE__ */ defineComponent({
  __name: "execution-status",
  props: {
    visible: { type: Boolean },
    text: {}
  },
  setup(__props) {
    return (_ctx, _cache) => {
      return openBlock(), createBlock(Transition, { name: "exec-bubble" }, {
        default: withCtx(() => [
          __props.visible ? (openBlock(), createElementBlock("div", _hoisted_1$1, [
            createElementVNode("span", _hoisted_2$1, toDisplayString(__props.text || "执行中"), 1),
            // Static loading-dots subtree, cached across renders.
            _cache[0] || (_cache[0] = createElementVNode("div", { class: "loading-dots" }, [
              createElementVNode("span", { class: "dot" }),
              createElementVNode("span", { class: "dot" }),
              createElementVNode("span", { class: "dot" })
            ], -1))
          ])) : createCommentVNode("", true)
        ]),
        _: 1
      });
    };
  }
});

// Scoped-style registration for the execution-status component.
const ExecutionStatus = /* @__PURE__ */ _export_sfc(_sfc_main$1, [["__scopeId", "data-v-8244ff0d"]]);
249
+
250
/**
 * Check whether the runtime can actually capture microphone audio.
 *
 * Performs, in order: environment/API feature detection, audio-input device
 * enumeration, a best-effort Permissions API query, and finally a real
 * getUserMedia() capture whose tracks are stopped immediately afterwards.
 * All failures are logged (Chinese UI copy) and reported as `false`.
 *
 * @returns {Promise<boolean>} true only when a live audio track was obtained.
 */
const ensureMicrophonePermission = async () => {
  // Non-browser environments (SSR, Node) cannot access a microphone.
  if (typeof navigator === "undefined" || typeof window === "undefined") {
    console.log("当前环境不支持麦克风访问");
    return false;
  }
  if (!navigator.mediaDevices?.getUserMedia || !navigator.mediaDevices?.enumerateDevices) {
    console.log("当前环境不支持麦克风访问");
    return false;
  }
  try {
    // At least one audio input device must be present.
    const allDevices = await navigator.mediaDevices.enumerateDevices();
    const hasAudioInput = allDevices.some((device) => device.kind === "audioinput");
    if (!hasAudioInput) {
      console.log("未检测到麦克风设备,请连接麦克风后重试。");
      return false;
    }
    // Best-effort permission probe; "microphone" is not supported by every
    // browser's Permissions API, so query failures are only warned about.
    if ("permissions" in navigator && navigator.permissions?.query) {
      try {
        const permission = await navigator.permissions.query({ name: "microphone" });
        if (permission.state === "denied") {
          console.log("麦克风权限被禁用,请在浏览器设置中开启。");
          return false;
        }
      } catch (queryError) {
        console.warn("Permission query not supported:", queryError);
      }
    }
    // Definitive check: actually open (then immediately release) a stream.
    let capturedStream = null;
    try {
      capturedStream = await navigator.mediaDevices.getUserMedia({
        audio: {
          echoCancellation: true,
          noiseSuppression: true,
          autoGainControl: true
        }
      });
      const tracks = capturedStream.getAudioTracks();
      if (tracks.length === 0) {
        console.log("无法获取麦克风音频轨道。");
        return false;
      }
      const [firstTrack] = tracks;
      if (!firstTrack.enabled || firstTrack.readyState !== "live") {
        console.log("麦克风设备不可用,请检查设备连接。");
        return false;
      }
      return true;
    } finally {
      // Always release the probe stream so the mic indicator turns off.
      capturedStream?.getTracks().forEach((track) => track.stop());
    }
  } catch (error) {
    console.error("Microphone permission check failed", error);
    switch (error.name) {
      case "NotFoundError":
      case "DevicesNotFoundError":
        console.log("未检测到麦克风设备,请连接麦克风后重试。");
        break;
      case "NotAllowedError":
      case "PermissionDeniedError":
        console.log("麦克风权限被拒绝,请在浏览器设置中允许访问。");
        break;
      case "NotReadableError":
      case "TrackStartError":
        console.log("麦克风被其他应用占用或无法访问。");
        break;
      default:
        console.log("无法访问麦克风,请检查设备连接和浏览器权限。");
    }
    return false;
  }
};
316
+
317
// Hoisted static vnode props / dynamic-prop key lists for the sime-x
// component (Vue compiler output). The string-array constants list which
// attributes are patched dynamically (e.g. ["src"], ["title"]).
const _hoisted_1 = ["data-theme"];
const _hoisted_2 = { class: "fab-avatar-wrapper" };
const _hoisted_3 = ["src"];
const _hoisted_4 = {
  key: 0,
  class: "listening-badge"
};
const _hoisted_5 = { class: "header-left" };
const _hoisted_6 = { class: "logo-icon" };
const _hoisted_7 = ["src"];
const _hoisted_8 = { class: "title" };
const _hoisted_9 = { class: "actions" };
const _hoisted_10 = ["title"];
const _hoisted_11 = {
  key: 0,
  class: "voice-indicator"
};
const _hoisted_12 = ["title"];
const _hoisted_13 = ["title"];
const _hoisted_14 = {
  width: "16",
  height: "16",
  viewBox: "0 0 24 24",
  fill: "none"
};
const _hoisted_15 = ["d"];
const _hoisted_16 = ["src"];
344
/**
 * sime-x — the main assistant widget (compiled SFC output).
 *
 * Renders a floating action button (FAB) with voice-status overlays and a
 * draggable dialog hosting the chatbot in an iframe. Integrates:
 *  - wake-word detection (WakeWordDetectorStandalone),
 *  - speech transcription (SpeechTranscriberStandalone),
 *  - command recognition/execution through the provided AiChatbotX context.
 * Voice pipeline states: "standby" -> "listening" -> "wake" -> transcribing.
 */
const _sfc_main = /* @__PURE__ */ defineComponent({
  __name: "sime-x",
  props: {
    xLogo: {},
    xSize: {},
    xTitle: {},
    xTheme: {},
    xDialogSize: {},
    wakeWords: {},
    modelPath: {}
  },
  emits: ["start-transcribing", "stop-transcribing"],
  setup(__props, { emit: __emit }) {
    const props = __props;
    const emit = __emit;
    // Context published by <sime-provider>; throws if the provider is missing.
    const aiChatbotX = injectStrict(AiChatbotXKey);
    const chatbotUrl = ref("");
    // "standby" | "listening" | "wake"
    const voiceStatus = ref("standby");
    const transcriptionText = ref("");
    const isTranscribing = ref(false);
    // True while a recognized command / message is being processed.
    const isProcessing = ref(false);
    const visible = ref(false);
    const isCollapsed = ref(false);
    const fabRef = ref(null);
    // Gates dialog CSS transitions until its position has been computed.
    const positionReady = ref(false);
    // Non-reactive: voice SDK instances are managed manually.
    let detector = null;
    let transcriber = null;
    const isInitializing = ref(false);
    const initError = ref("");
    // ---- Theme handling -------------------------------------------------
    const getSystemTheme = () => {
      return window.matchMedia("(prefers-color-scheme: dark)").matches ? "dark" : "light";
    };
    const currentTheme = ref(props.xTheme === "system" ? getSystemTheme() : props.xTheme || "light");
    // Toggle light/dark and propagate the theme into the chatbot iframe.
    const cycleTheme = async () => {
      currentTheme.value = currentTheme.value === "light" ? "dark" : "light";
      aiChatbotX.setTheme(currentTheme.value);
    };
    const startNewConversation = () => {
      aiChatbotX.startNewConversation();
    };
    const themeTooltip = computed(() => currentTheme.value === "light" ? "切换到深色模式" : "切换到浅色模式");
    const voiceButtonTooltip = computed(() => {
      if (isInitializing.value) return "初始化中...";
      if (initError.value) return `错误: ${initError.value}`;
      switch (voiceStatus.value) {
        case "standby":
          return "开启语音监听";
        case "listening":
          return "监听中,等待唤醒词...";
        case "wake":
          return "已唤醒!";
        default:
          return "语音监听";
      }
    });
    // Track OS theme changes when xTheme === "system" (setup-time listener
    // with matching cleanup on unmount).
    if (props.xTheme === "system") {
      const mediaQuery = window.matchMedia("(prefers-color-scheme: dark)");
      const handleThemeChange = (e) => {
        currentTheme.value = e.matches ? "dark" : "light";
      };
      mediaQuery.addEventListener("change", handleThemeChange);
      onBeforeUnmount(() => {
        mediaQuery.removeEventListener("change", handleThemeChange);
      });
    }
    // ---- Wake-word detector --------------------------------------------
    // Lazily creates the detector; on wake it clears state and starts
    // transcription. Errors surface as short-lived status text.
    const initVoiceDetector = () => {
      if (detector || isInitializing.value) return;
      isInitializing.value = true;
      initError.value = "";
      if (!props.modelPath) {
        initError.value = "未提供语音模型文件";
        isInitializing.value = false;
        return;
      }
      try {
        detector = new WakeWordDetectorStandalone({
          modelPath: props.modelPath,
          sampleRate: 16e3,
          usePartial: true,
          autoReset: {
            enabled: true,
            resetDelayMs: 5e3
          }
        });
        const wakeWords = props.wakeWords || ["你好", "您好"];
        detector.setWakeWords(wakeWords);
        detector.onWake(() => {
          console.log("[VoiceDetector] 检测到唤醒词");
          voiceStatus.value = "wake";
          transcriptionText.value = "";
          isTranscribing.value = false;
          startTranscribing();
        });
        detector.onError((error) => {
          console.error("[VoiceDetector] 错误:", error);
          initError.value = error.message;
          voiceStatus.value = "standby";
          isTranscribing.value = false;
          if (error.message.includes("permission")) {
            transcriptionText.value = "需要麦克风权限";
          } else if (error.message.includes("model")) {
            transcriptionText.value = "模型加载失败";
          } else {
            transcriptionText.value = "初始化失败";
          }
          setTimeout(() => {
            transcriptionText.value = "";
          }, 3e3);
        });
        console.log("[VoiceDetector] 初始化成功");
      } catch (error) {
        console.error("[VoiceDetector] 初始化失败:", error);
        initError.value = error instanceof Error ? error.message : "初始化失败";
        voiceStatus.value = "standby";
        isTranscribing.value = false;
      } finally {
        isInitializing.value = false;
      }
    };
    // ---- Speech transcriber --------------------------------------------
    // Lazily creates the transcriber from voiceConfig(). On auto-stop the
    // final transcript is sent through intent recognition: matched commands
    // are executed; otherwise the text is appended to the chat and the
    // dialog is opened.
    function initTranscriber() {
      if (transcriber) return;
      try {
        const { appId, apiKey, websocketUrl } = aiChatbotX.voiceConfig();
        if (!appId || !apiKey || !websocketUrl) {
          initError.value = "未配置语音配置";
          voiceStatus.value = "standby";
          isTranscribing.value = false;
          return;
        }
        transcriber = new SpeechTranscriberStandalone({
          appId,
          apiKey,
          websocketUrl,
          autoStop: {
            enabled: true,
            silenceTimeoutMs: 3e3,
            noSpeechTimeoutMs: 5e3,
            maxDurationMs: 6e4
          }
        });
        transcriber.onResult((result) => {
          transcriptionText.value = result.transcript;
        });
        transcriber.onAutoStop(async () => {
          console.log("[Transcriber] Auto Stop");
          const currentText = transcriptionText.value;
          await stopTranscribing();
          if (!currentText || !currentText.trim()) {
            console.log("[Transcriber] No transcription text, returning to listening");
            transcriptionText.value = "";
            voiceStatus.value = "listening";
            return;
          }
          isProcessing.value = true;
          transcriptionText.value = currentText;
          try {
            // NOTE: "hostCommads" is the provider's (typo'd) published name.
            const commands = await aiChatbotX.hostCommads();
            const result = await aiChatbotX.recognition(currentText, commands);
            if (result?.data?.intent === "command" && result?.data?.matchedCommands) {
              const matchedCommands = result.data.matchedCommands;
              for (const cmd of matchedCommands) {
                try {
                  const args = cmd.parameters ? Object.values(cmd.parameters) : [];
                  const cmdResult = await aiChatbotX.executeCommand(cmd.name, args);
                  console.log(`Command ${cmd.name} executed successfully:`, cmdResult);
                } catch (error) {
                  console.error(`Failed to execute command ${cmd.name}:`, error);
                }
              }
            } else {
              aiChatbotX.appendMessage(currentText);
              toggleDialog(true);
            }
          } finally {
            isProcessing.value = false;
            transcriptionText.value = "";
            voiceStatus.value = "listening";
          }
        });
        transcriber.onError((error) => {
          console.error("[Transcriber] Error:", error);
          stopTranscribing();
          transcriptionText.value = "转写错误";
          setTimeout(() => {
            transcriptionText.value = "";
            voiceStatus.value = "listening";
          }, 2e3);
        });
        console.log("[Transcriber] 初始化成功");
      } catch (error) {
        console.error("[Transcriber] 初始化失败:", error);
        voiceStatus.value = "standby";
        initError.value = error instanceof Error ? error.message : "转写初始化失败";
      }
    }
    const startTranscribing = async () => {
      if (!transcriber) {
        initTranscriber();
        if (!transcriber) return;
      }
      try {
        emit("start-transcribing");
        await transcriber.start();
        isTranscribing.value = true;
        transcriptionText.value = "";
      } catch (error) {
        console.error("[Transcriber] 启动失败:", error);
        transcriptionText.value = "转写启动失败";
        setTimeout(() => {
          transcriptionText.value = "";
        }, 2e3);
      }
    };
    const stopTranscribing = async () => {
      if (transcriber && transcriber.isActive()) {
        try {
          await transcriber.stop();
          isTranscribing.value = false;
          emit("stop-transcribing");
        } catch (error) {
          console.error("[Transcriber] 停止失败:", error);
        }
      }
    };
    // Toggle (or force, via targetState) the wake-word listening mode.
    // Verifies microphone access first; no-ops while initializing.
    const toggleVoiceMode = async (targetState) => {
      const permission = await ensureMicrophonePermission();
      if (!permission) return;
      if (isInitializing.value) return;
      if (!detector) {
        await initVoiceDetector();
        if (!detector) return;
      }
      const isCurrentlyListening = voiceStatus.value === "listening";
      const shouldStart = targetState !== void 0 ? targetState : !isCurrentlyListening;
      if (shouldStart === isCurrentlyListening) return;
      try {
        if (shouldStart) {
          console.log("[VoiceDetector] 强制/自动启动监听...");
          await detector.start();
          voiceStatus.value = "listening";
          transcriptionText.value = "";
          isTranscribing.value = false;
        } else {
          console.log("[VoiceDetector] 强制/自动停止监听...");
          await detector.stop();
          stopTranscribing();
          voiceStatus.value = "standby";
          transcriptionText.value = "";
          isTranscribing.value = false;
        }
      } catch (error) {
        console.error("[VoiceDetector] 操作失败:", error);
        voiceStatus.value = "standby";
        transcriptionText.value = "操作失败";
        isTranscribing.value = false;
        setTimeout(() => {
          transcriptionText.value = "";
        }, 2e3);
      }
    };
    // ---- Dialog position & drag ----------------------------------------
    const position = reactive({ x: 0, y: 0 });
    const containerWidth = ref(props.xDialogSize?.width || 420);
    const containerHeight = ref(props.xDialogSize?.height || 600);
    const isFirstOpen = ref(true);
    // Clamp a dialog rectangle inside the viewport with a 20px margin.
    const validatePosition = (x, y, width, height) => {
      const margin = 20;
      const viewportWidth = window.innerWidth;
      const viewportHeight = window.innerHeight;
      const maxX = viewportWidth - width - margin;
      const maxY = viewportHeight - height - margin;
      return {
        x: Math.max(margin, Math.min(x, maxX)),
        y: Math.max(margin, Math.min(y, maxY))
      };
    };
    // First-open placement: prefer left of the FAB, then right, then
    // centered above/below, finally viewport-centered.
    const calculateInitialPosition = () => {
      if (!fabRef.value) return;
      const fabRect = fabRef.value.getBoundingClientRect();
      const dialogWidth = containerWidth.value;
      const dialogHeight = containerHeight.value;
      const viewportWidth = window.innerWidth;
      const viewportHeight = window.innerHeight;
      const margin = 20;
      const minMargin = 20;
      let x = 0;
      let y = 0;
      const leftX = fabRect.left - dialogWidth - margin;
      if (leftX >= minMargin) {
        x = leftX;
        y = fabRect.top;
        if (y + dialogHeight > viewportHeight - minMargin) {
          y = viewportHeight - dialogHeight - minMargin;
        }
        if (y < minMargin) {
          y = minMargin;
        }
      } else {
        const rightX = fabRect.right + margin;
        if (rightX + dialogWidth <= viewportWidth - minMargin) {
          x = rightX;
          y = fabRect.top;
          if (y + dialogHeight > viewportHeight - minMargin) {
            y = viewportHeight - dialogHeight - minMargin;
          }
          if (y < minMargin) {
            y = minMargin;
          }
        } else {
          x = (viewportWidth - dialogWidth) / 2;
          const aboveY = fabRect.top - dialogHeight - margin;
          if (aboveY >= minMargin) {
            y = aboveY;
          } else {
            const belowY = fabRect.bottom + margin;
            if (belowY + dialogHeight <= viewportHeight - minMargin) {
              y = belowY;
            } else {
              y = (viewportHeight - dialogHeight) / 2;
            }
          }
        }
      }
      const validated = validatePosition(x, y, dialogWidth, dialogHeight);
      position.x = validated.x;
      position.y = validated.y;
    };
    // Collapse/expand the dialog; 60 approximates the collapsed header height.
    const toggleCollapse = async () => {
      isCollapsed.value = !isCollapsed.value;
      nextTick(() => {
        const currentHeight = isCollapsed.value ? 60 : containerHeight.value;
        const validated = validatePosition(position.x, position.y, containerWidth.value, currentHeight);
        position.x = validated.x;
        position.y = validated.y;
      });
    };
    const drag = reactive({
      isDragging: false,
      startX: 0,
      startY: 0,
      offsetX: 0,
      offsetY: 0
    });
    // Document-level listeners keep the drag alive when the cursor leaves
    // the dialog; removed again in stopDrag.
    const startDrag = (e) => {
      drag.isDragging = true;
      drag.startX = e.clientX;
      drag.startY = e.clientY;
      drag.offsetX = position.x;
      drag.offsetY = position.y;
      document.addEventListener("mousemove", onDrag);
      document.addEventListener("mouseup", stopDrag);
    };
    const onDrag = (e) => {
      if (!drag.isDragging) return;
      const newX = drag.offsetX + (e.clientX - drag.startX);
      const newY = drag.offsetY + (e.clientY - drag.startY);
      const validated = validatePosition(newX, newY, containerWidth.value, isCollapsed.value ? 60 : containerHeight.value);
      position.x = validated.x;
      position.y = validated.y;
    };
    const stopDrag = () => {
      drag.isDragging = false;
      document.removeEventListener("mousemove", onDrag);
      document.removeEventListener("mouseup", stopDrag);
    };
    // Open/close the dialog. Position is (re)validated before the element
    // is revealed so it never flashes at a stale location.
    const toggleDialog = async (state) => {
      if (state) {
        visible.value = true;
        positionReady.value = false;
        await nextTick();
        if (isFirstOpen.value) {
          calculateInitialPosition();
          isFirstOpen.value = false;
        } else {
          const validated = validatePosition(
            position.x,
            position.y,
            containerWidth.value,
            isCollapsed.value ? 60 : containerHeight.value
          );
          position.x = validated.x;
          position.y = validated.y;
        }
        await nextTick();
        positionReady.value = true;
      } else {
        positionReady.value = false;
        visible.value = false;
        isCollapsed.value = false;
      }
    };
    // Wire the loaded iframe into the HostBridge and push an initial theme.
    // NOTE(review): the theme is hard-coded to "dark" here rather than
    // currentTheme.value — looks intentional-but-suspicious; confirm.
    const handleIframeLoad = (event) => {
      aiChatbotX.setIframeElement(event.target);
      aiChatbotX.setTheme("dark");
    };
    // Build the iframe URL as soon as the provider supplies a chatbot URL.
    watch(
      () => [aiChatbotX.chatbotUrl()],
      ([url]) => {
        console.log("[AiChatbotX] 初始化", url);
        if (url) {
          chatbotUrl.value = `${url}/app/${aiChatbotX.appId()}?token=${aiChatbotX.appToken()}`;
        }
      },
      { immediate: true }
    );
    // Tear down voice SDK instances on unmount.
    onBeforeUnmount(async () => {
      if (detector) {
        try {
          if (detector.isActive()) {
            await detector.stop();
          }
          detector = null;
        } catch (error) {
          console.error("[VoiceDetector] 清理失败:", error);
        }
      }
      if (transcriber) {
        try {
          if (transcriber.isActive()) {
            await transcriber.stop();
          }
          transcriber = null;
        } catch (error) {
          console.error("[Transcriber] 清理失败:", error);
        }
      }
    });
    // Expose this instance's controls to the provider so host code can
    // drive the widget programmatically.
    aiChatbotX?.registerVoiceMethods({
      start: () => toggleVoiceMode(true),
      stop: () => toggleVoiceMode(false),
      openDialog: () => toggleDialog(true),
      closeDialog: () => toggleDialog(false),
      toggleCollapse: () => toggleCollapse()
    });
    // ---- Render (compiled template) ------------------------------------
    return (_ctx, _cache) => {
      return openBlock(), createElementBlock("div", {
        class: "sime-x",
        "data-theme": currentTheme.value
      }, [
        // Floating action button with voice/execution status overlays.
        createVNode(Transition, { name: "fade" }, {
          default: withCtx(() => [
            createElementVNode("div", {
              ref_key: "fabRef",
              ref: fabRef,
              class: "assistant-fab",
              onClick: _cache[0] || (_cache[0] = ($event) => toggleDialog(true))
            }, [
              !isProcessing.value ? (openBlock(), createBlock(VoiceStatus, {
                key: 0,
                class: "voice-status",
                status: voiceStatus.value,
                "transcription-text": transcriptionText.value,
                "is-transcribing": isTranscribing.value,
                style: { "width": "480px" }
              }, null, 8, ["status", "transcription-text", "is-transcribing"])) : (openBlock(), createBlock(ExecutionStatus, {
                key: 1,
                class: "voice-status",
                visible: isProcessing.value,
                text: transcriptionText.value
              }, null, 8, ["visible", "text"])),
              createElementVNode("div", _hoisted_2, [
                createElementVNode("img", {
                  src: __props.xLogo ? __props.xLogo : "/sime.png",
                  alt: "assistant",
                  style: normalizeStyle({
                    width: __props.xSize?.width + "px"
                  })
                }, null, 12, _hoisted_3),
                // Animated badge shown while wake-word listening is active.
                createVNode(Transition, { name: "indicator-fade" }, {
                  default: withCtx(() => [
                    voiceStatus.value === "listening" ? (openBlock(), createElementBlock("div", _hoisted_4, [..._cache[3] || (_cache[3] = [
                      createElementVNode("div", { class: "listening-waves" }, [
                        createElementVNode("div", { class: "wave wave-1" }),
                        createElementVNode("div", { class: "wave wave-2" }),
                        createElementVNode("div", { class: "wave wave-3" })
                      ], -1),
                      createElementVNode("div", { class: "listening-icon" }, [
                        createElementVNode("svg", {
                          width: "24",
                          height: "24",
                          viewBox: "0 0 24 24",
                          fill: "none"
                        }, [
                          createElementVNode("path", {
                            d: "M12 14c1.66 0 3-1.34 3-3V5c0-1.66-1.34-3-3-3S9 3.34 9 5v6c0 1.66 1.34 3 3 3z",
                            fill: "currentColor"
                          }),
                          createElementVNode("path", {
                            d: "M17 11c0 2.76-2.24 5-5 5s-5-2.24-5-5",
                            stroke: "currentColor",
                            "stroke-width": "2",
                            "stroke-linecap": "round"
                          })
                        ])
                      ], -1)
                    ])])) : createCommentVNode("", true)
                  ]),
                  _: 1
                })
              ]),
              createElementVNode("div", {
                class: normalizeClass(["fab-pulse", { active: voiceStatus.value === "listening" }])
              }, null, 2)
            ], 512)
          ]),
          _: 1
        }),
        // Draggable chatbot dialog (hidden via class, position via CSS vars).
        createVNode(Transition, { name: "dialog-fade" }, {
          default: withCtx(() => [
            createElementVNode("div", {
              ref: "dialogRef",
              class: normalizeClass(["x-dialog-container", {
                collapsed: isCollapsed.value,
                "is-hidden": !visible.value,
                "position-ready": positionReady.value
              }]),
              style: normalizeStyle({
                width: containerWidth.value + "px",
                height: isCollapsed.value ? "auto" : containerHeight.value + "px",
                border: currentTheme.value === "light" && !isCollapsed.value ? "1px solid var(--border-color)" : "none",
                "--dialog-x": position.x + "px",
                "--dialog-y": position.y + "px"
              }),
              onMousedown: startDrag
            }, [
              // Header: logo/title on the left, action buttons on the right.
              createElementVNode("div", {
                class: "x-dialog-header",
                onMousedown: withModifiers(startDrag, ["stop"])
              }, [
                createElementVNode("div", _hoisted_5, [
                  createElementVNode("div", _hoisted_6, [
                    createElementVNode("img", {
                      src: __props.xLogo ? __props.xLogo : "/sime.png",
                      alt: "assistant",
                      class: "logo"
                    }, null, 8, _hoisted_7)
                  ]),
                  createElementVNode("span", _hoisted_8, toDisplayString(__props.xTitle), 1)
                ]),
                createElementVNode("div", _hoisted_9, [
                  // New-conversation button.
                  createElementVNode("button", {
                    class: "action-btn theme-btn",
                    title: "开启新对话",
                    onClick: startNewConversation
                  }, [..._cache[4] || (_cache[4] = [
                    createElementVNode("svg", {
                      width: "16",
                      height: "16",
                      viewBox: "0 0 24 24",
                      fill: "none"
                    }, [
                      createElementVNode("path", {
                        d: "M12 5v14M5 12h14",
                        stroke: "currentColor",
                        "stroke-width": "2",
                        "stroke-linecap": "round"
                      })
                    ], -1)
                  ])]),
                  // Voice-listening toggle with state classes.
                  createElementVNode("button", {
                    class: normalizeClass(["action-btn theme-btn", {
                      active: voiceStatus.value !== "standby",
                      listening: voiceStatus.value === "listening",
                      woke: voiceStatus.value === "wake"
                    }]),
                    onClick: _cache[1] || (_cache[1] = withModifiers(() => toggleVoiceMode(), ["stop"])),
                    title: voiceButtonTooltip.value
                  }, [
                    _cache[5] || (_cache[5] = createElementVNode("svg", {
                      width: "16",
                      height: "16",
                      viewBox: "0 0 24 24",
                      fill: "none"
                    }, [
                      createElementVNode("path", {
                        d: "M12 15C13.6569 15 15 13.6569 15 12V7C15 5.34315 13.6569 4 12 4C10.3431 4 9 5.34315 9 7V12C9 13.6569 10.3431 15 12 15Z",
                        stroke: "currentColor",
                        "stroke-width": "2",
                        "stroke-linecap": "round",
                        "stroke-linejoin": "round"
                      }),
                      createElementVNode("path", {
                        d: "M18 11C18 14.3137 15.3137 17 12 17C8.68629 17 6 14.3137 6 11",
                        stroke: "currentColor",
                        "stroke-width": "2",
                        "stroke-linecap": "round",
                        "stroke-linejoin": "round"
                      }),
                      createElementVNode("path", {
                        d: "M12 19V17",
                        stroke: "currentColor",
                        "stroke-width": "2",
                        "stroke-linecap": "round",
                        "stroke-linejoin": "round"
                      }),
                      createElementVNode("path", {
                        d: "M9 21H15",
                        stroke: "currentColor",
                        "stroke-width": "2",
                        "stroke-linecap": "round",
                        "stroke-linejoin": "round"
                      })
                    ], -1)),
                    voiceStatus.value !== "standby" ? (openBlock(), createElementBlock("span", _hoisted_11)) : createCommentVNode("", true)
                  ], 10, _hoisted_10),
                  // Theme toggle button.
                  createElementVNode("button", {
                    class: "action-btn theme-btn",
                    onClick: withModifiers(cycleTheme, ["stop"]),
                    title: themeTooltip.value
                  }, [..._cache[6] || (_cache[6] = [
                    createElementVNode("svg", {
                      width: "16",
                      height: "16",
                      viewBox: "0 0 24 24",
                      fill: "none"
                    }, [
                      createElementVNode("circle", {
                        cx: "12",
                        cy: "12",
                        r: "7",
                        stroke: "currentColor",
                        "stroke-width": "2"
                      }),
                      createElementVNode("path", {
                        d: "M12 5A7 7 0 1 1 12 19",
                        fill: "currentColor"
                      })
                    ], -1)
                  ])], 8, _hoisted_12),
                  // Collapse/expand button (chevron flips with state).
                  createElementVNode("button", {
                    class: "action-btn collapse-btn",
                    onClick: withModifiers(toggleCollapse, ["stop"]),
                    title: isCollapsed.value ? "展开" : "折叠"
                  }, [
                    (openBlock(), createElementBlock("svg", _hoisted_14, [
                      createElementVNode("path", {
                        d: isCollapsed.value ? "M18 15L12 9L6 15" : "M6 9L12 15L18 9",
                        stroke: "currentColor",
                        "stroke-width": "2",
                        "stroke-linecap": "round",
                        "stroke-linejoin": "round"
                      }, null, 8, _hoisted_15)
                    ]))
                  ], 8, _hoisted_13),
                  // Minimize (close) button.
                  createElementVNode("button", {
                    class: "action-btn minimize-btn",
                    onClick: _cache[2] || (_cache[2] = withModifiers(($event) => toggleDialog(false), ["stop"])),
                    title: "最小化"
                  }, [..._cache[7] || (_cache[7] = [
                    createElementVNode("svg", {
                      width: "16",
                      height: "16",
                      viewBox: "0 0 24 24",
                      fill: "none"
                    }, [
                      createElementVNode("path", {
                        d: "M5 12H19",
                        stroke: "currentColor",
                        "stroke-width": "2",
                        "stroke-linecap": "round"
                      })
                    ], -1)
                  ])])
                ])
              ], 32),
              // Chatbot iframe; hidden (but kept mounted) while collapsed.
              createElementVNode("div", {
                class: normalizeClass(["x-dialog-content", { "is-hidden": isCollapsed.value }]),
                style: normalizeStyle({ opacity: isCollapsed.value ? 0 : 1 })
              }, [
                createElementVNode("iframe", {
                  ref: "iframeRef",
                  src: chatbotUrl.value,
                  class: "x-iframe",
                  allow: "microphone",
                  frameborder: "0",
                  onLoad: handleIframeLoad
                }, null, 40, _hoisted_16)
              ], 6)
            ], 38)
          ]),
          _: 1
        })
      ], 8, _hoisted_1);
    };
  }
});
1029
+
1030
// Scoped-style registration for the sime-x widget component.
const simeX = /* @__PURE__ */ _export_sfc(_sfc_main, [["__scopeId", "data-v-d8f992b0"]]);
1031
+
1032
// Public API of the bundle.
export { _sfc_main$3 as AiChatbotProvider, simeX as AiChatbotX, AiChatbotXKey, clientCommandKey, injectStrict };
//# sourceMappingURL=sime-x-vue.mjs.map