virtual-human-cf 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/components/VirtualHumanEventAdapter.vue.d.ts +43 -0
- package/dist/components/VirtualHumanPersona.vue.d.ts +73 -0
- package/dist/index.d.ts +7 -0
- package/dist/style.css +1 -0
- package/dist/virtual-human-cf.es.js +245 -0
- package/dist/virtual-human-cf.umd.js +1 -0
- package/package.json +38 -0
- package/src/components/VirtualHumanEventAdapter.vue +205 -0
- package/src/components/VirtualHumanPersona.vue +274 -0
- package/src/index.ts +16 -0
- package/tsconfig.json +21 -0
- package/tsconfig.node.json +10 -0
- package/virtual-human-cf-1.0.0.tgz +0 -0
- package/vite.config.ts +34 -0
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
declare function __VLS_template(): {
|
|
2
|
+
default?(_: {}): any;
|
|
3
|
+
};
|
|
4
|
+
declare const __VLS_component: import('vue').DefineComponent<import('vue').ExtractPropTypes<{
|
|
5
|
+
screenClientId: {
|
|
6
|
+
type: StringConstructor;
|
|
7
|
+
required: true;
|
|
8
|
+
};
|
|
9
|
+
wsUrl: {
|
|
10
|
+
type: StringConstructor;
|
|
11
|
+
required: true;
|
|
12
|
+
};
|
|
13
|
+
}>, {}, {}, {}, {}, import('vue').ComponentOptionsMixin, import('vue').ComponentOptionsMixin, {
|
|
14
|
+
error: (...args: any[]) => void;
|
|
15
|
+
pause: (...args: any[]) => void;
|
|
16
|
+
highlight: (...args: any[]) => void;
|
|
17
|
+
showDialog: (...args: any[]) => void;
|
|
18
|
+
end: (...args: any[]) => void;
|
|
19
|
+
connected: (...args: any[]) => void;
|
|
20
|
+
}, string, import('vue').PublicProps, Readonly<import('vue').ExtractPropTypes<{
|
|
21
|
+
screenClientId: {
|
|
22
|
+
type: StringConstructor;
|
|
23
|
+
required: true;
|
|
24
|
+
};
|
|
25
|
+
wsUrl: {
|
|
26
|
+
type: StringConstructor;
|
|
27
|
+
required: true;
|
|
28
|
+
};
|
|
29
|
+
}>> & Readonly<{
|
|
30
|
+
onError?: ((...args: any[]) => any) | undefined;
|
|
31
|
+
onPause?: ((...args: any[]) => any) | undefined;
|
|
32
|
+
onHighlight?: ((...args: any[]) => any) | undefined;
|
|
33
|
+
onShowDialog?: ((...args: any[]) => any) | undefined;
|
|
34
|
+
onEnd?: ((...args: any[]) => any) | undefined;
|
|
35
|
+
onConnected?: ((...args: any[]) => any) | undefined;
|
|
36
|
+
}>, {}, {}, {}, {}, string, import('vue').ComponentProvideOptions, true, {}, any>;
|
|
37
|
+
declare const _default: __VLS_WithTemplateSlots<typeof __VLS_component, ReturnType<typeof __VLS_template>>;
|
|
38
|
+
export default _default;
|
|
39
|
+
type __VLS_WithTemplateSlots<T, S> = T & {
|
|
40
|
+
new (): {
|
|
41
|
+
$slots: S;
|
|
42
|
+
};
|
|
43
|
+
};
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
declare const _default: import('vue').DefineComponent<import('vue').ExtractPropTypes<{
|
|
2
|
+
videoSrc: {
|
|
3
|
+
type: StringConstructor;
|
|
4
|
+
required: true;
|
|
5
|
+
};
|
|
6
|
+
visible: {
|
|
7
|
+
type: BooleanConstructor;
|
|
8
|
+
default: boolean;
|
|
9
|
+
};
|
|
10
|
+
isPlaying: {
|
|
11
|
+
type: BooleanConstructor;
|
|
12
|
+
default: boolean;
|
|
13
|
+
};
|
|
14
|
+
muted: {
|
|
15
|
+
type: BooleanConstructor;
|
|
16
|
+
default: boolean;
|
|
17
|
+
};
|
|
18
|
+
isDark: {
|
|
19
|
+
type: BooleanConstructor;
|
|
20
|
+
default: boolean;
|
|
21
|
+
};
|
|
22
|
+
screenClientId: {
|
|
23
|
+
type: StringConstructor;
|
|
24
|
+
required: false;
|
|
25
|
+
};
|
|
26
|
+
wsUrl: {
|
|
27
|
+
type: StringConstructor;
|
|
28
|
+
required: false;
|
|
29
|
+
};
|
|
30
|
+
}>, {}, {}, {}, {}, import('vue').ComponentOptionsMixin, import('vue').ComponentOptionsMixin, {
|
|
31
|
+
ended: (...args: any[]) => void;
|
|
32
|
+
"update:isPlaying": (...args: any[]) => void;
|
|
33
|
+
"update:visible": (...args: any[]) => void;
|
|
34
|
+
}, string, import('vue').PublicProps, Readonly<import('vue').ExtractPropTypes<{
|
|
35
|
+
videoSrc: {
|
|
36
|
+
type: StringConstructor;
|
|
37
|
+
required: true;
|
|
38
|
+
};
|
|
39
|
+
visible: {
|
|
40
|
+
type: BooleanConstructor;
|
|
41
|
+
default: boolean;
|
|
42
|
+
};
|
|
43
|
+
isPlaying: {
|
|
44
|
+
type: BooleanConstructor;
|
|
45
|
+
default: boolean;
|
|
46
|
+
};
|
|
47
|
+
muted: {
|
|
48
|
+
type: BooleanConstructor;
|
|
49
|
+
default: boolean;
|
|
50
|
+
};
|
|
51
|
+
isDark: {
|
|
52
|
+
type: BooleanConstructor;
|
|
53
|
+
default: boolean;
|
|
54
|
+
};
|
|
55
|
+
screenClientId: {
|
|
56
|
+
type: StringConstructor;
|
|
57
|
+
required: false;
|
|
58
|
+
};
|
|
59
|
+
wsUrl: {
|
|
60
|
+
type: StringConstructor;
|
|
61
|
+
required: false;
|
|
62
|
+
};
|
|
63
|
+
}>> & Readonly<{
|
|
64
|
+
onEnded?: ((...args: any[]) => any) | undefined;
|
|
65
|
+
"onUpdate:isPlaying"?: ((...args: any[]) => any) | undefined;
|
|
66
|
+
"onUpdate:visible"?: ((...args: any[]) => any) | undefined;
|
|
67
|
+
}>, {
|
|
68
|
+
visible: boolean;
|
|
69
|
+
isPlaying: boolean;
|
|
70
|
+
muted: boolean;
|
|
71
|
+
isDark: boolean;
|
|
72
|
+
}, {}, {}, {}, string, import('vue').ComponentProvideOptions, true, {}, any>;
|
|
73
|
+
export default _default;
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import { Plugin } from 'vue';
|
|
2
|
+
import { default as VirtualHumanPersona } from './components/VirtualHumanPersona.vue';
|
|
3
|
+
import { default as VirtualHumanEventAdapter } from './components/VirtualHumanEventAdapter.vue';
|
|
4
|
+
|
|
5
|
+
export { VirtualHumanPersona, VirtualHumanEventAdapter };
|
|
6
|
+
declare const _default: Plugin;
|
|
7
|
+
export default _default;
|
package/dist/style.css
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
.fade-enter-active[data-v-288a71c0],.fade-leave-active[data-v-288a71c0]{transition:opacity .5s ease,transform .5s ease}.fade-enter-from[data-v-288a71c0],.fade-leave-to[data-v-288a71c0]{opacity:0;transform:translateY(10px)}.virtual-human-container[data-v-288a71c0]{position:relative;width:100%;max-width:400px;border-radius:1rem;overflow:hidden;box-shadow:0 10px 25px #0000001a;background:#fff;transition:background-color .3s ease}.virtual-human-container.is-dark[data-v-288a71c0]{background:#1f2937;box-shadow:0 10px 25px #00000080}.video-wrapper[data-v-288a71c0]{position:relative;width:100%;aspect-ratio:9 / 16;background:#000}.persona-video[data-v-288a71c0]{width:100%;height:100%;object-fit:cover}.overlay[data-v-288a71c0]{position:absolute;top:1rem;right:1rem;z-index:10}.status-badge[data-v-288a71c0]{display:flex;align-items:center;gap:.5rem;padding:.25rem .75rem;border-radius:9999px;font-size:.75rem;font-weight:500;color:#fff;-webkit-backdrop-filter:blur(4px);backdrop-filter:blur(4px)}.status-badge.paused[data-v-288a71c0]{background:#ef4444cc}.status-badge.playing[data-v-288a71c0]{background:#22c55ecc}.dot[data-v-288a71c0]{width:6px;height:6px;border-radius:50%;background-color:#fff}@keyframes pulse-288a71c0{0%,to{opacity:1}50%{opacity:.5}}.animate-pulse[data-v-288a71c0]{animation:pulse-288a71c0 2s cubic-bezier(.4,0,.6,1) infinite}
|
|
@@ -0,0 +1,245 @@
|
|
|
1
|
+
import { defineComponent as H, ref as g, watch as w, onMounted as I, onUnmounted as _, openBlock as h, createBlock as E, Transition as x, withCtx as B, createElementBlock as A, normalizeClass as W, createElementVNode as y, createTextVNode as U, createCommentVNode as $, renderSlot as D } from "vue";
|
|
2
|
+
const F = { class: "video-wrapper" }, T = ["src", "muted"], q = { class: "overlay" }, M = {
|
|
3
|
+
key: 0,
|
|
4
|
+
class: "status-badge paused"
|
|
5
|
+
}, N = {
|
|
6
|
+
key: 1,
|
|
7
|
+
class: "status-badge playing"
|
|
8
|
+
}, R = /* @__PURE__ */ H({
|
|
9
|
+
__name: "VirtualHumanPersona",
|
|
10
|
+
props: {
|
|
11
|
+
videoSrc: {
|
|
12
|
+
type: String,
|
|
13
|
+
required: !0
|
|
14
|
+
},
|
|
15
|
+
visible: {
|
|
16
|
+
type: Boolean,
|
|
17
|
+
default: !0
|
|
18
|
+
},
|
|
19
|
+
isPlaying: {
|
|
20
|
+
type: Boolean,
|
|
21
|
+
default: !1
|
|
22
|
+
},
|
|
23
|
+
muted: {
|
|
24
|
+
type: Boolean,
|
|
25
|
+
default: !0
|
|
26
|
+
// Auto-play policies usually require muted
|
|
27
|
+
},
|
|
28
|
+
isDark: {
|
|
29
|
+
type: Boolean,
|
|
30
|
+
default: !1
|
|
31
|
+
},
|
|
32
|
+
screenClientId: {
|
|
33
|
+
type: String,
|
|
34
|
+
required: !1
|
|
35
|
+
},
|
|
36
|
+
wsUrl: {
|
|
37
|
+
type: String,
|
|
38
|
+
required: !1
|
|
39
|
+
}
|
|
40
|
+
},
|
|
41
|
+
emits: ["update:isPlaying", "ended", "update:visible"],
|
|
42
|
+
setup(r, { emit: p }) {
|
|
43
|
+
const e = r, s = p, o = g(null), c = g(null), n = g(!1), u = () => {
|
|
44
|
+
if (c.value && c.value.close(), !(!e.wsUrl || !e.screenClientId))
|
|
45
|
+
try {
|
|
46
|
+
const i = new URL(e.wsUrl);
|
|
47
|
+
i.searchParams.append("sessionId", e.screenClientId + "-persona"), c.value = new WebSocket(i.toString()), c.value.onopen = () => {
|
|
48
|
+
n.value = !0, console.log(`[VirtualHumanPersona] Connected to ${e.wsUrl} for session ${e.screenClientId}-persona`);
|
|
49
|
+
}, c.value.onmessage = (t) => {
|
|
50
|
+
try {
|
|
51
|
+
const a = JSON.parse(t.data);
|
|
52
|
+
b(a);
|
|
53
|
+
} catch (a) {
|
|
54
|
+
console.error("[VirtualHumanPersona] Failed to parse message:", t.data, a);
|
|
55
|
+
}
|
|
56
|
+
}, c.value.onerror = (t) => {
|
|
57
|
+
console.error("[VirtualHumanPersona] WebSocket error:", t);
|
|
58
|
+
}, c.value.onclose = () => {
|
|
59
|
+
n.value = !1, console.log("[VirtualHumanPersona] WebSocket disconnected");
|
|
60
|
+
};
|
|
61
|
+
} catch (i) {
|
|
62
|
+
console.error("[VirtualHumanPersona] Failed to initialize WebSocket:", i);
|
|
63
|
+
}
|
|
64
|
+
}, b = (i) => {
|
|
65
|
+
const { type: t, action: a } = i;
|
|
66
|
+
t === "control" && (a === "play" ? (s("update:isPlaying", !0), s("update:visible", !0)) : a === "pause" ? (s("update:isPlaying", !1), s("update:visible", !1)) : a === "stop" && (s("update:isPlaying", !1), s("update:visible", !1), o.value && (o.value.currentTime = 0)));
|
|
67
|
+
};
|
|
68
|
+
w(() => e.screenClientId, () => {
|
|
69
|
+
e.screenClientId && e.wsUrl && u();
|
|
70
|
+
}), w(() => e.wsUrl, () => {
|
|
71
|
+
e.screenClientId && e.wsUrl && u();
|
|
72
|
+
}), w(() => e.isPlaying, (i) => {
|
|
73
|
+
o.value && (i ? o.value.play().catch((t) => console.error("Video play failed:", t)) : o.value.pause());
|
|
74
|
+
});
|
|
75
|
+
const k = () => {
|
|
76
|
+
e.isPlaying || s("update:isPlaying", !0);
|
|
77
|
+
}, C = () => {
|
|
78
|
+
e.isPlaying && s("update:isPlaying", !1);
|
|
79
|
+
}, v = () => {
|
|
80
|
+
s("update:isPlaying", !1), s("ended");
|
|
81
|
+
};
|
|
82
|
+
return I(() => {
|
|
83
|
+
e.isPlaying && o.value && o.value.play().catch((i) => console.error("Video play failed:", i)), e.screenClientId && e.wsUrl && u();
|
|
84
|
+
}), _(() => {
|
|
85
|
+
c.value && c.value.close();
|
|
86
|
+
}), (i, t) => (h(), E(x, { name: "fade" }, {
|
|
87
|
+
default: B(() => [
|
|
88
|
+
r.visible ? (h(), A("div", {
|
|
89
|
+
key: 0,
|
|
90
|
+
class: W(["virtual-human-container", { "is-dark": r.isDark }])
|
|
91
|
+
}, [
|
|
92
|
+
y("div", F, [
|
|
93
|
+
y("video", {
|
|
94
|
+
ref_key: "videoRef",
|
|
95
|
+
ref: o,
|
|
96
|
+
src: r.videoSrc,
|
|
97
|
+
class: "persona-video",
|
|
98
|
+
muted: r.muted,
|
|
99
|
+
playsinline: "",
|
|
100
|
+
loop: "",
|
|
101
|
+
onPlay: k,
|
|
102
|
+
onPause: C,
|
|
103
|
+
onEnded: v
|
|
104
|
+
}, null, 40, T),
|
|
105
|
+
y("div", q, [
|
|
106
|
+
r.isPlaying ? (h(), A("div", N, [...t[1] || (t[1] = [
|
|
107
|
+
y("span", { class: "dot animate-pulse" }, null, -1),
|
|
108
|
+
U(" 播放中 ", -1)
|
|
109
|
+
])])) : (h(), A("div", M, [...t[0] || (t[0] = [
|
|
110
|
+
y("span", { class: "dot" }, null, -1),
|
|
111
|
+
U(" 暂停中 ", -1)
|
|
112
|
+
])]))
|
|
113
|
+
])
|
|
114
|
+
])
|
|
115
|
+
], 2)) : $("", !0)
|
|
116
|
+
]),
|
|
117
|
+
_: 1
|
|
118
|
+
}));
|
|
119
|
+
}
|
|
120
|
+
}), z = (r, p) => {
|
|
121
|
+
const e = r.__vccOpts || r;
|
|
122
|
+
for (const [s, o] of p)
|
|
123
|
+
e[s] = o;
|
|
124
|
+
return e;
|
|
125
|
+
}, O = /* @__PURE__ */ z(R, [["__scopeId", "data-v-288a71c0"]]), J = /* @__PURE__ */ H({
|
|
126
|
+
__name: "VirtualHumanEventAdapter",
|
|
127
|
+
props: {
|
|
128
|
+
screenClientId: {
|
|
129
|
+
type: String,
|
|
130
|
+
required: !0
|
|
131
|
+
},
|
|
132
|
+
wsUrl: {
|
|
133
|
+
type: String,
|
|
134
|
+
required: !0
|
|
135
|
+
}
|
|
136
|
+
},
|
|
137
|
+
emits: ["highlight", "showDialog", "end", "pause", "connected", "error"],
|
|
138
|
+
setup(r, { emit: p }) {
|
|
139
|
+
const e = r, s = p, o = g(null), c = g(!1);
|
|
140
|
+
let n = null, u = 0;
|
|
141
|
+
const b = () => {
|
|
142
|
+
n || (n = new (window.AudioContext || window.webkitAudioContext)({
|
|
143
|
+
sampleRate: 24e3
|
|
144
|
+
})), n.state === "suspended" && n.resume();
|
|
145
|
+
}, k = (t) => {
|
|
146
|
+
if (b(), !!n)
|
|
147
|
+
try {
|
|
148
|
+
const a = window.atob(t), l = a.length, m = new Uint8Array(l);
|
|
149
|
+
for (let d = 0; d < l; d++)
|
|
150
|
+
m[d] = a.charCodeAt(d);
|
|
151
|
+
const f = new Int16Array(m.buffer), P = new Float32Array(f.length);
|
|
152
|
+
for (let d = 0; d < f.length; d++)
|
|
153
|
+
P[d] = f[d] / 32768;
|
|
154
|
+
const S = n.createBuffer(1, P.length, 24e3);
|
|
155
|
+
S.getChannelData(0).set(P);
|
|
156
|
+
const V = n.createBufferSource();
|
|
157
|
+
V.buffer = S, V.connect(n.destination), u < n.currentTime && (u = n.currentTime), V.start(u), u += S.duration;
|
|
158
|
+
} catch (a) {
|
|
159
|
+
console.error("[VirtualHumanEventAdapter] Failed to decode and play audio:", a);
|
|
160
|
+
}
|
|
161
|
+
}, C = (t) => {
|
|
162
|
+
if (n)
|
|
163
|
+
switch (t) {
|
|
164
|
+
case "play":
|
|
165
|
+
n.state === "suspended" && n.resume();
|
|
166
|
+
break;
|
|
167
|
+
case "pause":
|
|
168
|
+
n.state === "running" && n.suspend();
|
|
169
|
+
break;
|
|
170
|
+
case "stop":
|
|
171
|
+
n.close(), n = null, u = 0;
|
|
172
|
+
break;
|
|
173
|
+
default:
|
|
174
|
+
console.warn(`[VirtualHumanEventAdapter] Unknown control action: ${t}`);
|
|
175
|
+
}
|
|
176
|
+
}, v = () => {
|
|
177
|
+
o.value && o.value.close();
|
|
178
|
+
try {
|
|
179
|
+
const t = new URL(e.wsUrl);
|
|
180
|
+
t.searchParams.append("sessionId", e.screenClientId + "-event"), o.value = new WebSocket(t.toString()), o.value.onopen = () => {
|
|
181
|
+
c.value = !0, s("connected"), console.log(`[VirtualHumanEventAdapter] Connected to ${e.wsUrl} for session ${e.screenClientId}-event`);
|
|
182
|
+
}, o.value.onmessage = (a) => {
|
|
183
|
+
try {
|
|
184
|
+
const l = JSON.parse(a.data);
|
|
185
|
+
i(l);
|
|
186
|
+
} catch (l) {
|
|
187
|
+
console.error("[VirtualHumanEventAdapter] Failed to parse message:", a.data, l);
|
|
188
|
+
}
|
|
189
|
+
}, o.value.onerror = (a) => {
|
|
190
|
+
console.error("[VirtualHumanEventAdapter] WebSocket error:", a), s("error", a);
|
|
191
|
+
}, o.value.onclose = () => {
|
|
192
|
+
c.value = !1, console.log("[VirtualHumanEventAdapter] WebSocket disconnected");
|
|
193
|
+
};
|
|
194
|
+
} catch (t) {
|
|
195
|
+
console.error("[VirtualHumanEventAdapter] Failed to initialize WebSocket:", t), s("error", t);
|
|
196
|
+
}
|
|
197
|
+
}, i = (t) => {
|
|
198
|
+
const { type: a, payload: l, action: m } = t;
|
|
199
|
+
switch (a) {
|
|
200
|
+
case "audio":
|
|
201
|
+
const f = (l == null ? void 0 : l.data) || t.data;
|
|
202
|
+
f && k(f);
|
|
203
|
+
break;
|
|
204
|
+
case "dialog_event":
|
|
205
|
+
t.event && s(t.event, t.params);
|
|
206
|
+
break;
|
|
207
|
+
case "control":
|
|
208
|
+
m && C(m);
|
|
209
|
+
break;
|
|
210
|
+
case "highlight":
|
|
211
|
+
s("highlight", l);
|
|
212
|
+
break;
|
|
213
|
+
case "showDialog":
|
|
214
|
+
s("showDialog", l);
|
|
215
|
+
break;
|
|
216
|
+
case "end":
|
|
217
|
+
s("end", l);
|
|
218
|
+
break;
|
|
219
|
+
case "pause":
|
|
220
|
+
s("pause", l);
|
|
221
|
+
break;
|
|
222
|
+
default:
|
|
223
|
+
console.warn(`[VirtualHumanEventAdapter] Unknown message type: ${a}`);
|
|
224
|
+
}
|
|
225
|
+
};
|
|
226
|
+
return w(() => e.screenClientId, () => {
|
|
227
|
+
e.screenClientId && e.wsUrl && v();
|
|
228
|
+
}), w(() => e.wsUrl, () => {
|
|
229
|
+
e.screenClientId && e.wsUrl && v();
|
|
230
|
+
}), I(() => {
|
|
231
|
+
e.screenClientId && e.wsUrl && v();
|
|
232
|
+
}), _(() => {
|
|
233
|
+
o.value && o.value.close(), n && n.close();
|
|
234
|
+
}), (t, a) => D(t.$slots, "default");
|
|
235
|
+
}
|
|
236
|
+
}), L = (r) => {
|
|
237
|
+
r.component("VirtualHumanPersona", O), r.component("VirtualHumanEventAdapter", J);
|
|
238
|
+
}, G = {
|
|
239
|
+
install: L
|
|
240
|
+
};
|
|
241
|
+
export {
|
|
242
|
+
J as VirtualHumanEventAdapter,
|
|
243
|
+
O as VirtualHumanPersona,
|
|
244
|
+
G as default
|
|
245
|
+
};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
(function(u,e){typeof exports=="object"&&typeof module<"u"?e(exports,require("vue")):typeof define=="function"&&define.amd?define(["exports","vue"],e):(u=typeof globalThis<"u"?globalThis:u||self,e(u.VirtualHumanCf={},u.Vue))})(this,function(u,e){"use strict";const U={class:"video-wrapper"},_=["src","muted"],A={class:"overlay"},E={key:0,class:"status-badge paused"},H={key:1,class:"status-badge playing"},P=((l,y)=>{const t=l.__vccOpts||l;for(const[s,r]of y)t[s]=r;return t})(e.defineComponent({__name:"VirtualHumanPersona",props:{videoSrc:{type:String,required:!0},visible:{type:Boolean,default:!0},isPlaying:{type:Boolean,default:!1},muted:{type:Boolean,default:!0},isDark:{type:Boolean,default:!1},screenClientId:{type:String,required:!1},wsUrl:{type:String,required:!1}},emits:["update:isPlaying","ended","update:visible"],setup(l,{emit:y}){const t=l,s=y,r=e.ref(null),d=e.ref(null),a=e.ref(!1),f=()=>{if(d.value&&d.value.close(),!(!t.wsUrl||!t.screenClientId))try{const c=new URL(t.wsUrl);c.searchParams.append("sessionId",t.screenClientId+"-persona"),d.value=new WebSocket(c.toString()),d.value.onopen=()=>{a.value=!0,console.log(`[VirtualHumanPersona] Connected to ${t.wsUrl} for session ${t.screenClientId}-persona`)},d.value.onmessage=n=>{try{const o=JSON.parse(n.data);w(o)}catch(o){console.error("[VirtualHumanPersona] Failed to parse message:",n.data,o)}},d.value.onerror=n=>{console.error("[VirtualHumanPersona] WebSocket error:",n)},d.value.onclose=()=>{a.value=!1,console.log("[VirtualHumanPersona] WebSocket disconnected")}}catch(c){console.error("[VirtualHumanPersona] Failed to initialize 
WebSocket:",c)}},w=c=>{const{type:n,action:o}=c;n==="control"&&(o==="play"?(s("update:isPlaying",!0),s("update:visible",!0)):o==="pause"?(s("update:isPlaying",!1),s("update:visible",!1)):o==="stop"&&(s("update:isPlaying",!1),s("update:visible",!1),r.value&&(r.value.currentTime=0)))};e.watch(()=>t.screenClientId,()=>{t.screenClientId&&t.wsUrl&&f()}),e.watch(()=>t.wsUrl,()=>{t.screenClientId&&t.wsUrl&&f()}),e.watch(()=>t.isPlaying,c=>{r.value&&(c?r.value.play().catch(n=>console.error("Video play failed:",n)):r.value.pause())});const k=()=>{t.isPlaying||s("update:isPlaying",!0)},b=()=>{t.isPlaying&&s("update:isPlaying",!1)},g=()=>{s("update:isPlaying",!1),s("ended")};return e.onMounted(()=>{t.isPlaying&&r.value&&r.value.play().catch(c=>console.error("Video play failed:",c)),t.screenClientId&&t.wsUrl&&f()}),e.onUnmounted(()=>{d.value&&d.value.close()}),(c,n)=>(e.openBlock(),e.createBlock(e.Transition,{name:"fade"},{default:e.withCtx(()=>[l.visible?(e.openBlock(),e.createElementBlock("div",{key:0,class:e.normalizeClass(["virtual-human-container",{"is-dark":l.isDark}])},[e.createElementVNode("div",U,[e.createElementVNode("video",{ref_key:"videoRef",ref:r,src:l.videoSrc,class:"persona-video",muted:l.muted,playsinline:"",loop:"",onPlay:k,onPause:b,onEnded:g},null,40,_),e.createElementVNode("div",A,[l.isPlaying?(e.openBlock(),e.createElementBlock("div",H,[...n[1]||(n[1]=[e.createElementVNode("span",{class:"dot animate-pulse"},null,-1),e.createTextVNode(" 播放中 ",-1)])])):(e.openBlock(),e.createElementBlock("div",E,[...n[0]||(n[0]=[e.createElementVNode("span",{class:"dot"},null,-1),e.createTextVNode(" 暂停中 ",-1)])]))])])],2)):e.createCommentVNode("",!0)]),_:1}))}}),[["__scopeId","data-v-288a71c0"]]),S=e.defineComponent({__name:"VirtualHumanEventAdapter",props:{screenClientId:{type:String,required:!0},wsUrl:{type:String,required:!0}},emits:["highlight","showDialog","end","pause","connected","error"],setup(l,{emit:y}){const t=l,s=y,r=e.ref(null),d=e.ref(!1);let a=null,f=0;const 
w=()=>{a||(a=new(window.AudioContext||window.webkitAudioContext)({sampleRate:24e3})),a.state==="suspended"&&a.resume()},k=n=>{if(w(),!!a)try{const o=window.atob(n),i=o.length,h=new Uint8Array(i);for(let p=0;p<i;p++)h[p]=o.charCodeAt(p);const m=new Int16Array(h.buffer),v=new Float32Array(m.length);for(let p=0;p<m.length;p++)v[p]=m[p]/32768;const C=a.createBuffer(1,v.length,24e3);C.getChannelData(0).set(v);const V=a.createBufferSource();V.buffer=C,V.connect(a.destination),f<a.currentTime&&(f=a.currentTime),V.start(f),f+=C.duration}catch(o){console.error("[VirtualHumanEventAdapter] Failed to decode and play audio:",o)}},b=n=>{if(a)switch(n){case"play":a.state==="suspended"&&a.resume();break;case"pause":a.state==="running"&&a.suspend();break;case"stop":a.close(),a=null,f=0;break;default:console.warn(`[VirtualHumanEventAdapter] Unknown control action: ${n}`)}},g=()=>{r.value&&r.value.close();try{const n=new URL(t.wsUrl);n.searchParams.append("sessionId",t.screenClientId+"-event"),r.value=new WebSocket(n.toString()),r.value.onopen=()=>{d.value=!0,s("connected"),console.log(`[VirtualHumanEventAdapter] Connected to ${t.wsUrl} for session ${t.screenClientId}-event`)},r.value.onmessage=o=>{try{const i=JSON.parse(o.data);c(i)}catch(i){console.error("[VirtualHumanEventAdapter] Failed to parse message:",o.data,i)}},r.value.onerror=o=>{console.error("[VirtualHumanEventAdapter] WebSocket error:",o),s("error",o)},r.value.onclose=()=>{d.value=!1,console.log("[VirtualHumanEventAdapter] WebSocket disconnected")}}catch(n){console.error("[VirtualHumanEventAdapter] Failed to initialize WebSocket:",n),s("error",n)}},c=n=>{const{type:o,payload:i,action:h}=n;switch(o){case"audio":const m=(i==null?void 
0:i.data)||n.data;m&&k(m);break;case"dialog_event":n.event&&s(n.event,n.params);break;case"control":h&&b(h);break;case"highlight":s("highlight",i);break;case"showDialog":s("showDialog",i);break;case"end":s("end",i);break;case"pause":s("pause",i);break;default:console.warn(`[VirtualHumanEventAdapter] Unknown message type: ${o}`)}};return e.watch(()=>t.screenClientId,()=>{t.screenClientId&&t.wsUrl&&g()}),e.watch(()=>t.wsUrl,()=>{t.screenClientId&&t.wsUrl&&g()}),e.onMounted(()=>{t.screenClientId&&t.wsUrl&&g()}),e.onUnmounted(()=>{r.value&&r.value.close(),a&&a.close()}),(n,o)=>e.renderSlot(n.$slots,"default")}}),I={install:l=>{l.component("VirtualHumanPersona",P),l.component("VirtualHumanEventAdapter",S)}};u.VirtualHumanEventAdapter=S,u.VirtualHumanPersona=P,u.default=I,Object.defineProperties(u,{__esModule:{value:!0},[Symbol.toStringTag]:{value:"Module"}})});
|
package/package.json
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "virtual-human-cf",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"description": "Vue3 Digital Human Component Package by cf ",
|
|
5
|
+
"main": "dist/virtual-human-cf.umd.js",
|
|
6
|
+
"module": "dist/virtual-human-cf.es.js",
|
|
7
|
+
"types": "dist/index.d.ts",
|
|
8
|
+
"exports": {
|
|
9
|
+
".": {
|
|
10
|
+
"import": "./dist/virtual-human-cf.es.js",
|
|
11
|
+
"require": "./dist/virtual-human-cf.umd.js",
|
|
12
|
+
"types": "./dist/index.d.ts"
|
|
13
|
+
},
|
|
14
|
+
"./style.css": "./dist/style.css"
|
|
15
|
+
},
|
|
16
|
+
"scripts": {
|
|
17
|
+
"dev": "vite",
|
|
18
|
+
"build": "vue-tsc --noEmit && vite build"
|
|
19
|
+
},
|
|
20
|
+
"peerDependencies": {
|
|
21
|
+
"vue": "^3.0.0"
|
|
22
|
+
},
|
|
23
|
+
"devDependencies": {
|
|
24
|
+
"@vitejs/plugin-vue": "^5.0.0",
|
|
25
|
+
"typescript": "^5.9.3",
|
|
26
|
+
"vite": "^5.0.0",
|
|
27
|
+
"vite-plugin-dts": "^3.0.0",
|
|
28
|
+
"vue": "^3.4.0",
|
|
29
|
+
"vue-tsc": "^3.2.6"
|
|
30
|
+
},
|
|
31
|
+
"keywords": [
|
|
32
|
+
"vue",
|
|
33
|
+
"virtual-human",
|
|
34
|
+
"component"
|
|
35
|
+
],
|
|
36
|
+
"author": "cuihan",
|
|
37
|
+
"license": "MIT"
|
|
38
|
+
}
|
|
@@ -0,0 +1,205 @@
|
|
|
1
|
+
<template>
|
|
2
|
+
<slot></slot>
|
|
3
|
+
</template>
|
|
4
|
+
|
|
5
|
+
<script setup lang="ts">
|
|
6
|
+
import { ref, watch, onMounted, onUnmounted } from 'vue';
|
|
7
|
+
|
|
8
|
+
const props = defineProps({
|
|
9
|
+
screenClientId: {
|
|
10
|
+
type: String,
|
|
11
|
+
required: true,
|
|
12
|
+
},
|
|
13
|
+
wsUrl: {
|
|
14
|
+
type: String,
|
|
15
|
+
required: true,
|
|
16
|
+
}
|
|
17
|
+
});
|
|
18
|
+
|
|
19
|
+
const emit = defineEmits(['highlight', 'showDialog', 'end', 'pause', 'connected', 'error']);
|
|
20
|
+
|
|
21
|
+
const ws = ref<WebSocket | null>(null);
|
|
22
|
+
const isConnected = ref(false);
|
|
23
|
+
|
|
24
|
+
// Audio playback variables
|
|
25
|
+
let audioContext: AudioContext | null = null;
|
|
26
|
+
let nextStartTime = 0;
|
|
27
|
+
|
|
28
|
+
const initAudioContext = () => {
|
|
29
|
+
if (!audioContext) {
|
|
30
|
+
audioContext = new (window.AudioContext || (window as any).webkitAudioContext)({
|
|
31
|
+
sampleRate: 24000
|
|
32
|
+
});
|
|
33
|
+
}
|
|
34
|
+
if (audioContext.state === 'suspended') {
|
|
35
|
+
audioContext.resume();
|
|
36
|
+
}
|
|
37
|
+
};
|
|
38
|
+
|
|
39
|
+
const handleAudioMessage = (base64Data: string) => {
|
|
40
|
+
initAudioContext();
|
|
41
|
+
if (!audioContext) return;
|
|
42
|
+
|
|
43
|
+
try {
|
|
44
|
+
const binaryString = window.atob(base64Data);
|
|
45
|
+
const len = binaryString.length;
|
|
46
|
+
const bytes = new Uint8Array(len);
|
|
47
|
+
for (let i = 0; i < len; i++) {
|
|
48
|
+
bytes[i] = binaryString.charCodeAt(i);
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
// 16-bit PCM is 2 bytes per sample
|
|
52
|
+
const int16Array = new Int16Array(bytes.buffer);
|
|
53
|
+
const float32Array = new Float32Array(int16Array.length);
|
|
54
|
+
for (let i = 0; i < int16Array.length; i++) {
|
|
55
|
+
float32Array[i] = int16Array[i] / 32768.0; // convert to range -1 to 1
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
const audioBuffer = audioContext.createBuffer(1, float32Array.length, 24000);
|
|
59
|
+
audioBuffer.getChannelData(0).set(float32Array);
|
|
60
|
+
|
|
61
|
+
const source = audioContext.createBufferSource();
|
|
62
|
+
source.buffer = audioBuffer;
|
|
63
|
+
source.connect(audioContext.destination);
|
|
64
|
+
|
|
65
|
+
// Keep track of the start time for seamless playback
|
|
66
|
+
if (nextStartTime < audioContext.currentTime) {
|
|
67
|
+
nextStartTime = audioContext.currentTime;
|
|
68
|
+
}
|
|
69
|
+
source.start(nextStartTime);
|
|
70
|
+
nextStartTime += audioBuffer.duration;
|
|
71
|
+
} catch (error) {
|
|
72
|
+
console.error('[VirtualHumanEventAdapter] Failed to decode and play audio:', error);
|
|
73
|
+
}
|
|
74
|
+
};
|
|
75
|
+
|
|
76
|
+
const handleControlMessage = (action: string) => {
|
|
77
|
+
if (!audioContext) return;
|
|
78
|
+
switch (action) {
|
|
79
|
+
case 'play':
|
|
80
|
+
if (audioContext.state === 'suspended') {
|
|
81
|
+
audioContext.resume();
|
|
82
|
+
}
|
|
83
|
+
break;
|
|
84
|
+
case 'pause':
|
|
85
|
+
if (audioContext.state === 'running') {
|
|
86
|
+
audioContext.suspend();
|
|
87
|
+
}
|
|
88
|
+
break;
|
|
89
|
+
case 'stop':
|
|
90
|
+
audioContext.close();
|
|
91
|
+
audioContext = null;
|
|
92
|
+
nextStartTime = 0;
|
|
93
|
+
break;
|
|
94
|
+
default:
|
|
95
|
+
console.warn(`[VirtualHumanEventAdapter] Unknown control action: ${action}`);
|
|
96
|
+
}
|
|
97
|
+
};
|
|
98
|
+
|
|
99
|
+
const connectWebSocket = () => {
|
|
100
|
+
if (ws.value) {
|
|
101
|
+
ws.value.close();
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
try {
|
|
105
|
+
const url = new URL(props.wsUrl);
|
|
106
|
+
url.searchParams.append('sessionId', props.screenClientId + '-event');
|
|
107
|
+
ws.value = new WebSocket(url.toString());
|
|
108
|
+
|
|
109
|
+
ws.value.onopen = () => {
|
|
110
|
+
isConnected.value = true;
|
|
111
|
+
emit('connected');
|
|
112
|
+
console.log(`[VirtualHumanEventAdapter] Connected to ${props.wsUrl} for session ${props.screenClientId}-event`);
|
|
113
|
+
};
|
|
114
|
+
|
|
115
|
+
ws.value.onmessage = (event) => {
|
|
116
|
+
try {
|
|
117
|
+
const message = JSON.parse(event.data);
|
|
118
|
+
handleMessage(message);
|
|
119
|
+
} catch (e) {
|
|
120
|
+
console.error('[VirtualHumanEventAdapter] Failed to parse message:', event.data, e);
|
|
121
|
+
}
|
|
122
|
+
};
|
|
123
|
+
|
|
124
|
+
ws.value.onerror = (error) => {
|
|
125
|
+
console.error('[VirtualHumanEventAdapter] WebSocket error:', error);
|
|
126
|
+
emit('error', error);
|
|
127
|
+
};
|
|
128
|
+
|
|
129
|
+
ws.value.onclose = () => {
|
|
130
|
+
isConnected.value = false;
|
|
131
|
+
console.log('[VirtualHumanEventAdapter] WebSocket disconnected');
|
|
132
|
+
};
|
|
133
|
+
} catch (error) {
|
|
134
|
+
console.error('[VirtualHumanEventAdapter] Failed to initialize WebSocket:', error);
|
|
135
|
+
emit('error', error);
|
|
136
|
+
}
|
|
137
|
+
};
|
|
138
|
+
|
|
139
|
+
const handleMessage = (msg: any) => {
|
|
140
|
+
const { type, payload, action } = msg;
|
|
141
|
+
switch (type) {
|
|
142
|
+
case 'audio':
|
|
143
|
+
const base64Data = payload?.data || msg.data;
|
|
144
|
+
if (base64Data) {
|
|
145
|
+
handleAudioMessage(base64Data);
|
|
146
|
+
}
|
|
147
|
+
break;
|
|
148
|
+
case 'dialog_event':
|
|
149
|
+
if (msg.event) {
|
|
150
|
+
emit(msg.event as any, msg.params);
|
|
151
|
+
}
|
|
152
|
+
break;
|
|
153
|
+
case 'control':
|
|
154
|
+
if (action) {
|
|
155
|
+
handleControlMessage(action);
|
|
156
|
+
}
|
|
157
|
+
break;
|
|
158
|
+
case 'highlight':
|
|
159
|
+
// 触发高亮大屏某区域事件
|
|
160
|
+
emit('highlight', payload);
|
|
161
|
+
break;
|
|
162
|
+
case 'showDialog':
|
|
163
|
+
// 触发弹窗显示事件
|
|
164
|
+
emit('showDialog', payload);
|
|
165
|
+
break;
|
|
166
|
+
case 'end':
|
|
167
|
+
// 触发数字人对话结束事件
|
|
168
|
+
emit('end', payload);
|
|
169
|
+
break;
|
|
170
|
+
case 'pause':
|
|
171
|
+
// 触发暂停播放事件
|
|
172
|
+
emit('pause', payload);
|
|
173
|
+
break;
|
|
174
|
+
default:
|
|
175
|
+
console.warn(`[VirtualHumanEventAdapter] Unknown message type: ${type}`);
|
|
176
|
+
}
|
|
177
|
+
};
|
|
178
|
+
|
|
179
|
+
watch(() => props.screenClientId, () => {
|
|
180
|
+
if (props.screenClientId && props.wsUrl) {
|
|
181
|
+
connectWebSocket();
|
|
182
|
+
}
|
|
183
|
+
});
|
|
184
|
+
|
|
185
|
+
watch(() => props.wsUrl, () => {
|
|
186
|
+
if (props.screenClientId && props.wsUrl) {
|
|
187
|
+
connectWebSocket();
|
|
188
|
+
}
|
|
189
|
+
});
|
|
190
|
+
|
|
191
|
+
onMounted(() => {
|
|
192
|
+
if (props.screenClientId && props.wsUrl) {
|
|
193
|
+
connectWebSocket();
|
|
194
|
+
}
|
|
195
|
+
});
|
|
196
|
+
|
|
197
|
+
onUnmounted(() => {
|
|
198
|
+
if (ws.value) {
|
|
199
|
+
ws.value.close();
|
|
200
|
+
}
|
|
201
|
+
if (audioContext) {
|
|
202
|
+
audioContext.close();
|
|
203
|
+
}
|
|
204
|
+
});
|
|
205
|
+
</script>
|
|
@@ -0,0 +1,274 @@
|
|
|
1
|
+
<template>
|
|
2
|
+
<Transition name="fade">
|
|
3
|
+
<div
|
|
4
|
+
v-if="visible"
|
|
5
|
+
class="virtual-human-container"
|
|
6
|
+
:class="{ 'is-dark': isDark }"
|
|
7
|
+
>
|
|
8
|
+
<div class="video-wrapper">
|
|
9
|
+
<video
|
|
10
|
+
ref="videoRef"
|
|
11
|
+
:src="videoSrc"
|
|
12
|
+
class="persona-video"
|
|
13
|
+
:muted="muted"
|
|
14
|
+
playsinline
|
|
15
|
+
loop
|
|
16
|
+
@play="handlePlay"
|
|
17
|
+
@pause="handlePause"
|
|
18
|
+
@ended="handleEnded"
|
|
19
|
+
></video>
|
|
20
|
+
|
|
21
|
+
<!-- UI Overlay for controls or status -->
|
|
22
|
+
<div class="overlay">
|
|
23
|
+
<div v-if="!isPlaying" class="status-badge paused">
|
|
24
|
+
<span class="dot"></span>
|
|
25
|
+
暂停中
|
|
26
|
+
</div>
|
|
27
|
+
<div v-else class="status-badge playing">
|
|
28
|
+
<span class="dot animate-pulse"></span>
|
|
29
|
+
播放中
|
|
30
|
+
</div>
|
|
31
|
+
</div>
|
|
32
|
+
</div>
|
|
33
|
+
</div>
|
|
34
|
+
</Transition>
|
|
35
|
+
</template>
|
|
36
|
+
|
|
37
|
+
<script setup lang="ts">
import { ref, watch, onMounted, onUnmounted } from 'vue';

// Component contract: a looping persona video whose playback/visibility can
// be driven externally (v-model style) and, optionally, by a WebSocket
// control channel identified via screenClientId + wsUrl.
const props = defineProps({
  // URL of the looping persona video clip.
  videoSrc: {
    type: String,
    required: true,
  },
  // Whether the component is rendered at all (drives the fade transition).
  visible: {
    type: Boolean,
    default: true,
  },
  // Externally controlled playback state (two-way via update:isPlaying).
  isPlaying: {
    type: Boolean,
    default: false,
  },
  muted: {
    type: Boolean,
    default: true, // Auto-play policies usually require muted
  },
  // Switches the container to the dark color scheme.
  isDark: {
    type: Boolean,
    default: false,
  },
  // Client id used to derive this component's WebSocket session id
  // (`<screenClientId>-persona`). Optional: no socket without it.
  screenClientId: {
    type: String,
    required: false,
  },
  // Base WebSocket URL of the control server. Optional: no socket without it.
  wsUrl: {
    type: String,
    required: false,
  }
});

const emit = defineEmits(['update:isPlaying', 'ended', 'update:visible']);

// Handle to the <video> element in the template.
const videoRef = ref<HTMLVideoElement | null>(null);

// Control-channel socket plus a reactive connection flag.
const ws = ref<WebSocket | null>(null);
const isConnected = ref(false);
|
|
77
|
+
|
|
78
|
+
const connectWebSocket = () => {
|
|
79
|
+
if (ws.value) {
|
|
80
|
+
ws.value.close();
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
if (!props.wsUrl || !props.screenClientId) return;
|
|
84
|
+
|
|
85
|
+
try {
|
|
86
|
+
const url = new URL(props.wsUrl);
|
|
87
|
+
url.searchParams.append('sessionId', props.screenClientId + '-persona');
|
|
88
|
+
ws.value = new WebSocket(url.toString());
|
|
89
|
+
|
|
90
|
+
ws.value.onopen = () => {
|
|
91
|
+
isConnected.value = true;
|
|
92
|
+
console.log(`[VirtualHumanPersona] Connected to ${props.wsUrl} for session ${props.screenClientId}-persona`);
|
|
93
|
+
};
|
|
94
|
+
|
|
95
|
+
ws.value.onmessage = (event) => {
|
|
96
|
+
try {
|
|
97
|
+
const message = JSON.parse(event.data);
|
|
98
|
+
handleMessage(message);
|
|
99
|
+
} catch (e) {
|
|
100
|
+
console.error('[VirtualHumanPersona] Failed to parse message:', event.data, e);
|
|
101
|
+
}
|
|
102
|
+
};
|
|
103
|
+
|
|
104
|
+
ws.value.onerror = (error) => {
|
|
105
|
+
console.error('[VirtualHumanPersona] WebSocket error:', error);
|
|
106
|
+
};
|
|
107
|
+
|
|
108
|
+
ws.value.onclose = () => {
|
|
109
|
+
isConnected.value = false;
|
|
110
|
+
console.log('[VirtualHumanPersona] WebSocket disconnected');
|
|
111
|
+
};
|
|
112
|
+
} catch (error) {
|
|
113
|
+
console.error('[VirtualHumanPersona] Failed to initialize WebSocket:', error);
|
|
114
|
+
}
|
|
115
|
+
};
|
|
116
|
+
|
|
117
|
+
const handleMessage = (msg: any) => {
|
|
118
|
+
const { type, action } = msg;
|
|
119
|
+
if (type === 'control') {
|
|
120
|
+
if (action === 'play') {
|
|
121
|
+
emit('update:isPlaying', true);
|
|
122
|
+
emit('update:visible', true);
|
|
123
|
+
} else if (action === 'pause') {
|
|
124
|
+
emit('update:isPlaying', false);
|
|
125
|
+
emit('update:visible', false);
|
|
126
|
+
} else if (action === 'stop') {
|
|
127
|
+
emit('update:isPlaying', false);
|
|
128
|
+
emit('update:visible', false);
|
|
129
|
+
if (videoRef.value) {
|
|
130
|
+
videoRef.value.currentTime = 0;
|
|
131
|
+
}
|
|
132
|
+
}
|
|
133
|
+
}
|
|
134
|
+
};
|
|
135
|
+
|
|
136
|
+
watch(() => props.screenClientId, () => {
|
|
137
|
+
if (props.screenClientId && props.wsUrl) {
|
|
138
|
+
connectWebSocket();
|
|
139
|
+
}
|
|
140
|
+
});
|
|
141
|
+
|
|
142
|
+
watch(() => props.wsUrl, () => {
|
|
143
|
+
if (props.screenClientId && props.wsUrl) {
|
|
144
|
+
connectWebSocket();
|
|
145
|
+
}
|
|
146
|
+
});
|
|
147
|
+
|
|
148
|
+
watch(() => props.isPlaying, (newVal) => {
|
|
149
|
+
if (videoRef.value) {
|
|
150
|
+
if (newVal) {
|
|
151
|
+
videoRef.value.play().catch(e => console.error('Video play failed:', e));
|
|
152
|
+
} else {
|
|
153
|
+
videoRef.value.pause();
|
|
154
|
+
}
|
|
155
|
+
}
|
|
156
|
+
});
|
|
157
|
+
|
|
158
|
+
const handlePlay = () => {
|
|
159
|
+
if (!props.isPlaying) {
|
|
160
|
+
emit('update:isPlaying', true);
|
|
161
|
+
}
|
|
162
|
+
};
|
|
163
|
+
|
|
164
|
+
const handlePause = () => {
|
|
165
|
+
if (props.isPlaying) {
|
|
166
|
+
emit('update:isPlaying', false);
|
|
167
|
+
}
|
|
168
|
+
};
|
|
169
|
+
|
|
170
|
+
const handleEnded = () => {
|
|
171
|
+
emit('update:isPlaying', false);
|
|
172
|
+
emit('ended');
|
|
173
|
+
};
|
|
174
|
+
|
|
175
|
+
onMounted(() => {
  // Honor an initial isPlaying=true once the element exists.
  if (props.isPlaying && videoRef.value) {
    videoRef.value.play().catch(e => console.error('Video play failed:', e));
  }
  // Open the control channel if both parameters were provided up front.
  if (props.screenClientId && props.wsUrl) {
    connectWebSocket();
  }
});

onUnmounted(() => {
  // Close the socket so it does not outlive the component.
  if (ws.value) {
    ws.value.close();
  }
});
|
|
189
|
+
</script>
|
|
190
|
+
|
|
191
|
+
<style scoped>
/* Fade + slight upward slide used by <Transition name="fade">. */
.fade-enter-active,
.fade-leave-active {
  transition: opacity 0.5s ease, transform 0.5s ease;
}

.fade-enter-from,
.fade-leave-to {
  opacity: 0;
  transform: translateY(10px);
}

/* Card-style container; background and shadow swap in dark mode. */
.virtual-human-container {
  position: relative;
  width: 100%;
  max-width: 400px;
  border-radius: 1rem;
  overflow: hidden;
  box-shadow: 0 10px 25px rgba(0, 0, 0, 0.1);
  background: #ffffff;
  transition: background-color 0.3s ease;
}

.virtual-human-container.is-dark {
  background: #1f2937;
  box-shadow: 0 10px 25px rgba(0, 0, 0, 0.5);
}

.video-wrapper {
  position: relative;
  width: 100%;
  aspect-ratio: 9 / 16; /* Portrait ratio for digital human */
  background: #000;
}

/* Fill the wrapper; cover-crop keeps the subject framed. */
.persona-video {
  width: 100%;
  height: 100%;
  object-fit: cover;
}

/* Status badge pinned to the top-right corner of the video. */
.overlay {
  position: absolute;
  top: 1rem;
  right: 1rem;
  z-index: 10;
}

.status-badge {
  display: flex;
  align-items: center;
  gap: 0.5rem;
  padding: 0.25rem 0.75rem;
  border-radius: 9999px;
  font-size: 0.75rem;
  font-weight: 500;
  color: white;
  backdrop-filter: blur(4px);
}

.status-badge.paused {
  background: rgba(239, 68, 68, 0.8); /* red-500 */
}

.status-badge.playing {
  background: rgba(34, 197, 94, 0.8); /* green-500 */
}

.dot {
  width: 6px;
  height: 6px;
  border-radius: 50%;
  background-color: white;
}

/* Pulse animation for the "playing" indicator dot. */
@keyframes pulse {
  0%, 100% { opacity: 1; }
  50% { opacity: 0.5; }
}

.animate-pulse {
  animation: pulse 2s cubic-bezier(0.4, 0, 0.6, 1) infinite;
}
</style>
|
package/src/index.ts
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import type { App, Plugin } from 'vue';
|
|
2
|
+
import VirtualHumanPersona from './components/VirtualHumanPersona.vue';
|
|
3
|
+
import VirtualHumanEventAdapter from './components/VirtualHumanEventAdapter.vue';
|
|
4
|
+
|
|
5
|
+
// 导出组件供按需引入
|
|
6
|
+
export { VirtualHumanPersona, VirtualHumanEventAdapter };
|
|
7
|
+
|
|
8
|
+
// 提供全局安装的插件方式
|
|
9
|
+
const install = (app: App) => {
|
|
10
|
+
app.component('VirtualHumanPersona', VirtualHumanPersona);
|
|
11
|
+
app.component('VirtualHumanEventAdapter', VirtualHumanEventAdapter);
|
|
12
|
+
};
|
|
13
|
+
|
|
14
|
+
export default {
|
|
15
|
+
install,
|
|
16
|
+
} as Plugin;
|
package/tsconfig.json
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
{
|
|
2
|
+
"compilerOptions": {
|
|
3
|
+
"target": "ESNext",
|
|
4
|
+
"useDefineForClassFields": true,
|
|
5
|
+
"module": "ESNext",
|
|
6
|
+
"moduleResolution": "Node",
|
|
7
|
+
"strict": true,
|
|
8
|
+
"jsx": "preserve",
|
|
9
|
+
"resolveJsonModule": true,
|
|
10
|
+
"isolatedModules": true,
|
|
11
|
+
"esModuleInterop": true,
|
|
12
|
+
"lib": ["ESNext", "DOM"],
|
|
13
|
+
"skipLibCheck": true,
|
|
14
|
+
"noEmit": true,
|
|
15
|
+
"declaration": true,
|
|
16
|
+
"declarationDir": "dist",
|
|
17
|
+
"outDir": "dist"
|
|
18
|
+
},
|
|
19
|
+
"include": ["src/**/*.ts", "src/**/*.d.ts", "src/**/*.tsx", "src/**/*.vue"],
|
|
20
|
+
"references": [{ "path": "./tsconfig.node.json" }]
|
|
21
|
+
}
|
|
Binary file
|
package/vite.config.ts
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import { defineConfig } from 'vite';
import vue from '@vitejs/plugin-vue';
import dts from 'vite-plugin-dts';
import { resolve } from 'path';

// Library-mode build: emits ES + UMD bundles named virtual-human-cf.<fmt>.js
// with Vue left external, plus .d.ts declarations via vite-plugin-dts.
export default defineConfig({
  plugins: [
    vue(),
    // Generate type declarations alongside the JS bundles.
    dts({
      insertTypesEntry: true,
      include: ['src/**/*.ts', 'src/**/*.vue'],
    }),
  ],
  build: {
    lib: {
      entry: resolve(__dirname, 'src/index.ts'),
      name: 'VirtualHumanCf',
      fileName: (format) => `virtual-human-cf.${format}.js`,
    },
    rollupOptions: {
      // make sure to externalize deps that shouldn't be bundled
      // into your library
      external: ['vue'],
      output: {
        // Provide global variables to use in the UMD build
        // for externalized deps
        globals: {
          vue: 'Vue',
        },
        exports: 'named',
      },
    },
  },
});
|