yjz-web-sdk 1.0.9-beta.4 → 1.0.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/components/RemotePlayer/index.vue.d.ts +1 -1
- package/lib/composables/useRemoteVideo.d.ts +1 -1
- package/lib/util/WasmUtil.d.ts +1 -2
- package/lib/yjz-web-sdk.js +68 -36
- package/package.json +6 -4
- package/src/assets/icon/circle.svg +1 -0
- package/src/assets/icon/triangle.svg +1 -0
- package/src/assets/wasm/h264-atomic.wasm +0 -0
- package/src/assets/wasm/h264-simd.wasm +0 -0
- package/src/components/RemotePlayer/index.vue +170 -0
- package/src/components/RemotePlayer/type.ts +11 -0
- package/src/composables/useCursorStyle.ts +15 -0
- package/src/composables/useKeyboardControl.ts +32 -0
- package/src/composables/useMouseTouchControl.ts +158 -0
- package/src/composables/useRemoteVideo.ts +248 -0
- package/src/composables/useResizeObserver.ts +27 -0
- package/src/core/WebRTCSdk.ts +561 -0
- package/src/core/data/MessageType.ts +70 -0
- package/src/core/data/TurnType.ts +25 -0
- package/src/core/data/WebRtcError.ts +93 -0
- package/src/core/data/WebrtcDataType.ts +354 -0
- package/src/core/groupctrl/GroupCtrlSocketManager.ts +94 -0
- package/src/core/groupctrl/SdkController.ts +96 -0
- package/src/core/rtc/WebRTCClient.ts +862 -0
- package/src/core/rtc/WebRTCConfig.ts +86 -0
- package/src/core/rtc/WebRtcNegotiate.ts +164 -0
- package/src/core/signal/SignalingClient.ts +221 -0
- package/src/core/util/FileTypeUtils.ts +75 -0
- package/src/core/util/KeyCodeUtil.ts +162 -0
- package/src/core/util/Logger.ts +83 -0
- package/src/core/util/MapCache.ts +135 -0
- package/src/core/util/ScreenControlUtil.ts +174 -0
- package/src/core/util/TurnTestUtil.ts +123 -0
- package/src/env.d.ts +30 -0
- package/src/index.ts +61 -0
- package/src/render/Canvas2DRenderer.ts +38 -0
- package/src/render/WebGLRenderer.ts +150 -0
- package/src/render/WebGPURenderer.ts +194 -0
- package/src/types/index.ts +15 -0
- package/src/types/webgpu.d.ts +1158 -0
- package/src/util/WasmUtil.ts +291 -0
- package/src/worker/worker.ts +292 -0
- package/lib/assets/worker-CYyEsgiB.js +0 -12598
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "yjz-web-sdk",
   "private": false,
-  "version": "1.0.9-beta.4",
+  "version": "1.0.10",
   "type": "module",
   "description": "针对于亚矩阵项目的云手机投屏和屏幕控制",
   "license": "Apache-2.0",
@@ -20,7 +20,10 @@
   "files": [
     "package.json",
     "README.md",
-    "lib"
+    "lib",
+    "dist",
+    "src",
+    "types"
   ],
   "publishConfig": {
     "registry": "https://registry.npmjs.org",
@@ -35,8 +38,8 @@
     "@libmedia/avcodec": "1.0.1",
     "@libmedia/avformat": "1.0.1",
     "@libmedia/avnetwork": "1.0.1",
-    "@libmedia/avutil": "1.0.1",
     "@libmedia/avrender": "1.0.1",
+    "@libmedia/avutil": "1.0.1",
     "@libmedia/cheap": "~1.0.2",
     "@libmedia/common": "~2.0.1",
     "element-plus": "^2.10.5",
@@ -50,7 +53,6 @@
     "@types/node": "^22.14.1",
     "@vitejs/plugin-vue": "^6.0.1",
     "@vue/tsconfig": "^0.7.0",
-    "rollup-plugin-visualizer": "5.9.3",
     "tslib": "^2.8.1",
     "typescript": "~5.8.0",
     "vite": "^6.2.0",

package/src/assets/icon/circle.svg
ADDED
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" fill="none"><circle cx="12" cy="12" r="12" fill="#fff" opacity=".6"/><circle cx="12" cy="12" r="10" fill="#000" opacity=".6"/></svg>

package/src/assets/icon/triangle.svg
ADDED
@@ -0,0 +1 @@
<svg class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" width="22" height="22"><path d="M143.832313 5.834982H143.686438A108.676545 108.676545 0 0 0 5.834982 143.686438l34.499333-11.815839-34.499333 11.815839 0.072938 0.218812 0.145874 0.437624 0.583498 1.750494 2.333993 6.71023 8.752474 25.528047L49.232663 269.867929a2254749.467572 2254749.467572 0 0 1 223.917444 652.351017l9.335972 27.205605 2.552804 7.585476 0.729373 2.188119a72.572592 72.572592 0 0 0 126.181491 40.844876 72.134968 72.134968 0 0 0 14.076895-18.963693c3.282178-6.41848 5.689108-13.639271 8.023101-20.3495l0.072937-0.291749 72.572592-209.329989 47.409231-136.830334 15.53564-44.710551 0.145874-0.364687 0.510561-0.145874 45.002301-15.900327 137.486769-48.649165c99.340573-35.228705 202.984445-71.989094 209.913487-74.906584l3.355115-1.312871c8.023101-3.136303 22.391744-8.606599 33.915834-20.130689a72.499655 72.499655 0 0 0 0-102.549813L999.240712 304.877823c-1.823432-1.969307-7.293728-7.731351-13.274585-11.961714a89.056417 89.056417 0 0 0-27.205605-12.3264h-0.145874l-2.552805-0.875247L948.184617 277.161657l-27.86204-9.263034-94.672588-31.800653A405018.007245 405018.007245 0 0 1 268.919745 48.138604L178.039896 17.504947 152.657723 8.752473 145.874556 6.637292 144.196999 5.90792 143.832313 5.834982z" fill="#000000" opacity=".7"></path></svg>

package/src/assets/wasm/h264-atomic.wasm
Binary file

package/src/assets/wasm/h264-simd.wasm
Binary file

package/src/components/RemotePlayer/index.vue
ADDED
@@ -0,0 +1,170 @@
<script setup lang="ts">
import {ref, onMounted, toRefs, onBeforeUnmount} from 'vue'

import useCursorStyle from '../../composables/useCursorStyle'
import useResizeObserver from '../../composables/useResizeObserver'
import useRemoteVideo from '../../composables/useRemoteVideo'
import useMouseTouchControl from '../../composables/useMouseTouchControl'
import useKeyboardControl from '../../composables/useKeyboardControl'
import type {Dimension} from "./type";



const emit = defineEmits(['channelEvent', "groupControlEvent", 'loadedSuccess', 'loadedFailure']);

interface Props {
  streamAngle?: number
  videoAngle?: number
  cursorType?: number
  cloudDeviceSize?: Dimension
  disabled?: boolean,
  bgColor?: string,
  isGroup?: boolean,
}

const props = withDefaults(defineProps<Props>(), {
  streamAngle: 0,
  videoAngle: 0,
  cursorType: 0,
  cloudDeviceSize: () => ({ width: 0, height: 0 }),
  disabled: true,
  bgColor: 'transparent',
  isGroup: false
})

const { streamAngle, videoAngle, cursorType, cloudDeviceSize, disabled, bgColor, isGroup } = toRefs(props)

const videoContainer = ref<HTMLElement | null>(null)
const remoteVideoElement = ref<HTMLCanvasElement | null>(null)
const audioElement = ref<HTMLAudioElement | null>(null)


const cursorClass = useCursorStyle(cursorType)

const {
  videoSize,
  dimensions,
  widthRadio,
  initVideoContainer,
  startDecode,
  stopPlay,
  startPlay
} = useRemoteVideo(videoContainer, remoteVideoElement, audioElement, videoAngle, emit)

const {
  handleMouseDown,
  handleMouseMove,
  handleMouseEnter,
  handleMouseUp,
  handleMouseLeave,
  handleWheel
} = useMouseTouchControl({
  remoteVideoElement,
  cloudDeviceSize,
  streamAngle,
  videoAngle,
  widthRadio,
  isGroup,
  emit
})

const {
  startListening,
  stopListening
} = useKeyboardControl(disabled, emit)

useResizeObserver(videoContainer, dimensions)

const handleClick = () => {
  if (audioElement.value) {
    audioElement.value.muted = false;
    audioElement.value.play().catch(()=>{});
  }
};

onMounted(() => {
  document.addEventListener("click", handleClick);
  initVideoContainer()
})



onBeforeUnmount(() => {
  document.removeEventListener("click", handleClick);
})

defineExpose({ startDecode, stopPlay, remoteVideoElement, startPlay })
</script>

<template>
  <div ref="videoContainer" class="flex flex-1 items-center justify-center" style="width:100%;height: 100%;position:relative;overflow:hidden">
    <div ref="keyboardArea" @pointerenter="startListening" @pointerleave="stopListening"
         class="vContainer"
         :style="{height: videoSize.height + 'px', width: videoSize.width + 'px', transform: `rotate(${videoAngle}deg)`}" style="position: relative;overflow:hidden">
      <canvas ref="remoteVideoElement"
              class="canvas-control"
              :width="cloudDeviceSize.width"
              :height="cloudDeviceSize.height"
              :class="[cursorClass, { 'no-events': disabled }]"
              @pointerenter="handleMouseEnter"
              @pointerdown="handleMouseDown"
              @pointermove="handleMouseMove"
              @pointerup="handleMouseUp"
              @pointerleave="handleMouseLeave"
              @wheel="handleWheel"
              hidden="hidden"
              :style="{backgroundColor: `${bgColor}`}"></canvas>
      <audio ref="audioElement" autoplay playsinline muted style="display: none"></audio>
    </div>
  </div>
</template>

<style scoped>
.vContainer {
  transition: transform 0.2s linear;
  transform-origin: center center;
  margin: 0; /* clear outer margins */
  overflow: hidden;
  /* use translateZ(0) to force GPU compositing for frequent rotation/scaling */
  transform: translateZ(0);
  will-change: transform; /* tell the browser to only watch for transform changes */
}
.flex {
  display: flex;
}
.flex-1 {
  flex: 1 1 0;
}
.items-center {
  align-items: center;
}
.justify-center {
  justify-content: center;
}
.canvas-control {
  width: 100%;
  height: 100%;
  display: block;
  user-select: none;
  -webkit-user-drag: none;
  touch-action: none;

  object-fit: cover;

  backface-visibility: hidden;
  transform: translateZ(0);
  clip-path: inset(0 1px);
}
.circle-cursor {
  cursor: url('../../assets/icon/circle.svg') 12 12, auto;
}
.triangle-cursor {
  cursor: url('../../assets/icon/triangle.svg') 1 1, auto;
}
.default-cursor {
  cursor: default;
}
.no-events {
  pointer-events: none !important;
}
</style>
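
The component exposes startDecode, stopPlay, startPlay and remoteVideoElement, and emits channelEvent, groupControlEvent, loadedSuccess and loadedFailure. A minimal host-page sketch is shown below; the import path and export name are assumptions (the package entry src/index.ts is not reproduced in this excerpt), and only the prop, event and exposed-method names come from the file above.

```vue
<script setup lang="ts">
import { ref } from 'vue'
// Assumption: the component is re-exported from the package entry (src/index.ts).
import { RemotePlayer } from 'yjz-web-sdk'

const player = ref<InstanceType<typeof RemotePlayer> | null>(null)

// Forward touch/key/wheel payloads to whatever transport the host app uses.
const onChannelEvent = (type: unknown, data: unknown) => console.log('channelEvent', type, data)
const onLoaded = () => console.log('first key frame decoded')
const onFailed = (reason?: string) => console.error('player failed:', reason)

// Encoded video frames received by the host are pushed into the exposed decoder.
const onFrame = (buf: ArrayBuffer) => player.value?.startDecode(buf)
</script>

<template>
  <RemotePlayer
    ref="player"
    :video-angle="0"
    :cloud-device-size="{ width: 720, height: 1280 }"
    :disabled="false"
    @channel-event="onChannelEvent"
    @loaded-success="onLoaded"
    @loaded-failure="onFailed"
  />
</template>
```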

package/src/composables/useCursorStyle.ts
ADDED
@@ -0,0 +1,15 @@
import type {Ref} from 'vue'
import {computed} from 'vue'

export default function useCursorStyle(cursorType: Ref<number>) {
  return computed(() => {
    switch (cursorType.value) {
      case 1:
        return 'circle-cursor'
      case 2:
        return 'triangle-cursor'
      default:
        return 'default-cursor'
    }
  })
}

package/src/composables/useKeyboardControl.ts
ADDED
@@ -0,0 +1,32 @@
import {ChannelDataType, KeyEventData} from "../core/data/WebrtcDataType";
import {getKeyEventData} from "../core/util/KeyCodeUtil";
import {type Ref, ref} from "vue";


export default function useKeyboardControl(
  disabled:Ref<boolean>,
  emit: (event: 'channelEvent', payload: ChannelDataType, data:any) => void
) {
  const listening = ref(false)
  const handleKeyDown = (e: KeyboardEvent) => {
    const data: KeyEventData = getKeyEventData(e)
    emit('channelEvent', ChannelDataType.ActionInput, data)
  }

  const startListening = () => {
    if (!disabled){return}
    listening.value = true;
    document.addEventListener('keydown', handleKeyDown);
  }

  const stopListening = () => {
    if (!disabled){return}
    listening.value = false;
    document.removeEventListener('keydown', handleKeyDown);
  }

  return {
    startListening,
    stopListening
  }
}

package/src/composables/useMouseTouchControl.ts
ADDED
@@ -0,0 +1,158 @@
import type {Ref} from 'vue'
import {ref} from 'vue'
import {ActionType, ChannelDataType, TouchData, WheelData} from "../core/data/WebrtcDataType";
import {transformCoordinate, valueToPercentage} from "../core/util/ScreenControlUtil";
import type {Dimension, TouchEventData} from "../components/RemotePlayer/type";

interface MouseTouchControlOptions {
  remoteVideoElement: Ref<HTMLCanvasElement | null>
  cloudDeviceSize:Ref<Dimension>
  streamAngle: Ref<number>
  videoAngle: Ref<number>
  widthRadio: Ref<number>
  isGroup: Ref<boolean>
  emit: (event: 'channelEvent', payload: ChannelDataType, data: any) => void
}

export default function useMouseTouchControl(options: MouseTouchControlOptions) {
  const {
    remoteVideoElement,
    cloudDeviceSize,
    streamAngle,
    videoAngle,
    widthRadio,
    isGroup,
    emit
  } = options

  const isPointerDown = ref(false)
  const bound = ref(0)
  const pointerList = ref<number[]>(new Array(20).fill(0));
  const pointerDownTime = ref<number[]>(new Array(10).fill(0));
  const index = ref<number>(0)
  const groupIndex = ref<number>(0)

  const handlePointerEvent = (event: PointerEvent | TouchEventData, action: ActionType) => {
    if (!remoteVideoElement.value) return;
    const offsetTime = Math.trunc((event.timeStamp - pointerDownTime.value[0]));
    const rect = remoteVideoElement.value.getBoundingClientRect();

    let x = event.clientX - rect.left;
    let y = event.clientY - rect.top;

    const cloudWidth = cloudDeviceSize.value.width;
    const cloudHeight = cloudDeviceSize.value.height;
    const result = transformCoordinate(
      rect.width, rect.height, cloudWidth, cloudHeight,
      videoAngle.value, streamAngle.value, x, y
    );
    if (!result || result.length < 2) return;
    x = result[0];
    y = result[1];

    if (action === ActionType.ACTION_MOVE) {
      const flipY = pointerList.value[10] - y;
      const flipX = pointerList.value[0] - x;
      if (Math.abs(flipY) < bound.value && Math.abs(flipX) < bound.value) return;
    }
    if (isGroup.value) {
      if(action === ActionType.ACTION_DOWN){
        index.value = 0
        groupIndex.value ++
      }else if(action === ActionType.ACTION_UP || action === ActionType.ACTION_MOVE){
        index.value ++
      }
    }
    // update the last recorded position
    pointerList.value[0] = x
    pointerList.value[10] = y

    // convert to percentage coordinates
    const [px, py] = valueToPercentage(
      rect.width, rect.height, cloudWidth, cloudHeight,
      videoAngle.value, streamAngle.value, x, y
    )

    // build the data to send
    const touchData = isGroup.value
      ? new TouchData(action, 0, px, py, offsetTime, "web", index.value, groupIndex.value)
      : new TouchData(action, 0, px, py, offsetTime, "web")

    emit('channelEvent', ChannelDataType.ClickData, touchData)

  }

  const handleMouseDown = (event: PointerEvent) => {
    // mark the pressed state
    isPointerDown.value = true
    if(remoteVideoElement.value){
      remoteVideoElement.value.setPointerCapture(event.pointerId)
    }
    pointerDownTime.value[0] = event.timeStamp;
    bound.value = Math.trunc(6 / widthRadio.value);
    // in group-control mode, a new round of taps resets the index
    handlePointerEvent(event, ActionType.ACTION_DOWN)
  }

  const handleMouseMove = (event: PointerEvent) => {
    if (!isPointerDown.value) return;
    handlePointerEvent(event, ActionType.ACTION_MOVE)
    if(!remoteVideoElement.value){
      return;
    }
    const rect = remoteVideoElement.value.getBoundingClientRect();
    const { clientX: x, clientY: y } = event;
    if (x < rect.left || x > rect.right || y < rect.top || y > rect.bottom) {
      handleMouseUp(event)
    }
  }

  const handleMouseUp = (event: PointerEvent) => {
    if (!isPointerDown.value) return;
    isPointerDown.value = false
    handlePointerEvent(event, ActionType.ACTION_UP)
    // release pointer capture & clear long-press state
    if(remoteVideoElement.value){
      remoteVideoElement.value.releasePointerCapture(event.pointerId)
    }
  }

  const handleMouseEnter = (event: PointerEvent) => {
    // if the left button is already pressed but not yet marked as down, send a make-up down event
    if (event.buttons === 1 && !isPointerDown.value) {
      isPointerDown.value = true
      if(remoteVideoElement.value){
        remoteVideoElement.value.setPointerCapture(event.pointerId)
      }
      pointerDownTime.value[0] = event.timeStamp;
      bound.value = Math.trunc(6 / widthRadio.value);

      handlePointerEvent(event, ActionType.ACTION_DOWN)
    }
  }
  const handleMouseLeave = (event: PointerEvent) => {
    if (isPointerDown.value) {
      handlePointerEvent(event, ActionType.ACTION_UP);
      isPointerDown.value = false
      if(remoteVideoElement.value){
        remoteVideoElement.value.releasePointerCapture(event.pointerId)
      }
    }
  }

  const handleWheel = (e: WheelEvent) => {
    const delta = Math.sign(e.deltaY) * -1;
    const wheelData = new WheelData(delta);
    emit('channelEvent', ChannelDataType.ActionWheel,wheelData)
  }

  return {
    isPointerDown,
    handleMouseDown,
    handleMouseMove,
    handleMouseEnter,
    handleMouseUp,
    handleMouseLeave,
    handleWheel
  }
}
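
Neither transformCoordinate nor valueToPercentage is reproduced in this excerpt (they live in package/src/core/util/ScreenControlUtil.ts), but the call sites above make the intent clear: map a pointer position from the canvas bounding rect into cloud-device pixels, then into relative coordinates. A minimal sketch for the unrotated case (videoAngle = streamAngle = 0), using hypothetical helper names rather than the real implementations, might look like:

```ts
// Hypothetical sketch only; the shipped ScreenControlUtil.ts is not shown here.
// For the unrotated case the mapping is a plain scale from element-local CSS
// pixels to cloud-device pixels, then a normalization to [0, 1] ratios.
function scaleToCloud(
  rectW: number, rectH: number,   // canvas bounding rect size (CSS pixels)
  cloudW: number, cloudH: number, // remote device resolution
  x: number, y: number            // pointer position relative to the rect
): [number, number] {
  return [(x / rectW) * cloudW, (y / rectH) * cloudH];
}

function toPercentage(cloudW: number, cloudH: number, x: number, y: number): [number, number] {
  return [x / cloudW, y / cloudH];
}
```

The real utilities additionally account for the videoAngle/streamAngle rotations, which is why both angles are passed through on every call.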

package/src/composables/useRemoteVideo.ts
ADDED
@@ -0,0 +1,248 @@
import type {Ref} from 'vue';
import {computed, onBeforeUnmount, onMounted, ref} from 'vue';
import {Dimension} from '../components/RemotePlayer/type';
import {
  checkDecoderSupport,
  detectRenderingCapabilities,
  VideoCodecType
} from "../util/WasmUtil";
import DecodeWorker from '../worker/worker.ts?worker&inline';

import {DecoderSupportResult, RendererType} from "../types";

export default function useRemoteVideo(
  videoContainer: Ref<HTMLElement | null>,
  remoteVideoElement: Ref<HTMLCanvasElement | null>,
  audioElement: Ref<HTMLAudioElement | null>,
  videoAngle: Ref<number>,
  emit: (event: 'loadedSuccess' | 'loadedFailure', reason?: string) => void
) {
  const screenStatus = ref(false);
  const remoteVideo = ref<Partial<Dimension>>({});
  const dimensions = ref<Dimension>({ width: 0, height: 0 });
  const widthRadio = ref(0);
  const heightRadio = ref(0);
  const resizeObserver = ref<ResizeObserver | null>(null);

  const renderType = ref<RendererType | null>(null)

  const isRotated = computed(() => videoAngle.value % 180 !== 0);
  // worker-related state
  const worker = ref<Worker | null>(null)
  const offscreen = ref<OffscreenCanvas | null>(null)
  const workerInited = ref(false)
  const visible = ref<boolean>(true);

  const videoSize = computed(() => {
    let width: number;
    let height: number;

    const containerWidth = isRotated.value ? dimensions.value.height : dimensions.value.width;
    const containerHeight = isRotated.value ? dimensions.value.width : dimensions.value.height;

    const vWidth = remoteVideo.value.width ?? 720;
    const vHeight = remoteVideo.value.height ?? 1280;
    const aspect = vWidth / vHeight;
    const videoHeight = containerWidth / aspect;
    if (videoHeight > containerHeight) {
      height = containerHeight;
      width = height * aspect;
    } else {
      width = containerWidth;
      height = videoHeight;
    }

    widthRadio.value = vWidth / width;
    heightRadio.value = vHeight / height;

    return { width, height };
  });
  const waitingKeyFrame = ref(true)
  const checkResult = ref<DecoderSupportResult | null>(null)

  const observeContainer = () => {
    if (!videoContainer.value) return;
    resizeObserver.value = new ResizeObserver(([entry]) => {
      const { width, height } = entry.contentRect;
      dimensions.value = { width, height };
    });
    resizeObserver.value.observe(videoContainer.value);
  };

  const initVideoContainer = () => {
    observeContainer();
    bindVideoEvents()
  };

  const bindVideoEvents = async () => {
    const video = remoteVideoElement.value;
    if (!video) return;
    checkResult.value = await checkDecoderSupport(VideoCodecType.H264)
    console.log("checkResult.value====>", checkResult.value)
    const result = await detectRenderingCapabilities()
    console.log("detectRenderingCapabilities===>", result)
    renderType.value = result.best
    initWorker(video)
  };

  const initWorker = (video: HTMLCanvasElement) => {
    if (workerInited.value) return;
    console.log("initWorker===>");
    worker.value = new DecodeWorker();
    const pixelRatio = window.devicePixelRatio || 1;
    if (!offscreen.value) {
      offscreen.value = video.transferControlToOffscreen();
    }

    worker.value.onmessage = (event: MessageEvent) => {
      const { data } = event;
      switch (data.type) {
        case 'decoderError':
        case 'rendererError':
          emit('loadedFailure', data.error)
          break
      }
    }
    worker.value.onerror = (event: ErrorEvent) => {
      console.error('error', event);
    }

    worker.value?.postMessage({
      type: 'init',
      supportH265: checkResult.value?.supported,
      isHardware: checkResult.value?.hardware,
      isSoftware: checkResult.value?.software,
      canvas: offscreen.value,
      pixelRatio: pixelRatio,
      renderType: renderType.value
    }, [offscreen.value])
    workerInited.value = true;
  }

  const startDecode = (arrayBuffer: ArrayBuffer) => {
    handleMessage(arrayBuffer)
  };

  const handleMessage = (arrayBuffer: ArrayBuffer) => {
    const data = new Uint8Array(arrayBuffer); // ArrayBuffer -> Uint8Array
    if (data.length < 4) return;

    // read the label length (first 4 bytes, big endian)
    const labelLen = (data[0] << 24) | (data[1] << 16) | (data[2] << 8) | data[3];
    if (data.length < 4 + labelLen) return;

    // read the label
    const labelBytes = data.subarray(4, 4 + labelLen);
    const label = new TextDecoder().decode(labelBytes);

    // read the frame
    const frame = data.subarray(4 + labelLen); // Uint8Array view; the underlying buffer is not copied

    // invoke the callback
    decodeVideo(label, frame)
  }

  const decodeVideo = (label: String, data: Uint8Array) => {
    if(label !== 'key' && !visible){
      return;
    }
    if (label === 'config'){
      return;
    }
    if (waitingKeyFrame.value){
      if(label === 'delta'){
        return;
      }
      waitingKeyFrame.value = false;
      emit("loadedSuccess")
    }
    worker.value?.postMessage(
      {
        type: 'decode',
        label,
        data
      },
      [data.buffer]
    );
  }

  const stopPlay = () => {
    screenStatus.value = false;
    waitingKeyFrame.value = true;
    const audio = audioElement.value;
    worker.value?.postMessage({type: 'clearRender'});
    if (audio && audio.srcObject) {
      const stream = audio.srcObject as MediaStream;

      stream.getTracks().forEach(t => t.stop());

      audio.pause();
      audio.srcObject = null;
    }
  };

  const startPlay = (track: MediaStreamTrack) => {
    const audio = audioElement.value;
    if (!audio) return;

    replaceAudioTrack(audio, track);
    audio.autoplay = true;
    audio.muted = false;
    audio.setAttribute('playsinline', 'true');
    audio.setAttribute('webkit-playsinline', 'true');
  };

  const replaceAudioTrack = (audioEl: HTMLAudioElement, newTrack: MediaStreamTrack) => {
    if (!audioEl.srcObject) {
      audioEl.srcObject = new MediaStream();
    }

    const stream = audioEl.srcObject as MediaStream;
    updateTrack(stream, newTrack);
  };

  const updateTrack = (stream: MediaStream, newTrack: MediaStreamTrack) => {
    if (newTrack.kind === 'audio') {
      // remove the old audio tracks
      stream.getAudioTracks().forEach(oldTrack => {
        stream.removeTrack(oldTrack);
        oldTrack.stop();
      });
      stream.addTrack(newTrack);
    }
  };

  const handleVisibilityChange = () => {
    visible.value = document.visibilityState === "visible";
  };

  onMounted(() => {
    document.addEventListener('visibilitychange', handleVisibilityChange);
  })

  onBeforeUnmount(async () => {
    document.removeEventListener('visibilitychange', handleVisibilityChange);
    if (resizeObserver.value && videoContainer.value) {
      resizeObserver.value.unobserve(videoContainer.value);
    }
    stopPlay();
    worker.value?.postMessage({type: 'stopDecode'});
    if(worker.value) {
      worker.value.terminate()
    }
  });


  return {
    videoSize,
    remoteVideo,
    dimensions,
    widthRadio,
    heightRadio,
    screenStatus,
    initVideoContainer,
    startDecode,
    stopPlay,
    startPlay
  };
}
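
handleMessage above defines the wire format startDecode consumes: a 4-byte big-endian label length, a UTF-8 label ('config', 'key' or 'delta' as used by decodeVideo), then the raw encoded frame bytes. A sender-side sketch that produces a compatible buffer (the packFrame name is hypothetical, not part of the SDK):

```ts
// Pack a frame into the [u32 label length | label | frame] envelope that
// useRemoteVideo's handleMessage() parses.
function packFrame(label: string, frame: Uint8Array): ArrayBuffer {
  const labelBytes = new TextEncoder().encode(label);
  const out = new Uint8Array(4 + labelBytes.length + frame.length);
  new DataView(out.buffer).setUint32(0, labelBytes.length, false); // big endian
  out.set(labelBytes, 4);
  out.set(frame, 4 + labelBytes.length);
  return out.buffer;
}
```

Note that decodeVideo transfers data.buffer to the worker in its postMessage call, so the ArrayBuffer handed to startDecode is detached afterwards and must not be reused by the caller.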