@byteplus/react-native-rtc 1.0.3 → 1.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (25) hide show
  1. package/README.md +311 -4
  2. package/android/build.gradle +14 -33
  3. package/android/src/main/java/com/volcengine/reactnative/vertc/VertcHelper.java +24 -0
  4. package/android/src/main/java/com/volcengine/reactnative/vertc/vod/VertcVod.java +29 -13
  5. package/android/src/main/java/com/volcengine/reactnative/vertc/vod/VideoAudioProcessor.java +9 -11
  6. package/android/src/main/java/com/volcengine/reactnative/vertc/vod/VodMock.java +17 -6
  7. package/android/src/main/java/com/volcengine/reactnative/vertc/vod/VodVideoEngineCallbackProxy.java +94 -0
  8. package/ios/vod/VertcVod.m +20 -5
  9. package/ios/vod/VodAudioProcessor.h +6 -1
  10. package/ios/vod/VodAudioProcessor.mm +9 -1
  11. package/ios/vod/VodVideoProcessor.h +3 -1
  12. package/ios/vod/VodVideoProcessor.m +22 -2
  13. package/lib/commonjs/index.js +2061 -3928
  14. package/lib/module/index.js +2061 -3928
  15. package/lib/typescript/codegen/pack/api.d.ts +1 -1
  16. package/lib/typescript/codegen/pack/keytype.d.ts +3 -3
  17. package/lib/typescript/core/rtc-video.d.ts +7 -1
  18. package/lib/typescript/interface.d.ts +4 -4
  19. package/lib/typescript/platforms/android/vertc.d.ts +9 -0
  20. package/lib/typescript/platforms/android/vod.d.ts +2 -2
  21. package/package.json +1 -1
  22. package/react-native-rtc.podspec +39 -16
  23. package/android/src/main/java/com/volcengine/reactnative/vertc/vod/VodAudioProxy.java +0 -44
  24. package/android/src/main/java/com/volcengine/reactnative/vertc/vod/VodAudioVoiceWrapperObject.java +0 -355
  25. package/android/src/main/java/com/volcengine/reactnative/vertc/vod/VodVideoProxy.java +0 -97
package/README.md CHANGED
@@ -21,7 +21,7 @@ yarn add @byteplus/react-native-rtc
21
21
 
22
22
  ## Basic Example
23
23
 
24
- ### Core Call Definition
24
+ ### RTC Core Definition
25
25
  `@/core/index.ts`
26
26
 
27
27
  ```typescript
@@ -83,15 +83,14 @@ class RTCClient {
83
83
  export default new RTCClient();
84
84
  ```
85
85
 
86
- ### Base JoinRoom Page
86
+ ### JoinRoom Page
87
87
  `@/page/login.tsx`
88
88
 
89
-
90
89
  Mainly focus on the usage of `NativeViewComponent`. Note that after the component is registered, call `setLocalVideoCanvas` in `onLoad` to set the local rendering view. The same applies to remote users.
91
90
  ```typescript
92
91
  import { Platform } from 'react-native';
93
- import { NativeViewComponent, StreamIndex, RenderMode } from '@byteplus/react-native-rtc';
94
92
  import { request, PERMISSIONS } from 'react-native-permissions';
93
+ import { NativeViewComponent, StreamIndex, RenderMode } from '@byteplus/react-native-rtc';
95
94
  import RTCClient from '@/core';
96
95
 
97
96
  const viewId = 'my-view';
@@ -173,6 +172,314 @@ const Login = () => {
173
172
  export default Login;
174
173
  ```
175
174
 
175
+ ## On-Demand Video with Real-Time Guest Interaction Example
176
+
177
+ **We recommend using the latest RTC React Native SDK version in combination with react-native-vod-player SDK v1.2.4.**
178
+
179
+
180
+ ### RTC Core Definition
181
+ `@/core/index.ts`
182
+
183
+ Based on the RTC core definition in `Basic Example`, add the following APIs:
184
+ ```typescript
185
+ ...
186
+ import type { TTVideoEngine } from '@byteplus/react-native-vod-player';
187
+
188
+ class RTCClient {
189
+
190
+ ...
191
+
192
+ /**
193
+ * @brief Start observing the vod player: capture frames from it and push them as the external screen stream.
194
+ */
195
+ startVodPlayerCapture(player: TTVideoEngine) {
196
+ return this.engine?.startVodPlayerCapture(player);
197
+ }
198
+
199
+ /**
200
+ * @brief Stop all vod player observers in rtc.
201
+ * @note After invoking this api, you must invoke `startVodPlayerCapture` again to resume observing the vod player.
202
+ */
203
+ stopVodPlayerCapture(player: TTVideoEngine) {
204
+ return this.engine?.stopVodPlayerCapture(player);
205
+ }
206
+ }
207
+ ```
208
+ ### Main logic page in react-native
209
+ `@/page/vodRtc.ts`
210
+
211
+ ```typescript
212
+ import React, {useEffect, useRef, useState} from 'react';
213
+ import fs from 'react-native-fs';
214
+ import {Platform, SafeAreaView, ScrollView} from 'react-native';
215
+ import {Button, Input, Text, Toast, View} from '@ant-design/react-native';
216
+ import {
217
+ NativeViewComponent,
218
+ StreamIndex,
219
+ RenderMode,
220
+ MediaStreamType,
221
+ LocalLogLevel,
222
+ } from '@byteplus/react-native-rtc';
223
+ import {
224
+ createDirectUrlSource,
225
+ type TTVideoEngine,
226
+ } from '@byteplus/react-native-vod-player';
227
+ import {
228
+ launchImageLibrary,
229
+ type ImagePickerResponse,
230
+ } from 'react-native-image-picker';
231
+ import RTCClient from '@/core';
232
+ import { createVeplayer } from '@/core/veplayer';
233
+ import RowItem from '@/components/RowItem';
234
+ import {GlobalStyles} from '@/style';
235
+
236
+ const viewId = 'my-view';
237
+
238
+ const auth = {
239
+ appId: 'Your RTC AppID',
240
+ roomId: 'Your Room ID',
241
+ userId: 'Your User ID',
242
+ token: 'Your RTC Token',
243
+ };
244
+
245
+ const Page = () => {
246
+ const [isViewLoaded, setViewLoaded] = useState<boolean>(false);
247
+ const [filepath, setFilepath] = useState('');
248
+ const hasCaptureRef = useRef(false);
249
+ const playerRef = useRef<TTVideoEngine>();
250
+
251
+ const handleViewLoad = () => {
252
+ setViewLoaded(true);
253
+ };
254
+
255
+ const handleSelectVideoFile = async () => {
256
+ try {
257
+ const callback = (response: ImagePickerResponse) => {
258
+ if (!response.didCancel && !response.errorCode) {
259
+ const filePath = response.assets?.[0]?.uri;
260
+ if (filePath) {
261
+ setFilepath(filePath);
262
+ }
263
+ }
264
+ };
265
+ launchImageLibrary(
266
+ {
267
+ mediaType: 'video',
268
+ },
269
+ callback,
270
+ );
271
+ } catch {
272
+ Toast.fail('Select media file failed.');
273
+ }
274
+ };
275
+
276
+ const handlePublish = () => {
277
+ RTCClient.publishScreen(MediaStreamType.RTC_MEDIA_STREAM_TYPE_BOTH);
278
+ };
279
+
280
+ const setVideoSource = () => {
281
+ const source = filepath
282
+ ? createDirectUrlSource({
283
+ url: filepath,
284
+ cacheKey: filepath,
285
+ })
286
+ : createDirectUrlSource({
287
+ url: 'Your media url, like https://xxxx.mp4',
288
+ cacheKey: 'remote',
289
+ vid: 'remote',
290
+ });
291
+ playerRef.current!.setVideoSource(source);
292
+ };
293
+
294
+ const handlePlay = async () => {
295
+ if (hasCaptureRef.current) {
296
+ playerRef.current!.play();
297
+ return;
298
+ }
299
+ if (!playerRef.current) {
300
+ return;
301
+ }
302
+ setVideoSource();
303
+ await RTCClient.startVodPlayerCapture(playerRef.current);
304
+ await playerRef.current!.play();
305
+
306
+ RTCClient.publishScreen(MediaStreamType.RTC_MEDIA_STREAM_TYPE_BOTH);
307
+
308
+ hasCaptureRef.current = true;
309
+ };
310
+
311
+ const handleStop = async () => {
312
+ if (hasCaptureRef.current) {
313
+ playerRef.current!.pause();
314
+ }
315
+ }
316
+
317
+ const handleDestroy = async () => {
318
+ if (!playerRef.current) {
319
+ return;
320
+ }
321
+ await RTCClient.stopVodPlayerCapture(playerRef.current);
322
+ hasCaptureRef.current = false;
323
+ }
324
+
325
+ const initializePlayer = async () => {
326
+ /**
327
+ * @brief It's not necessary to set viewId for vod player.
328
+ * @note You should implement veplayer yourself; refer to the @byteplus/react-native-vod-player SDK.
329
+ */
330
+ playerRef.current = await createVeplayer({ viewId: '' });
331
+ playerRef.current.setListener({
332
+ onLoadStateChanged(engine, loadState) {
333
+ console.log('onLoadStateChanged: ', loadState);
334
+ },
335
+ onError(message, code) {
336
+ console.error('onError: ', message, code);
337
+ },
338
+ onPlaybackStateChanged(engine, playbackState) {
339
+ console.log('onPlaybackStateChanged: ', playbackState);
340
+ },
341
+ });
342
+ };
343
+
344
+ const initializeRTC = async () => {
345
+ /** Init your engine */
346
+ let DefaultPath = fs.ExternalDirectoryPath;
347
+ if (Platform.OS === 'ios') {
348
+ DefaultPath = fs.DocumentDirectoryPath;
349
+ }
350
+
351
+ /** Set log */
352
+ RTCClient.setLogConfig({
353
+ logLevel: LocalLogLevel.INFO,
354
+ logPath: DefaultPath,
355
+ logFileSize: 10,
356
+ logFilenamePrefix: '',
357
+ });
358
+
359
+ /** Create RTC Engine */
360
+ await RTCClient.createEngine({
361
+ appID: auth.appId,
362
+ parameters: {},
363
+ });
364
+
365
+ /** Set Local video canvas for player */
366
+ RTCClient.setLocalVideoCanvas(StreamIndex.STREAM_INDEX_SCREEN, {
367
+ viewId,
368
+ renderMode: RenderMode.ByteRTCRenderModeFit,
369
+ });
370
+
371
+ /** Join room */
372
+ RTCClient.createRoom(auth.roomId);
373
+ RTCClient.setRTCRoomEventHandler({
374
+ onUserJoined(userInfo, elapsed) {
375
+ console.log('onUserJoined: ', userInfo, elapsed);
376
+ },
377
+ });
378
+ RTCClient.joinRoom({
379
+ token: auth.token,
380
+ userId: auth.userId,
381
+ roomConfigs: {
382
+ profile: 0,
383
+ isAutoPublish: true,
384
+ isAutoSubscribeAudio: false,
385
+ isAutoSubscribeVideo: false,
386
+ },
387
+ });
388
+ };
389
+
390
+ useEffect(() => {
391
+ if (isViewLoaded) {
392
+ initializeRTC();
393
+ initializePlayer();
394
+ console.log('init success');
395
+ }
396
+ }, [isViewLoaded]);
397
+
398
+ useEffect(() => {
399
+ return () => {
400
+ RTCClient.engine?.stopVodPlayerCapture(playerRef.current);
401
+ }
402
+ }, []);
403
+
404
+ return (
405
+ <SafeAreaView>
406
+ <ScrollView
407
+ style={{
408
+ display: 'flex',
409
+ flexDirection: 'column',
410
+ width: '100%',
411
+ height: '100%',
412
+ backgroundColor: 'gray',
413
+ }}>
414
+ <RowItem
415
+ theme="dark"
416
+ leftItem="File path"
417
+ leftItemStyle={{width: '25%'}}
418
+ rightItem={
419
+ <Input disabled placeholder="Select media file" value={filepath} />
420
+ }
421
+ />
422
+ <Button
423
+ style={{...GlobalStyles.rowBtn, marginBottom: 6}}
424
+ onPress={handleSelectVideoFile}>
425
+ <Text style={{color: 'gray'}}>Select media file</Text>
426
+ </Button>
427
+ <Button
428
+ style={{...GlobalStyles.rowBtn, marginBottom: 6}}
429
+ onPress={() => setFilepath('')}>
430
+ <Text style={{color: 'gray'}}>Clear media file</Text>
431
+ </Button>
432
+ <Button
433
+ style={{...GlobalStyles.rowBtn, marginBottom: 6}}
434
+ onPress={handlePublish}>
435
+ <Text style={{color: 'gray'}}>Push Stream</Text>
436
+ </Button>
437
+ <Button
438
+ style={{...GlobalStyles.rowBtn, marginBottom: 6}}
439
+ onPress={handlePlay}>
440
+ <Text style={{color: 'gray'}}>Play</Text>
441
+ </Button>
442
+ <Button
443
+ style={{...GlobalStyles.rowBtn, marginBottom: 6}}
444
+ onPress={handleStop}>
445
+ <Text style={{color: 'gray'}}>Pause</Text>
446
+ </Button>
447
+ <Button
448
+ style={{...GlobalStyles.rowBtn, marginBottom: 6}}
449
+ onPress={handleDestroy}>
450
+ <Text style={{color: 'gray'}}>Destroy</Text>
451
+ </Button>
452
+ <View
453
+ style={{
454
+ flex: 1,
455
+ width: '100%',
456
+ minHeight: 300,
457
+ backgroundColor: '#000',
458
+ }}>
459
+ <Text>{`${viewId}`}</Text>
460
+ <NativeViewComponent
461
+ viewId={viewId}
462
+ style={{
463
+ width: '100%',
464
+ height: '100%',
465
+ }}
466
+ onLoad={handleViewLoad}
467
+ kind={
468
+ Platform.select({
469
+ android: 'SurfaceView',
470
+ ios: 'UIView',
471
+ })!
472
+ }
473
+ />
474
+ </View>
475
+ </ScrollView>
476
+ </SafeAreaView>
477
+ );
478
+ };
479
+
480
+ export default Page;
481
+ ```
482
+
176
483
 
177
484
  ## Attention
178
485
  - In Android/iOS scenarios, the screen sharing method is slightly different. For details, please refer to [Android screen sharing](https://docs.byteplus.com/en/docs/byteplus-rtc/docs-124176) and [iOS screen sharing](https://docs.byteplus.com/en/docs/byteplus-rtc/docs-124177).
@@ -14,7 +14,6 @@ def isNewArchitectureEnabled() {
14
14
  }
15
15
 
16
16
  apply plugin: "com.android.library"
17
- apply from: "https://ve-vos.volccdn.com/script/vevos-repo-base.gradle"
18
17
 
19
18
  if (isNewArchitectureEnabled()) {
20
19
  apply plugin: "com.facebook.react"
@@ -52,11 +51,16 @@ def getNativeDep() {
52
51
  def parsedJson = new groovy.json.JsonSlurper().parseText(packageJson.text)
53
52
  def isBp = parsedJson.name.startsWith("@byteplus")
54
53
 
54
+ def enableUnionForRTCWithLive = rootProject.ext.has('enableUnionForRTCWithLive') ? rootProject.ext.get('enableUnionForRTCWithLive') : false
55
+ def rtcVersionToUse
55
56
  if (isBp) {
56
- return "com.byteplus:BytePlusRTC:3.58.1.15100"
57
+ rtcVersionToUse = enableUnionForRTCWithLive ? "3.58.1.20600" : "3.58.1.15100"
58
+ println "Using BytePlusRTC SDK version : $rtcVersionToUse (union build enabled: $enableUnionForRTCWithLive)"
59
+ return "com.byteplus:BytePlusRTC:$rtcVersionToUse"
57
60
  }
58
-
59
- return "com.volcengine:VolcEngineRTC:3.58.1.2700"
61
+ rtcVersionToUse = enableUnionForRTCWithLive ? "3.58.1.20700" : "3.58.1.2700"
62
+ println "Using VolcEngineRTC SDK version : $rtcVersionToUse (union build enabled: $enableUnionForRTCWithLive)"
63
+ return "com.volcengine:VolcEngineRTC:$rtcVersionToUse"
60
64
  }
61
65
 
62
66
  android {
@@ -95,46 +99,23 @@ android {
95
99
  }
96
100
  }
97
101
 
98
- // repositories {
99
- // mavenCentral()
100
- // google()
101
- // maven {
102
- // // url "https://artifact.bytedance.com/repository/Volcengine/" // volc public maven repo
103
- // }
104
- // }
105
-
106
-
107
- allprojects {
108
- repositories {
109
- maven {
110
- url 'https://artifact.byteplus.com/repository/public/' // byteplus public maven repo
111
- }
112
- google()
113
- mavenCentral()
114
- maven {
115
- url "https://artifact.bytedance.com/repository/Volcengine/" // volc public maven repo
116
- }
117
- }
118
- }
119
-
120
102
  dependencies {
121
103
  // For < 0.71, this will be from the local maven repo
122
104
  // For > 0.71, this will be replaced by `com.facebook.react:react-android:$version` by react gradle plugin
123
105
  // noinspection GradleDynamicVersion
124
106
  implementation "com.facebook.react:react-native:+"
125
107
 
126
- implementation "com.volcengine:VolcApiEngine:1.5.0"
108
+ implementation "com.volcengine:VolcApiEngine:1.6.2"
127
109
  // implementation project(":hybrid-runtime");
128
110
 
129
- // def dep = getNativeDep()
130
- // implementation "$dep"
131
- // implementation "com.volcengine:VolcEngineRTC:3.58.1.2700"
132
- implementation "com.byteplus:BytePlusRTC:3.58.1.15100"
111
+ // Use the RTC SDK dependency determined by getNativeDep() which can be overridden by customer's app build.gradle.
112
+ def rtcDep = getNativeDep()
113
+ implementation rtcDep
133
114
 
134
115
  // TTVideoEngine
135
116
  // byteplus and volcengine use different name, different version
136
117
  // volcengine com.bytedanceapi:ttsdk-player_premium:1.43.1.5 com.bytedanceapi:ttsdk-player_standard:1.43.1.5
137
- // byteplus com.bytedanceapi:ttsdk-player_premium:1.42.300.101 com.bytedanceapi:ttsdk-player_standard:1.42.300.101
118
+ // byteplus com.bytedanceapi:ttsdk-player_premium:1.42.300.101 com.bytedanceapi:ttsdk-player_standard:1.42.300.101
138
119
  def license_type = getLicenseType()
139
120
  implementation "com.bytedanceapi:ttsdk-player_$license_type:1.+"
140
- }
121
+ }
@@ -0,0 +1,24 @@
1
+ // Copyright © 2022 BytePlusRTC All rights reserved.
2
+ // SPDX-License-Identifier: MIT
3
+
4
+ package com.volcengine.reactnative.vertc;
5
+
6
+ import android.util.Log;
7
+
8
+ import com.ss.bytertc.engine.RTCVideo;
9
+ import com.ss.bytertc.engine.VideoEncoderConfig;
10
+
11
+ public class VertcHelper {
12
+
13
+ static public VertcHelper sInstance = new VertcHelper();
14
+
15
+ public static VertcHelper getInstance() {
16
+ Log.d("VertcHelper", "channelSolutions" + sInstance);
17
+ return sInstance;
18
+ }
19
+
20
+ public int invokeSetVideoEncoderConfig(RTCVideo engine, VideoEncoderConfig[] channelSolutions) {
21
+ Log.d("VertcHelper", "channelSolutions");
22
+ return engine.setVideoEncoderConfig(channelSolutions);
23
+ }
24
+ }
@@ -21,9 +21,11 @@ public class VertcVod {
21
21
  private String TAG = "VertcVod";
22
22
 
23
23
  private TTVideoEngine mVideoEngine;
24
+ private org.json.JSONObject mOptions;
24
25
 
25
26
  private int mVideoWidth = 1080;
26
27
  private int mVideoHeight = 1920;
28
+ private VodVideoEngineCallbackProxy mVodVideoEngineCallbackProxy;
27
29
  private OESTextureProcessor mOESTextureProcessor;
28
30
  private VideoAudioProcessor mVAProcessor;
29
31
  private final OESTextureProcessor.IProcessorCallback mProcessorCallback = new OESTextureProcessor.IProcessorCallback() {
@@ -45,25 +47,28 @@ public class VertcVod {
45
47
 
46
48
  @Override
47
49
  public void onFrameAvailable(EGLContext eglContext, int oesTextureId, float[] transformMatrix, int width, int height, long timestamp) {
48
- mVAProcessor.onFrameAvailable(eglContext, oesTextureId, transformMatrix, width, height);
50
+ mVAProcessor.onFrameAvailable(eglContext, oesTextureId, transformMatrix, width, height, timestamp);
49
51
  }
50
52
  };
51
53
 
52
- public int startVodPlayerCapture(RTCVideo engine, Object arg, StreamIndex streamIndex) {
54
+ public int startVodPlayerCapture(RTCVideo engine, Object ttplayer, org.json.JSONObject options) {
53
55
  try {
54
- mVideoEngine = getTTVideoEngine(arg);
56
+ stopVodPlayerCapture(engine);
57
+
58
+ mVideoEngine = getTTVideoEngine(ttplayer);
55
59
  if (mVideoEngine == null) {
56
60
  throw new RuntimeException("unknown VideoEngine");
57
61
  }
58
- stopVodPlayerCapture(engine);
62
+
63
+ mOptions = options;
64
+ mVAProcessor = new VideoAudioProcessor(engine, options);
59
65
 
60
- mVAProcessor = new VideoAudioProcessor(engine);
61
- var mVideoEngineCallback = new VideoEngineCallback() {
66
+ // process TTVideoEngine video
67
+ VideoEngineCallback mVideoEngineCallback = new VideoEngineCallback() {
62
68
  @Override
63
69
  public void onRenderStart(TTVideoEngine engine) {
64
70
  final int videoWidth = engine.getVideoWidth();
65
71
  final int videoHeight = engine.getVideoHeight();
66
- Log.d(TAG, "onRenderStart: " + videoWidth + "x" + videoHeight);
67
72
  mVideoWidth = videoWidth;
68
73
  mVideoHeight = videoHeight;
69
74
 
@@ -75,35 +80,46 @@ public class VertcVod {
75
80
  mOESTextureProcessor.initSurfaceTexture(videoWidth, videoHeight, mProcessorCallback);
76
81
  }
77
82
  };
78
- mVideoEngine.configResolution(Resolution.High);
79
- mVideoEngine.setVideoEngineCallback(mVideoEngineCallback);
83
+ mVodVideoEngineCallbackProxy = new VodVideoEngineCallbackProxy(mVideoEngine, mVideoEngineCallback);
84
+ mVideoEngine.setVideoEngineCallback(mVodVideoEngineCallbackProxy.listen());
85
+ // process TTVideoEngine audio
80
86
  mVideoEngine.setAudioProcessor(mVAProcessor);
81
87
  mVideoEngine.setIntOption(TTVideoEngine.PLAYER_OPTION_SET_VOICE, TTVideoEngine.VOICE_DUMMY);
82
88
  mVideoEngine.setIntOption(TTVideoEngine.PLAYER_OPTION_DUMMY_AUDIO_SLEEP, 0);
83
89
 
90
+ // set sourceType to External
84
91
  engine.setScreenAudioSourceType(AudioSourceType.AUDIO_SOURCE_TYPE_EXTERNAL);
85
92
  engine.setVideoSourceType(StreamIndex.STREAM_INDEX_SCREEN, VideoSourceType.VIDEO_SOURCE_TYPE_EXTERNAL);
86
- //
93
+
87
94
  OESTextureProcessor textureProcessor = mOESTextureProcessor;
88
95
  if (textureProcessor != null) {
89
96
  textureProcessor.destroy();
90
97
  }
91
98
  mOESTextureProcessor = new OESTextureProcessor();
92
- mOESTextureProcessor.initSurfaceTexture(1080, 1920, mProcessorCallback);
99
+ mOESTextureProcessor.initSurfaceTexture(mVideoWidth, mVideoHeight, mProcessorCallback);
93
100
 
94
101
  return 0;
95
102
  } catch (Exception err) {
96
- Log.d(TAG, "error:" + err.getMessage());
103
+ Log.d(TAG, "startVodPlayerCapture fail:" + err.getMessage());
97
104
  throw new RuntimeException(err);
98
105
  }
99
106
  }
100
107
 
101
108
  public int stopVodPlayerCapture(RTCVideo engine) {
109
+ // reset sourceType to Internal
102
110
  engine.setVideoSourceType(StreamIndex.STREAM_INDEX_SCREEN, VideoSourceType.VIDEO_SOURCE_TYPE_INTERNAL);
111
+ engine.setScreenAudioSourceType(AudioSourceType.AUDIO_SOURCE_TYPE_INTERNAL);
112
+ mOptions = null;
113
+ if (mVodVideoEngineCallbackProxy != null) {
114
+ mVodVideoEngineCallbackProxy.reset(mVideoEngine);
115
+ mVodVideoEngineCallbackProxy = null;
116
+ }
103
117
  if (mVideoEngine != null) {
104
- mVideoEngine.setSurfaceHolder(null);
118
+ mVideoEngine.setSurface(null);
105
119
  mVideoEngine.setAudioProcessor(null);
106
120
  mVideoEngine.setVideoEngineCallback(null);
121
+ mVideoEngine.stop();
122
+ mVideoEngine = null;
107
123
  }
108
124
  if (mOESTextureProcessor != null) {
109
125
  mOESTextureProcessor.destroy();
@@ -20,14 +20,16 @@ import java.nio.ByteBuffer;
20
20
  import java.util.concurrent.TimeUnit;
21
21
 
22
22
  public class VideoAudioProcessor extends AudioProcessor {
23
- public String TAG = "VideoAudioProcessor";
24
-
25
- private final VodAudioProcessor processor;
26
- private final RTCVideo mRTCEngine;
23
+ public String TAG = "VideoAudioProcessor";
27
24
 
28
- public VideoAudioProcessor(RTCVideo engine) {
25
+ private org.json.JSONObject mOptions;
26
+ private final VodAudioProcessor processor;
27
+ private final RTCVideo mRTCEngine;
28
+
29
+ public VideoAudioProcessor(RTCVideo engine, org.json.JSONObject options) {
29
30
  processor = new VodAudioProcessor(engine);
30
31
  mRTCEngine = engine;
32
+ mOptions = options;
31
33
  }
32
34
 
33
35
  public void updateScreenConfig(int width, int height) {
@@ -38,7 +40,6 @@ public class VideoAudioProcessor extends AudioProcessor {
38
40
  videoEncoderConfig.height = height;
39
41
  videoEncoderConfig.encodePreference = ScreenVideoEncoderConfig.EncoderPreference.MaintainFramerate;
40
42
  mRTCEngine.setScreenVideoEncoderConfig(videoEncoderConfig);
41
- // mRTCEngine.publishScreen(RTCEngine.MediaStreamType.RTC_MEDIA_STREAM_TYPE_BOTH);
42
43
  }
43
44
 
44
45
  @Override
@@ -53,7 +54,6 @@ public class VideoAudioProcessor extends AudioProcessor {
53
54
 
54
55
  @Override
55
56
  public void audioClose() {
56
-
57
57
  }
58
58
 
59
59
  @Override
@@ -61,16 +61,14 @@ public class VideoAudioProcessor extends AudioProcessor {
61
61
 
62
62
  }
63
63
 
64
- public void onFrameAvailable(EGLContext eglContext, int oesTextureId, float[] transformMatrix, int width, int height) {
65
- long nanoTime = System.currentTimeMillis() * TimeUnit.MILLISECONDS.toNanos(1);
64
+ public void onFrameAvailable(EGLContext eglContext, int oesTextureId, float[] transformMatrix, int width, int height, long timestamp) {
66
65
  GLTextureVideoFrameBuilder builder = new GLTextureVideoFrameBuilder(VideoPixelFormat.TEXTURE_OES)
67
66
  .setTextureID(oesTextureId)
68
67
  .setWidth(width)
69
68
  .setHeight(height)
70
69
  .setRotation(VideoRotation.VIDEO_ROTATION_0)
71
70
  .setTextureMatrix(transformMatrix)
72
- .setTimeStampUs(nanoTime)
73
- .setColorSpace(ColorSpace.UNKNOWN)
71
+ .setTimeStampUs(timestamp)
74
72
  .setEGLContext(eglContext);
75
73
  int ret_status = mRTCEngine.pushScreenVideoFrame(builder.build());
76
74
  boolean result = ret_status == ReturnStatus.RETURN_STATUS_SUCCESS.value()
@@ -5,36 +5,47 @@ package com.volcengine.reactnative.vertc.vod;
5
5
 
6
6
  import android.os.Handler;
7
7
  import android.os.Looper;
8
- import android.util.Log;
9
8
 
10
9
  import com.ss.bytertc.engine.RTCVideo;
10
+ import com.ss.bytertc.engine.data.AudioSourceType;
11
+ import com.ss.bytertc.engine.data.StreamIndex;
11
12
  import com.ss.bytertc.engine.data.VideoPixelFormat;
12
13
  import com.ss.bytertc.engine.data.VideoRotation;
14
+ import com.ss.bytertc.engine.data.VideoSourceType;
13
15
  import com.ss.bytertc.engine.video.VideoFrame;
14
16
  import com.ss.bytertc.engine.video.builder.CpuBufferVideoFrameBuilder;
15
17
 
16
18
  import java.nio.ByteBuffer;
17
19
  import java.util.concurrent.TimeUnit;
18
20
 
21
+ // For Test pushScreenVideoFrame
19
22
  public class VodMock {
20
- public String TAG = "VideoAudioProcessor";
23
+ public String TAG = "VodMockTest";
21
24
 
22
25
  private final RTCVideo mRTCEngine;
26
+ private final int mFps;
23
27
 
24
28
  public VodMock(RTCVideo engine) {
25
29
  mRTCEngine = engine;
30
+ mFps = 30;
26
31
  }
27
32
 
28
- public void useFakeVideoFrame() {
33
+ public void start() {
34
+ mRTCEngine.setScreenAudioSourceType(AudioSourceType.AUDIO_SOURCE_TYPE_EXTERNAL);
35
+ mRTCEngine.setVideoSourceType(StreamIndex.STREAM_INDEX_SCREEN, VideoSourceType.VIDEO_SOURCE_TYPE_EXTERNAL);
36
+ useFakeVideoFrame();
37
+ }
38
+
39
+ private void useFakeVideoFrame() {
29
40
  final Handler handler = new Handler(Looper.getMainLooper());
30
41
  var width = 998;
31
42
  var height = 554;
32
43
  Runnable runnable = new Runnable() {
33
44
  @Override
34
45
  public void run() {
35
- handler.postDelayed(this, 30); // Repeats every 5 seconds
46
+ handler.postDelayed(this, 1000 / mFps);
36
47
 
37
- var frame = useInternalVideoFrame(width, height);
48
+ var frame = buildInternalVideoFrame(width, height);
38
49
  mRTCEngine.pushScreenVideoFrame(frame);
39
50
  // Log.d(TAG, "pushScreenVideoFrame");
40
51
  }
@@ -42,7 +53,7 @@ public class VodMock {
42
53
  handler.post(runnable);
43
54
  }
44
55
 
45
- public VideoFrame useInternalVideoFrame(int width, int height){
56
+ private VideoFrame buildInternalVideoFrame(int width, int height){
46
57
  int chromaWidth = (width + 1) / 2;
47
58
  int chromaHeight = (height + 1) /2;
48
59
  int uvSize = chromaWidth * chromaHeight;