@byteplus/react-native-rtc 1.0.0 → 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (25)
  1. package/android/build.gradle +1 -1
  2. package/android/src/main/java/com/volcengine/reactnative/vertc/vod/VertcVod.java +10 -26
  3. package/android/src/main/java/com/volcengine/reactnative/vertc/vod/VideoAudioProcessor.java +8 -9
  4. package/android/src/main/java/com/volcengine/reactnative/vertc/vod/VodAudioProxy.java +44 -0
  5. package/android/src/main/java/com/volcengine/reactnative/vertc/vod/VodAudioVoiceWrapperObject.java +355 -0
  6. package/android/src/main/java/com/volcengine/reactnative/vertc/vod/VodMock.java +6 -17
  7. package/android/src/main/java/com/volcengine/reactnative/vertc/vod/VodVideoProxy.java +97 -0
  8. package/ios/vod/VertcVod.m +3 -10
  9. package/ios/vod/VodAudioProcessor.h +1 -1
  10. package/ios/vod/VodAudioProcessor.mm +1 -3
  11. package/ios/vod/VodVideoProcessor.h +1 -3
  12. package/ios/vod/VodVideoProcessor.m +2 -22
  13. package/lib/commonjs/index.js +871 -865
  14. package/lib/module/index.js +871 -865
  15. package/lib/typescript/codegen/pack/api.d.ts +634 -637
  16. package/lib/typescript/codegen/pack/callback.d.ts +248 -251
  17. package/lib/typescript/codegen/pack/errorcode.d.ts +36 -36
  18. package/lib/typescript/codegen/pack/keytype.d.ts +181 -181
  19. package/lib/typescript/core/rtc-video.d.ts +1 -7
  20. package/lib/typescript/core.d.ts +4 -4
  21. package/lib/typescript/interface.d.ts +63 -64
  22. package/lib/typescript/platforms/android/vod.d.ts +2 -2
  23. package/package.json +2 -2
  24. package/react-native-rtc.podspec +6 -5
  25. package/android/src/main/java/com/volcengine/reactnative/vertc/vod/VodVideoEngineCallbackProxy.java +0 -94
@@ -128,8 +128,8 @@ dependencies {
128
128
 
129
129
  // def dep = getNativeDep()
130
130
  // implementation "$dep"
131
- implementation "com.byteplus:BytePlusRTC:3.58.1.15100"
132
131
  // implementation "com.volcengine:VolcEngineRTC:3.58.1.2700"
132
+ implementation "com.byteplus:BytePlusRTC:3.58.1.15100"
133
133
 
134
134
  // TTVideoEngine
135
135
  // byteplus and volcengine use different name, different version
@@ -21,11 +21,9 @@ public class VertcVod {
21
21
  private String TAG = "VertcVod";
22
22
 
23
23
  private TTVideoEngine mVideoEngine;
24
- private org.json.JSONObject mOptions;
25
24
 
26
25
  private int mVideoWidth = 1080;
27
26
  private int mVideoHeight = 1920;
28
- private VodVideoEngineCallbackProxy mVodVideoEngineCallbackProxy;
29
27
  private OESTextureProcessor mOESTextureProcessor;
30
28
  private VideoAudioProcessor mVAProcessor;
31
29
  private final OESTextureProcessor.IProcessorCallback mProcessorCallback = new OESTextureProcessor.IProcessorCallback() {
@@ -51,20 +49,16 @@ public class VertcVod {
51
49
  }
52
50
  };
53
51
 
54
- public int startVodPlayerCapture(RTCVideo engine, Object ttplayer, org.json.JSONObject options) {
52
+ public int startVodPlayerCapture(RTCVideo engine, Object arg, StreamIndex streamIndex) {
55
53
  try {
56
- stopVodPlayerCapture(engine);
57
-
58
- mVideoEngine = getTTVideoEngine(ttplayer);
54
+ mVideoEngine = getTTVideoEngine(arg);
59
55
  if (mVideoEngine == null) {
60
56
  throw new RuntimeException("unknown VideoEngine");
61
57
  }
62
-
63
- mOptions = options;
64
- mVAProcessor = new VideoAudioProcessor(engine, options);
58
+ stopVodPlayerCapture(engine);
65
59
 
66
- // process TTVideoEngine video
67
- VideoEngineCallback mVideoEngineCallback = new VideoEngineCallback() {
60
+ mVAProcessor = new VideoAudioProcessor(engine);
61
+ var mVideoEngineCallback = new VideoEngineCallback() {
68
62
  @Override
69
63
  public void onRenderStart(TTVideoEngine engine) {
70
64
  final int videoWidth = engine.getVideoWidth();
@@ -81,45 +75,35 @@ public class VertcVod {
81
75
  mOESTextureProcessor.initSurfaceTexture(videoWidth, videoHeight, mProcessorCallback);
82
76
  }
83
77
  };
84
- mVodVideoEngineCallbackProxy = new VodVideoEngineCallbackProxy(mVideoEngine, mVideoEngineCallback);
85
- mVideoEngine.setVideoEngineCallback(mVodVideoEngineCallbackProxy.listen());
86
- // process TTVideoEngine audio
78
+ mVideoEngine.configResolution(Resolution.High);
79
+ mVideoEngine.setVideoEngineCallback(mVideoEngineCallback);
87
80
  mVideoEngine.setAudioProcessor(mVAProcessor);
88
81
  mVideoEngine.setIntOption(TTVideoEngine.PLAYER_OPTION_SET_VOICE, TTVideoEngine.VOICE_DUMMY);
89
82
  mVideoEngine.setIntOption(TTVideoEngine.PLAYER_OPTION_DUMMY_AUDIO_SLEEP, 0);
90
83
 
91
- // set sourceType to External
92
84
  engine.setScreenAudioSourceType(AudioSourceType.AUDIO_SOURCE_TYPE_EXTERNAL);
93
85
  engine.setVideoSourceType(StreamIndex.STREAM_INDEX_SCREEN, VideoSourceType.VIDEO_SOURCE_TYPE_EXTERNAL);
94
-
86
+ //
95
87
  OESTextureProcessor textureProcessor = mOESTextureProcessor;
96
88
  if (textureProcessor != null) {
97
89
  textureProcessor.destroy();
98
90
  }
99
91
  mOESTextureProcessor = new OESTextureProcessor();
100
- mOESTextureProcessor.initSurfaceTexture(mVideoWidth, mVideoHeight, mProcessorCallback);
92
+ mOESTextureProcessor.initSurfaceTexture(1080, 1920, mProcessorCallback);
101
93
 
102
94
  return 0;
103
95
  } catch (Exception err) {
104
- Log.d(TAG, "startVodPlayerCapture fail:" + err.getMessage());
96
+ Log.d(TAG, "error:" + err.getMessage());
105
97
  throw new RuntimeException(err);
106
98
  }
107
99
  }
108
100
 
109
101
  public int stopVodPlayerCapture(RTCVideo engine) {
110
- // reset sourceType to Internal
111
102
  engine.setVideoSourceType(StreamIndex.STREAM_INDEX_SCREEN, VideoSourceType.VIDEO_SOURCE_TYPE_INTERNAL);
112
- engine.setScreenAudioSourceType(AudioSourceType.AUDIO_SOURCE_TYPE_INTERNAL);
113
- mOptions = null;
114
- if (mVodVideoEngineCallbackProxy != null) {
115
- mVodVideoEngineCallbackProxy.reset(mVideoEngine);
116
- mVodVideoEngineCallbackProxy = null;
117
- }
118
103
  if (mVideoEngine != null) {
119
104
  mVideoEngine.setSurfaceHolder(null);
120
105
  mVideoEngine.setAudioProcessor(null);
121
106
  mVideoEngine.setVideoEngineCallback(null);
122
- mVideoEngine = null;
123
107
  }
124
108
  if (mOESTextureProcessor != null) {
125
109
  mOESTextureProcessor.destroy();
@@ -20,16 +20,14 @@ import java.nio.ByteBuffer;
20
20
  import java.util.concurrent.TimeUnit;
21
21
 
22
22
  public class VideoAudioProcessor extends AudioProcessor {
23
- public String TAG = "VideoAudioProcessor";
24
-
25
- private org.json.JSONObject mOptions;
26
- private final VodAudioProcessor processor;
23
+ public String TAG = "VideoAudioProcessor";
24
+
25
+ private final VodAudioProcessor processor;
27
26
  private final RTCVideo mRTCEngine;
28
-
29
- public VideoAudioProcessor(RTCVideo engine, org.json.JSONObject options) {
27
+
28
+ public VideoAudioProcessor(RTCVideo engine) {
30
29
  processor = new VodAudioProcessor(engine);
31
30
  mRTCEngine = engine;
32
- mOptions = options;
33
31
  }
34
32
 
35
33
  public void updateScreenConfig(int width, int height) {
@@ -40,6 +38,7 @@ public class VideoAudioProcessor extends AudioProcessor {
40
38
  videoEncoderConfig.height = height;
41
39
  videoEncoderConfig.encodePreference = ScreenVideoEncoderConfig.EncoderPreference.MaintainFramerate;
42
40
  mRTCEngine.setScreenVideoEncoderConfig(videoEncoderConfig);
41
+ // mRTCEngine.publishScreen(RTCEngine.MediaStreamType.RTC_MEDIA_STREAM_TYPE_BOTH);
43
42
  }
44
43
 
45
44
  @Override
@@ -63,14 +62,14 @@ public class VideoAudioProcessor extends AudioProcessor {
63
62
  }
64
63
 
65
64
  public void onFrameAvailable(EGLContext eglContext, int oesTextureId, float[] transformMatrix, int width, int height) {
66
- long timeStampUs = System.currentTimeMillis() * TimeUnit.MILLISECONDS.toNanos(1);
65
+ long nanoTime = System.currentTimeMillis() * TimeUnit.MILLISECONDS.toNanos(1);
67
66
  GLTextureVideoFrameBuilder builder = new GLTextureVideoFrameBuilder(VideoPixelFormat.TEXTURE_OES)
68
67
  .setTextureID(oesTextureId)
69
68
  .setWidth(width)
70
69
  .setHeight(height)
71
70
  .setRotation(VideoRotation.VIDEO_ROTATION_0)
72
71
  .setTextureMatrix(transformMatrix)
73
- .setTimeStampUs(timeStampUs)
72
+ .setTimeStampUs(nanoTime)
74
73
  .setColorSpace(ColorSpace.UNKNOWN)
75
74
  .setEGLContext(eglContext);
76
75
  int ret_status = mRTCEngine.pushScreenVideoFrame(builder.build());
@@ -0,0 +1,44 @@
1
+ // Copyright © 2022 BytePlusRTC All rights reserved.
2
+ // SPDX-License-Identifier: MIT
3
+
4
+ package com.volcengine.reactnative.vertc.vod;
5
+
6
+ import com.facebook.react.bridge.ReactApplicationContext;
7
+ import com.ss.bytertc.engine.RTCVideo;
8
+ import com.ss.bytertc.engine.data.AudioSourceType;
9
+ import com.ss.ttm.player.TraitObject;
10
+ import com.ss.ttvideoengine.TTVideoEngine;
11
+
12
+ public class VodAudioProxy {
13
+ private String TAG = "VodAudioProxy";
14
+
15
+ private TTVideoEngine mVideoEngine;
16
+ private RTCVideo mRTCVideo;
17
+
18
+ public void initEngine(TTVideoEngine videoEngine, RTCVideo rtcEngine, ReactApplicationContext reactApplicationContext) {
19
+ mVideoEngine = videoEngine;
20
+ mRTCVideo = rtcEngine;
21
+ rtcEngine.setScreenAudioSourceType(AudioSourceType.AUDIO_SOURCE_TYPE_EXTERNAL);
22
+
23
+ // var mVAProcessor = new
24
+ // mVideoEngine.setAudioProcessor(mVAProcessor);
25
+
26
+ // TraitObject traitObj = new VodAudioVoiceWrapperObject(reactApplicationContext.getApplicationContext(), rtcEngine);
27
+ // mVideoEngine.setIntOption(TTVideoEngine.PLAYER_OPTION_SET_VOICE, TTVideoEngine.VOICE_EXTERN);
28
+ // mVideoEngine.setTraitObject(TraitObject.ExtVoice, traitObj);
29
+
30
+ // mVideoEngine.setIntOption(TTVideoEngine.PLAYER_OPTION_EXTERN_VOICE_OUTPUT_FORMAT, VoiceTrait.AV_PCM_FMT_S16);
31
+
32
+ // mVideoEngine.setIntOption(TTVideoEngine.PLAYER_OPTION_SET_VOICE, TTVideoEngine.VOICE_DUMMY);
33
+ // mVideoEngine.setIntOption(TTVideoEngine.PLAYER_OPTION_DUMMY_AUDIO_SLEEP, 0);
34
+ }
35
+
36
+ public void release() {
37
+ if (mVideoEngine != null) {
38
+ mVideoEngine = null;
39
+ }
40
+ if (mRTCVideo != null) {
41
+ mRTCVideo = null;
42
+ }
43
+ }
44
+ }
@@ -0,0 +1,355 @@
1
+ // Copyright © 2022 BytePlusRTC All rights reserved.
2
+ // SPDX-License-Identifier: MIT
3
+
4
+ package com.volcengine.reactnative.vertc.vod;
5
+
6
+ import android.app.Service;
7
+ import android.content.Context;
8
+ import android.media.AudioFormat;
9
+ import android.media.AudioManager;
10
+ import android.media.AudioTrack;
11
+ import android.os.Build;
12
+ import android.util.Log;
13
+
14
+ import com.ss.bytertc.engine.RTCVideo;
15
+ import com.ss.bytertc.engine.data.AudioChannel;
16
+ import com.ss.bytertc.engine.data.AudioSampleRate;
17
+ import com.ss.bytertc.engine.utils.AudioFrame;
18
+ import com.ss.ttm.player.TraitObject;
19
+ import com.ss.ttm.player.VoiceTrait;
20
+
21
+ import java.lang.reflect.Method;
22
+ import java.nio.ByteBuffer;
23
+
24
+ public class VodAudioVoiceWrapperObject extends VoiceTrait {
25
+ private String TAG = "VodAudioVoiceWrapperObject";
26
+ private VodAudioProcessor mAudioProcessor;
27
+
28
+ private final static int kNoSettingVolume = -1;
29
+ private Context mContext = null;
30
+ private AudioManager mAudioManager = null;
31
+ private AudioTrack mAudioTrack = null;
32
+ private Method getLatencyMethod = null;
33
+ private float mLeftVolume = kNoSettingVolume;
34
+ private int mMaxVolume = 0;
35
+ private volatile boolean mStoped = true;
36
+ private int mChannels = 0;
37
+ private int mSampleBytes = 2;
38
+ private int mSampleRate = 0;
39
+ private int mFrameSampleNB = 0;
40
+ private int mBytePerSample = 0;
41
+ private int mFormat = -1;
42
+ private boolean mIsPacked = false;
43
+ private int mTrackBufferSize = 0;
44
+ private int mSessionId = -1;
45
+ private int mStreamType = AudioManager.STREAM_MUSIC;
46
+ private int mChannelsLayout = AudioFormat.CHANNEL_INVALID;
47
+ private int mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
48
+ private RTCVideo mRTCVideo;
49
+
50
+ public VodAudioVoiceWrapperObject() {
51
+ super(TraitObject.ExtVoice, VoiceTrait.Version0, 0);
52
+
53
+ }
54
+ public VodAudioVoiceWrapperObject(Context context, RTCVideo rtcVideo) {
55
+ this();
56
+ mContext = context;
57
+ mRTCVideo = rtcVideo;
58
+ mAudioProcessor = new VodAudioProcessor(rtcVideo);
59
+ }
60
+
61
+ public int audioOpen(AudioMediaInfo info) {
62
+ if (info == null) {
63
+ return -1;
64
+ }
65
+
66
+ mAudioProcessor.audioOpen(info.mSampleRate, info.mChannels);
67
+
68
+ mChannels = info.mChannels;
69
+ mSampleRate = info.mSampleRate;
70
+ mFrameSampleNB = info.mFrameSampleNB;
71
+ mBytePerSample = info.mBytePerSample;
72
+ mFormat = info.mFormat;
73
+ mIsPacked = info.mIsPacked == 1;
74
+ // ret = reconfigure();
75
+ // if (ret == -1) {
76
+ // return -1;
77
+ // }
78
+ // if(mLeftVolume == kNoSettingVolume) {
79
+ // mLeftVolume = mMaxVolume / 2;
80
+ // }
81
+ // setAudioVolume(mLeftVolume);
82
+ // try {
83
+ // getLatencyMethod =
84
+ // AudioTrack.class.getMethod("getLatency", (Class<?>[]) null);
85
+ // } catch (NoSuchMethodException e) {
86
+ // // There's no guarantee this method exists. Do nothing.
87
+ // }
88
+ // mStoped = false;
89
+ // mAudioTrack.play();
90
+ Log.d(TAG, "audio open success");
91
+ return 0;
92
+ }
93
+
94
+ @Override
95
+ public int audioWrite(AudioFrameInfo frame) {
96
+ // mAudioProcessor.audioProcess(frame.mBuffers, frame.mSamples, frame.mTimestamp);
97
+ // return 0;
98
+ // int ret = -1;
99
+ // if (frame == null || frame.mBuffers == null) {
100
+ // return ret;
101
+ // }
102
+ //
103
+ // int wSize= 0;
104
+ // synchronized (mAudioTrack) {
105
+ // if (mStoped) {
106
+ // return 0;
107
+ // }
108
+ // try {
109
+ // byte[] tmpBuffer = getAudioBuffer(frame);
110
+ // wSize = mAudioTrack.write(tmpBuffer, 0, tmpBuffer.length);
111
+ // ret = 0;
112
+ // AudioFrame audioFrame = buildAudioFrame(frame);
113
+ // this.mRTCVideo.pushScreenAudioFrame(audioFrame);
114
+ // } catch (Exception e) {
115
+ // Log.d(TAG, "write fail = " + e);
116
+ // e.printStackTrace();
117
+ // return -1;
118
+ // }
119
+ // }
120
+ //
121
+ // if(wSize < 0) {
122
+ // Log.e(TAG, "write failed : ret: " + wSize + ", size = " + frame.mBuffers[0].array().length);
123
+ // return wSize;
124
+ // }
125
+ // Log.d(TAG, "audio write success");
126
+ // mWrittenPcmBytes += wSize;
127
+ AudioFrame audioFrame = buildAudioFrame(frame);
128
+ this.mRTCVideo.pushScreenAudioFrame(audioFrame);
129
+ return 0;
130
+ }
131
+
132
+ private AudioFrame buildAudioFrame(AudioFrameInfo info) {
133
+ var buffer = getAudioBuffer(info);
134
+ var mAudioSampleRate = getAudioSampleRate();
135
+ var mAudioChannel = getAudioChannel();
136
+ return new AudioFrame(
137
+ buffer,
138
+ info.mSamples,
139
+ mAudioSampleRate,
140
+ mAudioChannel
141
+ );
142
+ }
143
+
144
+ private AudioSampleRate getAudioSampleRate() {
145
+ if (mSampleRate == AudioSampleRate.AUDIO_SAMPLE_RATE_48000.value()) {
146
+ return AudioSampleRate.AUDIO_SAMPLE_RATE_48000;
147
+ }
148
+
149
+ if (mSampleRate == AudioSampleRate.AUDIO_SAMPLE_RATE_44100.value()) {
150
+ return AudioSampleRate.AUDIO_SAMPLE_RATE_44100;
151
+ }
152
+
153
+ if (mSampleRate == AudioSampleRate.AUDIO_SAMPLE_RATE_32000.value()) {
154
+ return AudioSampleRate.AUDIO_SAMPLE_RATE_32000;
155
+ }
156
+
157
+ if (mSampleRate == AudioSampleRate.AUDIO_SAMPLE_RATE_16000.value()) {
158
+ return AudioSampleRate.AUDIO_SAMPLE_RATE_16000;
159
+ }
160
+
161
+ if (mSampleRate == AudioSampleRate.AUDIO_SAMPLE_RATE_8000.value()) {
162
+ return AudioSampleRate.AUDIO_SAMPLE_RATE_8000;
163
+ }
164
+
165
+ return AudioSampleRate.AUDIO_SAMPLE_RATE_AUTO;
166
+ }
167
+
168
+ private AudioChannel getAudioChannel() {
169
+ if (mChannels == AudioChannel.AUDIO_CHANNEL_MONO.value()) {
170
+ return AudioChannel.AUDIO_CHANNEL_MONO;
171
+ }
172
+
173
+ if (mChannels == AudioChannel.AUDIO_CHANNEL_STEREO.value()) {
174
+ return AudioChannel.AUDIO_CHANNEL_STEREO;
175
+ }
176
+
177
+ return AudioChannel.AUDIO_CHANNEL_AUTO;
178
+ }
179
+
180
+ private byte[] getAudioBuffer(AudioFrameInfo frame) {
181
+ byte[] buffer = null;
182
+ if (frame == null || frame.mBuffers == null) {
183
+ return null;
184
+ }
185
+ int bufferSize = frame.mBuffers[0].limit();
186
+ if (!mIsPacked) {
187
+ bufferSize = bufferSize * mChannels;
188
+ }
189
+ buffer = new byte[bufferSize];
190
+ int bytePerSample = mBytePerSample;
191
+ int readSize = bytePerSample * mChannels;
192
+ if (!mIsPacked) {
193
+ readSize = bytePerSample;
194
+ }
195
+ for (int i = 0;i < mChannels;i++) {
196
+ ByteBuffer tmpBuffer = frame.mBuffers[i];
197
+ if (tmpBuffer == null) {
198
+ break;
199
+ }
200
+ for (int j = 0; j < frame.mSamples;j++) {
201
+ tmpBuffer.get(buffer,j * mChannels * bytePerSample + i * bytePerSample, readSize);
202
+ }
203
+ }
204
+ return buffer;
205
+ }
206
+
207
+ @Override
208
+ public void audioPause() {
209
+ if (mAudioTrack == null) {
210
+ return;
211
+ }
212
+ mAudioTrack.pause();
213
+ }
214
+
215
+ @Override
216
+ public void audioResume() {
217
+ if (mAudioTrack == null) {
218
+ return;
219
+ }
220
+ mAudioTrack.play();
221
+ }
222
+
223
+ @Override
224
+ public void audioFlush() {
225
+ if (mAudioTrack == null) {
226
+ return;
227
+ }
228
+ mAudioTrack.flush();
229
+ }
230
+
231
+ @Override
232
+ public void audioClose() {
233
+ if (mAudioTrack == null) {
234
+ return;
235
+ }
236
+ mAudioTrack.pause();
237
+ mStoped = true;
238
+ }
239
+
240
+ @Override
241
+ public int getLatency() {
242
+ int latency = 0;
243
+ if (mAudioTrack == null) {
244
+ return latency;
245
+ }
246
+ if (getLatencyMethod != null) {
247
+ try {
248
+ // Compute the audio track latency, excluding the latency due to the buffer (leaving
249
+ // latency due to the mixer and audio hardware driver).
250
+ latency = (Integer) getLatencyMethod.invoke(mAudioTrack, (Object[]) null);
251
+ } catch (Exception e) {
252
+ latency = 0;
253
+ getLatencyMethod = null;
254
+ }
255
+ }
256
+ return latency;
257
+ }
258
+
259
+ private int reconfigure() {
260
+ int ret = -1;
261
+ if (mContext == null) {
262
+ return ret;
263
+ }
264
+ mAudioManager = (AudioManager)mContext.getSystemService(Service.AUDIO_SERVICE);
265
+ if(mAudioManager != null) {
266
+ mMaxVolume = mAudioManager.getStreamMaxVolume( mStreamType ); //
267
+ }
268
+ mChannelsLayout = getAudioTrackChannelConfig(mChannels);
269
+ if (mChannelsLayout == AudioFormat.CHANNEL_INVALID) {
270
+ return ret;
271
+ }
272
+ if (mSampleBytes == 1) {
273
+ mAudioFormat = AudioFormat.ENCODING_PCM_8BIT;
274
+ } else if(mSampleBytes == 2) {
275
+ mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
276
+ } else {
277
+ return ret;
278
+ }
279
+ int minBufSize = AudioTrack.getMinBufferSize(mSampleRate, mChannelsLayout, mAudioFormat);
280
+ if (minBufSize <= 0) {
281
+ Log.e(TAG, "getMinBufferSize failed, trace: sampleRate = " + mSampleRate + ", " +
282
+ "mChannelsLayout = " + mChannelsLayout + ", mAudioFormat = " + mAudioFormat);
283
+ return minBufSize;
284
+ }
285
+ try {
286
+ if (mSessionId == -1) {
287
+ mAudioTrack = new AudioTrack(mStreamType, mSampleRate, mChannelsLayout, mAudioFormat, minBufSize, AudioTrack.MODE_STREAM);
288
+ } else {
289
+ mAudioTrack = new AudioTrack(mStreamType, mSampleRate, mChannelsLayout, mAudioFormat, minBufSize, AudioTrack.MODE_STREAM, mSessionId);
290
+ }
291
+ } catch (Throwable e) {
292
+ Log.e(TAG, "create audio track failed ,detail = " + e);
293
+ return ret;
294
+ }
295
+ int state = mAudioTrack.getState();
296
+ if (state != AudioTrack.STATE_INITIALIZED) {
297
+ try {
298
+ mAudioTrack.release();
299
+ } catch (Exception e) {
300
+ // The track has already failed to initialize, so it wouldn't be that surprising if release
301
+ // were to fail too. Swallow the exception.
302
+ } finally {
303
+ mAudioTrack = null;
304
+ Log.e(TAG, "create audiotrack but failed to initialize");
305
+ return AudioTrack.ERROR_INVALID_OPERATION;
306
+ }
307
+ }
308
+ return 0;
309
+ }
310
+
311
+ public void setAudioVolume(float leftVolume) {
312
+ try{
313
+ if(mMaxVolume < leftVolume) {
314
+ leftVolume = mMaxVolume;
315
+ }
316
+ if(mAudioManager != null) {
317
+ // Log.i("ttmn", "setVolume system lv"+leftVolume + "rv:"+rightVolume + "mv:"+mAudioManager.getStreamMaxVolume( AudioManager.STREAM_MUSIC ));
318
+ mAudioManager.setStreamVolume(mStreamType, (int)leftVolume, 0);
319
+ }
320
+ mLeftVolume = leftVolume;
321
+ } catch (Exception e) {
322
+ }
323
+ }
324
+
325
+ public static int getAudioTrackChannelConfig(int channelCount) {
326
+ switch(channelCount) {
327
+ case 1:
328
+ return 4;
329
+ case 2:
330
+ return 12;
331
+ case 3:
332
+ return 28;
333
+ case 4:
334
+ return 204;
335
+ case 5:
336
+ return 220;
337
+ case 6:
338
+ return 252;
339
+ case 7:
340
+ return 1276;
341
+ case 8:
342
+ if (Build.VERSION.SDK_INT >= 23) {
343
+ return 6396;
344
+ } else {
345
+ if (Build.VERSION.SDK_INT >= 21) {
346
+ return 6396;
347
+ }
348
+
349
+ return 0;
350
+ }
351
+ default:
352
+ return 0;
353
+ }
354
+ }
355
+ }
@@ -5,47 +5,36 @@ package com.volcengine.reactnative.vertc.vod;
5
5
 
6
6
  import android.os.Handler;
7
7
  import android.os.Looper;
8
+ import android.util.Log;
8
9
 
9
10
  import com.ss.bytertc.engine.RTCVideo;
10
- import com.ss.bytertc.engine.data.AudioSourceType;
11
- import com.ss.bytertc.engine.data.StreamIndex;
12
11
  import com.ss.bytertc.engine.data.VideoPixelFormat;
13
12
  import com.ss.bytertc.engine.data.VideoRotation;
14
- import com.ss.bytertc.engine.data.VideoSourceType;
15
13
  import com.ss.bytertc.engine.video.VideoFrame;
16
14
  import com.ss.bytertc.engine.video.builder.CpuBufferVideoFrameBuilder;
17
15
 
18
16
  import java.nio.ByteBuffer;
19
17
  import java.util.concurrent.TimeUnit;
20
18
 
21
- // For Test pushScreenVideoFrame
22
19
  public class VodMock {
23
- public String TAG = "VodMockTest";
20
+ public String TAG = "VideoAudioProcessor";
24
21
 
25
22
  private final RTCVideo mRTCEngine;
26
- private final int mFps;
27
23
 
28
24
  public VodMock(RTCVideo engine) {
29
25
  mRTCEngine = engine;
30
- mFps = 30;
31
26
  }
32
27
 
33
- public void start() {
34
- mRTCEngine.setScreenAudioSourceType(AudioSourceType.AUDIO_SOURCE_TYPE_EXTERNAL);
35
- mRTCEngine.setVideoSourceType(StreamIndex.STREAM_INDEX_SCREEN, VideoSourceType.VIDEO_SOURCE_TYPE_EXTERNAL);
36
- useFakeVideoFrame();
37
- }
38
-
39
- private void useFakeVideoFrame() {
28
+ public void useFakeVideoFrame() {
40
29
  final Handler handler = new Handler(Looper.getMainLooper());
41
30
  var width = 998;
42
31
  var height = 554;
43
32
  Runnable runnable = new Runnable() {
44
33
  @Override
45
34
  public void run() {
46
- handler.postDelayed(this, 1000 / mFps);
35
+ handler.postDelayed(this, 30); // Repeats every 5 seconds
47
36
 
48
- var frame = buildInternalVideoFrame(width, height);
37
+ var frame = useInternalVideoFrame(width, height);
49
38
  mRTCEngine.pushScreenVideoFrame(frame);
50
39
  // Log.d(TAG, "pushScreenVideoFrame");
51
40
  }
@@ -53,7 +42,7 @@ public class VodMock {
53
42
  handler.post(runnable);
54
43
  }
55
44
 
56
- private VideoFrame buildInternalVideoFrame(int width, int height){
45
+ public VideoFrame useInternalVideoFrame(int width, int height){
57
46
  int chromaWidth = (width + 1) / 2;
58
47
  int chromaHeight = (height + 1) /2;
59
48
  int uvSize = chromaWidth * chromaHeight;