@sbhjt-gr/react-native-webrtc 137.0.4 → 137.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ios/RCTWebRTC/WLVAudioDevice.h +8 -0
- package/ios/RCTWebRTC/WLVAudioDevice.m +201 -0
- package/ios/RCTWebRTC/WebRTCModule+RTCMediaStream.m +47 -0
- package/ios/RCTWebRTC/WebRTCModule.h +1 -0
- package/ios/RCTWebRTC/WebRTCModule.m +15 -0
- package/ios/RCTWebRTC.xcodeproj/project.pbxproj +6 -0
- package/lib/commonjs/MediaDevices.js +13 -0
- package/lib/commonjs/MediaDevices.js.map +1 -1
- package/lib/module/MediaDevices.js +13 -0
- package/lib/module/MediaDevices.js.map +1 -1
- package/lib/typescript/MediaDevices.d.ts +15 -0
- package/package.json +1 -1
- package/src/MediaDevices.ts +33 -0
|
@@ -0,0 +1,201 @@
|
|
|
1
|
+
#import "WLVAudioDevice.h"
|
|
2
|
+
#import <AudioToolbox/AudioToolbox.h>
|
|
3
|
+
#import <math.h>
|
|
4
|
+
|
|
5
|
+
// Default audio parameters used until the first pushed buffer overrides them.
static const double kDefaultSampleRate = 48000.0;
static const NSInteger kDefaultChannels = 1;
static const NSTimeInterval kDefaultBufferDuration = 0.02;

// Private state for the virtual (application-fed) audio device.
@interface WLVAudioDevice ()

// WebRTC-provided sink for recorded audio. Weak: WebRTC owns the delegate.
@property(nonatomic, weak) id<RTCAudioDeviceDelegate> delegate;

// Lifecycle flags mirroring the RTCAudioDevice protocol state machine.
@property(nonatomic) BOOL initialized;
@property(nonatomic) BOOL playoutInitialized;
@property(nonatomic) BOOL recordingInitialized;
@property(nonatomic) BOOL playing;
@property(nonatomic) BOOL recording;

// Current input/output format reported back to WebRTC.
@property(nonatomic) double inRate;
@property(nonatomic) NSInteger inChannels;
@property(nonatomic) double outRate;
@property(nonatomic) NSInteger outChannels;
@property(nonatomic) NSTimeInterval inBufferDuration;
@property(nonatomic) NSTimeInterval outBufferDuration;
@property(nonatomic) NSTimeInterval inLatency;
@property(nonatomic) NSTimeInterval outLatency;

// Monotonic sample clock for delivered buffers; advanced on `queue`.
@property(nonatomic) double sampleTime;

// Serial queue on which all sample delivery happens.
@property(nonatomic, strong) dispatch_queue_t queue;

@end
|
|
29
|
+
|
|
30
|
+
@implementation WLVAudioDevice

#pragma mark - Lifecycle

- (instancetype)init {
    self = [super init];
    if (self) {
        _inRate = kDefaultSampleRate;
        _outRate = kDefaultSampleRate;
        _inChannels = kDefaultChannels;
        _outChannels = kDefaultChannels;
        _inBufferDuration = kDefaultBufferDuration;
        _outBufferDuration = kDefaultBufferDuration;
        // Serial queue: all sample delivery and sampleTime updates run here,
        // so those fields need no additional locking.
        _queue = dispatch_queue_create("webrtc.virtual.audio", DISPATCH_QUEUE_SERIAL);
    }
    return self;
}

#pragma mark - Sample injection

/// Delivers interleaved 16-bit PCM samples to WebRTC as recorded audio.
/// Safe to call from any thread; delivery happens on the private serial queue.
/// @param data Interleaved int16 PCM. Trailing partial frames are dropped.
/// @param sampleRate Sample rate of the pushed data in Hz; must be > 0.
/// @param channels Number of interleaved channels; must be > 0.
- (void)pushAudioData:(NSData *)data sampleRate:(double)sampleRate channels:(NSInteger)channels {
    if (!self.recording || !self.delegate || data.length == 0) {
        return;
    }
    // Reject non-positive parameters explicitly. A negative NSInteger channel
    // count multiplied by sizeof(int16_t) (a size_t) would wrap to a huge
    // unsigned value, slip past a simple zero check, and produce a garbage
    // frame count below.
    if (channels <= 0 || sampleRate <= 0) {
        return;
    }
    NSUInteger bytesPerFrame = (NSUInteger)channels * sizeof(int16_t);
    UInt32 frames = (UInt32)(data.length / bytesPerFrame);
    if (frames == 0) {
        return;
    }
    [self updateInputRate:sampleRate channels:channels];
    // Copy so the caller may reuse or mutate its buffer after this returns.
    NSData *chunk = [data copy];
    dispatch_async(self.queue, ^{
        // State may have changed between enqueue and execution.
        if (!self.recording || !self.delegate) {
            return;
        }
        AudioBuffer buffer;
        buffer.mNumberChannels = (UInt32)channels;
        buffer.mDataByteSize = (UInt32)chunk.length;
        buffer.mData = (void *)chunk.bytes;
        AudioBufferList list;
        list.mNumberBuffers = 1;
        list.mBuffers[0] = buffer;
        AudioUnitRenderActionFlags flags = 0;
        AudioTimeStamp stamp;
        memset(&stamp, 0, sizeof(AudioTimeStamp));
        stamp.mFlags = kAudioTimeStampSampleTimeValid;
        // Monotonic sample clock; only ever advanced on this serial queue.
        stamp.mSampleTime = self.sampleTime;
        self.sampleTime += frames;
        RTC_OBJC_TYPE(RTCAudioDeviceDeliverRecordedDataBlock) block = self.delegate.deliverRecordedData;
        if (block) {
            block(&flags, &stamp, 0, frames, &list, NULL, nil);
        }
    });
}

/// Records a change of input format and, when something actually changed,
/// notifies WebRTC asynchronously (on the delegate's own queue) so it can
/// reconfigure for the new parameters.
- (void)updateInputRate:(double)rate channels:(NSInteger)channels {
    // 0.1 Hz tolerance avoids spurious notifications from float jitter.
    BOOL rateChanged = fabs(self.inRate - rate) > 0.1;
    BOOL channelChanged = self.inChannels != channels;
    if (rateChanged) {
        self.inRate = rate;
    }
    if (channelChanged) {
        self.inChannels = channels;
    }
    if ((rateChanged || channelChanged) && self.delegate) {
        [self.delegate dispatchAsync:^{
            [self.delegate notifyAudioInputParametersChange];
        }];
    }
}

#pragma mark - RTCAudioDevice format queries

- (double)deviceInputSampleRate {
    return self.inRate;
}

- (NSTimeInterval)inputIOBufferDuration {
    return self.inBufferDuration;
}

- (NSInteger)inputNumberOfChannels {
    return self.inChannels;
}

- (NSTimeInterval)inputLatency {
    return self.inLatency;
}

- (double)deviceOutputSampleRate {
    return self.outRate;
}

- (NSTimeInterval)outputIOBufferDuration {
    return self.outBufferDuration;
}

- (NSInteger)outputNumberOfChannels {
    return self.outChannels;
}

- (NSTimeInterval)outputLatency {
    return self.outLatency;
}

#pragma mark - RTCAudioDevice lifecycle

- (BOOL)isInitialized {
    return self.initialized;
}

- (BOOL)initializeWithDelegate:(id<RTCAudioDeviceDelegate>)delegate {
    self.delegate = delegate;
    self.initialized = YES;
    return YES;
}

/// Tears down all state and resets the sample clock. Always succeeds.
- (BOOL)terminateDevice {
    self.delegate = nil;
    self.initialized = NO;
    self.playoutInitialized = NO;
    self.recordingInitialized = NO;
    self.playing = NO;
    self.recording = NO;
    self.sampleTime = 0;
    return YES;
}

- (BOOL)isPlayoutInitialized {
    return self.playoutInitialized;
}

- (BOOL)initializePlayout {
    self.playoutInitialized = YES;
    return YES;
}

- (BOOL)isPlaying {
    return self.playing;
}

// Playout is a no-op for this virtual device; the flags only satisfy the
// RTCAudioDevice protocol contract.
- (BOOL)startPlayout {
    self.playing = YES;
    return YES;
}

- (BOOL)stopPlayout {
    self.playing = NO;
    return YES;
}

- (BOOL)isRecordingInitialized {
    return self.recordingInitialized;
}

- (BOOL)initializeRecording {
    self.recordingInitialized = YES;
    return YES;
}

- (BOOL)isRecording {
    return self.recording;
}

// "Recording" only gates whether pushAudioData: forwards samples.
- (BOOL)startRecording {
    self.recording = YES;
    return YES;
}

- (BOOL)stopRecording {
    self.recording = NO;
    return YES;
}

@end
|
|
@@ -323,6 +323,53 @@ RCT_EXPORT_METHOD(getDisplayMedia : (RCTPromiseResolveBlock)resolve rejecter : (
|
|
|
323
323
|
#endif
|
|
324
324
|
}
|
|
325
325
|
|
|
326
|
+
/// Creates an audio track fed by the virtual audio device (instead of the
/// microphone), wraps it in a new media stream, and resolves with the stream
/// id plus track descriptions. Rejects on tvOS and when the factory cannot
/// be created.
RCT_EXPORT_METHOD(createVirtualAudioTrack : (RCTPromiseResolveBlock)resolve rejecter : (RCTPromiseRejectBlock)reject) {
#if TARGET_OS_TV
    reject(@"unsupported_platform", @"tvOS is not supported", nil);
    return;
#else
    RTCPeerConnectionFactory *factory = [self virtualFactory];
    if (factory == nil) {
        reject(@"virtual_factory_error", @"Factory unavailable", nil);
        return;
    }

    RTCAudioSource *audioSource = [factory audioSourceWithConstraints:nil];
    NSString *newTrackId = [[NSUUID UUID] UUIDString];
    RTCAudioTrack *audioTrack = [factory audioTrackWithSource:audioSource trackId:newTrackId];

    // createMediaStream returns @[ streamId, trackInfos ].
    NSArray *result = [self createMediaStream:@[ audioTrack ]];
    resolve(@{@"streamId" : result[0], @"tracks" : result[1]});
#endif
}
|
|
343
|
+
|
|
344
|
+
/// Receives a chunk of boxed 16-bit PCM samples from JS, converts it to a
/// contiguous int16 buffer, and forwards it to the virtual audio device.
/// Silently does nothing on tvOS, on empty input, or when the device could
/// not be created.
RCT_EXPORT_METHOD(pushVirtualAudioSamples
                  : (nonnull NSArray<NSNumber *> *)samples
                  sampleRate
                  : (nonnull NSNumber *)sampleRate
                  channels
                  : (nonnull NSNumber *)channels) {
#if TARGET_OS_TV
    return;
#else
    NSUInteger count = samples.count;
    if (count == 0) {
        return;
    }
    // Creating the factory also creates the virtual device as a side effect.
    if (self.virtualAudioDevice == nil) {
        [self virtualFactory];
    }
    WLVAudioDevice *device = self.virtualAudioDevice;
    if (device == nil) {
        return;
    }
    // Unbox the NSNumber samples into a contiguous interleaved PCM buffer.
    NSMutableData *pcm = [NSMutableData dataWithLength:count * sizeof(int16_t)];
    int16_t *out = pcm.mutableBytes;
    for (NSUInteger i = 0; i < count; i++) {
        out[i] = samples[i].shortValue;
    }
    [device pushAudioData:pcm sampleRate:sampleRate.doubleValue channels:channels.integerValue];
#endif
}
|
|
372
|
+
|
|
326
373
|
/**
|
|
327
374
|
* Implements {@code getUserMedia}. Note that at this point constraints have
|
|
328
375
|
* been normalized and permissions have been granted. The constraints only
|
|
@@ -43,5 +43,6 @@ static NSString *const kEventAudioSamples = @"audioSamples";
|
|
|
43
43
|
@property(nonatomic, strong) NSMutableDictionary<NSString *, RTCDataPacketCryptor *> *dataPacketCryptors;
|
|
44
44
|
|
|
45
45
|
- (RTCMediaStream *)streamForReactTag:(NSString *)reactTag;
|
|
46
|
+
- (RTCPeerConnectionFactory *)virtualFactory;
|
|
46
47
|
|
|
47
48
|
@end
|
|
@@ -10,8 +10,13 @@
|
|
|
10
10
|
#import "WebRTCModule+RTCPeerConnection.h"
|
|
11
11
|
#import "WebRTCModule.h"
|
|
12
12
|
#import "WebRTCModuleOptions.h"
|
|
13
|
+
#import "WLVAudioDevice.h"
|
|
13
14
|
|
|
14
15
|
@interface WebRTCModule ()
|
|
16
|
+
|
|
17
|
+
@property(nonatomic, strong) WLVAudioDevice *virtualAudioDevice;
|
|
18
|
+
@property(nonatomic, strong) RTCPeerConnectionFactory *virtualPeerConnectionFactory;
|
|
19
|
+
|
|
15
20
|
@end
|
|
16
21
|
|
|
17
22
|
@implementation WebRTCModule
|
|
@@ -128,6 +133,16 @@ RCT_EXPORT_MODULE();
|
|
|
128
133
|
return _workerQueue;
|
|
129
134
|
}
|
|
130
135
|
|
|
136
|
+
/// Lazily builds the peer-connection factory wired to the virtual audio
/// device; the device and factory are created together on first use and
/// cached for the lifetime of the module.
- (RTCPeerConnectionFactory *)virtualFactory {
    if (_virtualPeerConnectionFactory != nil) {
        return _virtualPeerConnectionFactory;
    }
    self.virtualAudioDevice = [[WLVAudioDevice alloc] init];
    _virtualPeerConnectionFactory = [[RTCPeerConnectionFactory alloc] initWithEncoderFactory:_encoderFactory
                                                                              decoderFactory:_decoderFactory
                                                                                 audioDevice:self.virtualAudioDevice];
    return _virtualPeerConnectionFactory;
}
|
|
145
|
+
|
|
131
146
|
- (NSArray<NSString *> *)supportedEvents {
|
|
132
147
|
return @[
|
|
133
148
|
kEventPeerConnectionSignalingStateChanged,
|
|
@@ -22,6 +22,7 @@
|
|
|
22
22
|
4EE3A8D225B841DD00FAA24A /* CaptureController.m in Sources */ = {isa = PBXBuildFile; fileRef = 4EE3A8C925B841DD00FAA24A /* CaptureController.m */; };
|
|
23
23
|
4EE3A8D325B841DD00FAA24A /* ScreenCaptureController.m in Sources */ = {isa = PBXBuildFile; fileRef = 4EE3A8CC25B841DD00FAA24A /* ScreenCaptureController.m */; };
|
|
24
24
|
4EE3A8D425B841DD00FAA24A /* ScreenCapturer.m in Sources */ = {isa = PBXBuildFile; fileRef = 4EE3A8CD25B841DD00FAA24A /* ScreenCapturer.m */; };
|
|
25
|
+
E0C37C1F2D8D1C6A009E2E63 /* WLVAudioDevice.m in Sources */ = {isa = PBXBuildFile; fileRef = E0C37C1E2D8D1C6A009E2E63 /* WLVAudioDevice.m */; };
|
|
25
26
|
D3FF699919D2664B25C9D458 /* Pods_RCTWebRTC.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A20F721AD842563B66292D5B /* Pods_RCTWebRTC.framework */; };
|
|
26
27
|
D74EF94829652169000742E1 /* TrackCapturerEventsEmitter.m in Sources */ = {isa = PBXBuildFile; fileRef = D74EF94629652169000742E1 /* TrackCapturerEventsEmitter.m */; };
|
|
27
28
|
D7F0711E2C6DC91F0031F594 /* WebRTCModule+RTCAudioSession.m in Sources */ = {isa = PBXBuildFile; fileRef = D7F0711D2C6DC91F0031F594 /* WebRTCModule+RTCAudioSession.m */; };
|
|
@@ -72,6 +73,8 @@
|
|
|
72
73
|
4EE3A8CC25B841DD00FAA24A /* ScreenCaptureController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = ScreenCaptureController.m; path = RCTWebRTC/ScreenCaptureController.m; sourceTree = SOURCE_ROOT; };
|
|
73
74
|
4EE3A8CD25B841DD00FAA24A /* ScreenCapturer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = ScreenCapturer.m; path = RCTWebRTC/ScreenCapturer.m; sourceTree = SOURCE_ROOT; };
|
|
74
75
|
4EE3A8CE25B841DD00FAA24A /* SocketConnection.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = SocketConnection.h; path = RCTWebRTC/SocketConnection.h; sourceTree = SOURCE_ROOT; };
|
|
76
|
+
E0C37C1D2D8D1C6A009E2E63 /* WLVAudioDevice.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = WLVAudioDevice.h; path = RCTWebRTC/WLVAudioDevice.h; sourceTree = SOURCE_ROOT; };
|
|
77
|
+
E0C37C1E2D8D1C6A009E2E63 /* WLVAudioDevice.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; name = WLVAudioDevice.m; path = RCTWebRTC/WLVAudioDevice.m; sourceTree = SOURCE_ROOT; };
|
|
75
78
|
A20F721AD842563B66292D5B /* Pods_RCTWebRTC.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_RCTWebRTC.framework; sourceTree = BUILT_PRODUCTS_DIR; };
|
|
76
79
|
D74EF94529652148000742E1 /* CapturerEventsDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = CapturerEventsDelegate.h; path = RCTWebRTC/CapturerEventsDelegate.h; sourceTree = SOURCE_ROOT; };
|
|
77
80
|
D74EF94629652169000742E1 /* TrackCapturerEventsEmitter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = TrackCapturerEventsEmitter.m; path = RCTWebRTC/TrackCapturerEventsEmitter.m; sourceTree = SOURCE_ROOT; };
|
|
@@ -157,6 +160,8 @@
|
|
|
157
160
|
4EE3A8CD25B841DD00FAA24A /* ScreenCapturer.m */,
|
|
158
161
|
4EE3A8CE25B841DD00FAA24A /* SocketConnection.h */,
|
|
159
162
|
4EE3A8C825B841DD00FAA24A /* SocketConnection.m */,
|
|
163
|
+
E0C37C1D2D8D1C6A009E2E63 /* WLVAudioDevice.h */,
|
|
164
|
+
E0C37C1E2D8D1C6A009E2E63 /* WLVAudioDevice.m */,
|
|
160
165
|
DEC96579264176DF0052DB35 /* DataChannelWrapper.h */,
|
|
161
166
|
DEC96576264176C10052DB35 /* DataChannelWrapper.m */,
|
|
162
167
|
);
|
|
@@ -255,6 +260,7 @@
|
|
|
255
260
|
isa = PBXSourcesBuildPhase;
|
|
256
261
|
buildActionMask = 2147483647;
|
|
257
262
|
files = (
|
|
263
|
+
E0C37C1F2D8D1C6A009E2E63 /* WLVAudioDevice.m in Sources */,
|
|
258
264
|
4EE3A8C525B8417800FAA24A /* WebRTCModule+VideoTrackAdapter.m in Sources */,
|
|
259
265
|
4EE3A8BA25B8415900FAA24A /* WebRTCModule+RTCDataChannel.m in Sources */,
|
|
260
266
|
4EE3A8B625B8414A00FAA24A /* WebRTCModule+Permissions.m in Sources */,
|
|
@@ -42,6 +42,19 @@ class MediaDevices extends _index.EventTarget {
|
|
|
42
42
|
getUserMedia(constraints) {
|
|
43
43
|
return (0, _getUserMedia.default)(constraints);
|
|
44
44
|
}
|
|
45
|
+
createVirtualAudioTrack() {
|
|
46
|
+
if (!WebRTCModule.createVirtualAudioTrack) {
|
|
47
|
+
return Promise.reject(new Error('virtual audio unavailable'));
|
|
48
|
+
}
|
|
49
|
+
return WebRTCModule.createVirtualAudioTrack();
|
|
50
|
+
}
|
|
51
|
+
pushVirtualAudioSamples(samples, sampleRate, channels) {
|
|
52
|
+
if (!WebRTCModule.pushVirtualAudioSamples) {
|
|
53
|
+
return;
|
|
54
|
+
}
|
|
55
|
+
const array = samples instanceof Int16Array ? Array.from(samples) : samples.slice();
|
|
56
|
+
WebRTCModule.pushVirtualAudioSamples(array, sampleRate, channels);
|
|
57
|
+
}
|
|
45
58
|
}
|
|
46
59
|
|
|
47
60
|
/**
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"names":["_index","require","_reactNative","_getDisplayMedia","_interopRequireDefault","_getUserMedia","obj","__esModule","default","WebRTCModule","NativeModules","MediaDevices","EventTarget","enumerateDevices","Promise","resolve","getDisplayMedia","getUserMedia","constraints","proto","prototype","defineEventAttribute","_default","exports"],"sources":["MediaDevices.ts"],"sourcesContent":["import { EventTarget, Event, defineEventAttribute } from 'event-target-shim/index';\nimport { NativeModules } from 'react-native';\n\nimport getDisplayMedia from './getDisplayMedia';\nimport getUserMedia, { Constraints } from './getUserMedia';\n\nconst { WebRTCModule } = NativeModules;\n\ntype MediaDevicesEventMap = {\n devicechange: Event<'devicechange'>\n}\n\nclass MediaDevices extends EventTarget<MediaDevicesEventMap> {\n /**\n * W3C \"Media Capture and Streams\" compatible {@code enumerateDevices}\n * implementation.\n */\n enumerateDevices() {\n return new Promise(resolve => WebRTCModule.enumerateDevices(resolve));\n }\n\n /**\n * W3C \"Screen Capture\" compatible {@code getDisplayMedia} implementation.\n * See: https://w3c.github.io/mediacapture-screen-share/\n *\n * @returns {Promise}\n */\n getDisplayMedia() {\n return getDisplayMedia();\n }\n\n /**\n * W3C \"Media Capture and Streams\" compatible {@code getUserMedia}\n * implementation.\n * See: https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-enumeratedevices\n *\n * @param {*} constraints\n * @returns {Promise}\n */\n getUserMedia(constraints: Constraints) {\n return getUserMedia(constraints);\n }\n}\n\n/**\n * Define the `onxxx` event handlers.\n */\nconst proto = MediaDevices.prototype;\n\ndefineEventAttribute(proto, 'devicechange');\n\n\nexport default new MediaDevices();\n"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,YAAA,GAAAD,OAAA;
|
|
1
|
+
{"version":3,"names":["_index","require","_reactNative","_getDisplayMedia","_interopRequireDefault","_getUserMedia","obj","__esModule","default","WebRTCModule","NativeModules","MediaDevices","EventTarget","enumerateDevices","Promise","resolve","getDisplayMedia","getUserMedia","constraints","createVirtualAudioTrack","reject","Error","pushVirtualAudioSamples","samples","sampleRate","channels","array","Int16Array","Array","from","slice","proto","prototype","defineEventAttribute","_default","exports"],"sources":["MediaDevices.ts"],"sourcesContent":["import { EventTarget, Event, defineEventAttribute } from 'event-target-shim/index';\nimport { NativeModules } from 'react-native';\n\nimport { type MediaTrackSettings } from './MediaStreamTrack';\nimport getDisplayMedia from './getDisplayMedia';\nimport getUserMedia, { Constraints } from './getUserMedia';\n\nconst { WebRTCModule } = NativeModules;\n\ntype MediaDevicesEventMap = {\n devicechange: Event<'devicechange'>\n}\n\ntype VirtualTrackInfo = {\n enabled: boolean;\n id: string;\n kind: string;\n readyState: string;\n remote: boolean;\n settings: MediaTrackSettings;\n}\n\ntype VirtualAudioResponse = {\n streamId: string;\n tracks: VirtualTrackInfo[];\n}\n\nclass MediaDevices extends EventTarget<MediaDevicesEventMap> {\n /**\n * W3C \"Media Capture and Streams\" compatible {@code enumerateDevices}\n * implementation.\n */\n enumerateDevices() {\n return new Promise(resolve => WebRTCModule.enumerateDevices(resolve));\n }\n\n /**\n * W3C \"Screen Capture\" compatible {@code getDisplayMedia} implementation.\n * See: https://w3c.github.io/mediacapture-screen-share/\n *\n * @returns {Promise}\n */\n getDisplayMedia() {\n return getDisplayMedia();\n }\n\n /**\n * W3C \"Media Capture and Streams\" compatible {@code getUserMedia}\n * implementation.\n * See: https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-enumeratedevices\n *\n * @param {*} constraints\n * @returns {Promise}\n */\n getUserMedia(constraints: 
Constraints) {\n return getUserMedia(constraints);\n }\n\n createVirtualAudioTrack(): Promise<VirtualAudioResponse> {\n if (!WebRTCModule.createVirtualAudioTrack) {\n return Promise.reject(new Error('virtual audio unavailable'));\n }\n\n return WebRTCModule.createVirtualAudioTrack();\n }\n\n pushVirtualAudioSamples(samples: Int16Array | number[], sampleRate: number, channels: number) {\n if (!WebRTCModule.pushVirtualAudioSamples) {\n return;\n }\n\n const array = samples instanceof Int16Array ? Array.from(samples) : samples.slice();\n\n WebRTCModule.pushVirtualAudioSamples(array, sampleRate, channels);\n }\n}\n\n/**\n * Define the `onxxx` event handlers.\n */\nconst proto = MediaDevices.prototype;\n\ndefineEventAttribute(proto, 'devicechange');\n\n\nexport default new MediaDevices();\n"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,YAAA,GAAAD,OAAA;AAGA,IAAAE,gBAAA,GAAAC,sBAAA,CAAAH,OAAA;AACA,IAAAI,aAAA,GAAAD,sBAAA,CAAAH,OAAA;AAA2D,SAAAG,uBAAAE,GAAA,WAAAA,GAAA,IAAAA,GAAA,CAAAC,UAAA,GAAAD,GAAA,KAAAE,OAAA,EAAAF,GAAA;AAE3D,MAAM;EAAEG;AAAa,CAAC,GAAGC,0BAAa;AAoBtC,MAAMC,YAAY,SAASC,kBAAW,CAAuB;EACzD;AACJ;AACA;AACA;EACIC,gBAAgBA,CAAA,EAAG;IACf,OAAO,IAAIC,OAAO,CAACC,OAAO,IAAIN,YAAY,CAACI,gBAAgB,CAACE,OAAO,CAAC,CAAC;EACzE;;EAEA;AACJ;AACA;AACA;AACA;AACA;EACIC,eAAeA,CAAA,EAAG;IACd,OAAO,IAAAA,wBAAe,EAAC,CAAC;EAC5B;;EAEA;AACJ;AACA;AACA;AACA;AACA;AACA;AACA;EACIC,YAAYA,CAACC,WAAwB,EAAE;IACnC,OAAO,IAAAD,qBAAY,EAACC,WAAW,CAAC;EACpC;EAEAC,uBAAuBA,CAAA,EAAkC;IACrD,IAAI,CAACV,YAAY,CAACU,uBAAuB,EAAE;MACvC,OAAOL,OAAO,CAACM,MAAM,CAAC,IAAIC,KAAK,CAAC,2BAA2B,CAAC,CAAC;IACjE;IAEA,OAAOZ,YAAY,CAACU,uBAAuB,CAAC,CAAC;EACjD;EAEAG,uBAAuBA,CAACC,OAA8B,EAAEC,UAAkB,EAAEC,QAAgB,EAAE;IAC1F,IAAI,CAAChB,YAAY,CAACa,uBAAuB,EAAE;MACvC;IACJ;IAEA,MAAMI,KAAK,GAAGH,OAAO,YAAYI,UAAU,GAAGC,KAAK,CAACC,IAAI,CAACN,OAAO,CAAC,GAAGA,OAAO,CAACO,KAAK,CAAC,CAAC;IAEnFrB,YAAY,CAACa,uBAAuB,CAACI,KAAK,EAAEF,UAAU,EAAEC,QAAQ,CAAC;EACrE;AACJ;;AAEA;AACA;AACA;AACA,MAAMM,KAAK,GAAGpB,YAAY,CAACqB,SAAS;AAEpC,IAAAC,2BAAoB,EAAC
F,KAAK,EAAE,cAAc,CAAC;AAAC,IAAAG,QAAA,GAG7B,IAAIvB,YAAY,CAAC,CAAC;AAAAwB,OAAA,CAAA3B,OAAA,GAAA0B,QAAA"}
|
|
@@ -35,6 +35,19 @@ class MediaDevices extends EventTarget {
|
|
|
35
35
|
getUserMedia(constraints) {
|
|
36
36
|
return getUserMedia(constraints);
|
|
37
37
|
}
|
|
38
|
+
createVirtualAudioTrack() {
|
|
39
|
+
if (!WebRTCModule.createVirtualAudioTrack) {
|
|
40
|
+
return Promise.reject(new Error('virtual audio unavailable'));
|
|
41
|
+
}
|
|
42
|
+
return WebRTCModule.createVirtualAudioTrack();
|
|
43
|
+
}
|
|
44
|
+
pushVirtualAudioSamples(samples, sampleRate, channels) {
|
|
45
|
+
if (!WebRTCModule.pushVirtualAudioSamples) {
|
|
46
|
+
return;
|
|
47
|
+
}
|
|
48
|
+
const array = samples instanceof Int16Array ? Array.from(samples) : samples.slice();
|
|
49
|
+
WebRTCModule.pushVirtualAudioSamples(array, sampleRate, channels);
|
|
50
|
+
}
|
|
38
51
|
}
|
|
39
52
|
|
|
40
53
|
/**
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"names":["EventTarget","defineEventAttribute","NativeModules","getDisplayMedia","getUserMedia","WebRTCModule","MediaDevices","enumerateDevices","Promise","resolve","constraints","proto","prototype"],"sources":["MediaDevices.ts"],"sourcesContent":["import { EventTarget, Event, defineEventAttribute } from 'event-target-shim/index';\nimport { NativeModules } from 'react-native';\n\nimport getDisplayMedia from './getDisplayMedia';\nimport getUserMedia, { Constraints } from './getUserMedia';\n\nconst { WebRTCModule } = NativeModules;\n\ntype MediaDevicesEventMap = {\n devicechange: Event<'devicechange'>\n}\n\nclass MediaDevices extends EventTarget<MediaDevicesEventMap> {\n /**\n * W3C \"Media Capture and Streams\" compatible {@code enumerateDevices}\n * implementation.\n */\n enumerateDevices() {\n return new Promise(resolve => WebRTCModule.enumerateDevices(resolve));\n }\n\n /**\n * W3C \"Screen Capture\" compatible {@code getDisplayMedia} implementation.\n * See: https://w3c.github.io/mediacapture-screen-share/\n *\n * @returns {Promise}\n */\n getDisplayMedia() {\n return getDisplayMedia();\n }\n\n /**\n * W3C \"Media Capture and Streams\" compatible {@code getUserMedia}\n * implementation.\n * See: https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-enumeratedevices\n *\n * @param {*} constraints\n * @returns {Promise}\n */\n getUserMedia(constraints: Constraints) {\n return getUserMedia(constraints);\n }\n}\n\n/**\n * Define the `onxxx` event handlers.\n */\nconst proto = MediaDevices.prototype;\n\ndefineEventAttribute(proto, 'devicechange');\n\n\nexport default new MediaDevices();\n"],"mappings":"AAAA,SAASA,WAAW,EAASC,oBAAoB,QAAQ,yBAAyB;AAClF,SAASC,aAAa,QAAQ,cAAc;
|
|
1
|
+
{"version":3,"names":["EventTarget","defineEventAttribute","NativeModules","getDisplayMedia","getUserMedia","WebRTCModule","MediaDevices","enumerateDevices","Promise","resolve","constraints","createVirtualAudioTrack","reject","Error","pushVirtualAudioSamples","samples","sampleRate","channels","array","Int16Array","Array","from","slice","proto","prototype"],"sources":["MediaDevices.ts"],"sourcesContent":["import { EventTarget, Event, defineEventAttribute } from 'event-target-shim/index';\nimport { NativeModules } from 'react-native';\n\nimport { type MediaTrackSettings } from './MediaStreamTrack';\nimport getDisplayMedia from './getDisplayMedia';\nimport getUserMedia, { Constraints } from './getUserMedia';\n\nconst { WebRTCModule } = NativeModules;\n\ntype MediaDevicesEventMap = {\n devicechange: Event<'devicechange'>\n}\n\ntype VirtualTrackInfo = {\n enabled: boolean;\n id: string;\n kind: string;\n readyState: string;\n remote: boolean;\n settings: MediaTrackSettings;\n}\n\ntype VirtualAudioResponse = {\n streamId: string;\n tracks: VirtualTrackInfo[];\n}\n\nclass MediaDevices extends EventTarget<MediaDevicesEventMap> {\n /**\n * W3C \"Media Capture and Streams\" compatible {@code enumerateDevices}\n * implementation.\n */\n enumerateDevices() {\n return new Promise(resolve => WebRTCModule.enumerateDevices(resolve));\n }\n\n /**\n * W3C \"Screen Capture\" compatible {@code getDisplayMedia} implementation.\n * See: https://w3c.github.io/mediacapture-screen-share/\n *\n * @returns {Promise}\n */\n getDisplayMedia() {\n return getDisplayMedia();\n }\n\n /**\n * W3C \"Media Capture and Streams\" compatible {@code getUserMedia}\n * implementation.\n * See: https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-enumeratedevices\n *\n * @param {*} constraints\n * @returns {Promise}\n */\n getUserMedia(constraints: Constraints) {\n return getUserMedia(constraints);\n }\n\n createVirtualAudioTrack(): Promise<VirtualAudioResponse> {\n if 
(!WebRTCModule.createVirtualAudioTrack) {\n return Promise.reject(new Error('virtual audio unavailable'));\n }\n\n return WebRTCModule.createVirtualAudioTrack();\n }\n\n pushVirtualAudioSamples(samples: Int16Array | number[], sampleRate: number, channels: number) {\n if (!WebRTCModule.pushVirtualAudioSamples) {\n return;\n }\n\n const array = samples instanceof Int16Array ? Array.from(samples) : samples.slice();\n\n WebRTCModule.pushVirtualAudioSamples(array, sampleRate, channels);\n }\n}\n\n/**\n * Define the `onxxx` event handlers.\n */\nconst proto = MediaDevices.prototype;\n\ndefineEventAttribute(proto, 'devicechange');\n\n\nexport default new MediaDevices();\n"],"mappings":"AAAA,SAASA,WAAW,EAASC,oBAAoB,QAAQ,yBAAyB;AAClF,SAASC,aAAa,QAAQ,cAAc;AAG5C,OAAOC,eAAe,MAAM,mBAAmB;AAC/C,OAAOC,YAAY,MAAuB,gBAAgB;AAE1D,MAAM;EAAEC;AAAa,CAAC,GAAGH,aAAa;AAoBtC,MAAMI,YAAY,SAASN,WAAW,CAAuB;EACzD;AACJ;AACA;AACA;EACIO,gBAAgBA,CAAA,EAAG;IACf,OAAO,IAAIC,OAAO,CAACC,OAAO,IAAIJ,YAAY,CAACE,gBAAgB,CAACE,OAAO,CAAC,CAAC;EACzE;;EAEA;AACJ;AACA;AACA;AACA;AACA;EACIN,eAAeA,CAAA,EAAG;IACd,OAAOA,eAAe,CAAC,CAAC;EAC5B;;EAEA;AACJ;AACA;AACA;AACA;AACA;AACA;AACA;EACIC,YAAYA,CAACM,WAAwB,EAAE;IACnC,OAAON,YAAY,CAACM,WAAW,CAAC;EACpC;EAEAC,uBAAuBA,CAAA,EAAkC;IACrD,IAAI,CAACN,YAAY,CAACM,uBAAuB,EAAE;MACvC,OAAOH,OAAO,CAACI,MAAM,CAAC,IAAIC,KAAK,CAAC,2BAA2B,CAAC,CAAC;IACjE;IAEA,OAAOR,YAAY,CAACM,uBAAuB,CAAC,CAAC;EACjD;EAEAG,uBAAuBA,CAACC,OAA8B,EAAEC,UAAkB,EAAEC,QAAgB,EAAE;IAC1F,IAAI,CAACZ,YAAY,CAACS,uBAAuB,EAAE;MACvC;IACJ;IAEA,MAAMI,KAAK,GAAGH,OAAO,YAAYI,UAAU,GAAGC,KAAK,CAACC,IAAI,CAACN,OAAO,CAAC,GAAGA,OAAO,CAACO,KAAK,CAAC,CAAC;IAEnFjB,YAAY,CAACS,uBAAuB,CAACI,KAAK,EAAEF,UAAU,EAAEC,QAAQ,CAAC;EACrE;AACJ;;AAEA;AACA;AACA;AACA,MAAMM,KAAK,GAAGjB,YAAY,CAACkB,SAAS;AAEpCvB,oBAAoB,CAACsB,KAAK,EAAE,cAAc,CAAC;AAG3C,eAAe,IAAIjB,YAAY,CAAC,CAAC"}
|
|
@@ -1,8 +1,21 @@
|
|
|
1
1
|
import { EventTarget, Event } from 'event-target-shim/index';
|
|
2
|
+
import { type MediaTrackSettings } from './MediaStreamTrack';
|
|
2
3
|
import { Constraints } from './getUserMedia';
|
|
3
4
|
declare type MediaDevicesEventMap = {
|
|
4
5
|
devicechange: Event<'devicechange'>;
|
|
5
6
|
};
|
|
7
|
+
declare type VirtualTrackInfo = {
|
|
8
|
+
enabled: boolean;
|
|
9
|
+
id: string;
|
|
10
|
+
kind: string;
|
|
11
|
+
readyState: string;
|
|
12
|
+
remote: boolean;
|
|
13
|
+
settings: MediaTrackSettings;
|
|
14
|
+
};
|
|
15
|
+
declare type VirtualAudioResponse = {
|
|
16
|
+
streamId: string;
|
|
17
|
+
tracks: VirtualTrackInfo[];
|
|
18
|
+
};
|
|
6
19
|
declare class MediaDevices extends EventTarget<MediaDevicesEventMap> {
|
|
7
20
|
/**
|
|
8
21
|
* W3C "Media Capture and Streams" compatible {@code enumerateDevices}
|
|
@@ -25,6 +38,8 @@ declare class MediaDevices extends EventTarget<MediaDevicesEventMap> {
|
|
|
25
38
|
* @returns {Promise}
|
|
26
39
|
*/
|
|
27
40
|
getUserMedia(constraints: Constraints): Promise<import("./MediaStream").default>;
|
|
41
|
+
createVirtualAudioTrack(): Promise<VirtualAudioResponse>;
|
|
42
|
+
pushVirtualAudioSamples(samples: Int16Array | number[], sampleRate: number, channels: number): void;
|
|
28
43
|
}
|
|
29
44
|
declare const _default: MediaDevices;
|
|
30
45
|
export default _default;
|
package/package.json
CHANGED
package/src/MediaDevices.ts
CHANGED
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import { EventTarget, Event, defineEventAttribute } from 'event-target-shim/index';
|
|
2
2
|
import { NativeModules } from 'react-native';
|
|
3
3
|
|
|
4
|
+
import { type MediaTrackSettings } from './MediaStreamTrack';
|
|
4
5
|
import getDisplayMedia from './getDisplayMedia';
|
|
5
6
|
import getUserMedia, { Constraints } from './getUserMedia';
|
|
6
7
|
|
|
@@ -10,6 +11,20 @@ type MediaDevicesEventMap = {
|
|
|
10
11
|
devicechange: Event<'devicechange'>
|
|
11
12
|
}
|
|
12
13
|
|
|
14
|
+
type VirtualTrackInfo = {
|
|
15
|
+
enabled: boolean;
|
|
16
|
+
id: string;
|
|
17
|
+
kind: string;
|
|
18
|
+
readyState: string;
|
|
19
|
+
remote: boolean;
|
|
20
|
+
settings: MediaTrackSettings;
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
type VirtualAudioResponse = {
|
|
24
|
+
streamId: string;
|
|
25
|
+
tracks: VirtualTrackInfo[];
|
|
26
|
+
}
|
|
27
|
+
|
|
13
28
|
class MediaDevices extends EventTarget<MediaDevicesEventMap> {
|
|
14
29
|
/**
|
|
15
30
|
* W3C "Media Capture and Streams" compatible {@code enumerateDevices}
|
|
@@ -40,6 +55,24 @@ class MediaDevices extends EventTarget<MediaDevicesEventMap> {
|
|
|
40
55
|
getUserMedia(constraints: Constraints) {
|
|
41
56
|
return getUserMedia(constraints);
|
|
42
57
|
}
|
|
58
|
+
|
|
59
|
+
createVirtualAudioTrack(): Promise<VirtualAudioResponse> {
|
|
60
|
+
if (!WebRTCModule.createVirtualAudioTrack) {
|
|
61
|
+
return Promise.reject(new Error('virtual audio unavailable'));
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
return WebRTCModule.createVirtualAudioTrack();
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
/**
 * Sends a chunk of 16-bit PCM samples to the native virtual audio device.
 * Does nothing when the native module does not implement the feature.
 *
 * @param samples Interleaved PCM samples as an Int16Array or number[].
 * @param sampleRate Sample rate of the chunk in Hz.
 * @param channels Number of interleaved channels in the chunk.
 */
pushVirtualAudioSamples(samples: Int16Array | number[], sampleRate: number, channels: number) {
    if (!WebRTCModule.pushVirtualAudioSamples) {
        return;
    }

    // Typed arrays become plain arrays for the bridge; plain arrays are
    // shallow-copied so the caller's array is never shared with native code.
    let payload: number[];

    if (samples instanceof Int16Array) {
        payload = Array.from(samples);
    } else {
        payload = samples.slice();
    }

    WebRTCModule.pushVirtualAudioSamples(payload, sampleRate, channels);
}
|
|
43
76
|
}
|
|
44
77
|
|
|
45
78
|
/**
|