react-native-suuqencode 0.1.1 → 0.1.2
This diff shows the content of publicly released package versions as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
- package/ios/Suuqencode.mm +115 -95
- package/package.json +1 -1
package/ios/Suuqencode.mm
CHANGED
@@ -1,10 +1,13 @@
 #import "Suuqencode.h"
+#import <SuuqeDMABuf/DMABuf.h>
+
+@interface Suuqencode ()

-@
+@property(nonatomic) VTCompressionSessionRef compressionSession;
+@property(nonatomic) dispatch_queue_t encodeQueue;
+@property(nonatomic) int frameCount;

-
-@property (nonatomic) dispatch_queue_t encodeQueue;
-@property (nonatomic) int frameCount;
+- (void)sendEncodedData:(NSData *)data;

 @end

@@ -12,115 +15,132 @@

 RCT_EXPORT_MODULE()

-- (instancetype)init
-
-
-
-
-
-
+- (instancetype)init {
+  self = [super init];
+  if (self) {
+    _encodeQueue = dispatch_queue_create("com.suuqencode.encodequeue",
+                                         DISPATCH_QUEUE_SERIAL);
+  }
+  return self;
 }

-
-
-
-
-  [DMABuf
-    void *buf = [DMABuf buf];
-    int width = [DMABuf width];
-    int height = [DMABuf height];
-
-    if (!buf) {
-      return;
-    }
-
-    dispatch_async(self.encodeQueue, ^{
-      if (!self.compressionSession) {
-        [self setupCompressionSessionWithWidth:width height:height];
-      }
-
-      CVPixelBufferRef pixelBuffer = NULL;
-      CVReturn status = CVPixelBufferCreateWithBytes(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA, buf, width * 4, NULL, NULL, NULL, &pixelBuffer);
-
-      if (status != kCVReturnSuccess) {
-        NSLog(@"Failed to create CVPixelBuffer");
-        return;
-      }
-
-      CMTime presentationTimeStamp = CMTimeMake(self.frameCount++, 30);
-      VTEncodeInfoFlags flags;
-
-      VTCompressionSessionEncodeFrame(self.compressionSession, pixelBuffer, presentationTimeStamp, kCMTimeInvalid, NULL, NULL, &flags);
-      CVPixelBufferRelease(pixelBuffer);
-    });
-  }];
-}
-
-- (void)setupCompressionSessionWithWidth:(int)width height:(int)height {
-  VTCompressionSessionCreate(NULL, width, height, kCMVideoCodecType_H264, NULL, NULL, NULL, compressionOutputCallback, (__bridge void *)(self), &_compressionSession);
-
-  VTSessionSetProperty(_compressionSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
-  VTSessionSetProperty(_compressionSession, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_Baseline_AutoLevel);
-  VTSessionSetProperty(_compressionSession, kVTCompressionPropertyKey_AverageBitRate, (__bridge CFTypeRef)@(width * height * 10));
-  VTSessionSetProperty(_compressionSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, (__bridge CFTypeRef)@(20));
-
-  VTCompressionSessionPrepareToEncodeFrames(_compressionSession);
-}
+RCT_EXPORT_METHOD(startEncode) {
+  [DMABuf setFrameChangeCallback:^{
+    void *buf = [DMABuf buf];
+    int width = [DMABuf width];
+    int height = [DMABuf height];

-
-
-    NSLog(@"Error encoding frame: %d", (int)status);
-    return;
+    if (!buf) {
+      return;
     }

-
-
-
-
-  Suuqencode *encoder = (__bridge Suuqencode *)outputCallbackRefCon;
-
-  bool isKeyFrame = !CFDictionaryContainsKey( (CFDictionaryRef)CFArrayGetValueAtIndex(CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true), 0), (const void *)kCMSampleAttachmentKey_NotSync);
+    dispatch_async(self.encodeQueue, ^{
+      if (!self.compressionSession) {
+        [self setupCompressionSessionWithWidth:width height:height];
+      }

-
-
-
-
-    size_t sparameterSetSize, sparameterSetCount;
-    CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 0, &sparameterSet, &sparameterSetSize, &sparameterSetCount, 0 );
+      CVPixelBufferRef pixelBuffer = NULL;
+      CVReturn status = CVPixelBufferCreateWithBytes(
+          kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA, buf,
+          width * 4, NULL, NULL, NULL, &pixelBuffer);

-
-
-
+      if (status != kCVReturnSuccess) {
+        NSLog(@"Failed to create CVPixelBuffer");
+        return;
+      }

-
-
+      CMTime presentationTimeStamp = CMTimeMake(self.frameCount++, 30);
+      VTEncodeInfoFlags flags;

-
-
-
+      VTCompressionSessionEncodeFrame(self.compressionSession, pixelBuffer,
+                                      presentationTimeStamp, kCMTimeInvalid,
+                                      NULL, NULL, &flags);
+      CVPixelBufferRelease(pixelBuffer);
+    });
+  }];
+}

-
-
-
-
+- (void)setupCompressionSessionWithWidth:(int)width height:(int)height {
+  VTCompressionSessionCreate(NULL, width, height, kCMVideoCodecType_H264, NULL,
+                             NULL, NULL, compressionOutputCallback,
+                             (__bridge void *)(self), &_compressionSession);
+
+  VTSessionSetProperty(_compressionSession, kVTCompressionPropertyKey_RealTime,
+                       kCFBooleanTrue);
+  VTSessionSetProperty(_compressionSession,
+                       kVTCompressionPropertyKey_ProfileLevel,
+                       kVTProfileLevel_H264_Baseline_AutoLevel);
+  VTSessionSetProperty(_compressionSession,
+                       kVTCompressionPropertyKey_AverageBitRate,
+                       (__bridge CFTypeRef) @(width * height * 10));
+  VTSessionSetProperty(_compressionSession,
+                       kVTCompressionPropertyKey_MaxKeyFrameInterval,
+                       (__bridge CFTypeRef) @(20));
+
+  VTCompressionSessionPrepareToEncodeFrames(_compressionSession);
+}

-
-
+void compressionOutputCallback(void *outputCallbackRefCon,
+                               void *sourceFrameRefCon, OSStatus status,
+                               VTEncodeInfoFlags infoFlags,
+                               CMSampleBufferRef sampleBuffer) {
+  if (status != noErr) {
+    NSLog(@"Error encoding frame: %d", (int)status);
+    return;
+  }
+
+  if (!CMSampleBufferDataIsReady(sampleBuffer)) {
+    return;
+  }
+
+  Suuqencode *encoder = (__bridge Suuqencode *)outputCallbackRefCon;
+
+  bool isKeyFrame = !CFDictionaryContainsKey(
+      (CFDictionaryRef)CFArrayGetValueAtIndex(
+          CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true), 0),
+      (const void *)kCMSampleAttachmentKey_NotSync);
+
+  if (isKeyFrame) {
+    CMFormatDescriptionRef format =
+        CMSampleBufferGetFormatDescription(sampleBuffer);
+    const uint8_t *sparameterSet;
+    size_t sparameterSetSize, sparameterSetCount;
+    CMVideoFormatDescriptionGetH264ParameterSetAtIndex(
+        format, 0, &sparameterSet, &sparameterSetSize, &sparameterSetCount, 0);
+
+    const uint8_t *pparameterSet;
+    size_t pparameterSetSize, pparameterSetCount;
+    CMVideoFormatDescriptionGetH264ParameterSetAtIndex(
+        format, 1, &pparameterSet, &pparameterSetSize, &pparameterSetCount, 0);
+
+    NSData *sps = [NSData dataWithBytes:sparameterSet length:sparameterSetSize];
+    NSData *pps = [NSData dataWithBytes:pparameterSet length:pparameterSetSize];
+
+    [encoder sendEncodedData:sps];
+    [encoder sendEncodedData:pps];
+  }
+
+  CMBlockBufferRef dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
+  size_t length, totalLength;
+  char *dataPointer;
+  CMBlockBufferGetDataPointer(dataBuffer, 0, &length, &totalLength,
+                              &dataPointer);
+
+  NSData *naluData = [NSData dataWithBytes:dataPointer length:length];
+  [encoder sendEncodedData:naluData];
 }

 - (void)sendEncodedData:(NSData *)data {
-
-
+  NSString *base64Encoded = [data base64EncodedStringWithOptions:0];
+  [self sendEventWithName:@"onEncodedData" body:base64Encoded];
 }

-- (NSArray<NSString *> *)supportedEvents
-
-  return @[@"onEncodedData"];
+- (NSArray<NSString *> *)supportedEvents {
+  return @[ @"onEncodedData" ];
 }

-+ (BOOL)requiresMainQueueSetup
-
-  return NO;
++ (BOOL)requiresMainQueueSetup {
+  return NO;
 }

 @end
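The JavaScript-facing surface added in this version is the exported startEncode method plus the onEncodedData event, whose body is a base64 string carrying either the SPS/PPS parameter sets (emitted on keyframes) or a frame's H.264 NAL data. A minimal consumer sketch in TypeScript follows; it is not part of the package, and it assumes the native module is reached directly via NativeModules.Suuqencode rather than through a JS wrapper the package may ship.

import { NativeEventEmitter, NativeModules } from 'react-native';

// Assumption: the module compiled from Suuqencode.mm is exposed as
// NativeModules.Suuqencode; a published JS wrapper may name it differently.
const { Suuqencode } = NativeModules;
const emitter = new NativeEventEmitter(Suuqencode);

// Each event body is a base64 string: SPS/PPS parameter sets arrive with
// keyframes, followed by the encoded frame's NAL data.
const subscription = emitter.addListener('onEncodedData', (base64Chunk: string) => {
  // Forward the chunk to a muxer, socket, or decoder of your choice.
  console.log('encoded chunk, base64 length:', base64Chunk.length);
});

// Kicks off the native side: registers the DMABuf frame-change callback and
// pushes frames through a VTCompressionSession (H.264 baseline, real-time).
Suuqencode.startEncode();

// Later, to stop listening:
// subscription.remove();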
package/package.json
CHANGED