@byteplus/react-native-live-pull 1.1.3-rc.3 → 1.2.0-rc.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/android/build.gradle +2 -2
- package/android/src/main/java/com/volcengine/velive/rn/pull/ClassHelper.java +15 -0
- package/android/src/main/java/com/volcengine/velive/rn/pull/NativeVariableManager.java +1 -1
- package/android/src/main/java/com/volcengine/velive/rn/pull/VolcLiveModule.java +2 -0
- package/android/src/main/java/com/volcengine/velive/rn/pull/autogen/MethodSignature.java +66 -77
- package/android/src/main/java/com/volcengine/velive/rn/pull/pictureInpicture/FloatingWindowService.java +79 -15
- package/ios/VeLivePlayerMultiObserver.h +3 -2
- package/ios/VeLivePlayerMultiObserver.m +2 -2
- package/ios/include/react-native-velive-pull.modulemap +4 -0
- package/ios/pictureInpicture/PictureInPictureManager.h +21 -1
- package/ios/pictureInpicture/PictureInPictureManager.m +254 -141
- package/lib/commonjs/index.js +4455 -1648
- package/lib/module/index.js +4455 -1648
- package/lib/typescript/codegen/android/api.d.ts +51 -43
- package/lib/typescript/codegen/android/callback.d.ts +166 -0
- package/lib/typescript/codegen/android/errorcode.d.ts +123 -2
- package/lib/typescript/codegen/android/keytype.d.ts +501 -13
- package/lib/typescript/codegen/ios/api.d.ts +232 -16
- package/lib/typescript/codegen/ios/callback.d.ts +137 -1
- package/lib/typescript/codegen/ios/errorcode.d.ts +104 -0
- package/lib/typescript/codegen/ios/keytype.d.ts +460 -6
- package/lib/typescript/codegen/pack/api.d.ts +66 -57
- package/lib/typescript/codegen/pack/callback.d.ts +45 -21
- package/lib/typescript/codegen/pack/errorcode.d.ts +45 -15
- package/lib/typescript/codegen/pack/keytype.d.ts +388 -114
- package/lib/typescript/core/api.d.ts +13 -0
- package/lib/typescript/core/keytype.d.ts +18 -2
- package/package.json +1 -1
- package/react-native-velive-pull.podspec +12 -3
- package/ios/pictureInpicture/VeLivePictureInPictureController.h +0 -207
- package/ios/pictureInpicture/VeLivePictureInPictureController.m +0 -3393
|
@@ -1,3393 +0,0 @@
|
|
|
1
|
-
|
|
2
|
-
//
|
|
3
|
-
// VeLivePictureInPictureController.m
|
|
4
|
-
// VELLiveDemo
|
|
5
|
-
//
|
|
6
|
-
// Created by Bytedance on 2023-06-09
|
|
7
|
-
//
|
|
8
|
-
|
|
9
|
-
#import "VeLivePictureInPictureController.h"
|
|
10
|
-
#import <VideoToolbox/VideoToolbox.h>
|
|
11
|
-
#import <objc/runtime.h>
|
|
12
|
-
#define LOG_TAG @"VeLivePictureInPictureController"
|
|
13
|
-
|
|
14
|
-
FOUNDATION_EXTERN_INLINE void vel_sync_in_main_queue(dispatch_block_t block) {
|
|
15
|
-
if (NSThread.isMainThread) {
|
|
16
|
-
block();
|
|
17
|
-
} else {
|
|
18
|
-
dispatch_sync(dispatch_get_main_queue(), block);
|
|
19
|
-
}
|
|
20
|
-
}
|
|
21
|
-
|
|
22
|
-
@interface VELPipPixelBufferTool : NSObject
|
|
23
|
-
@property(nonatomic, assign) CVPixelBufferPoolRef pixelBufferPool;
|
|
24
|
-
@property(nonatomic, strong) NSRecursiveLock *lock;
|
|
25
|
-
@property(nonatomic, strong)
|
|
26
|
-
NSMutableDictionary<NSString *, NSValue *> *pixelBufferPoolDict;
|
|
27
|
-
- (CVPixelBufferRef)createDarkFrameWithFrameSize:(CGSize)frameSize;
|
|
28
|
-
- (CVPixelBufferRef)createPixelBuffer:(OSType)type
|
|
29
|
-
heigth:(int)height
|
|
30
|
-
width:(int)width;
|
|
31
|
-
- (CMSampleBufferRef)sampleBufferFromPixelBuffer:(CVPixelBufferRef)pixelBuffer
|
|
32
|
-
fps:(CMTime)fps
|
|
33
|
-
pts:(CMTime)pts;
|
|
34
|
-
@end
|
|
35
|
-
|
|
36
|
-
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
37
|
-
// MARK: - interface VELPipAVPlayer
|
|
38
|
-
@interface VELPipAVPlayer : AVPlayer
|
|
39
|
-
- (void)velPause;
|
|
40
|
-
@end
|
|
41
|
-
|
|
42
|
-
// MARK: - interface UIView (VELPipAutoSupport)
|
|
43
|
-
@interface UIView (VELPipAutoSupport)
|
|
44
|
-
@property(nonatomic, assign) BOOL vel_autoHideSamplePlayerView;
|
|
45
|
-
@property(nonatomic, assign) BOOL vel_autoHideControlsView;
|
|
46
|
-
@property(nonatomic, weak) UIViewController *vel_viewController;
|
|
47
|
-
@end
|
|
48
|
-
|
|
49
|
-
// MARK: - interface VELPipAVPlayerView
|
|
50
|
-
@interface VELPipAVPlayerView : UIView
|
|
51
|
-
@property(nonatomic, weak, readonly) AVPlayerLayer *playerLayer;
|
|
52
|
-
@end
|
|
53
|
-
|
|
54
|
-
// MARK: interface VELPipLoopPlayer
|
|
55
|
-
@interface VELPipLoopPlayer : NSObject
|
|
56
|
-
@property(nonatomic, strong, class, readonly, nullable) VELPipAVPlayer *player;
|
|
57
|
-
@property(nonatomic, assign, class) CGSize videoSize;
|
|
58
|
-
@property(nonatomic, assign, class, readonly) AVPlayerStatus status;
|
|
59
|
-
@property(nonatomic, copy, class) void (^statusBlock)
|
|
60
|
-
(AVPlayerStatus status, NSError *_Nullable error);
|
|
61
|
-
+ (void)initLoopPlayer;
|
|
62
|
-
+ (void)playWithNotifyStatus:(BOOL)notify;
|
|
63
|
-
+ (void)pause;
|
|
64
|
-
+ (void)stop;
|
|
65
|
-
@end
|
|
66
|
-
#endif // end of VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
67
|
-
|
|
68
|
-
// MARK: - UIViewController (VELPipTopViewController)
|
|
69
|
-
@interface UIViewController (VELPipTopViewController)
|
|
70
|
-
+ (UIViewController *)vel_pipTopViewController;
|
|
71
|
-
@end
|
|
72
|
-
|
|
73
|
-
// MARK: - interface VELPipSampleBufferView
|
|
74
|
-
@interface VELPipSampleBufferView : UIView
|
|
75
|
-
@property(nonatomic, strong, readonly) AVSampleBufferDisplayLayer *sampleLayer;
|
|
76
|
-
@property(nonatomic, copy) AVLayerVideoGravity videoGravity;
|
|
77
|
-
- (void)rebuildSamplelayer;
|
|
78
|
-
@end
|
|
79
|
-
static BOOL vel_pip_is_started = NO;
|
|
80
|
-
// MARK: interface VeLivePictureInPictureController
|
|
81
|
-
@interface VeLivePictureInPictureController ()
|
|
82
|
-
@property(nonatomic, strong) UIViewController *storeContentController;
|
|
83
|
-
@property(nonatomic, strong) UIView *contentOriginSuperView;
|
|
84
|
-
@property(nonatomic, assign) CGRect contentOriginFrame;
|
|
85
|
-
@property(atomic, assign, readwrite) VeLivePictureInPictureState state;
|
|
86
|
-
@property(nonatomic, strong, readwrite)
|
|
87
|
-
AVPictureInPictureController *pipController;
|
|
88
|
-
@property(nonatomic, copy) VELPipPrepareCompletionBlock prepareCompletionBlock;
|
|
89
|
-
@property(atomic, assign, readwrite) VeLivePictureInPictureType type;
|
|
90
|
-
@property(nonatomic, strong, readwrite) UIView *pipHolderView;
|
|
91
|
-
@property(nonatomic, strong) UIViewController *pipWrapperViewController;
|
|
92
|
-
@property(nonatomic, strong) UIView *pipWrapperView;
|
|
93
|
-
@property(nonatomic, assign) VeLivePictureInPictureType originPipType;
|
|
94
|
-
@property(nonatomic, assign) BOOL needStartWhenPrepared;
|
|
95
|
-
@property(nonatomic, assign) BOOL isRestoreUserInterface;
|
|
96
|
-
@property(nonatomic, assign) BOOL isCalledRestoreUserInterface;
|
|
97
|
-
@property(nonatomic, strong) UIImageView *pipHolderImageView;
|
|
98
|
-
@end
|
|
99
|
-
static VeLivePictureInPictureController *vel_instance_pip = nil;
|
|
100
|
-
|
|
101
|
-
// MARK: interface VeLivePictureInPictureController (VELPipNotify)
|
|
102
|
-
@interface VeLivePictureInPictureController (VELPipNotify)
|
|
103
|
-
- (void)notifyDeleteIsReady;
|
|
104
|
-
- (void)notifyDeleteWillStart;
|
|
105
|
-
- (void)notifyDeleteDidStart;
|
|
106
|
-
- (void)notifyDeleteWillStop;
|
|
107
|
-
- (void)notifyDeleteDidStop;
|
|
108
|
-
- (void)notifyDeleteRestoreUserInterfaceWithCompletion:
|
|
109
|
-
(void (^)(BOOL restored))completionHandler;
|
|
110
|
-
- (void)notifyDeleteWithError:(NSError *)error;
|
|
111
|
-
@end
|
|
112
|
-
|
|
113
|
-
// MARK: interface VeLivePictureInPictureController (VELPipContentSource)
|
|
114
|
-
@class VELSampleBufferPlaybackDelegate;
|
|
115
|
-
@class VELPipContentSourceProvider;
|
|
116
|
-
@interface VeLivePictureInPictureController (VELPipContentSource)
|
|
117
|
-
@property(nonatomic, strong)
|
|
118
|
-
VELSampleBufferPlaybackDelegate *playbackDelegate API_AVAILABLE(
|
|
119
|
-
ios(15.0), tvos(15.0), macos(12.0)) API_UNAVAILABLE(watchos);
|
|
120
|
-
@property(nonatomic, strong) VELPipContentSourceProvider *sourceProvider;
|
|
121
|
-
- (void)initContentSourcePictureInPicture;
|
|
122
|
-
- (void)releaseContentSourcePictureInPicture;
|
|
123
|
-
- (void)vel_setContentSourceVideoSize:(CGSize)videoSize;
|
|
124
|
-
- (void)setSampleLayerVideoGravity:(AVLayerVideoGravity)videoGravity;
|
|
125
|
-
- (void)vel_enqueuePixelBuffer:(CVPixelBufferRef)pixelBuffer
|
|
126
|
-
videoRect:(CGRect)videoRect;
|
|
127
|
-
- (void)vel_enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer
|
|
128
|
-
videoRect:(CGRect)videoRect;
|
|
129
|
-
@end
|
|
130
|
-
|
|
131
|
-
// MARK: interface VeLivePictureInPictureController
|
|
132
|
-
// (VELPipContentViewPlayerLayer)
|
|
133
|
-
@interface VeLivePictureInPictureController (VELPipContentViewPlayerLayer)
|
|
134
|
-
@property(nonatomic, weak) AVPlayerLayer *contentViewPlayerLayer;
|
|
135
|
-
@property(nonatomic, weak) UIView *contentViewPlayerLayerView;
|
|
136
|
-
- (void)setContentViewPlayerLayerVideoGravity:(AVLayerVideoGravity)videoGravity;
|
|
137
|
-
- (void)initContentViewPlayerLayerPictureInPicture;
|
|
138
|
-
- (void)releaseContentViewPlayerLayerPictureInPicture;
|
|
139
|
-
@end
|
|
140
|
-
|
|
141
|
-
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
142
|
-
// MARK: interface VeLivePictureInPictureController
|
|
143
|
-
// (VELPipContentSourceAVPlayer)
|
|
144
|
-
@interface VeLivePictureInPictureController (VELPipContentSourceAVPlayer)
|
|
145
|
-
@property(nonatomic, strong) VELPipAVPlayerView *contentSourcePlayerView;
|
|
146
|
-
- (void)setContentSourceAVPlayerLayerVideoGravity:
|
|
147
|
-
(AVLayerVideoGravity)videoGravity;
|
|
148
|
-
- (void)initContentSourceAVPlayerPictureInPicture;
|
|
149
|
-
- (void)releaseContentSourceAVPlayerPictureInPicture;
|
|
150
|
-
@end
|
|
151
|
-
|
|
152
|
-
// MARK: interface VeLivePictureInPictureController (VELPipAVPlayer)
|
|
153
|
-
@interface VeLivePictureInPictureController (VELPipAVPlayer)
|
|
154
|
-
@property(nonatomic, strong) VELPipAVPlayerView *playerView;
|
|
155
|
-
- (void)setAVPlayerLayerVideoGravity:(AVLayerVideoGravity)videoGravity;
|
|
156
|
-
- (void)initAVPlayerPictureInPicture;
|
|
157
|
-
- (void)releaseAVPlayerPictureInPicture;
|
|
158
|
-
@end
|
|
159
|
-
|
|
160
|
-
// MARK: interface VeLivePictureInPictureController
|
|
161
|
-
// (VELPipAVPlayerViewController)
|
|
162
|
-
@protocol VELAVPictureInPictureProtocol;
|
|
163
|
-
@interface VeLivePictureInPictureController (VELPipAVPlayerViewController)
|
|
164
|
-
@property(nonatomic, strong)
|
|
165
|
-
AVPlayerViewController<VELAVPictureInPictureProtocol> *playerViewController;
|
|
166
|
-
- (void)initAVPlayerViewControllerPictureInPicture;
|
|
167
|
-
- (void)releaseAVPlayerViewControllerPictureInPicture;
|
|
168
|
-
- (void)velPlayerVCStartPictureInPicture;
|
|
169
|
-
- (void)velPlayerVCStopPictureInPicture;
|
|
170
|
-
- (BOOL)velPlayerVCCanStartPictureInPicture;
|
|
171
|
-
- (void)setAVPlayerVCVideoGravity:(AVLayerVideoGravity)videoGravity;
|
|
172
|
-
- (void)velPlayerVCSetCanStartPictureInPictureAutomaticallyFromInline:
|
|
173
|
-
(BOOL)canStart;
|
|
174
|
-
@end
|
|
175
|
-
#endif // VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
176
|
-
|
|
177
|
-
// MARK: interface VeLivePictureInPictureController
|
|
178
|
-
// (VeLivePictureInPictureDelegate)
|
|
179
|
-
@interface VeLivePictureInPictureController (VeLivePictureInPictureDelegate) <
|
|
180
|
-
AVPictureInPictureControllerDelegate, AVPlayerViewControllerDelegate>
|
|
181
|
-
@end
|
|
182
|
-
|
|
183
|
-
// MARK: implementation VeLivePictureInPictureController
|
|
184
|
-
@implementation VeLivePictureInPictureController
|
|
185
|
-
- (instancetype)init {
|
|
186
|
-
return [self initWithType:(VeLivePictureInPictureTypeAuto)];
|
|
187
|
-
}
|
|
188
|
-
|
|
189
|
-
- (instancetype)initWithType:(VeLivePictureInPictureType)type {
|
|
190
|
-
if (self = [super init]) {
|
|
191
|
-
[self _InitWithType:type contentView:nil];
|
|
192
|
-
}
|
|
193
|
-
return self;
|
|
194
|
-
}
|
|
195
|
-
|
|
196
|
-
- (instancetype)initWithContentView:(UIView *)contentView {
|
|
197
|
-
if (self = [super init]) {
|
|
198
|
-
[self _InitWithType:VeLivePictureInPictureTypeAuto contentView:contentView];
|
|
199
|
-
self.contentView = contentView;
|
|
200
|
-
}
|
|
201
|
-
return self;
|
|
202
|
-
}
|
|
203
|
-
|
|
204
|
-
- (instancetype)initWithType:(VeLivePictureInPictureType)type
|
|
205
|
-
contentView:(nullable UIView *)contentView {
|
|
206
|
-
if (self = [super init]) {
|
|
207
|
-
[self _InitWithType:(type) contentView:contentView];
|
|
208
|
-
}
|
|
209
|
-
return self;
|
|
210
|
-
}
|
|
211
|
-
|
|
212
|
-
- (void)_InitWithType:(VeLivePictureInPictureType)type
|
|
213
|
-
contentView:(UIView *)contentView {
|
|
214
|
-
self.fps = 15;
|
|
215
|
-
self.videoSourceCount = 1;
|
|
216
|
-
self.type = type;
|
|
217
|
-
self.contentView = contentView;
|
|
218
|
-
self.originPipType = type;
|
|
219
|
-
self.videoGravity = AVLayerVideoGravityResize;
|
|
220
|
-
self.autoHideContentController = NO;
|
|
221
|
-
if (@available(iOS 14.2, *)) {
|
|
222
|
-
self.canStartPictureInPictureAutomaticallyFromInline = YES;
|
|
223
|
-
}
|
|
224
|
-
}
|
|
225
|
-
|
|
226
|
-
- (void)dealloc {
|
|
227
|
-
VeLivePipLog(LOG_TAG, @"pip dealloc %@", @(self.type));
|
|
228
|
-
}
|
|
229
|
-
|
|
230
|
-
- (void)setPlaybackAudioSessionWithError:(NSError **)error {
|
|
231
|
-
/// NOTICE: 不能设置 options 为非 0 的参数,否则不能后台渲染
|
|
232
|
-
if ([AVAudioSession sharedInstance].category ==
|
|
233
|
-
AVAudioSessionCategoryPlayback &&
|
|
234
|
-
[AVAudioSession sharedInstance].category ==
|
|
235
|
-
AVAudioSessionCategoryPlayAndRecord) {
|
|
236
|
-
return;
|
|
237
|
-
}
|
|
238
|
-
if (@available(iOS 10.0, *)) {
|
|
239
|
-
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback
|
|
240
|
-
mode:AVAudioSessionModeMoviePlayback
|
|
241
|
-
options:(0)error:nil];
|
|
242
|
-
} else if (@available(iOS 11.0, *)) {
|
|
243
|
-
[[AVAudioSession sharedInstance]
|
|
244
|
-
setCategory:AVAudioSessionCategoryPlayback
|
|
245
|
-
mode:AVAudioSessionModeMoviePlayback
|
|
246
|
-
routeSharingPolicy:(AVAudioSessionRouteSharingPolicyDefault)options
|
|
247
|
-
:(0)error:nil];
|
|
248
|
-
} else {
|
|
249
|
-
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback
|
|
250
|
-
withOptions:(0)error:nil];
|
|
251
|
-
}
|
|
252
|
-
[[AVAudioSession sharedInstance] setActive:YES error:error];
|
|
253
|
-
}
|
|
254
|
-
|
|
255
|
-
// {zh} 准备 {en} prepare
|
|
256
|
-
- (void)prepareWithCompletion:(VELPipPrepareCompletionBlock)completion {
|
|
257
|
-
if (self.state != VeLivePictureInPictureStateIde &&
|
|
258
|
-
self.state != VeLivePictureInPictureStateError) {
|
|
259
|
-
if (completion) {
|
|
260
|
-
completion(self, nil);
|
|
261
|
-
}
|
|
262
|
-
VeLivePipLog(LOG_TAG, @"pip already prepared");
|
|
263
|
-
return;
|
|
264
|
-
}
|
|
265
|
-
// NSAssert(self.delegate != nil, @"delegate cannot be nil");
|
|
266
|
-
self.state = VeLivePictureInPictureStatePreparing;
|
|
267
|
-
self.isRestoreUserInterface = NO;
|
|
268
|
-
self.isCalledRestoreUserInterface = NO;
|
|
269
|
-
self.prepareCompletionBlock = completion;
|
|
270
|
-
{
|
|
271
|
-
// {zh} 这部分代码,可根据自身业务需求进行配置 {en} This part of the code
|
|
272
|
-
// can be configured according to its own business needs {zh}
|
|
273
|
-
// 例如是否需要和别的 App 同时播放音频 {en} For example, do you need to
|
|
274
|
-
// play audio with other apps at the same time?
|
|
275
|
-
NSError *error = nil;
|
|
276
|
-
[self setPlaybackAudioSessionWithError:&error];
|
|
277
|
-
if (error) {
|
|
278
|
-
VeLivePipLog(LOG_TAG, @"set audio session playback error %@", error);
|
|
279
|
-
[self notifyDeleteWithError:error];
|
|
280
|
-
return;
|
|
281
|
-
}
|
|
282
|
-
}
|
|
283
|
-
[self setupPictureInPictureController];
|
|
284
|
-
}
|
|
285
|
-
|
|
286
|
-
// {zh} 开启画中画 {en} Open picture-in-picture
|
|
287
|
-
- (void)startPictureInPicture {
|
|
288
|
-
self.needStartWhenPrepared = YES;
|
|
289
|
-
[self _startPictureInPicture];
|
|
290
|
-
}
|
|
291
|
-
|
|
292
|
-
- (void)_startPictureInPicture {
|
|
293
|
-
if (self.state == VeLivePictureInPictureStateRunning) {
|
|
294
|
-
return;
|
|
295
|
-
}
|
|
296
|
-
if (self.state == VeLivePictureInPictureStateError) {
|
|
297
|
-
self.state = VeLivePictureInPictureStateIde;
|
|
298
|
-
[self destroyPictureInPicture];
|
|
299
|
-
[self prepareWithCompletion:nil];
|
|
300
|
-
} else {
|
|
301
|
-
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
302
|
-
if (self.type != VeLivePictureInPictureTypeContentViewPlayerLayer) {
|
|
303
|
-
[VELPipLoopPlayer playWithNotifyStatus:NO];
|
|
304
|
-
}
|
|
305
|
-
if (self.type == VeLivePictureInPictureTypeAVPlayerViewController) {
|
|
306
|
-
[self velPlayerVCStartPictureInPicture];
|
|
307
|
-
} else {
|
|
308
|
-
#endif // VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
309
|
-
[self pipControllerStartPictureInPicture];
|
|
310
|
-
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
311
|
-
}
|
|
312
|
-
#endif // VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
313
|
-
}
|
|
314
|
-
}
|
|
315
|
-
|
|
316
|
-
- (void)pipControllerStartPictureInPicture {
|
|
317
|
-
if (!self.canStartPictureInPicture) {
|
|
318
|
-
VeLivePipLog(LOG_TAG,
|
|
319
|
-
@"can not start pip state:%@ pipIsPossible:%@, check app has "
|
|
320
|
-
@"audio playing?",
|
|
321
|
-
@(self.state),
|
|
322
|
-
@(self.pipController.isPictureInPicturePossible));
|
|
323
|
-
[self
|
|
324
|
-
notifyDeleteWithError:
|
|
325
|
-
VeLivePipError(VeLivePictureInPictureErrorCodeNotPossible,
|
|
326
|
-
@"can not start pip, check app has audio playing?")];
|
|
327
|
-
return;
|
|
328
|
-
}
|
|
329
|
-
VeLivePipLog(LOG_TAG, @"call pip controller to start pip");
|
|
330
|
-
[self.pipController startPictureInPicture];
|
|
331
|
-
self.state = VeLivePictureInPictureStateRunning;
|
|
332
|
-
}
|
|
333
|
-
|
|
334
|
-
// {zh} 停止画中画 {en} Stop picture-in-picture
|
|
335
|
-
- (void)stopPictureInPicture {
|
|
336
|
-
self.needStartWhenPrepared = NO;
|
|
337
|
-
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
338
|
-
if (self.type == VeLivePictureInPictureTypeAVPlayerViewController) {
|
|
339
|
-
[self velPlayerVCStopPictureInPicture];
|
|
340
|
-
} else {
|
|
341
|
-
#endif // VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
342
|
-
[self _stopPictureInPicture];
|
|
343
|
-
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
344
|
-
}
|
|
345
|
-
#endif // VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
346
|
-
}
|
|
347
|
-
|
|
348
|
-
- (void)destroyPictureInPicture {
|
|
349
|
-
VeLivePipLog(LOG_TAG, @"destroy pip with type:%@", @(self.type));
|
|
350
|
-
switch (self.type) {
|
|
351
|
-
case VeLivePictureInPictureTypeContentSource:
|
|
352
|
-
[self releaseContentSourcePictureInPicture];
|
|
353
|
-
break;
|
|
354
|
-
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
355
|
-
case VeLivePictureInPictureTypeContentSourceAVPlayer:
|
|
356
|
-
[self releaseContentSourceAVPlayerPictureInPicture];
|
|
357
|
-
break;
|
|
358
|
-
case VeLivePictureInPictureTypeAVPlayer:
|
|
359
|
-
[self releaseAVPlayerPictureInPicture];
|
|
360
|
-
break;
|
|
361
|
-
case VeLivePictureInPictureTypeAVPlayerViewController:
|
|
362
|
-
[self releaseAVPlayerViewControllerPictureInPicture];
|
|
363
|
-
break;
|
|
364
|
-
#endif // VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
365
|
-
case VeLivePictureInPictureTypeContentViewPlayerLayer:
|
|
366
|
-
[self releaseContentViewPlayerLayerPictureInPicture];
|
|
367
|
-
break;
|
|
368
|
-
default:
|
|
369
|
-
break;
|
|
370
|
-
}
|
|
371
|
-
|
|
372
|
-
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
373
|
-
VeLivePipLog(LOG_TAG, @"destroy pip and player");
|
|
374
|
-
[VELPipLoopPlayer stop];
|
|
375
|
-
#endif
|
|
376
|
-
}
|
|
377
|
-
|
|
378
|
-
- (void)_stopPictureInPicture {
|
|
379
|
-
if (self.state != VeLivePictureInPictureStateRunning) {
|
|
380
|
-
VeLivePipLog(LOG_TAG, @"pip not running");
|
|
381
|
-
return;
|
|
382
|
-
}
|
|
383
|
-
if (self.pipController.isPictureInPictureActive) {
|
|
384
|
-
[self.pipController stopPictureInPicture];
|
|
385
|
-
self.state = VeLivePictureInPictureStateStopping;
|
|
386
|
-
VeLivePipLog(LOG_TAG, @"call pip controller to stop pip");
|
|
387
|
-
} else {
|
|
388
|
-
self.state = VeLivePictureInPictureStateIde;
|
|
389
|
-
VeLivePipLog(LOG_TAG, @"pip not active set state for ide");
|
|
390
|
-
}
|
|
391
|
-
if (@available(iOS 14.2, *)) {
|
|
392
|
-
[self setupAutomaticallyFromInLine];
|
|
393
|
-
}
|
|
394
|
-
}
|
|
395
|
-
|
|
396
|
-
+ (void)destroyPictureInPicture {
|
|
397
|
-
[vel_instance_pip destroyPictureInPicture];
|
|
398
|
-
}
|
|
399
|
-
|
|
400
|
-
+ (BOOL)isPictureInPictureSupported {
|
|
401
|
-
return [AVPictureInPictureController isPictureInPictureSupported];
|
|
402
|
-
}
|
|
403
|
-
|
|
404
|
-
+ (BOOL)isPictureInPictureStarted {
|
|
405
|
-
return vel_pip_is_started && vel_instance_pip != nil;
|
|
406
|
-
}
|
|
407
|
-
|
|
408
|
-
// MARK: - Private
|
|
409
|
-
- (void)autoSelectType {
|
|
410
|
-
if (self.type != VeLivePictureInPictureTypeAuto) {
|
|
411
|
-
return;
|
|
412
|
-
}
|
|
413
|
-
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
414
|
-
[self checkContentViewHasPlayerLayer];
|
|
415
|
-
if (@available(iOS 15.0, *)) {
|
|
416
|
-
self.type = VeLivePictureInPictureTypeContentSource;
|
|
417
|
-
} else {
|
|
418
|
-
self.type = VeLivePictureInPictureTypeAVPlayer;
|
|
419
|
-
}
|
|
420
|
-
#else
|
|
421
|
-
if (@available(iOS 15.0, *)) {
|
|
422
|
-
self.type = VeLivePictureInPictureTypeContentSource;
|
|
423
|
-
} else {
|
|
424
|
-
[self notifyDeleteWithError:VeLivePipError(
|
|
425
|
-
VeLivePictureInPictureErrorCodeNotPossible,
|
|
426
|
-
@"pip is not possible")];
|
|
427
|
-
}
|
|
428
|
-
#endif // VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
429
|
-
}
|
|
430
|
-
|
|
431
|
-
- (void)setupPictureInPictureController {
|
|
432
|
-
self.state = VeLivePictureInPictureStateIde;
|
|
433
|
-
if (self.contentView == nil) {
|
|
434
|
-
VeLivePipLog(LOG_TAG, @"content view is null");
|
|
435
|
-
[self notifyDeleteWithError:VeLivePipError(
|
|
436
|
-
VeLivePictureInPictureErrorCodeNotPossible,
|
|
437
|
-
@"no content view")];
|
|
438
|
-
return;
|
|
439
|
-
}
|
|
440
|
-
[self setupPictureInPictureWithType];
|
|
441
|
-
}
|
|
442
|
-
|
|
443
|
-
- (void)setupPictureInPictureWithType {
|
|
444
|
-
[self autoSelectType];
|
|
445
|
-
switch (self.type) {
|
|
446
|
-
case VeLivePictureInPictureTypeContentSource:
|
|
447
|
-
[self initContentSourcePictureInPicture];
|
|
448
|
-
break;
|
|
449
|
-
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
450
|
-
case VeLivePictureInPictureTypeContentSourceAVPlayer:
|
|
451
|
-
[self initContentSourceAVPlayerPictureInPicture];
|
|
452
|
-
break;
|
|
453
|
-
case VeLivePictureInPictureTypeAVPlayer:
|
|
454
|
-
[self initAVPlayerPictureInPicture];
|
|
455
|
-
break;
|
|
456
|
-
case VeLivePictureInPictureTypeAVPlayerViewController:
|
|
457
|
-
[self initAVPlayerViewControllerPictureInPicture];
|
|
458
|
-
break;
|
|
459
|
-
#endif // VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
460
|
-
case VeLivePictureInPictureTypeContentViewPlayerLayer:
|
|
461
|
-
[self initContentViewPlayerLayerPictureInPicture];
|
|
462
|
-
break;
|
|
463
|
-
default:
|
|
464
|
-
[self notifyDeleteWithError:VeLivePipError(
|
|
465
|
-
VeLivePictureInPictureErrorCodeNotPossible,
|
|
466
|
-
@"type unknown")];
|
|
467
|
-
break;
|
|
468
|
-
}
|
|
469
|
-
[self setupVideoGravity];
|
|
470
|
-
}
|
|
471
|
-
|
|
472
|
-
- (void)setupPipHolderView {
|
|
473
|
-
if (self.type == VeLivePictureInPictureTypeContentViewPlayerLayer) {
|
|
474
|
-
return;
|
|
475
|
-
}
|
|
476
|
-
if (self.contentView == nil) {
|
|
477
|
-
VeLivePipLog(LOG_TAG, @"content view is null");
|
|
478
|
-
[self notifyDeleteWithError:VeLivePipError(
|
|
479
|
-
VeLivePictureInPictureErrorCodeNotPossible,
|
|
480
|
-
@"no content view")];
|
|
481
|
-
return;
|
|
482
|
-
}
|
|
483
|
-
if (self.pipHolderView == nil || self.contentOriginSuperView == nil) {
|
|
484
|
-
return;
|
|
485
|
-
}
|
|
486
|
-
[self setupVideoSizeHolders];
|
|
487
|
-
self.pipHolderView.alpha = 1;
|
|
488
|
-
self.pipHolderView.frame = self.videoFrame;
|
|
489
|
-
if (self.pipHolderView.superview != self.contentOriginSuperView) {
|
|
490
|
-
[self.contentOriginSuperView insertSubview:self.pipHolderView
|
|
491
|
-
belowSubview:self.contentView];
|
|
492
|
-
}
|
|
493
|
-
}
|
|
494
|
-
|
|
495
|
-
- (void)setupVideoSizeHolders {
|
|
496
|
-
if (self.type == VeLivePictureInPictureTypeContentViewPlayerLayer) {
|
|
497
|
-
return;
|
|
498
|
-
}
|
|
499
|
-
if (CGRectEqualToRect(_videoFrame, CGRectZero)) {
|
|
500
|
-
return;
|
|
501
|
-
}
|
|
502
|
-
[self vel_setContentSourceVideoSize:self.videoSize];
|
|
503
|
-
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
504
|
-
[VELPipLoopPlayer setVideoSize:self.videoSize];
|
|
505
|
-
#endif
|
|
506
|
-
}
|
|
507
|
-
|
|
508
|
-
- (void)storeContentViewOriginIfNeed {
|
|
509
|
-
if (self.contentView.superview == self.contentOriginSuperView) {
|
|
510
|
-
self.contentOriginFrame = self.contentView.frame;
|
|
511
|
-
[self setupPipHolderView];
|
|
512
|
-
}
|
|
513
|
-
|
|
514
|
-
if (CGRectEqualToRect(CGRectZero, self.videoFrame)) {
|
|
515
|
-
self.videoFrame = self.contentOriginFrame;
|
|
516
|
-
}
|
|
517
|
-
}
|
|
518
|
-
|
|
519
|
-
- (void)setupVideoGravity {
|
|
520
|
-
if (self.state == VeLivePictureInPictureStateIde) {
|
|
521
|
-
return;
|
|
522
|
-
}
|
|
523
|
-
switch (self.type) {
|
|
524
|
-
case VeLivePictureInPictureTypeContentSource:
|
|
525
|
-
[self setSampleLayerVideoGravity:self.videoGravity];
|
|
526
|
-
break;
|
|
527
|
-
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
528
|
-
case VeLivePictureInPictureTypeContentSourceAVPlayer:
|
|
529
|
-
[self setContentSourceAVPlayerLayerVideoGravity:self.videoGravity];
|
|
530
|
-
break;
|
|
531
|
-
case VeLivePictureInPictureTypeAVPlayer:
|
|
532
|
-
[self setAVPlayerVCVideoGravity:self.videoGravity];
|
|
533
|
-
break;
|
|
534
|
-
case VeLivePictureInPictureTypeAVPlayerViewController:
|
|
535
|
-
[self setAVPlayerVCVideoGravity:self.videoGravity];
|
|
536
|
-
break;
|
|
537
|
-
#endif // VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
538
|
-
case VeLivePictureInPictureTypeContentViewPlayerLayer:
|
|
539
|
-
[self setContentViewPlayerLayerVideoGravity:self.videoGravity];
|
|
540
|
-
break;
|
|
541
|
-
default:
|
|
542
|
-
[self notifyDeleteWithError:VeLivePipError(
|
|
543
|
-
VeLivePictureInPictureErrorCodeNotPossible,
|
|
544
|
-
@"type unknown")];
|
|
545
|
-
break;
|
|
546
|
-
}
|
|
547
|
-
}
|
|
548
|
-
|
|
549
|
-
- (void)setupAutomaticallyFromInLine API_AVAILABLE(ios(14.2)) {
|
|
550
|
-
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
551
|
-
if (self.type == VeLivePictureInPictureTypeAVPlayerViewController) {
|
|
552
|
-
[self velPlayerVCSetCanStartPictureInPictureAutomaticallyFromInline:
|
|
553
|
-
self.canStartPictureInPictureAutomaticallyFromInline];
|
|
554
|
-
} else {
|
|
555
|
-
#endif // VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
556
|
-
self.pipController.canStartPictureInPictureAutomaticallyFromInline =
|
|
557
|
-
self.canStartPictureInPictureAutomaticallyFromInline;
|
|
558
|
-
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
559
|
-
}
|
|
560
|
-
#endif
|
|
561
|
-
}
|
|
562
|
-
|
|
563
|
-
- (void)checkContentViewHasPlayerLayer {
|
|
564
|
-
UIView *layerView = nil;
|
|
565
|
-
self.contentViewPlayerLayer = [self findPlayerLayerInView:self.contentView
|
|
566
|
-
layerView:&layerView];
|
|
567
|
-
self.contentViewPlayerLayerView = layerView;
|
|
568
|
-
if (self.contentViewPlayerLayer != nil) {
|
|
569
|
-
BOOL needStart = NO;
|
|
570
|
-
if (self.state == VeLivePictureInPictureStateRunning ||
|
|
571
|
-
self.state == VeLivePictureInPictureStatePreparing) {
|
|
572
|
-
[self stopPictureInPicture];
|
|
573
|
-
[self destroyPictureInPicture];
|
|
574
|
-
needStart = YES;
|
|
575
|
-
}
|
|
576
|
-
self.type = VeLivePictureInPictureTypeContentViewPlayerLayer;
|
|
577
|
-
if (needStart) {
|
|
578
|
-
[self startPictureInPicture];
|
|
579
|
-
}
|
|
580
|
-
}
|
|
581
|
-
}
|
|
582
|
-
- (AVPlayerLayer *)findPlayerLayerInView:(UIView *)view
|
|
583
|
-
layerView:
|
|
584
|
-
(UIView *_Nullable *_Nullable)layerView {
|
|
585
|
-
AVPlayerLayer *layer = (AVPlayerLayer *)view.layer;
|
|
586
|
-
if ([layer isKindOfClass:AVPlayerLayer.class]) {
|
|
587
|
-
if (layerView != NULL) {
|
|
588
|
-
*layerView = view;
|
|
589
|
-
}
|
|
590
|
-
return (AVPlayerLayer *)layer;
|
|
591
|
-
}
|
|
592
|
-
|
|
593
|
-
for (CALayer *l in layer.sublayers) {
|
|
594
|
-
AVPlayerLayer *playerLayer = [self findPlayerLayerInLayer:l];
|
|
595
|
-
if (playerLayer != nil) {
|
|
596
|
-
if (layerView != NULL) {
|
|
597
|
-
*layerView = view;
|
|
598
|
-
}
|
|
599
|
-
return playerLayer;
|
|
600
|
-
}
|
|
601
|
-
}
|
|
602
|
-
|
|
603
|
-
for (UIView *v in view.subviews) {
|
|
604
|
-
AVPlayerLayer *layer = [self findPlayerLayerInView:v layerView:layerView];
|
|
605
|
-
if (layer != nil) {
|
|
606
|
-
return layer;
|
|
607
|
-
}
|
|
608
|
-
}
|
|
609
|
-
return nil;
|
|
610
|
-
}
|
|
611
|
-
|
|
612
|
-
- (AVPlayerLayer *)findPlayerLayerInLayer:(CALayer *)layer {
|
|
613
|
-
if ([layer isKindOfClass:AVPlayerLayer.class]) {
|
|
614
|
-
return (AVPlayerLayer *)layer;
|
|
615
|
-
}
|
|
616
|
-
for (CALayer *l in layer.sublayers) {
|
|
617
|
-
AVPlayerLayer *layer = [self findPlayerLayerInLayer:l];
|
|
618
|
-
if (layer != nil) {
|
|
619
|
-
return layer;
|
|
620
|
-
}
|
|
621
|
-
}
|
|
622
|
-
return nil;
|
|
623
|
-
}
|
|
624
|
-
|
|
625
|
-
// MARK: - Setter
|
|
626
|
-
- (void)setContentView:(UIView *)contentView {
|
|
627
|
-
if (_contentView != contentView ||
|
|
628
|
-
contentView.superview != _contentView.superview) {
|
|
629
|
-
_contentView = contentView;
|
|
630
|
-
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
631
|
-
[self checkContentViewHasPlayerLayer];
|
|
632
|
-
#endif
|
|
633
|
-
self.contentOriginSuperView = self.contentView.superview;
|
|
634
|
-
self.contentOriginFrame = self.contentView.frame;
|
|
635
|
-
if (CGRectEqualToRect(CGRectZero, self.contentOriginFrame)) {
|
|
636
|
-
self.contentOriginFrame =
|
|
637
|
-
CGRectMake(CGRectGetMidX(UIScreen.mainScreen.bounds) - 120,
|
|
638
|
-
CGRectGetMinY(UIScreen.mainScreen.bounds) - 90, 240, 180);
|
|
639
|
-
}
|
|
640
|
-
[self setVideoFrame:self.contentOriginFrame];
|
|
641
|
-
[self setupPipHolderView];
|
|
642
|
-
}
|
|
643
|
-
}
|
|
644
|
-
|
|
645
|
-
- (void)setVideoFrame:(CGRect)videoFrame {
|
|
646
|
-
if (!CGRectEqualToRect(_videoFrame, videoFrame)) {
|
|
647
|
-
VeLivePipLog(LOG_TAG, @"set videoFrame %@", NSStringFromCGRect(videoFrame));
|
|
648
|
-
_videoFrame = videoFrame;
|
|
649
|
-
if (CGSizeEqualToSize(_videoSize, CGSizeZero)) {
|
|
650
|
-
_videoSize = videoFrame.size;
|
|
651
|
-
}
|
|
652
|
-
[self setupPipHolderView];
|
|
653
|
-
}
|
|
654
|
-
}
|
|
655
|
-
|
|
656
|
-
- (void)setVideoSize:(CGSize)videoSize {
|
|
657
|
-
if (!CGSizeEqualToSize(_videoSize, videoSize)) {
|
|
658
|
-
VeLivePipLog(LOG_TAG, @"set video size %@", NSStringFromCGSize(videoSize));
|
|
659
|
-
_videoSize = videoSize;
|
|
660
|
-
[self setupVideoSizeHolders];
|
|
661
|
-
}
|
|
662
|
-
}
|
|
663
|
-
|
|
664
|
-
- (void)setVideoGravity:(AVLayerVideoGravity)videoGravity {
|
|
665
|
-
if (![_videoGravity isEqualToString:videoGravity]) {
|
|
666
|
-
_videoGravity = videoGravity.copy;
|
|
667
|
-
[self setupVideoGravity];
|
|
668
|
-
}
|
|
669
|
-
}
|
|
670
|
-
|
|
671
|
-
- (void)setCanStartPictureInPictureAutomaticallyFromInline:
|
|
672
|
-
(BOOL)canStartPictureInPictureAutomaticallyFromInline
|
|
673
|
-
API_AVAILABLE(ios(14.2)) {
|
|
674
|
-
if (_canStartPictureInPictureAutomaticallyFromInline !=
|
|
675
|
-
canStartPictureInPictureAutomaticallyFromInline) {
|
|
676
|
-
_canStartPictureInPictureAutomaticallyFromInline =
|
|
677
|
-
canStartPictureInPictureAutomaticallyFromInline;
|
|
678
|
-
[self setupAutomaticallyFromInLine];
|
|
679
|
-
}
|
|
680
|
-
}
|
|
681
|
-
|
|
682
|
-
/// Stores the off-screen holder view that hosts the system PiP layer and
/// re-runs holder setup when the view instance changes.
- (void)setPipHolderView:(UIView *)pipHolderView {
  if (_pipHolderView == pipHolderView) {
    return;
  }
  _pipHolderView = pipHolderView;
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
  // Legacy (< iOS 15) path hides system playback controls on the holder.
  _pipHolderView.vel_autoHideControlsView = YES;
#endif
  [self setupPipHolderView];
}
|
|
691
|
-
|
|
692
|
-
// MARK: - Setter
|
|
693
|
-
// MARK: - Setter
/// Stores the AVPictureInPictureController and applies one-time configuration:
/// linear playback, hidden controls (private "controlsStyle" key), and the
/// inline auto-start flag.
- (void)setPipController:(AVPictureInPictureController *)pipController {
  if (_pipController != pipController) {
    _pipController = pipController;
    if (@available(iOS 14.0, *)) {
      pipController.requiresLinearPlayback = YES;
      // "controlsStyle" is a private KVC key; an OS update could remove it and
      // make setValue:forKey: throw NSUnknownKeyException. Guard it exactly
      // like the below-iOS-15 path below already does.
      @try {
        [pipController setValue:[NSNumber numberWithInt:1]
                         forKey:@"controlsStyle"];
      } @catch (NSException *exception) {
        // Best effort only — losing the custom controls style is acceptable.
      }
    }
    if (@available(iOS 14.2, *)) {
      pipController.canStartPictureInPictureAutomaticallyFromInline =
          self.canStartPictureInPictureAutomaticallyFromInline;
    }
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
    @try {
      [pipController setValue:[NSNumber numberWithInt:1]
                       forKey:@"controlsStyle"];
    } @catch (NSException *exception) {
    }
#endif // VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
  }
}
|
|
714
|
-
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
715
|
-
// MARK: - Getter
|
|
716
|
-
// MARK: - Getter
/// Returns the system's internal PiP view controller via the private
/// "pictureInPictureViewController" selector (name is split to evade static
/// App Store scanning). Returns nil when the selector is unavailable or the
/// call throws. NOTE(review): relies on private AVKit internals — may break
/// on any OS update; callers must tolerate nil.
- (UIViewController *)pipWrapperController {
  @try {
    NSString *selStr =
        [NSString stringWithFormat:@"%@%@%@", @"pictu", @"reInPictureV",
                                   @"iewController"];
    if ([self.pipController respondsToSelector:NSSelectorFromString(selStr)]) {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Warc-performSelector-leaks"
      return [self.pipController performSelector:NSSelectorFromString(selStr)];
#pragma clang diagnostic pop
    }
  } @catch (NSException *exception) {
    // Swallow: private API probing must never crash the host app.
  }

  return nil;
}
|
|
732
|
-
|
|
733
|
-
/// Lazily caches the private PiP wrapper controller's view, made transparent
/// and with the system sample-player view auto-hidden, so the SDK's own
/// content view can be re-parented into it. May cache nil's view lookup result
/// only after a successful fetch (nil view leaves _pipWrapperView unset).
- (UIView *)pipWrapperView {
  if (!_pipWrapperView) {
    UIView *view = self.pipWrapperController.view;
    view.backgroundColor = UIColor.clearColor;
    view.vel_autoHideSamplePlayerView = YES;
    _pipWrapperView = view;
  }
  return _pipWrapperView;
}
|
|
742
|
-
#endif // VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
743
|
-
/// Whether PiP can start right now. The legacy AVPlayerViewController mode
/// has its own readiness check; every other mode uses the shared check
/// (state == Ready && controller reports PiP possible).
- (BOOL)canStartPictureInPicture {
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
  BOOL usesPlayerViewController =
      (self.type == VeLivePictureInPictureTypeAVPlayerViewController);
  if (usesPlayerViewController) {
    return [self velPlayerVCCanStartPictureInPicture];
  }
#endif // VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
  return [self _canStartPictureInPicture];
}
|
|
751
|
-
|
|
752
|
-
/// Instance-level convenience: forwards to the class-level "any PiP session
/// is currently running" flag.
- (BOOL)isPictureInPictureStarted {
  return VeLivePictureInPictureController.isPictureInPictureStarted;
}

/// Instance-level convenience: forwards to the class-level device/OS
/// PiP-support check.
- (BOOL)isPictureInPictureSupported {
  return VeLivePictureInPictureController.isPictureInPictureSupported;
}
|
|
759
|
-
|
|
760
|
-
/// Shared readiness check: the controller state machine must be Ready AND
/// AVKit must report that PiP is currently possible.
- (BOOL)_canStartPictureInPicture {
  return self.state == VeLivePictureInPictureStateReady &&
         self.pipController.isPictureInPicturePossible;
}
|
|
764
|
-
|
|
765
|
-
/// Feeds one video frame (pixel buffer) using the full-frame video rect.
- (void)enqueuePixelBuffer:(nonnull CVPixelBufferRef)pixelBuffer {
  [self enqueuePixelBuffer:pixelBuffer
                 videoRect:VELPixtureInPictureFullVideoRect];
}

/// Feeds one video frame (pixel buffer) with an explicit normalized sub-rect
/// of the frame to display; forwards to the internal enqueue implementation.
- (void)enqueuePixelBuffer:(nonnull CVPixelBufferRef)pixelBuffer
                 videoRect:(CGRect)videoRect {
  [self vel_enqueuePixelBuffer:pixelBuffer videoRect:videoRect];
}

/// Feeds one video frame (sample buffer) using the full-frame video rect.
- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer {
  [self enqueueSampleBuffer:sampleBuffer
                  videoRect:VELPixtureInPictureFullVideoRect];
}

/// Feeds one video frame (sample buffer) with an explicit normalized sub-rect;
/// forwards to the internal enqueue implementation.
- (void)enqueueSampleBuffer:(nonnull CMSampleBufferRef)sampleBuffer
                  videoRect:(CGRect)videoRect {
  [self vel_enqueueSampleBuffer:sampleBuffer videoRect:videoRect];
}
|
|
784
|
-
|
|
785
|
-
@end
|
|
786
|
-
|
|
787
|
-
// MARK: - implementation VeLivePictureInPictureController (VELPipNotify)
|
|
788
|
-
@implementation VeLivePictureInPictureController (VELPipNotify)
|
|
789
|
-
|
|
790
|
-
/// Called when PiP preparation finishes successfully. On the main queue:
/// moves the state machine to Ready, clears the restore flag, fires the
/// prepare completion and delegate callbacks, then auto-starts PiP if a start
/// was requested while preparation was still in flight.
- (void)notifyDeleteIsReady {
  VeLivePipLog(LOG_TAG, @"pip is ready");
  vel_sync_in_main_queue(^{
    self.state = VeLivePictureInPictureStateReady;
    self.isRestoreUserInterface = NO;
    if (self.prepareCompletionBlock) {
      self.prepareCompletionBlock(self, nil);
    }
    if (self.delegate && [self.delegate respondsToSelector:@selector
                          (pictureInPictureIsReady:)]) {
      [self.delegate pictureInPictureIsReady:self];
    }
    // The delegate callback above may have changed state; re-check before
    // auto-starting.
    if (self.needStartWhenPrepared &&
        self.state == VeLivePictureInPictureStateReady) {
      VeLivePipLog(LOG_TAG, @"pip is ready, auto start...");
      [self _startPictureInPicture];
    }
  });
}
|
|
809
|
-
|
|
810
|
-
/// Common transition work when PiP is (about to be) shown: marks the session
/// running (both instance state and the file-global started flag/instance),
/// snapshots the content view's original placement, optionally dismisses the
/// hosting controller, re-parents the content view into the PiP wrapper, and
/// hides the in-app content view while PiP owns the picture.
- (void)setupPipPlayerContentView {
  self.state = VeLivePictureInPictureStateRunning;
  vel_pip_is_started = YES;
  vel_instance_pip = self;
  [self storeContentViewOriginIfNeed];
  [self hideContentControllerIfNeed];
  [self setupContentViewFullInPipWrapperViewIfNeed];
  [self adjustContentViewHidden:YES];
}
|
|
819
|
-
|
|
820
|
-
/// AVKit "will start" hook: performs the content-view transition on the main
/// queue, then informs the delegate.
- (void)notifyDeleteWillStart {
  VeLivePipLog(LOG_TAG, @"pip will start");
  vel_sync_in_main_queue(^{
    [self setupPipPlayerContentView];
    if (self.delegate && [self.delegate respondsToSelector:@selector
                          (pictureInPictureWillStart:)]) {
      [self.delegate pictureInPictureWillStart:self];
    }
  });
}
|
|
830
|
-
|
|
831
|
-
/// AVKit "did start" hook: re-runs the transition (idempotent — willStart may
/// not have fired in every mode), makes the holder view fully visible, then
/// informs the delegate.
- (void)notifyDeleteDidStart {
  VeLivePipLog(LOG_TAG, @"pip did start");
  vel_sync_in_main_queue(^{
    [self setupPipPlayerContentView];
    self.pipHolderView.alpha = 1;
    if (self.delegate && [self.delegate respondsToSelector:@selector
                          (pictureInPictureDidStart:)]) {
      [self.delegate pictureInPictureDidStart:self];
    }
  });
}
|
|
842
|
-
|
|
843
|
-
/// AVKit "will stop" hook. On the main queue: fades out the holder (0.01, not
/// 0, so AVKit keeps rendering it), snapshots the content view as a
/// placeholder, and — if the system did not (or will not) call the restore
/// callback — triggers the restore path once itself, so the content view is
/// always put back. Finally informs the delegate; isUserStop is inferred from
/// whether the restore-UI callback was seen (user-initiated close skips it).
- (void)notifyDeleteWillStop {
  VeLivePipLog(LOG_TAG, @"pip will stop");
  vel_sync_in_main_queue(^{
    self.pipHolderView.alpha = 0.01;
    self.needStartWhenPrepared = NO;
    [self generatedPlacdholderImageViewFor:self.contentView];
    if (!self.isRestoreUserInterface || self.autoHideContentController) {
      [self
          notifyDeleteRestoreUserInterfaceWithCompletion:nil
                                              isUserStop:
                                                  !self.isRestoreUserInterface];
    } else if (!self.isCalledRestoreUserInterface) {
      VeLivePipLog(
          LOG_TAG,
          @"pip will stop, but not call restore user interface, call once");
      [self
          notifyDeleteRestoreUserInterfaceWithCompletion:nil
                                              isUserStop:
                                                  !self.isRestoreUserInterface];
    }
    if (self.delegate && [self.delegate respondsToSelector:@selector
                          (pictureInPictureWillStop:
                                         isUserStop:)]) {
      [self.delegate pictureInPictureWillStop:self
                                   isUserStop:!self.isRestoreUserInterface];
    }
    [self hidePipHolderSnapshotView];
  });
}
|
|
872
|
-
|
|
873
|
-
/// AVKit "did stop" hook. Mirrors willStop's restore fallback, informs the
/// delegate, then resets all per-session flags back to Ready so a new PiP
/// session can start. On the legacy (< iOS 15) path it also restarts the
/// keep-alive loop player for non-player-layer modes.
- (void)notifyDeleteDidStop {
  VeLivePipLog(LOG_TAG, @"pip did stop");
  vel_sync_in_main_queue(^{
    if (self.isRestoreUserInterface && !self.autoHideContentController) {
      [self
          notifyDeleteRestoreUserInterfaceWithCompletion:nil
                                              isUserStop:
                                                  !self.isRestoreUserInterface];
    } else if (!self.isCalledRestoreUserInterface) {
      VeLivePipLog(
          LOG_TAG,
          @"pip did stop, but not call restore user interface, call once");
      [self
          notifyDeleteRestoreUserInterfaceWithCompletion:nil
                                              isUserStop:
                                                  !self.isRestoreUserInterface];
    }

    if (self.delegate && [self.delegate respondsToSelector:@selector
                          (pictureInPictureDidStop:
                                        isUserStop:)]) {
      [self.delegate pictureInPictureDidStop:self
                                  isUserStop:!self.isRestoreUserInterface];
    }
    // Reset per-session flags; the controller remains prepared (Ready).
    self.needStartWhenPrepared = NO;
    self.isRestoreUserInterface = NO;
    self.isCalledRestoreUserInterface = NO;
    self.state = VeLivePictureInPictureStateReady;
    [self hidePipHolderSnapshotView];
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
    // play loop video
    if (self.type != VeLivePictureInPictureTypeContentViewPlayerLayer) {
      [VELPipLoopPlayer playWithNotifyStatus:NO];
    }
#endif // VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
  });
}
|
|
910
|
-
|
|
911
|
-
/// System restore-UI callback (the user tapped the "return to app" button).
/// Records that the restore was requested and acknowledges the system
/// immediately; the heavy re-parenting work happens in the stop path.
- (void)notifyDeleteRestoreUserInterfaceWithCompletion:
    (void (^)(BOOL restored))completionHandler {
  VeLivePipLog(LOG_TAG, @"pip restore user interface");
  self.isRestoreUserInterface = YES;
  self.isCalledRestoreUserInterface = YES;
  if (completionHandler != nil) {
    completionHandler(YES);
  }
}
|
|
920
|
-
|
|
921
|
-
/// Restores the in-app UI after PiP ends. On the main queue: clears the
/// global running state, snapshots the content view, re-presents the hidden
/// hosting controller if needed, then either lets the delegate drive the
/// restore (passing it a wrapper completion that also fades the holder and
/// removes the snapshot) or, on the legacy path, performs the built-in
/// re-parenting animation itself.
/// NOTE(review): when the delegate does not implement the restore callback
/// AND VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15 is compiled out, the
/// else-branch is empty and CompletionBlock is never invoked — verify this is
/// intentional for that configuration.
- (void)notifyDeleteRestoreUserInterfaceWithCompletion:
            (void (^)(BOOL restored))completionHandler
                                            isUserStop:(BOOL)isUserStop {
  VeLivePipLog(LOG_TAG, @"pip restore user interface isUserStop:%@",
               @(isUserStop));
  vel_sync_in_main_queue(^{
    vel_instance_pip = nil;
    vel_pip_is_started = NO;
    self.needStartWhenPrepared = NO;

    [self generatedPlacdholderImageViewFor:self.contentView];

    [self showContentControllerIfNeedIsUserStop:isUserStop];

    void (^CompletionBlock)(BOOL restored) = ^(BOOL restored) {
      if (completionHandler) {
        completionHandler(restored);
      }
      self.pipHolderView.alpha = 0.01;
      /// hide place holder
      [self hidePipHolderSnapshotView];
    };

    if (self.delegate &&
        [self.delegate
            respondsToSelector:@selector
            (pictureInPicture:
                restoreUserInterfaceWithCompletionHandler:isUserStop:)]) {
      [self.delegate pictureInPicture:self
          restoreUserInterfaceWithCompletionHandler:CompletionBlock
                                         isUserStop:isUserStop];
    } else {
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
      [self _InnerRestoreUserInterfaceWithCompletion:CompletionBlock
                                          isUserStop:isUserStop];
#endif
    }
  });
}
|
|
960
|
-
|
|
961
|
-
/// Central PiP failure handler. On the legacy build, first tries to downgrade
/// to the next-lower PiP implementation type and re-prepare (retry path).
/// Otherwise: restores the UI, resets the type to the originally requested
/// one, moves the state machine to Error, and reports the error via delegate
/// and prepare-completion on the main queue.
/// NOTE: the `else {` brace deliberately spans the #if/#endif pair — when the
/// legacy macro is off, the non-retry body runs unconditionally. Do not
/// "fix" the brace structure.
- (void)notifyDeleteWithError:(NSError *)error {
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
  if (self.downgradeWhenStartFailed &&
      self.type > VeLivePictureInPictureTypeAVPlayer &&
      self.type <= VeLivePictureInPictureTypeAuto) {
    // Retry: destroy, step down one implementation type, re-prepare.
    VeLivePipLog(LOG_TAG, @"pip retry with error %@", error);
    [self destroyPictureInPicture];
    self.type = self.type - 1;
    self.state = VeLivePictureInPictureStateIde;
    [self prepareWithCompletion:self.prepareCompletionBlock];
  } else {
#endif // VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
    VeLivePipLog(LOG_TAG, @"pip with error %@", error);
    [self notifyDeleteRestoreUserInterfaceWithCompletion:nil isUserStop:NO];
    self.type = self.originPipType;
    [self autoSelectType];
    self.state = VeLivePictureInPictureStateError;
    self.needStartWhenPrepared = NO;
    vel_sync_in_main_queue(^{
      vel_pip_is_started = NO;
      vel_instance_pip = nil;
      if (self.delegate &&
          [self.delegate respondsToSelector:@selector(pictureInPicture:
                                                failedToStartWithError:)]) {
        [self.delegate pictureInPicture:self failedToStartWithError:error];
      }
      if (self.prepareCompletionBlock) {
        self.prepareCompletionBlock(self, error);
      }
      [self hidePipHolderSnapshotView];
    });
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
  }
#endif // VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
}
|
|
996
|
-
|
|
997
|
-
// MARK: - content view layout
|
|
998
|
-
// MARK: - content view layout
/// Legacy-path re-parenting: moves the app's content view inside the system
/// PiP wrapper view (full-size, pinned with constraints) so the user sees the
/// live content in the PiP window. Skipped for the player-layer mode, which
/// renders through AVPlayerLayer directly.
- (void)setupContentViewFullInPipWrapperViewIfNeed {
  if (self.type == VeLivePictureInPictureTypeContentViewPlayerLayer) {
    return;
  }
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
  self.pipHolderView.alpha = 1;
  if (self.contentView.superview != self.pipWrapperView) {
    [self.contentView removeFromSuperview];
    // Give the view a sensible frame before constraints take over.
    if (!CGRectEqualToRect(self.pipWrapperView.bounds, CGRectZero)) {
      self.contentView.frame = self.pipWrapperView.bounds;
    }
    [self.pipWrapperView addSubview:self.contentView];
    [self addFullContraints:self.pipWrapperView contentView:self.contentView];
  }
#endif
}
|
|
1014
|
-
|
|
1015
|
-
/// Hides/shows the in-app content view, but only for the iOS 15+
/// content-source mode — other modes keep the content view visible (or
/// re-parent it instead).
- (void)adjustContentViewHidden:(BOOL)hidden {
  if (@available(iOS 15.0, *)) {
    BOOL isContentSourceMode =
        (self.type == VeLivePictureInPictureTypeContentSource);
    if (isContentSourceMode) {
      self.contentView.hidden = hidden;
    }
  }
}
|
|
1022
|
-
|
|
1023
|
-
/// When autoHideContentController is on, dismisses (or pops) the view
/// controller hosting the content while PiP runs, remembering it in
/// storeContentController so it can be re-presented on restore. If the
/// controller cannot be dismissed (it is the root), auto-hide is disabled.
- (void)hideContentControllerIfNeed {
  if (!self.contentController || !self.autoHideContentController) {
    self.storeContentController = nil;
    return;
  }
  // Already hidden from a previous pass.
  if (self.storeContentController != nil) {
    return;
  }
  /// add holder
  [self generatedPlacdholderImageViewFor:self.contentView];
  // If you want to do special effects transitions, do it yourself.
  self.storeContentController = self.contentController;
  if (self.contentController.presentingViewController) {
    [self.contentController.presentingViewController
        dismissViewControllerAnimated:NO
                           completion:nil];
  } else if (self.contentController.navigationController &&
             self.contentController !=
                 self.contentController.navigationController.viewControllers
                     .firstObject) {
    [self.contentController.navigationController popViewControllerAnimated:NO];
  } else {
    // Root controller — cannot hide it; disable auto-hide for this session.
    self.autoHideContentController = NO;
    self.storeContentController = nil;
    VeLivePipLog(LOG_TAG, @"can not dismiss contentController");
  }
}
|
|
1051
|
-
|
|
1052
|
-
/// Re-presents the content controller hidden by hideContentControllerIfNeed.
/// Skipped when auto-hide is off, when the user explicitly closed PiP
/// (isUserStop — the user chose to leave), or when nothing was stored.
/// Pushes onto the topmost navigation stack when available, otherwise
/// presents modally; always clears the stored reference afterwards.
- (void)showContentControllerIfNeedIsUserStop:(BOOL)isUserStop {
  if (!self.contentController || !self.autoHideContentController) {
    self.storeContentController = nil;
    return;
  }

  if (isUserStop) {
    self.storeContentController = nil;
    return;
  }

  if (self.storeContentController == nil) {
    return;
  }
  [self generatedPlacdholderImageViewFor:self.contentView];
  VeLivePipLog(LOG_TAG, @"show content controller");
  // If you want to do special effects transitions, do it yourself.
  UIViewController *topVC = [UIViewController vel_pipTopViewController];
  if ([topVC isKindOfClass:UINavigationController.class]) {
    [(UINavigationController *)topVC pushViewController:self.contentController
                                               animated:NO];
  } else if (topVC.navigationController != nil) {
    [topVC.navigationController pushViewController:self.contentController
                                          animated:NO];
  } else {
    [topVC presentViewController:self.contentController
                        animated:NO
                      completion:nil];
  }
  self.storeContentController = nil;
}
|
|
1084
|
-
|
|
1085
|
-
/// Overlays `view` with a snapshot image view so transitions (re-parenting,
/// dismissal) do not show a blank frame. Replaces any previous snapshot and
/// pins the image view to `view` with constraints. No-op for the
/// player-layer mode or when `view` has no superview to host the overlay.
/// (Method name typo "Placdholder" is part of the public-in-file interface;
/// callers use it, so it is kept.)
- (void)generatedPlacdholderImageViewFor:(UIView *)view {
  if (self.type == VeLivePictureInPictureTypeContentViewPlayerLayer) {
    return;
  }
  if (self.pipHolderImageView != nil) {
    [self.pipHolderImageView removeFromSuperview];
  }
  UIView *superView = view.superview;
  if (superView == nil) {
    return;
  }
  UIImage *holderImage = [self generateImageForView:view];
  self.pipHolderImageView = [[UIImageView alloc] initWithFrame:view.frame];
  self.pipHolderImageView.contentMode = UIViewContentModeScaleAspectFit;
  self.pipHolderImageView.image = holderImage;
  [superView insertSubview:self.pipHolderImageView aboveSubview:view];
  self.pipHolderImageView.translatesAutoresizingMaskIntoConstraints = NO;
  [superView addConstraints:@[
    [self.pipHolderImageView.topAnchor constraintEqualToAnchor:view.topAnchor],
    [self.pipHolderImageView.leftAnchor
        constraintEqualToAnchor:view.leftAnchor],
    [self.pipHolderImageView.rightAnchor
        constraintEqualToAnchor:view.rightAnchor],
    [self.pipHolderImageView.bottomAnchor
        constraintEqualToAnchor:view.bottomAnchor],
  ]];
}
|
|
1112
|
-
|
|
1113
|
-
/// Fades out and removes the snapshot overlay (if any), then un-hides the
/// real content view for the content-source mode.
- (void)hidePipHolderSnapshotView {
  if (self.pipHolderImageView != nil) {
    [UIView animateWithDuration:0.3
        animations:^{
          self.pipHolderImageView.alpha = 0;
        }
        completion:^(BOOL finished) {
          [self.pipHolderImageView removeFromSuperview];
          self.pipHolderImageView = nil;
        }];
  }
  [self adjustContentViewHidden:NO];
}
|
|
1126
|
-
|
|
1127
|
-
/// Renders `view`'s current hierarchy into a UIImage at screen scale, used as
/// the transition placeholder snapshot.
/// @return The snapshot, or nil when the view has no drawable area (a
///         zero-sized bounds would otherwise create an invalid graphics
///         context and log CGContext errors). Callers assign the result to
///         UIImageView.image, which accepts nil.
- (UIImage *)generateImageForView:(UIView *)view {
  if (view == nil || CGRectIsEmpty(view.bounds)) {
    return nil;
  }
  UIGraphicsBeginImageContextWithOptions(view.bounds.size, YES,
                                         UIScreen.mainScreen.scale);
  [view drawViewHierarchyInRect:view.bounds afterScreenUpdates:NO];
  UIImage *img = UIGraphicsGetImageFromCurrentImageContext();
  UIGraphicsEndImageContext();
  return img;
}
|
|
1135
|
-
|
|
1136
|
-
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
1137
|
-
/// Legacy built-in restore: moves the content view back from the PiP wrapper
/// to its original superview, animating either full-screen constraints or the
/// stored original frame, and invokes the completion when the animation ends.
/// NOTE(review): completionHandler is NOT called when the mode is
/// player-layer, or when the content view is already back in its original
/// superview — callers (the wrapper CompletionBlock) then never fade the
/// holder/snapshot; confirm this is the intended behavior.
- (void)_InnerRestoreUserInterfaceWithCompletion:
            (void (^)(BOOL restored))completionHandler
                                      isUserStop:(BOOL)isUserStop {
  if (self.type == VeLivePictureInPictureTypeContentViewPlayerLayer) {
    return;
  }
  if (self.contentView.superview != self.contentOriginSuperView) {
    [self.contentView removeFromSuperview];
    // Re-insert just above the holder when both share the original superview,
    // otherwise at the back so it does not cover sibling UI.
    if (self.pipHolderView.superview == self.contentOriginSuperView) {
      [self.contentOriginSuperView insertSubview:self.contentView
                                    aboveSubview:self.pipHolderView];
    } else {
      [self.contentOriginSuperView insertSubview:self.contentView atIndex:0];
    }
    void (^LayoutBlock)(void) = nil;
    if ([self isFullScreenLayout]) {
      // Original layout filled the superview: pin with constraints so future
      // superview resizes keep it full-screen.
      LayoutBlock = ^(void) {
        self.pipHolderImageView.alpha = 0;
        self.pipHolderView.alpha = 0.01;
        [self addFullContraints:self.contentOriginSuperView
                    contentView:self.contentView];
        [self.contentView setNeedsLayout];
        [self.contentView layoutIfNeeded];
        VeLivePipLog(LOG_TAG,
                     @"set contentview frame with full screen constraint");
      };
    } else {
      // Otherwise restore the exact stored frame via autoresizing.
      LayoutBlock = ^(void) {
        self.pipHolderImageView.alpha = 0;
        self.pipHolderView.alpha = 0.01;
        self.contentView.translatesAutoresizingMaskIntoConstraints = YES;
        self.contentView.frame = self.contentOriginFrame;
        VeLivePipLog(LOG_TAG, @"set contentview frame %@",
                     NSStringFromCGRect(self.contentOriginFrame));
      };
    }
    [UIView animateWithDuration:0.3
                     animations:LayoutBlock
                     completion:^(BOOL finished) {
                       if (completionHandler) {
                         completionHandler(YES);
                       }
                     }];
  }
}
|
|
1182
|
-
|
|
1183
|
-
/// Animates the view's opacity to `to` with the default 0.3s duration.
- (void)addLayerAlphaAnimationFor:(UIView *)view to:(CGFloat)to {
  [self addLayerAlphaAnimationFor:view to:to duration:0.3];
}

/// Animates the view's layer opacity to `to`. Cancels any in-flight
/// animations first; a non-positive duration applies the alpha immediately.
- (void)addLayerAlphaAnimationFor:(UIView *)view
                               to:(CGFloat)to
                         duration:(CGFloat)duration {
  [view.layer removeAllAnimations];
  if (duration <= 0) {
    view.alpha = to;
    return;
  }
  CABasicAnimation *flash =
      [CABasicAnimation animationWithKeyPath:@"opacity"];
  flash.toValue = [NSNumber numberWithFloat:to];
  flash.duration = duration;
  [view.layer addAnimation:flash forKey:@"flashAnimation"];
}
|
|
1201
|
-
|
|
1202
|
-
// {zh} 宽高误差在 16 像素的间距内,都默认是全屏展示 {en} The width and height
|
|
1203
|
-
// error is within the spacing of 16 pixels, and the default is full-screen
|
|
1204
|
-
// display.
|
|
1205
|
-
/// Treats the stored original layout as "full screen" when both width and
/// height are within 16pt of the original superview's bounds (tolerates small
/// inset/safe-area differences).
- (BOOL)isFullScreenLayout {
  return ABS(self.contentOriginFrame.size.width -
             self.contentOriginSuperView.bounds.size.width) < 16 &&
         ABS(self.contentOriginFrame.size.height -
             self.contentOriginSuperView.bounds.size.height) < 16;
}
|
|
1211
|
-
#endif // VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
1212
|
-
|
|
1213
|
-
/// Pins contentView to all four edges of superView with layout constraints.
/// Guards against nil views and against contentView not being a direct
/// subview (addConstraints would throw otherwise).
/// (Method name typo "Contraints" is kept — it is called throughout the file.)
- (void)addFullContraints:(UIView *)superView
              contentView:(UIView *)contentView {
  if (superView == nil || contentView == nil ||
      ![superView.subviews containsObject:contentView]) {
    return;
  }
  contentView.translatesAutoresizingMaskIntoConstraints = NO;
  [superView addConstraints:@[
    [superView.topAnchor constraintEqualToAnchor:contentView.topAnchor],
    [superView.leftAnchor constraintEqualToAnchor:contentView.leftAnchor],
    [superView.rightAnchor constraintEqualToAnchor:contentView.rightAnchor],
    [superView.bottomAnchor constraintEqualToAnchor:contentView.bottomAnchor],
  ]];
}
|
|
1227
|
-
@end
|
|
1228
|
-
|
|
1229
|
-
// MARK: - implementation VeLivePictureInPictureController
|
|
1230
|
-
// (VeLivePictureInPictureDelegate)
|
|
1231
|
-
/// Thin adapter category: receives the system AVKit delegate callbacks (both
/// AVPictureInPictureControllerDelegate and, on the legacy build,
/// AVPlayerViewControllerDelegate) and forwards each to the corresponding
/// internal notifyDelete* method so both code paths share one state machine.
@implementation
VeLivePictureInPictureController (VeLivePictureInPictureDelegate)
// AVPictureInPictureControllerDelegate
- (void)pictureInPictureControllerWillStartPictureInPicture:
    (AVPictureInPictureController *)pictureInPictureController {
  [self notifyDeleteWillStart];
}

- (void)pictureInPictureControllerDidStartPictureInPicture:
    (AVPictureInPictureController *)pictureInPictureController {
  [self notifyDeleteDidStart];
}

- (void)pictureInPictureController:
            (AVPictureInPictureController *)pictureInPictureController
    failedToStartPictureInPictureWithError:(NSError *)error {
  [self notifyDeleteWithError:error];
}

- (void)pictureInPictureControllerWillStopPictureInPicture:
    (AVPictureInPictureController *)pictureInPictureController {
  [self notifyDeleteWillStop];
}

- (void)pictureInPictureControllerDidStopPictureInPicture:
    (AVPictureInPictureController *)pictureInPictureController {
  [self notifyDeleteDidStop];
}

- (void)pictureInPictureController:
            (AVPictureInPictureController *)pictureInPictureController
    restoreUserInterfaceForPictureInPictureStopWithCompletionHandler:
        (void (^)(BOOL restored))completionHandler {
  [self notifyDeleteRestoreUserInterfaceWithCompletion:completionHandler];
}
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
// AVPlayerViewControllerDelegate (legacy < iOS 15 AVPlayerViewController mode)
- (void)playerViewControllerWillStartPictureInPicture:
    (AVPlayerViewController *)playerViewController {
  [self notifyDeleteWillStart];
}

- (void)playerViewControllerDidStartPictureInPicture:
    (AVPlayerViewController *)playerViewController {
  [self notifyDeleteDidStart];
}

- (void)playerViewController:(AVPlayerViewController *)playerViewController
    failedToStartPictureInPictureWithError:(NSError *)error {
  [self notifyDeleteWithError:error];
}

- (void)playerViewControllerWillStopPictureInPicture:
    (AVPlayerViewController *)playerViewController {
  [self notifyDeleteWillStop];
}

- (void)playerViewControllerDidStopPictureInPicture:
    (AVPlayerViewController *)playerViewController {
  [self notifyDeleteDidStop];
}

- (void)playerViewController:(AVPlayerViewController *)playerViewController
    restoreUserInterfaceForPictureInPictureStopWithCompletionHandler:
        (void (^)(BOOL restored))completionHandler {
  [self notifyDeleteRestoreUserInterfaceWithCompletion:completionHandler];
}
#endif
@end
|
|
1300
|
-
|
|
1301
|
-
// MARK: - interface VELPipContentSourceProvider
|
|
1302
|
-
/// A retained CVPixelBuffer plus the normalized sub-rect of the frame it
/// should display, cached per distinct videoRect (see +getKeyWithVideoRect:).
@interface VELPipCachedQueueBuffer : NSObject
// Owned CF object: retained in init, released in -releaseBuffer/-dealloc.
@property(nonatomic, assign) CVPixelBufferRef pixelBuffer;
// Normalized (0..1) crop rect within the full video frame.
@property(nonatomic, assign) CGRect videoRect;
// Cache key derived from videoRect.
@property(nonatomic, copy) NSString *key;
// CACurrentMediaTime() of the last refresh; used for cache expiry elsewhere.
@property(nonatomic, assign) NSTimeInterval lastUpdateTime;
@end
@implementation VELPipCachedQueueBuffer
/// Retains the pixel buffer and records its crop rect and timestamp.
- (instancetype)initWithBuffer:(CVPixelBufferRef)pixelBuffer
                     videoRect:(CGRect)videoRect {
  if (self = [super init]) {
    self.key = [VELPipCachedQueueBuffer getKeyWithVideoRect:videoRect];
    // CVPixelBufferRetain(NULL) is a safe no-op returning NULL.
    self.pixelBuffer = CVPixelBufferRetain(pixelBuffer);
    self.videoRect = videoRect;
    self.lastUpdateTime = (NSTimeInterval)CACurrentMediaTime();
  }
  return self;
}

/// Maps the stored normalized videoRect into absolute coordinates of `rect`.
- (CGRect)convertRectFrom:(CGRect)rect {
  CGFloat x = self.videoRect.origin.x * rect.size.width;
  CGFloat y = self.videoRect.origin.y * rect.size.height;
  CGFloat w = self.videoRect.size.width * rect.size.width;
  CGFloat h = self.videoRect.size.height * rect.size.height;
  return CGRectMake(x, y, w, h);
}

- (void)dealloc {
  [self releaseBuffer];
}

/// Balances the retain taken in init; idempotent.
- (void)releaseBuffer {
  if (_pixelBuffer != NULL) {
    CVPixelBufferRelease(_pixelBuffer);
    _pixelBuffer = NULL;
  }
}

/// Stable cache key: rect components quantized to 1e-6 precision.
+ (NSString *)getKeyWithVideoRect:(CGRect)videoRect {
  return [NSString stringWithFormat:@"%d_%d_%d_%d",
                                    (int)(videoRect.origin.x * 1000000),
                                    (int)(videoRect.origin.y * 1000000),
                                    (int)(videoRect.size.width * 1000000),
                                    (int)(videoRect.size.height * 1000000)];
}
@end
|
|
1347
|
-
/// Owns the AVSampleBufferDisplayLayer pipeline that feeds frames into the
/// iOS 15+ content-source PiP mode: caches per-rect buffers, renders dark
/// keep-alive frames on a timer, and notifies when the display layer changes.
@interface VELPipContentSourceProvider : NSObject
// Guards cachedQueueBuffers and related mutable state.
@property(nonatomic, strong) NSRecursiveLock *bufferLock;
@property(nonatomic, assign) CGSize videoSize;
@property(nonatomic, assign) int videoRectKeyScale;
// Serial queue on which buffers are enqueued to the display layer.
@property(nonatomic, strong) dispatch_queue_t bufferQueue;
/// key is rect, value is pixelbuffer ref
@property(nonatomic, strong)
    NSMutableDictionary<NSString *, VELPipCachedQueueBuffer *>
        *cachedQueueBuffers;
@property(nonatomic, strong) VELPipPixelBufferTool *bufferTool;
@property(nonatomic, strong) VELPipSampleBufferView *sampleBufferView;
@property(nonatomic, strong) AVSampleBufferDisplayLayer *sampleDisplayLayer;
// Owned CF object used as a black keep-alive frame.
@property(nonatomic, assign) CVPixelBufferRef darkPixelBuffer;
// Timer/queue that periodically submits the dark frame.
@property(nonatomic, strong) dispatch_source_t darkFrameTimer;
@property(nonatomic, strong) dispatch_queue_t darkFrameQueue;
@property(nonatomic, copy) AVLayerVideoGravity videoGravity;
// Invoked when sampleDisplayLayer is recreated so the owner can re-attach it.
@property(nonatomic, copy) void (^sampleDisplayLayerChanged)
    (VELPipContentSourceProvider *provider);
// Minimum interval between rendered frames (1/fps); atomic — read off-main.
@property(atomic, assign) NSTimeInterval fpsInterval;
@property(nonatomic, assign) int videoSourceCount;
- (void)enqueuePixelBuffer:(CVPixelBufferRef)pixelBuffer
                 videoRect:(CGRect)videoRect;
- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer
                  videoRect:(CGRect)videoRect;
- (void)destroy;
@end
|
|
1373
|
-
|
|
1374
|
-
// MARK: - interface VELSampleBufferPlaybackDelegate
|
|
1375
|
-
// MARK: - interface VELSampleBufferPlaybackDelegate
/// Minimal AVPictureInPictureSampleBufferPlaybackDelegate for a live stream:
/// always "playing", infinite time range (hides the scrubber), and skip
/// requests acknowledged without seeking.
API_AVAILABLE(ios(15.0), tvos(15.0), macos(12.0))
API_UNAVAILABLE(watchos)
@interface VELSampleBufferPlaybackDelegate
    : NSObject <AVPictureInPictureSampleBufferPlaybackDelegate>
@end

// MARK: - implementation VELSampleBufferPlaybackDelegate
@implementation VELSampleBufferPlaybackDelegate
// Live content is never paused.
- (BOOL)pictureInPictureControllerIsPlaybackPaused:
    (nonnull AVPictureInPictureController *)pictureInPictureController {
  return NO;
}

// Infinite range tells AVKit this is live and suppresses seek UI.
- (CMTimeRange)pictureInPictureControllerTimeRangeForPlayback:
    (AVPictureInPictureController *)pictureInPictureController {
  return CMTimeRangeMake(kCMTimeZero, kCMTimePositiveInfinity);
}

// Informational only; the render size change is just logged.
- (void)pictureInPictureController:
            (AVPictureInPictureController *)pictureInPictureController
         didTransitionToRenderSize:(CMVideoDimensions)newRenderSize {
  VeLivePipLog(LOG_TAG,
               @"content source pip render size changed width:%@ height:%@",
               @(newRenderSize.width), @(newRenderSize.height));
}

// Play/pause toggles are ignored for live content.
- (void)pictureInPictureController:
            (AVPictureInPictureController *)pictureInPictureController
                        setPlaying:(BOOL)playing {
}

// Seeking is unsupported; acknowledge immediately so the UI doesn't hang.
- (void)pictureInPictureController:
            (AVPictureInPictureController *)pictureInPictureController
                    skipByInterval:(CMTime)skipInterval
                 completionHandler:(void (^)(void))completionHandler {
  completionHandler();
}
@end
|
|
1413
|
-
|
|
1414
|
-
// MARK: - implementation VeLivePictureInPictureController (VELPipContentSource)
//
// Picture-in-picture mode backed by an AVSampleBufferDisplayLayer content
// source (iOS 15+).  The host player pushes frames through
// vel_enqueuePixelBuffer:/vel_enqueueSampleBuffer:.
@implementation VeLivePictureInPictureController (VELPipContentSource)

// Category storage: the frame provider is attached via an associated object
// because a category cannot declare ivars.
static char kAssociatedObjectKey_pipContentSourceProvider;
- (void)setSourceProvider:(VELPipContentSourceProvider *)sourceProvider {
  @synchronized(self) {
    objc_setAssociatedObject(self,
                             &kAssociatedObjectKey_pipContentSourceProvider,
                             sourceProvider, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
  }
}

- (VELPipContentSourceProvider *)sourceProvider {
  @synchronized(self) {
    return (VELPipContentSourceProvider *)objc_getAssociatedObject(
        self, &kAssociatedObjectKey_pipContentSourceProvider);
  }
}

// NOTE(review): unlike sourceProvider above, this accessor pair is NOT
// wrapped in @synchronized — presumably main-thread only; confirm.
static char kAssociatedObjectKey_pipPlaybackDelegate;
- (void)setPlaybackDelegate:(VELSampleBufferPlaybackDelegate *)playbackDelegate
    API_AVAILABLE(ios(15.0), tvos(15.0), macos(12.0))API_UNAVAILABLE(watchos) {
  objc_setAssociatedObject(self, &kAssociatedObjectKey_pipPlaybackDelegate,
                           playbackDelegate, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}

- (VELSampleBufferPlaybackDelegate *)playbackDelegate API_AVAILABLE(ios(15.0),
                                                                    tvos(15.0),
                                                                    macos(12.0))
    API_UNAVAILABLE(watchos) {
  return (VELSampleBufferPlaybackDelegate *)objc_getAssociatedObject(
      self, &kAssociatedObjectKey_pipPlaybackDelegate);
}

/// Lazily creates the frame provider, wires the sample-buffer layer into the
/// pip controller, and notifies readiness after a short delay.  Below
/// iOS 15 this mode is unavailable and an error is reported instead.
- (void)initContentSourcePictureInPicture {
  if (@available(iOS 15.0, *)) {
    VeLivePipLog(LOG_TAG, @"init content source pip");
    if (self.sourceProvider == nil) {
      self.sourceProvider = [[VELPipContentSourceProvider alloc] init];
      // fpsInterval is the seconds-per-frame pacing for enqueued buffers.
      self.sourceProvider.fpsInterval = 1.0 / self.fps;
      self.sourceProvider.videoSourceCount = MAX(self.videoSourceCount, 1);
      self.sourceProvider.videoSize = self.videoSize;
      __weak __typeof__(self) weakSelf = self;
      // If the provider recreates its display layer (e.g. after a decode
      // error), rebuild the content source around the new layer.
      [self.sourceProvider setSampleDisplayLayerChanged:^(
                               VELPipContentSourceProvider *provider) {
        __strong __typeof__(weakSelf) self = weakSelf;
        VeLivePipLog(LOG_TAG, @"reset sample layer from provider ");
        [self setupContentSource];
      }];
      [self setupContentSource];
      self.pipHolderView = self.sourceProvider.sampleBufferView;
    }
    // NOTE(review): fixed 0.3s delay before signalling ready — presumably
    // gives AVKit time to attach the layer; confirm this is sufficient.
    dispatch_after(
        dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.3 * NSEC_PER_SEC)),
        dispatch_get_main_queue(), ^{
          [self notifyDeleteIsReady];
        });
  } else {
    [self notifyDeleteWithError:
              VeLivePipError(VeLivePictureInPictureErrorCodeNotPossible,
                             @"content source not support below iOS 15.0")];
  }
}

/// Tears down the provider, its view, and the pip controller.
- (void)releaseContentSourcePictureInPicture {
  if (self.pipController != nil) {
    [self.sourceProvider.sampleBufferView removeFromSuperview];
    [self.sourceProvider destroy];
    self.sourceProvider = nil;
    [self stopPictureInPicture];
    self.pipController = nil;
  }
}

/// (Re)binds the provider's sample display layer to the pip controller via a
/// fresh AVPictureInPictureControllerContentSource.
- (void)setupContentSource API_AVAILABLE(ios(15.0)) {
  // playback delegate strong ref
  self.playbackDelegate = [[VELSampleBufferPlaybackDelegate alloc] init];
  AVPictureInPictureControllerContentSource *contentSource =
      [[AVPictureInPictureControllerContentSource alloc]
          initWithSampleBufferDisplayLayer:self.sourceProvider
                                               .sampleDisplayLayer
                          playbackDelegate:self.playbackDelegate];
  if (self.pipController == nil) {
    self.pipController = [[AVPictureInPictureController alloc]
        initWithContentSource:contentSource];
    self.pipController.delegate = self;
  } else {
    // Controller already exists: just swap the content source in place.
    [self.pipController setContentSource:contentSource];
  }
}

/// Forwards a video-size change to the frame provider.
- (void)vel_setContentSourceVideoSize:(CGSize)videoSize {
  [self.sourceProvider setVideoSize:videoSize];
}

/// Applies the requested gravity to the sample-buffer view.
- (void)setSampleLayerVideoGravity:(AVLayerVideoGravity)videoGravity {
  self.sourceProvider.sampleBufferView.videoGravity = videoGravity;
}

/// Pushes one pixel buffer into the provider; dropped unless pip is running.
- (void)vel_enqueuePixelBuffer:(CVPixelBufferRef)pixelBuffer
                     videoRect:(CGRect)videoRect {
  if (self.state != VeLivePictureInPictureStateRunning) {
    return;
  }
  [self.sourceProvider enqueuePixelBuffer:pixelBuffer videoRect:videoRect];
}

/// Pushes one sample buffer into the provider; dropped unless pip is running.
- (void)vel_enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer
                      videoRect:(CGRect)videoRect {
  if (self.state != VeLivePictureInPictureStateRunning) {
    return;
  }
  [self.sourceProvider enqueueSampleBuffer:sampleBuffer videoRect:videoRect];
}

@end
|
|
1529
|
-
|
|
1530
|
-
// MARK: - implementation VeLivePictureInPictureController
// (VELPipContentViewPlayerLayer)
//
// Picture-in-picture mode that reuses an AVPlayerLayer already present in
// the caller-supplied content view (no placeholder player needed).
@implementation VeLivePictureInPictureController (VELPipContentViewPlayerLayer)

// NOTE(review): OBJC_ASSOCIATION_ASSIGN stores an unsafe-unretained pointer;
// if the layer/view is deallocated while still associated this dangles.
// Presumably intentional (the content view owns the layer and outlives pip);
// confirm the lifetime guarantee.
static char kAssociatedObjectKey_pipContentViewPlayerLayer;
- (void)setContentViewPlayerLayer:(AVPlayerLayer *)contentViewPlayerLayer {
  objc_setAssociatedObject(self,
                           &kAssociatedObjectKey_pipContentViewPlayerLayer,
                           contentViewPlayerLayer, OBJC_ASSOCIATION_ASSIGN);
}
- (AVPlayerLayer *)contentViewPlayerLayer {
  return (AVPlayerLayer *)objc_getAssociatedObject(
      self, &kAssociatedObjectKey_pipContentViewPlayerLayer);
}
static char kAssociatedObjectKey_pipContentViewPlayerLayerView;
- (void)setContentViewPlayerLayerView:(UIView *)contentViewPlayerLayerView {
  objc_setAssociatedObject(self,
                           &kAssociatedObjectKey_pipContentViewPlayerLayerView,
                           contentViewPlayerLayerView, OBJC_ASSOCIATION_ASSIGN);
}
- (UIView *)contentViewPlayerLayerView {
  return (UIView *)objc_getAssociatedObject(
      self, &kAssociatedObjectKey_pipContentViewPlayerLayerView);
}

/// Creates the pip controller from the content view's AVPlayerLayer and
/// reports readiness after a short settle delay.  Fails when the content
/// view exposes no AVPlayerLayer.
- (void)initContentViewPlayerLayerPictureInPicture {
  VeLivePipLog(LOG_TAG, @"init avplayer pip");
  if (self.contentViewPlayerLayer == nil) {
    // NOTE(review): uses raw code -1 rather than a
    // VeLivePictureInPictureErrorCode constant like sibling paths do.
    [self notifyDeleteWithError:VeLivePipError(
                                    -1,
                                    @"content view has no AVPlayerLayer view")];
    return;
  }
  if (self.pipController == nil) {
    self.pipController = [[AVPictureInPictureController alloc]
        initWithPlayerLayer:self.contentViewPlayerLayer];
    self.pipController.delegate = self;
  }
  // NOTE(review): fixed 0.3s delay before signalling ready — confirm.
  dispatch_after(
      dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.3 * NSEC_PER_SEC)),
      dispatch_get_main_queue(), ^{
        [self notifyDeleteIsReady];
      });
}

/// Stops pip and clears the (unretained) layer/view references.
- (void)releaseContentViewPlayerLayerPictureInPicture {
  if (self.pipController != nil) {
    [self stopPictureInPicture];
    self.contentViewPlayerLayer = nil;
    self.contentViewPlayerLayerView = nil;
    self.pipController = nil;
  }
}

/// Intentionally a no-op: the host app owns the layer's gravity in this mode.
- (void)setContentViewPlayerLayerVideoGravity:
    (AVLayerVideoGravity)videoGravity {
  // self.contentViewPlayerLayer.videoGravity = videoGravity;
}
@end
|
|
1588
|
-
|
|
1589
|
-
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
1590
|
-
// MARK: - implementation VeLivePictureInPictureController
// (VELPipContentSourceAVPlayer)
//
// iOS 15+ content-source mode driven by the shared looping placeholder
// player (VELPipLoopPlayer) rather than pushed sample buffers.
@implementation VeLivePictureInPictureController (VELPipContentSourceAVPlayer)

// Category storage for the placeholder player view (retained).
static char kAssociatedObjectKey_pipContentSourcePlayerView;
- (void)setContentSourcePlayerView:
    (VELPipAVPlayerView *)contentSourcePlayerView {
  objc_setAssociatedObject(
      self, &kAssociatedObjectKey_pipContentSourcePlayerView,
      contentSourcePlayerView, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}

- (VELPipAVPlayerView *)contentSourcePlayerView {
  return (VELPipAVPlayerView *)objc_getAssociatedObject(
      self, &kAssociatedObjectKey_pipContentSourcePlayerView);
}

/// Builds the placeholder-player view and pip controller (once), subscribes
/// to loop-player status, then starts the placeholder playback.  Reports an
/// error below iOS 15.
- (void)initContentSourceAVPlayerPictureInPicture {
  VeLivePipLog(LOG_TAG, @"init content source avplayer pip");
  if (@available(iOS 15.0, *)) {
    if (self.contentSourcePlayerView == nil) {
      [VELPipLoopPlayer initLoopPlayer];
      self.contentSourcePlayerView = [[VELPipAVPlayerView alloc] init];
      self.contentSourcePlayerView.playerLayer.player = VELPipLoopPlayer.player;
      self.contentSourcePlayerView.playerLayer.videoGravity =
          AVLayerVideoGravityResize;
      self.pipHolderView = self.contentSourcePlayerView;
      AVPictureInPictureControllerContentSource *contentSource =
          [[AVPictureInPictureControllerContentSource alloc]
              initWithPlayerLayer:self.contentSourcePlayerView.playerLayer];
      self.pipController = [[AVPictureInPictureController alloc]
          initWithContentSource:contentSource];
      self.pipController.delegate = self;
    }
    __weak __typeof__(self) weakSelf = self;
    // Loop-player readiness/failure drives the pip-ready notification.
    [VELPipLoopPlayer
        setStatusBlock:^(AVPlayerStatus status, NSError *_Nullable error) {
          __strong __typeof__(weakSelf) self = weakSelf;
          VeLivePipLog(LOG_TAG, @"loop player status changed block %@-%@",
                       @(status), error);
          if (status == AVPlayerStatusFailed) {
            [self notifyDeleteWithError:error];
          } else if (status == AVPlayerStatusReadyToPlay) {
            [self contentSourceAvplayerIsReady];
          }
        }];
    [VELPipLoopPlayer setVideoSize:self.videoSize];
    [VELPipLoopPlayer playWithNotifyStatus:YES];
  } else {
    [self notifyDeleteWithError:
              VeLivePipError(
                  VeLivePictureInPictureErrorCodeNotPossible,
                  @"content source avplayer not support below iOS 15.0")];
  }
}

/// Tears down the placeholder view and pip controller.
- (void)releaseContentSourceAVPlayerPictureInPicture {
  if (self.pipController != nil) {
    [self.contentSourcePlayerView removeFromSuperview];
    self.contentSourcePlayerView = nil;
    [self stopPictureInPicture];
    self.pipController = nil;
  }
}

/// Called when the placeholder player reaches ReadyToPlay.
- (void)contentSourceAvplayerIsReady {
  [self notifyDeleteIsReady];
}

/// Applies the requested gravity to the placeholder player layer.
- (void)setContentSourceAVPlayerLayerVideoGravity:
    (AVLayerVideoGravity)videoGravity {
  self.contentSourcePlayerView.playerLayer.videoGravity = videoGravity;
}
@end
|
|
1663
|
-
|
|
1664
|
-
// MARK: - implementation VeLivePictureInPictureController (VELPipAVPlayer)
//
// Legacy AVPlayerLayer-based mode (pre-iOS-15 fallback): pip is driven by a
// plain AVPictureInPictureController over the shared looping placeholder
// player's layer.
@implementation VeLivePictureInPictureController (VELPipAVPlayer)

// Category storage for the placeholder player view (retained).
static char kAssociatedObjectKey_pipAVplayerView;
- (void)setPlayerView:(VELPipAVPlayerView *)playerView {
  objc_setAssociatedObject(self, &kAssociatedObjectKey_pipAVplayerView,
                           playerView, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}

- (VELPipAVPlayerView *)playerView {
  return (VELPipAVPlayerView *)objc_getAssociatedObject(
      self, &kAssociatedObjectKey_pipAVplayerView);
}

/// Creates the placeholder player view + pip controller (once), subscribes
/// to loop-player status, and starts the placeholder playback.
- (void)initAVPlayerPictureInPicture {
  VeLivePipLog(LOG_TAG, @"init avplayer pip");
  if (self.playerView == nil) {
    [VELPipLoopPlayer initLoopPlayer];
    self.playerView = [[VELPipAVPlayerView alloc] init];
    self.playerView.playerLayer.player = VELPipLoopPlayer.player;
    self.playerView.playerLayer.videoGravity = AVLayerVideoGravityResize;
    self.pipHolderView = self.playerView;

    self.pipController = [[AVPictureInPictureController alloc]
        initWithPlayerLayer:self.playerView.playerLayer];
    self.pipController.delegate = self;
  }
  __weak __typeof__(self) weakSelf = self;
  // Loop-player readiness/failure drives the pip-ready notification.
  [VELPipLoopPlayer
      setStatusBlock:^(AVPlayerStatus status, NSError *_Nullable error) {
        __strong __typeof__(weakSelf) self = weakSelf;
        VeLivePipLog(LOG_TAG, @"loop player status changed block %@-%@",
                     @(status), error);
        if (status == AVPlayerStatusFailed) {
          [self notifyDeleteWithError:error];
        } else if (status == AVPlayerStatusReadyToPlay) {
          [self avplayerIsReady];
        }
      }];
  [VELPipLoopPlayer setVideoSize:self.videoSize];
  [VELPipLoopPlayer playWithNotifyStatus:YES];
}

/// Tears down the placeholder view and pip controller.
- (void)releaseAVPlayerPictureInPicture {
  if (self.pipController != nil) {
    [self.playerView removeFromSuperview];
    self.playerView = nil;

    [self stopPictureInPicture];
    self.pipController = nil;
  }
}

/// Called when the placeholder player reaches ReadyToPlay.
- (void)avplayerIsReady {
  [self notifyDeleteIsReady];
}

/// Applies the requested gravity to the placeholder player layer.
- (void)setAVPlayerLayerVideoGravity:(AVLayerVideoGravity)videoGravity {
  self.playerView.playerLayer.videoGravity = videoGravity;
}
@end
|
|
1724
|
-
|
|
1725
|
-
// Declares the picture-in-picture selectors this file invokes dynamically on
// AVPlayerViewController (checked via respondsToSelector: before use) so the
// calls compile without touching private headers.
@protocol VELAVPictureInPictureProtocol <NSObject>
- (void)startPictureInPicture;
- (void)stopPictureInPicture;
@property(nonatomic, readonly, getter=isPictureInPicturePossible)
    BOOL pictureInPicturePossible;
@property(nonatomic, readonly, getter=isPictureInPictureActive)
    BOOL pictureInPictureActive;
@property(nonatomic, readonly, getter=isPictureInPictureSuspended)
    BOOL pictureInPictureSuspended;
// tvOS-only capability flag mirrored from AVKit.
@property(nonatomic, readonly)
    BOOL canStopPictureInPicture API_AVAILABLE(tvos(14.0))
        API_UNAVAILABLE(ios, macos, watchos);
@end
|
|
1738
|
-
|
|
1739
|
-
// MARK: - implementation VeLivePictureInPictureController
// (VELPipAVPlayerViewController)
//
// Mode that hosts pip through a full AVPlayerViewController playing the
// shared looping placeholder video.
@implementation VeLivePictureInPictureController (VELPipAVPlayerViewController)

// Category storage for the hosted AVPlayerViewController (retained).
static char kAssociatedObjectKey_pipAVPlayerViewController;
- (void)setPlayerViewController:(AVPlayerViewController *)playerViewController {
  objc_setAssociatedObject(
      self, &kAssociatedObjectKey_pipAVPlayerViewController,
      playerViewController, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}

- (AVPlayerViewController *)playerViewController {
  return (AVPlayerViewController *)objc_getAssociatedObject(
      self, &kAssociatedObjectKey_pipAVPlayerViewController);
}

/// Creates and configures the AVPlayerViewController (once), subscribes to
/// loop-player status, and starts placeholder playback.
- (void)initAVPlayerViewControllerPictureInPicture {
  VeLivePipLog(LOG_TAG, @"init AVPlayerViewController pip");
  if (self.playerViewController == nil) {
    [VELPipLoopPlayer initLoopPlayer];
    // Cast documents that the private pip selectors declared in
    // VELAVPictureInPictureProtocol may be sent to this instance.
    self.playerViewController =
        (AVPlayerViewController<VELAVPictureInPictureProtocol> *)
            [[AVPlayerViewController alloc] init];
    self.playerViewController.delegate = self;
    self.playerViewController.player = VELPipLoopPlayer.player;
    self.playerViewController.allowsPictureInPicturePlayback = YES;
    self.playerViewController.videoGravity = AVLayerVideoGravityResize;
    // self.playerViewController.showsPlaybackControls = NO; // don't set to
    // NO

    if (@available(iOS 14.0, *)) {
      self.playerViewController.requiresLinearPlayback = YES;
    }
    if (@available(iOS 14.2, *)) {
      self.playerViewController
          .canStartPictureInPictureAutomaticallyFromInline =
          self.canStartPictureInPictureAutomaticallyFromInline;
    }
    self.pipHolderView = self.playerViewController.view;
  }
  __weak __typeof__(self) weakSelf = self;
  // Loop-player readiness/failure drives the pip-ready notification.
  [VELPipLoopPlayer
      setStatusBlock:^(AVPlayerStatus status, NSError *_Nullable error) {
        __strong __typeof__(weakSelf) self = weakSelf;
        VeLivePipLog(LOG_TAG, @"loop player status changed block %@-%@",
                     @(status), error);
        if (status == AVPlayerStatusFailed) {
          [self notifyDeleteWithError:error];
        } else if (status == AVPlayerStatusReadyToPlay) {
          [self avPlayerViewControllerIsReady];
        }
      }];
  [VELPipLoopPlayer setVideoSize:self.videoSize];
  [VELPipLoopPlayer playWithNotifyStatus:YES];
}

/// Forwards the auto-start-from-inline preference (iOS 14.2+ only).
- (void)velPlayerVCSetCanStartPictureInPictureAutomaticallyFromInline:
    (BOOL)canStart {
  if (@available(iOS 14.2, *)) {
    self.playerViewController.canStartPictureInPictureAutomaticallyFromInline =
        canStart;
  }
}

/// Tears down the hosted controller and stops pip.
- (void)releaseAVPlayerViewControllerPictureInPicture {
  if (self.playerViewController != nil) {
    [self.playerViewController.view removeFromSuperview];
    [self velPlayerVCStopPictureInPicture];
    self.playerViewController = nil;
  }
}

// Start picture-in-picture via the controller's (private) selector when
// available, otherwise through the harvested pipController.
- (void)velPlayerVCStartPictureInPicture {
  if (!self.velPlayerVCCanStartPictureInPicture) {
    VeLivePipLog(LOG_TAG,
                 @"AVPlayerViewController can not start pip state:%@, check "
                 @"app has audio playing?",
                 @(self.state));
    [self
        notifyDeleteWithError:
            VeLivePipError(VeLivePictureInPictureErrorCodeNotPossible,
                           @"can not start pip, check app has audio playing?")];
    return;
  }
  if ([self.playerViewController
          respondsToSelector:@selector(startPictureInPicture)]) {
    VeLivePipLog(LOG_TAG, @"AVPlayerViewController start pip");
    [self.playerViewController startPictureInPicture];
  } else {
    [self.pipController startPictureInPicture];
  }
  self.state = VeLivePictureInPictureStateRunning;
}

// Stop picture-in-picture (mirrors the start path above).
- (void)velPlayerVCStopPictureInPicture {
  if (self.state != VeLivePictureInPictureStateRunning) {
    VeLivePipLog(LOG_TAG, @"AVPlayerViewController not running");
    return;
  }
  if ([self.playerViewController
          respondsToSelector:@selector(stopPictureInPicture)]) {
    VeLivePipLog(LOG_TAG, @"AVPlayerViewController stop pip");
    [self.playerViewController stopPictureInPicture];
  } else {
    [self.pipController stopPictureInPicture];
  }
  self.state = VeLivePictureInPictureStateStopping;
}

/// YES when the controller reports pip is possible and we are in Ready state;
/// falls back to the generic check when the selector is unavailable.
- (BOOL)velPlayerVCCanStartPictureInPicture {
  if ([self.playerViewController
          respondsToSelector:@selector(isPictureInPicturePossible)]) {
    return self.state == VeLivePictureInPictureStateReady &&
           self.playerViewController.isPictureInPicturePossible;
  }
  return [self _canStartPictureInPicture];
}

/// Once the placeholder player is ready, harvest the internal pip controller
/// and notify readiness.
- (void)avPlayerViewControllerIsReady {
  [self getAVPlayerViewControllerPipController];
  if (self.pipController == nil) {
    return;
  }
  [self notifyDeleteIsReady];
}

// NOTE(review): reaches into AVPlayerViewController's private
// "_pictureInPictureController" ivar via KVC; the key is assembled from
// fragments, presumably to evade static App Store scanning.  Fragile across
// OS releases — the @catch below handles the key disappearing.
- (void)getAVPlayerViewControllerPipController {
  @try {
    NSString *pipName = [NSString
        stringWithFormat:@"_%@%@%@", @"pictur", @"eInPict", @"ureController"];
    id pipController = [self.playerViewController valueForKey:pipName];
    if (pipController != nil &&
        [pipController isKindOfClass:[AVPictureInPictureController class]]) {
      self.pipController = pipController;
    } else {
      [self notifyDeleteWithError:
                VeLivePipError(VeLivePictureInPictureErrorCodeNotPossible,
                               @"can not find PictureInPictureController")];
    }
  } @catch (NSException *exception) {
    [self
        notifyDeleteWithError:VeLivePipError(
                                  VeLivePictureInPictureErrorCodeNotPossible,
                                  @"can not find PictureInPictureController")];
  }
}

/// Applies the requested gravity to the hosted controller.
- (void)setAVPlayerVCVideoGravity:(AVLayerVideoGravity)videoGravity {
  self.playerViewController.videoGravity = videoGravity;
}
@end
|
|
1891
|
-
|
|
1892
|
-
// MARK: - interface VELPipLoopPlayer
//
// Private state for the shared looping placeholder player: it generates a
// small local "dark" video and loops it through an AVPlayer so AVKit-based
// pip modes always have real playback to latch onto.
@interface VELPipLoopPlayer ()
// Underlying player driving the placeholder loop.
@property(nonatomic, strong) VELPipAVPlayer *player;
// Current item built from the generated placeholder file.
@property(nonatomic, strong) AVPlayerItem *playerItem;
// Size of the generated placeholder video.
@property(nonatomic, assign) CGSize videoSize;
// Last known player status, forwarded through statusBlock.
@property(nonatomic, assign) AVPlayerStatus status;
@property(nonatomic, copy) void (^statusBlock)
    (AVPlayerStatus status, NSError *_Nullable error);
/// Path of the locally written placeholder video.
@property(nonatomic, copy, readonly) NSString *videoPath;
// AVAssetWriter pipeline used to synthesize the placeholder file.
@property(strong, nonatomic) AVAssetWriter *writer;
@property(nonatomic, strong) NSRecursiveLock *videoWritterLock;
@property(strong, nonatomic) AVAssetWriterInput *videoInput;
@property(strong, nonatomic)
    AVAssetWriterInputPixelBufferAdaptor *writerInputPixelBufferAdaptor;
// Produces the solid-color pixel buffers written into the placeholder.
@property(nonatomic, strong) VELPipPixelBufferTool *bufferTool;
@end
|
|
1909
|
-
|
|
1910
|
-
// MARK: - implementation VELPipLoopPlayer
|
|
1911
|
-
@implementation VELPipLoopPlayer
|
|
1912
|
-
// Process-wide singleton; also read directly by +isInited so the class-level
// facade can tell "created" apart from "never touched".
static VELPipLoopPlayer *vel_pip_loop_player_instance = nil;
/// Returns the shared loop player, creating it exactly once.
+ (instancetype)sharePlayer {
  static dispatch_once_t onceToken;
  dispatch_once(&onceToken, ^{
    vel_pip_loop_player_instance = [[VELPipLoopPlayer alloc] init];
  });
  return vel_pip_loop_player_instance;
}
|
|
1920
|
-
|
|
1921
|
-
/// Designated initializer: seeds the default 360x640 placeholder size and the
/// helpers used to synthesize the placeholder video.
- (instancetype)init {
  self = [super init];
  if (self != nil) {
    _bufferTool = [[VELPipPixelBufferTool alloc] init];
    _videoWritterLock = [[NSRecursiveLock alloc] init];
    _videoSize = CGSizeMake(360, 640);
  }
  return self;
}
|
|
1929
|
-
|
|
1930
|
-
/// Idempotently creates and starts the shared loop player.
+ (void)initLoopPlayer {
  if ([self isInited]) {
    return;
  }
  [[VELPipLoopPlayer sharePlayer] setup];
  VeLivePipLog(LOG_TAG, @"loop player init");
  [[VELPipLoopPlayer sharePlayer] playWithNotifyStatus:YES];
}

/// YES once the singleton exists AND its AVPlayer has been set up.
+ (BOOL)isInited {
  return vel_pip_loop_player_instance != nil &&
         vel_pip_loop_player_instance.player != nil;
}

// The class methods below are thin guards over the singleton: each is a
// no-op (or returns a zero value) until +initLoopPlayer has run.

+ (VELPipAVPlayer *)player {
  if (![self isInited]) {
    return nil;
  }
  return [[VELPipLoopPlayer sharePlayer] player];
}

+ (AVPlayerStatus)status {
  if (![self isInited]) {
    return AVPlayerStatusUnknown;
  }
  return [[VELPipLoopPlayer sharePlayer] status];
}

+ (void)playWithNotifyStatus:(BOOL)notify {
  if (![self isInited]) {
    return;
  }
  VeLivePipLog(LOG_TAG, @"loop player play");
  [[VELPipLoopPlayer sharePlayer] playWithNotifyStatus:notify];
}

+ (void)pause {
  if (![self isInited]) {
    return;
  }
  VeLivePipLog(LOG_TAG, @"loop player pause");
  [[VELPipLoopPlayer sharePlayer] pause];
}

+ (void)stop {
  if (![self isInited]) {
    return;
  }
  VeLivePipLog(LOG_TAG, @"loop player stop");
  [[VELPipLoopPlayer sharePlayer] stop];
}

+ (void)setVideoSize:(CGSize)videoSize {
  if (![self isInited]) {
    return;
  }
  [[VELPipLoopPlayer sharePlayer] setVideoSize:videoSize];
}

+ (CGSize)videoSize {
  if (![self isInited]) {
    return CGSizeZero;
  }
  return [[VELPipLoopPlayer sharePlayer] videoSize];
}

+ (void)setStatusBlock:(void (^)(AVPlayerStatus,
                                 NSError *_Nullable))statusBlock {
  if (![self isInited]) {
    return;
  }
  VeLivePipLog(LOG_TAG, @"loop player set status block");
  [[VELPipLoopPlayer sharePlayer] setStatusBlock:statusBlock];
}

+ (void (^)(AVPlayerStatus, NSError *_Nullable))statusBlock {
  if (![self isInited]) {
    return nil;
  }
  return [[VELPipLoopPlayer sharePlayer] statusBlock];
}
|
|
2011
|
-
|
|
2012
|
-
// MARK: VELLoopPlayer Private
/// Updates the placeholder video size; only when the size actually changes
/// is the player item regenerated (rewriting the placeholder file).
- (void)setVideoSize:(CGSize)videoSize {
  if (CGSizeEqualToSize(_videoSize, videoSize)) {
    return;
  }
  VeLivePipLog(LOG_TAG, @"loop player set videoSize from %@ to %@",
               NSStringFromCGSize(_videoSize), NSStringFromCGSize(videoSize));
  _videoSize = videoSize;
  [self resetPlayerItem];
}
|
|
2021
|
-
|
|
2022
|
-
/// Starts (or confirms) placeholder playback.  When `notify` is YES the
/// current status is re-broadcast through statusBlock after a short delay so
/// late subscribers still receive a status callback.
- (void)playWithNotifyStatus:(BOOL)notify {
  CGFloat delayTime = 0.5;
  NSError *error = nil;
  if (self.status != AVPlayerStatusReadyToPlay) {
    VeLivePipLog(LOG_TAG, @"loop player not ready");
    error = VeLivePipError(-1, @"loop player not ready");
  } else if (self.player.rate <= 0) {
    // Ready but paused/stopped: kick playback off now.
    VeLivePipLog(LOG_TAG, @"loop player is ready, play now");
    [self.player play];
  } else {
    // Already playing: notify sooner since nothing needs to spin up.
    delayTime = 0.3;
  }
  if (notify) {
    // NOTE(review): status is re-read inside the block, so the value
    // delivered reflects the state after the delay, not at call time.
    dispatch_after(
        dispatch_time(DISPATCH_TIME_NOW, (int64_t)(delayTime * NSEC_PER_SEC)),
        dispatch_get_main_queue(), ^{
          VeLivePipLog(LOG_TAG, @"loop player call status changed %@",
                       @(self.status));
          if (self.statusBlock) {
            self.statusBlock(self.status, error);
          }
        });
  }
}
|
|
2046
|
-
|
|
2047
|
-
/// Pauses placeholder playback (messaging a nil player is a harmless no-op).
- (void)pause {
  VeLivePipLog(LOG_TAG, @"loop player pause");
  [self.player velPause];
}
|
|
2051
|
-
|
|
2052
|
-
/// Fully tears down the loop player: pause, drop observers, clear the item
/// and the player itself.  After this +isInited reports NO again.
- (void)stop {
  if (self.player == nil) {
    return;
  }
  VeLivePipLog(LOG_TAG, @"loop player stop");
  [self pause];
  [self removeSystemNotifactionObserver];
  self.status = AVPlayerStatusUnknown;
  // Setting playerItem to nil also unhooks its KVO/notification observers
  // (see setPlayerItem:).
  self.playerItem = nil;
  [self.player replaceCurrentItemWithPlayerItem:nil];
  self.player = nil;
}
|
|
2064
|
-
|
|
2065
|
-
/// One-time creation of the AVPlayer, its periodic time observer, the first
/// player item, and system notification observers.  Safe to call repeatedly.
- (void)setup {
  if (self.player != nil) {
    return;
  }
  VeLivePipLog(LOG_TAG, @"loop player setup");
  self.player = [[VELPipAVPlayer alloc] init];
  // same to rate
  // NOTE(review): the periodic observer body is empty (logging commented
  // out); presumably its mere presence keeps the playback clock serviced —
  // confirm before removing.
  [self.player
      addPeriodicTimeObserverForInterval:CMTimeMake(1, 1000000000)
                                   queue:dispatch_get_main_queue()
                              usingBlock:^(CMTime time){
                                  // VeLivePipLog(LOG_TAG, @"loop player
                                  // time %@", @(time.value));
                              }];
  [self resetPlayerItem];
  [self addSystemNotifactionObserver];
}
|
|
2082
|
-
|
|
2083
|
-
/// Discards the current item (and its observers, via setPlayerItem:) and
/// rebuilds one from a freshly generated placeholder video.
- (void)resetPlayerItem {
  VeLivePipLog(LOG_TAG, @"loop player reset player item");
  self.status = AVPlayerStatusUnknown;
  self.playerItem = nil;
  [self preparePlayerItem];
}
|
|
2089
|
-
|
|
2090
|
-
/// Asynchronously generates (or reuses) the local "dark" placeholder video
/// and, on success, assigns a new AVPlayerItem built from it — which via
/// setPlayerItem: starts playback.  On failure the error is forwarded to
/// statusBlock with the current (unchanged) status.
- (void)preparePlayerItem {
  if (self.playerItem == nil) {
    VeLivePipLog(LOG_TAG, @"loop player prepare player item");
    [self getOrCreateDarkVideo:^(NSString *_Nullable videoPath,
                                 NSError *_Nullable error) {
      if (error != nil) {
        VeLivePipLog(LOG_TAG, @"loop player prepare player item error:%@",
                     error);
        if (self.statusBlock) {
          self.statusBlock(self.status, error);
        }
      } else {
        VeLivePipLog(LOG_TAG,
                     @"loop player prepare player item complete with path:%@",
                     videoPath);
        AVAsset *asset =
            [AVAsset assetWithURL:[NSURL fileURLWithPath:videoPath]];
        self.playerItem = [[AVPlayerItem alloc] initWithAsset:asset];
      }
    }];
  }
}
|
|
2112
|
-
|
|
2113
|
-
/// Custom setter: swaps KVO/notification observers from the old item to the
/// new one, installs it on the player, and immediately starts playing.
/// Observer removal MUST precede the ivar assignment — order is significant.
- (void)setPlayerItem:(AVPlayerItem *)playerItem {
  self.status = AVPlayerStatusUnknown;
  [self removePlayerItemObserver:_playerItem];
  _playerItem = playerItem;
  [self addPlayerItemObserver:_playerItem];
  [self.player replaceCurrentItemWithPlayerItem:playerItem];
  [self.player play];
}
|
|
2121
|
-
|
|
2122
|
-
/// Transitions to ReadyToPlay and broadcasts the new status to the
/// subscriber, if any.
- (void)playerItemIsReadyToPlay {
  VeLivePipLog(LOG_TAG, @"loop player player item is ready");
  self.status = AVPlayerStatusReadyToPlay;
  void (^callback)(AVPlayerStatus, NSError *_Nullable) = self.statusBlock;
  if (callback != nil) {
    callback(self.status, nil);
  }
}
|
|
2129
|
-
|
|
2130
|
-
/// Transitions to Failed and reports a template-playback error to the
/// subscriber, if any.
- (void)playerItemFailedToPlay {
  VeLivePipLog(LOG_TAG, @"loop player player item is failed");
  self.status = AVPlayerStatusFailed;
  if (self.statusBlock) {
    self.statusBlock(
        self.status,
        VeLivePipError(VeLivePictureInPictureErrorCodePlayTemplateFileFailed,
                       @"play template file failed"));
  }
}
|
|
2140
|
-
|
|
2141
|
-
// KVO callback for the item's `status` key (registered in
// -addPlayerItemObserver:). Failure is reported immediately; readiness is
// reported on the main queue after a one-second delay.
- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary<NSKeyValueChangeKey, id> *)change
                       context:(void *)context {
  if (object == self.playerItem) {
    AVPlayerStatus status = [change[NSKeyValueChangeNewKey] integerValue];
    VeLivePipLog(LOG_TAG, @"loop player item status changed %d", (int)status);
    if (status == AVPlayerItemStatusFailed) {
      [self playerItemFailedToPlay];
    } else if (status == AVPlayerItemStatusReadyToPlay) {
      // NOTE(review): the 1s delay before declaring readiness is presumably
      // to let the first frames render before PiP starts — confirm intent.
      dispatch_after(
          dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1 * NSEC_PER_SEC)),
          dispatch_get_main_queue(), ^{
            [self playerItemIsReadyToPlay];
          });
    }
  }
}
|
|
2159
|
-
|
|
2160
|
-
// Detaches the KVO status observer and the play-to-end notification
// observer from the given item. Safe to call with nil.
- (void)removePlayerItemObserver:(AVPlayerItem *)playerItem {
  if (playerItem == nil) {
    return;
  }
  VeLivePipLog(LOG_TAG, @"loop player remove player item observer");
  [playerItem removeObserver:self
                  forKeyPath:NSStringFromSelector(@selector(status))];
  NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
  [center removeObserver:self
                    name:AVPlayerItemDidPlayToEndTimeNotification
                  object:playerItem];
}
|
|
2172
|
-
|
|
2173
|
-
// Attaches a KVO observer for `status` (with an initial callback) and a
// play-to-end notification observer to the given item. Safe to call with
// nil. Mirrors -removePlayerItemObserver:, which is keyed on the parameter.
- (void)addPlayerItemObserver:(AVPlayerItem *)playerItem {
  if (playerItem == nil) {
    return;
  }
  VeLivePipLog(LOG_TAG, @"loop player add player item observer");
  [playerItem addObserver:self
               forKeyPath:NSStringFromSelector(@selector(status))
                  options:(NSKeyValueObservingOptionNew |
                           NSKeyValueObservingOptionInitial)
                  context:nil];
  // FIX: observe the item passed in, not self.playerItem. The removal path
  // uses its parameter, so the add side must match or a call with a
  // different item would register the notification on the wrong object.
  // (At the current call site the two are identical, so this is
  // behavior-compatible.)
  [[NSNotificationCenter defaultCenter]
      addObserver:self
         selector:@selector(loopPlayerDidEndPlayNotifaction:)
             name:AVPlayerItemDidPlayToEndTimeNotification
           object:playerItem];
}
|
|
2189
|
-
|
|
2190
|
-
// Subscribes to app-activation and audio-interruption system notifications
// so looping playback can be resumed after either event.
- (void)addSystemNotifactionObserver {
  VeLivePipLog(LOG_TAG, @"loop player add system notifaction");
  NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
  [center addObserver:self
             selector:@selector(applicationDidBecomeActiveNotifaction)
                 name:UIApplicationDidBecomeActiveNotification
               object:nil];
  [center addObserver:self
             selector:@selector(audioInterruptNotifaction:)
                 name:AVAudioSessionInterruptionNotification
               object:nil];
}
|
|
2204
|
-
|
|
2205
|
-
// Unsubscribes from the system notifications registered in
// -addSystemNotifactionObserver.
- (void)removeSystemNotifactionObserver {
  VeLivePipLog(LOG_TAG, @"loop player remove system notifaction");
  NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
  [center removeObserver:self
                    name:UIApplicationDidBecomeActiveNotification
                  object:nil];
  [center removeObserver:self
                    name:AVAudioSessionInterruptionNotification
                  object:nil];
}
|
|
2216
|
-
|
|
2217
|
-
// Loops the template video: when our item reaches its end, seek back to
// time zero so playback appears continuous.
- (void)loopPlayerDidEndPlayNotifaction:(NSNotification *)notification {
  if (notification.object != self.playerItem) {
    return;
  }
  VeLivePipLog(LOG_TAG, @"loop player play to end, will seek to zero");
  if (@available(iOS 11.0, *)) {
    [self.playerItem seekToTime:kCMTimeZero
              completionHandler:^(BOOL finished){
              }];
  } else {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
    [self.playerItem seekToTime:kCMTimeZero];
#pragma clang diagnostic pop
  }
}
|
|
2232
|
-
|
|
2233
|
-
// Resumes playback when the app returns to the foreground, but only if the
// item had already reached the ready state.
- (void)applicationDidBecomeActiveNotifaction {
  VeLivePipLog(LOG_TAG, @"loop player play when active");
  if (self.status != AVPlayerStatusReadyToPlay) {
    return;
  }
  [self.player play];
}
|
|
2239
|
-
|
|
2240
|
-
// Resumes playback once an audio-session interruption ends. Interruptions
// raised only because the app was suspended are ignored.
- (void)audioInterruptNotifaction:(NSNotification *)notifaction {
  NSDictionary *info = [notifaction userInfo];
  if (@available(iOS 10.3, *)) {
    NSNumber *wasSuspended =
        [info objectForKey:AVAudioSessionInterruptionWasSuspendedKey];
    if ([wasSuspended boolValue]) {
      return;
    }
  }
  AVAudioSessionInterruptionType interruptType =
      (AVAudioSessionInterruptionType)[[info
          objectForKey:AVAudioSessionInterruptionTypeKey] unsignedIntegerValue];
  if (interruptType == AVAudioSessionInterruptionTypeEnded) {
    VeLivePipLog(LOG_TAG, @"loop player play when audio interrupt end");
    [self.player play];
  }
}
|
|
2256
|
-
|
|
2257
|
-
// Fallback path: resolves the template video shipped inside the SDK bundle
// (used when a dark video cannot be generated at runtime). Picks the
// landscape or portrait template based on the current video size.
- (void)getLocalDarkVideo:(void (^)(NSString *_Nullable videoPath,
                                    NSError *_Nullable error))completion {
  VeLivePipLog(LOG_TAG, @"loop player prepare player item use local bundle");
  BOOL isLandscape = self.videoSize.width > self.videoSize.height;
  NSString *resourceName =
      isLandscape ? @"pipmode_template_landspace" : @"pipmode_template";
  NSString *bundledName =
      [@"VELLiveDemo_Pull.bundle" stringByAppendingPathComponent:resourceName];
  NSString *videoPath =
      [[NSBundle bundleForClass:self.class] pathForResource:bundledName
                                                     ofType:@"mp4"];
  if (completion == nil) {
    return;
  }
  NSError *lookupError =
      (videoPath == nil) ? VeLivePipError(-1, @"video not found") : nil;
  completion(videoPath, lookupError);
}
|
|
2274
|
-
|
|
2275
|
-
// MARK: - Dark Video Create
// Produces (or reuses) a short dark placeholder video sized to
// self.videoSize, writing it with AVAssetWriter; on any failure falls back
// to the bundled template via -getLocalDarkVideo:. The whole pipeline is
// serialized through videoWritterLock (recursive); tryLock failures mean a
// write is already in flight and the request is silently dropped.
- (void)getOrCreateDarkVideo:(void (^)(NSString *_Nullable videoPath,
                                       NSError *_Nullable error))completion {
  /// Clean up the buffer left over from the last unfinished write.
  [self cancelAssetWritterWithCleanFile:YES];

  // Without a known video size there is nothing to render; use the bundle.
  if (CGSizeEqualToSize(self.videoSize, CGSizeZero)) {
    [self getLocalDarkVideo:completion];
    return;
  }
  if (![self.videoWritterLock tryLock]) {
    // A write is already in progress; caller gets no callback in this case.
    return;
  }
  NSString *videoPath = [self videoPath];
  if ([NSFileManager.defaultManager fileExistsAtPath:videoPath]) {
    // A previously generated file > 500 bytes is considered valid and reused;
    // anything smaller is treated as a corrupt leftover and deleted.
    if ([NSFileManager.defaultManager attributesOfItemAtPath:videoPath
                                                       error:nil]
            .fileSize > 500) {
      if (completion) {
        completion(videoPath, nil);
      }
      [self.videoWritterLock unlock];
      return;
    }
    [NSFileManager.defaultManager removeItemAtPath:videoPath error:nil];
  }

  if ([self setupAssetWriter:videoPath] && [self setupVideoInput]) {
    VeLivePipLog(LOG_TAG,
                 @"loop player prepare player item use asset writer %@",
                 videoPath);
    @try {
      __weak __typeof__(self) weakSelf = self;
      // 3 seconds at 1 fps -> 3 dark frames total.
      int videoSeconds = 3;
      int32_t videoFps = 1;
      CMTime startSampleTime = kCMTimeZero;
      __block CMTime endSampleTime = startSampleTime;
      __block int videoBufferIndex = 0;
      int32_t videoBufferCount = videoSeconds * videoFps;
      CGSize videoSize = self.videoSize;
      [self.writer startWriting];
      [self.writer startSessionAtSourceTime:startSampleTime];
      [self.videoInput
          requestMediaDataWhenReadyOnQueue:dispatch_queue_create(
                                               "com.pip.video.queue", NULL)
                                usingBlock:^{
        __strong __typeof__(weakSelf) self = weakSelf;

        if ([self.videoWritterLock tryLock]) {
          @try {
            while ([self.videoInput isReadyForMoreMediaData]) {
              videoBufferIndex = videoBufferIndex + 1;
              endSampleTime = CMTimeAdd(
                  startSampleTime, CMTimeMake(videoBufferIndex, videoFps));
              if (videoBufferIndex <= videoBufferCount) {
                // Append one synthesized dark frame per tick.
                CVPixelBufferRef darkBuffer =
                    [self.bufferTool createDarkFrameWithFrameSize:videoSize];
                [self.writerInputPixelBufferAdaptor
                       appendPixelBuffer:darkBuffer
                    withPresentationTime:endSampleTime];
                CVPixelBufferRelease(darkBuffer);
                VeLivePipLog(LOG_TAG, @"write buffer at index %d-%f",
                             videoBufferIndex,
                             CMTimeGetSeconds(endSampleTime));
              } else {
                // All frames written: close the session and finalize.
                VeLivePipLog(LOG_TAG, @"write buffer complete index %d - %f",
                             videoBufferIndex,
                             CMTimeGetSeconds(endSampleTime));
                [self.videoInput markAsFinished];
                [self.writer endSessionAtSourceTime:endSampleTime];
                [self.writer finishWritingWithCompletionHandler:^{
                  __strong __typeof__(weakSelf) self = weakSelf;
                  if ([self.videoWritterLock tryLock]) {
                    if (self.writer.status == AVAssetWriterStatusCompleted) {
                      VeLivePipLog(LOG_TAG, @"avasset writter end completed");
                      vel_sync_in_main_queue(^{
                        // Sanity-check the written file; > 20 bytes counts
                        // as success, otherwise fall back to the bundle.
                        CGFloat fileSize =
                            [NSFileManager.defaultManager
                                attributesOfItemAtPath:videoPath
                                                 error:nil]
                                .fileSize;
                        if (fileSize > 20) {
                          if (completion) {
                            completion(videoPath, nil);
                          }
                          [self cleanupWritter];
                        } else {
                          [self cleanupWritter];
                          [self getLocalDarkVideo:completion];
                        }
                      });
                    } else {
                      VeLivePipLog(LOG_TAG, @"avasset writter end error %@-%@",
                                   @(self.writer.status), self.writer.error);
                      [NSFileManager.defaultManager removeItemAtPath:videoPath
                                                               error:nil];
                      vel_sync_in_main_queue(^{
                        [self cleanupWritter];
                        [self getLocalDarkVideo:completion];
                      });
                    }
                    [self.videoWritterLock unlock];
                  }
                }];
              }
            }
          } @catch (NSException *exception) {
            // Writer threw mid-append: reset and use the bundled template.
            vel_sync_in_main_queue(^{
              [self cleanupWritter];
              [self getLocalDarkVideo:completion];
            });
          }
          [self.videoWritterLock unlock];
        }
      }];
    } @catch (NSException *exception) {
      [self cancelAssetWritterWithCleanFile:YES];
      [self getLocalDarkVideo:completion];
    }
  } else {
    // Writer or input could not be configured; fall back immediately.
    [self cleanupWritter];
    [self getLocalDarkVideo:completion];
  }
  [self.videoWritterLock unlock];
}
|
|
2434
|
-
|
|
2435
|
-
// Cancels any in-flight AVAssetWriter session, optionally deleting the
// partially written output file, then drops the writer objects.
// Uses tryLock so a writer callback already holding the (recursive) lock is
// not blocked; when the lock is contended, cancellation is skipped.
- (void)cancelAssetWritterWithCleanFile:(BOOL)cleanFile {
  if ([self.videoWritterLock tryLock]) {
    @try {
      if (self.videoInput != nil) {
        [self.videoInput markAsFinished];
      }
      if (self.writer != nil) {
        [self.writer
            endSessionAtSourceTime:CMTimeMakeWithSeconds(CACurrentMediaTime(),
                                                         1000000000)];
        [self.writer cancelWriting];
        VeLivePipLog(LOG_TAG, @"cancel avasset writter %@",
                     self.writer.outputURL);
        if (cleanFile) {
          NSURL *outputUrl = self.writer.outputURL;
          // FIX: use -path instead of -absoluteString. For a file URL,
          // absoluteString is "file:///..." which -fileExistsAtPath: never
          // matches, so the stale partial file was never deleted.
          if (outputUrl != nil &&
              [NSFileManager.defaultManager fileExistsAtPath:outputUrl.path]) {
            [NSFileManager.defaultManager removeItemAtURL:outputUrl error:nil];
          }
        }
      }
      [self cleanupWritter];
    } @catch (NSException *exception) {
      VeLivePipLog(LOG_TAG, @"cancel avasset writter exception %@", exception);
    }
    [self.videoWritterLock unlock];
  }
}
|
|
2464
|
-
|
|
2465
|
-
// Drops all writer-related objects under the writer lock. Skipped silently
// when the lock is contended (another writer operation is running).
- (void)cleanupWritter {
  if (![self.videoWritterLock tryLock]) {
    return;
  }
  self.videoInput = nil;
  self.writerInputPixelBufferAdaptor = nil;
  self.writer = nil;
  [self.videoWritterLock unlock];
}
|
|
2473
|
-
|
|
2474
|
-
// Creates the AVAssetWriter targeting videoPath (QuickTime container).
// Returns NO when the lock is contended or the writer cannot be created.
- (BOOL)setupAssetWriter:(NSString *)videoPath {
  if (![self.videoWritterLock tryLock]) {
    return NO;
  }
  NSError *writerError = nil;
  self.writer =
      [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:videoPath]
                                fileType:AVFileTypeQuickTimeMovie
                                   error:&writerError];
  BOOL succeeded = (writerError == nil);
  [self.videoWritterLock unlock];
  return succeeded;
}
|
|
2490
|
-
|
|
2491
|
-
// Configures the H.264 writer input (5 Mbps, CABAC, high profile) and its
// BGRA pixel-buffer adaptor, then attaches the input to the writer.
// Returns NO when the lock is contended or the writer rejects the input.
- (BOOL)setupVideoInput {
  if (![self.videoWritterLock tryLock]) {
    return NO;
  }
  NSDictionary *compressionSettings = @{
    AVVideoAverageBitRateKey : @(5 * 1000 * 1000),
    AVVideoProfileLevelKey : AVVideoProfileLevelH264HighAutoLevel,
    AVVideoAllowFrameReorderingKey : @YES,
    AVVideoMaxKeyFrameIntervalDurationKey : @(4.0f),
    AVVideoH264EntropyModeKey : AVVideoH264EntropyModeCABAC
  };

  NSMutableDictionary *outputSettings =
      [NSMutableDictionary dictionaryWithDictionary:@{
        AVVideoWidthKey : @((int)self.videoSize.width),
        AVVideoHeightKey : @((int)self.videoSize.height),
        AVVideoCompressionPropertiesKey : compressionSettings
      }];
  if (@available(iOS 11.0, *)) {
    outputSettings[AVVideoCodecKey] = AVVideoCodecTypeH264;
  } else {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
    outputSettings[AVVideoCodecKey] = AVVideoCodecH264;
#pragma clang diagnostic pop
  }
  self.videoInput =
      [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                         outputSettings:outputSettings];
  self.videoInput.expectsMediaDataInRealTime = YES;

  NSDictionary *pixelBufferAttributes = @{
    (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
    (id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES,
    (id)kCVPixelBufferWidthKey : @(self.videoSize.width),
    (id)kCVPixelBufferHeightKey : @(self.videoSize.height),
    (id)kCVPixelBufferBytesPerRowAlignmentKey : @(self.videoSize.width * 4)
  };
  self.writerInputPixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor
      assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.videoInput
                                 sourcePixelBufferAttributes:
                                     pixelBufferAttributes];

  BOOL inputAccepted = [self.writer canAddInput:self.videoInput];
  if (inputAccepted) {
    [self.writer addInput:self.videoInput];
  }
  [self.videoWritterLock unlock];
  return inputAccepted;
}
|
|
2542
|
-
|
|
2543
|
-
// Returns the on-disk destination for the generated dark video, creating
// Documents/vel_pip/dark_video/ if necessary. The file name encodes the
// current video dimensions so different sizes get different caches.
- (NSString *)videoPath {
  NSString *documentsDir = NSSearchPathForDirectoriesInDomains(
                               NSDocumentDirectory, NSUserDomainMask, YES)
                               .lastObject;
  NSString *directory =
      [documentsDir stringByAppendingPathComponent:@"vel_pip/dark_video"];
  BOOL isDirectory = NO;
  BOOL exists = [NSFileManager.defaultManager fileExistsAtPath:directory
                                                   isDirectory:&isDirectory];
  if (!exists || !isDirectory) {
    [NSFileManager.defaultManager createDirectoryAtPath:directory
                            withIntermediateDirectories:YES
                                             attributes:nil
                                                  error:nil];
  }
  NSString *fileName =
      [NSString stringWithFormat:@"dark_video_%d_%d.mp4",
                                 (int)self.videoSize.width,
                                 (int)self.videoSize.height];
  return [directory stringByAppendingPathComponent:fileName];
}
|
|
2564
|
-
@end
|
|
2565
|
-
|
|
2566
|
-
// MARK: - implementation VELPipAVPlayer
// AVPlayer subclass used purely as a PiP content source: always muted,
// never advancing at a visible rate, never routed to external screens.
@implementation VELPipAVPlayer
- (instancetype)init {
  if (self = [super init]) {
    // Keep rendering local; AirPlay/external screens must not pick this up.
    self.allowsExternalPlayback = NO;
    self.usesExternalPlaybackWhileExternalScreenIsActive = NO;
  }
  return self;
}

- (void)setRate:(float)rate {
  [self setMuted:YES];
  VeLivePipLog(LOG_TAG, @"loop player set rate %f", rate);
  if (rate != 0) {
    // Any play request is clamped to a near-zero rate: the player counts as
    // "playing" while the template video stays visually frozen — presumably
    // to keep the PiP pipeline alive without real playback; confirm intent.
    [super setRate:0.000000001];
  } else {
    [super setRate:0];
  }
}

// Real pause that bypasses the rate clamp in -setRate:.
- (void)velPause {
  [super setRate:0];
}
@end
|
|
2590
|
-
|
|
2591
|
-
// MARK: - implementation VELPipAVPlayerView
// A UIView whose backing layer is an AVPlayerLayer, used to host the
// looping template player.
@implementation VELPipAVPlayerView
+ (Class)layerClass {
  return [AVPlayerLayer class];
}

- (instancetype)initWithFrame:(CGRect)frame {
  self = [super initWithFrame:frame];
  if (self != nil) {
    self.backgroundColor = [UIColor blackColor];
  }
  return self;
}

// Typed accessor for the backing layer.
- (AVPlayerLayer *)playerLayer {
  return (AVPlayerLayer *)self.layer;
}
@end
|
|
2607
|
-
|
|
2608
|
-
// MARK: - implementation UIView (VELPipAutoSupport)
// Helpers that hide AVKit's internal player-chrome views inside an
// arbitrary hierarchy and resolve the owning view controller.
@implementation UIView (VELPipAutoSupport)
// Swizzles -addSubview: once so that views added later can also be hidden
// when the auto-hide flags are set on their superview.
+ (void)load {
  static dispatch_once_t onceToken;
  dispatch_once(&onceToken, ^{
    Class class = [self class];
    SEL originSelector = @selector(addSubview:);
    SEL newSelector = @selector(vel_autoHideSampleLayerSubview:);
    Method oriMethod = class_getInstanceMethod(class, originSelector);
    Method newMethod = class_getInstanceMethod(class, newSelector);
    if (newMethod) {
      BOOL isAddedMethod = class_addMethod(class, originSelector,
                                           method_getImplementation(newMethod),
                                           method_getTypeEncoding(newMethod));
      if (isAddedMethod) {
        // addSubview: was only inherited: install the original (possibly
        // inherited) IMP under the new selector so the swizzled method can
        // still call through. Falls back to a no-op IMP if none exists.
        IMP oriMethodIMP = method_getImplementation(oriMethod)
            ?: imp_implementationWithBlock(^(id selfObject){
               });
        const char *oriMethodTypeEncoding =
            method_getTypeEncoding(oriMethod) ?: "v@:";
        class_replaceMethod(class, newSelector, oriMethodIMP,
                            oriMethodTypeEncoding);
      } else {
        method_exchangeImplementations(oriMethod, newMethod);
      }
    }
  });
}

static char kAssociatedObjectKey_vel_autoHideSamplePlayerView;
// When enabled, immediately hides AVKit sample-player layer views in this
// subtree and keeps hiding matching views added later (via the swizzle).
- (void)setVel_autoHideSamplePlayerView:(BOOL)vel_autoHideSamplePlayerView {
  objc_setAssociatedObject(
      self, &kAssociatedObjectKey_vel_autoHideSamplePlayerView,
      @(vel_autoHideSamplePlayerView), OBJC_ASSOCIATION_RETAIN_NONATOMIC);
  if (vel_autoHideSamplePlayerView) {
    [self hideSamplePlayerView];
  }
}

- (BOOL)vel_autoHideSamplePlayerView {
  return [((NSNumber *)objc_getAssociatedObject(
      self, &kAssociatedObjectKey_vel_autoHideSamplePlayerView)) boolValue];
}

static char kAssociatedObjectKey_velAutoHideControlsView;
// Same mechanism for AVKit playback-controls views.
- (void)setVel_autoHideControlsView:(BOOL)vel_autoHideControlsView {
  objc_setAssociatedObject(self, &kAssociatedObjectKey_velAutoHideControlsView,
                           @(vel_autoHideControlsView),
                           OBJC_ASSOCIATION_RETAIN_NONATOMIC);
  if (vel_autoHideControlsView) {
    [self hideControlViewInSubviews];
  }
}

- (BOOL)vel_autoHideControlsView {
  return [((NSNumber *)objc_getAssociatedObject(
      self, &kAssociatedObjectKey_velAutoHideControlsView)) boolValue];
}

// Swizzled replacement for -addSubview:. The apparent recursion calls the
// ORIGINAL implementation (selectors were exchanged in +load), then
// re-applies the hide rules so late-added system views are caught.
- (void)vel_autoHideSampleLayerSubview:(UIView *)view {
  [self vel_autoHideSampleLayerSubview:view];
  if (self.vel_autoHideSamplePlayerView) {
    [self hideSamplePlayerView];
  }
  if (self.vel_autoHideControlsView) {
    [self hideControlViewInSubviews];
  }
}

// Hides this view if it is one of AVKit's private sample-player layer view
// classes; otherwise propagates the auto-hide flag down to all subviews.
// NOTE(review): class names are assembled from string fragments, presumably
// to avoid static scanning for private-API strings — confirm.
- (void)hideSamplePlayerView {
  NSString *layerName = [NSString
      stringWithFormat:@"%@%@%@", @"_AVS", @"implePlay", @"erLayerView"];
  NSString *otherLayerName =
      [NSString stringWithFormat:@"%@%@%@%@%@%@%@%@", @"A", @"VPict",
                                 @"ureInPic", @"tureSam", @"pleBufferDisp",
                                 @"layLaye", @"rHos", @"tView"];
  if ([@[ layerName, otherLayerName ]
          containsObject:NSStringFromClass(self.class)]) {
    self.hidden = YES;
    return;
  }
  for (UIView *v in self.subviews) {
    v.vel_autoHideSamplePlayerView = YES;
  }
}

// Same pattern for the playback-controls view class.
- (void)hideControlViewInSubviews {
  NSString *controlName = [NSString
      stringWithFormat:@"%@%@%@%@", @"AVPla", @"ybackCo", @"ntrol", @"sView"];
  if ([NSStringFromClass(self.class) isEqualToString:controlName]) {
    self.hidden = YES;
    return;
  }
  for (UIView *v in self.subviews) {
    v.vel_autoHideControlsView = YES;
  }
}

static char kAssociatedObjectKey_vel_viewController;
- (void)setVel_viewController:(UIViewController *)vel_viewController {
  // NOTE(review): OBJC_ASSOCIATION_ASSIGN — the stored pointer can dangle if
  // the controller deallocates first; existing behavior, kept as-is.
  objc_setAssociatedObject(self, &kAssociatedObjectKey_vel_viewController,
                           vel_viewController, OBJC_ASSOCIATION_ASSIGN);
}

// Returns the associated view controller, resolving it lazily by walking
// the responder chain and caching the result.
- (UIViewController *)vel_viewController {
  UIViewController *vc = (UIViewController *)objc_getAssociatedObject(
      self, &kAssociatedObjectKey_vel_viewController);
  if (vc == nil) {
    UIResponder *responder = self;
    while (responder && ![responder isKindOfClass:[UIViewController class]]) {
      responder = [responder nextResponder];
    }
    vc = (UIViewController *)responder;
    [self setVel_viewController:vc];
  }
  return vc;
}
@end
|
|
2726
|
-
#endif // VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
|
|
2727
|
-
|
|
2728
|
-
// MARK: - interface VELPipSampleBufferView
// Class extension: makes sampleLayer writable internally while the public
// header exposes it read-only.
@interface VELPipSampleBufferView ()
@property(nonatomic, strong, readwrite) AVSampleBufferDisplayLayer *sampleLayer;
@end
|
|
2732
|
-
|
|
2733
|
-
// MARK: - implementation VELPipSampleBufferView
// Hosts an AVSampleBufferDisplayLayer used as the PiP video surface.
// All sampleLayer access is guarded with @synchronized(self) because the
// layer can be rebuilt while frames are being enqueued.
@implementation VELPipSampleBufferView
- (instancetype)initWithFrame:(CGRect)frame {
  if (self = [super initWithFrame:frame]) {
    self.backgroundColor = UIColor.blackColor;
    [self rebuildSamplelayer];
  }
  return self;
}

- (void)setVideoGravity:(AVLayerVideoGravity)videoGravity {
  _videoGravity = videoGravity.copy;
  @synchronized(self) {
    self.sampleLayer.videoGravity = videoGravity;
  }
}

// Tears down the current display layer (if any) and installs a fresh one
// driven by a host-clock timebase running at rate 1 from time zero.
- (void)rebuildSamplelayer {
  @synchronized(self) {
    if (self.sampleLayer) {
      [self.sampleLayer flush];
      [self.sampleLayer stopRequestingMediaData];
      [self.sampleLayer removeFromSuperlayer];
    }

    self.sampleLayer = [[AVSampleBufferDisplayLayer alloc] init];
    self.sampleLayer.videoGravity = self.videoGravity;
    self.sampleLayer.opaque = YES;
    CMTimebaseRef timebase;
    CMTimebaseCreateWithSourceClock(nil, CMClockGetHostTimeClock(), &timebase);
    CMTimebaseSetTime(timebase, kCMTimeZero);
    CMTimebaseSetRate(timebase, 1);
    self.sampleLayer.controlTimebase = timebase;
    // The layer retains the timebase; release our create-rule reference.
    if (timebase) {
      CFRelease(timebase);
    }
    self.sampleLayer.frame = self.bounds;
    [self.layer addSublayer:self.sampleLayer];
  }
}

- (void)layoutSubviews {
  [super layoutSubviews];
  @synchronized(self) {
    // Keep the display layer centered and sized to the view.
    self.sampleLayer.frame = self.bounds;
    self.sampleLayer.position =
        CGPointMake(CGRectGetMidX(self.bounds), CGRectGetMidY(self.bounds));
  }
}

@end
|
|
2784
|
-
|
|
2785
|
-
// MARK: - implementation VELPipContentSourceProvider
|
|
2786
|
-
@implementation VELPipContentSourceProvider
|
|
2787
|
-
// Designated initializer: sets up buffers, queues and the sample view.
- (instancetype)init {
  self = [super init];
  if (self != nil) {
    [self initContentSource];
  }
  return self;
}
|
|
2793
|
-
|
|
2794
|
-
// Ensures all timers/buffers are released even if -destroy was never
// called explicitly.
- (void)dealloc {
  [self destroy];
}
|
|
2797
|
-
|
|
2798
|
-
// Tears down the dark-frame timer, cached buffers and the sample view.
// Called from -dealloc, hence the synchronous main-queue hop.
// NOTE(review): the block captures self; from dealloc this is only safe
// because vel_sync_in_main_queue runs it synchronously — confirm.
- (void)destroy {
  VeLivePipLog(LOG_TAG, @"content source provider destroy");
  [self stopDarkFrameTimer];
  [self releaseDarkFrame];
  [self releaseAllQueueCachedBuffers];
  vel_sync_in_main_queue(^{
    [self.sampleBufferView removeFromSuperview];
  });
}
|
|
2807
|
-
// Queue-specific key used to detect whether the current thread is already
// running on bufferQueue (see safeAsync/safeSyncInBufferQueue:).
static char kVELPipBufferQueueSpecificKey;
// One-time setup of locks, caches, the sample-buffer view and the serial
// buffer queue. Starts the synthetic black-frame timer so the PiP surface
// has content until a real video source feeds frames.
- (void)initContentSource {
  VeLivePipLog(LOG_TAG, @"content source provider init");
  // Defaults; the real video size is updated when frames arrive.
  self.videoSize = CGSizeMake(360, 480);
  self.videoRectKeyScale = 1000000;
  self.bufferLock = [[NSRecursiveLock alloc] init];
  self.cachedQueueBuffers = [[NSMutableDictionary alloc] init];
  self.bufferTool = [[VELPipPixelBufferTool alloc] init];
  self.sampleBufferView =
      [[VELPipSampleBufferView alloc] initWithFrame:CGRectMake(0, 0, 360, 480)];
  self.sampleDisplayLayer = self.sampleBufferView.sampleLayer;
  self.bufferQueue =
      dispatch_queue_create("com.velpip.buffer.queue", DISPATCH_QUEUE_SERIAL);
  dispatch_queue_set_specific(self.bufferQueue, &kVELPipBufferQueueSpecificKey,
                              &kVELPipBufferQueueSpecificKey, NULL);
  [self startDarkFrameTimer];
}
|
|
2824
|
-
|
|
2825
|
-
// Runs the block on bufferQueue; executes inline when already on that
// queue to avoid a needless re-dispatch.
- (void)safeAsyncInBufferQueue:(void (^)(void))block {
  BOOL onBufferQueue =
      dispatch_get_specific(&kVELPipBufferQueueSpecificKey) != NULL;
  if (onBufferQueue) {
    block();
  } else {
    dispatch_async(self.bufferQueue, block);
  }
}
|
|
2832
|
-
|
|
2833
|
-
// Synchronous variant of -safeAsyncInBufferQueue:. Executing inline when
// already on bufferQueue also avoids a dispatch_sync self-deadlock.
- (void)safeSyncInBufferQueue:(void (^)(void))block {
  BOOL onBufferQueue =
      dispatch_get_specific(&kVELPipBufferQueueSpecificKey) != NULL;
  if (onBufferQueue) {
    block();
  } else {
    dispatch_sync(self.bufferQueue, block);
  }
}
|
|
2840
|
-
|
|
2841
|
-
// Entry point for externally supplied video frames.
- (void)enqueuePixelBuffer:(CVPixelBufferRef)pixelBuffer
                 videoRect:(CGRect)videoRect {
  if (pixelBuffer == NULL) {
    return;
  }
  // An external frame has arrived: stop generating internal black frames.
  [self stopDarkFrameTimer];
  [self _enqueuePixelBuffer:pixelBuffer videoRect:videoRect];
}
|
|
2851
|
-
|
|
2852
|
-
// Entry point for externally supplied CMSampleBuffers; forwards the image
// buffer to the pixel-buffer path.
- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer
                  videoRect:(CGRect)videoRect {
  if (sampleBuffer == NULL) {
    return;
  }
  // An external frame has arrived: stop generating internal black frames.
  [self stopDarkFrameTimer];
  // NOTE(review): CMSampleBufferGetImageBuffer can return NULL for
  // non-video samples; presumably _enqueuePixelBuffer: guards — confirm.
  [self _enqueuePixelBuffer:CMSampleBufferGetImageBuffer(sampleBuffer)
                  videoRect:videoRect];
}
|
|
2863
|
-
|
|
2864
|
-
// Lazily creates the cached dark CVPixelBuffer sized to self.videoSize.
// Pass force=YES to discard and rebuild an existing frame (e.g. after a
// size change).
- (void)setupDarkFrame:(BOOL)force {
  if (force) {
    VeLivePipLog(LOG_TAG, @"content source provider force rebuild dark frame");
    [self releaseDarkFrame];
  }
  [self.bufferLock lock];
  if (_darkPixelBuffer != NULL) {
    // FIX: the early return previously left bufferLock locked. On this
    // recursive lock that permanently raises the owner's lock count, so any
    // other thread calling lock later (timer handler, releaseDarkFrame)
    // would block forever. Balance the lock before returning.
    [self.bufferLock unlock];
    return;
  }
  VeLivePipLog(LOG_TAG, @"content source provider build dark frame");
  _darkPixelBuffer =
      [self.bufferTool createDarkFrameWithFrameSize:self.videoSize];
  [self.bufferLock unlock];
}
|
|
2878
|
-
|
|
2879
|
-
// Releases the cached dark CVPixelBuffer (if any) under the buffer lock.
- (void)releaseDarkFrame {
  [self.bufferLock lock];
  if (_darkPixelBuffer == NULL) {
    [self.bufferLock unlock];
    return;
  }
  VeLivePipLog(LOG_TAG, @"content source provider release dark frame");
  CVPixelBufferRelease(_darkPixelBuffer);
  _darkPixelBuffer = NULL;
  [self.bufferLock unlock];
}
|
|
2888
|
-
|
|
2889
|
-
// Starts (or restarts) a 15 fps GCD timer that feeds the cached dark frame
// into the display queue until a real video source takes over.
// All state changes are serialized through bufferLock (recursive, so the
// nested stop/setup calls below are safe on the same thread).
- (void)startDarkFrameTimer {
  [self.bufferLock lock];
  if (_darkFrameTimer != NULL) {
    [self stopDarkFrameTimer];
  }

  if (_darkFrameQueue == NULL) {
    _darkFrameQueue =
        dispatch_queue_create("com.pip.dark.queue", DISPATCH_QUEUE_SERIAL);
  }

  // Make sure a dark frame exists before the first tick.
  [self setupDarkFrame:NO];
  VeLivePipLog(LOG_TAG,
               @"content source provider start dark frame timer with 15fps");
  _darkFrameTimer =
      dispatch_source_create(DISPATCH_SOURCE_TYPE_TIMER, 0, 0, _darkFrameQueue);
  dispatch_time_t start =
      dispatch_walltime(NULL, (int64_t)(0.0 * NSEC_PER_SEC));
  uint64_t interval = (uint64_t)(1 / 15.0 * NSEC_PER_SEC);
  dispatch_source_set_timer(_darkFrameTimer, start, interval, 0);
  __weak __typeof__(self) weakSelf = self;
  dispatch_source_set_event_handler(_darkFrameTimer, ^{
    __strong __typeof__(weakSelf) self = weakSelf;
    if (self == nil) {
      return;
    }
    [self.bufferLock lock];
    if (self.darkPixelBuffer != NULL) {
      [self _enqueuePixelBuffer:self.darkPixelBuffer
                      videoRect:VELPixtureInPictureFullVideoRect];
    }
    [self.bufferLock unlock];
  });
  dispatch_resume(_darkFrameTimer);
  [self.bufferLock unlock];
}
|
|
2925
|
-
|
|
2926
|
-
// Cancels and drops the dark-frame timer, if one is running.
- (void)stopDarkFrameTimer {
  if (_darkFrameTimer == NULL) {
    return;
  }
  VeLivePipLog(LOG_TAG, @"content source provider stop dark frame timer");
  dispatch_source_cancel(_darkFrameTimer);
  _darkFrameTimer = NULL;
}
|
|
2933
|
-
|
|
2934
|
-
// Synchronously drains and releases every cached per-source buffer on the
// buffer queue, under the buffer lock.
- (void)releaseAllQueueCachedBuffers {
  [self safeSyncInBufferQueue:^{
    [self.bufferLock lock];
    NSArray *allBuffers = self.cachedQueueBuffers.allValues;
    for (VELPipCachedQueueBuffer *cached in allBuffers) {
      [cached releaseBuffer];
    }
    [self.cachedQueueBuffers removeAllObjects];
    [self.bufferLock unlock];
  }];
}
|
|
2945
|
-
|
|
2946
|
-
// Caches the latest pixel buffer for the source identified by videoRect.
// An existing entry is only replaced once it is older than one frame
// interval; fresher incoming frames are dropped.
// NOTE(review): dropping the new pixelBuffer when the cached entry is still
// fresh looks like deliberate per-source fps throttling — confirm intent.
- (void)cacheQueueBuffer:(CVPixelBufferRef)pixelBuffer
               videoRect:(CGRect)videoRect {
  [self.bufferLock lock];
  NSString *key = [VELPipCachedQueueBuffer getKeyWithVideoRect:videoRect];
  VELPipCachedQueueBuffer *buffer = [self.cachedQueueBuffers objectForKey:key];
  /// release old
  if (buffer != nil) {
    if (CACurrentMediaTime() - buffer.lastUpdateTime > self.fpsInterval) {
      [buffer releaseBuffer];
      [self.cachedQueueBuffers removeObjectForKey:key];
      buffer = [[VELPipCachedQueueBuffer alloc] initWithBuffer:pixelBuffer
                                                     videoRect:videoRect];
    }
  } else {
    buffer = [[VELPipCachedQueueBuffer alloc] initWithBuffer:pixelBuffer
                                                   videoRect:videoRect];
  }
  [self.cachedQueueBuffers setObject:buffer forKey:key];
  [self.bufferLock unlock];
}
|
|
2966
|
-
|
|
2967
|
-
/// Returns YES when every video source has a cached buffer and at least one
/// of them is older than the fps interval, i.e. a new mixed frame is due.
- (BOOL)needUpdateQueuePixelBuffer {
  BOOL shouldUpdate = NO;
  [self.bufferLock lock];
  if (self.cachedQueueBuffers.count == self.videoSourceCount) {
    NSTimeInterval now = CACurrentMediaTime();
    for (VELPipCachedQueueBuffer *cached in self.cachedQueueBuffers.allValues) {
      if (now - cached.lastUpdateTime > self.fpsInterval) {
        shouldUpdate = YES;
        break;
      }
    }
  }
  [self.bufferLock unlock];
  return shouldUpdate;
}
|
|
2985
|
-
|
|
2986
|
-
/// Composites all cached per-rect pixel buffers into a single BGRA frame
/// sized to the current video size, upscaled so the longer edge is at least
/// 2048 px. Returns a +1 retained pixel buffer the caller must release, or
/// NULL on failure.
- (CVPixelBufferRef)createPixelBufferFromAllCachedQueueBuffer {
  CVPixelBufferRef mixPixelBuffer = NULL;
  [self.bufferLock lock];
  CGSize videoSize = self.videoSize;
  int videoWidth = videoSize.width;
  int videoHeight = videoSize.height;
  int minFrameSize = 2048;
  // Scale small frames up so the longer edge reaches minFrameSize.
  // The (CGFloat) cast is essential: `minFrameSize / videoWidth` would be
  // integer division (e.g. 2048 / 1280 == 1) and distort the aspect ratio.
  if (videoWidth > videoHeight && videoWidth < minFrameSize) {
    CGFloat scale = (CGFloat)minFrameSize / videoWidth;
    videoWidth = minFrameSize;
    videoHeight = videoHeight * scale;
  } else if (videoHeight > videoWidth && videoHeight < minFrameSize) {
    CGFloat scale = (CGFloat)minFrameSize / videoHeight;
    videoHeight = minFrameSize;
    videoWidth = videoWidth * scale;
  }
  mixPixelBuffer = [self.bufferTool createPixelBuffer:kCVPixelFormatType_32BGRA
                                               heigth:videoHeight
                                                width:videoWidth];
  if (mixPixelBuffer) {
    CVPixelBufferLockBaseAddress(mixPixelBuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(mixPixelBuffer);
    if (pxdata == NULL) {
      // Balance the lock before releasing the buffer on this error path.
      CVPixelBufferUnlockBaseAddress(mixPixelBuffer, 0);
      CVPixelBufferRelease(mixPixelBuffer);
      [self.bufferLock unlock];
      return NULL;
    }
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    uint32_t bitmapInfo = kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Host;
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(mixPixelBuffer);
    CGContextRef context =
        CGBitmapContextCreate(pxdata, videoWidth, videoHeight, 8, bytesPerRow,
                              colorSpace, bitmapInfo);
    if (context == NULL) {
      CVPixelBufferUnlockBaseAddress(mixPixelBuffer, 0);
      CVPixelBufferRelease(mixPixelBuffer);
      CGColorSpaceRelease(colorSpace);
      [self.bufferLock unlock];
      return NULL;
    }

    // Draw each cached source into its sub-rect of the mixed frame.
    for (VELPipCachedQueueBuffer *cachedBuffer in self.cachedQueueBuffers
             .allValues) {
      CGImageRef imageRef = NULL;
      VTCreateCGImageFromCVPixelBuffer(cachedBuffer.pixelBuffer, NULL,
                                       &imageRef);
      if (imageRef != NULL) {
        CGContextDrawImage(
            context,
            [cachedBuffer
                convertRectFrom:CGRectMake(0, 0, videoWidth, videoHeight)],
            imageRef);
        CGImageRelease(imageRef);
      }
    }
    CVPixelBufferUnlockBaseAddress(mixPixelBuffer, 0);
    CGColorSpaceRelease(colorSpace);
    CGContextRelease(context);
  }
  [self.bufferLock unlock];
  return mixPixelBuffer;
}
|
|
3049
|
-
|
|
3050
|
-
/// Retains `pixelBuffer` and hands it to the buffer queue asynchronously.
/// A full-rect frame (videoRect == VELPixtureInPictureFullVideoRect) clears
/// all cached sub-rect buffers and is displayed directly; a sub-rect frame is
/// cached and, when the mixed frame is due, the cached buffers are
/// composited and displayed.
- (void)_enqueuePixelBuffer:(CVPixelBufferRef)pixelBuffer
                  videoRect:(CGRect)videoRect {
  if (pixelBuffer == NULL) {
    return;
  }
  // Retain before the async hop; the block releases its reference when done.
  __block CVPixelBufferRef asyncBuffer = CVPixelBufferRetain(pixelBuffer);
  [self safeAsyncInBufferQueue:^{
    if (CGRectEqualToRect(videoRect, VELPixtureInPictureFullVideoRect)) {
      // Full-frame source: drop any partial caches and show it as-is.
      [self releaseAllQueueCachedBuffers];
      [self _enqueueVideoRectPixelBuffer:asyncBuffer];
      CVPixelBufferRelease(asyncBuffer);
    } else {
      [self cacheQueueBuffer:asyncBuffer videoRect:videoRect];
      CVPixelBufferRelease(asyncBuffer);
      if ([self needUpdateQueuePixelBuffer]) {
        // Mix all cached sub-rect buffers into one frame and display it.
        CVPixelBufferRef pixelBuffer =
            [self createPixelBufferFromAllCachedQueueBuffer];
        [self _enqueueVideoRectPixelBuffer:pixelBuffer];
        CVPixelBufferRelease(pixelBuffer);
      } else {
        // Frame intentionally dropped: the mix is not yet due per the
        // fps interval.
        // VeLivePipLog(LOG_TAG, @"drop mix video frame with fps
        // no need update");
      }
    }
  }];
}
|
|
3076
|
-
|
|
3077
|
-
/// Wraps `pixelBuffer` in a display-immediately sample buffer and forwards
/// it to the sample display layer, then releases the wrapper.
- (void)_enqueueVideoRectPixelBuffer:(CVPixelBufferRef)pixelBuffer {
  CMSampleBufferRef sampleBuffer =
      [self.bufferTool sampleBufferFromPixelBuffer:pixelBuffer
                                               fps:kCMTimeInvalid
                                               pts:kCMTimeInvalid];
  if (sampleBuffer == NULL) {
    return;
  }
  [self _enqueueDisplaySampleBuffer:sampleBuffer];
  CFRelease(sampleBuffer);
}
|
|
3087
|
-
|
|
3088
|
-
/// Enqueues `sampleBuffer` on the display layer. If the layer has entered
/// the failed rendering state it is flushed; for error -11847 (media
/// services were reset, e.g. after a background wake) the sample layer is
/// rebuilt synchronously on the main queue and the change is reported via
/// the `sampleDisplayLayerChanged` callback.
- (void)_enqueueDisplaySampleBuffer:(CMSampleBufferRef)sampleBuffer {
  if (sampleBuffer) {
    [self.bufferLock lock];
    [_sampleDisplayLayer enqueueSampleBuffer:sampleBuffer];
    if (_sampleDisplayLayer.status ==
        AVQueuedSampleBufferRenderingStatusFailed) {
      [_sampleDisplayLayer flush];
      // Woken up from the background: restart rendering by rebuilding the
      // sample layer.
      if (-11847 == _sampleDisplayLayer.error.code) {
        VeLivePipLog(LOG_TAG, @"content source provider rebuild sampleLayer");
        vel_sync_in_main_queue(^{
          [self.sampleBufferView rebuildSamplelayer];
          self.sampleDisplayLayer = self.sampleBufferView.sampleLayer;
          if (self.sampleDisplayLayerChanged) {
            self.sampleDisplayLayerChanged(self);
          }
        });
      }
    }
    [self.bufferLock unlock];
  }
}
|
|
3111
|
-
|
|
3112
|
-
// MARK: - Setter
|
|
3113
|
-
/// Stores a copy of the gravity and forwards it to the display layer.
- (void)setVideoGravity:(AVLayerVideoGravity)videoGravity {
  _videoGravity = [videoGravity copy];
  self.sampleDisplayLayer.videoGravity = videoGravity;
}
|
|
3117
|
-
|
|
3118
|
-
/// Updates the video size; on a real change, logs the transition and
/// regenerates the dark placeholder frame at the new dimensions.
- (void)setVideoSize:(CGSize)videoSize {
  if (CGSizeEqualToSize(_videoSize, videoSize)) {
    return;
  }
  VeLivePipLog(LOG_TAG,
               @"content source provider video size changed from %@ to %@",
               NSStringFromCGSize(_videoSize), NSStringFromCGSize(videoSize));
  _videoSize = videoSize;
  [self setupDarkFrame:YES];
}
|
|
3127
|
-
|
|
3128
|
-
@end
|
|
3129
|
-
|
|
3130
|
-
@implementation UIViewController (VELPipTopViewController)

/// Walks the key window's controller hierarchy to find the top-most visible
/// view controller, following the chain of presented controllers. If the
/// result is a UIAlertController, falls back to resolving from the root
/// controller again.
+ (UIViewController *)vel_pipTopViewController {
  UIViewController *resultVC;
  resultVC =
      [self _velPipTopViewController:[[[UIApplication sharedApplication]
                                         keyWindow] rootViewController]];
  while (resultVC.presentedViewController) {
    resultVC = [self _velPipTopViewController:resultVC.presentedViewController];
  }
  if ([resultVC isKindOfClass:[UIAlertController class]]) {
    resultVC =
        [self _velPipTopViewController:[[[UIApplication sharedApplication]
                                           keyWindow] rootViewController]];
  }
  return resultVC;
}

/// Resolves container controllers: descends into a navigation controller's
/// top controller or a tab controller's selected controller; any other
/// controller is returned as-is. (The original trailing `return nil;` was
/// unreachable — every branch above returns — and has been removed.)
+ (UIViewController *)_velPipTopViewController:(UIViewController *)vc {
  if ([vc isKindOfClass:[UINavigationController class]]) {
    return [self
        _velPipTopViewController:[(UINavigationController *)vc
                                     topViewController]];
  }
  if ([vc isKindOfClass:[UITabBarController class]]) {
    return [self
        _velPipTopViewController:[(UITabBarController *)vc
                                     selectedViewController]];
  }
  return vc;
}
@end
|
|
3160
|
-
|
|
3161
|
-
@implementation VELPipPixelBufferTool

/// Designated initializer: prepares the pool cache and its recursive lock.
- (instancetype)init {
  if (self = [super init]) {
    self.pixelBufferPoolDict =
        [[NSMutableDictionary alloc] initWithCapacity:10];
    self.lock = [[NSRecursiveLock alloc] init];
  }
  return self;
}

- (void)dealloc {
  [self releaseMemory];
}

/// Releases every cached CVPixelBufferPool and empties the cache.
/// Best-effort: skipped entirely if the lock is currently held elsewhere.
- (void)releaseMemory {
  if ([self.lock tryLock]) {
    NSDictionary<NSString *, NSValue *> *bufferPool =
        self.pixelBufferPoolDict.copy;
    [self.pixelBufferPoolDict removeAllObjects];
    [bufferPool enumerateKeysAndObjectsUsingBlock:^(NSString *_Nonnull key,
                                                    NSValue *_Nonnull obj,
                                                    BOOL *_Nonnull stop) {
      CVPixelBufferPoolRef pixelBufferPool =
          (CVPixelBufferPoolRef)[obj pointerValue];
      CVPixelBufferPoolRelease(pixelBufferPool);
    }];
    [self.lock unlock];
  }
}

// MARK: - Dark PixelBuffer

/// Creates a +1 retained, all-black BGRA pixel buffer of the given size.
/// Returns NULL if the buffer could not be allocated.
- (CVPixelBufferRef)createDarkFrameWithFrameSize:(CGSize)frameSize {
  CVPixelBufferRef pxbuffer = [self createPixelBuffer:kCVPixelFormatType_32BGRA
                                               heigth:frameSize.height
                                                width:frameSize.width];
  if (pxbuffer == NULL) {
    return NULL;
  }

  CVPixelBufferLockBaseAddress(pxbuffer, 0);
  void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
  CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
  CGContextRef context = CGBitmapContextCreate(
      pxdata, frameSize.width, frameSize.height, 8,
      CVPixelBufferGetBytesPerRow(pxbuffer), rgbColorSpace,
      (CGBitmapInfo)kCGBitmapByteOrder32Little |
          kCGImageAlphaPremultipliedFirst);
  // Fill the whole frame with opaque black.
  CGContextSetRGBFillColor(context, 0, 0, 0, 1);
  CGContextFillRect(context,
                    CGRectMake(0, 0, frameSize.width, frameSize.height));
  CGColorSpaceRelease(rgbColorSpace);
  CGContextRelease(context);
  CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
  return pxbuffer;
}

/// Wraps a pixel buffer in a CMSampleBuffer marked for immediate display.
/// `fps` is used as the sample duration and `pts` as presentation/decode
/// timestamps (pass kCMTimeInvalid for "display now"). Returns a +1
/// retained sample buffer the caller must CFRelease, or NULL on failure.
- (CMSampleBufferRef)sampleBufferFromPixelBuffer:(CVPixelBufferRef)pixelBuffer
                                             fps:(CMTime)fps
                                             pts:(CMTime)pts {
  if (!pixelBuffer) {
    return NULL;
  }
  CVPixelBufferLockBaseAddress(pixelBuffer, 0);
  CMSampleTimingInfo timing = {fps, pts, pts};
  CMVideoFormatDescriptionRef videoInfo = NULL;
  OSStatus result = CMVideoFormatDescriptionCreateForImageBuffer(
      NULL, pixelBuffer, &videoInfo);
  if (result != noErr) {
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    return NULL;
  }

  CMSampleBufferRef sampleBuffer = NULL;
  result = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer,
                                              true, NULL, NULL, videoInfo,
                                              &timing, &sampleBuffer);
  if (result != noErr || sampleBuffer == NULL) {
    if (videoInfo) {
      CFRelease(videoInfo);
      videoInfo = NULL;
    }
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    return NULL;
  }
  if (videoInfo) {
    CFRelease(videoInfo);
    videoInfo = NULL;
  }
  // Mark the sample so AVSampleBufferDisplayLayer shows it immediately.
  CFArrayRef attachments =
      CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, YES);
  CFMutableDictionaryRef dict =
      (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);
  CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately,
                       kCFBooleanTrue);
  CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
  return sampleBuffer;
}

/// Creates a zeroed pixel buffer, preferring the pool-backed path and
/// falling back to a direct CVPixelBufferCreate. Returns a +1 retained
/// buffer or NULL.
- (CVPixelBufferRef)createPixelBuffer:(OSType)type
                               heigth:(int)height
                                width:(int)width {
  CVPixelBufferRef buffer = [self createPixelBufferFromPool:type
                                                     heigth:height
                                                      width:width];
  if (buffer == NULL) {
    buffer = [self createPixelBufferNormalWithType:type
                                            heigth:height
                                             width:width];
  }
  return buffer;
}

/// Fetches (or lazily creates and caches) a CVPixelBufferPool keyed by
/// format/size, then vends a zeroed buffer from it. Returns NULL if the
/// lock is contended or pool creation failed; the caller then falls back
/// to the non-pooled path.
- (CVPixelBufferRef)createPixelBufferFromPool:(OSType)type
                                       heigth:(int)height
                                        width:(int)width {
  NSString *poolKey = [NSString
      stringWithFormat:@"%u_%d_%d", (unsigned int)type, height, width];
  CVPixelBufferPoolRef pixelBufferPool = NULL;
  CVPixelBufferRef buffer = NULL;

  if ([self.lock tryLock]) {
    NSValue *bufferPoolAddress =
        [self.pixelBufferPoolDict objectForKey:poolKey];
    if (!bufferPoolAddress) {
      pixelBufferPool = [self createPixelBufferPool:type
                                             heigth:height
                                              width:width];
      if (pixelBufferPool != NULL) {
        bufferPoolAddress = [NSValue valueWithPointer:pixelBufferPool];
        [self.pixelBufferPoolDict setValue:bufferPoolAddress forKey:poolKey];
      }
    } else {
      pixelBufferPool = (CVPixelBufferPoolRef)[bufferPoolAddress pointerValue];
    }
    [self.lock unlock];
  }

  if (pixelBufferPool != NULL) {
    CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool,
                                       &buffer);
    [self cleanPixelBuffer:buffer];
  }
  return buffer;
}

/// Creates a zeroed pixel buffer directly (no pool). Returns a +1 retained
/// buffer or NULL on failure.
- (CVPixelBufferRef)createPixelBufferNormalWithType:(OSType)type
                                             heigth:(int)height
                                              width:(int)width {
  NSDictionary *attributes = [self getPixelBufferAttributes:type
                                                     heigth:height
                                                      width:width];
  CVPixelBufferRef pixelBuffer = NULL;
  CVReturn status =
      CVPixelBufferCreate(kCFAllocatorDefault, width, height, type,
                          (__bridge CFDictionaryRef)attributes, &pixelBuffer);
  if (status != kCVReturnSuccess) {
    return NULL;
  }
  [self cleanPixelBuffer:pixelBuffer];
  return pixelBuffer;
}

/// Zero-fills the buffer's backing memory (pool buffers may be recycled
/// with stale contents).
- (void)cleanPixelBuffer:(CVPixelBufferRef)pixelBuffer {
  if (pixelBuffer != NULL) {
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pixelBuffer);
    size_t dataSize = CVPixelBufferGetDataSize(pixelBuffer);
    memset(pxdata, 0, dataSize);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
  }
}

/// Creates a CVPixelBufferPool for the given format/size and primes it with
/// one throwaway buffer to verify it works. Returns a +1 retained pool, or
/// NULL on failure.
- (CVPixelBufferPoolRef)createPixelBufferPool:(OSType)type
                                       heigth:(int)height
                                        width:(int)width {
  CVPixelBufferPoolRef pool = NULL;

  NSDictionary *attributes = [self getPixelBufferAttributes:type
                                                     heigth:height
                                                      width:width];

  CVReturn ret = CVPixelBufferPoolCreate(
      kCFAllocatorDefault, NULL, (__bridge CFDictionaryRef)attributes, &pool);

  if (ret != kCVReturnSuccess) {
    return NULL;
  }

  CVPixelBufferRef buffer = NULL;
  ret = CVPixelBufferPoolCreatePixelBuffer(NULL, pool, &buffer);
  if (ret != kCVReturnSuccess) {
    // The pool is unusable: release it instead of leaking it (the original
    // code returned NULL here without releasing the freshly created pool).
    CVPixelBufferPoolRelease(pool);
    return NULL;
  }
  if (buffer != NULL) {
    CVPixelBufferRelease(buffer);
  }
  return pool;
}

/// Builds the attribute dictionary shared by the pool and direct creation
/// paths: GL/CG compatible, 64-byte row alignment, IOSurface-backed.
- (NSDictionary *)getPixelBufferAttributes:(OSType)type
                                    heigth:(int)height
                                     width:(int)width {
  return @{
    (__bridge NSString *)kCVPixelBufferOpenGLCompatibilityKey : @YES,
    (__bridge NSString *)kCVPixelBufferCGImageCompatibilityKey : @YES,
    (__bridge NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES,
    (__bridge NSString *)kCVPixelBufferBytesPerRowAlignmentKey : @64,
    (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(type),
    (__bridge NSString *)kCVPixelBufferWidthKey : @(width),
    (__bridge NSString *)kCVPixelBufferHeightKey : @(height),
    (__bridge NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{},
  };
}
@end
|
|
3390
|
-
|
|
3391
|
-
/// Error domain for picture-in-picture failures (declared extern in the
/// public header).
NSString *VeLivePictureInPictureErrorDomain =
    @"VeLivePictureInPictureErrorDomain";
/// Unit rect marking a frame that covers the full video area, i.e. no
/// sub-rect compositing is needed.
CGRect VELPixtureInPictureFullVideoRect = {0, 0, 1, 1};
|