node-mac-recorder 2.21.51 → 2.21.52
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/settings.local.json +2 -1
- package/output/temp_cursor_1764800554355.json +1 -0
- package/output/temp_cursor_1764800784424.json +1 -0
- package/package.json +1 -1
- package/scripts/camera-sync.js +74 -0
- package/scripts/full-sync.js +131 -0
- package/src/camera_recorder.mm +240 -144
- package/src/camera_recorder.mm.backup +863 -0
- package/tasks-1.md +231 -0
- package/tasks-2.md +220 -0
- package/tasks-3.md +291 -0
|
@@ -0,0 +1,863 @@
|
|
|
1
|
+
#import <AVFoundation/AVFoundation.h>
|
|
2
|
+
#import <CoreMedia/CoreMedia.h>
|
|
3
|
+
#import <CoreVideo/CoreVideo.h>
|
|
4
|
+
#import <Foundation/Foundation.h>
|
|
5
|
+
#import "logging.h"
|
|
6
|
+
#import "sync_timeline.h"
|
|
7
|
+
|
|
8
|
+
// Wall-clock time (CFAbsoluteTimeGetCurrent) captured when the file output
// reports that recording actually began; 0.0 while no recording is active.
static double g_cameraStartTimestamp = 0.0;

/// Builds an NSError in the "CameraRecorder" domain with the given code.
/// Falls back to the generic "Camera error" description when message is nil.
static NSError *MRCameraError(NSInteger code, NSString *message) {
    NSString *description = (message != nil) ? message : @"Camera error";
    return [NSError errorWithDomain:@"CameraRecorder"
                               code:code
                           userInfo:@{NSLocalizedDescriptionKey : description}];
}
|
|
16
|
+
|
|
17
|
+
/// Returns whether Continuity Camera devices may be used.
/// Granted either by the ALLOW_CONTINUITY_CAMERA environment variable
/// (re-read on every call) or by a truthy
/// NSCameraUseContinuityCameraDeviceType entry in the host app's Info.plist
/// (looked up once and cached for the process lifetime).
static BOOL MRAllowContinuityCamera() {
    // Environment override wins and is intentionally not cached.
    if (getenv("ALLOW_CONTINUITY_CAMERA") != NULL) {
        return YES;
    }

    static BOOL plistAllowsContinuity = NO;
    static dispatch_once_t lookupOnce;
    dispatch_once(&lookupOnce, ^{
        id flag = [[NSBundle mainBundle] objectForInfoDictionaryKey:@"NSCameraUseContinuityCameraDeviceType"];
        plistAllowsContinuity = [flag respondsToSelector:@selector(boolValue)] && [flag boolValue];
    });
    return plistAllowsContinuity;
}
|
|
32
|
+
|
|
33
|
+
/// Heuristically determines whether `device` is an iPhone/iPad acting as a
/// Continuity Camera. Returns NO for nil.
///
/// On macOS 14+ the dedicated AVCaptureDeviceTypeContinuityCamera device
/// type is authoritative. Otherwise the device must both mention
/// "Continuity" in its localized name or model identifier AND report an
/// external device type.
///
/// Fix: the original had an unreachable trailing branch — it re-tested the
/// (External | ExternalUnknown) + mentions-Continuity combinations under an
/// additional Apple-manufacturer guard, but every combination it could
/// accept had already returned YES above, so the manufacturer check could
/// never influence the result. That dead branch (and the unused
/// manufacturer lookup) is removed; observable behavior is unchanged.
static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
    if (!device) {
        return NO;
    }

    if (@available(macOS 14.0, *)) {
        if ([device.deviceType isEqualToString:AVCaptureDeviceTypeContinuityCamera]) {
            return YES;
        }
    }

    NSString *deviceType = device.deviceType ?: @"";
    NSString *localizedName = device.localizedName ?: @"";
    NSString *modelId = device.modelID ?: @"";

    BOOL nameMentionsContinuity =
        [localizedName rangeOfString:@"Continuity" options:NSCaseInsensitiveSearch].location != NSNotFound ||
        [modelId rangeOfString:@"Continuity" options:NSCaseInsensitiveSearch].location != NSNotFound;
    if (!nameMentionsContinuity) {
        // Every positive path below requires the "Continuity" marker.
        return NO;
    }

    if (@available(macOS 14.0, *)) {
        if ([deviceType isEqualToString:AVCaptureDeviceTypeExternal]) {
            return YES;
        }
    }

    // Legacy (pre-macOS 14) external device type.
    return [deviceType isEqualToString:AVCaptureDeviceTypeExternalUnknown];
}
|
|
76
|
+
|
|
77
|
+
/// Sanitizes a requested camera output path.
/// Returns nil for non-string or empty input. AVCaptureMovieFileOutput
/// cannot write WebM, so a ".webm" request is redirected (with a log line)
/// to a ".mov" file at the same location.
static NSString *MRCameraNormalizeOutputPath(NSString *path) {
    BOOL usable = [path isKindOfClass:[NSString class]] && [path length] > 0;
    if (!usable) {
        return nil;
    }

    if (![path.pathExtension.lowercaseString isEqualToString:@"webm"]) {
        return path;
    }

    NSString *updated = [[path stringByDeletingPathExtension] stringByAppendingPathExtension:@"mov"];
    MRLog(@"⚠️ Camera: .webm not supported, writing to %@", updated);
    return updated;
}
|
|
89
|
+
|
|
90
|
+
/// Best-effort delete of the file at `path` so a new recording can be
/// written there. A missing file is not a failure; any other removal error
/// is logged and otherwise ignored (the caller proceeds regardless).
static void MRCameraRemoveFileIfExists(NSString *path) {
    if (path.length == 0) {  // covers nil: messaging nil returns 0
        return;
    }
    NSError *removeError = nil;
    // Per Cocoa convention, failure is signaled by the BOOL return value,
    // not by the error out-parameter (the original inspected only the error).
    BOOL removed = [[NSFileManager defaultManager] removeItemAtPath:path error:&removeError];
    if (removed) {
        return;
    }
    // "No such file" is expected when nothing was recorded yet; check the
    // domain too, since the bare code is only meaningful in NSCocoaErrorDomain.
    BOOL fileMissing = [removeError.domain isEqualToString:NSCocoaErrorDomain] &&
                       removeError.code == NSFileNoSuchFileError;
    if (!fileMissing) {
        MRLog(@"⚠️ CameraRecorder: Failed to remove existing file at %@ (%@)", path, removeError.localizedDescription);
    }
}
|
|
100
|
+
|
|
101
|
+
/// Records a single camera stream to a movie file via AVFoundation.
/// All capture-session mutations are funneled through `workQueue`; start and
/// stop are coordinated with the capture delegate through semaphores and a
/// monotonically increasing generation token (`activeToken`).
@interface CameraRecorder : NSObject<AVCaptureFileOutputRecordingDelegate>

// Serial queue on which the session is built, started, and torn down.
@property (nonatomic, strong) dispatch_queue_t workQueue;
@property (nonatomic, strong) AVCaptureSession *session;
@property (nonatomic, strong) AVCaptureDeviceInput *deviceInput;
@property (nonatomic, strong) AVCaptureMovieFileOutput *fileOutput;
// Normalized path the current recording writes to.
@property (nonatomic, copy) NSString *outputPath;
// Path reported by the delegate when the last recording finished.
@property (nonatomic, copy) NSString *lastFinishedOutputPath;

@property (atomic, assign) BOOL isRecording;
// YES once a stop has been requested but not yet finalized by the delegate.
@property (atomic, assign) BOOL stopInFlight;

// Start handshake: flags resolved by completeStart:token:, plus the
// semaphore that waitForRecordingStartWithTimeout: blocks on.
@property (atomic, assign) BOOL startCompleted;
@property (atomic, assign) BOOL startSucceeded;
@property (nonatomic, strong) dispatch_semaphore_t startSemaphore;

// Signaled when a stop has fully finalized (delegate fired, or no-op stop).
@property (nonatomic, strong) dispatch_semaphore_t stopSemaphore;
// Generation token; stale async start work bails out on mismatch.
@property (atomic, assign) uint64_t activeToken;
// Guards against repeated auto-restarts after an unexpected stop.
@property (atomic, assign) BOOL unexpectedRestartAttempted;

+ (instancetype)sharedRecorder;
+ (NSArray<NSDictionary *> *)availableCameraDevices;

- (BOOL)startRecordingWithDeviceId:(NSString *)deviceId
                        outputPath:(NSString *)outputPath
                             error:(NSError **)error;
- (BOOL)stopRecording;
- (BOOL)waitForRecordingStartWithTimeout:(NSTimeInterval)timeout;

@end
|
|
131
|
+
|
|
132
|
+
@implementation CameraRecorder
|
|
133
|
+
|
|
134
|
+
/// Designated initializer: creates the serial work queue that serializes
/// every session mutation and seeds the start/stop bookkeeping state.
- (instancetype)init {
    if ((self = [super init]) == nil) {
        return nil;
    }
    _workQueue = dispatch_queue_create("com.macrecorder.camera", DISPATCH_QUEUE_SERIAL);
    // No start is pending until startRecording… runs, so "completed" begins YES.
    _startCompleted = YES;
    _startSucceeded = NO;
    _activeToken = 0;
    _unexpectedRestartAttempted = NO;
    return self;
}
|
|
145
|
+
|
|
146
|
+
#pragma mark - Shared instance
|
|
147
|
+
|
|
148
|
+
/// Process-wide singleton recorder, created lazily and exactly once.
+ (instancetype)sharedRecorder {
    static dispatch_once_t creationGuard;
    static CameraRecorder *sharedInstance = nil;
    dispatch_once(&creationGuard, ^{
        sharedInstance = [[CameraRecorder alloc] init];
    });
    return sharedInstance;
}
|
|
156
|
+
|
|
157
|
+
#pragma mark - Device listing helpers
|
|
158
|
+
|
|
159
|
+
/// Enumerates every video capture device visible to AVFoundation and
/// returns one descriptive dictionary per device (id, name, position,
/// best resolution/frame rate, built-in vs external classification, …).
/// NOTE: the `isBuiltIn` flag is computed by an order-dependent chain of
/// overrides below — later rules deliberately win over earlier ones.
+ (NSArray<NSDictionary *> *)availableCameraDevices {
    NSMutableArray<NSDictionary *> *devicesInfo = [NSMutableArray array];

    // Discovery set: built-in wide-angle always; external/Continuity types
    // differ between macOS 14+ and earlier SDduced device-type names.
    NSMutableArray<AVCaptureDeviceType> *deviceTypes = [NSMutableArray array];
    [deviceTypes addObject:AVCaptureDeviceTypeBuiltInWideAngleCamera];
    if (@available(macOS 14.0, *)) {
        [deviceTypes addObject:AVCaptureDeviceTypeExternal];
        [deviceTypes addObject:AVCaptureDeviceTypeContinuityCamera];
    } else {
        [deviceTypes addObject:AVCaptureDeviceTypeExternalUnknown];
    }

    AVCaptureDeviceDiscoverySession *discoverySession =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes
                                                               mediaType:AVMediaTypeVideo
                                                                position:AVCaptureDevicePositionUnspecified];

    for (AVCaptureDevice *device in discoverySession.devices) {
        BOOL continuityCamera = MRIsContinuityCamera(device);

        // Scan all formats for the largest resolution; ties broken by the
        // highest supported frame rate.
        CMVideoDimensions bestDimensions = {0, 0};
        Float64 bestFrameRate = 0.0;

        for (AVCaptureDeviceFormat *format in device.formats) {
            CMVideoDimensions dims = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
            if (dims.width <= 0 || dims.height <= 0) {
                continue;
            }

            Float64 maxFrameRateForFormat = 0.0;
            for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
                maxFrameRateForFormat = MAX(maxFrameRateForFormat, range.maxFrameRate);
            }

            bool isBetterResolution = (dims.width * dims.height) > (bestDimensions.width * bestDimensions.height);
            bool sameResolutionHigherFps = (dims.width * dims.height) == (bestDimensions.width * bestDimensions.height) &&
                maxFrameRateForFormat > bestFrameRate;

            if (isBetterResolution || sameResolutionHigherFps) {
                bestDimensions = dims;
                bestFrameRate = maxFrameRateForFormat;
            }
        }

        NSString *position;
        switch (device.position) {
            case AVCaptureDevicePositionFront:
                position = @"front";
                break;
            case AVCaptureDevicePositionBack:
                position = @"back";
                break;
            default:
                position = @"unspecified";
                break;
        }

        // Built-in classification, in override order:
        // 1) name heuristics mark it built-in …
        BOOL isBuiltIn = NO;
        NSString *deviceName = device.localizedName ?: @"";
        NSString *deviceType = device.deviceType ?: @"";

        if ([deviceName rangeOfString:@"FaceTime" options:NSCaseInsensitiveSearch].location != NSNotFound ||
            [deviceName rangeOfString:@"iSight" options:NSCaseInsensitiveSearch].location != NSNotFound ||
            [deviceName rangeOfString:@"Built-in" options:NSCaseInsensitiveSearch].location != NSNotFound) {
            isBuiltIn = YES;
        }

        // 2) … as does the built-in wide-angle device type …
        if (@available(macOS 10.15, *)) {
            if ([deviceType isEqualToString:AVCaptureDeviceTypeBuiltInWideAngleCamera]) {
                isBuiltIn = YES;
            }
        }

        // 3) … but any Continuity/iPhone/iPad signal overrides to external …
        if (continuityCamera ||
            [deviceName rangeOfString:@"iPhone" options:NSCaseInsensitiveSearch].location != NSNotFound ||
            [deviceName rangeOfString:@"iPad" options:NSCaseInsensitiveSearch].location != NSNotFound ||
            [deviceName rangeOfString:@"Continuity" options:NSCaseInsensitiveSearch].location != NSNotFound) {
            isBuiltIn = NO;
        }

        // 4) … and an explicit external device type always wins.
        if (@available(macOS 14.0, *)) {
            if ([deviceType isEqualToString:AVCaptureDeviceTypeExternal] ||
                [deviceType isEqualToString:AVCaptureDeviceTypeContinuityCamera]) {
                isBuiltIn = NO;
            }
        }
        if ([deviceType isEqualToString:AVCaptureDeviceTypeExternalUnknown]) {
            isBuiltIn = NO;
        }

        // Nil-coalesce every value: @{} literals throw on nil entries.
        NSDictionary *deviceInfo = @{
            @"id": device.uniqueID ?: @"",
            @"name": deviceName,
            @"model": device.modelID ?: @"",
            @"manufacturer": device.manufacturer ?: @"",
            @"position": position ?: @"unspecified",
            @"transportType": @(device.transportType),
            @"isConnected": @(device.isConnected),
            @"isDefault": @(isBuiltIn),
            @"hasFlash": @(device.hasFlash),
            @"supportsDepth": @NO,
            @"deviceType": deviceType,
            @"requiresContinuityCameraPermission": @(continuityCamera),
            @"maxResolution": @{
                @"width": @(bestDimensions.width),
                @"height": @(bestDimensions.height),
                @"maxFrameRate": @(bestFrameRate)
            }
        };

        [devicesInfo addObject:deviceInfo];
    }

    return devicesInfo;
}
|
|
274
|
+
|
|
275
|
+
#pragma mark - Device configuration
|
|
276
|
+
|
|
277
|
+
/// Resolves the capture device to record from.
/// Preference order: the device matching `deviceId` (when given), the
/// system default video device, then the first device reported by
/// +availableCameraDevices. May return nil when no camera exists at all.
- (AVCaptureDevice *)deviceForId:(NSString *)deviceId {
    if (deviceId.length > 0) {
        AVCaptureDevice *requested = [AVCaptureDevice deviceWithUniqueID:deviceId];
        if (requested) {
            return requested;
        }
    }

    AVCaptureDevice *resolved = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (resolved) {
        return resolved;
    }

    NSArray<NSDictionary *> *known = [CameraRecorder availableCameraDevices];
    if (known.count == 0) {
        return resolved;  // nil — nothing to record from
    }
    return [AVCaptureDevice deviceWithUniqueID:known.firstObject[@"id"]];
}
|
|
297
|
+
|
|
298
|
+
/// Picks a capture format for `device`, capped at 1280x720, preferring the
/// largest pixel area and breaking ties by the highest supported frame
/// rate. Each time a better candidate is found, its width/height/frame rate
/// are written through the optional out-parameters. Returns nil when no
/// format fits under the cap.
- (AVCaptureDeviceFormat *)bestFormatForDevice:(AVCaptureDevice *)device
                                      widthOut:(int32_t *)widthOut
                                     heightOut:(int32_t *)heightOut
                                  frameRateOut:(double *)frameRateOut {
    AVCaptureDeviceFormat *chosen = nil;
    int64_t chosenArea = 0;
    double chosenFps = 0.0;

    for (AVCaptureDeviceFormat *candidate in device.formats) {
        CMVideoDimensions dims = CMVideoFormatDescriptionGetDimensions(candidate.formatDescription);
        BOOL degenerate = (dims.width <= 0 || dims.height <= 0);
        BOOL overCap = (dims.width > 1280 || dims.height > 720);
        if (degenerate || overCap) {
            continue;
        }

        int64_t area = (int64_t)dims.width * (int64_t)dims.height;
        double fps = 0.0;
        for (AVFrameRateRange *range in candidate.videoSupportedFrameRateRanges) {
            fps = MAX(fps, range.maxFrameRate);
        }

        BOOL improves = (chosen == nil) ||
                        (area > chosenArea) ||
                        (area == chosenArea && fps > chosenFps);
        if (!improves) {
            continue;
        }

        chosen = candidate;
        chosenArea = area;
        chosenFps = fps;
        if (widthOut) *widthOut = dims.width;
        if (heightOut) *heightOut = dims.height;
        if (frameRateOut) *frameRateOut = fps;
    }

    return chosen;
}
|
|
338
|
+
|
|
339
|
+
/// Locks `device` for configuration, applies `format` (when the device
/// still offers it) and a clamped frame rate, then unlocks.
/// Returns NO and fills `*error` on failure; the lock is always released
/// on every exit path after a successful lock.
- (BOOL)configureDevice:(AVCaptureDevice *)device
             withFormat:(AVCaptureDeviceFormat *)format
              frameRate:(double)frameRate
                  error:(NSError **)error {
    if (!device || !format) {
        if (error) {
            *error = MRCameraError(-3, @"Camera device unavailable");
        }
        return NO;
    }

    NSError *lockError = nil;
    if (![device lockForConfiguration:&lockError]) {
        if (error) {
            *error = lockError;
        }
        return NO;
    }

    // KVC-style setters on AVCaptureDevice can throw (e.g. a format the
    // device no longer supports), so the whole mutation is wrapped.
    @try {
        if ([device.formats containsObject:format]) {
            device.activeFormat = format;
        }

        // Cap the capture rate at 24 fps; 24 is also the default when the
        // caller passed no usable rate.
        double targetFrameRate = frameRate > 0 ? MIN(frameRate, 24.0) : 24.0;
        AVFrameRateRange *bestRange = nil;
        for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
            if (!bestRange || range.maxFrameRate > bestRange.maxFrameRate) {
                bestRange = range;
            }
        }

        if (bestRange) {
            // Clamp into the range, convert to a frame duration, then clamp
            // the duration again into [min, max] in CMTime space.
            double clampedRate = MIN(bestRange.maxFrameRate, MAX(bestRange.minFrameRate, targetFrameRate));
            double durationSeconds = clampedRate > 0.0 ? (1.0 / clampedRate) : CMTimeGetSeconds(bestRange.maxFrameDuration);
            // 600 is a conventional video timescale fallback when the range
            // reports no usable timescale.
            int32_t preferredTimescale = bestRange.minFrameDuration.timescale > 0 ? bestRange.minFrameDuration.timescale : 600;
            CMTime desiredDuration = CMTimeMakeWithSeconds(durationSeconds, preferredTimescale);

            if (!CMTIME_IS_NUMERIC(desiredDuration)) {
                desiredDuration = bestRange.maxFrameDuration;
            }

            if (CMTimeCompare(desiredDuration, bestRange.minFrameDuration) < 0) {
                desiredDuration = bestRange.minFrameDuration;
            } else if (CMTimeCompare(desiredDuration, bestRange.maxFrameDuration) > 0) {
                desiredDuration = bestRange.maxFrameDuration;
            }

            // Pin min == max to request a fixed frame rate.
            device.activeVideoMinFrameDuration = desiredDuration;
            device.activeVideoMaxFrameDuration = desiredDuration;
        }
    } @catch (NSException *exception) {
        if (error) {
            *error = MRCameraError(-4, exception.reason ?: @"Failed to configure camera device");
        }
        [device unlockForConfiguration];
        return NO;
    }

    [device unlockForConfiguration];
    return YES;
}
|
|
401
|
+
|
|
402
|
+
#pragma mark - Synchronization helpers
|
|
403
|
+
|
|
404
|
+
/// Atomically increments and returns the generation token that ties an
/// in-flight start attempt to this recorder's current lifecycle.
- (uint64_t)nextToken {
    uint64_t issued;
    @synchronized (self) {
        issued = self.activeToken + 1;
        self.activeToken = issued;
    }
    return issued;
}
|
|
410
|
+
|
|
411
|
+
/// Blocks until a previously requested stop has finalized (signaled by the
/// delegate or the stop path), up to `timeout` seconds.
/// Returns YES immediately when no stop is pending. On timeout the
/// semaphore is deliberately left in place so a later wait can still
/// observe the signal; state is only cleared after a successful wait.
- (BOOL)waitForStopCompletion:(NSTimeInterval)timeout {
    dispatch_semaphore_t stopSemaphore = self.stopSemaphore;
    if (!stopSemaphore) {
        // No stop in flight — nothing to wait for.
        return YES;
    }
    dispatch_time_t waitTime = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeout * NSEC_PER_SEC));
    long result = dispatch_semaphore_wait(stopSemaphore, waitTime);
    if (result != 0) {
        MRLog(@"⚠️ CameraRecorder: Previous stop did not finish within %.2fs", timeout);
        return NO;
    }
    // Stop fully finalized — clear the handshake state.
    self.stopSemaphore = nil;
    self.stopInFlight = NO;
    return YES;
}
|
|
426
|
+
|
|
427
|
+
/// Resolves the pending start handshake with `success`, but only when
/// `token` still matches the current generation (stale async callers are
/// ignored) and the outcome actually changes.
/// On failure the recording flag is cleared before waiters are released.
- (void)completeStart:(BOOL)success token:(uint64_t)token {
    if (token != self.activeToken) {
        // A newer start/stop cycle superseded this attempt.
        return;
    }
    if (self.startCompleted && self.startSucceeded == success) {
        // Already resolved with the same outcome — avoid double-signaling.
        return;
    }
    self.startCompleted = YES;
    self.startSucceeded = success;
    if (!success) {
        self.isRecording = NO;
    }
    // Wake anyone blocked in waitForRecordingStartWithTimeout:.
    dispatch_semaphore_t semaphore = self.startSemaphore;
    if (semaphore) {
        dispatch_semaphore_signal(semaphore);
    }
}
|
|
444
|
+
|
|
445
|
+
/// Resets all per-recording state back to idle. Intended to run on the
/// work queue so it cannot race with an in-flight start or stop.
- (void)cleanupAfterStopOnQueue {
    self.isRecording = NO;
    self.stopInFlight = NO;
    self.unexpectedRestartAttempted = NO;

    self.session = nil;
    self.deviceInput = nil;
    self.fileOutput = nil;
    self.outputPath = nil;

    g_cameraStartTimestamp = 0.0;
}
|
|
455
|
+
|
|
456
|
+
/// One-shot recovery path: when the file output stops without anyone
/// requesting it, try to resume recording to the same output path.
/// The existing clip is moved to a ".bak" sibling first so a failed
/// restart can restore it. Returns YES when the restart was initiated.
/// Runs on the work queue (called from the finish delegate's async block).
- (BOOL)attemptRestartAfterUnexpectedStop {
    if (self.unexpectedRestartAttempted) {
        MRLog(@"⚠️ Camera already retried after unexpected stop; skipping restart");
        return NO;
    }
    // Mark before doing anything so a second unexpected stop cannot loop.
    self.unexpectedRestartAttempted = YES;

    if (!self.outputPath || [self.outputPath length] == 0) {
        MRLog(@"⚠️ Cannot restart camera: missing output path");
        return NO;
    }
    if (!self.session || !self.fileOutput) {
        MRLog(@"⚠️ Cannot restart camera: session/output unavailable");
        return NO;
    }

    if (![self.session isRunning]) {
        [self.session startRunning];
    }

    NSURL *outputURL = [NSURL fileURLWithPath:self.outputPath];
    if (!outputURL) {
        MRLog(@"⚠️ Cannot restart camera: invalid output URL");
        return NO;
    }

    // Move existing clip aside so we don't lose it if restart fails
    NSString *backupPath = [self.outputPath stringByAppendingPathExtension:@"bak"];
    [[NSFileManager defaultManager] removeItemAtPath:backupPath error:nil];
    if ([[NSFileManager defaultManager] fileExistsAtPath:self.outputPath]) {
        [[NSFileManager defaultManager] moveItemAtPath:self.outputPath toPath:backupPath error:nil];
    }

    MRLog(@"🔁 Attempting automatic camera restart after unexpected stop");
    @try {
        // Re-arm recording state before kicking the output off again.
        self.stopInFlight = NO;
        self.isRecording = YES;
        g_cameraStartTimestamp = 0.0;
        [self.fileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
        // Remove backup since restart succeeded
        [[NSFileManager defaultManager] removeItemAtPath:backupPath error:nil];
        return YES;
    } @catch (NSException *exception) {
        MRLog(@"❌ Camera auto-restart failed: %@", exception.reason);
        // Restore previous clip if we created a backup
        if ([[NSFileManager defaultManager] fileExistsAtPath:backupPath]) {
            [[NSFileManager defaultManager] removeItemAtPath:self.outputPath error:nil];
            [[NSFileManager defaultManager] moveItemAtPath:backupPath toPath:self.outputPath error:nil];
        }
        self.isRecording = NO;
        return NO;
    }
}
|
|
509
|
+
|
|
510
|
+
#pragma mark - Recording lifecycle
|
|
511
|
+
|
|
512
|
+
/// Work-queue body of a start request: resolves the device, configures it,
/// builds the capture session, and kicks off the movie file output.
/// Every failure path resolves the start handshake via completeStart:NO.
/// The (stopInFlight || token mismatch) guard is re-evaluated after each
/// slow step so a concurrent stop or newer start can abort this attempt.
- (void)performStartWithDeviceId:(NSString *)deviceId
                      outputPath:(NSString *)outputPath
                           token:(uint64_t)token {
    @autoreleasepool {
        if (self.stopInFlight || token != self.activeToken) {
            [self completeStart:NO token:token];
            return;
        }

        // Rejects empty paths and rewrites ".webm" to ".mov".
        NSString *normalizedPath = MRCameraNormalizeOutputPath(outputPath);
        if (!normalizedPath || [normalizedPath length] == 0) {
            [self completeStart:NO token:token];
            return;
        }

        MRCameraRemoveFileIfExists(normalizedPath);

        AVCaptureDevice *device = [self deviceForId:deviceId];
        if (!device) {
            MRLog(@"❌ No camera devices available");
            [self completeStart:NO token:token];
            return;
        }

        if (MRIsContinuityCamera(device) && !MRAllowContinuityCamera()) {
            MRLog(@"⚠️ Continuity Camera access denied - missing entitlement");
            [self completeStart:NO token:token];
            return;
        }

        int32_t width = 0;
        int32_t height = 0;
        double frameRate = 0.0;
        AVCaptureDeviceFormat *bestFormat = [self bestFormatForDevice:device widthOut:&width heightOut:&height frameRateOut:&frameRate];
        if (!bestFormat) {
            MRLog(@"❌ No suitable camera format found");
            [self completeStart:NO token:token];
            return;
        }

        NSError *configError = nil;
        if (![self configureDevice:device withFormat:bestFormat frameRate:frameRate error:&configError]) {
            MRLog(@"❌ Failed to configure device: %@", configError.localizedDescription);
            [self completeStart:NO token:token];
            return;
        }

        // Re-check: a stop or newer start may have arrived during config.
        if (self.stopInFlight || token != self.activeToken) {
            [self completeStart:NO token:token];
            return;
        }

        AVCaptureSession *session = [[AVCaptureSession alloc] init];
        session.sessionPreset = AVCaptureSessionPresetHigh;
        [session beginConfiguration];

        NSError *inputError = nil;
        AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&inputError];
        if (!deviceInput || ![session canAddInput:deviceInput]) {
            MRLog(@"❌ Unable to add camera input: %@", inputError.localizedDescription);
            [session commitConfiguration];
            [self completeStart:NO token:token];
            return;
        }
        [session addInput:deviceInput];

        AVCaptureMovieFileOutput *fileOutput = [[AVCaptureMovieFileOutput alloc] init];
        if (![session canAddOutput:fileOutput]) {
            MRLog(@"❌ Unable to add movie file output to capture session");
            [session commitConfiguration];
            [self completeStart:NO token:token];
            return;
        }
        [session addOutput:fileOutput];

        // Ensure the file output does not auto-stop due to implicit limits
        fileOutput.movieFragmentInterval = kCMTimeInvalid;
        fileOutput.maxRecordedDuration = kCMTimeInvalid;
        fileOutput.maxRecordedFileSize = 0;

        // Video-only recording: disable any audio connection the output made.
        AVCaptureConnection *audioConnection = [fileOutput connectionWithMediaType:AVMediaTypeAudio];
        if (audioConnection) {
            audioConnection.enabled = NO;
        }

        // Mirror front-facing cameras so the recording matches the preview.
        AVCaptureConnection *videoConnection = [fileOutput connectionWithMediaType:AVMediaTypeVideo];
        if (videoConnection && videoConnection.isVideoMirroringSupported && device.position == AVCaptureDevicePositionFront) {
            if ([videoConnection respondsToSelector:@selector(setAutomaticallyAdjustsVideoMirroring:)]) {
                videoConnection.automaticallyAdjustsVideoMirroring = NO;
            }
            videoConnection.videoMirrored = YES;
        }

        [session commitConfiguration];

        if (self.stopInFlight || token != self.activeToken) {
            [self completeStart:NO token:token];
            return;
        }

        // Publish the session objects before starting so stopRecording can
        // see and tear them down.
        self.session = session;
        self.deviceInput = deviceInput;
        self.fileOutput = fileOutput;
        self.outputPath = normalizedPath;

        [session startRunning];

        // Give session a brief moment to warm up to avoid false start timeouts on slower devices
        [NSThread sleepForTimeInterval:0.5];

        if (self.stopInFlight || token != self.activeToken) {
            [session stopRunning];
            [self completeStart:NO token:token];
            return;
        }

        NSURL *outputURL = [NSURL fileURLWithPath:normalizedPath];
        if (!outputURL) {
            MRLog(@"❌ Failed to create output URL for camera recording");
            [self completeStart:NO token:token];
            return;
        }

        MRLog(@"🎥 Starting camera recording to %@", normalizedPath);
        @try {
            // Success is confirmed asynchronously by the didStart delegate,
            // which resolves the start handshake with YES.
            [fileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
            MRLog(@"📤 Camera setup scheduled on background queue (non-blocking)");
        } @catch (NSException *exception) {
            MRLog(@"❌ Exception while starting camera recording: %@", exception.reason);
            [self completeStart:NO token:token];
            return;
        }
    }
}
|
|
646
|
+
|
|
647
|
+
/// Public entry point: validates preconditions (no stop finalizing, not
/// already recording, non-empty path, camera permission), arms the start
/// handshake, and schedules the actual session setup on the work queue.
/// Returns YES when the start was *scheduled*; use
/// waitForRecordingStartWithTimeout: to learn whether it succeeded.
- (BOOL)startRecordingWithDeviceId:(NSString *)deviceId
                        outputPath:(NSString *)outputPath
                             error:(NSError **)error {
    // A previous stop must fully finalize before a new session is built.
    if (![self waitForStopCompletion:5.0]) {
        if (error) {
            *error = MRCameraError(-20, @"Camera stop is still finalizing – please retry");
        }
        return NO;
    }

    if (self.isRecording) {
        if (error) {
            *error = MRCameraError(-1, @"Camera recording already in progress");
        }
        return NO;
    }

    if (!outputPath || outputPath.length == 0) {
        if (error) {
            *error = MRCameraError(-2, @"Invalid camera output path");
        }
        return NO;
    }

    AVAuthorizationStatus cameraStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (cameraStatus == AVAuthorizationStatusDenied || cameraStatus == AVAuthorizationStatusRestricted) {
        if (error) {
            *error = MRCameraError(-4, @"Camera permission denied - please grant permission in System Settings");
        }
        return NO;
    }

    // Undetermined permission: request asynchronously and proceed — the
    // start attempt itself will fail later if the user declines.
    if (cameraStatus == AVAuthorizationStatusNotDetermined) {
        MRLog(@"🔐 Camera permission not determined - requesting async (non-blocking)...");
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
            if (granted) {
                MRLog(@"✅ Camera permission granted (async callback)");
            } else {
                MRLog(@"❌ Camera permission denied (async callback)");
            }
        }];
    }

    // Arm the start handshake BEFORE issuing the token and dispatching, so
    // the async worker and delegate always see a pending start.
    self.startCompleted = NO;
    self.startSucceeded = NO;
    self.startSemaphore = dispatch_semaphore_create(0);
    self.stopInFlight = NO;
    self.isRecording = YES;
    self.unexpectedRestartAttempted = NO;
    self.lastFinishedOutputPath = nil;

    uint64_t token = [self nextToken];

    dispatch_async(self.workQueue, ^{
        [self performStartWithDeviceId:deviceId outputPath:outputPath token:token];
    });

    return YES;
}
|
|
706
|
+
|
|
707
|
+
/// Stops the active recording and tears down the capture session.
/// The actual stop runs on the work queue; this method blocks up to 5s for
/// the file output to finalize (signaled either directly — when nothing was
/// recording — or by the didFinish delegate). Always returns YES.
- (BOOL)stopRecording {
    BOOL hasActiveSession = (self.session && [self.session isRunning]);
    BOOL outputRecording = (self.fileOutput && [self.fileOutput isRecording]);
    if (!self.isRecording && !hasActiveSession && !outputRecording) {
        // Nothing active; just drain any pending stop handshake.
        [self waitForStopCompletion:5.0];
        return YES;
    }

    // A start still in flight is resolved as failed before stopping.
    if (!self.startCompleted) {
        [self completeStart:NO token:self.activeToken];
    }

    self.stopInFlight = YES;

    dispatch_semaphore_t stopSemaphore = dispatch_semaphore_create(0);
    self.stopSemaphore = stopSemaphore;

    dispatch_async(self.workQueue, ^{
        if (self.fileOutput && [self.fileOutput isRecording]) {
            MRLog(@"🛑 Movie file output stop requested");
            // Finalization is asynchronous; the didFinish delegate signals
            // the semaphore and performs cleanup.
            [self.fileOutput stopRecording];
        } else {
            // No recording to finalize — release the waiter immediately.
            dispatch_semaphore_signal(stopSemaphore);
        }

        if (self.session && [self.session isRunning]) {
            [self.session stopRunning];
        }

        if (self.session && self.deviceInput && [self.session.inputs containsObject:self.deviceInput]) {
            [self.session removeInput:self.deviceInput];
        }
        if (self.session && self.fileOutput && [self.session.outputs containsObject:self.fileOutput]) {
            [self.session removeOutput:self.fileOutput];
        }

        // If the output already stopped, cleanup happens here instead of in
        // the delegate callback.
        if (!self.fileOutput || ![self.fileOutput isRecording]) {
            [self cleanupAfterStopOnQueue];
        }
    });

    dispatch_time_t waitTime = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(5 * NSEC_PER_SEC));
    long waitResult = dispatch_semaphore_wait(stopSemaphore, waitTime);
    if (waitResult != 0) {
        MRLog(@"⚠️ CameraRecorder: Stop did not finish within 5s (proceeding)");
    } else {
        MRLog(@"✅ CameraRecorder: Stop finalized");
    }

    self.stopSemaphore = nil;
    self.isRecording = NO;
    self.stopInFlight = NO;
    return YES;
}
|
|
761
|
+
|
|
762
|
+
/// Blocks the caller until the pending start attempt resolves, up to
/// `timeout` seconds. Returns whether the start succeeded; a timeout with
/// the start still unresolved returns NO.
- (BOOL)waitForRecordingStartWithTimeout:(NSTimeInterval)timeout {
    if (self.startCompleted) {
        return self.startSucceeded;
    }

    dispatch_semaphore_t pending = self.startSemaphore;
    if (pending == nil) {
        return self.startSucceeded;
    }

    dispatch_time_t deadline = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeout * NSEC_PER_SEC));
    BOOL timedOut = dispatch_semaphore_wait(pending, deadline) != 0;
    if (timedOut && !self.startCompleted) {
        return NO;
    }
    return self.startSucceeded;
}
|
|
777
|
+
|
|
778
|
+
#pragma mark - AVCaptureFileOutputRecordingDelegate
|
|
779
|
+
|
|
780
|
+
/// AVCaptureFileOutputRecordingDelegate: the movie file output finished writing,
/// either because stopRecording was requested or because the session ended
/// unexpectedly (device unplugged, interruption, error).
- (void)captureOutput:(AVCaptureFileOutput *)output
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
      fromConnections:(NSArray<AVCaptureConnection *> *)connections
                error:(NSError *)error {
    // Elapsed wall-clock time since -didStartRecording fired; 0 when start was never observed.
    double elapsedTime = g_cameraStartTimestamp > 0 ? (CFAbsoluteTimeGetCurrent() - g_cameraStartTimestamp) : 0;
    MRLog(@"🎬 Camera recording finished (elapsed: %.2fs)", elapsedTime);
    if (error) {
        MRLog(@"❌ Camera recording finished with error: %@", error);
    } else {
        MRLog(@"✅ Camera recording finished successfully");
    }
    // Remember where the movie actually landed; fall back to the requested path
    // if AVFoundation did not hand us a URL.
    self.lastFinishedOutputPath = outputFileURL.path ?: self.outputPath;

    // Capture the stop semaphore BEFORE hopping queues so the async block signals
    // the same semaphore the stopRecording caller is waiting on, even if the
    // property is cleared in the meantime.
    dispatch_semaphore_t stopSemaphore = self.stopSemaphore;
    BOOL expectedStop = self.stopInFlight || (stopSemaphore != nil);
    dispatch_async(self.workQueue, ^{
        if (!expectedStop) {
            // Recording ended without anyone calling stop: try to resume.
            // On success, skip cleanup — the new recording keeps the session alive.
            BOOL restarted = [self attemptRestartAfterUnexpectedStop];
            if (restarted) {
                MRLog(@"🔁 Camera auto-restart initiated after unexpected stop");
                return;
            } else {
                MRLog(@"⚠️ Camera could not auto-restart after unexpected stop");
            }
        }

        [self cleanupAfterStopOnQueue];
        // Unblock the thread waiting in stopRecording, if any.
        if (stopSemaphore) {
            dispatch_semaphore_signal(stopSemaphore);
        }
    });
}
|
|
812
|
+
|
|
813
|
+
/// AVCaptureFileOutputRecordingDelegate: the movie file output has begun writing frames.
- (void)captureOutput:(AVCaptureFileOutput *)output
didStartRecordingToOutputFileAtURL:(NSURL *)fileURL
      fromConnections:(NSArray<AVCaptureConnection *> *)connections {
    // Stamp the wall-clock start time first so sync consumers observe it as early as possible.
    g_cameraStartTimestamp = CFAbsoluteTimeGetCurrent();
    MRLog(@"✅ Camera file recording started: %@", fileURL.path);
    // Resolve the pending start handshake as successful for the active session token.
    [self completeStart:YES token:self.activeToken];
}
|
|
820
|
+
|
|
821
|
+
@end
|
|
822
|
+
|
|
823
|
+
// MARK: - C Interface
|
|
824
|
+
|
|
825
|
+
extern "C" {
|
|
826
|
+
|
|
827
|
+
/// C entry point: enumerates the cameras visible to AVFoundation.
/// @return An array of dictionaries describing each available capture device.
NSArray<NSDictionary *> *listCameraDevices() {
    NSArray<NSDictionary *> *devices = [CameraRecorder availableCameraDevices];
    return devices;
}
|
|
830
|
+
|
|
831
|
+
/// C entry point: begins a camera recording on the shared recorder.
/// @param outputPath Destination path for the movie file.
/// @param deviceId Unique identifier of the camera to use.
/// @param error Out-parameter populated on failure.
/// @return true when the start request was accepted.
bool startCameraRecording(NSString *outputPath, NSString *deviceId, NSError **error) {
    CameraRecorder *recorder = [CameraRecorder sharedRecorder];
    return [recorder startRecordingWithDeviceId:deviceId
                                     outputPath:outputPath
                                          error:error];
}
|
|
836
|
+
|
|
837
|
+
/// C entry point: blocks until recording start is confirmed or the timeout expires.
/// @param timeoutSeconds Maximum time to wait, in seconds.
/// @return true if the recording start succeeded.
bool waitForCameraRecordingStart(double timeoutSeconds) {
    CameraRecorder *recorder = [CameraRecorder sharedRecorder];
    return [recorder waitForRecordingStartWithTimeout:timeoutSeconds];
}
|
|
840
|
+
|
|
841
|
+
/// C entry point: wall-clock time (CFAbsoluteTime) at which the current/last
/// camera recording actually started writing, or 0 if start was never observed.
double currentCameraRecordingStartTime(void) {
    double startTimestamp = g_cameraStartTimestamp;
    return startTimestamp;
}
|
|
844
|
+
|
|
845
|
+
/// C entry point: requests a stop of the active camera recording.
/// The autorelease pool bounds temporaries created during session teardown.
/// @return true when the stop request was accepted/completed.
bool stopCameraRecording() {
    bool stopped;
    @autoreleasepool {
        CameraRecorder *recorder = [CameraRecorder sharedRecorder];
        stopped = [recorder stopRecording];
    }
    return stopped;
}
|
|
850
|
+
|
|
851
|
+
/// C entry point: reports whether the shared recorder is currently recording.
bool isCameraRecording() {
    CameraRecorder *recorder = [CameraRecorder sharedRecorder];
    return recorder.isRecording;
}
|
|
854
|
+
|
|
855
|
+
/// C entry point: path of the most recently finalized camera movie file,
/// falling back to the in-progress/requested output path when no file has
/// finished yet.
NSString *currentCameraRecordingPath() {
    CameraRecorder *recorder = [CameraRecorder sharedRecorder];
    NSString *finishedPath = recorder.lastFinishedOutputPath;
    // Messaging nil returns 0, so this covers both "nil" and "empty string".
    if (finishedPath.length > 0) {
        return finishedPath;
    }
    return recorder.outputPath;
}
|
|
862
|
+
|
|
863
|
+
}
|