AVClub 0.1.4 → 1.0.3

This diff shows the content of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
@@ -1,8 +1,8 @@
  //
  // AVClub.m
- // AVClub
+ // AVClub. Based on the AVCam demo by Apple.
  //
- // Created by Colin Thomas-Arnold on 11/16/12.
+ // Created by Colin T.A. Gray on 11/16/12.
  //
  // Copyright (c) 2012 colinta. All rights reserved.
  //

@@ -21,8 +21,6 @@
  #pragma mark -
  @interface AVClub (InternalUtilityMethods)
  - (AVCaptureDevice *) cameraWithPosition:(AVCaptureDevicePosition)position;
- - (AVCaptureDevice *) frontFacingCamera;
- - (AVCaptureDevice *) backFacingCamera;
  - (AVCaptureDevice *) audioDevice;
  - (NSURL *) tempFileURL;
  - (void) removeFile:(NSURL *)outputFileURL;
@@ -46,7 +44,7 @@

  __block AVClub *weakSelf = self;
  void (^deviceConnectedBlock)(NSNotification *) = ^(NSNotification *notification) {
- AVCaptureDevice *device = [notification object];
+ AVCaptureDevice *device = notification.object;

  BOOL sessionHasDeviceWithMatchingMediaType = NO;
  NSString *deviceMediaType = nil;
@@ -59,7 +57,7 @@
  {
  for (AVCaptureDeviceInput *input in [self.session inputs])
  {
- if ( [[input device] hasMediaType:deviceMediaType] )
+ if ( [input.device hasMediaType:deviceMediaType] )
  {
  sessionHasDeviceWithMatchingMediaType = YES;
  break;
@@ -79,14 +77,14 @@
  CFRunLoopPerformBlock(CFRunLoopGetMain(), kCFRunLoopCommonModes, ^{ [delegate clubDeviceConfigurationChanged:self]; });
  };
  void (^deviceDisconnectedBlock)(NSNotification *) = ^(NSNotification *notification) {
- AVCaptureDevice *device = [notification object];
+ AVCaptureDevice *device = notification.object;

  if ( [device hasMediaType:AVMediaTypeAudio] ) {
- [session removeInput:[weakSelf audioInput]];
+ [session removeInput:weakSelf.audioInput];
  weakSelf.audioInput = nil;
  }
  else if ( [device hasMediaType:AVMediaTypeVideo] ) {
- [session removeInput:[weakSelf videoInput]];
+ [session removeInput:weakSelf.videoInput];
  weakSelf.videoInput = nil;
  }

@@ -95,8 +93,8 @@
  };

  NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
- [self setDeviceConnectedObserver:[notificationCenter addObserverForName:AVCaptureDeviceWasConnectedNotification object:nil queue:nil usingBlock:deviceConnectedBlock]];
- [self setDeviceDisconnectedObserver:[notificationCenter addObserverForName:AVCaptureDeviceWasDisconnectedNotification object:nil queue:nil usingBlock:deviceDisconnectedBlock]];
+ self.deviceConnectedObserver = [notificationCenter addObserverForName:AVCaptureDeviceWasConnectedNotification object:nil queue:nil usingBlock:deviceConnectedBlock];
+ self.deviceDisconnectedObserver = [notificationCenter addObserverForName:AVCaptureDeviceWasDisconnectedNotification object:nil queue:nil usingBlock:deviceDisconnectedBlock];
  [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
  [notificationCenter addObserver:self selector:@selector(deviceOrientationDidChange) name:UIDeviceOrientationDidChangeNotification object:nil];
  self.orientation = AVCaptureVideoOrientationPortrait;
@@ -108,8 +106,8 @@
  - (void) dealloc
  {
  NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
- [notificationCenter removeObserver:[self deviceConnectedObserver]];
- [notificationCenter removeObserver:[self deviceDisconnectedObserver]];
+ [notificationCenter removeObserver:self.deviceConnectedObserver];
+ [notificationCenter removeObserver:self.deviceDisconnectedObserver];
  [notificationCenter removeObserver:self name:UIDeviceOrientationDidChangeNotification object:nil];
  [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];

@@ -119,30 +117,31 @@
  - (void) startInView:(UIView*)videoView
  {
  // Set torch and flash mode to auto
- if ( [[self backFacingCamera] hasFlash] )
+ if ( self.backFacingCamera.hasFlash )
  {
- if ( [[self backFacingCamera] lockForConfiguration:nil] )
+ if ( [self.backFacingCamera lockForConfiguration:nil] )
  {
- if ( [[self backFacingCamera] isFlashModeSupported:AVCaptureFlashModeAuto] )
- [[self backFacingCamera] setFlashMode:AVCaptureFlashModeAuto];
+ if ( [self.backFacingCamera isFlashModeSupported:AVCaptureFlashModeAuto] )
+ self.backFacingCamera.flashMode = AVCaptureFlashModeAuto;

- [[self backFacingCamera] unlockForConfiguration];
+ [self.backFacingCamera unlockForConfiguration];
  }
  }
- if ( [[self backFacingCamera] hasTorch] )
+
+ if ( self.backFacingCamera.hasTorch )
  {
- if ( [[self backFacingCamera] lockForConfiguration:nil] )
+ if ( [self.backFacingCamera lockForConfiguration:nil] )
  {
- if ( [[self backFacingCamera] isTorchModeSupported:AVCaptureTorchModeAuto] )
- [[self backFacingCamera] setTorchMode:AVCaptureTorchModeAuto];
+ if ( [self.backFacingCamera isTorchModeSupported:AVCaptureTorchModeAuto] )
+ self.backFacingCamera.torchMode = AVCaptureTorchModeAuto;

- [[self backFacingCamera] unlockForConfiguration];
+ [self.backFacingCamera unlockForConfiguration];
  }
  }

  // Init the device inputs
- AVCaptureDeviceInput *newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self backFacingCamera] error:nil];
- AVCaptureDeviceInput *newAudioInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self audioDevice] error:nil];
+ AVCaptureDeviceInput *newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:self.backFacingCamera error:nil];
+ AVCaptureDeviceInput *newAudioInput = [[AVCaptureDeviceInput alloc] initWithDevice:self.audioDevice error:nil];


  // Setup the still image file output
@@ -150,7 +149,7 @@
  NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:
  AVVideoCodecJPEG, AVVideoCodecKey,
  nil];
- [newStillImageOutput setOutputSettings:outputSettings];
+ newStillImageOutput.outputSettings = outputSettings;


  // Create session (use default AVCaptureSessionPresetHigh)
@@ -173,12 +172,12 @@
  self.session = newCaptureSession;

  // Set up the movie file output
- NSURL *outputFileURL = [self tempFileURL];
- AVCamRecorder *newRecorder = [[AVCamRecorder alloc] initWithSession:[self session] outputFileURL:outputFileURL];
- [newRecorder setDelegate:self];
+ NSURL *outputFileURL = self.tempFileURL;
+ AVCamRecorder *newRecorder = [[AVCamRecorder alloc] initWithSession:self.session outputFileURL:outputFileURL];
+ newRecorder.delegate = self;

  // Send an error to the delegate if video recording is unavailable
- if ( ! [newRecorder recordsVideo] && [newRecorder recordsAudio] )
+ if ( ! newRecorder.recordsVideo && newRecorder.recordsAudio )
  {
  NSString *localizedDescription = NSLocalizedString(@"Video recording unavailable", @"Video recording unavailable description");
  NSString *localizedFailureReason = NSLocalizedString(@"Movies recorded on this device will only contain audio. They will be accessible through iTunes file sharing.", @"Video recording unavailable failure reason");
@@ -196,19 +195,19 @@
  // Create video preview layer and add it to the UI
  if ( videoView )
  {
- AVCaptureVideoPreviewLayer *newCaptureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:[self session]];
- CALayer *viewLayer = [videoView layer];
- [viewLayer setMasksToBounds:YES];
+ AVCaptureVideoPreviewLayer *newCaptureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
+ CALayer *viewLayer = videoView.layer;
+ viewLayer.masksToBounds = YES;

- CGRect bounds = [videoView bounds];
- [newCaptureVideoPreviewLayer setFrame:bounds];
+ CGRect bounds = videoView.bounds;
+ newCaptureVideoPreviewLayer.frame = bounds;

- if ( [[self recorder] isOrientationSupported] )
- [[self recorder] setOrientation:AVCaptureVideoOrientationPortrait];
+ if ( self.recorder.isOrientationSupported )
+ self.recorder.orientation = AVCaptureVideoOrientationPortrait;

- [newCaptureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
+ newCaptureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;

- [viewLayer insertSublayer:newCaptureVideoPreviewLayer below:[[viewLayer sublayers] objectAtIndex:0]];
+ [viewLayer insertSublayer:newCaptureVideoPreviewLayer below:[viewLayer.sublayers objectAtIndex:0]];

  self.captureVideoPreviewLayer = newCaptureVideoPreviewLayer;

@@ -225,7 +224,7 @@
  {
  if ( ! self.isRunning )
  {
- [[self session] startRunning];
+ [self.session startRunning];
  self.isRunning = YES;
  }
  }
@@ -234,7 +233,7 @@
  {
  if ( self.isRunning )
  {
- [[self session] stopRunning];
+ [self.session stopRunning];
  self.isRunning = NO;
  }
  }
@@ -247,16 +246,16 @@
  // to the foreground unless you request background execution time. This also ensures that there will be time to write the file to the assets library
  // when AVCam is backgrounded. To conclude this background execution, -endBackgroundTask is called in -recorder:recordingDidFinishToOutputFileURL:error:
  // after the recorded file has been saved.
- [self setBackgroundRecordingID:[[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:^{}]];
+ self.backgroundRecordingID = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:^{}];
  }

- [self removeFile:[[self recorder] outputFileURL]];
- [[self recorder] startRecordingWithOrientation:self.orientation];
+ [self removeFile:[self.recorder outputFileURL]];
+ [self.recorder startRecordingWithOrientation:self.orientation];
  }

  - (void) stopRecording
  {
- [[self recorder] stopRecording];
+ [self.recorder stopRecording];
  }

  - (void) saveImageToLibrary:(UIImage*)image
@@ -268,8 +267,8 @@
  CFRunLoopPerformBlock(CFRunLoopGetMain(), kCFRunLoopCommonModes, ^{ [delegate club:self assetSavedToURL:assetURL error:error]; });
  };

- [library writeImageToSavedPhotosAlbum:[image CGImage]
- orientation:(ALAssetOrientation)[image imageOrientation]
+ [library writeImageToSavedPhotosAlbum:image.CGImage
+ orientation:(ALAssetOrientation)image.imageOrientation
  completionBlock:completionBlock];
  }

@@ -279,23 +278,23 @@
  }
  - (void) captureStillImageAnimated:(BOOL)animated
  {
- AVCaptureConnection *stillImageConnection = [AVCamUtilities connectionWithMediaType:AVMediaTypeVideo fromConnections:[[self stillImageOutput] connections]];
+ AVCaptureConnection *stillImageConnection = [AVCamUtilities connectionWithMediaType:AVMediaTypeVideo fromConnections:self.stillImageOutput.connections];
  if ( ! stillImageConnection )
  return;

  if ( [stillImageConnection isVideoOrientationSupported] )
- [stillImageConnection setVideoOrientation:self.orientation];
+ stillImageConnection.videoOrientation = self.orientation;

  if ( animated )
  {
  // Flash the screen white and fade it out to give UI feedback that a still image was taken
- UIView *flashView = [[UIView alloc] initWithFrame:[[[self viewFinderView] window] bounds]];
- [flashView setBackgroundColor:[UIColor whiteColor]];
- [[[self viewFinderView] window] addSubview:flashView];
+ UIView *flashView = [[UIView alloc] initWithFrame:self.viewFinderView.window.bounds];
+ flashView.backgroundColor = [UIColor whiteColor];
+ [self.viewFinderView.window addSubview:flashView];

  [UIView animateWithDuration:.4f
  animations:^{
- [flashView setAlpha:0.f];
+ flashView.alpha = 0.f;
  }
  completion:^(BOOL finished){
  [flashView removeFromSuperview];
@@ -303,7 +302,7 @@
  ];
  }

- [[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:stillImageConnection
+ [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:stillImageConnection
  completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {

  void (^completionBlock)(UIImage*,NSError*) = ^(UIImage *image, NSError *error) {
@@ -337,38 +336,44 @@
  {
  BOOL success = NO;

- if ( [self cameraCount] > 1 )
+ if ( self.cameraCount > 1 )
  {
  NSError *error;
  AVCaptureDeviceInput *newVideoInput;
- AVCaptureDevicePosition position = [[self.videoInput device] position];
+ AVCaptureDevicePosition position = [self currentCamera];

  if ( position == AVCaptureDevicePositionBack )
- newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self frontFacingCamera] error:&error];
+ {
+ newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:self.frontFacingCamera error:&error];
+ }
  else if ( position == AVCaptureDevicePositionFront )
- newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self backFacingCamera] error:&error];
+ {
+ newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:self.backFacingCamera error:&error];
+ }
  else
  goto bail;

- if ( newVideoInput != nil )
+ if ( error )
+ {
+ if ( [delegate respondsToSelector:@selector(club:didFailWithError:)] )
+ CFRunLoopPerformBlock(CFRunLoopGetMain(), kCFRunLoopCommonModes, ^{ [delegate club:self didFailWithError:error]; });
+ }
+ else
  {
- [[self session] beginConfiguration];
- [[self session] removeInput:[self videoInput]];
- if ( [[self session] canAddInput:newVideoInput] )
+ [self.session beginConfiguration];
+ [self.session removeInput:self.videoInput];
+ if ( [self.session canAddInput:newVideoInput] )
  {
- [[self session] addInput:newVideoInput];
+ [self.session addInput:newVideoInput];
  self.videoInput = newVideoInput;
  }
  else
- [[self session] addInput:[self videoInput]];
- [[self session] commitConfiguration];
+ {
+ [self.session addInput:self.videoInput];
+ }
+ [self.session commitConfiguration];
  success = YES;
  }
- else if ( error )
- {
- if ( [delegate respondsToSelector:@selector(club:didFailWithError:)] )
- CFRunLoopPerformBlock(CFRunLoopGetMain(), kCFRunLoopCommonModes, ^{ [delegate club:self didFailWithError:error]; });
- }
  }

  bail:
@@ -389,19 +394,19 @@ bail:

  - (BOOL) hasCamera
  {
- return [self cameraCount] > 0;
+ return self.cameraCount > 0;
  }
  - (BOOL) hasMultipleCameras
  {
- return [self cameraCount] > 1;
+ return self.cameraCount > 1;
  }
  - (BOOL) hasAudio
  {
- return [self micCount] > 0;
+ return self.micCount > 0;
  }
  - (BOOL) hasVideo
  {
- return [self hasCamera] && [self hasAudio];
+ return self.hasCamera && self.hasAudio;
  }


@@ -409,13 +414,13 @@ bail:
  // Perform an auto focus at the specified point. The focus mode will automatically change to locked once the auto focus is complete.
  - (void) autoFocusAtPoint:(CGPoint)point
  {
- AVCaptureDevice *device = [[self videoInput] device];
- if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
+ AVCaptureDevice *device = self.videoInput.device;
+ if ( device.isFocusPointOfInterestSupported && [device isFocusModeSupported:AVCaptureFocusModeAutoFocus] ) {
  NSError *error;
  if ( [device lockForConfiguration:&error] )
  {
- [device setFocusPointOfInterest:point];
- [device setFocusMode:AVCaptureFocusModeAutoFocus];
+ device.focusPointOfInterest = point;
+ device.focusMode = AVCaptureFocusModeAutoFocus;
  [device unlockForConfiguration];
  }
  else
@@ -429,15 +434,15 @@ bail:
  // Switch to continuous auto focus mode at the specified point
  - (void) continuousFocusAtPoint:(CGPoint)point
  {
- AVCaptureDevice *device = [[self videoInput] device];
+ AVCaptureDevice *device = self.videoInput.device;

- if ( [device isFocusPointOfInterestSupported] && [device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus] )
+ if ( device.isFocusPointOfInterestSupported && [device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus] )
  {
  NSError *error;
  if ( [device lockForConfiguration:&error] )
  {
- [device setFocusPointOfInterest:point];
- [device setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
+ device.focusPointOfInterest = point;
+ device.focusMode = AVCaptureFocusModeContinuousAutoFocus;
  [device unlockForConfiguration];
  }
  else
@@ -453,9 +458,9 @@ bail:
  - (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates
  {
  CGPoint pointOfInterest = CGPointMake(.5f, .5f);
- CGSize frameSize = [[self viewFinderView] frame].size;
+ CGSize frameSize = self.viewFinderView.frame.size;

- if ( [[self recorder] isMirrored] )
+ if ( self.recorder.isMirrored )
  viewCoordinates.x = frameSize.width - viewCoordinates.x;

  if ( [[self.captureVideoPreviewLayer videoGravity] isEqualToString:AVLayerVideoGravityResize] )
@@ -466,11 +471,11 @@ bail:
  else
  {
  CGRect cleanAperture;
- for (AVCaptureInputPort *port in [[self videoInput] ports])
+ for (AVCaptureInputPort *port in self.videoInput.ports)
  {
- if ( [port mediaType] == AVMediaTypeVideo )
+ if ( port.mediaType == AVMediaTypeVideo )
  {
- cleanAperture = CMVideoFormatDescriptionGetCleanAperture([port formatDescription], YES);
+ cleanAperture = CMVideoFormatDescriptionGetCleanAperture(port.formatDescription, YES);
  CGSize apertureSize = cleanAperture.size;
  CGPoint point = viewCoordinates;

@@ -565,7 +570,7 @@ bail:
  NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
  for (AVCaptureDevice *device in devices)
  {
- if ( [device position] == position )
+ if ( device.position == position )
  return device;
  }
  return nil;
@@ -583,11 +588,16 @@ bail:
  return [self cameraWithPosition:AVCaptureDevicePositionBack];
  }

+ - (AVCaptureDevicePosition) currentCamera
+ {
+ return [[self.videoInput device] position];
+ }
+
  // Find and return an audio device, returning nil if one is not found
  - (AVCaptureDevice *) audioDevice
  {
  NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
- if ( [devices count] > 0 )
+ if ( devices.count > 0 )
  return [devices objectAtIndex:0];
  return nil;
  }
@@ -599,7 +609,7 @@ bail:

  - (void) removeFile:(NSURL *)fileURL
  {
- NSString *filePath = [fileURL path];
+ NSString *filePath = fileURL.path;
  NSFileManager *fileManager = [NSFileManager defaultManager];
  if ( [fileManager fileExistsAtPath:filePath] )
  {
@@ -616,7 +626,7 @@ bail:
  {
  NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
  NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
- [dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
+ dateFormatter.dateFormat = @"yyyy-MM-dd_HH-mm-ss";
  NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]];
  NSError *error;
  if ( ! [[NSFileManager defaultManager] copyItemAtURL:fileURL toURL:[NSURL fileURLWithPath:destinationPath] error:&error] )
@@ -640,7 +650,7 @@ bail:

  -(void)recorder:(AVCamRecorder *)recorder recordingDidFinishToOutputFileURL:(NSURL *)outputFileURL error:(NSError *)error
  {
- if ( [[self recorder] recordsAudio] && ![[self recorder] recordsVideo] )
+ if ( self.recorder.recordsAudio && ! self.recorder.recordsVideo )
  {
  // If the file was created on a device that doesn't support video recording, it can't be saved to the assets
  // library. Instead, save it in the app's Documents directory, whence it can be copied from the device via
@@ -648,7 +658,7 @@ bail:
  [self copyFileToDocuments:outputFileURL];

  if ( [[UIDevice currentDevice] isMultitaskingSupported] )
- [[UIApplication sharedApplication] endBackgroundTask:[self backgroundRecordingID]];
+ [[UIApplication sharedApplication] endBackgroundTask:self.backgroundRecordingID];

  if ( [delegate respondsToSelector:@selector(clubRecordingFinished:)] )
  CFRunLoopPerformBlock(CFRunLoopGetMain(), kCFRunLoopCommonModes, ^{ [delegate clubRecordingFinished:self]; });
@@ -665,7 +675,7 @@ bail:
  }

  if ( [[UIDevice currentDevice] isMultitaskingSupported] )
- [[UIApplication sharedApplication] endBackgroundTask:[self backgroundRecordingID]];
+ [[UIApplication sharedApplication] endBackgroundTask:self.backgroundRecordingID];

  if ( [delegate respondsToSelector:@selector(clubRecordingFinished:)] )
  CFRunLoopPerformBlock(CFRunLoopGetMain(), kCFRunLoopCommonModes, ^{ [delegate clubRecordingFinished:self]; });
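
For orientation, here is a minimal usage sketch of the 1.0.3 API as it surfaces in this diff. Only startInView:, captureStillImageAnimated:, hasCamera, and the delegate callbacks (clubRecordingFinished:, club:didFailWithError:) are taken from the source above; the CameraViewController class, the AVClubDelegate protocol name, the plain [[AVClub alloc] init] initializer, and the writable delegate property are assumptions for illustration, not confirmed by the package.

#import "AVClub.h"

// Hypothetical host view controller; AVClubDelegate is an assumed protocol name.
@interface CameraViewController : UIViewController <AVClubDelegate>
@property (strong, nonatomic) AVClub *club;
@property (strong, nonatomic) UIView *previewView;
@end

@implementation CameraViewController

- (void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];
    self.club = [[AVClub alloc] init];       // assumed initializer
    self.club.delegate = self;               // assumed setter; AVClub.m messages a `delegate` ivar
    [self.club startInView:self.previewView]; // builds the session, inputs, outputs, and preview layer
}

- (void)takePhoto
{
    if ( self.club.hasCamera )
        [self.club captureStillImageAnimated:YES]; // flashes the screen white as capture feedback
}

// AVClub dispatches delegate callbacks onto the main run loop via CFRunLoopPerformBlock.
- (void)clubRecordingFinished:(AVClub *)club
{
    NSLog(@"recording finished");
}

- (void)club:(AVClub *)club didFailWithError:(NSError *)error
{
    NSLog(@"capture failed: %@", error);
}

@end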