/*
 *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif

#import <UIKit/UIKit.h>

#import "webrtc/modules/video_capture/ios/device_info_ios_objc.h"
#import "webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.h"

#include "webrtc/system_wrappers/include/trace.h"

using namespace webrtc;
using namespace webrtc::videocapturemodule;

@interface RTCVideoCaptureIosObjC (hidden)
- (BOOL)changeCaptureInputByUniqueId:(NSString*)uniqueId;
@end

@implementation RTCVideoCaptureIosObjC {
  webrtc::videocapturemodule::VideoCaptureIos* _owner;
  webrtc::VideoCaptureCapability _capability;
  AVCaptureSession* _captureSession;
  int _captureId;
  BOOL _orientationHasChanged;
  AVCaptureConnection* _connection;
  BOOL _captureChanging;  // Guarded by _captureChangingCondition.
  NSCondition* _captureChangingCondition;
}

@synthesize frameRotation = _frameRotation;

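// Initializes the capturer for |captureId|: creates the AVCaptureSession,
// attaches an NV12 video data output, and registers for capture-error and
// device-orientation notifications. Returns nil if the session or the
// condition variable cannot be created.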
- (id)initWithOwner:(VideoCaptureIos*)owner captureId:(int)captureId {
  if ((self = [super init])) {
    _owner = owner;
    _captureId = captureId;
    _captureSession = [[AVCaptureSession alloc] init];
#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
    NSString* version = [[UIDevice currentDevice] systemVersion];
    if ([version integerValue] >= 7) {
      _captureSession.usesApplicationAudioSession = NO;
    }
#endif
    _captureChanging = NO;
    _captureChangingCondition = [[NSCondition alloc] init];

    if (!_captureSession || !_captureChangingCondition) {
      return nil;
    }

    // create and configure a new output (using callbacks)
    AVCaptureVideoDataOutput* captureOutput =
        [[AVCaptureVideoDataOutput alloc] init];
    NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;

    NSNumber* val = [NSNumber
        numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange];
    NSDictionary* videoSettings =
        [NSDictionary dictionaryWithObject:val forKey:key];
    captureOutput.videoSettings = videoSettings;

    // add new output
    if ([_captureSession canAddOutput:captureOutput]) {
      [_captureSession addOutput:captureOutput];
    } else {
      WEBRTC_TRACE(kTraceError,
                   kTraceVideoCapture,
                   _captureId,
                   "%s:%s:%d Could not add output to AVCaptureSession ",
                   __FILE__,
                   __FUNCTION__,
                   __LINE__);
    }

    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];

    NSNotificationCenter* notify = [NSNotificationCenter defaultCenter];
    [notify addObserver:self
               selector:@selector(onVideoError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [notify addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
  }

  return self;
}

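// Makes this object the sample buffer delegate of the current video output so
// frames are delivered on the default-priority global dispatch queue.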
- (void)directOutputToSelf {
  [[self currentOutput]
      setSampleBufferDelegate:self
                        queue:dispatch_get_global_queue(
                                  DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
}

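// Detaches the sample buffer delegate so no further frames are delivered.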
- (void)directOutputToNil {
  [[self currentOutput] setSampleBufferDelegate:nil queue:NULL];
}

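// UIDeviceOrientationDidChangeNotification handler: remembers that the
// orientation has changed and realigns the capture connection.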
- (void)deviceOrientationDidChange:(NSNotification*)notification {
  _orientationHasChanged = YES;
  [self setRelativeVideoOrientation];
}

- (void)dealloc {
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}

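// Selects the capture device with the given unique ID. Returns YES without
// reconfiguring if that device is already the active input; otherwise swaps
// the session input via changeCaptureInputByUniqueId:.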
- (BOOL)setCaptureDeviceByUniqueId:(NSString*)uniqueId {
  [self waitForCaptureChangeToFinish];
  // check to see if the camera is already set
  if (_captureSession) {
    NSArray* currentInputs = [NSArray arrayWithArray:[_captureSession inputs]];
    if ([currentInputs count] > 0) {
      AVCaptureDeviceInput* currentInput = [currentInputs objectAtIndex:0];
      if ([uniqueId isEqualToString:[currentInput.device uniqueID]]) {
        return YES;
      }
    }
  }

  return [self changeCaptureInputByUniqueId:uniqueId];
}

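// Validates the requested capability against the presets this session can
// provide, stores it, and starts the session asynchronously on a background
// queue. Returns NO if the capability cannot be satisfied.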
- (BOOL)startCaptureWithCapability:(const VideoCaptureCapability&)capability {
  [self waitForCaptureChangeToFinish];
  if (!_captureSession) {
    return NO;
  }

  // check limits of the resolution
  if (capability.maxFPS < 0 || capability.maxFPS > 60) {
    return NO;
  }

  if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
    if (capability.width > 1920 || capability.height > 1080) {
      return NO;
    }
  } else if ([_captureSession
                 canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
    if (capability.width > 1280 || capability.height > 720) {
      return NO;
    }
  } else if ([_captureSession
                 canSetSessionPreset:AVCaptureSessionPreset640x480]) {
    if (capability.width > 640 || capability.height > 480) {
      return NO;
    }
  } else if ([_captureSession
                 canSetSessionPreset:AVCaptureSessionPreset352x288]) {
    if (capability.width > 352 || capability.height > 288) {
      return NO;
    }
  } else if (capability.width < 0 || capability.height < 0) {
    return NO;
  }

  _capability = capability;

  AVCaptureVideoDataOutput* currentOutput = [self currentOutput];
  if (!currentOutput)
    return NO;

  [self directOutputToSelf];

  _orientationHasChanged = NO;
  _captureChanging = YES;
  dispatch_async(
      dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0),
      ^(void) { [self startCaptureInBackgroundWithOutput:currentOutput]; });
  return YES;
}

- (AVCaptureVideoDataOutput*)currentOutput {
  return [[_captureSession outputs] firstObject];
}

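// Runs on a background queue: maps the requested resolution to a session
// preset, locks the device frame rate to the highest supported rate that does
// not exceed the requested maxFPS, commits the configuration, starts the
// session, and signals that the capture change has finished.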
- (void)startCaptureInBackgroundWithOutput:
            (AVCaptureVideoDataOutput*)currentOutput {
  NSString* captureQuality =
      [NSString stringWithString:AVCaptureSessionPresetLow];
  if (_capability.width >= 1920 || _capability.height >= 1080) {
    captureQuality =
        [NSString stringWithString:AVCaptureSessionPreset1920x1080];
  } else if (_capability.width >= 1280 || _capability.height >= 720) {
    captureQuality = [NSString stringWithString:AVCaptureSessionPreset1280x720];
  } else if (_capability.width >= 640 || _capability.height >= 480) {
    captureQuality = [NSString stringWithString:AVCaptureSessionPreset640x480];
  } else if (_capability.width >= 352 || _capability.height >= 288) {
    captureQuality = [NSString stringWithString:AVCaptureSessionPreset352x288];
  }

  // begin configuration for the AVCaptureSession
  [_captureSession beginConfiguration];

  // picture resolution
  [_captureSession setSessionPreset:captureQuality];

  // take care of capture framerate now
  NSArray* sessionInputs = _captureSession.inputs;
  AVCaptureDeviceInput* deviceInput = [sessionInputs count] > 0 ?
      sessionInputs[0] : nil;
  AVCaptureDevice* inputDevice = deviceInput.device;
  if (inputDevice) {
    AVCaptureDeviceFormat* activeFormat = inputDevice.activeFormat;
    NSArray* supportedRanges = activeFormat.videoSupportedFrameRateRanges;
    AVFrameRateRange* targetRange = [supportedRanges count] > 0 ?
        supportedRanges[0] : nil;
    // Find the largest supported framerate less than capability maxFPS.
    for (AVFrameRateRange* range in supportedRanges) {
      if (range.maxFrameRate <= _capability.maxFPS &&
          targetRange.maxFrameRate <= range.maxFrameRate) {
        targetRange = range;
      }
    }
    if (targetRange && [inputDevice lockForConfiguration:NULL]) {
      inputDevice.activeVideoMinFrameDuration = targetRange.minFrameDuration;
      inputDevice.activeVideoMaxFrameDuration = targetRange.minFrameDuration;
      [inputDevice unlockForConfiguration];
    }
  }

  _connection = [currentOutput connectionWithMediaType:AVMediaTypeVideo];
  [self setRelativeVideoOrientation];

  // finished configuring, commit settings to AVCaptureSession.
  [_captureSession commitConfiguration];

  [_captureSession startRunning];
  [self signalCaptureChangeEnd];
}

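// Aligns the connection's video orientation with the current device
// orientation. Device landscape-left maps to capture landscape-right (and
// vice versa) because the two enums define landscape from opposite reference
// points. Face-up, face-down, and unknown fall back to portrait until a real
// orientation change has been observed.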
- (void)setRelativeVideoOrientation {
  if (!_connection.supportsVideoOrientation) {
    return;
  }

  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      _connection.videoOrientation =
          AVCaptureVideoOrientationPortrait;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      _connection.videoOrientation =
          AVCaptureVideoOrientationPortraitUpsideDown;
      break;
    case UIDeviceOrientationLandscapeLeft:
      _connection.videoOrientation =
          AVCaptureVideoOrientationLandscapeRight;
      break;
    case UIDeviceOrientationLandscapeRight:
      _connection.videoOrientation =
          AVCaptureVideoOrientationLandscapeLeft;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      if (!_orientationHasChanged) {
        _connection.videoOrientation =
            AVCaptureVideoOrientationPortrait;
      }
      break;
  }
}

- (void)onVideoError:(NSNotification*)notification {
  NSLog(@"onVideoError: %@", notification);
  // TODO(sjlee): Add specific error handling for this notification.
  WEBRTC_TRACE(kTraceError,
               kTraceVideoCapture,
               _captureId,
               "%s:%s:%d [AVCaptureSession startRunning] error.",
               __FILE__,
               __FUNCTION__,
               __LINE__);
}

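// Stops orientation notifications, detaches the frame delegate, and stops the
// session asynchronously on a background queue. Returns NO if there is no
// capture session to stop.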
- (BOOL)stopCapture {
  [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
  _orientationHasChanged = NO;
  [self waitForCaptureChangeToFinish];
  [self directOutputToNil];

  if (!_captureSession) {
    return NO;
  }

  _captureChanging = YES;
  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0),
                 ^(void) { [self stopCaptureInBackground]; });
  return YES;
}

- (void)stopCaptureInBackground {
  [_captureSession stopRunning];
  [self signalCaptureChangeEnd];
}

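// Removes the session's current input and adds the capture device matching
// the given unique ID (looked up via DeviceInfoIosObjC). Returns YES only if
// the new input was added successfully.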
- (BOOL)changeCaptureInputByUniqueId:(NSString*)uniqueId {
  [self waitForCaptureChangeToFinish];
  NSArray* currentInputs = [_captureSession inputs];
  // remove current input
  if ([currentInputs count] > 0) {
    AVCaptureInput* currentInput =
        (AVCaptureInput*)[currentInputs objectAtIndex:0];

    [_captureSession removeInput:currentInput];
  }

  // Look for the capture device with the requested unique ID in the list of
  // available capture devices.
  int captureDeviceCount = [DeviceInfoIosObjC captureDeviceCount];
  if (captureDeviceCount <= 0) {
    return NO;
  }

  AVCaptureDevice* captureDevice =
      [DeviceInfoIosObjC captureDeviceForUniqueId:uniqueId];

  if (!captureDevice) {
    return NO;
  }

  // now create capture session input out of AVCaptureDevice
  NSError* deviceError = nil;
  AVCaptureDeviceInput* newCaptureInput =
      [AVCaptureDeviceInput deviceInputWithDevice:captureDevice
                                            error:&deviceError];

  if (!newCaptureInput) {
    const char* errorMessage = [[deviceError localizedDescription] UTF8String];

    WEBRTC_TRACE(kTraceError,
                 kTraceVideoCapture,
                 _captureId,
                 "%s:%s:%d deviceInputWithDevice error:%s",
                 __FILE__,
                 __FUNCTION__,
                 __LINE__,
                 errorMessage);

    return NO;
  }

  // try to add our new capture device to the capture session
  [_captureSession beginConfiguration];

  BOOL addedCaptureInput = NO;
  if ([_captureSession canAddInput:newCaptureInput]) {
    [_captureSession addInput:newCaptureInput];
    addedCaptureInput = YES;
  } else {
    addedCaptureInput = NO;
  }

  [_captureSession commitConfiguration];

  return addedCaptureInput;
}

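// AVCaptureVideoDataOutputSampleBufferDelegate callback: locks the NV12 pixel
// buffer, computes the frame size from the Y and UV plane strides and
// heights, and forwards the raw frame to the owning VideoCaptureIos module.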
- (void)captureOutput:(AVCaptureOutput*)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection*)connection {
  const int kFlags = 0;
  CVImageBufferRef videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer);

  if (CVPixelBufferLockBaseAddress(videoFrame, kFlags) != kCVReturnSuccess) {
    return;
  }

  const int kYPlaneIndex = 0;
  const int kUVPlaneIndex = 1;

  uint8_t* baseAddress =
      (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(videoFrame, kYPlaneIndex);
  size_t yPlaneBytesPerRow =
      CVPixelBufferGetBytesPerRowOfPlane(videoFrame, kYPlaneIndex);
  size_t yPlaneHeight = CVPixelBufferGetHeightOfPlane(videoFrame, kYPlaneIndex);
  size_t uvPlaneBytesPerRow =
      CVPixelBufferGetBytesPerRowOfPlane(videoFrame, kUVPlaneIndex);
  size_t uvPlaneHeight =
      CVPixelBufferGetHeightOfPlane(videoFrame, kUVPlaneIndex);
  size_t frameSize =
      yPlaneBytesPerRow * yPlaneHeight + uvPlaneBytesPerRow * uvPlaneHeight;

  VideoCaptureCapability tempCaptureCapability;
  tempCaptureCapability.width = CVPixelBufferGetWidth(videoFrame);
  tempCaptureCapability.height = CVPixelBufferGetHeight(videoFrame);
  tempCaptureCapability.maxFPS = _capability.maxFPS;
  tempCaptureCapability.rawType = kVideoNV12;

  _owner->IncomingFrame(baseAddress, frameSize, tempCaptureCapability, 0);

  CVPixelBufferUnlockBaseAddress(videoFrame, kFlags);
}

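// signalCaptureChangeEnd and waitForCaptureChangeToFinish serialize capture
// reconfiguration: _captureChanging is set before an asynchronous start/stop
// and cleared (with a signal on the condition) when the background work
// completes, so the next change blocks until the previous one has finished.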
- (void)signalCaptureChangeEnd {
  [_captureChangingCondition lock];
  _captureChanging = NO;
  [_captureChangingCondition signal];
  [_captureChangingCondition unlock];
}

- (void)waitForCaptureChangeToFinish {
  [_captureChangingCondition lock];
  while (_captureChanging) {
    [_captureChangingCondition wait];
  }
  [_captureChangingCondition unlock];
}
@end