/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <errno.h>
#include <fcntl.h>
#include <linux/videodev2.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <unistd.h>

#include <iostream>
#include <new>

#include "webrtc/modules/video_capture/linux/video_capture_linux.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/ref_count.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"

namespace webrtc
{
namespace videocapturemodule
{
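// Usage sketch (illustrative only; frame-callback registration and module
// release are omitted, and the device string is a made-up example). The
// deviceUniqueId must match the V4L2 bus_info reported by the camera, as
// checked in Init() below:
//
//   VideoCaptureModule* module =
//       VideoCaptureImpl::Create(0, "usb-0000:00:1d.7-1");
//   if (module) {
//       VideoCaptureCapability capability;
//       capability.width = 640;
//       capability.height = 480;
//       capability.maxFPS = 30;
//       capability.rawType = kVideoI420;
//       if (module->StartCapture(capability) == 0) {
//           // ... frames are delivered on the capture thread ...
//           module->StopCapture();
//       }
//   }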
VideoCaptureModule* VideoCaptureImpl::Create(const int32_t id,
                                             const char* deviceUniqueId)
{
    RefCountImpl<videocapturemodule::VideoCaptureModuleV4L2>* implementation =
        new RefCountImpl<videocapturemodule::VideoCaptureModuleV4L2>(id);

    if (!implementation || implementation->Init(deviceUniqueId) != 0)
    {
        delete implementation;
        implementation = NULL;
    }

    return implementation;
}

VideoCaptureModuleV4L2::VideoCaptureModuleV4L2(const int32_t id)
    : VideoCaptureImpl(id),
      _captureThread(NULL),
      _captureCritSect(CriticalSectionWrapper::CreateCriticalSection()),
      _deviceId(-1),
      _deviceFd(-1),
      _buffersAllocatedByDevice(-1),
      _currentWidth(-1),
      _currentHeight(-1),
      _currentFrameRate(-1),
      _captureStarted(false),
      _captureVideoType(kVideoI420),
      _pool(NULL)
{
}

int32_t VideoCaptureModuleV4L2::Init(const char* deviceUniqueIdUTF8)
{
    int len = strlen((const char*) deviceUniqueIdUTF8);
    _deviceUniqueId = new (std::nothrow) char[len + 1];
    if (_deviceUniqueId)
    {
        memcpy(_deviceUniqueId, deviceUniqueIdUTF8, len + 1);
    }

    int fd;
    char device[32];
    bool found = false;

    /* detect /dev/video [0-63] entries */
    int n;
    for (n = 0; n < 64; n++)
    {
        sprintf(device, "/dev/video%d", n);
        if ((fd = open(device, O_RDONLY)) != -1)
        {
            // query device capabilities
            struct v4l2_capability cap;
            if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0)
            {
                if (cap.bus_info[0] != 0)
                {
                    if (strncmp((const char*) cap.bus_info,
                                (const char*) deviceUniqueIdUTF8,
                                strlen((const char*) deviceUniqueIdUTF8)) == 0) // match with device id
                    {
                        close(fd);
                        found = true;
                        break; // fd matches with device unique id supplied
                    }
                }
            }
            close(fd); // close since this is not the matching device
        }
    }
    if (!found)
    {
        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "no matching device found");
        return -1;
    }
    _deviceId = n; // store the device id
    return 0;
}

VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2()
{
    StopCapture();
    if (_captureCritSect)
    {
        delete _captureCritSect;
    }
    if (_deviceFd != -1)
        close(_deviceFd);
}

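// Starts streaming from the V4L2 device: opens /dev/video<_deviceId>,
// negotiates a pixel format from the preferred list (VIDIOC_ENUM_FMT /
// VIDIOC_S_FMT), tries to set the frame rate (VIDIOC_G_PARM / VIDIOC_S_PARM),
// memory-maps the driver buffers, starts the capture thread and finally
// issues VIDIOC_STREAMON.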
int32_t VideoCaptureModuleV4L2::StartCapture(
    const VideoCaptureCapability& capability)
{
    if (_captureStarted)
    {
        if (capability.width == _currentWidth &&
            capability.height == _currentHeight &&
            _captureVideoType == capability.rawType)
        {
            return 0;
        }
        else
        {
            StopCapture();
        }
    }

    CriticalSectionScoped cs(_captureCritSect);
    // first open /dev/video device
    char device[20];
    sprintf(device, "/dev/video%d", (int) _deviceId);

    if ((_deviceFd = open(device, O_RDWR | O_NONBLOCK, 0)) < 0)
    {
        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                   "error in opening %s, errno = %d", device, errno);
        return -1;
    }

    // Supported video formats in preferred order.
    // If the requested resolution is larger than VGA, we prefer MJPEG. Go for
    // I420 otherwise.
    const int nFormats = 4;
    unsigned int fmts[nFormats];
    if (capability.width > 640 || capability.height > 480) {
        fmts[0] = V4L2_PIX_FMT_MJPEG;
        fmts[1] = V4L2_PIX_FMT_YUV420;
        fmts[2] = V4L2_PIX_FMT_YUYV;
        fmts[3] = V4L2_PIX_FMT_JPEG;
    } else {
        fmts[0] = V4L2_PIX_FMT_YUV420;
        fmts[1] = V4L2_PIX_FMT_YUYV;
        fmts[2] = V4L2_PIX_FMT_MJPEG;
        fmts[3] = V4L2_PIX_FMT_JPEG;
    }

    // Enumerate image formats.
    struct v4l2_fmtdesc fmt;
    int fmtsIdx = nFormats;
    memset(&fmt, 0, sizeof(fmt));
    fmt.index = 0;
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
                 "Video Capture enumerates supported image formats:");
    while (ioctl(_deviceFd, VIDIOC_ENUM_FMT, &fmt) == 0) {
        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
                     "  { pixelformat = %c%c%c%c, description = '%s' }",
                     fmt.pixelformat & 0xFF, (fmt.pixelformat>>8) & 0xFF,
                     (fmt.pixelformat>>16) & 0xFF, (fmt.pixelformat>>24) & 0xFF,
                     fmt.description);
        // Match the preferred order.
        for (int i = 0; i < nFormats; i++) {
            if (fmt.pixelformat == fmts[i] && i < fmtsIdx)
                fmtsIdx = i;
        }
        // Keep enumerating.
        fmt.index++;
    }

    if (fmtsIdx == nFormats)
    {
        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                     "no supported video formats found");
        return -1;
    } else {
        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
                     "We prefer format %c%c%c%c",
                     fmts[fmtsIdx] & 0xFF, (fmts[fmtsIdx]>>8) & 0xFF,
                     (fmts[fmtsIdx]>>16) & 0xFF, (fmts[fmtsIdx]>>24) & 0xFF);
    }

    struct v4l2_format video_fmt;
    memset(&video_fmt, 0, sizeof(struct v4l2_format));
    video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    video_fmt.fmt.pix.sizeimage = 0;
    video_fmt.fmt.pix.width = capability.width;
    video_fmt.fmt.pix.height = capability.height;
    video_fmt.fmt.pix.pixelformat = fmts[fmtsIdx];

    if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)
        _captureVideoType = kVideoYUY2;
    else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420)
        _captureVideoType = kVideoI420;
    else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG ||
             video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG)
        _captureVideoType = kVideoMJPEG;

    // set format and frame size now
    if (ioctl(_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0)
    {
        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                   "error in VIDIOC_S_FMT, errno = %d", errno);
        return -1;
    }

    // initialize current width and height
    _currentWidth = video_fmt.fmt.pix.width;
    _currentHeight = video_fmt.fmt.pix.height;
    _captureDelay = 120;

    // Try to set the frame rate; first check whether the driver supports it.
    bool driver_framerate_support = true;
    struct v4l2_streamparm streamparms;
    memset(&streamparms, 0, sizeof(streamparms));
    streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(_deviceFd, VIDIOC_G_PARM, &streamparms) < 0) {
        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                   "error in VIDIOC_G_PARM errno = %d", errno);
        driver_framerate_support = false;
      // continue
    } else {
      // Check whether the V4L2_CAP_TIMEPERFRAME capability bit is set.
      // capability is a bitmask, so test the flag instead of comparing
      // for equality.
      if (streamparms.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
        // driver supports the feature. Set required framerate.
        memset(&streamparms, 0, sizeof(streamparms));
        streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        streamparms.parm.capture.timeperframe.numerator = 1;
        streamparms.parm.capture.timeperframe.denominator = capability.maxFPS;
        if (ioctl(_deviceFd, VIDIOC_S_PARM, &streamparms) < 0) {
          WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                   "Failed to set the framerate. errno=%d", errno);
          driver_framerate_support = false;
        } else {
          _currentFrameRate = capability.maxFPS;
        }
      }
    }
    // If the driver doesn't support frame rate control, hardcode a value
    // based on the frame size.
    if (!driver_framerate_support) {
      if (_currentWidth >= 800 && _captureVideoType != kVideoMJPEG) {
        _currentFrameRate = 15;
      } else {
        _currentFrameRate = 30;
      }
    }

    if (!AllocateVideoBuffers())
    {
        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                   "failed to allocate video capture buffers");
        return -1;
    }

    // start capture thread
    if (!_captureThread)
    {
        _captureThread = ThreadWrapper::CreateThread(
            VideoCaptureModuleV4L2::CaptureThread, this, kHighPriority);
        unsigned int id;
        _captureThread->Start(id);
    }

    // Needed to start UVC camera - from the uvcview application
    enum v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(_deviceFd, VIDIOC_STREAMON, &type) == -1)
    {
        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                     "Failed to turn on stream");
        return -1;
    }

    _captureStarted = true;
    return 0;
}

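// Stops the capture thread, then, under the critical section, releases the
// mapped buffers, turns streaming off and closes the device.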
int32_t VideoCaptureModuleV4L2::StopCapture()
{
    if (_captureThread) {
        // Make sure the capture thread stops using the critsect.
        _captureThread->SetNotAlive();
        if (_captureThread->Stop()) {
            delete _captureThread;
            _captureThread = NULL;
        } else
        {
            // Couldn't stop the thread, leak instead of crash.
            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
                         "%s: could not stop capture thread", __FUNCTION__);
            assert(false);
        }
    }

    CriticalSectionScoped cs(_captureCritSect);
    if (_captureStarted)
    {
        _captureStarted = false;
        _captureThread = NULL;

        DeAllocateVideoBuffers();
        close(_deviceFd);
        _deviceFd = -1;
    }

    return 0;
}

// critical section protected by the caller

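// Sets up memory-mapped streaming I/O: requests kNoOfV4L2Bufffers buffers
// from the driver (VIDIOC_REQBUFS), queries and mmap()s each buffer
// (VIDIOC_QUERYBUF) and queues it for capture (VIDIOC_QBUF).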
bool VideoCaptureModuleV4L2::AllocateVideoBuffers()
{
    struct v4l2_requestbuffers rbuffer;
    memset(&rbuffer, 0, sizeof(v4l2_requestbuffers));

    rbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    rbuffer.memory = V4L2_MEMORY_MMAP;
    rbuffer.count = kNoOfV4L2Bufffers;

    if (ioctl(_deviceFd, VIDIOC_REQBUFS, &rbuffer) < 0)
    {
        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                   "Could not get buffers from device. errno = %d", errno);
        return false;
    }

    if (rbuffer.count > kNoOfV4L2Bufffers)
        rbuffer.count = kNoOfV4L2Bufffers;

    _buffersAllocatedByDevice = rbuffer.count;

    // Map the buffers
    _pool = new Buffer[rbuffer.count];

    for (unsigned int i = 0; i < rbuffer.count; i++)
    {
        struct v4l2_buffer buffer;
        memset(&buffer, 0, sizeof(v4l2_buffer));
        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buffer.memory = V4L2_MEMORY_MMAP;
        buffer.index = i;

        if (ioctl(_deviceFd, VIDIOC_QUERYBUF, &buffer) < 0)
        {
            return false;
        }

        _pool[i].start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, MAP_SHARED,
                              _deviceFd, buffer.m.offset);

        if (MAP_FAILED == _pool[i].start)
        {
            for (unsigned int j = 0; j < i; j++)
                munmap(_pool[j].start, _pool[j].length);
            return false;
        }

        _pool[i].length = buffer.length;

        if (ioctl(_deviceFd, VIDIOC_QBUF, &buffer) < 0)
        {
            return false;
        }
    }
    return true;
}

bool VideoCaptureModuleV4L2::DeAllocateVideoBuffers()
{
    // unmap buffers
    for (int i = 0; i < _buffersAllocatedByDevice; i++)
        munmap(_pool[i].start, _pool[i].length);

    delete[] _pool;

    // turn off stream
    enum v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(_deviceFd, VIDIOC_STREAMOFF, &type) < 0)
    {
        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                   "VIDIOC_STREAMOFF error. errno: %d", errno);
    }

    return true;
}

bool VideoCaptureModuleV4L2::CaptureStarted()
{
    return _captureStarted;
}

bool VideoCaptureModuleV4L2::CaptureThread(void* obj)
{
    return static_cast<VideoCaptureModuleV4L2*> (obj)->CaptureProcess();
}
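
// One iteration of the capture loop: waits up to one second (select) for the
// device to become readable, dequeues a filled buffer (VIDIOC_DQBUF),
// delivers it through IncomingFrame() and requeues the buffer (VIDIOC_QBUF).
// Returns false only if select() fails.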
bool VideoCaptureModuleV4L2::CaptureProcess()
{
    int retVal = 0;
    fd_set rSet;
    struct timeval timeout;

    _captureCritSect->Enter();

    FD_ZERO(&rSet);
    FD_SET(_deviceFd, &rSet);
    timeout.tv_sec = 1;
    timeout.tv_usec = 0;

    retVal = select(_deviceFd + 1, &rSet, NULL, NULL, &timeout);
    if (retVal < 0 && errno != EINTR) // continue if interrupted
    {
        // select failed
        _captureCritSect->Leave();
        return false;
    }
    else if (retVal == 0)
    {
        // select timed out
        _captureCritSect->Leave();
        return true;
    }
    else if (!FD_ISSET(_deviceFd, &rSet))
    {
        // no event on the camera handle
        _captureCritSect->Leave();
        return true;
    }

    if (_captureStarted)
    {
        struct v4l2_buffer buf;
        memset(&buf, 0, sizeof(struct v4l2_buffer));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        // dequeue a buffer - repeat until dequeued properly!
        while (ioctl(_deviceFd, VIDIOC_DQBUF, &buf) < 0)
        {
            if (errno != EINTR)
            {
                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                           "could not sync on a buffer on device %s", strerror(errno));
                _captureCritSect->Leave();
                return true;
            }
        }
        VideoCaptureCapability frameInfo;
        frameInfo.width = _currentWidth;
        frameInfo.height = _currentHeight;
        frameInfo.rawType = _captureVideoType;

        // convert to I420 if needed
        IncomingFrame((unsigned char*) _pool[buf.index].start,
                      buf.bytesused, frameInfo);
        // enqueue the buffer again
        if (ioctl(_deviceFd, VIDIOC_QBUF, &buf) == -1)
        {
            WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
                       "Failed to enqueue capture buffer");
        }
    }
    _captureCritSect->Leave();
    usleep(0);
    return true;
}

int32_t VideoCaptureModuleV4L2::CaptureSettings(VideoCaptureCapability& settings)
{
    settings.width = _currentWidth;
    settings.height = _currentHeight;
    settings.maxFPS = _currentFrameRate;
    settings.rawType = _captureVideoType;

    return 0;
}
}  // namespace videocapturemodule
}  // namespace webrtc