/*
 * Copyright (C) Texas Instruments - http://www.ti.com/
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * @file V4LCameraAdapter.cpp
 *
 * This file maps the Camera Hardware Interface to V4L2.
 *
 */

#include "V4LCameraAdapter.h"
#include "CameraHal.h"
#include "TICameraParameters.h"
#include "DebugUtils.h"
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <dirent.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/select.h>
#include <linux/videodev.h>

#include <ui/GraphicBuffer.h>
#include <ui/GraphicBufferMapper.h>

#include <cutils/properties.h>

#define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false ))
static int mDebugFps = 0;

#define Q16_OFFSET 16

#define HERE(Msg) {CAMHAL_LOGEB("--=== %s===--\n", Msg);}

namespace Ti {
namespace Camera {

//frames skipped before recalculating the framerate
#define FPS_PERIOD 30

//define this macro to save first few raw frames when starting the preview.
//#define SAVE_RAW_FRAMES 1
//#define DUMP_CAPTURE_FRAME 1
//#define PPM_PER_FRAME_CONVERSION 1

//Prototypes
static void convertYUV422i_yuyvTouyvy(uint8_t *src, uint8_t *dest, size_t size );
static void convertYUV422ToNV12Tiler(unsigned char *src, unsigned char *dest, int width, int height );
static void convertYUV422ToNV12(unsigned char *src, unsigned char *dest, int width, int height );

android::Mutex gV4LAdapterLock;
char device[15];


/*--------------------Camera Adapter Class STARTS here-----------------------------*/

/*--------------------V4L wrapper functions -------------------------------*/
status_t V4LCameraAdapter::v4lIoctl (int fd, int req, void* argp) {
    status_t ret = NO_ERROR;
    errno = 0;

    do {
        ret = ioctl (fd, req, argp);
    } while (-1 == ret && EINTR == errno);

    return ret;
}

status_t V4LCameraAdapter::v4lInitMmap(int& count) {
    status_t ret = NO_ERROR;

    //First allocate adapter internal buffers at V4L level for USB Cam
    //These are the buffers from which we will copy the data into overlay buffers
    /* Check if camera can handle NB_BUFFER buffers */
    mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    mVideoInfo->rb.memory = V4L2_MEMORY_MMAP;
    mVideoInfo->rb.count = count;

    ret = v4lIoctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
    if (ret < 0) {
        CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
        return ret;
    }

    count = mVideoInfo->rb.count;
    for (int i = 0; i < count; i++) {

        memset (&mVideoInfo->buf, 0, sizeof (struct v4l2_buffer));

        mVideoInfo->buf.index = i;
        mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;

        ret = v4lIoctl (mCameraHandle, VIDIOC_QUERYBUF, &mVideoInfo->buf);
        if (ret < 0) {
            CAMHAL_LOGEB("Unable to query buffer (%s)", strerror(errno));
            return ret;
        }
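        // VIDIOC_QUERYBUF reports the length and offset of each driver buffer;
        // mmap() that region below so frames can be read directly from the
        // driver-owned memory.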
        mVideoInfo->mem[i] = mmap (NULL,
                                   mVideoInfo->buf.length,
                                   PROT_READ | PROT_WRITE,
                                   MAP_SHARED,
                                   mCameraHandle,
                                   mVideoInfo->buf.m.offset);

        CAMHAL_LOGVB(" mVideoInfo->mem[%d]=%p ; mVideoInfo->buf.length = %d", i, mVideoInfo->mem[i], mVideoInfo->buf.length);
        if (mVideoInfo->mem[i] == MAP_FAILED) {
            CAMHAL_LOGEB("Unable to map buffer [%d]. (%s)", i, strerror(errno));
            return -1;
        }
    }
    return ret;
}

status_t V4LCameraAdapter::v4lInitUsrPtr(int& count) {
    status_t ret = NO_ERROR;

    mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    mVideoInfo->rb.memory = V4L2_MEMORY_USERPTR;
    mVideoInfo->rb.count = count;

    ret = v4lIoctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
    if (ret < 0) {
        CAMHAL_LOGEB("VIDIOC_REQBUFS failed for USERPTR: %s", strerror(errno));
        return ret;
    }

    count = mVideoInfo->rb.count;
    return ret;
}

status_t V4LCameraAdapter::v4lStartStreaming () {
    status_t ret = NO_ERROR;
    enum v4l2_buf_type bufType;

    if (!mVideoInfo->isStreaming) {
        bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;

        ret = v4lIoctl (mCameraHandle, VIDIOC_STREAMON, &bufType);
        if (ret < 0) {
            CAMHAL_LOGEB("StartStreaming: Unable to start capture: %s", strerror(errno));
            return ret;
        }
        mVideoInfo->isStreaming = true;
    }
    return ret;
}

status_t V4LCameraAdapter::v4lStopStreaming (int nBufferCount) {
    status_t ret = NO_ERROR;
    enum v4l2_buf_type bufType;

    if (mVideoInfo->isStreaming) {
        bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;

        ret = v4lIoctl (mCameraHandle, VIDIOC_STREAMOFF, &bufType);
        if (ret < 0) {
            CAMHAL_LOGEB("StopStreaming: Unable to stop capture: %s", strerror(errno));
            goto EXIT;
        }
        mVideoInfo->isStreaming = false;

        /* Unmap buffers */
        mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
        for (int i = 0; i < nBufferCount; i++) {
            if (munmap(mVideoInfo->mem[i], mVideoInfo->buf.length) < 0) {
                CAMHAL_LOGEA("munmap() failed");
            }
        }

        //free the memory allocated during REQBUFS, by setting the count=0
        mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        mVideoInfo->rb.memory = V4L2_MEMORY_MMAP;
        mVideoInfo->rb.count = 0;

        ret = v4lIoctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
        if (ret < 0) {
            CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
            goto EXIT;
        }
    }
EXIT:
    return ret;
}

status_t V4LCameraAdapter::v4lSetFormat (int width, int height, uint32_t pix_format) {
    status_t ret = NO_ERROR;

    mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ret = v4lIoctl(mCameraHandle, VIDIOC_G_FMT, &mVideoInfo->format);
    if (ret < 0) {
        CAMHAL_LOGEB("VIDIOC_G_FMT Failed: %s", strerror(errno));
    }

    mVideoInfo->width = width;
    mVideoInfo->height = height;
    mVideoInfo->framesizeIn = (width * height << 1);
    mVideoInfo->formatIn = DEFAULT_PIXEL_FORMAT;

    mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    mVideoInfo->format.fmt.pix.width = width;
    mVideoInfo->format.fmt.pix.height = height;
    mVideoInfo->format.fmt.pix.pixelformat = pix_format;

    ret = v4lIoctl(mCameraHandle, VIDIOC_S_FMT, &mVideoInfo->format);
    if (ret < 0) {
        CAMHAL_LOGEB("VIDIOC_S_FMT Failed: %s", strerror(errno));
        return ret;
    }
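    // VIDIOC_S_FMT may adjust the requested resolution to the closest size the
    // device supports, so read the negotiated format back and log it.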
    v4lIoctl(mCameraHandle, VIDIOC_G_FMT, &mVideoInfo->format);
    CAMHAL_LOGDB("VIDIOC_G_FMT : WxH = %dx%d", mVideoInfo->format.fmt.pix.width, mVideoInfo->format.fmt.pix.height);
    return ret;
}

status_t V4LCameraAdapter::restartPreview ()
{
    status_t ret = NO_ERROR;
    int width = 0;
    int height = 0;
    struct v4l2_streamparm streamParams;

    //configure for preview size and pixel format.
    mParams.getPreviewSize(&width, &height);

    ret = v4lSetFormat (width, height, DEFAULT_PIXEL_FORMAT);
    if (ret < 0) {
        CAMHAL_LOGEB("v4lSetFormat Failed: %s", strerror(errno));
        goto EXIT;
    }

    ret = v4lInitMmap(mPreviewBufferCount);
    if (ret < 0) {
        CAMHAL_LOGEB("v4lInitMmap Failed: %s", strerror(errno));
        goto EXIT;
    }

    //set frame rate
    streamParams.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    streamParams.parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
    streamParams.parm.capture.capturemode = V4L2_MODE_HIGHQUALITY;
    streamParams.parm.capture.timeperframe.denominator = FPS_PERIOD;
    streamParams.parm.capture.timeperframe.numerator = 1;
    ret = v4lIoctl(mCameraHandle, VIDIOC_S_PARM, &streamParams);
    if (ret < 0) {
        CAMHAL_LOGEB("VIDIOC_S_PARM Failed: %s", strerror(errno));
        goto EXIT;
    }

    for (int i = 0; i < mPreviewBufferCountQueueable; i++) {

        mVideoInfo->buf.index = i;
        mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;

        ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
        if (ret < 0) {
            CAMHAL_LOGEA("VIDIOC_QBUF Failed");
            goto EXIT;
        }
        nQueued++;
    }

    ret = v4lStartStreaming();
    CAMHAL_LOGDA("Ready for preview....");
EXIT:
    return ret;
}

/*--------------------Camera Adapter Functions-----------------------------*/
status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
{
    char value[PROPERTY_VALUE_MAX];

    LOG_FUNCTION_NAME;

    property_get("debug.camera.showfps", value, "0");
    mDebugFps = atoi(value);

    int ret = NO_ERROR;

    // Allocate memory for video info structure
    mVideoInfo = (struct VideoInfo *) calloc (1, sizeof (struct VideoInfo));
    if(!mVideoInfo) {
        ret = NO_MEMORY;
        goto EXIT;
    }

    if ((mCameraHandle = open(device, O_RDWR | O_NONBLOCK) ) == -1) {
        CAMHAL_LOGEB("Error while opening handle to V4L2 Camera: %s", strerror(errno));
        ret = BAD_VALUE;
        goto EXIT;
    }

    ret = v4lIoctl (mCameraHandle, VIDIOC_QUERYCAP, &mVideoInfo->cap);
    if (ret < 0) {
        CAMHAL_LOGEA("Error when querying the capabilities of the V4L Camera");
        ret = BAD_VALUE;
        goto EXIT;
    }

    if ((mVideoInfo->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
        CAMHAL_LOGEA("Error while adapter initialization: video capture not supported.");
        ret = BAD_VALUE;
        goto EXIT;
    }

    if (!(mVideoInfo->cap.capabilities & V4L2_CAP_STREAMING)) {
        CAMHAL_LOGEA("Error while adapter initialization: Capture device does not support streaming i/o");
        ret = BAD_VALUE;
        goto EXIT;
    }

    // Initialize flags
    mPreviewing = false;
    mVideoInfo->isStreaming = false;
    mRecording = false;
    mCapturing = false;
EXIT:
    LOG_FUNCTION_NAME_EXIT;
    return ret;
}

status_t V4LCameraAdapter::fillThisBuffer(CameraBuffer *frameBuf, CameraFrame::FrameType frameType)
{
    status_t ret = NO_ERROR;
    int idx = 0;

    LOG_FUNCTION_NAME;

    if ( frameType == CameraFrame::IMAGE_FRAME) { //(1 > mCapturedFrames)
        // Signal end of image capture
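        // (capture buffers are never re-queued to the driver here; returning an
        //  image frame only reports that the capture sequence has completed)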
        if ( NULL != mEndImageCaptureCallback) {
            CAMHAL_LOGDB("===========Signal End Image Capture==========");
            mEndImageCaptureCallback(mEndCaptureData);
        }
        goto EXIT;
    }

    if ( !mVideoInfo->isStreaming ) {
        goto EXIT;
    }

    idx = mPreviewBufs.valueFor(frameBuf);
    if(idx < 0) {
        CAMHAL_LOGEB("Wrong index = %d",idx);
        goto EXIT;
    }

    mVideoInfo->buf.index = idx;
    mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;

    ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
    if (ret < 0) {
        CAMHAL_LOGEA("VIDIOC_QBUF Failed");
        goto EXIT;
    }
    nQueued++;
EXIT:
    LOG_FUNCTION_NAME_EXIT;
    return ret;
}

status_t V4LCameraAdapter::setParameters(const android::CameraParameters &params)
{
    status_t ret = NO_ERROR;
    int width, height;
    struct v4l2_streamparm streamParams;

    LOG_FUNCTION_NAME;

    if(!mPreviewing && !mCapturing) {
        params.getPreviewSize(&width, &height);
        CAMHAL_LOGDB("Width * Height %d x %d format 0x%x", width, height, DEFAULT_PIXEL_FORMAT);

        ret = v4lSetFormat( width, height, DEFAULT_PIXEL_FORMAT);
        if (ret < 0) {
            CAMHAL_LOGEB(" VIDIOC_S_FMT Failed: %s", strerror(errno));
            goto EXIT;
        }

        //set frame rate
        // Now its fixed to 30 FPS
        streamParams.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        streamParams.parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
        streamParams.parm.capture.capturemode = V4L2_MODE_HIGHQUALITY;
        streamParams.parm.capture.timeperframe.denominator = FPS_PERIOD;
        streamParams.parm.capture.timeperframe.numerator = 1;
        ret = v4lIoctl(mCameraHandle, VIDIOC_S_PARM, &streamParams);
        if (ret < 0) {
            CAMHAL_LOGEB(" VIDIOC_S_PARM Failed: %s", strerror(errno));
            goto EXIT;
        }

        int actualFps = streamParams.parm.capture.timeperframe.denominator / streamParams.parm.capture.timeperframe.numerator;
        CAMHAL_LOGDB("Actual FPS set is : %d.", actualFps);
    }

    // Update the current parameter set
    mParams = params;

EXIT:
    LOG_FUNCTION_NAME_EXIT;
    return ret;
}


void V4LCameraAdapter::getParameters(android::CameraParameters& params)
{
    LOG_FUNCTION_NAME;

    // Return the current parameter set
    params = mParams;

    LOG_FUNCTION_NAME_EXIT;
}


///API to give the buffers to Adapter
status_t V4LCameraAdapter::useBuffers(CameraMode mode, CameraBuffer *bufArr, int num, size_t length, unsigned int queueable)
{
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    android::AutoMutex lock(mLock);

    switch(mode)
    {
        case CAMERA_PREVIEW:
            mPreviewBufferCountQueueable = queueable;
            ret = UseBuffersPreview(bufArr, num);
            break;

        case CAMERA_IMAGE_CAPTURE:
            mCaptureBufferCountQueueable = queueable;
            ret = UseBuffersCapture(bufArr, num);
            break;

        case CAMERA_VIDEO:
            //@warn Video capture is not fully supported yet
            mPreviewBufferCountQueueable = queueable;
            ret = UseBuffersPreview(bufArr, num);
            break;

        case CAMERA_MEASUREMENT:
            break;

        default:
            break;
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}

status_t V4LCameraAdapter::UseBuffersCapture(CameraBuffer *bufArr, int num) {
    int ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    if(NULL == bufArr) {
        ret = BAD_VALUE;
        goto EXIT;
    }

    for (int i = 0; i < num; i++) {
        //Associate each Camera internal buffer with the one from Overlay
        mCaptureBufs.add(&bufArr[i], i);
        CAMHAL_LOGDB("capture- buff [%d] = 0x%x ",i, mCaptureBufs.keyAt(i));
    }

    mCaptureBuffersAvailable.clear();
    for (int i = 0; i < mCaptureBufferCountQueueable; i++ ) {
        mCaptureBuffersAvailable.add(&mCaptureBuffers[i], 0);
    }

    // initial ref count for undequeued buffers is 1 since buffer provider
    // is still holding on to it
    for (int i = mCaptureBufferCountQueueable; i < num; i++ ) {
        mCaptureBuffersAvailable.add(&mCaptureBuffers[i], 1);
    }

    // Update the capture buffer count
    mCaptureBufferCount = num;
EXIT:
    LOG_FUNCTION_NAME_EXIT;
    return ret;
}

status_t V4LCameraAdapter::UseBuffersPreview(CameraBuffer *bufArr, int num)
{
    int ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    if(NULL == bufArr) {
        ret = BAD_VALUE;
        goto EXIT;
    }

    ret = v4lInitMmap(num);
    if (ret == NO_ERROR) {
        for (int i = 0; i < num; i++) {
            //Associate each Camera internal buffer with the one from Overlay
            mPreviewBufs.add(&bufArr[i], i);
            CAMHAL_LOGDB("Preview- buff [%d] = 0x%x ",i, mPreviewBufs.keyAt(i));
        }

        // Update the preview buffer count
        mPreviewBufferCount = num;
    }
EXIT:
    LOG_FUNCTION_NAME_EXIT;
    return ret;
}

status_t V4LCameraAdapter::takePicture() {
    status_t ret = NO_ERROR;
    int width = 0;
    int height = 0;
    size_t yuv422i_buff_size = 0;
    int index = 0;
    char *fp = NULL;
    CameraBuffer *buffer = NULL;
    CameraFrame frame;

    LOG_FUNCTION_NAME;

    android::AutoMutex lock(mCaptureBufsLock);

    if(mCapturing) {
        CAMHAL_LOGEA("Already Capture in Progress...");
        ret = BAD_VALUE;
        goto EXIT;
    }

    mCapturing = true;
    mPreviewing = false;

    // Stop preview streaming
    ret = v4lStopStreaming(mPreviewBufferCount);
    if (ret < 0 ) {
        CAMHAL_LOGEB("v4lStopStreaming Failed: %s", strerror(errno));
        goto EXIT;
    }

    //configure for capture image size and pixel format.
    mParams.getPictureSize(&width, &height);
    CAMHAL_LOGDB("Image Capture Size WxH = %dx%d",width,height);
    yuv422i_buff_size = width * height * 2;

    ret = v4lSetFormat (width, height, DEFAULT_PIXEL_FORMAT);
    if (ret < 0) {
        CAMHAL_LOGEB("v4lSetFormat Failed: %s", strerror(errno));
        goto EXIT;
    }

    ret = v4lInitMmap(mCaptureBufferCount);
    if (ret < 0) {
        CAMHAL_LOGEB("v4lInitMmap Failed: %s", strerror(errno));
        goto EXIT;
    }

    for (int i = 0; i < mCaptureBufferCountQueueable; i++) {

        mVideoInfo->buf.index = i;
        mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;

        ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
        if (ret < 0) {
            CAMHAL_LOGEA("VIDIOC_QBUF Failed");
            ret = BAD_VALUE;
            goto EXIT;
        }
        nQueued++;
    }

    ret = v4lStartStreaming();
    if (ret < 0) {
        CAMHAL_LOGEB("v4lStartStreaming Failed: %s", strerror(errno));
        goto EXIT;
    }

    CAMHAL_LOGDA("Streaming started for Image Capture");

    //get the frame and send to encode as JPG
    fp = this->GetFrame(index);
    if(!fp) {
        CAMHAL_LOGEA("!!! Captured frame is NULL !!!!");
        ret = BAD_VALUE;
        goto EXIT;
    }
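    // NOTE: fp points into the driver-owned mmap'ed capture buffer, so the data
    // is copied into the client's image buffer below before streaming is stopped
    // and the V4L2 buffers are unmapped.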
    CAMHAL_LOGDA("::Capture Frame received from V4L::");
    buffer = mCaptureBufs.keyAt(index);
    CAMHAL_LOGVB("## captureBuf[%d] = 0x%x, yuv422i_buff_size=%d", index, buffer->opaque, yuv422i_buff_size);

    //copy the yuv422i data to the image buffer.
    memcpy(buffer->opaque, fp, yuv422i_buff_size);

#ifdef DUMP_CAPTURE_FRAME
    //dump the YUV422 buffer in to a file
    //a folder should have been created at /data/misc/camera/raw/
    {
        int fd = -1;
        fd = open("/data/misc/camera/raw/captured_yuv422i_dump.yuv", O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
        if(fd < 0) {
            CAMHAL_LOGEB("Unable to open file: %s", strerror(errno));
        }
        else {
            write(fd, fp, yuv422i_buff_size );
            close(fd);
            CAMHAL_LOGDB("::Captured Frame dumped at /data/misc/camera/raw/captured_yuv422i_dump.yuv::");
        }
    }
#endif

    CAMHAL_LOGDA("::sending capture frame to encoder::");
    frame.mFrameType = CameraFrame::IMAGE_FRAME;
    frame.mBuffer = buffer;
    frame.mLength = yuv422i_buff_size;
    frame.mWidth = width;
    frame.mHeight = height;
    frame.mAlignment = width*2;
    frame.mOffset = 0;
    frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
    frame.mFrameMask = (unsigned int)CameraFrame::IMAGE_FRAME;
    frame.mQuirks |= CameraFrame::ENCODE_RAW_YUV422I_TO_JPEG;
    frame.mQuirks |= CameraFrame::FORMAT_YUV422I_YUYV;

    ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
    if (ret != NO_ERROR) {
        CAMHAL_LOGDB("Error in setInitFrameRefCount %d", ret);
    } else {
        ret = sendFrameToSubscribers(&frame);
    }

    // Stop streaming after image capture
    ret = v4lStopStreaming(mCaptureBufferCount);
    if (ret < 0 ) {
        CAMHAL_LOGEB("v4lStopStreaming Failed: %s", strerror(errno));
        goto EXIT;
    }

    ret = restartPreview();
EXIT:
    LOG_FUNCTION_NAME_EXIT;
    return ret;
}

status_t V4LCameraAdapter::stopImageCapture()
{
    status_t ret = NO_ERROR;
    LOG_FUNCTION_NAME;

    //Release image buffers
    if ( NULL != mReleaseImageBuffersCallback ) {
        mReleaseImageBuffersCallback(mReleaseData);
    }
    mCaptureBufs.clear();

    mCapturing = false;
    mPreviewing = true;
    LOG_FUNCTION_NAME_EXIT;
    return ret;
}

status_t V4LCameraAdapter::autoFocus()
{
    status_t ret = NO_ERROR;
    LOG_FUNCTION_NAME;

    //autoFocus is not implemented. Just return.
    LOG_FUNCTION_NAME_EXIT;
    return ret;
}

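// startPreview() queues only the first mPreviewBufferCountQueueable buffers to
// the driver before streaming; the remaining preview buffers are expected to
// come back later through fillThisBuffer().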
status_t V4LCameraAdapter::startPreview()
{
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    android::AutoMutex lock(mPreviewBufsLock);

    if(mPreviewing) {
        ret = BAD_VALUE;
        goto EXIT;
    }

    for (int i = 0; i < mPreviewBufferCountQueueable; i++) {

        mVideoInfo->buf.index = i;
        mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;

        ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
        if (ret < 0) {
            CAMHAL_LOGEA("VIDIOC_QBUF Failed");
            goto EXIT;
        }
        nQueued++;
    }

    ret = v4lStartStreaming();

    // Create and start preview thread for receiving buffers from V4L Camera
    if(!mCapturing) {
        mPreviewThread = new PreviewThread(this);
        CAMHAL_LOGDA("Created preview thread");
    }

    //Update the flag to indicate we are previewing
    mPreviewing = true;
    mCapturing = false;

EXIT:
    LOG_FUNCTION_NAME_EXIT;
    return ret;
}

status_t V4LCameraAdapter::stopPreview()
{
    enum v4l2_buf_type bufType;
    int ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    android::AutoMutex lock(mStopPreviewLock);

    if(!mPreviewing) {
        return NO_INIT;
    }
    mPreviewing = false;

    ret = v4lStopStreaming(mPreviewBufferCount);
    if (ret < 0) {
        CAMHAL_LOGEB("StopStreaming: FAILED: %s", strerror(errno));
    }

    nQueued = 0;
    nDequeued = 0;
    mFramesWithEncoder = 0;

    mPreviewBufs.clear();

    mPreviewThread->requestExitAndWait();
    mPreviewThread.clear();

    LOG_FUNCTION_NAME_EXIT;
    return ret;
}

char * V4LCameraAdapter::GetFrame(int &index)
{
    int ret = NO_ERROR;
    LOG_FUNCTION_NAME;

    mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;

    /* DQ */
    // Some V4L drivers, notably uvc, protect each incoming call with
    // a driver-wide mutex. If we used poll() or a blocking VIDIOC_DQBUF ioctl
    // here then we would sometimes run into a deadlock on the VIDIOC_QBUF ioctl.
    while(true) {
        if(!mVideoInfo->isStreaming) {
            return NULL;
        }

        ret = v4lIoctl(mCameraHandle, VIDIOC_DQBUF, &mVideoInfo->buf);
        if((ret == 0) || (errno != EAGAIN)) {
            break;
        }
    }

    if (ret < 0) {
        CAMHAL_LOGEA("GetFrame: VIDIOC_DQBUF Failed");
        return NULL;
    }
    nDequeued++;

    index = mVideoInfo->buf.index;

    LOG_FUNCTION_NAME_EXIT;
    return (char *)mVideoInfo->mem[mVideoInfo->buf.index];
}

//API to get the frame size required to be allocated. This size is used to override the size passed
//by camera service when VSTAB/VNF is turned ON for example
status_t V4LCameraAdapter::getFrameSize(size_t &width, size_t &height)
{
    status_t ret = NO_ERROR;
    LOG_FUNCTION_NAME;

    // Just return the current preview size, nothing more to do here.
    mParams.getPreviewSize(( int * ) &width,
                           ( int * ) &height);

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}

status_t V4LCameraAdapter::getFrameDataSize(size_t &dataFrameSize, size_t bufferCount)
{
    // We don't support meta data, so simply return
    return NO_ERROR;
}

status_t V4LCameraAdapter::getPictureBufferSize(CameraFrame &frame, size_t bufferCount)
{
    int width = 0;
    int height = 0;
    int bytesPerPixel = 2; // for YUV422i; default pixel format

    LOG_FUNCTION_NAME;

    mParams.getPictureSize( &width, &height );
    frame.mLength = width * height * bytesPerPixel;
    frame.mWidth = width;
    frame.mHeight = height;
    frame.mAlignment = width * bytesPerPixel;

    CAMHAL_LOGDB("Picture size: W x H = %u x %u (size=%u bytes, alignment=%u bytes)",
                 frame.mWidth, frame.mHeight, frame.mLength, frame.mAlignment);
    LOG_FUNCTION_NAME_EXIT;
    return NO_ERROR;
}

static void debugShowFPS()
{
    static int mFrameCount = 0;
    static int mLastFrameCount = 0;
    static nsecs_t mLastFpsTime = 0;
    static float mFps = 0;

    if(mDebugFps) {
        mFrameCount++;
        if (!(mFrameCount & 0x1F)) {
            nsecs_t now = systemTime();
            nsecs_t diff = now - mLastFpsTime;
            mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
            mLastFpsTime = now;
            mLastFrameCount = mFrameCount;
            CAMHAL_LOGI("Camera %d Frames, %f FPS", mFrameCount, mFps);
        }
    }
}

status_t V4LCameraAdapter::recalculateFPS()
{
    float currentFPS;

    mFrameCount++;

    if ( ( mFrameCount % FPS_PERIOD ) == 0 ) {
        nsecs_t now = systemTime();
        nsecs_t diff = now - mLastFPSTime;
        currentFPS = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
        mLastFPSTime = now;
        mLastFrameCount = mFrameCount;

        if ( 1 == mIter ) {
            mFPS = currentFPS;
        } else {
            //cumulative moving average
            mFPS = mLastFPS + (currentFPS - mLastFPS)/mIter;
        }

        mLastFPS = mFPS;
        mIter++;
    }

    return NO_ERROR;
}

void V4LCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
{
    LOG_FUNCTION_NAME;

    LOG_FUNCTION_NAME_EXIT;
}


V4LCameraAdapter::V4LCameraAdapter(size_t sensor_index)
{
    LOG_FUNCTION_NAME;

    // Nothing useful to do in the constructor
    mFramesWithEncoder = 0;

    LOG_FUNCTION_NAME_EXIT;
}

V4LCameraAdapter::~V4LCameraAdapter()
{
    LOG_FUNCTION_NAME;

    // Close the camera handle and free the video info structure
    close(mCameraHandle);

    if (mVideoInfo) {
        free(mVideoInfo);
        mVideoInfo = NULL;
    }

    LOG_FUNCTION_NAME_EXIT;
}

static void convertYUV422i_yuyvTouyvy(uint8_t *src, uint8_t *dest, size_t size ) {
    //convert YUV422I yuyv to uyvy format.
    uint32_t *bf = (uint32_t*)src;
    uint32_t *dst = (uint32_t*)dest;

    LOG_FUNCTION_NAME;

    if (!src || !dest) {
        return;
    }

    for(size_t i = 0; i < size; i = i+4) {
        dst[0] = ((bf[0] & 0x00FF00FF) << 8) | ((bf[0] & 0xFF00FF00) >> 8);
        bf++;
        dst++;
    }

    LOG_FUNCTION_NAME_EXIT;
}

static void convertYUV422ToNV12Tiler(unsigned char *src, unsigned char *dest, int width, int height ) {
    //convert YUV422I to YUV420 NV12 format and copies directly to preview buffers (Tiler memory).
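    // Destination layout (Tiler memory): the Y plane is written with a fixed
    // 4096-byte row stride and the interleaved UV plane starts at
    // dest + height * stride. Chroma is taken from even source rows only
    // (4:2:2 -> 4:2:0 vertical subsampling).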
    int stride = 4096;
    unsigned char *bf = src;
    unsigned char *dst_y = dest;
    unsigned char *dst_uv = dest + ( height * stride);

#ifdef PPM_PER_FRAME_CONVERSION
    static int frameCount = 0;
    static nsecs_t ppm_diff = 0;
    nsecs_t ppm_start = systemTime();
#endif

    LOG_FUNCTION_NAME;

    if (width % 16 ) {
        for(int i = 0; i < height; i++) {
            for(int j = 0; j < width; j++) {
                *dst_y = *bf;
                dst_y++;
                bf = bf + 2;
            }
            dst_y += (stride - width);
        }

        bf = src;
        bf++;  //UV sample
        for(int i = 0; i < height/2; i++) {
            for(int j = 0; j < width; j++) {
                *dst_uv = *bf;
                dst_uv++;
                bf = bf + 2;
            }
            bf = bf + width*2;
            dst_uv = dst_uv + (stride - width);
        }
    } else {
        //neon conversion
        for(int i = 0; i < height; i++) {
            int n = width;
            int skip = i & 0x1;       // skip uv elements for the odd rows
            asm volatile (
                "   pld [%[src], %[src_stride], lsl #2]                     \n\t"
                "   cmp %[n], #16                                           \n\t"
                "   blt 5f                                                  \n\t"
                "0: @ 16 pixel copy                                         \n\t"
                "   vld2.8  {q0, q1} , [%[src]]!  @ q0 = yyyy.. q1 = uvuv.. \n\t"
                "                                 @ now q0 = y q1 = uv      \n\t"
                "   vst1.32 {d0,d1}, [%[dst_y]]!                            \n\t"
                "   cmp %[skip], #0                                         \n\t"
                "   bne 1f                                                  \n\t"
                "   vst1.32 {d2,d3},[%[dst_uv]]!                            \n\t"
                "1: @ skip odd rows for UV                                  \n\t"
                "   sub %[n], %[n], #16                                     \n\t"
                "   cmp %[n], #16                                           \n\t"
                "   bge 0b                                                  \n\t"
                "5: @ end                                                   \n\t"
#ifdef NEEDS_ARM_ERRATA_754319_754320
                "   vmov s0,s0      @ add noop for errata item              \n\t"
#endif
                : [dst_y] "+r" (dst_y), [dst_uv] "+r" (dst_uv), [src] "+r" (src), [n] "+r" (n)
                : [src_stride] "r" (width), [skip] "r" (skip)
                : "cc", "memory", "q0", "q1", "q2", "d0", "d1", "d2", "d3"
            );
            dst_y = dst_y + (stride - width);
            if (skip == 0) {
                dst_uv = dst_uv + (stride - width);
            }
        } //end of for()
    }

#ifdef PPM_PER_FRAME_CONVERSION
    ppm_diff += (systemTime() - ppm_start);
    frameCount++;

    if (frameCount >= 30) {
        ppm_diff = ppm_diff / frameCount;
        LOGD("PPM: YUV422i to NV12 Conversion(%d x %d): %llu us ( %llu ms )", width, height,
                ns2us(ppm_diff), ns2ms(ppm_diff) );
        ppm_diff = 0;
        frameCount = 0;
    }
#endif

    LOG_FUNCTION_NAME_EXIT;
}

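// Same conversion as above but into a tightly packed NV12 buffer (no row
// padding): the UV plane starts at dest + width * height. In this file it is
// used only for the optional SAVE_RAW_FRAMES dump in previewThread().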
static void convertYUV422ToNV12(unsigned char *src, unsigned char *dest, int width, int height ) {
    //convert YUV422I to YUV420 NV12 format.
    unsigned char *bf = src;
    unsigned char *dst_y = dest;
    unsigned char *dst_uv = dest + (width * height);

    LOG_FUNCTION_NAME;

    if (width % 16 ) {
        for(int i = 0; i < height; i++) {
            for(int j = 0; j < width; j++) {
                *dst_y = *bf;
                dst_y++;
                bf = bf + 2;
            }
        }

        bf = src;
        bf++;  //UV sample
        for(int i = 0; i < height/2; i++) {
            for(int j = 0; j < width; j++) {
                *dst_uv = *bf;
                dst_uv++;
                bf = bf + 2;
            }
            bf = bf + width*2;
        }
    } else {
        //neon conversion
        for(int i = 0; i < height; i++) {
            int n = width;
            int skip = i & 0x1;       // skip uv elements for the odd rows
            asm volatile (
                "   pld [%[src], %[src_stride], lsl #2]                     \n\t"
                "   cmp %[n], #16                                           \n\t"
                "   blt 5f                                                  \n\t"
                "0: @ 16 pixel copy                                         \n\t"
                "   vld2.8  {q0, q1} , [%[src]]!  @ q0 = yyyy.. q1 = uvuv.. \n\t"
                "                                 @ now q0 = y q1 = uv      \n\t"
                "   vst1.32 {d0,d1}, [%[dst_y]]!                            \n\t"
                "   cmp %[skip], #0                                         \n\t"
                "   bne 1f                                                  \n\t"
                "   vst1.32 {d2,d3},[%[dst_uv]]!                            \n\t"
                "1: @ skip odd rows for UV                                  \n\t"
                "   sub %[n], %[n], #16                                     \n\t"
                "   cmp %[n], #16                                           \n\t"
                "   bge 0b                                                  \n\t"
                "5: @ end                                                   \n\t"
#ifdef NEEDS_ARM_ERRATA_754319_754320
                "   vmov s0,s0      @ add noop for errata item              \n\t"
#endif
                : [dst_y] "+r" (dst_y), [dst_uv] "+r" (dst_uv), [src] "+r" (src), [n] "+r" (n)
                : [src_stride] "r" (width), [skip] "r" (skip)
                : "cc", "memory", "q0", "q1", "q2", "d0", "d1", "d2", "d3"
            );
        }
    }

    LOG_FUNCTION_NAME_EXIT;
}

#ifdef SAVE_RAW_FRAMES
void saveFile(unsigned char* buff, int buff_size) {
    static int counter = 1;
    int fd = -1;
    char fn[256];

    LOG_FUNCTION_NAME;

    if (counter > 3) {
        return;
    }
    //dump nv12 buffer
    counter++;
    sprintf(fn, "/data/misc/camera/raw/nv12_dump_%03d.yuv", counter);
    CAMHAL_LOGEB("Dumping nv12 frame to a file : %s.", fn);

    fd = open(fn, O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
    if(fd < 0) {
        CAMHAL_LOGE("Unable to open file %s: %s", fn, strerror(errno));
        return;
    }

    write(fd, buff, buff_size );
    close(fd);

    LOG_FUNCTION_NAME_EXIT;
}
#endif

/* Preview Thread */
// ---------------------------------------------------------------------------

int V4LCameraAdapter::previewThread()
{
    status_t ret = NO_ERROR;
    int width, height;
    CameraFrame frame;
    void *y_uv[2];
    int index = 0;
    int stride = 4096;
    char *fp = NULL;

    mParams.getPreviewSize(&width, &height);

    if (mPreviewing) {

        fp = this->GetFrame(index);
        if(!fp) {
            ret = BAD_VALUE;
            goto EXIT;
        }

        CameraBuffer *buffer = mPreviewBufs.keyAt(index);
        CameraFrame *lframe = (CameraFrame *)mFrameQueue.valueFor(buffer);
        if (!lframe) {
            ret = BAD_VALUE;
            goto EXIT;
        }

        debugShowFPS();

        if ( mFrameSubscribers.size() == 0 ) {
            ret = BAD_VALUE;
            goto EXIT;
        }

        y_uv[0] = (void*) lframe->mYuv[0];
        //y_uv[1] = (void*) lframe->mYuv[1];
        //y_uv[1] = (void*) (lframe->mYuv[0] + height*stride);
        convertYUV422ToNV12Tiler ( (unsigned char*)fp, (unsigned char*)y_uv[0], width, height);
        CAMHAL_LOGVB("##...index= %d.;camera buffer= 0x%x; y= 0x%x; UV= 0x%x.",index, buffer, y_uv[0], y_uv[1] );

#ifdef SAVE_RAW_FRAMES
        unsigned char* nv12_buff = (unsigned char*) malloc(width*height*3/2);
        //Convert yuv422i to yuv420sp(NV12) & dump the frame to a file
        convertYUV422ToNV12 ( (unsigned char*)fp, nv12_buff, width, height);
        saveFile( nv12_buff, ((width*height)*3/2) );
        free (nv12_buff);
#endif

        frame.mFrameType = CameraFrame::PREVIEW_FRAME_SYNC;
        frame.mBuffer = buffer;
        frame.mLength = width*height*3/2;
        frame.mAlignment = stride;
        frame.mOffset = 0;
        frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
        frame.mFrameMask = (unsigned int)CameraFrame::PREVIEW_FRAME_SYNC;

        if (mRecording) {
            frame.mFrameMask |= (unsigned int)CameraFrame::VIDEO_FRAME_SYNC;
            mFramesWithEncoder++;
        }

        ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
        if (ret != NO_ERROR) {
            CAMHAL_LOGDB("Error in setInitFrameRefCount %d", ret);
        } else {
            ret = sendFrameToSubscribers(&frame);
        }
    }
EXIT:

    return ret;
}

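// Note: detectVideoDevice() assumes the caller supplies enough entries in
// video_device_list; V4LCameraAdapter_Capabilities() below passes five.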
//scan for video devices
void detectVideoDevice(char** video_device_list, int& num_device) {
    char dir_path[20];
    char* filename;
    char** dev_list = video_device_list;
    DIR *d;
    struct dirent *dir;
    int index = 0;

    strcpy(dir_path, DEVICE_PATH);
    d = opendir(dir_path);
    if(d) {
        //read each entry in the /dev/ and find if there is videox entry.
        while ((dir = readdir(d)) != NULL) {
            filename = dir->d_name;
            if (strncmp(filename, DEVICE_NAME, 5) == 0) {
                strcpy(dev_list[index],DEVICE_PATH);
                strncat(dev_list[index],filename,sizeof(DEVICE_NAME));
                index++;
            }
        } //end of while()
        closedir(d);
        num_device = index;

        for(int i = 0; i < index; i++){
            CAMHAL_LOGDB("Video device list::dev_list[%d]= %s",i,dev_list[i]);
        }
    }
}

extern "C" CameraAdapter* V4LCameraAdapter_Factory(size_t sensor_index)
{
    CameraAdapter *adapter = NULL;
    android::AutoMutex lock(gV4LAdapterLock);

    LOG_FUNCTION_NAME;

    adapter = new V4LCameraAdapter(sensor_index);
    if ( adapter ) {
        CAMHAL_LOGDB("New V4L Camera adapter instance created for sensor %d",sensor_index);
    } else {
        CAMHAL_LOGEA("V4L Camera adapter create failed for sensor index = %d!",sensor_index);
    }

    LOG_FUNCTION_NAME_EXIT;

    return adapter;
}

extern "C" status_t V4LCameraAdapter_Capabilities(
        CameraProperties::Properties * const properties_array,
        const int starting_camera, const int max_camera, int & supportedCameras)
{
    status_t ret = NO_ERROR;
    struct v4l2_capability cap;
    int tempHandle = -1;
    int num_cameras_supported = 0;
    char device_list[5][15];
    char* video_device_list[5];
    int num_v4l_devices = 0;
    int sensorId = 0;
    CameraProperties::Properties* properties = NULL;

    LOG_FUNCTION_NAME;

    supportedCameras = 0;
    memset((void*)&cap, 0, sizeof(v4l2_capability));

    if (!properties_array) {
        CAMHAL_LOGEB("invalid param: properties = 0x%p", properties_array);
        LOG_FUNCTION_NAME_EXIT;
        return BAD_VALUE;
    }

    for (int i = 0; i < 5; i++) {
        video_device_list[i] = device_list[i];
    }

    //look for the connected video devices
    detectVideoDevice(video_device_list, num_v4l_devices);

    for (int i = 0; i < num_v4l_devices; i++) {
        if ( (starting_camera + num_cameras_supported) < max_camera) {
            sensorId = starting_camera + num_cameras_supported;

            CAMHAL_LOGDB("Opening device[%d] = %s..",i, video_device_list[i]);
            if ((tempHandle = open(video_device_list[i], O_RDWR)) == -1) {
                CAMHAL_LOGEB("Error while opening handle to V4L2 Camera(%s): %s",video_device_list[i], strerror(errno));
                continue;
            }

            ret = ioctl (tempHandle, VIDIOC_QUERYCAP, &cap);
            if (ret < 0) {
                CAMHAL_LOGEA("Error when querying the capabilities of the V4L Camera");
                close(tempHandle);
                continue;
            }

            //check for video capture devices
            if ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
                CAMHAL_LOGEA("Error while adapter initialization: video capture not supported.");
                close(tempHandle);
                continue;
            }

            strcpy(device, video_device_list[i]);
            properties = properties_array + starting_camera + num_cameras_supported;

            //fetch capabilities for this camera
            ret = V4LCameraAdapter::getCaps( sensorId, properties, tempHandle );
            if (ret < 0) {
                CAMHAL_LOGEA("Error while getting capabilities.");
                close(tempHandle);
                continue;
            }

            num_cameras_supported++;

        }
        //For now exit this loop once a valid video capture device is found.
        //TODO: find all V4L capture devices and their capabilities
        break;
    } //end of for() loop

    supportedCameras = num_cameras_supported;
    CAMHAL_LOGDB("Number of V4L cameras detected =%d", num_cameras_supported);

EXIT:
    LOG_FUNCTION_NAME_EXIT;
    close(tempHandle);
    return NO_ERROR;
}

} // namespace Camera
} // namespace Ti


/*--------------------Camera Adapter Class ENDS here-----------------------------*/