/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * Contains implementation of an abstract class EmulatedCameraDevice that defines
 * functionality expected from an emulated physical camera device:
 *  - Obtaining and setting camera parameters
 *  - Capturing frames
 *  - Streaming video
 *  - etc.
 */

#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_Device"
#include <cutils/log.h>
#include <sys/select.h>
#include <cmath>
#include "EmulatedCameraDevice.h"

namespace android {

const float GAMMA_CORRECTION = 2.2f;
EmulatedCameraDevice::EmulatedCameraDevice(EmulatedCamera* camera_hal)
    : mObjectLock(),
      mCurFrameTimestamp(0),
      mCameraHAL(camera_hal),
      mCurrentFrame(NULL),
      mExposureCompensation(1.0f),
      mWhiteBalanceScale(NULL),
      mSupportedWhiteBalanceScale(),
      mState(ECDS_CONSTRUCTED)
{
}

EmulatedCameraDevice::~EmulatedCameraDevice()
{
    ALOGV("EmulatedCameraDevice destructor");
    if (mCurrentFrame != NULL) {
        delete[] mCurrentFrame;
    }
    for (int i = 0; i < mSupportedWhiteBalanceScale.size(); ++i) {
        if (mSupportedWhiteBalanceScale.valueAt(i) != NULL) {
            delete[] mSupportedWhiteBalanceScale.valueAt(i);
        }
    }
}

/****************************************************************************
 * Emulated camera device public API
 ***************************************************************************/

status_t EmulatedCameraDevice::Initialize()
{
    if (isInitialized()) {
        ALOGW("%s: Emulated camera device is already initialized: mState = %d",
             __FUNCTION__, mState);
        return NO_ERROR;
    }

    /* Instantiate worker thread object. */
    mWorkerThread = new WorkerThread(this);
    if (getWorkerThread() == NULL) {
        ALOGE("%s: Unable to instantiate worker thread object", __FUNCTION__);
        return ENOMEM;
    }

    mState = ECDS_INITIALIZED;

    return NO_ERROR;
}

status_t EmulatedCameraDevice::startDeliveringFrames(bool one_burst)
{
    ALOGV("%s", __FUNCTION__);

    if (!isStarted()) {
        ALOGE("%s: Device is not started", __FUNCTION__);
        return EINVAL;
    }

    /* Frames will be delivered from the thread routine. */
    const status_t res = startWorkerThread(one_burst);
    ALOGE_IF(res != NO_ERROR, "%s: startWorkerThread failed", __FUNCTION__);
    return res;
}

status_t EmulatedCameraDevice::stopDeliveringFrames()
{
    ALOGV("%s", __FUNCTION__);

    if (!isStarted()) {
        ALOGW("%s: Device is not started", __FUNCTION__);
        return NO_ERROR;
    }

    const status_t res = stopWorkerThread();
    ALOGE_IF(res != NO_ERROR, "%s: stopWorkerThread failed", __FUNCTION__);
    return res;
}

void EmulatedCameraDevice::setExposureCompensation(const float ev) {
    ALOGV("%s", __FUNCTION__);

    if (!isStarted()) {
        ALOGW("%s: Emulated camera device is not started.", __FUNCTION__);
    }

    mExposureCompensation = std::pow(2.0f, ev / GAMMA_CORRECTION);
    ALOGV("New exposure compensation is %f", mExposureCompensation);
}
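/* The mapping above folds the display gamma into the exposure scale: an EV
 * value of 'ev' stops becomes a linear multiplier of pow(2, ev / 2.2).
 * A quick arithmetic check (rounded, illustrative only):
 *
 *   ev =  0.0f -> scale = 2^( 0.0 / 2.2) = 1.00  (no change)
 *   ev = +2.2f -> scale = 2^( 2.2 / 2.2) = 2.00  (one doubling)
 *   ev = -2.2f -> scale = 2^(-2.2 / 2.2) = 0.50  (one halving)
 *
 * A minimal sketch of how a frame producer might apply the factor to a luma
 * sample (hypothetical helper, not part of this class):
 *
 *   uint8_t applyExposure(uint8_t y, float exposureCompensation) {
 *       const float scaled = static_cast<float>(y) * exposureCompensation;
 *       return static_cast<uint8_t>(scaled > 255.0f ? 255.0f : scaled);
 *   }
 */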

void EmulatedCameraDevice::initializeWhiteBalanceModes(const char* mode,
                                                       const float r_scale,
                                                       const float b_scale) {
    ALOGV("%s with %s, %f, %f", __FUNCTION__, mode, r_scale, b_scale);
    float* value = new float[3];
    value[0] = r_scale; value[1] = 1.0f; value[2] = b_scale;
    mSupportedWhiteBalanceScale.add(String8(mode), value);
}
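/* A usage sketch for registering white balance presets (the mode names and
 * scale values below are illustrative assumptions, not taken from this file).
 * Each entry maps a mode name to a float[3] of {r_scale, 1.0, b_scale}, so
 * the green channel is always left untouched:
 *
 *   initializeWhiteBalanceModes("auto",         1.0f,  1.0f);
 *   initializeWhiteBalanceModes("incandescent", 1.38f, 0.60f);
 *   initializeWhiteBalanceModes("daylight",     1.09f, 0.92f);
 *   setWhiteBalanceMode("auto");
 */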

void EmulatedCameraDevice::setWhiteBalanceMode(const char* mode) {
    ALOGV("%s with white balance %s", __FUNCTION__, mode);
    mWhiteBalanceScale =
            mSupportedWhiteBalanceScale.valueFor(String8(mode));
}

/* Computes the pixel value after adjusting the white balance to the currently
 * selected mode. The inputs are the y, u, v channels of the pixel, and the
 * adjusted values are stored in place. The adjustment is done in RGB space.
 */
void EmulatedCameraDevice::changeWhiteBalance(uint8_t& y,
                                              uint8_t& u,
                                              uint8_t& v) const {
    float r_scale = mWhiteBalanceScale[0];
    float b_scale = mWhiteBalanceScale[2];
    int r = static_cast<float>(YUV2R(y, u, v)) / r_scale;
    int g = YUV2G(y, u, v);
    int b = static_cast<float>(YUV2B(y, u, v)) / b_scale;

    y = RGB2Y(r, g, b);
    u = RGB2U(r, g, b);
    v = RGB2V(r, g, b);
}
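/* A minimal calling sketch (hypothetical values): white-balance one pixel
 * that is stored as Y/U/V bytes.
 *
 *   uint8_t y = 120, u = 130, v = 140;
 *   changeWhiteBalance(y, u, v);   // y/u/v now hold the corrected pixel
 *
 * Note that the scales act as divisors: an r_scale above 1.0 suppresses the
 * red component and a value below 1.0 boosts it; the same holds for b_scale
 * and the blue component.
 */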

status_t EmulatedCameraDevice::getCurrentPreviewFrame(void* buffer)
{
    if (!isStarted()) {
        ALOGE("%s: Device is not started", __FUNCTION__);
        return EINVAL;
    }
    if (mCurrentFrame == NULL || buffer == NULL) {
        ALOGE("%s: No framebuffer", __FUNCTION__);
        return EINVAL;
    }

    /* In emulation the framebuffer is never RGB. */
    switch (mPixelFormat) {
        case V4L2_PIX_FMT_YVU420:
            YV12ToRGB32(mCurrentFrame, buffer, mFrameWidth, mFrameHeight);
            return NO_ERROR;
        case V4L2_PIX_FMT_YUV420:
            YU12ToRGB32(mCurrentFrame, buffer, mFrameWidth, mFrameHeight);
            return NO_ERROR;
        case V4L2_PIX_FMT_NV21:
            NV21ToRGB32(mCurrentFrame, buffer, mFrameWidth, mFrameHeight);
            return NO_ERROR;
        case V4L2_PIX_FMT_NV12:
            NV12ToRGB32(mCurrentFrame, buffer, mFrameWidth, mFrameHeight);
            return NO_ERROR;

        default:
            ALOGE("%s: Unknown pixel format %.4s",
                 __FUNCTION__, reinterpret_cast<const char*>(&mPixelFormat));
            return EINVAL;
    }
}
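/* The converters above all produce 32-bit RGB, so the caller must supply a
 * buffer of at least mFrameWidth * mFrameHeight * 4 bytes. A calling sketch
 * (hypothetical buffer management, not part of this class):
 *
 *   std::vector<uint8_t> rgb(width * height * 4);
 *   if (device->getCurrentPreviewFrame(rgb.data()) == NO_ERROR) {
 *       // 'rgb' now holds one RGB32 copy of the current preview frame
 *   }
 */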

/****************************************************************************
 * Emulated camera device private API
 ***************************************************************************/

status_t EmulatedCameraDevice::commonStartDevice(int width,
                                                 int height,
                                                 uint32_t pix_fmt)
{
    /* Validate pixel format, and calculate framebuffer size at the same time. */
    switch (pix_fmt) {
        case V4L2_PIX_FMT_YVU420:
        case V4L2_PIX_FMT_YUV420:
        case V4L2_PIX_FMT_NV21:
        case V4L2_PIX_FMT_NV12:
            mFrameBufferSize = (width * height * 12) / 8;
            break;

        default:
            ALOGE("%s: Unknown pixel format %.4s",
                 __FUNCTION__, reinterpret_cast<const char*>(&pix_fmt));
            return EINVAL;
    }

    /* Cache framebuffer info. */
    mFrameWidth = width;
    mFrameHeight = height;
    mPixelFormat = pix_fmt;
    mTotalPixels = width * height;

    /* Allocate framebuffer. */
    mCurrentFrame = new uint8_t[mFrameBufferSize];
    if (mCurrentFrame == NULL) {
        ALOGE("%s: Unable to allocate framebuffer", __FUNCTION__);
        return ENOMEM;
    }
    ALOGV("%s: Allocated %p %d bytes for %d pixels in %.4s[%dx%d] frame",
         __FUNCTION__, mCurrentFrame, mFrameBufferSize, mTotalPixels,
         reinterpret_cast<const char*>(&mPixelFormat), mFrameWidth, mFrameHeight);
    return NO_ERROR;
}
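/* All four accepted formats are YUV 4:2:0 layouts: a full-resolution Y plane
 * plus U and V planes subsampled 2x2, i.e. 12 bits (1.5 bytes) per pixel,
 * which is where the (width * height * 12) / 8 above comes from. For example:
 *
 *   640 x 480 x 12 / 8 = 460800 bytes
 *   (307200 bytes of Y + 76800 bytes of U + 76800 bytes of V)
 */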

void EmulatedCameraDevice::commonStopDevice()
{
    mFrameWidth = mFrameHeight = mTotalPixels = 0;
    mPixelFormat = 0;

    if (mCurrentFrame != NULL) {
        delete[] mCurrentFrame;
        mCurrentFrame = NULL;
    }
}

/****************************************************************************
 * Worker thread management.
 ***************************************************************************/

status_t EmulatedCameraDevice::startWorkerThread(bool one_burst)
{
    ALOGV("%s", __FUNCTION__);

    if (!isInitialized()) {
        ALOGE("%s: Emulated camera device is not initialized", __FUNCTION__);
        return EINVAL;
    }

    const status_t res = getWorkerThread()->startThread(one_burst);
    ALOGE_IF(res != NO_ERROR, "%s: Unable to start worker thread", __FUNCTION__);
    return res;
}

status_t EmulatedCameraDevice::stopWorkerThread()
{
    ALOGV("%s", __FUNCTION__);

    if (!isInitialized()) {
        ALOGE("%s: Emulated camera device is not initialized", __FUNCTION__);
        return EINVAL;
    }

    const status_t res = getWorkerThread()->stopThread();
    ALOGE_IF(res != NO_ERROR, "%s: Unable to stop worker thread", __FUNCTION__);
    return res;
}

bool EmulatedCameraDevice::inWorkerThread()
{
    /* Returning false here ends the thread loop and terminates the thread.
     * Derived classes must override this method with the routine that
     * actually captures and delivers frames. */
    return false;
}

/****************************************************************************
 * Worker thread implementation.
 ***************************************************************************/

status_t EmulatedCameraDevice::WorkerThread::readyToRun()
{
    ALOGV("Starting emulated camera device worker thread...");

    ALOGW_IF(mThreadControl >= 0 || mControlFD >= 0,
            "%s: Thread control FDs are opened", __FUNCTION__);
    /* Create a pair of FDs that would be used to control the thread. */
    int thread_fds[2];
    if (pipe(thread_fds) == 0) {
        mThreadControl = thread_fds[1];
        mControlFD = thread_fds[0];
        ALOGV("Emulated device's worker thread has been started.");
        return NO_ERROR;
    } else {
        ALOGE("%s: Unable to create thread control FDs: %d -> %s",
             __FUNCTION__, errno, strerror(errno));
        return errno;
    }
}

status_t EmulatedCameraDevice::WorkerThread::stopThread()
{
    ALOGV("Stopping emulated camera device's worker thread...");

    status_t res = EINVAL;
    if (mThreadControl >= 0) {
        /* Send "stop" message to the thread loop. */
        const ControlMessage msg = THREAD_STOP;
        const int wres =
            TEMP_FAILURE_RETRY(write(mThreadControl, &msg, sizeof(msg)));
        if (wres == sizeof(msg)) {
            /* Stop the thread, and wait till it's terminated. */
            res = requestExitAndWait();
            if (res == NO_ERROR) {
                /* Close control FDs. */
                if (mThreadControl >= 0) {
                    close(mThreadControl);
                    mThreadControl = -1;
                }
                if (mControlFD >= 0) {
                    close(mControlFD);
                    mControlFD = -1;
                }
                ALOGV("Emulated camera device's worker thread has been stopped.");
            } else {
                ALOGE("%s: requestExitAndWait failed: %d -> %s",
                     __FUNCTION__, res, strerror(-res));
            }
        } else {
            ALOGE("%s: Unable to send THREAD_STOP message: %d -> %s",
                 __FUNCTION__, errno, strerror(errno));
            res = errno ? errno : EINVAL;
        }
    } else {
        ALOGE("%s: Thread control FDs are not opened", __FUNCTION__);
    }

    return res;
}
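/* The start/stop handshake above is the classic self-pipe pattern: the
 * controller writes an enum-sized message into the write end of the pipe,
 * and the worker notices the read end becoming readable in its select()
 * loop. A stripped-down sketch of the same pattern (illustrative only, not
 * part of this class):
 *
 *   int fds[2];
 *   pipe(fds);                        // fds[0] = read end, fds[1] = write end
 *   // worker:     select() on fds[0]; when readable, read the message, exit
 *   // controller: write(fds[1], &msg, sizeof(msg)); then join the thread
 */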

EmulatedCameraDevice::WorkerThread::SelectRes
EmulatedCameraDevice::WorkerThread::Select(int fd, int timeout)
{
    fd_set fds[1];
    struct timeval tv, *tvp = NULL;

    const int fd_num = (fd >= 0) ? max(fd, mControlFD) + 1 :
                                   mControlFD + 1;
    FD_ZERO(fds);
    FD_SET(mControlFD, fds);
    if (fd >= 0) {
        FD_SET(fd, fds);
    }
    if (timeout) {
        tv.tv_sec = timeout / 1000000;
        tv.tv_usec = timeout % 1000000;
        tvp = &tv;
    }
    int res = TEMP_FAILURE_RETRY(select(fd_num, fds, NULL, NULL, tvp));
    if (res < 0) {
        ALOGE("%s: select returned %d and failed: %d -> %s",
             __FUNCTION__, res, errno, strerror(errno));
        return ERROR;
    } else if (res == 0) {
        /* Timeout. */
        return TIMEOUT;
    } else if (FD_ISSET(mControlFD, fds)) {
        /* A control event. Let's read the message. */
        ControlMessage msg;
        res = TEMP_FAILURE_RETRY(read(mControlFD, &msg, sizeof(msg)));
        if (res != sizeof(msg)) {
            ALOGE("%s: Unexpected message size %d, or an error %d -> %s",
                 __FUNCTION__, res, errno, strerror(errno));
            return ERROR;
        }
        /* THREAD_STOP is the only message expected here. */
        if (msg == THREAD_STOP) {
            ALOGV("%s: THREAD_STOP message is received", __FUNCTION__);
            return EXIT_THREAD;
        } else {
            ALOGE("Unknown worker thread message %d", msg);
            return ERROR;
        }
    } else {
        /* Must be an FD. */
        ALOGW_IF(fd < 0 || !FD_ISSET(fd, fds), "%s: Undefined 'select' result",
                __FUNCTION__);
        return READY;
    }
}
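/* The 'timeout' argument is in microseconds (it is split into tv_sec and
 * tv_usec above); passing 0 blocks until an FD becomes readable. A typical
 * loop built on Select() might look like the sketch below (a hypothetical
 * outline; the real frame loops live in the subclasses):
 *
 *   while (true) {
 *       switch (Select(cameraFd, frameTimeoutUs)) {
 *           case READY:       break;          // data available on cameraFd
 *           case TIMEOUT:     break;          // no data yet; e.g. synthesize a frame
 *           case EXIT_THREAD: return false;   // THREAD_STOP was received
 *           case ERROR:       return false;   // select() or read() failed
 *       }
 *   }
 */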

};  /* namespace android */