/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * Contains implementation of an abstract class EmulatedCameraDevice that defines
 * functionality expected from an emulated physical camera device:
 *  - Obtaining and setting camera parameters
 *  - Capturing frames
 *  - Streaming video
 *  - etc.
 */

#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_Device"
#include <cutils/log.h>
#include <sys/select.h>
#include <cmath>
#include "Alignment.h"
#include "EmulatedCamera.h"
#include "EmulatedCameraDevice.h"

#undef min
#undef max
#include <algorithm>

namespace android {

const float GAMMA_CORRECTION = 2.2f;
EmulatedCameraDevice::EmulatedCameraDevice(EmulatedCamera* camera_hal)
    : mObjectLock(),
      mCameraHAL(camera_hal),
      mExposureCompensation(1.0f),
      mWhiteBalanceScale(NULL),
      mSupportedWhiteBalanceScale(),
      mState(ECDS_CONSTRUCTED),
      mTriggerAutoFocus(false)
{
}

EmulatedCameraDevice::~EmulatedCameraDevice()
{
    ALOGV("EmulatedCameraDevice destructor");
    for (size_t i = 0; i < mSupportedWhiteBalanceScale.size(); ++i) {
        if (mSupportedWhiteBalanceScale.valueAt(i) != NULL) {
            delete[] mSupportedWhiteBalanceScale.valueAt(i);
        }
    }
}

/****************************************************************************
 * Emulated camera device public API
 ***************************************************************************/

status_t EmulatedCameraDevice::Initialize()
{
    if (isInitialized()) {
        ALOGW("%s: Emulated camera device is already initialized: mState = %d",
             __FUNCTION__, mState);
        return NO_ERROR;
    }

    mState = ECDS_INITIALIZED;

    return NO_ERROR;
}

status_t EmulatedCameraDevice::startDeliveringFrames(bool one_burst)
{
    ALOGV("%s", __FUNCTION__);

    if (!isStarted()) {
        ALOGE("%s: Device is not started", __FUNCTION__);
        return EINVAL;
    }

    /* Frames will be delivered from the thread routine. */
    const status_t res = startWorkerThread(one_burst);
    ALOGE_IF(res != NO_ERROR, "%s: startWorkerThread failed", __FUNCTION__);
    return res;
}

status_t EmulatedCameraDevice::stopDeliveringFrames()
{
    ALOGV("%s", __FUNCTION__);

    if (!isStarted()) {
        ALOGW("%s: Device is not started", __FUNCTION__);
        return NO_ERROR;
    }

    const status_t res = stopWorkerThread();
    ALOGE_IF(res != NO_ERROR, "%s: stopWorkerThread failed", __FUNCTION__);
    return res;
}

status_t EmulatedCameraDevice::setPreviewFrameRate(int framesPerSecond) {
    if (framesPerSecond <= 0) {
        return EINVAL;
    }
    mFramesPerSecond = framesPerSecond;
    return NO_ERROR;
}

void EmulatedCameraDevice::setExposureCompensation(const float ev) {
    ALOGV("%s", __FUNCTION__);

    if (!isStarted()) {
        ALOGW("%s: Fake camera device is not started.", __FUNCTION__);
    }

    mExposureCompensation = std::pow(2.0f, ev / GAMMA_CORRECTION);
    ALOGV("New exposure compensation is %f", mExposureCompensation);
}
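
// Worked example: with GAMMA_CORRECTION = 2.2, an exposure compensation of
// ev = +2 yields a scale factor of pow(2, 2 / 2.2) ~= 1.88, ev = -2 yields
// ~0.53, and ev = 0 leaves the frame unchanged (factor 1.0).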

void EmulatedCameraDevice::initializeWhiteBalanceModes(const char* mode,
                                                       const float r_scale,
                                                       const float b_scale) {
    ALOGV("%s with %s, %f, %f", __FUNCTION__, mode, r_scale, b_scale);
    float* value = new float[3];
    value[0] = r_scale; value[1] = 1.0f; value[2] = b_scale;
    mSupportedWhiteBalanceScale.add(String8(mode), value);
}
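
// Illustrative sketch (the scale values below are hypothetical): a concrete
// device would typically register its supported modes once, e.g.
//   initializeWhiteBalanceModes(CameraParameters::WHITE_BALANCE_AUTO, 1.0f, 1.0f);
//   initializeWhiteBalanceModes(CameraParameters::WHITE_BALANCE_INCANDESCENT, 1.4f, 0.6f);
// and later select one of them with setWhiteBalanceMode().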

void EmulatedCameraDevice::setWhiteBalanceMode(const char* mode) {
    ALOGV("%s with white balance %s", __FUNCTION__, mode);
    mWhiteBalanceScale =
            mSupportedWhiteBalanceScale.valueFor(String8(mode));
}

/* Computes the pixel value after adjusting the white balance to the current
 * one. The inputs are the y, u, v channels of the pixel, and the adjusted
 * values are stored in place. The adjustment is done in RGB space.
 */
void EmulatedCameraDevice::changeWhiteBalance(uint8_t& y,
                                              uint8_t& u,
                                              uint8_t& v) const {
    float r_scale = mWhiteBalanceScale[0];
    float b_scale = mWhiteBalanceScale[2];
    int r = static_cast<float>(YUV2R(y, u, v)) / r_scale;
    int g = YUV2G(y, u, v);
    int b = static_cast<float>(YUV2B(y, u, v)) / b_scale;

    y = RGB2Y(r, g, b);
    u = RGB2U(r, g, b);
    v = RGB2V(r, g, b);
}

void EmulatedCameraDevice::checkAutoFocusTrigger() {
    // The expected value is a reference so we need it to be a variable
    bool expectedTrigger = true;
    if (mTriggerAutoFocus.compare_exchange_strong(expectedTrigger, false)) {
        // If the compare exchange returns true then the value was the expected
        // 'true' and was successfully set to 'false'. So that means it's time
        // to trigger an auto-focus event and that we have disabled that trigger
        // so it won't happen until another request is received.
        mCameraHAL->autoFocusComplete();
    }
}

status_t EmulatedCameraDevice::getCurrentFrameImpl(const uint8_t* source,
                                                   uint8_t* dest,
                                                   uint32_t pixelFormat) const {
    if (pixelFormat == mPixelFormat) {
        memcpy(dest, source, mFrameBufferSize);
        return NO_ERROR;
    } else if (pixelFormat == V4L2_PIX_FMT_YUV420 &&
               mPixelFormat == V4L2_PIX_FMT_YVU420) {
        // Convert from YV12 to YUV420 without alignment
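        // Plane order differs between the two formats: the YV12 source is
        // laid out as Y, then Cr (V), then Cb (U), while the YU12 destination
        // expects Y, then Cb (U), then Cr (V). That is why the chroma copies
        // below read the source planes in swapped order.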
        const int ySize = mYStride * mFrameHeight;
        const int uvSize = mUVStride * (mFrameHeight / 2);
        if (mYStride == mFrameWidth) {
            // Copy Y straight up
            memcpy(dest, source, ySize);
        } else {
            // Strip alignment
            for (int y = 0; y < mFrameHeight; ++y) {
                memcpy(dest + y * mFrameWidth,
                       source + y * mYStride,
                       mFrameWidth);
            }
        }

        if (mUVStride == mFrameWidth / 2) {
            // Swap U and V
            memcpy(dest + ySize, source + ySize + uvSize, uvSize);
            memcpy(dest + ySize + uvSize, source + ySize, uvSize);
        } else {
            // Strip alignment
            uint8_t* uvDest = dest + mFrameWidth * mFrameHeight;
            const uint8_t* uvSource = source + ySize + uvSize;

            for (int i = 0; i < 2; ++i) {
                for (int y = 0; y < mFrameHeight / 2; ++y) {
                    memcpy(uvDest + y * (mFrameWidth / 2),
                           uvSource + y * mUVStride,
                           mFrameWidth / 2);
                }
                uvDest += (mFrameHeight / 2) * (mFrameWidth / 2);
                uvSource -= uvSize;
            }
        }
        return NO_ERROR;
    }
    ALOGE("%s: Invalid pixel format conversion [%.4s to %.4s] requested",
          __FUNCTION__, reinterpret_cast<const char*>(&mPixelFormat),
          reinterpret_cast<const char*>(&pixelFormat));
    return EINVAL;
}
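
// Note: getCurrentFrameImpl() does not know the size of 'dest'; callers are
// expected to pass a buffer large enough for the requested format --
// mFrameBufferSize bytes for a same-format copy, or the unaligned
// mFrameWidth * mFrameHeight * 3 / 2 bytes for the YV12-to-YU12 path above.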

status_t EmulatedCameraDevice::getCurrentFrame(void* buffer,
                                               uint32_t pixelFormat)
{
    if (!isStarted()) {
        ALOGE("%s: Device is not started", __FUNCTION__);
        return EINVAL;
    }
    if (buffer == nullptr) {
        ALOGE("%s: Invalid buffer provided", __FUNCTION__);
        return EINVAL;
    }

    FrameLock lock(*this);
    const void* source = mCameraThread->getPrimaryBuffer();
    if (source == nullptr) {
        ALOGE("%s: No framebuffer", __FUNCTION__);
        return EINVAL;
    }
    return getCurrentFrameImpl(reinterpret_cast<const uint8_t*>(source),
                               reinterpret_cast<uint8_t*>(buffer),
                               pixelFormat);
}

status_t EmulatedCameraDevice::getCurrentPreviewFrame(void* buffer)
{
    if (!isStarted()) {
        ALOGE("%s: Device is not started", __FUNCTION__);
        return EINVAL;
    }
    if (buffer == nullptr) {
        ALOGE("%s: Invalid buffer provided", __FUNCTION__);
        return EINVAL;
    }

    FrameLock lock(*this);
    const void* currentFrame = mCameraThread->getPrimaryBuffer();
    if (currentFrame == nullptr) {
        ALOGE("%s: No framebuffer", __FUNCTION__);
        return EINVAL;
    }

    /* In emulation the framebuffer is never RGB. */
    switch (mPixelFormat) {
        case V4L2_PIX_FMT_YVU420:
            YV12ToRGB32(currentFrame, buffer, mFrameWidth, mFrameHeight);
            return NO_ERROR;
        case V4L2_PIX_FMT_YUV420:
            YU12ToRGB32(currentFrame, buffer, mFrameWidth, mFrameHeight);
            return NO_ERROR;
        case V4L2_PIX_FMT_NV21:
            NV21ToRGB32(currentFrame, buffer, mFrameWidth, mFrameHeight);
            return NO_ERROR;
        case V4L2_PIX_FMT_NV12:
            NV12ToRGB32(currentFrame, buffer, mFrameWidth, mFrameHeight);
            return NO_ERROR;

        default:
            ALOGE("%s: Unknown pixel format %.4s",
                 __FUNCTION__, reinterpret_cast<const char*>(&mPixelFormat));
            return EINVAL;
    }
}
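
// The conversions above produce 32 bits (4 bytes) per pixel, so the caller's
// buffer is expected to hold at least mFrameWidth * mFrameHeight * 4 bytes.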

const void* EmulatedCameraDevice::getCurrentFrame() {
    if (mCameraThread.get()) {
        return mCameraThread->getPrimaryBuffer();
    }
    return nullptr;
}

EmulatedCameraDevice::FrameLock::FrameLock(EmulatedCameraDevice& cameraDevice)
    : mCameraDevice(cameraDevice) {
    mCameraDevice.lockCurrentFrame();
}

EmulatedCameraDevice::FrameLock::~FrameLock() {
    mCameraDevice.unlockCurrentFrame();
}
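
// FrameLock is a small RAII helper: constructing it locks the primary frame
// buffer and destroying it unlocks the buffer again. getCurrentFrame() and
// getCurrentPreviewFrame() above rely on it, e.g.
//   FrameLock lock(*this);
// so the frame producer cannot swap buffers while a frame is being read.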

status_t EmulatedCameraDevice::setAutoFocus() {
    mTriggerAutoFocus = true;
    return NO_ERROR;
}

status_t EmulatedCameraDevice::cancelAutoFocus() {
    mTriggerAutoFocus = false;
    return NO_ERROR;
}

bool EmulatedCameraDevice::requestRestart(int width, int height,
                                          uint32_t pixelFormat,
                                          bool takingPicture, bool oneBurst) {
    if (mCameraThread.get() == nullptr) {
        ALOGE("%s: No thread alive to perform the restart, is preview on?",
              __FUNCTION__);
        return false;
    }
    mCameraThread->requestRestart(width, height, pixelFormat,
                                  takingPicture, oneBurst);
    return true;
}

/****************************************************************************
 * Emulated camera device private API
 ***************************************************************************/

status_t EmulatedCameraDevice::commonStartDevice(int width,
                                                 int height,
                                                 uint32_t pix_fmt)
{
    /* Validate pixel format, and calculate framebuffer size at the same time. */
    switch (pix_fmt) {
        case V4L2_PIX_FMT_YVU420:
        case V4L2_PIX_FMT_YUV420:
            // For these pixel formats the strides have to be aligned to 16-byte
            // boundaries as per the format specification
            // https://developer.android.com/reference/android/graphics/ImageFormat.html#YV12
            mYStride = align(width, 16);
            mUVStride = align(mYStride / 2, 16);
            // The second term should use half the height, but since there are
            // two chroma planes the multiplication by two cancels that out
            mFrameBufferSize = mYStride * height + mUVStride * height;
            break;
        case V4L2_PIX_FMT_NV21:
        case V4L2_PIX_FMT_NV12:
            mYStride = width;
            // Because of interleaving the UV stride is the same as the Y stride
            // since it covers two pixels, one U and one V.
            mUVStride = mYStride;
            // Since the U/V stride covers both U and V we don't multiply by two
            mFrameBufferSize = mYStride * height + mUVStride * (height / 2);
            break;
        default:
            ALOGE("%s: Unknown pixel format %.4s",
                 __FUNCTION__, reinterpret_cast<const char*>(&pix_fmt));
            return EINVAL;
    }
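
    /* Worked example of the size math above: a 176x144 (QCIF) frame in
     * YV12/YU12 has mYStride = align(176, 16) = 176 and
     * mUVStride = align(88, 16) = 96, so mFrameBufferSize =
     * 176 * 144 + 96 * 144 = 39168 bytes; the same frame in NV21/NV12 needs
     * 176 * 144 + 176 * 72 = 38016 bytes. */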

    /* Cache framebuffer info. */
    mFrameWidth = width;
    mFrameHeight = height;
    mPixelFormat = pix_fmt;
    mTotalPixels = width * height;

    /* Allocate framebuffer. */
    mFrameBuffers[0].resize(mFrameBufferSize);
    mFrameBuffers[1].resize(mFrameBufferSize);
    ALOGV("%s: Allocated %zu bytes for %d pixels in %.4s[%dx%d] frame",
         __FUNCTION__, mFrameBufferSize, mTotalPixels,
         reinterpret_cast<const char*>(&mPixelFormat), mFrameWidth, mFrameHeight);
    return NO_ERROR;
}

void EmulatedCameraDevice::commonStopDevice()
{
    mFrameWidth = mFrameHeight = mTotalPixels = 0;
    mPixelFormat = 0;

    mFrameBuffers[0].clear();
    mFrameBuffers[1].clear();
    // No need to keep all that memory allocated if the camera isn't running
    mFrameBuffers[0].shrink_to_fit();
    mFrameBuffers[1].shrink_to_fit();
}

/****************************************************************************
 * Worker thread management.
 ***************************************************************************/

status_t EmulatedCameraDevice::startWorkerThread(bool one_burst)
{
    ALOGV("%s", __FUNCTION__);

    if (!isInitialized()) {
        ALOGE("%s: Emulated camera device is not initialized", __FUNCTION__);
        return EINVAL;
    }

    mCameraThread = new CameraThread(this, staticProduceFrame, this);
    if (mCameraThread == NULL) {
        ALOGE("%s: Unable to instantiate CameraThread object", __FUNCTION__);
        return ENOMEM;
    }
    status_t res = mCameraThread->startThread(one_burst);
    if (res != NO_ERROR) {
        ALOGE("%s: Unable to start CameraThread: %s",
              __FUNCTION__, strerror(res));
        return res;
    }

    return res;
}

status_t EmulatedCameraDevice::stopWorkerThread()
{
    ALOGV("%s", __FUNCTION__);

    if (!isInitialized()) {
        ALOGE("%s: Emulated camera device is not initialized", __FUNCTION__);
        return EINVAL;
    }

    status_t res = mCameraThread->stopThread();
    if (res != NO_ERROR) {
        ALOGE("%s: Unable to stop CameraThread", __FUNCTION__);
        return res;
    }
    res = mCameraThread->joinThread();
    if (res != NO_ERROR) {
        ALOGE("%s: Unable to join CameraThread", __FUNCTION__);
        return res;
    }

    // Destroy the thread as well
    mCameraThread.clear();
    return res;
}

EmulatedCameraDevice::CameraThread::CameraThread(EmulatedCameraDevice* dev,
                                                 ProduceFrameFunc producer,
                                                 void* producerOpaque)
    : WorkerThread("Camera_CameraThread", dev, dev->mCameraHAL),
      mCurFrameTimestamp(0),
      mProducerFunc(producer),
      mProducerOpaque(producerOpaque),
      mRestartWidth(0),
      mRestartHeight(0),
      mRestartPixelFormat(0),
      mRestartOneBurst(false),
      mRestartTakingPicture(false),
      mRestartRequested(false) {

}

const void* EmulatedCameraDevice::CameraThread::getPrimaryBuffer() const {
    if (mFrameProducer.get()) {
        return mFrameProducer->getPrimaryBuffer();
    }
    return nullptr;
}

void EmulatedCameraDevice::CameraThread::lockPrimaryBuffer() {
    mFrameProducer->lockPrimaryBuffer();
}

void EmulatedCameraDevice::CameraThread::unlockPrimaryBuffer() {
    mFrameProducer->unlockPrimaryBuffer();
}

bool
EmulatedCameraDevice::CameraThread::waitForFrameOrTimeout(nsecs_t timeout) {
    // Keep waiting until the frame producer indicates that a frame is
    // available. This does introduce some unnecessary latency to the first
    // frame delivery but avoids a lot of thread synchronization.
    do {
        // Wait on the running condition for at most 'timeout' nanoseconds;
        // if the thread has been asked to stop, mRunning will be false below.
        Mutex::Autolock lock(mRunningMutex);
        mRunningCondition.waitRelative(mRunningMutex, timeout);
        if (!mRunning) {
            ALOGV("%s: CameraThread has been terminated.", __FUNCTION__);
            return false;
        }
        // Set a short timeout in case there is no frame available and we are
        // going to loop. This way we ensure a sleep but keep a decent latency
        timeout = milliseconds(5);
    } while (!mFrameProducer->hasFrame());

    return true;
}

bool EmulatedCameraDevice::CameraThread::inWorkerThread() {
    /* Wait till FPS timeout expires, or thread exit message is received. */
    nsecs_t wakeAt =
        mCurFrameTimestamp + 1000000000.0 / mCameraDevice->mFramesPerSecond;
    nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
    nsecs_t timeout = std::max<nsecs_t>(0, wakeAt - now);

    if (!waitForFrameOrTimeout(timeout)) {
        return false;
    }

    /* Check if a restart was requested and apply the requested changes */
    if (!checkRestartRequest()) {
        return false;
    }

    /* Check if an auto-focus event needs to be triggered */
    mCameraDevice->checkAutoFocusTrigger();

    mCurFrameTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
    mCameraHAL->onNextFrameAvailable(mCurFrameTimestamp, mCameraDevice);

    return true;
}

status_t EmulatedCameraDevice::CameraThread::onThreadStart() {
    void* primaryBuffer = mCameraDevice->getPrimaryBuffer();
    void* secondaryBuffer = mCameraDevice->getSecondaryBuffer();
    mFrameProducer = new FrameProducer(mCameraDevice,
                                       mProducerFunc, mProducerOpaque,
                                       primaryBuffer, secondaryBuffer);
    if (mFrameProducer.get() == nullptr) {
        ALOGE("%s: Could not instantiate FrameProducer object", __FUNCTION__);
        return ENOMEM;
    }
    return mFrameProducer->startThread(mOneBurst);
}

void EmulatedCameraDevice::CameraThread::onThreadExit() {
    if (mFrameProducer.get()) {
        if (mFrameProducer->stopThread() == NO_ERROR) {
            mFrameProducer->joinThread();
            mFrameProducer.clear();
        }
    }
}

EmulatedCameraDevice::CameraThread::FrameProducer::FrameProducer(
        EmulatedCameraDevice* dev,
        ProduceFrameFunc producer,
        void* opaque,
        void* primaryBuffer,
        void* secondaryBuffer)
    : WorkerThread("Camera_FrameProducer", dev, dev->mCameraHAL),
      mProducer(producer),
      mOpaque(opaque),
      mPrimaryBuffer(primaryBuffer),
      mSecondaryBuffer(secondaryBuffer),
      mLastFrame(0),
      mHasFrame(false) {

}

const void*
EmulatedCameraDevice::CameraThread::FrameProducer::getPrimaryBuffer() const {
    return mPrimaryBuffer;
}

void EmulatedCameraDevice::CameraThread::FrameProducer::lockPrimaryBuffer() {
    mBufferMutex.lock();
}
void EmulatedCameraDevice::CameraThread::FrameProducer::unlockPrimaryBuffer() {
    mBufferMutex.unlock();
}

void EmulatedCameraDevice::CameraThread::requestRestart(int width,
                                                        int height,
                                                        uint32_t pixelFormat,
                                                        bool takingPicture,
                                                        bool oneBurst) {
    Mutex::Autolock lock(mRequestMutex);
    mRestartWidth = width;
    mRestartHeight = height;
    mRestartPixelFormat = pixelFormat;
    mRestartTakingPicture = takingPicture;
    mRestartOneBurst = oneBurst;
    mRestartRequested = true;
}

bool EmulatedCameraDevice::CameraThread::FrameProducer::hasFrame() const {
    return mHasFrame;
}

bool EmulatedCameraDevice::CameraThread::checkRestartRequest() {
    Mutex::Autolock lock(mRequestMutex);
    if (mRestartRequested) {
        mRestartRequested = false;
        status_t res = mFrameProducer->stopThread();
        if (res != NO_ERROR) {
            ALOGE("%s: Could not stop frame producer thread", __FUNCTION__);
            mCameraHAL->onCameraDeviceError(CAMERA_ERROR_SERVER_DIED);
            return false;
        }
        res = mFrameProducer->joinThread();
        if (res != NO_ERROR) {
            ALOGE("%s: Could not join frame producer thread", __FUNCTION__);
            mCameraHAL->onCameraDeviceError(CAMERA_ERROR_SERVER_DIED);
            return false;
        }
        mFrameProducer.clear();
        res = mCameraDevice->stopDevice();
        if (res != NO_ERROR) {
            ALOGE("%s: Could not stop device", __FUNCTION__);
            mCameraHAL->onCameraDeviceError(CAMERA_ERROR_SERVER_DIED);
            return false;
        }
        res = mCameraDevice->startDevice(mRestartWidth,
                                         mRestartHeight,
                                         mRestartPixelFormat);
        if (res != NO_ERROR) {
            ALOGE("%s: Could not start device", __FUNCTION__);
            mCameraHAL->onCameraDeviceError(CAMERA_ERROR_SERVER_DIED);
            return false;
        }
        if (mRestartTakingPicture) {
            mCameraHAL->setTakingPicture(true);
        }
        mOneBurst = mRestartOneBurst;

        // Pretend this is a thread start; it performs the remaining setup
        if (onThreadStart() != NO_ERROR) {
            mCameraDevice->stopDevice();
            mCameraHAL->onCameraDeviceError(CAMERA_ERROR_SERVER_DIED);
            return false;
        }

        // Now wait for the frame producer to start producing before we proceed
        return waitForFrameOrTimeout(0);
    }
    return true;
}

bool EmulatedCameraDevice::CameraThread::FrameProducer::inWorkerThread() {
    nsecs_t nextFrame =
        mLastFrame + 1000000000 / mCameraDevice->mFramesPerSecond;
    nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
    nsecs_t timeout = std::max<nsecs_t>(0, nextFrame - now);

    {
        Mutex::Autolock lock(mRunningMutex);
        mRunningCondition.waitRelative(mRunningMutex, timeout);
        if (!mRunning) {
            ALOGV("%s: FrameProducer has been terminated.", __FUNCTION__);
            return false;
        }
    }

    // Produce one frame and place it in the secondary buffer
    mLastFrame = systemTime(SYSTEM_TIME_MONOTONIC);
    if (!mProducer(mOpaque, mSecondaryBuffer)) {
        ALOGE("FrameProducer could not produce frame, exiting thread");
        mCameraHAL->onCameraDeviceError(CAMERA_ERROR_SERVER_DIED);
        return false;
    }

    {
        // Switch buffers now that the secondary buffer is ready
        Mutex::Autolock lock(mBufferMutex);
        std::swap(mPrimaryBuffer, mSecondaryBuffer);
    }
    mHasFrame = true;
    return true;
}
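
// Note on the buffer swap above: the producer only writes into the secondary
// buffer and exchanges the pointers while holding mBufferMutex, the same mutex
// taken by lockPrimaryBuffer() and therefore by FrameLock. A consumer holding
// the lock always reads a completely produced frame from the primary buffer.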

void EmulatedCameraDevice::lockCurrentFrame() {
    mCameraThread->lockPrimaryBuffer();
}

void EmulatedCameraDevice::unlockCurrentFrame() {
    mCameraThread->unlockPrimaryBuffer();
}

};  /* namespace android */