/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef HW_EMULATOR_CAMERA_EMULATED_CAMERA_DEVICE_H
#define HW_EMULATOR_CAMERA_EMULATED_CAMERA_DEVICE_H

/*
 * Contains declaration of an abstract class EmulatedCameraDevice that defines
 * functionality expected from an emulated physical camera device:
 *  - Obtaining and setting camera device parameters
 *  - Capturing frames
 *  - Streaming video
 *  - etc.
 */

#include <utils/threads.h>
#include <utils/KeyedVector.h>
#include <utils/String8.h>
#include "EmulatedCameraCommon.h"
#include "Converters.h"
#include "WorkerThread.h"

#undef min
#undef max
#include <atomic>
#include <vector>

namespace android {

class EmulatedCamera;

/* Encapsulates an abstract class EmulatedCameraDevice that defines
 * functionality expected from an emulated physical camera device:
 *  - Obtaining and setting camera device parameters
 *  - Capturing frames
 *  - Streaming video
 *  - etc.
 */
class EmulatedCameraDevice {
public:
    /* Constructs EmulatedCameraDevice instance.
     * Param:
     *  camera_hal - Emulated camera that implements the camera HAL API, and
     *      manages (contains) this object.
     */
    explicit EmulatedCameraDevice(EmulatedCamera* camera_hal);

    /* Destructs EmulatedCameraDevice instance. */
    virtual ~EmulatedCameraDevice();

    /***************************************************************************
     * Emulated camera device abstract interface
     **************************************************************************/

public:
    /* Connects to the camera device.
     * This method must be called on an initialized instance of this class.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    virtual status_t connectDevice() = 0;

    /* Disconnects from the camera device.
     * Return:
     *  NO_ERROR on success, or an appropriate error status. If this method is
     *  called for an already disconnected or uninitialized instance of this
     *  class, a successful status must be returned from this method. If this
     *  method is called for an instance that is in the "started" state, this
     *  method must return a failure.
     */
    virtual status_t disconnectDevice() = 0;

    /* Starts the camera device.
     * This method tells the camera device to start capturing frames of the given
     * dimensions for the given pixel format. Note that this method doesn't start
     * the delivery of the captured frames to the emulated camera. Call
     * startDeliveringFrames method to start delivering frames. This method must
     * be called on a connected instance of this class. If it is called on a
     * disconnected instance, this method must return a failure.
     * Param:
     *  width, height - Frame dimensions to use when capturing video frames.
     *  pix_fmt - Pixel format to use when capturing video frames.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    virtual status_t startDevice(int width, int height, uint32_t pix_fmt) = 0;

    /* Stops the camera device.
     * This method tells the camera device to stop capturing frames. Note that
     * this method doesn't stop delivering frames to the emulated camera. Always
     * call stopDeliveringFrames prior to calling this method.
     * Return:
     *  NO_ERROR on success, or an appropriate error status. If this method is
     *  called for an object that is not capturing frames, or is disconnected,
     *  or is uninitialized, a successful status must be returned from this
     *  method.
     */
    virtual status_t stopDevice() = 0;

    /***************************************************************************
     * Emulated camera device public API
     **************************************************************************/

public:
    /* Initializes EmulatedCameraDevice instance.
     * Derived classes should override this method in order to cache static
     * properties of the physical device (list of supported pixel formats, frame
     * sizes, etc.). If this method is called on an already initialized instance,
     * it must return a successful status.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    virtual status_t Initialize();

    /* Initializes the white balance mode parameters.
     * The parameters are passed in by each individual derived camera
     * implementation, since different camera manufacturers may have different
     * preferences for the white balance parameters. The green channel in the
     * RGB color space is fixed to keep the luminance reasonably constant.
     *
     * Param:
     *  mode - the text describing the white balance mode
     *  r_scale - the scale factor for the R channel in RGB space
     *  b_scale - the scale factor for the B channel in RGB space
     */
    void initializeWhiteBalanceModes(const char* mode,
                                     const float r_scale,
                                     const float b_scale);
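
    /* Illustrative sketch (assumption, not part of the original header): a
     * derived camera could register its supported modes during initialization
     * roughly as follows. The scale values below are made-up examples.
     *
     *   initializeWhiteBalanceModes(CameraParameters::WHITE_BALANCE_INCANDESCENT,
     *                               1.38f, 0.60f);
     *   initializeWhiteBalanceModes(CameraParameters::WHITE_BALANCE_DAYLIGHT,
     *                               1.09f, 0.92f);
     *   setWhiteBalanceMode(CameraParameters::WHITE_BALANCE_DAYLIGHT);
     */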

    /* Starts delivering frames captured from the camera device.
     * This method will start the worker thread that will be pulling frames from
     * the camera device, and will deliver the pulled frames back to the emulated
     * camera via the onNextFrameAvailable callback. This method must be called
     * on a connected instance of this class with a started camera device. If it
     * is called on a disconnected instance, or the camera device has not been
     * started, this method must return a failure.
     * Param:
     *  one_burst - Controls how many frames should be delivered. If this
     *      parameter is 'true', only one captured frame will be delivered to the
     *      emulated camera. If this parameter is 'false', frames will keep
     *      coming until the stopDeliveringFrames method is called. Typically,
     *      this parameter is set to 'true' only in order to obtain a single
     *      frame that will be used as a "picture" in the takePicture method of
     *      the emulated camera.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    virtual status_t startDeliveringFrames(bool one_burst);
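
    /* Illustrative lifecycle sketch (assumption, not part of the original
     * header): a concrete subclass is typically driven in the order documented
     * above; error handling is omitted for brevity.
     *
     *   EmulatedCameraDevice* dev = ...;              // some concrete subclass
     *   dev->Initialize();
     *   dev->connectDevice();
     *   dev->startDevice(640, 480, V4L2_PIX_FMT_YUV420);
     *   dev->startDeliveringFrames(false);            // continuous delivery
     *   // ... frames are delivered back through the emulated camera ...
     *   dev->stopDeliveringFrames();
     *   dev->stopDevice();
     *   dev->disconnectDevice();
     */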

    /* Stops delivering frames captured from the camera device.
     * This method will stop the worker thread started by startDeliveringFrames.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    virtual status_t stopDeliveringFrames();

    /* Sets the preview frame rate.
     * Indicates the rate at which the camera should provide preview frames, in
     * frames per second. */
    status_t setPreviewFrameRate(int framesPerSecond);

    /* Sets the exposure compensation for the camera device.
     */
    void setExposureCompensation(const float ev);

    /* Sets the white balance mode for the device.
     */
    void setWhiteBalanceMode(const char* mode);

    /* Gets the current framebuffer in a selected format.
     * This method must be called on a connected instance of this class with a
     * started camera device. If it is called on a disconnected instance, or the
     * camera device has not been started, this method must return a failure.
     * Note that this method should be called only after at least one frame has
     * been captured and delivered. Otherwise it will return garbage in the
     * preview frame buffer. Typically, this method should be called from the
     * onNextFrameAvailable callback. The method can perform some basic pixel
     * format conversions for which efficient implementations exist. If a
     * conversion is not supported the method will fail. Note that this does NOT
     * require that the current frame be locked using a FrameLock object.
     *
     * Param:
     *  buffer - Buffer, large enough to contain the entire frame.
     *  pixelFormat - The pixel format to convert to, use
     *                getOriginalPixelFormat() to get the configured pixel
     *                format (if using this no conversion will be needed)
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    virtual status_t getCurrentFrame(void* buffer, uint32_t pixelFormat);
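
    /* Illustrative usage sketch (assumption, not part of the original header):
     * copying the current frame out in its configured pixel format.
     *
     *   std::vector<uint8_t> copy(device->getFrameBufferSize());
     *   status_t res = device->getCurrentFrame(copy.data(),
     *                                          device->getOriginalPixelFormat());
     *   if (res != NO_ERROR) {
     *       // At least one frame must have been captured for this to succeed.
     *   }
     */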

    /* Gets the current framebuffer, converted into the preview frame format.
     * This method must be called on a connected instance of this class with a
     * started camera device. If it is called on a disconnected instance, or the
     * camera device has not been started, this method must return a failure.
     * Note that this method should be called only after at least one frame has
     * been captured and delivered. Otherwise it will return garbage in the
     * preview frame buffer. Typically, this method should be called from the
     * onNextFrameAvailable callback. Note that this does NOT require that the
     * current frame be locked using a FrameLock object.
     * Param:
     *  buffer - Buffer, large enough to contain the entire preview frame.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    virtual status_t getCurrentPreviewFrame(void* buffer);

    /* Gets a pointer to the current frame buffer in its raw format.
     * This method must be called on a connected instance of this class with a
     * started camera device. If it is called on a disconnected instance, or the
     * camera device has not been started, this method must return NULL.
     * This method should only be called when the frame lock is held through
     * a FrameLock object. Otherwise the contents of the frame might change
     * unexpectedly or its memory could be deallocated, leading to a crash.
     * Return:
     *  A pointer to the current frame buffer on success, NULL otherwise.
     */
    virtual const void* getCurrentFrame();

    class FrameLock {
    public:
        FrameLock(EmulatedCameraDevice& cameraDevice);
        ~FrameLock();
    private:
        EmulatedCameraDevice& mCameraDevice;
    };
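
    /* Illustrative FrameLock usage sketch (assumption, not part of the original
     * header): holding the lock while reading the raw frame pointer, e.g. from
     * within an onNextFrameAvailable handler.
     *
     *   {
     *       EmulatedCameraDevice::FrameLock lock(*device);
     *       const void* raw = device->getCurrentFrame();
     *       if (raw != NULL) {
     *           // Inspect or copy the frame while the lock is held.
     *       }
     *   }   // The lock is released when 'lock' goes out of scope.
     */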

    /* Gets the width of the frame obtained from the physical device.
     * Return:
     *  Width of the frame obtained from the physical device. Note that the
     *  value returned from this method is valid only if the camera device has
     *  been started.
     */
    inline int getFrameWidth() const
    {
        ALOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
        return mFrameWidth;
    }

    /* Gets the height of the frame obtained from the physical device.
     * Return:
     *  Height of the frame obtained from the physical device. Note that the
     *  value returned from this method is valid only if the camera device has
     *  been started.
     */
    inline int getFrameHeight() const
    {
        ALOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
        return mFrameHeight;
    }

    /* Gets the byte size of the current frame buffer.
     * Return:
     *  Byte size of the frame buffer. Note that the value returned from this
     *  method is valid only if the camera device has been started.
     */
    inline size_t getFrameBufferSize() const
    {
        ALOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
        return mFrameBufferSize;
    }

    /* Gets the number of bytes required to store the current video frame
     * buffer. Note that this can be different from getFrameBufferSize depending
     * on the pixel format and resolution. The video frames use a pixel format
     * that is suitable for the encoding pipeline and this may have different
     * alignment requirements than the pixel format used for regular frames.
     */
    inline size_t getVideoFrameBufferSize() const
    {
        ALOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
        // Currently the video format is always YUV 420 without any kind of
        // alignment. So each pixel uses 12 bits, and then we divide by 8 to get
        // the size in bytes. If additional pixel formats are supported this
        // should be updated to take the selected video format into
        // consideration.
        return (mFrameWidth * mFrameHeight * 12) / 8;
    }
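
    /* Worked example (illustrative, not part of the original header): for a
     * 640x480 frame the YUV 420 video buffer is (640 * 480 * 12) / 8 = 460800
     * bytes, whereas a 16-bit-per-pixel preview format such as RGB565 would
     * need 640 * 480 * 2 = 614400 bytes. */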

    /* Gets the number of pixels in the current frame buffer.
     * Return:
     *  Number of pixels in the frame buffer. Note that the value returned from
     *  this method is valid only if the camera device has been started.
     */
    inline int getPixelNum() const
    {
        ALOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
        return mTotalPixels;
    }

    /* Gets the pixel format of the frames that the camera device streams to
     * this class.
     * Throughout the camera framework, there are three different forms of pixel
     * format representation:
     *  - Original format, as reported by the actual camera device. Values for
     *    this format are declared in bionic/libc/kernel/common/linux/videodev2.h
     *  - String representation as defined in CameraParameters::PIXEL_FORMAT_XXX
     *    strings in frameworks/base/include/camera/CameraParameters.h
     *  - HAL_PIXEL_FORMAT_XXX format, as defined in system/core/include/system/graphics.h
     * Since the emulated camera device gets its data from the actual device, it
     * receives the pixel format in the original form, and that is the
     * representation returned from this method. HAL components will need to
     * translate the value returned from this method to the appropriate form.
     * This method must be called only on a started instance of this class,
     * since it's applicable only when the camera device is ready to stream
     * frames.
     * Return:
     *  Current framebuffer's pixel format. Note that the value returned from
     *  this method is valid only if the camera device has been started.
     */
    inline uint32_t getOriginalPixelFormat() const
    {
        ALOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
        return mPixelFormat;
    }
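
    /* Illustrative translation sketch (assumption, not part of the original
     * header): a HAL component mapping the original V4L2 format to one of the
     * CameraParameters string constants might look roughly like this.
     *
     *   const char* strFmt = NULL;
     *   switch (dev->getOriginalPixelFormat()) {
     *       case V4L2_PIX_FMT_NV21:
     *           strFmt = CameraParameters::PIXEL_FORMAT_YUV420SP;
     *           break;
     *       case V4L2_PIX_FMT_YUV420:
     *           strFmt = CameraParameters::PIXEL_FORMAT_YUV420P;
     *           break;
     *       default:
     *           break;   // unsupported original format
     *   }
     */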

    /*
     * State checkers.
     */

    inline bool isInitialized() const {
        return mState != ECDS_CONSTRUCTED;
    }
    inline bool isConnected() const {
        /* Instance is connected when its status is either "connected" or
         * "started". */
        return mState == ECDS_CONNECTED || mState == ECDS_STARTED;
    }
    inline bool isStarted() const {
        return mState == ECDS_STARTED;
    }

    /* Enables auto-focus for the camera. This is only possible between calls to
     * startPreview and stopPreview, i.e. when preview frames are being
     * delivered. This will eventually trigger a callback to the camera HAL
     * saying auto-focus completed.
     */
    virtual status_t setAutoFocus();

    /* Cancels auto-focus if it's enabled.
     */
    virtual status_t cancelAutoFocus();

    /* Requests an asynchronous camera restart with new image parameters. The
     * restart will be performed on the same thread that delivers frames,
     * ensuring that all callbacks are done from the same thread.
     * Return:
     *  false if the request cannot be honored because no thread is
     *        running or some other error occurred.
     */
    bool requestRestart(int width, int height, uint32_t pixelFormat,
                        bool takingPicture, bool oneBurst);
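
    /* Illustrative sketch (assumption, not part of the original header): the
     * emulated camera might request a restart when a picture needs a different
     * resolution than the current preview; pictureWidth and pictureHeight are
     * placeholders.
     *
     *   bool queued = dev->requestRestart(pictureWidth, pictureHeight,
     *                                     V4L2_PIX_FMT_YUV420,
     *                                     true,    // takingPicture
     *                                     true);   // oneBurst
     *   if (!queued) {
     *       // No frame thread is running; handle the error.
     *   }
     */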

    /****************************************************************************
     * Emulated camera device private API
     ***************************************************************************/
protected:
    /* Performs common validation and calculation of startDevice parameters.
     * Param:
     *  width, height, pix_fmt - Parameters passed to the startDevice method.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    virtual status_t commonStartDevice(int width, int height, uint32_t pix_fmt);

    /* Performs common cleanup on stopDevice.
     * This method will undo what commonStartDevice had done.
     */
    virtual void commonStopDevice();

    /** Computes a luminance value after taking the exposure compensation
     * value into account.
     *
     * Param:
     *  inputY - The input luminance value.
     * Return:
     *  The luminance value after adjusting for the exposure compensation.
     */
    inline uint8_t changeExposure(const uint8_t& inputY) const {
        return static_cast<uint8_t>(clamp(static_cast<float>(inputY) *
                                    mExposureCompensation));
    }
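
    /* Worked example (illustrative, not part of the original header): assuming
     * clamp() saturates to the [0, 255] range of a uint8_t, an exposure
     * compensation factor of 1.5 maps an input luminance of 100 to 150, while
     * an input of 200 maps to 300 and is clamped to 255. */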

    /** Computes the pixel value in YUV space after adjusting to the current
     * white balance mode.
     */
    void changeWhiteBalance(uint8_t& y, uint8_t& u, uint8_t& v) const;

    /* Check if there is a pending auto-focus trigger and send a notification
     * if there is. This should be called from the worker thread loop if the
     * camera device wishes to use the default behavior of immediately sending
     * an auto-focus completion event on request. Otherwise the device should
     * implement its own auto-focus behavior. */
    void checkAutoFocusTrigger();

    /* Implementation for getCurrentFrame that includes pixel format conversion
     * if needed. This allows subclasses to easily use this method instead of
     * having to reimplement the conversion all over.
     */
    status_t getCurrentFrameImpl(const uint8_t* source, uint8_t* dest,
                                 uint32_t pixelFormat) const;

    /****************************************************************************
     * Worker thread management.
     * Typically when an emulated camera device starts capturing frames from the
     * actual device, it does that in a worker thread created in
     * startWorkerThread, and terminated in stopWorkerThread. Since this is such
     * a typical scenario, it makes sense to encapsulate worker thread management
     * in the base class for all emulated camera devices.
     ***************************************************************************/

protected:
    /* Starts the worker thread.
     * Typically, the worker thread is started from the startDeliveringFrames
     * method of this class.
     * Param:
     *  one_burst - Controls how many times the thread loop should run. If this
     *      parameter is 'true', the thread routine will run only once. If this
     *      parameter is 'false', the thread routine will run until the
     *      stopWorkerThread method is called. See startDeliveringFrames for
     *      more info.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    virtual status_t startWorkerThread(bool one_burst);

    /* Stops the worker thread.
     * Note that this method will always wait for the worker thread to
     * terminate. Typically, the worker thread is stopped from the
     * stopDeliveringFrames method of this class.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    virtual status_t stopWorkerThread();

    /* Produce a camera frame and place it in buffer. The buffer is one of
     * the two buffers provided to mFrameProducer during construction along with
     * a pointer to this method. The method is expected to know what size frames
     * it provided to the producer thread. Returning false indicates an
     * unrecoverable error that will stop the frame production thread. */
    virtual bool produceFrame(void* buffer) = 0;
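
    /* Illustrative override sketch (assumption, not part of the original
     * header): a hypothetical fake-camera subclass drawing directly into the
     * supplied buffer, which is typically sized according to
     * getFrameBufferSize().
     *
     *   bool MyFakeCameraDevice::produceFrame(void* buffer) {
     *       uint8_t* frame = static_cast<uint8_t*>(buffer);
     *       // Fill the Y, U and V planes for an mFrameWidth x mFrameHeight
     *       // frame in the configured pixel format...
     *       return true;   // returning false stops the producer thread
     *   }
     */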

    /* Get the primary buffer to use when constructing the FrameProducer. */
    virtual void* getPrimaryBuffer() {
        return mFrameBuffers[0].data();
    }

    /* Get the secondary buffer to use when constructing the FrameProducer. */
    virtual void* getSecondaryBuffer() {
        return mFrameBuffers[1].data();
    }

    /* A class that encapsulates the asynchronous behavior of a camera. This
     * includes asynchronous production (through another thread) and frame
     * delivery, as well as asynchronous state changes that have to be
     * synchronized with frame production and delivery but must not block the
     * camera HAL. */
    class CameraThread : public WorkerThread {
    public:
        typedef bool (*ProduceFrameFunc)(void* opaque, void* destinationBuffer);
        CameraThread(EmulatedCameraDevice* cameraDevice,
                     ProduceFrameFunc producer,
                     void* producerOpaque);

        /* Accesses the primary buffer of the frame producer; this is the frame
         * that is currently not being written to. The buffer will only have
         * valid contents if hasFrame() returns true. Note that accessing this
         * without first locking the primary buffer can lead to its contents
         * changing without notice. */
        const void* getPrimaryBuffer() const;

        /* Lock and unlock the primary buffer */
        void lockPrimaryBuffer();
        void unlockPrimaryBuffer();

        void requestRestart(int width, int height, uint32_t pixelFormat,
                            bool takingPicture, bool oneBurst);

    private:
        bool checkRestartRequest();
        bool waitForFrameOrTimeout(nsecs_t timeout);
        bool inWorkerThread() override;

        status_t onThreadStart() override;
        void onThreadExit() override;

        /* A class with a thread that will call a function at a specified
         * interval to produce frames. This is done in a double-buffered fashion
         * to make sure that one of the frames can be delivered without risk of
         * overwriting its contents. Access to the primary buffer, the one NOT
         * being drawn to, should be protected with the provided lock methods;
         * otherwise the guarantee of not overwriting the contents does not
         * hold.
         */
        class FrameProducer : public WorkerThread {
        public:
            FrameProducer(EmulatedCameraDevice* cameraDevice,
                          ProduceFrameFunc producer, void* opaque,
                          void* primaryBuffer, void* secondaryBuffer);

            /* Indicates if the producer has produced at least one frame. */
            bool hasFrame() const;

            const void* getPrimaryBuffer() const;

            void lockPrimaryBuffer();
            void unlockPrimaryBuffer();

        protected:
            bool inWorkerThread() override;

            ProduceFrameFunc mProducer;
            void* mOpaque;
            void* mPrimaryBuffer;
            void* mSecondaryBuffer;
            nsecs_t mLastFrame;
            mutable Mutex mBufferMutex;
            std::atomic<bool> mHasFrame;
        };

        nsecs_t mCurFrameTimestamp;
        /* Worker thread that will produce frames for the camera thread */
        sp<FrameProducer> mFrameProducer;
        ProduceFrameFunc mProducerFunc;
        void* mProducerOpaque;
        Mutex mRequestMutex;
        int mRestartWidth;
        int mRestartHeight;
        uint32_t mRestartPixelFormat;
        bool mRestartOneBurst;
        bool mRestartTakingPicture;
        bool mRestartRequested;
    };

    /****************************************************************************
     * Data members
     ***************************************************************************/

protected:
    /* Locks this instance for changes to parameters, state, etc. */
    Mutex                       mObjectLock;

    /* A camera thread that is used in frame production, delivery and handling
     * of asynchronous restarts. Internally the process of generating and
     * delivering frames is split up into two threads. This way frames can
     * always be delivered on time even if they cannot be produced fast enough
     * to keep up with the expected frame rate. It also increases performance on
     * multi-core systems. If the producer cannot keep up, the last frame will
     * simply be delivered again. */
    sp<CameraThread>            mCameraThread;

    /* Emulated camera object containing this instance. */
    EmulatedCamera*             mCameraHAL;

    /* Framebuffers containing the frame being drawn to and the frame being
     * delivered. This is used by the double-buffering producer thread; the
     * consumer thread will copy frames from one of these buffers to
     * mCurrentFrame to avoid being stalled by frame production. */
    std::vector<uint8_t>        mFrameBuffers[2];

    /*
     * Framebuffer properties.
     */

    /* Byte size of the framebuffer. */
    size_t                      mFrameBufferSize;

    /* Original pixel format (one of the V4L2_PIX_FMT_XXX values, as defined in
     * bionic/libc/kernel/common/linux/videodev2.h). */
    uint32_t                    mPixelFormat;

    /* Frame width */
    int                         mFrameWidth;

    /* Frame height */
    int                         mFrameHeight;

    /* The number of frames per second that the camera should deliver */
    int                         mFramesPerSecond;

    /* Defines byte distance between the start of each Y row */
    int                         mYStride;

    /* Defines byte distance between the start of each U/V row. For formats with
     * separate U and V planes this is the distance between rows in each plane.
     * For formats with interleaved U and V components this is the distance
     * between rows in the interleaved plane, meaning that it's the stride over
     * the combined U and V components. */
    int                         mUVStride;

    /* Total number of pixels */
    int                         mTotalPixels;

    /* Exposure compensation value */
    float                       mExposureCompensation;

    /* Scale factors of the currently selected white balance mode. */
    float*                      mWhiteBalanceScale;

    /* Maps white balance mode names to their scale factors. */
    DefaultKeyedVector<String8, float*>      mSupportedWhiteBalanceScale;

    /* Defines possible states of the emulated camera device object.
     */
    enum EmulatedCameraDeviceState {
        /* Object has been constructed. */
        ECDS_CONSTRUCTED,
        /* Object has been initialized. */
        ECDS_INITIALIZED,
        /* Object has been connected to the physical device. */
        ECDS_CONNECTED,
        /* Camera device has been started. */
        ECDS_STARTED,
    };

    /* Object state. */
    EmulatedCameraDeviceState   mState;

private:
    /* Lock the current frame so that it can safely be accessed using
     * getCurrentFrame. Prefer using a FrameLock object on the stack instead
     * to ensure that the lock is always unlocked properly.
     */
    void lockCurrentFrame();
    /* Unlock the current frame after locking it. Prefer using a FrameLock
     * object instead.
     */
    void unlockCurrentFrame();

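    /* Static trampoline with a signature matching CameraThread::ProduceFrameFunc:
     * converts the opaque pointer back to the device instance and forwards the
     * call to the virtual produceFrame method. */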
    static bool staticProduceFrame(void* opaque, void* buffer) {
        auto cameraDevice = reinterpret_cast<EmulatedCameraDevice*>(opaque);
        return cameraDevice->produceFrame(buffer);
    }

    /* A flag indicating if an auto-focus completion event should be sent the
     * next time the worker thread runs. This implies that the auto-focus
     * completion event can only be delivered while preview frames are being
     * delivered. This is also a requirement specified in the documentation,
     * where a request to perform auto-focusing is only valid between calls to
     * startPreview and stopPreview.
     * https://developer.android.com/reference/android/hardware/Camera.html#autoFocus(android.hardware.Camera.AutoFocusCallback)
     */
    std::atomic<bool> mTriggerAutoFocus;
};

}; /* namespace android */

#endif  /* HW_EMULATOR_CAMERA_EMULATED_CAMERA_DEVICE_H */