/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSource"
#include <utils/Log.h>

#include <OMX_Component.h>
#include <binder/IPCThreadState.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <gui/Surface.h>
#include <utils/String8.h>
#include <cutils/properties.h>

namespace android {

static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;

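// CameraSourceListener forwards camera notifications and frame callbacks to
// the owning CameraSource. It holds only a weak reference (wp<>) to the
// source, so the listener registered with the camera does not keep the
// CameraSource alive after its clients have released it.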
struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr,
                          camera_frame_metadata_t *metadata);

    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    wp<CameraSource> mSource;

    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};

CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}

CameraSourceListener::~CameraSourceListener() {
}

void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
    ALOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}

void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
                                    camera_frame_metadata_t *metadata) {
    ALOGV("postData(%d, ptr:%p, size:%d)",
         msgType, dataPtr->pointer(), dataPtr->size());

    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
        source->dataCallback(msgType, dataPtr);
    }
}

void CameraSourceListener::postDataTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {

    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
    }
}

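// Map a CameraParameters pixel format string (as reported under
// KEY_VIDEO_FRAME_FORMAT) to the corresponding OMX color format constant.
// Aborts via CHECK if the format is not one of the recognized values.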
static int32_t getColorFormat(const char* colorFormat) {
    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
        return OMX_COLOR_FormatYUV420Planar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
        return OMX_COLOR_FormatYUV422SemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
        return OMX_COLOR_FormatYUV420SemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
        return OMX_COLOR_FormatYCbYCr;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
        return OMX_COLOR_Format16bitRGB565;
    }

    if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) {
        return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE)) {
        return OMX_COLOR_FormatAndroidOpaque;
    }

    ALOGE("Unknown color format (%s), please add it to "
         "CameraSource::getColorFormat", colorFormat);

    CHECK(!"Unknown color format");
    return -1;  // Not reached; silences the missing-return warning.
}

CameraSource *CameraSource::Create(const String16 &clientName) {
    Size size;
    size.width = -1;
    size.height = -1;

    sp<ICamera> camera;
    return new CameraSource(camera, NULL, 0, clientName, -1,
            size, -1, NULL, false);
}

// static
CameraSource *CameraSource::CreateFromCamera(
    const sp<ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    const String16& clientName,
    uid_t clientUid,
    Size videoSize,
    int32_t frameRate,
    const sp<IGraphicBufferProducer>& surface,
    bool storeMetaDataInVideoBuffers) {

    CameraSource *source = new CameraSource(camera, proxy, cameraId,
            clientName, clientUid, videoSize, frameRate, surface,
            storeMetaDataInVideoBuffers);
    return source;
}
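
// Typical use of the factory above (a sketch only; the variable names are
// illustrative, not part of this file): a recorder creates the source from
// the application's camera, verifies initCheck(), and then pulls frames
// through the MediaSource interface.
//
//     sp<CameraSource> source = CameraSource::CreateFromCamera(
//             camera, proxy, cameraId, clientName, clientUid,
//             videoSize, 30 /* frameRate */, bufferProducer,
//             true /* storeMetaDataInVideoBuffers */);
//     if (source->initCheck() == OK) {
//         source->start(NULL /* meta */);
//         MediaBuffer *buffer;
//         while (source->read(&buffer, NULL) == OK) {
//             // Encode the buffer; buffer->release() eventually returns the
//             // frame to the camera via signalBufferReturned().
//         }
//         // When done, the source is stopped and the camera released
//         // (see reset() below).
//     }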

CameraSource::CameraSource(
    const sp<ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    const String16& clientName,
    uid_t clientUid,
    Size videoSize,
    int32_t frameRate,
    const sp<IGraphicBufferProducer>& surface,
    bool storeMetaDataInVideoBuffers)
    : mCameraFlags(0),
      mNumInputBuffers(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mNumFramesEncoded(0),
      mTimeBetweenFrameCaptureUs(0),
      mFirstFrameTimeUs(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false) {
    mVideoSize.width  = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, proxy, cameraId,
                    clientName, clientUid,
                    videoSize, frameRate,
                    storeMetaDataInVideoBuffers);
    if (mInitCheck != OK) releaseCamera();
}

status_t CameraSource::initCheck() const {
    return mInitCheck;
}

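/*
 * Establish access to a camera for recording. If no ICamera is supplied,
 * connect to the camera identified by cameraId ourselves (a "cold" camera);
 * otherwise wrap the application's remote camera and record through its
 * ICameraRecordingProxy (a "hot" camera, flagged with FLAGS_HOT_CAMERA).
 * In both cases the camera is locked for our exclusive use.
 * @return OK if the camera is available, -EBUSY otherwise.
 */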
status_t CameraSource::isCameraAvailable(
    const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId, const String16& clientName, uid_t clientUid) {

    if (camera == 0) {
        mCamera = Camera::connect(cameraId, clientName, clientUid);
        if (mCamera == 0) return -EBUSY;
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        // We get the proxy from Camera, not ICamera. We need to get the proxy
        // to the remote Camera owned by the application. Here mCamera is a
        // local Camera object created by us. We cannot use the proxy from
        // mCamera here.
        mCamera = Camera::create(camera);
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;
        mCameraFlags |= FLAGS_HOT_CAMERA;
        mDeathNotifier = new DeathNotifier();
        // isBinderAlive needs linkToDeath to work.
        mCameraRecordingProxy->asBinder()->linkToDeath(mDeathNotifier);
    }

    mCamera->lock();

    return OK;
}

/*
 * Check whether the requested video width and height match one
 * of the supported sizes.
 * @param width the video frame width in pixels
 * @param height the video frame height in pixels
 * @param supportedSizes the vector of sizes that we check against
 * @return true if the dimension (width and height) is supported.
 */
static bool isVideoSizeSupported(
    int32_t width, int32_t height,
    const Vector<Size>& supportedSizes) {

    ALOGV("isVideoSizeSupported");
    for (size_t i = 0; i < supportedSizes.size(); ++i) {
        if (width  == supportedSizes[i].width &&
            height == supportedSizes[i].height) {
            return true;
        }
    }
    return false;
}

/*
 * If the preview and video outputs are separate, we only set the
 * video size; applications should set the preview size to some
 * proper value, and the recording framework will not change the
 * preview size. Otherwise, if the video and preview outputs are
 * the same, we need to set the preview size to be the same as the
 * requested video size.
 */
/*
 * Query the camera to retrieve the supported video frame sizes
 * and also to see whether CameraParameters::setVideoSize()
 * is supported or not.
 * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether method
 *      CameraParameters::setVideoSize() is supported or not.
 * @param sizes returns the vector of Size objects for the
 *      supported video frame sizes advertised by the camera.
 */
static void getSupportedVideoSizes(
    const CameraParameters& params,
    bool *isSetVideoSizeSupported,
    Vector<Size>& sizes) {

    *isSetVideoSizeSupported = true;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        ALOGD("Camera does not support setVideoSize()");
        params.getSupportedPreviewSizes(sizes);
        *isSetVideoSizeSupported = false;
    }
}

/*
 * Check whether the camera advertises a color format that we support.
 * @param params CameraParameters to retrieve the information
 * @return OK if no error.
 */
status_t CameraSource::isCameraColorFormatSupported(
        const CameraParameters& params) {
    mColorFormat = getColorFormat(params.get(
            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
    if (mColorFormat == -1) {
        return BAD_VALUE;
    }
    return OK;
}

/*
 * Configure the camera to use the requested video size
 * (width and height) and/or frame rate. If both width and
 * height are -1, configuration of the video size is skipped.
 * If frameRate is -1, configuration of the frame rate
 * is skipped. Skipping the configuration allows one to
 * use the current camera setting without the need to
 * actually know the specific values (see Create() method).
 *
 * @param params the CameraParameters to be configured
 * @param width the target video frame width in pixels
 * @param height the target video frame height in pixels
 * @param frameRate the target frame rate in frames per second.
 * @return OK if no error.
 */
status_t CameraSource::configureCamera(
        CameraParameters* params,
        int32_t width, int32_t height,
        int32_t frameRate) {
    ALOGV("configureCamera");
    Vector<Size> sizes;
    bool isSetVideoSizeSupportedByCamera = true;
    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
    bool isCameraParamChanged = false;
    if (width != -1 && height != -1) {
        if (!isVideoSizeSupported(width, height, sizes)) {
            ALOGE("Video dimension (%dx%d) is unsupported", width, height);
            return BAD_VALUE;
        }
        if (isSetVideoSizeSupportedByCamera) {
            params->setVideoSize(width, height);
        } else {
            params->setPreviewSize(width, height);
        }
        isCameraParamChanged = true;
    } else if ((width == -1 && height != -1) ||
               (width != -1 && height == -1)) {
        // If one and only one of the width and height is -1
        // we reject such a request.
        ALOGE("Requested video size (%dx%d) is not supported", width, height);
        return BAD_VALUE;
    } else {  // width == -1 && height == -1
        // Do not configure the camera.
        // Use the current width and height value setting from the camera.
    }

    if (frameRate != -1) {
        CHECK(frameRate > 0 && frameRate <= 120);
        const char* supportedFrameRates =
                params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
        CHECK(supportedFrameRates != NULL);
        ALOGV("Supported frame rates: %s", supportedFrameRates);
        char buf[4];
        snprintf(buf, 4, "%d", frameRate);
        if (strstr(supportedFrameRates, buf) == NULL) {
            ALOGE("Requested frame rate (%d) is not supported: %s",
                frameRate, supportedFrameRates);
            return BAD_VALUE;
        }

        // The frame rate is supported, set the camera to the requested value.
        params->setPreviewFrameRate(frameRate);
        isCameraParamChanged = true;
    } else {  // frameRate == -1
        // Do not configure the camera.
        // Use the current frame rate value setting from the camera
    }

    if (isCameraParamChanged) {
        // Either frame rate or frame size needs to be changed.
        String8 s = params->flatten();
        if (OK != mCamera->setParameters(s)) {
            ALOGE("Could not change settings."
                 " Someone else is using camera %p?", mCamera.get());
            return -EBUSY;
        }
    }
    return OK;
}

/*
 * Check whether the requested video frame size has been successfully
 * configured. If both width and height are -1, the check is performed
 * against the current width and height setting of the camera.
 *
 * @param params CameraParameters to retrieve the information
 * @param width the target video frame width in pixels to check against
 * @param height the target video frame height in pixels to check against
 * @return OK if no error
 */
status_t CameraSource::checkVideoSize(
        const CameraParameters& params,
        int32_t width, int32_t height) {

    ALOGV("checkVideoSize");
    // The actual video size is the same as the preview size
    // if the camera hal does not support separate video and
    // preview output. In this case, we retrieve the video
    // size from preview.
    int32_t frameWidthActual = -1;
    int32_t frameHeightActual = -1;
    Vector<Size> sizes;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        // video size is the same as preview size
        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
    } else {
        // video size may not be the same as preview
        params.getVideoSize(&frameWidthActual, &frameHeightActual);
    }
    if (frameWidthActual < 0 || frameHeightActual < 0) {
        ALOGE("Failed to retrieve video frame size (%dx%d)",
                frameWidthActual, frameHeightActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame size against the target/requested
    // video frame size.
    if (width != -1 && height != -1) {
        if (frameWidthActual != width || frameHeightActual != height) {
            ALOGE("Failed to set video frame size to %dx%d. "
                    "The actual video size is %dx%d ", width, height,
                    frameWidthActual, frameHeightActual);
            return UNKNOWN_ERROR;
        }
    }

    // Good now.
    mVideoSize.width = frameWidthActual;
    mVideoSize.height = frameHeightActual;
    return OK;
}

/*
 * Check whether the requested frame rate has been successfully configured.
 * If the target frameRate is -1, the check is performed against the current
 * frame rate setting of the camera.
 *
 * @param params CameraParameters to retrieve the information
 * @param frameRate the target video frame rate to check against
 * @return OK if no error.
 */
status_t CameraSource::checkFrameRate(
        const CameraParameters& params,
        int32_t frameRate) {

    ALOGV("checkFrameRate");
    int32_t frameRateActual = params.getPreviewFrameRate();
    if (frameRateActual < 0) {
        ALOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame rate against the target/requested
    // video frame rate.
    if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
        ALOGE("Failed to set preview frame rate to %d fps. The actual "
                "frame rate is %d", frameRate, frameRateActual);
        return UNKNOWN_ERROR;
    }

    // Good now.
    mVideoFrameRate = frameRateActual;
    return OK;
}

/*
 * Initialize the CameraSource so that it becomes
 * ready for providing the video input streams as requested.
 * @param camera the camera object used for the video source
 * @param cameraId if camera == 0, use the camera with this id
 *      as the video source
 * @param videoSize the target video frame size. If both
 *      width and height in videoSize are -1, use the current
 *      width and height settings of the camera
 * @param frameRate the target frame rate in frames per second.
 *      If it is -1, use the current camera frame rate setting.
 * @param storeMetaDataInVideoBuffers request to store meta
 *      data or real YUV data in video buffers. Request to
 *      store meta data in video buffers may not be honored
 *      if the source does not support this feature.
 *
 * @return OK if no error.
 */
status_t CameraSource::init(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        Size videoSize,
        int32_t frameRate,
        bool storeMetaDataInVideoBuffers) {

    ALOGV("init");
    status_t err = OK;
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid,
                               videoSize, frameRate,
                               storeMetaDataInVideoBuffers);
    IPCThreadState::self()->restoreCallingIdentity(token);
    return err;
}

status_t CameraSource::initWithCameraAccess(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        Size videoSize,
        int32_t frameRate,
        bool storeMetaDataInVideoBuffers) {
    ALOGV("initWithCameraAccess");
    status_t err = OK;

    if ((err = isCameraAvailable(camera, proxy, cameraId,
            clientName, clientUid)) != OK) {
        ALOGE("Camera connection could not be established.");
        return err;
    }
    CameraParameters params(mCamera->getParameters());
    if ((err = isCameraColorFormatSupported(params)) != OK) {
        return err;
    }

    // Set the camera to use the requested video frame size
    // and/or frame rate.
    if ((err = configureCamera(&params,
                    videoSize.width, videoSize.height,
                    frameRate))) {
        return err;
    }

    // Check on video frame size and frame rate.
    CameraParameters newCameraParams(mCamera->getParameters());
    if ((err = checkVideoSize(newCameraParams,
                videoSize.width, videoSize.height)) != OK) {
        return err;
    }
    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
        return err;
    }

    // Set the preview display. Skip this if mSurface is null because
    // the application may already have set a preview surface on the camera.
    if (mSurface != NULL) {
        // This CHECK is safe, since we just passed the lock/unlock
        // check earlier by calling mCamera->setParameters().
        CHECK_EQ((status_t)OK, mCamera->setPreviewTarget(mSurface));
    }

    // By default, do not store metadata in video buffers
    mIsMetaDataStoredInVideoBuffers = false;
    mCamera->storeMetaDataInBuffers(false);
    if (storeMetaDataInVideoBuffers) {
        if (OK == mCamera->storeMetaDataInBuffers(true)) {
            mIsMetaDataStoredInVideoBuffers = true;
        }
    }

    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
    if (glitchDurationUs > mGlitchDurationThresholdUs) {
        mGlitchDurationThresholdUs = glitchDurationUs;
    }

    // XXX: query camera for the stride and slice height
    // when the capability becomes available.
    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
    mMeta->setInt32(kKeyColorFormat, mColorFormat);
    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
    mMeta->setInt32(kKeyStride,      mVideoSize.width);
    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
    mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
    return OK;
}

CameraSource::~CameraSource() {
    if (mStarted) {
        reset();
    } else if (mInitCheck == OK) {
        // The camera was initialized, but because start() was never called,
        // the lock on the Camera has never been released. Make sure the
        // lock is released in this case.
        releaseCamera();
    }
}

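/*
 * Ask the camera to start delivering recording frames. For a "hot" camera
 * the request goes through the application's ICameraRecordingProxy; for a
 * camera we connected to ourselves, we register a CameraSourceListener and
 * call startRecording() directly.
 */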
void CameraSource::startCameraRecording() {
    ALOGV("startCameraRecording");
    // Reset the identity to the current thread because media server owns the
    // camera and recording is started by the applications. The applications
    // will connect to the camera in ICameraRecordingProxy::startRecording.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    if (mNumInputBuffers > 0) {
        status_t err = mCamera->sendCommand(
            CAMERA_CMD_SET_VIDEO_BUFFER_COUNT, mNumInputBuffers, 0);

        // This could happen for CameraHAL1 clients; thus the failure is
        // not a fatal error
        if (err != OK) {
            ALOGW("Failed to set video buffer count to %d due to %d",
                mNumInputBuffers, err);
        }
    }

    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        mCamera->unlock();
        mCamera.clear();
        CHECK_EQ((status_t)OK,
            mCameraRecordingProxy->startRecording(new ProxyListener(this)));
    } else {
        mCamera->setListener(new CameraSourceListener(this));
        mCamera->startRecording();
        CHECK(mCamera->recordingEnabled());
    }
    IPCThreadState::self()->restoreCallingIdentity(token);
}

status_t CameraSource::start(MetaData *meta) {
    ALOGV("start");
    CHECK(!mStarted);
    if (mInitCheck != OK) {
        ALOGE("CameraSource is not initialized yet");
        return mInitCheck;
    }

    char value[PROPERTY_VALUE_MAX];
    if (property_get("media.stagefright.record-stats", value, NULL)
        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
        mCollectStats = true;
    }

    mStartTimeUs = 0;
    mNumInputBuffers = 0;
    if (meta) {
        int64_t startTimeUs;
        if (meta->findInt64(kKeyTime, &startTimeUs)) {
            mStartTimeUs = startTimeUs;
        }

        int32_t nBuffers;
        if (meta->findInt32(kKeyNumBuffers, &nBuffers)) {
            CHECK_GT(nBuffers, 0);
            mNumInputBuffers = nBuffers;
        }
    }

    startCameraRecording();

    mStarted = true;
    return OK;
}

void CameraSource::stopCameraRecording() {
    ALOGV("stopCameraRecording");
    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        mCameraRecordingProxy->stopRecording();
    } else {
        mCamera->setListener(NULL);
        mCamera->stopRecording();
    }
}

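/*
 * Undo the camera setup performed in isCameraAvailable(): stop the preview
 * and disconnect if we opened the camera ourselves, unlock it, and drop the
 * recording proxy together with its death notifier.
 */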
void CameraSource::releaseCamera() {
    ALOGV("releaseCamera");
    if (mCamera != 0) {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
            ALOGV("Camera was cold when we started, stopping preview");
            mCamera->stopPreview();
            mCamera->disconnect();
        }
        mCamera->unlock();
        mCamera.clear();
        mCamera = 0;
        IPCThreadState::self()->restoreCallingIdentity(token);
    }
    if (mCameraRecordingProxy != 0) {
        mCameraRecordingProxy->asBinder()->unlinkToDeath(mDeathNotifier);
        mCameraRecordingProxy.clear();
    }
    mCameraFlags = 0;
}

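/*
 * Stop the video source: drop any frames still queued for the encoder,
 * wait (with a timeout) for buffers the encoder is still holding, stop
 * the camera recording, and release the camera. Called from the destructor
 * when the source was started.
 */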
status_t CameraSource::reset() {
    ALOGD("reset: E");
    Mutex::Autolock autoLock(mLock);
    mStarted = false;
    mFrameAvailableCondition.signal();

    int64_t token;
    bool isTokenValid = false;
    if (mCamera != 0) {
        token = IPCThreadState::self()->clearCallingIdentity();
        isTokenValid = true;
    }
    releaseQueuedFrames();
    while (!mFramesBeingEncoded.empty()) {
        if (NO_ERROR !=
            mFrameCompleteCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
            ALOGW("Timed out waiting for outstanding frames being encoded: %d",
                mFramesBeingEncoded.size());
        }
    }
    stopCameraRecording();
    releaseCamera();
    if (isTokenValid) {
        IPCThreadState::self()->restoreCallingIdentity(token);
    }

    if (mCollectStats) {
        ALOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
                mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                mLastFrameTimestampUs - mFirstFrameTimeUs);
    }

    if (mNumGlitches > 0) {
        ALOGW("%d long delays between neighboring video frames", mNumGlitches);
    }

    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    ALOGD("reset: X");
    return OK;
}

void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    ALOGV("releaseRecordingFrame");
    if (mCameraRecordingProxy != NULL) {
        mCameraRecordingProxy->releaseRecordingFrame(frame);
    } else if (mCamera != NULL) {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        mCamera->releaseRecordingFrame(frame);
        IPCThreadState::self()->restoreCallingIdentity(token);
    }
}

void CameraSource::releaseQueuedFrames() {
    List<sp<IMemory> >::iterator it;
    while (!mFramesReceived.empty()) {
        it = mFramesReceived.begin();
        releaseRecordingFrame(*it);
        mFramesReceived.erase(it);
        ++mNumFramesDropped;
    }
}

sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}

void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    releaseRecordingFrame(frame);
}

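// Called when a MediaBuffer handed out by read() is released by its consumer
// (typically the encoder). Find the matching IMemory in mFramesBeingEncoded,
// return it to the camera, and wake up reset() if it is waiting for
// outstanding frames.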
void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    ALOGV("signalBufferReturned: %p", buffer->data());
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        if ((*it)->pointer() ==  buffer->data()) {
            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->release();
            mFrameCompleteCondition.signal();
            return;
        }
    }
    CHECK(!"signalBufferReturned: bogus buffer");
}

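// Block until a recorded frame is available (or the wait times out, or the
// recording proxy dies), then hand the frame to the caller as a MediaBuffer
// that points directly into the camera-provided IMemory (no copy). The frame
// stays in mFramesBeingEncoded until the buffer comes back through
// signalBufferReturned().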
status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    ALOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted && mFramesReceived.empty()) {
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                if (mCameraRecordingProxy != 0 &&
                    !mCameraRecordingProxy->asBinder()->isBinderAlive()) {
                    ALOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }
                ALOGW("Timed out waiting for incoming camera video frames: %lld us",
                    mLastFrameTimestampUs);
            }
        }
        if (!mStarted) {
            return OK;
        }
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        mFramesBeingEncoded.push_back(frame);
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}

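// Frame delivery path: called for every recording frame with its capture
// timestamp (already converted to microseconds by the listener/proxy).
// Frames that arrive before the requested start time are dropped, glitch
// statistics are updated, and accepted frames are queued with a timestamp
// rebased so that the first frame carries the initial start delay.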
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %lld/%lld us", timestampUs, mStartTimeUs);
        releaseOneRecordingFrame(data);
        return;
    }

    if (mNumFramesReceived > 0) {
        CHECK(timestampUs > mLastFrameTimestampUs);
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %lld, current time stamp: %lld",
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}

bool CameraSource::isMetaDataStoredInVideoBuffers() const {
    ALOGV("isMetaDataStoredInVideoBuffers");
    return mIsMetaDataStoredInVideoBuffers;
}

CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
    mSource = source;
}

void CameraSource::ProxyListener::dataCallbackTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
    mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
}

void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who) {
    ALOGI("Camera recording proxy died");
}

}  // namespace android