/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <inttypes.h>

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSource"
#include <utils/Log.h>

#include <OMX_Component.h>
#include <binder/IPCThreadState.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <gui/Surface.h>
#include <utils/String8.h>
#include <cutils/properties.h>

#if LOG_NDEBUG
#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
#else
#define UNUSED_UNLESS_VERBOSE(x)
#endif

namespace android {

static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;

struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr,
                          camera_frame_metadata_t *metadata);

    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    wp<CameraSource> mSource;

    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};

CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}

CameraSourceListener::~CameraSourceListener() {
}

void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
    UNUSED_UNLESS_VERBOSE(msgType);
    UNUSED_UNLESS_VERBOSE(ext1);
    UNUSED_UNLESS_VERBOSE(ext2);
    ALOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}

void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
                                    camera_frame_metadata_t * /* metadata */) {
    ALOGV("postData(%d, ptr:%p, size:%zu)",
         msgType, dataPtr->pointer(), dataPtr->size());

    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
        source->dataCallback(msgType, dataPtr);
    }
}

void CameraSourceListener::postDataTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {

    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
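        // The camera reports the timestamp in nanoseconds (nsecs_t);
        // CameraSource::dataCallbackTimestamp() expects microseconds,
        // hence the division by 1000 below.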
        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
    }
}

static int32_t getColorFormat(const char* colorFormat) {
    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
        return OMX_COLOR_FormatYUV420Planar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
        return OMX_COLOR_FormatYUV422SemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
        return OMX_COLOR_FormatYUV420SemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
        return OMX_COLOR_FormatYCbYCr;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
        return OMX_COLOR_Format16bitRGB565;
    }

    if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) {
        return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE)) {
        return OMX_COLOR_FormatAndroidOpaque;
    }

    ALOGE("Unknown color format (%s), please add it to "
         "CameraSource::getColorFormat", colorFormat);

    CHECK(!"Unknown color format");
    return -1;
}

CameraSource *CameraSource::Create(const String16 &clientName) {
    Size size;
    size.width = -1;
    size.height = -1;

    sp<ICamera> camera;
    return new CameraSource(camera, NULL, 0, clientName, -1,
            size, -1, NULL, false);
}

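// Illustrative usage sketch (not part of this file; the caller-side variables
// such as camera, proxy and bufferProducer are assumed to already exist):
//
//     sp<CameraSource> source = CameraSource::CreateFromCamera(
//             camera, proxy, cameraId, clientName, clientUid,
//             videoSize, 30 /* fps */, bufferProducer,
//             true /* storeMetaDataInVideoBuffers */);
//     if (source == NULL || source->initCheck() != OK) {
//         // handle initialization failure
//     }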
// static
CameraSource *CameraSource::CreateFromCamera(
    const sp<ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    const String16& clientName,
    uid_t clientUid,
    Size videoSize,
    int32_t frameRate,
    const sp<IGraphicBufferProducer>& surface,
    bool storeMetaDataInVideoBuffers) {

    CameraSource *source = new CameraSource(camera, proxy, cameraId,
            clientName, clientUid, videoSize, frameRate, surface,
            storeMetaDataInVideoBuffers);
    return source;
}

CameraSource::CameraSource(
    const sp<ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    const String16& clientName,
    uid_t clientUid,
    Size videoSize,
    int32_t frameRate,
    const sp<IGraphicBufferProducer>& surface,
    bool storeMetaDataInVideoBuffers)
    : mCameraFlags(0),
      mNumInputBuffers(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mNumFramesEncoded(0),
      mTimeBetweenFrameCaptureUs(0),
      mFirstFrameTimeUs(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false) {
    mVideoSize.width  = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, proxy, cameraId,
                    clientName, clientUid,
                    videoSize, frameRate,
                    storeMetaDataInVideoBuffers);
    if (mInitCheck != OK) releaseCamera();
}

status_t CameraSource::initCheck() const {
    return mInitCheck;
}

status_t CameraSource::isCameraAvailable(
    const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId, const String16& clientName, uid_t clientUid) {

    if (camera == 0) {
        mCamera = Camera::connect(cameraId, clientName, clientUid);
        if (mCamera == 0) return -EBUSY;
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        // We get the proxy from Camera, not ICamera. We need to get the proxy
        // to the remote Camera owned by the application. Here mCamera is a
        // local Camera object created by us. We cannot use the proxy from
        // mCamera here.
        mCamera = Camera::create(camera);
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;
        mCameraFlags |= FLAGS_HOT_CAMERA;
        mDeathNotifier = new DeathNotifier();
        // isBinderAlive needs linkToDeath to work.
        mCameraRecordingProxy->asBinder()->linkToDeath(mDeathNotifier);
    }

    mCamera->lock();

    return OK;
}


/*
 * Check whether the requested video width and height match one
 * of the supported sizes.
 * @param width the video frame width in pixels
 * @param height the video frame height in pixels
 * @param supportedSizes the vector of sizes that we check against
 * @return true if the dimension (width and height) is supported.
 */
static bool isVideoSizeSupported(
    int32_t width, int32_t height,
    const Vector<Size>& supportedSizes) {

    ALOGV("isVideoSizeSupported");
    for (size_t i = 0; i < supportedSizes.size(); ++i) {
        if (width  == supportedSizes[i].width &&
            height == supportedSizes[i].height) {
            return true;
        }
    }
    return false;
}

/*
 * If the preview and video output are separate, we only set the
 * video size; applications should set the preview size to some
 * proper value, and the recording framework will not change the
 * preview size. Otherwise, if the video and preview output are
 * the same, we need to set the preview size to be the same as
 * the requested video size.
 */
/*
 * Query the camera to retrieve the supported video frame sizes
 * and also to see whether CameraParameters::setVideoSize()
 * is supported or not.
 * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether method
 *      CameraParameters::setVideoSize() is supported or not.
 * @param sizes returns the vector of Size objects for the
 *      supported video frame sizes advertised by the camera.
 */
static void getSupportedVideoSizes(
    const CameraParameters& params,
    bool *isSetVideoSizeSupported,
    Vector<Size>& sizes) {

    *isSetVideoSizeSupported = true;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        ALOGD("Camera does not support setVideoSize()");
        params.getSupportedPreviewSizes(sizes);
        *isSetVideoSizeSupported = false;
    }
}

/*
 * Check whether the camera has the supported color format
 * @param params CameraParameters to retrieve the information
 * @return OK if no error.
 */
status_t CameraSource::isCameraColorFormatSupported(
        const CameraParameters& params) {
    mColorFormat = getColorFormat(params.get(
            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
    if (mColorFormat == -1) {
        return BAD_VALUE;
    }
    return OK;
}

/*
 * Configure the camera to use the requested video size
 * (width and height) and/or frame rate. If both width and
 * height are -1, configuration on the video size is skipped.
 * If frameRate is -1, configuration on the frame rate
 * is skipped. Skipping the configuration allows one to
 * use the current camera setting without the need to
 * actually know the specific values (see Create() method).
 *
 * @param params the CameraParameters to be configured
 * @param width the target video frame width in pixels
 * @param height the target video frame height in pixels
 * @param frameRate the target frame rate in frames per second.
 * @return OK if no error.
 */
status_t CameraSource::configureCamera(
        CameraParameters* params,
        int32_t width, int32_t height,
        int32_t frameRate) {
    ALOGV("configureCamera");
    Vector<Size> sizes;
    bool isSetVideoSizeSupportedByCamera = true;
    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
    bool isCameraParamChanged = false;
    if (width != -1 && height != -1) {
        if (!isVideoSizeSupported(width, height, sizes)) {
            ALOGE("Video dimension (%dx%d) is unsupported", width, height);
            return BAD_VALUE;
        }
        if (isSetVideoSizeSupportedByCamera) {
            params->setVideoSize(width, height);
        } else {
            params->setPreviewSize(width, height);
        }
        isCameraParamChanged = true;
    } else if ((width == -1 && height != -1) ||
               (width != -1 && height == -1)) {
        // If one and only one of the width and height is -1
        // we reject such a request.
        ALOGE("Requested video size (%dx%d) is not supported", width, height);
        return BAD_VALUE;
    } else {  // width == -1 && height == -1
        // Do not configure the camera.
        // Use the current width and height value setting from the camera.
    }

    if (frameRate != -1) {
        CHECK(frameRate > 0 && frameRate <= 120);
        const char* supportedFrameRates =
                params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
        CHECK(supportedFrameRates != NULL);
        ALOGV("Supported frame rates: %s", supportedFrameRates);
        char buf[4];
        snprintf(buf, sizeof(buf), "%d", frameRate);
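        // Note: this is a plain substring match against the comma-separated
        // list of supported preview frame rates advertised by the camera.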
        if (strstr(supportedFrameRates, buf) == NULL) {
            ALOGE("Requested frame rate (%d) is not supported: %s",
                frameRate, supportedFrameRates);
            return BAD_VALUE;
        }

        // The frame rate is supported, set the camera to the requested value.
        params->setPreviewFrameRate(frameRate);
        isCameraParamChanged = true;
    } else {  // frameRate == -1
        // Do not configure the camera.
        // Use the current frame rate value setting from the camera.
    }

    if (isCameraParamChanged) {
        // Either frame rate or frame size needs to be changed.
        String8 s = params->flatten();
        if (OK != mCamera->setParameters(s)) {
            ALOGE("Could not change settings."
                 " Someone else is using camera %p?", mCamera.get());
            return -EBUSY;
        }
    }
    return OK;
}

/*
 * Check whether the requested video frame size
 * has been successfully configured or not. If both width and height
 * are -1, the check is performed on the current width and height
 * value settings.
 *
 * @param params CameraParameters to retrieve the information
 * @param width the target video frame width in pixels to check against
 * @param height the target video frame height in pixels to check against
 * @return OK if no error
 */
status_t CameraSource::checkVideoSize(
        const CameraParameters& params,
        int32_t width, int32_t height) {

    ALOGV("checkVideoSize");
    // The actual video size is the same as the preview size
    // if the camera hal does not support separate video and
    // preview output. In this case, we retrieve the video
    // size from preview.
    int32_t frameWidthActual = -1;
    int32_t frameHeightActual = -1;
    Vector<Size> sizes;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        // video size is the same as preview size
        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
    } else {
        // video size may not be the same as preview
        params.getVideoSize(&frameWidthActual, &frameHeightActual);
    }
    if (frameWidthActual < 0 || frameHeightActual < 0) {
        ALOGE("Failed to retrieve video frame size (%dx%d)",
                frameWidthActual, frameHeightActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame size against the target/requested
    // video frame size.
    if (width != -1 && height != -1) {
        if (frameWidthActual != width || frameHeightActual != height) {
            ALOGE("Failed to set video frame size to %dx%d. "
                    "The actual video size is %dx%d ", width, height,
                    frameWidthActual, frameHeightActual);
            return UNKNOWN_ERROR;
        }
    }

    // Good now.
    mVideoSize.width = frameWidthActual;
    mVideoSize.height = frameHeightActual;
    return OK;
}

/*
 * Check whether the requested frame rate has been successfully configured
 * or not. If the target frameRate is -1, the check is performed on the
 * current frame rate value setting.
 *
 * @param params CameraParameters to retrieve the information
 * @param frameRate the target video frame rate to check against
 * @return OK if no error.
 */
status_t CameraSource::checkFrameRate(
        const CameraParameters& params,
        int32_t frameRate) {

    ALOGV("checkFrameRate");
    int32_t frameRateActual = params.getPreviewFrameRate();
    if (frameRateActual < 0) {
        ALOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame rate against the target/requested
    // video frame rate.
    if (frameRate != -1 && frameRateActual != frameRate) {
        ALOGE("Failed to set preview frame rate to %d fps. The actual "
                "frame rate is %d", frameRate, frameRateActual);
        return UNKNOWN_ERROR;
    }

    // Good now.
    mVideoFrameRate = frameRateActual;
    return OK;
}

/*
 * Initialize the CameraSource so that it becomes
 * ready for providing the video input streams as requested.
 * @param camera the camera object used for the video source
 * @param cameraId if camera == 0, use camera with this id
 *      as the video source
 * @param videoSize the target video frame size. If both
 *      width and height in videoSize are -1, use the current
 *      width and height settings from the camera
 * @param frameRate the target frame rate in frames per second.
 *      If it is -1, use the current camera frame rate setting.
 * @param storeMetaDataInVideoBuffers request to store meta
 *      data or real YUV data in video buffers. Request to
 *      store meta data in video buffers may not be honored
 *      if the source does not support this feature.
 *
 * @return OK if no error.
 */
status_t CameraSource::init(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        Size videoSize,
        int32_t frameRate,
        bool storeMetaDataInVideoBuffers) {

    ALOGV("init");
    status_t err = OK;
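    // Talk to the camera service using the media server's own binder
    // identity; the caller's identity is restored once initialization
    // is done.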
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid,
                               videoSize, frameRate,
                               storeMetaDataInVideoBuffers);
    IPCThreadState::self()->restoreCallingIdentity(token);
    return err;
}

status_t CameraSource::initWithCameraAccess(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        Size videoSize,
        int32_t frameRate,
        bool storeMetaDataInVideoBuffers) {
    ALOGV("initWithCameraAccess");
    status_t err = OK;

    if ((err = isCameraAvailable(camera, proxy, cameraId,
            clientName, clientUid)) != OK) {
        ALOGE("Camera connection could not be established.");
        return err;
    }
    CameraParameters params(mCamera->getParameters());
    if ((err = isCameraColorFormatSupported(params)) != OK) {
        return err;
    }

    // Set the camera to use the requested video frame size
    // and/or frame rate.
    if ((err = configureCamera(&params,
                    videoSize.width, videoSize.height,
                    frameRate)) != OK) {
        return err;
    }

    // Check on video frame size and frame rate.
    CameraParameters newCameraParams(mCamera->getParameters());
    if ((err = checkVideoSize(newCameraParams,
                videoSize.width, videoSize.height)) != OK) {
        return err;
    }
    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
        return err;
    }

    // Set the preview display. Skip this if mSurface is null because
    // applications may have already set a surface for the camera.
    if (mSurface != NULL) {
        // This CHECK is good, since we just passed the lock/unlock
        // check earlier by calling mCamera->setParameters().
        CHECK_EQ((status_t)OK, mCamera->setPreviewTarget(mSurface));
    }

    // By default, do not store metadata in video buffers
    mIsMetaDataStoredInVideoBuffers = false;
    mCamera->storeMetaDataInBuffers(false);
    if (storeMetaDataInVideoBuffers) {
        if (OK == mCamera->storeMetaDataInBuffers(true)) {
            mIsMetaDataStoredInVideoBuffers = true;
        }
    }

    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
    if (glitchDurationUs > mGlitchDurationThresholdUs) {
        mGlitchDurationThresholdUs = glitchDurationUs;
    }

    // XXX: query camera for the stride and slice height
    // when the capability becomes available.
    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
    mMeta->setInt32(kKeyColorFormat, mColorFormat);
    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
    mMeta->setInt32(kKeyStride,      mVideoSize.width);
    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
    mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
    return OK;
}

CameraSource::~CameraSource() {
    if (mStarted) {
        reset();
    } else if (mInitCheck == OK) {
        // Camera is initialized but start() was never called, so the
        // lock on the Camera was never released. Make sure the Camera's
        // lock is released in this case.
        releaseCamera();
    }
}

status_t CameraSource::startCameraRecording() {
    ALOGV("startCameraRecording");
    // Reset the identity to the current thread because media server owns the
    // camera and recording is started by the applications. The applications
    // will connect to the camera in ICameraRecordingProxy::startRecording.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    status_t err;
    if (mNumInputBuffers > 0) {
        err = mCamera->sendCommand(
            CAMERA_CMD_SET_VIDEO_BUFFER_COUNT, mNumInputBuffers, 0);

        // This could happen for CameraHAL1 clients; thus the failure is
        // not a fatal error
        if (err != OK) {
            ALOGW("Failed to set video buffer count to %d due to %d",
                mNumInputBuffers, err);
        }
    }

    err = OK;
    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        mCamera->unlock();
        mCamera.clear();
        if ((err = mCameraRecordingProxy->startRecording(
                new ProxyListener(this))) != OK) {
            ALOGE("Failed to start recording, received error: %s (%d)",
                    strerror(-err), err);
        }
    } else {
        mCamera->setListener(new CameraSourceListener(this));
        mCamera->startRecording();
        if (!mCamera->recordingEnabled()) {
            err = -EINVAL;
            ALOGE("Failed to start recording");
        }
    }
    IPCThreadState::self()->restoreCallingIdentity(token);
    return err;
}

status_t CameraSource::start(MetaData *meta) {
    ALOGV("start");
    CHECK(!mStarted);
    if (mInitCheck != OK) {
        ALOGE("CameraSource is not initialized yet");
        return mInitCheck;
    }

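    // Collect per-session frame statistics (logged in reset()) when the
    // "media.stagefright.record-stats" system property is "1" or "true".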
    char value[PROPERTY_VALUE_MAX];
    if (property_get("media.stagefright.record-stats", value, NULL)
        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
        mCollectStats = true;
    }

    mStartTimeUs = 0;
    mNumInputBuffers = 0;
    if (meta) {
        int64_t startTimeUs;
        if (meta->findInt64(kKeyTime, &startTimeUs)) {
            mStartTimeUs = startTimeUs;
        }

        int32_t nBuffers;
        if (meta->findInt32(kKeyNumBuffers, &nBuffers)) {
            CHECK_GT(nBuffers, 0);
            mNumInputBuffers = nBuffers;
        }
    }

    status_t err;
    if ((err = startCameraRecording()) == OK) {
        mStarted = true;
    }

    return err;
}

void CameraSource::stopCameraRecording() {
    ALOGV("stopCameraRecording");
    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        mCameraRecordingProxy->stopRecording();
    } else {
        mCamera->setListener(NULL);
        mCamera->stopRecording();
    }
}

void CameraSource::releaseCamera() {
    ALOGV("releaseCamera");
    sp<Camera> camera;
    bool coldCamera = false;
    {
        Mutex::Autolock autoLock(mLock);
        // get a local ref and clear ref to mCamera now
        camera = mCamera;
        mCamera.clear();
        coldCamera = (mCameraFlags & FLAGS_HOT_CAMERA) == 0;
    }

    if (camera != 0) {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        if (coldCamera) {
            ALOGV("Camera was cold when we started, stopping preview");
            camera->stopPreview();
            camera->disconnect();
        }
        camera->unlock();
        IPCThreadState::self()->restoreCallingIdentity(token);
    }

    {
        Mutex::Autolock autoLock(mLock);
        if (mCameraRecordingProxy != 0) {
            mCameraRecordingProxy->asBinder()->unlinkToDeath(mDeathNotifier);
            mCameraRecordingProxy.clear();
        }
        mCameraFlags = 0;
    }
}

status_t CameraSource::reset() {
    ALOGD("reset: E");

    {
        Mutex::Autolock autoLock(mLock);
        mStarted = false;
        mFrameAvailableCondition.signal();

        int64_t token;
        bool isTokenValid = false;
        if (mCamera != 0) {
            token = IPCThreadState::self()->clearCallingIdentity();
            isTokenValid = true;
        }
        releaseQueuedFrames();
        while (!mFramesBeingEncoded.empty()) {
            if (NO_ERROR !=
                mFrameCompleteCondition.waitRelative(mLock,
                        mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                ALOGW("Timed out waiting for outstanding frames being encoded: %zu",
                    mFramesBeingEncoded.size());
            }
        }
        stopCameraRecording();
        if (isTokenValid) {
            IPCThreadState::self()->restoreCallingIdentity(token);
        }

        if (mCollectStats) {
            ALOGI("Frames received/encoded/dropped: %d/%d/%d in %" PRId64 " us",
                    mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                    mLastFrameTimestampUs - mFirstFrameTimeUs);
        }

        if (mNumGlitches > 0) {
            ALOGW("%d long delays between neighboring video frames", mNumGlitches);
        }

        CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    }

    releaseCamera();

    ALOGD("reset: X");
    return OK;
}

void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    ALOGV("releaseRecordingFrame");
    if (mCameraRecordingProxy != NULL) {
        mCameraRecordingProxy->releaseRecordingFrame(frame);
    } else if (mCamera != NULL) {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        mCamera->releaseRecordingFrame(frame);
        IPCThreadState::self()->restoreCallingIdentity(token);
    }
}

void CameraSource::releaseQueuedFrames() {
    List<sp<IMemory> >::iterator it;
    while (!mFramesReceived.empty()) {
        it = mFramesReceived.begin();
        releaseRecordingFrame(*it);
        mFramesReceived.erase(it);
        ++mNumFramesDropped;
    }
}

sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}

void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    releaseRecordingFrame(frame);
}

void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    ALOGV("signalBufferReturned: %p", buffer->data());
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        if ((*it)->pointer() == buffer->data()) {
            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->release();
            mFrameCompleteCondition.signal();
            return;
        }
    }
    CHECK(!"signalBufferReturned: bogus buffer");
}

status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    ALOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted && mFramesReceived.empty()) {
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                if (mCameraRecordingProxy != 0 &&
                    !mCameraRecordingProxy->asBinder()->isBinderAlive()) {
                    ALOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }
                ALOGW("Timed out waiting for incoming camera video frames: %" PRId64 " us",
                    mLastFrameTimestampUs);
            }
        }
        if (!mStarted) {
            return OK;
        }
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        mFramesBeingEncoded.push_back(frame);
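        // Wrap the camera-provided IMemory directly (no copy). The frame is
        // handed back to the camera in signalBufferReturned() once the
        // consumer releases the MediaBuffer.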
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}

void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp: timestamp %" PRId64 " us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %" PRId64 "/%" PRId64 " us", timestampUs, mStartTimeUs);
        releaseOneRecordingFrame(data);
        return;
    }

    if (mNumFramesReceived > 0) {
        CHECK(timestampUs > mLastFrameTimestampUs);
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
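            // From here on, mStartTimeUs holds the initial delay, i.e. the
            // time from the requested start time to the first captured frame.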
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}

bool CameraSource::isMetaDataStoredInVideoBuffers() const {
    ALOGV("isMetaDataStoredInVideoBuffers");
    return mIsMetaDataStoredInVideoBuffers;
}

CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
    mSource = source;
}

void CameraSource::ProxyListener::dataCallbackTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
    mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
}

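// The proxy's liveness is also checked via isBinderAlive() in read(); this
// notifier only logs that the camera recording proxy has died.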
void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who) {
    ALOGI("Camera recording proxy died");
}

}  // namespace android