// Home | History | Annotate | Download | only in libstagefright
      1 /*
      2  * Copyright (C) 2009 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 //#define LOG_NDEBUG 0
     18 #define LOG_TAG "CameraSource"
     19 #include <utils/Log.h>
     20 
     21 #include <OMX_Component.h>
     22 #include <binder/IPCThreadState.h>
     23 #include <media/stagefright/CameraSource.h>
     24 #include <media/stagefright/MediaDebug.h>
     25 #include <media/stagefright/MediaDefs.h>
     26 #include <media/stagefright/MediaErrors.h>
     27 #include <media/stagefright/MetaData.h>
     28 #include <camera/Camera.h>
     29 #include <camera/CameraParameters.h>
     30 #include <utils/String8.h>
     31 #include <cutils/properties.h>
     32 
     33 namespace android {
     34 
// Proxies camera callbacks into a CameraSource.  Holds only a *weak*
// reference to the source so that the camera callback path cannot keep
// the CameraSource alive after its client has released it.
struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    // CameraListener interface.
    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr);

    // Delivers one recording frame; timestamp is in nanoseconds.
    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    // Weak reference; promoted on each frame delivery (may have expired).
    wp<CameraSource> mSource;

    // Non-copyable.
    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};
     53 
// Keeps only a weak reference to |source|; see mSource for why.
CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}
     57 
// Nothing to release; mSource is a weak reference.
CameraSourceListener::~CameraSourceListener() {
}
     60 
// Camera status notifications are not acted upon here; log-only.
void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
    LOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}
     64 
// Preview/metadata frames without timestamps are not consumed; log-only.
// Recording frames arrive via postDataTimestamp() instead.
void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) {
    LOGV("postData(%d, ptr:%p, size:%d)",
         msgType, dataPtr->pointer(), dataPtr->size());
}
     69 
     70 void CameraSourceListener::postDataTimestamp(
     71         nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
     72 
     73     sp<CameraSource> source = mSource.promote();
     74     if (source.get() != NULL) {
     75         source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
     76     }
     77 }
     78 
     79 static int32_t getColorFormat(const char* colorFormat) {
     80     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
     81        return OMX_COLOR_FormatYUV422SemiPlanar;
     82     }
     83 
     84     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
     85         return OMX_COLOR_FormatYUV420SemiPlanar;
     86     }
     87 
     88     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
     89         return OMX_COLOR_FormatYCbYCr;
     90     }
     91 
     92     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
     93        return OMX_COLOR_Format16bitRGB565;
     94     }
     95 
     96     LOGE("Uknown color format (%s), please add it to "
     97          "CameraSource::getColorFormat", colorFormat);
     98 
     99     CHECK_EQ(0, "Unknown color format");
    100 }
    101 
    102 // static
    103 CameraSource *CameraSource::Create() {
    104     sp<Camera> camera = Camera::connect(0);
    105 
    106     if (camera.get() == NULL) {
    107         return NULL;
    108     }
    109 
    110     return new CameraSource(camera);
    111 }
    112 
    113 // static
    114 CameraSource *CameraSource::CreateFromCamera(const sp<Camera> &camera) {
    115     if (camera.get() == NULL) {
    116         return NULL;
    117     }
    118 
    119     return new CameraSource(camera);
    120 }
    121 
    122 CameraSource::CameraSource(const sp<Camera> &camera)
    123     : mCamera(camera),
    124       mFirstFrameTimeUs(0),
    125       mLastFrameTimestampUs(0),
    126       mNumFramesReceived(0),
    127       mNumFramesEncoded(0),
    128       mNumFramesDropped(0),
    129       mNumGlitches(0),
    130       mGlitchDurationThresholdUs(200000),
    131       mCollectStats(false),
    132       mStarted(false) {
    133 
    134     int64_t token = IPCThreadState::self()->clearCallingIdentity();
    135     String8 s = mCamera->getParameters();
    136     IPCThreadState::self()->restoreCallingIdentity(token);
    137 
    138     printf("params: \"%s\"\n", s.string());
    139 
    140     int32_t width, height, stride, sliceHeight;
    141     CameraParameters params(s);
    142     params.getPreviewSize(&width, &height);
    143 
    144     // Calculate glitch duraton threshold based on frame rate
    145     int32_t frameRate = params.getPreviewFrameRate();
    146     int64_t glitchDurationUs = (1000000LL / frameRate);
    147     if (glitchDurationUs > mGlitchDurationThresholdUs) {
    148         mGlitchDurationThresholdUs = glitchDurationUs;
    149     }
    150 
    151     const char *colorFormatStr = params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT);
    152     CHECK(colorFormatStr != NULL);
    153     int32_t colorFormat = getColorFormat(colorFormatStr);
    154 
    155     // XXX: query camera for the stride and slice height
    156     // when the capability becomes available.
    157     stride = width;
    158     sliceHeight = height;
    159 
    160     mMeta = new MetaData;
    161     mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
    162     mMeta->setInt32(kKeyColorFormat, colorFormat);
    163     mMeta->setInt32(kKeyWidth, width);
    164     mMeta->setInt32(kKeyHeight, height);
    165     mMeta->setInt32(kKeyStride, stride);
    166     mMeta->setInt32(kKeySliceHeight, sliceHeight);
    167 
    168 }
    169 
// Ensures recording is torn down even if the client forgot to call stop().
CameraSource::~CameraSource() {
    if (mStarted) {
        stop();
    }
}
    175 
    176 status_t CameraSource::start(MetaData *meta) {
    177     CHECK(!mStarted);
    178 
    179     char value[PROPERTY_VALUE_MAX];
    180     if (property_get("media.stagefright.record-stats", value, NULL)
    181         && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
    182         mCollectStats = true;
    183     }
    184 
    185     mStartTimeUs = 0;
    186     int64_t startTimeUs;
    187     if (meta && meta->findInt64(kKeyTime, &startTimeUs)) {
    188         mStartTimeUs = startTimeUs;
    189     }
    190 
    191     int64_t token = IPCThreadState::self()->clearCallingIdentity();
    192     mCamera->setListener(new CameraSourceListener(this));
    193     CHECK_EQ(OK, mCamera->startRecording());
    194     IPCThreadState::self()->restoreCallingIdentity(token);
    195 
    196     mStarted = true;
    197     return OK;
    198 }
    199 
// Stops recording: detaches the camera listener, returns every queued
// frame to the camera, and blocks until the encoder has handed back all
// frames still in flight.  Caller must NOT hold mLock.
status_t CameraSource::stop() {
    LOGV("stop");
    Mutex::Autolock autoLock(mLock);
    mStarted = false;
    // Wake a reader blocked in read() waiting for a frame.
    mFrameAvailableCondition.signal();

    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    mCamera->setListener(NULL);
    mCamera->stopRecording();
    releaseQueuedFrames();
    // Frames handed out through read() come back via signalBufferReturned(),
    // which signals mFrameCompleteCondition after releasing each one.
    while (!mFramesBeingEncoded.empty()) {
        LOGI("Waiting for outstanding frames being encoded: %d",
                mFramesBeingEncoded.size());
        mFrameCompleteCondition.wait(mLock);
    }
    mCamera = NULL;
    IPCThreadState::self()->restoreCallingIdentity(token);

    if (mCollectStats) {
        LOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
                mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                mLastFrameTimestampUs - mFirstFrameTimeUs);
    }

    // Every received frame must have been either encoded or dropped.
    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    return OK;
}
    227 
    228 void CameraSource::releaseQueuedFrames() {
    229     List<sp<IMemory> >::iterator it;
    230     while (!mFramesReceived.empty()) {
    231         it = mFramesReceived.begin();
    232         mCamera->releaseRecordingFrame(*it);
    233         mFramesReceived.erase(it);
    234         ++mNumFramesDropped;
    235     }
    236 }
    237 
// Returns the output format metadata built in the constructor
// (raw video MIME type, dimensions, color format, stride).
sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}
    241 
// Hands a single frame buffer back to the camera for reuse.  Clears the
// binder calling identity around the call because the camera service
// permission-checks its caller.
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    mCamera->releaseRecordingFrame(frame);
    IPCThreadState::self()->restoreCallingIdentity(token);
}
    247 
    248 void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    249     LOGV("signalBufferReturned: %p", buffer->data());
    250     Mutex::Autolock autoLock(mLock);
    251     for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
    252          it != mFramesBeingEncoded.end(); ++it) {
    253         if ((*it)->pointer() ==  buffer->data()) {
    254 
    255             releaseOneRecordingFrame((*it));
    256             mFramesBeingEncoded.erase(it);
    257             ++mNumFramesEncoded;
    258             buffer->setObserver(0);
    259             buffer->release();
    260             mFrameCompleteCondition.signal();
    261             return;
    262         }
    263     }
    264     CHECK_EQ(0, "signalBufferReturned: bogus buffer");
    265 }
    266 
    267 status_t CameraSource::read(
    268         MediaBuffer **buffer, const ReadOptions *options) {
    269     LOGV("read");
    270 
    271     *buffer = NULL;
    272 
    273     int64_t seekTimeUs;
    274     ReadOptions::SeekMode mode;
    275     if (options && options->getSeekTo(&seekTimeUs, &mode)) {
    276         return ERROR_UNSUPPORTED;
    277     }
    278 
    279     sp<IMemory> frame;
    280     int64_t frameTime;
    281 
    282     {
    283         Mutex::Autolock autoLock(mLock);
    284         while (mStarted) {
    285             while(mFramesReceived.empty()) {
    286                 mFrameAvailableCondition.wait(mLock);
    287             }
    288 
    289             if (!mStarted) {
    290                 return OK;
    291             }
    292 
    293             frame = *mFramesReceived.begin();
    294             mFramesReceived.erase(mFramesReceived.begin());
    295 
    296             frameTime = *mFrameTimes.begin();
    297             mFrameTimes.erase(mFrameTimes.begin());
    298             int64_t skipTimeUs;
    299             if (!options || !options->getSkipFrame(&skipTimeUs)) {
    300                 skipTimeUs = frameTime;
    301             }
    302             if (skipTimeUs > frameTime) {
    303                 LOGV("skipTimeUs: %lld us > frameTime: %lld us",
    304                     skipTimeUs, frameTime);
    305                 releaseOneRecordingFrame(frame);
    306                 ++mNumFramesDropped;
    307                 // Safeguard against the abuse of the kSkipFrame_Option.
    308                 if (skipTimeUs - frameTime >= 1E6) {
    309                     LOGE("Frame skipping requested is way too long: %lld us",
    310                         skipTimeUs - frameTime);
    311                     return UNKNOWN_ERROR;
    312                 }
    313             } else {
    314                 mFramesBeingEncoded.push_back(frame);
    315                 *buffer = new MediaBuffer(frame->pointer(), frame->size());
    316                 (*buffer)->setObserver(this);
    317                 (*buffer)->add_ref();
    318                 (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    319 
    320                 return OK;
    321             }
    322         }
    323     }
    324     return OK;
    325 }
    326 
// Called (via CameraSourceListener) for each recording frame delivered by
// the camera.  |timestampUs| is the capture time in microseconds.  Queues
// the frame and its adjusted timestamp for read(), tracking glitch and
// drop statistics along the way.
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted) {
        // Frame arrived after stop(); return it to the camera immediately.
        releaseOneRecordingFrame(data);
        ++mNumFramesReceived;
        ++mNumFramesDropped;
        return;
    }

    // An inter-frame gap larger than the threshold (default 200 ms, or one
    // frame interval if that is longer) counts as a recording glitch.
    if (mNumFramesReceived > 0 &&
        timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
        if (mNumGlitches % 10 == 0) {  // Don't spam the log
            LOGW("Long delay detected in video recording");
        }
        ++mNumGlitches;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            // From here on, mStartTimeUs holds the initial delay (offset
            // added to every output timestamp), not the requested start time.
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    // Queue frame and its output timestamp (initial delay + elapsed time
    // since the first frame), then wake a reader blocked in read().
    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    LOGV("initial delay: %lld, current time stamp: %lld",
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}
    369 
    370 }  // namespace android
    371