/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <inttypes.h>

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSourceTimeLapse"

#include <media/hardware/HardwareAPI.h>
#include <binder/IPCThreadState.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/CameraSourceTimeLapse.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <utils/String8.h>
#include <utils/Vector.h>

namespace android {

// static
CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(
        const sp<hardware::ICamera> &camera,
        const sp<ICameraRecordingProxy> &proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        pid_t clientPid,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<IGraphicBufferProducer>& surface,
        int64_t timeBetweenFrameCaptureUs,
        bool storeMetaDataInVideoBuffers) {

    CameraSourceTimeLapse *source = new
            CameraSourceTimeLapse(camera, proxy, cameraId,
                clientName, clientUid, clientPid,
                videoSize, videoFrameRate, surface,
                timeBetweenFrameCaptureUs,
                storeMetaDataInVideoBuffers);

    if (source != NULL) {
        if (source->initCheck() != OK) {
            delete source;
            return NULL;
        }
    }
    return source;
}
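// Illustrative usage sketch (not part of the original file): a recorder that
// wants time lapse capture would typically create the source through the
// factory above. The parameter values shown here are hypothetical.
//
//     sp<CameraSourceTimeLapse> source = CameraSourceTimeLapse::CreateFromCamera(
//             camera, proxy, cameraId, clientName, clientUid, clientPid,
//             videoSize, 30 /* videoFrameRate */, surface,
//             1000000 /* timeBetweenFrameCaptureUs: one capture per second */,
//             true /* storeMetaDataInVideoBuffers */);
//     if (source == NULL) {
//         // Camera setup or video size negotiation failed (initCheck() != OK).
//     }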

CameraSourceTimeLapse::CameraSourceTimeLapse(
        const sp<hardware::ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        pid_t clientPid,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<IGraphicBufferProducer>& surface,
        int64_t timeBetweenFrameCaptureUs,
        bool storeMetaDataInVideoBuffers)
      : CameraSource(camera, proxy, cameraId, clientName, clientUid, clientPid,
                videoSize, videoFrameRate, surface,
                storeMetaDataInVideoBuffers),
      mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
      mLastTimeLapseFrameRealTimestampUs(0),
      mSkipCurrentFrame(false) {

    mTimeBetweenFrameCaptureUs = timeBetweenFrameCaptureUs;
    ALOGD("starting time lapse mode: %" PRId64 " us",
        mTimeBetweenFrameCaptureUs);

    mVideoWidth = videoSize.width;
    mVideoHeight = videoSize.height;

    if (OK == mInitCheck && !trySettingVideoSize(videoSize.width, videoSize.height)) {
        releaseCamera();
        mInitCheck = NO_INIT;
    }

    // Initialize quick stop variables.
    mQuickStop = false;
    mForceRead = false;
    mLastReadBufferCopy = NULL;
    mStopWaitingForIdleCamera = false;
}
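// Worked example of the two intervals set up above (illustrative numbers, not
// taken from this file): with timeBetweenFrameCaptureUs = 1,000,000 (capture
// one frame per real-time second) and videoFrameRate = 30,
// mTimeBetweenTimeLapseVideoFramesUs = 1E6 / 30 = 33,333 us (rounded), so each
// second of wall-clock time is played back as one ~33 ms video frame, roughly
// a 30x speed-up.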

CameraSourceTimeLapse::~CameraSourceTimeLapse() {
    if (mLastReadBufferCopy) {
        mLastReadBufferCopy->release();
        mLastReadBufferCopy = NULL;
    }
}

void CameraSourceTimeLapse::startQuickReadReturns() {
    ALOGV("startQuickReadReturns");
    Mutex::Autolock autoLock(mQuickStopLock);

    // Enable quick stop mode.
    mQuickStop = true;

    // Force dataCallbackTimestamp() coming from the video camera to
    // not skip the next frame, as we want read() to get a frame
    // right away.
    mForceRead = true;
}

bool CameraSourceTimeLapse::trySettingVideoSize(
        int32_t width, int32_t height) {

    ALOGV("trySettingVideoSize");
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();

    CameraParameters params(s);
    Vector<Size> supportedSizes;
    params.getSupportedVideoSizes(supportedSizes);
    bool videoOutputSupported = false;
    if (supportedSizes.size() == 0) {
        params.getSupportedPreviewSizes(supportedSizes);
    } else {
        videoOutputSupported = true;
    }

    bool videoSizeSupported = false;
    for (size_t i = 0; i < supportedSizes.size(); ++i) {
        int32_t pictureWidth = supportedSizes[i].width;
        int32_t pictureHeight = supportedSizes[i].height;

        if ((pictureWidth == width) && (pictureHeight == height)) {
            videoSizeSupported = true;
        }
    }

    bool isSuccessful = false;
    if (videoSizeSupported) {
        ALOGV("Video size (%d, %d) is supported", width, height);
        if (videoOutputSupported) {
            params.setVideoSize(width, height);
        } else {
            params.setPreviewSize(width, height);
        }
        if (mCamera->setParameters(params.flatten()) == OK) {
            isSuccessful = true;
        } else {
            ALOGE("Failed to set preview size to %dx%d", width, height);
            isSuccessful = false;
        }
    }

    IPCThreadState::self()->restoreCallingIdentity(token);
    return isSuccessful;
}

void CameraSourceTimeLapse::signalBufferReturned(MediaBufferBase* buffer) {
    ALOGV("signalBufferReturned");
    Mutex::Autolock autoLock(mQuickStopLock);
    if (mQuickStop && (buffer == mLastReadBufferCopy)) {
        if (metaDataStoredInVideoBuffers() == kMetadataBufferTypeNativeHandleSource) {
            native_handle_t* handle = (
                (VideoNativeHandleMetadata*)(mLastReadBufferCopy->data()))->pHandle;
            native_handle_close(handle);
            native_handle_delete(handle);
        }
        buffer->setObserver(NULL);
        buffer->release();
        mLastReadBufferCopy = NULL;
        mForceRead = true;
    } else {
        return CameraSource::signalBufferReturned(buffer);
    }
}

void createMediaBufferCopy(
        const MediaBufferBase& sourceBuffer,
        int64_t frameTime,
        MediaBufferBase **newBuffer,
        int32_t videoBufferMode) {

    ALOGV("createMediaBufferCopy");
    size_t sourceSize = sourceBuffer.size();
    void* sourcePointer = sourceBuffer.data();

    (*newBuffer) = new MediaBuffer(sourceSize);
    memcpy((*newBuffer)->data(), sourcePointer, sourceSize);

    (*newBuffer)->meta_data().setInt64(kKeyTime, frameTime);

    if (videoBufferMode == kMetadataBufferTypeNativeHandleSource) {
        ((VideoNativeHandleMetadata*)((*newBuffer)->data()))->pHandle =
            native_handle_clone(
                ((VideoNativeHandleMetadata*)(sourceBuffer.data()))->pHandle);
    }
}

void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBufferBase& sourceBuffer) {
    ALOGV("fillLastReadBufferCopy");
    int64_t frameTime;
    CHECK(sourceBuffer.meta_data().findInt64(kKeyTime, &frameTime));
    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy,
        metaDataStoredInVideoBuffers());
    mLastReadBufferCopy->add_ref();
    mLastReadBufferCopy->setObserver(this);
}

status_t CameraSourceTimeLapse::read(
        MediaBufferBase **buffer, const ReadOptions *options) {
    ALOGV("read");
    if (mLastReadBufferCopy == NULL) {
        mLastReadStatus = CameraSource::read(buffer, options);

        // mQuickStop may have been set to true while read() was blocked.
        // Make a copy of the buffer in that case.
        Mutex::Autolock autoLock(mQuickStopLock);
        if (mQuickStop && *buffer) {
            fillLastReadBufferCopy(**buffer);
        }
        return mLastReadStatus;
    } else {
        (*buffer) = mLastReadBufferCopy;
        (*buffer)->add_ref();
        return mLastReadStatus;
    }
}
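// Note on the read() path above: once quick stop has kicked in and a copy of
// the last frame exists, every subsequent read() immediately hands out another
// reference to that copy instead of blocking on the camera. Presumably this is
// so that stopping a time lapse recording does not have to wait up to
// mTimeBetweenFrameCaptureUs for the next real capture to arrive.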

sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(
        const sp<IMemory> &source_data) {

    ALOGV("createIMemoryCopy");
    size_t source_size = source_data->size();
    void* source_pointer = source_data->pointer();

    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
    memcpy(newMemory->pointer(), source_pointer, source_size);
    return newMemory;
}

bool CameraSourceTimeLapse::skipCurrentFrame(int64_t /* timestampUs */) {
    ALOGV("skipCurrentFrame");
    if (mSkipCurrentFrame) {
        mSkipCurrentFrame = false;
        return true;
    } else {
        return false;
    }
}

bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
    ALOGV("skipFrameAndModifyTimeStamp");
    if (mLastTimeLapseFrameRealTimestampUs == 0) {
        // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
        // to current time (timestampUs) and save frame data.
        ALOGV("dataCallbackTimestamp timelapse: initial frame");

        mLastTimeLapseFrameRealTimestampUs = *timestampUs;
        return false;
    }

    {
        Mutex::Autolock autoLock(mQuickStopLock);

        // mForceRead may be set to true by startQuickReadReturns(). In that
        // case don't skip this frame.
        if (mForceRead) {
            ALOGV("dataCallbackTimestamp timelapse: forced read");
            mForceRead = false;
            *timestampUs =
                mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;

            // Really make sure that this video recording frame will not be dropped.
            if (*timestampUs < mStartTimeUs) {
                ALOGI("set timestampUs to start time stamp %" PRId64 " us", mStartTimeUs);
                *timestampUs = mStartTimeUs;
            }
            return false;
        }
    }

    // Workaround: do not skip the first couple of input frames. The first 2
    // output frames from the encoder are the codec specific data and the
    // compressed data for the first input video frame, so skipping only
    // starts once at least one frame has been encoded.
    if (mNumFramesEncoded >= 1 && *timestampUs <
        (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenFrameCaptureUs)) {
        // Skip all frames from last encoded frame until
        // sufficient time (mTimeBetweenFrameCaptureUs) has passed.
        // Tell the camera to release its recording frame and return.
        ALOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
        return true;
    } else {
        // Desired frame has arrived after mTimeBetweenFrameCaptureUs time:
        // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
        // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
        // of the last encoded frame's time stamp.
        ALOGV("dataCallbackTimestamp timelapse: got timelapse frame");

        mLastTimeLapseFrameRealTimestampUs = *timestampUs;
        *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
        return false;
    }
    return false;
}
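// Worked trace of the timestamp rewriting above, using the illustrative
// numbers from the constructor comment (1 s capture interval, 30 fps video):
// camera frames arriving at relative real times of roughly 0 s, 1.0 s and
// 2.0 s are kept and given output timestamps spaced 33,333 us apart, while
// all frames arriving in between are skipped (the camera's recording buffer
// is released without being encoded).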

void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
            const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp");
    mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
    CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
}

void CameraSourceTimeLapse::recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
            native_handle_t* handle) {
    ALOGV("recordingFrameHandleCallbackTimestamp");
    mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
    CameraSource::recordingFrameHandleCallbackTimestamp(timestampUs, handle);
}

void CameraSourceTimeLapse::recordingFrameHandleCallbackTimestampBatch(
        const std::vector<int64_t>& timestampsUs,
        const std::vector<native_handle_t*>& handles) {
    ALOGV("recordingFrameHandleCallbackTimestampBatch");
    int n = timestampsUs.size();
    for (int i = 0; i < n; i++) {
        // Don't do batching for CameraSourceTimeLapse for now.
        recordingFrameHandleCallbackTimestamp(timestampsUs[i], handles[i]);
    }
}

void CameraSourceTimeLapse::processBufferQueueFrame(BufferItem& buffer) {
    ALOGV("processBufferQueueFrame");
    int64_t timestampUs = buffer.mTimestamp / 1000;
    mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
    buffer.mTimestamp = timestampUs * 1000;
    CameraSource::processBufferQueueFrame(buffer);
}

}  // namespace android