/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSourceTimeLapse"

#include <binder/IPCThreadState.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/CameraSourceTimeLapse.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <utils/String8.h>
#include <utils/Vector.h>

namespace android {

// static
CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(
        const sp<ICamera> &camera,
        const sp<ICameraRecordingProxy> &proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<IGraphicBufferProducer>& surface,
        int64_t timeBetweenFrameCaptureUs,
        bool storeMetaDataInVideoBuffers) {

    CameraSourceTimeLapse *source = new
            CameraSourceTimeLapse(camera, proxy, cameraId,
                clientName, clientUid,
                videoSize, videoFrameRate, surface,
                timeBetweenFrameCaptureUs,
                storeMetaDataInVideoBuffers);

    if (source != NULL) {
        if (source->initCheck() != OK) {
            delete source;
            return NULL;
        }
    }
    return source;
}

CameraSourceTimeLapse::CameraSourceTimeLapse(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<IGraphicBufferProducer>& surface,
        int64_t timeBetweenFrameCaptureUs,
        bool storeMetaDataInVideoBuffers)
    : CameraSource(camera, proxy, cameraId, clientName, clientUid,
            videoSize, videoFrameRate, surface,
            storeMetaDataInVideoBuffers),
      mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
      mLastTimeLapseFrameRealTimestampUs(0),
      mSkipCurrentFrame(false) {

    mTimeBetweenFrameCaptureUs = timeBetweenFrameCaptureUs;
    ALOGD("starting time lapse mode: %lld us",
        mTimeBetweenFrameCaptureUs);

    mVideoWidth = videoSize.width;
    mVideoHeight = videoSize.height;

    if (!trySettingVideoSize(videoSize.width, videoSize.height)) {
        mInitCheck = NO_INIT;
    }

    // Initialize quick stop variables.
    mQuickStop = false;
    mForceRead = false;
    mLastReadBufferCopy = NULL;
    mStopWaitingForIdleCamera = false;
}

CameraSourceTimeLapse::~CameraSourceTimeLapse() {
    if (mLastReadBufferCopy) {
        mLastReadBufferCopy->release();
        mLastReadBufferCopy = NULL;
    }
}

void CameraSourceTimeLapse::startQuickReadReturns() {
    ALOGV("startQuickReadReturns");
    Mutex::Autolock autoLock(mQuickStopLock);

    // Enable quick stop mode.
    mQuickStop = true;

    // Force dataCallbackTimestamp() coming from the video camera to
    // not skip the next frame as we want read() to get a frame
    // right away.
    mForceRead = true;
}

bool CameraSourceTimeLapse::trySettingVideoSize(
        int32_t width, int32_t height) {

    ALOGV("trySettingVideoSize");
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();

    CameraParameters params(s);
    Vector<Size> supportedSizes;
    params.getSupportedVideoSizes(supportedSizes);
    bool videoOutputSupported = false;
    if (supportedSizes.size() == 0) {
        params.getSupportedPreviewSizes(supportedSizes);
    } else {
        videoOutputSupported = true;
    }

    bool videoSizeSupported = false;
    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
        int32_t pictureWidth = supportedSizes[i].width;
        int32_t pictureHeight = supportedSizes[i].height;

        if ((pictureWidth == width) && (pictureHeight == height)) {
            videoSizeSupported = true;
        }
    }

    bool isSuccessful = false;
    if (videoSizeSupported) {
        ALOGV("Video size (%d, %d) is supported", width, height);
        if (videoOutputSupported) {
            params.setVideoSize(width, height);
        } else {
            params.setPreviewSize(width, height);
        }
        if (mCamera->setParameters(params.flatten()) == OK) {
            isSuccessful = true;
        } else {
            ALOGE("Failed to set preview size to %dx%d", width, height);
            isSuccessful = false;
        }
    }

    IPCThreadState::self()->restoreCallingIdentity(token);
    return isSuccessful;
}

void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) {
    ALOGV("signalBufferReturned");
    Mutex::Autolock autoLock(mQuickStopLock);
    if (mQuickStop && (buffer == mLastReadBufferCopy)) {
        buffer->setObserver(NULL);
        buffer->release();
    } else {
        return CameraSource::signalBufferReturned(buffer);
    }
}

void createMediaBufferCopy(
        const MediaBuffer& sourceBuffer,
        int64_t frameTime,
        MediaBuffer **newBuffer) {

    ALOGV("createMediaBufferCopy");
    size_t sourceSize = sourceBuffer.size();
    void* sourcePointer = sourceBuffer.data();

    (*newBuffer) = new MediaBuffer(sourceSize);
    memcpy((*newBuffer)->data(), sourcePointer, sourceSize);

    (*newBuffer)->meta_data()->setInt64(kKeyTime, frameTime);
}

void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) {
    ALOGV("fillLastReadBufferCopy");
    int64_t frameTime;
    CHECK(sourceBuffer.meta_data()->findInt64(kKeyTime, &frameTime));
    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
    mLastReadBufferCopy->add_ref();
    mLastReadBufferCopy->setObserver(this);
}

status_t CameraSourceTimeLapse::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    ALOGV("read");
    if (mLastReadBufferCopy == NULL) {
        mLastReadStatus = CameraSource::read(buffer, options);

        // mQuickStop may have turned to true while read was blocked.
        // Make a copy of the buffer in that case.
        Mutex::Autolock autoLock(mQuickStopLock);
        if (mQuickStop && *buffer) {
            fillLastReadBufferCopy(**buffer);
        }
        return mLastReadStatus;
    } else {
        (*buffer) = mLastReadBufferCopy;
        (*buffer)->add_ref();
        return mLastReadStatus;
    }
}

sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(
        const sp<IMemory> &source_data) {

    ALOGV("createIMemoryCopy");
    size_t source_size = source_data->size();
    void* source_pointer = source_data->pointer();

    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
    memcpy(newMemory->pointer(), source_pointer, source_size);
    return newMemory;
}

bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
    ALOGV("skipCurrentFrame");
    if (mSkipCurrentFrame) {
        mSkipCurrentFrame = false;
        return true;
    } else {
        return false;
    }
}

bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
    ALOGV("skipFrameAndModifyTimeStamp");
    if (mLastTimeLapseFrameRealTimestampUs == 0) {
        // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
        // to current time (timestampUs) and save frame data.
        ALOGV("dataCallbackTimestamp timelapse: initial frame");

        mLastTimeLapseFrameRealTimestampUs = *timestampUs;
        return false;
    }

    {
        Mutex::Autolock autoLock(mQuickStopLock);

        // mForceRead may be set to true by startQuickReadReturns(). In that
        // case don't skip this frame.
        if (mForceRead) {
            ALOGV("dataCallbackTimestamp timelapse: forced read");
            mForceRead = false;
            *timestampUs =
                mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;

            // Really make sure that this video recording frame will not be dropped.
            if (*timestampUs < mStartTimeUs) {
                ALOGI("set timestampUs to start time stamp %lld us", mStartTimeUs);
                *timestampUs = mStartTimeUs;
            }
            return false;
        }
    }

    // Workaround to bypass the first 2 input frames for skipping.
    // The first 2 output frames from the encoder are: decoder specific info and
    // the compressed video frame data for the first input video frame.
    if (mNumFramesEncoded >= 1 && *timestampUs <
            (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenFrameCaptureUs)) {
        // Skip all frames from last encoded frame until
        // sufficient time (mTimeBetweenFrameCaptureUs) has passed.
        // Tell the camera to release its recording frame and return.
        ALOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
        return true;
    } else {
        // Desired frame has arrived after mTimeBetweenFrameCaptureUs time:
        // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
        // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
        //   of the last encoded frame's time stamp.
        ALOGV("dataCallbackTimestamp timelapse: got timelapse frame");

        mLastTimeLapseFrameRealTimestampUs = *timestampUs;
        *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
        return false;
    }
    return false;
}

void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
        const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp");
    mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
    CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
}

}  // namespace android