1 /* 2 * Copyright (C) 2011 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 18 package android.filterpacks.videosink; 19 20 import android.filterfw.core.Filter; 21 import android.filterfw.core.FilterContext; 22 import android.filterfw.core.Frame; 23 import android.filterfw.core.FrameFormat; 24 import android.filterfw.core.GenerateFieldPort; 25 import android.filterfw.core.GLFrame; 26 import android.filterfw.core.MutableFrameFormat; 27 import android.filterfw.core.ShaderProgram; 28 import android.filterfw.format.ImageFormat; 29 import android.filterfw.geometry.Point; 30 import android.filterfw.geometry.Quad; 31 import android.media.MediaRecorder; 32 import android.media.CamcorderProfile; 33 import android.filterfw.core.GLEnvironment; 34 35 import java.io.IOException; 36 import java.io.FileDescriptor; 37 38 import android.util.Log; 39 40 /** @hide */ 41 public class MediaEncoderFilter extends Filter { 42 43 /** User-visible parameters */ 44 45 /** Recording state. When set to false, recording will stop, or will not 46 * start if not yet running the graph. Instead, frames are simply ignored. 47 * When switched back to true, recording will restart. This allows a single 48 * graph to both provide preview and to record video. If this is false, 49 * recording settings can be updated while the graph is running. 50 */ 51 @GenerateFieldPort(name = "recording", hasDefault = true) 52 private boolean mRecording = true; 53 54 /** Filename to save the output. */ 55 @GenerateFieldPort(name = "outputFile", hasDefault = true) 56 private String mOutputFile = new String("/sdcard/MediaEncoderOut.mp4"); 57 58 /** File Descriptor to save the output. */ 59 @GenerateFieldPort(name = "outputFileDescriptor", hasDefault = true) 60 private FileDescriptor mFd = null; 61 62 /** Input audio source. If not set, no audio will be recorded. 63 * Select from the values in MediaRecorder.AudioSource 64 */ 65 @GenerateFieldPort(name = "audioSource", hasDefault = true) 66 private int mAudioSource = NO_AUDIO_SOURCE; 67 68 /** Media recorder info listener, which needs to implement 69 * MediaRecorder.OnInfoListener. Set this to receive notifications about 70 * recording events. 71 */ 72 @GenerateFieldPort(name = "infoListener", hasDefault = true) 73 private MediaRecorder.OnInfoListener mInfoListener = null; 74 75 /** Media recorder error listener, which needs to implement 76 * MediaRecorder.OnErrorListener. Set this to receive notifications about 77 * recording errors. 78 */ 79 @GenerateFieldPort(name = "errorListener", hasDefault = true) 80 private MediaRecorder.OnErrorListener mErrorListener = null; 81 82 /** Media recording done callback, which needs to implement OnRecordingDoneListener. 83 * Set this to finalize media upon completion of media recording. 84 */ 85 @GenerateFieldPort(name = "recordingDoneListener", hasDefault = true) 86 private OnRecordingDoneListener mRecordingDoneListener = null; 87 88 /** Orientation hint. Used for indicating proper video playback orientation. 89 * Units are in degrees of clockwise rotation, valid values are (0, 90, 180, 90 * 270). 91 */ 92 @GenerateFieldPort(name = "orientationHint", hasDefault = true) 93 private int mOrientationHint = 0; 94 95 /** Camcorder profile to use. Select from the profiles available in 96 * android.media.CamcorderProfile. If this field is set, it overrides 97 * settings to width, height, framerate, outputFormat, and videoEncoder. 98 */ 99 @GenerateFieldPort(name = "recordingProfile", hasDefault = true) 100 private CamcorderProfile mProfile = null; 101 102 /** Frame width to be encoded, defaults to 320. 103 * Actual received frame size has to match this */ 104 @GenerateFieldPort(name = "width", hasDefault = true) 105 private int mWidth = 0; 106 107 /** Frame height to to be encoded, defaults to 240. 108 * Actual received frame size has to match */ 109 @GenerateFieldPort(name = "height", hasDefault = true) 110 private int mHeight = 0; 111 112 /** Stream framerate to encode the frames at. 113 * By default, frames are encoded at 30 FPS*/ 114 @GenerateFieldPort(name = "framerate", hasDefault = true) 115 private int mFps = 30; 116 117 /** The output format to encode the frames in. 118 * Choose an output format from the options in 119 * android.media.MediaRecorder.OutputFormat */ 120 @GenerateFieldPort(name = "outputFormat", hasDefault = true) 121 private int mOutputFormat = MediaRecorder.OutputFormat.MPEG_4; 122 123 /** The videoencoder to encode the frames with. 124 * Choose a videoencoder from the options in 125 * android.media.MediaRecorder.VideoEncoder */ 126 @GenerateFieldPort(name = "videoEncoder", hasDefault = true) 127 private int mVideoEncoder = MediaRecorder.VideoEncoder.H264; 128 129 /** The input region to read from the frame. The corners of this quad are 130 * mapped to the output rectangle. The input frame ranges from (0,0)-(1,1), 131 * top-left to bottom-right. The corners of the quad are specified in the 132 * order bottom-left, bottom-right, top-left, top-right. 133 */ 134 @GenerateFieldPort(name = "inputRegion", hasDefault = true) 135 private Quad mSourceRegion; 136 137 /** The maximum filesize (in bytes) of the recording session. 138 * By default, it will be 0 and will be passed on to the MediaRecorder. 139 * If the limit is zero or negative, MediaRecorder will disable the limit*/ 140 @GenerateFieldPort(name = "maxFileSize", hasDefault = true) 141 private long mMaxFileSize = 0; 142 143 /** The maximum duration (in milliseconds) of the recording session. 144 * By default, it will be 0 and will be passed on to the MediaRecorder. 145 * If the limit is zero or negative, MediaRecorder will record indefinitely*/ 146 @GenerateFieldPort(name = "maxDurationMs", hasDefault = true) 147 private int mMaxDurationMs = 0; 148 149 /** TimeLapse Interval between frames. 150 * By default, it will be 0. Whether the recording is timelapsed 151 * is inferred based on its value being greater than 0 */ 152 @GenerateFieldPort(name = "timelapseRecordingIntervalUs", hasDefault = true) 153 private long mTimeBetweenTimeLapseFrameCaptureUs = 0; 154 155 // End of user visible parameters 156 157 private static final int NO_AUDIO_SOURCE = -1; 158 159 private int mSurfaceId; 160 private ShaderProgram mProgram; 161 private GLFrame mScreen; 162 163 private boolean mRecordingActive = false; 164 private long mTimestampNs = 0; 165 private long mLastTimeLapseFrameRealTimestampNs = 0; 166 private int mNumFramesEncoded = 0; 167 // Used to indicate whether recording is timelapsed. 168 // Inferred based on (mTimeBetweenTimeLapseFrameCaptureUs > 0) 169 private boolean mCaptureTimeLapse = false; 170 171 private boolean mLogVerbose; 172 private static final String TAG = "MediaEncoderFilter"; 173 174 // Our hook to the encoder 175 private MediaRecorder mMediaRecorder; 176 177 /** Callback to be called when media recording completes. */ 178 179 public interface OnRecordingDoneListener { 180 public void onRecordingDone(); 181 } 182 183 public MediaEncoderFilter(String name) { 184 super(name); 185 Point bl = new Point(0, 0); 186 Point br = new Point(1, 0); 187 Point tl = new Point(0, 1); 188 Point tr = new Point(1, 1); 189 mSourceRegion = new Quad(bl, br, tl, tr); 190 mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE); 191 } 192 193 @Override 194 public void setupPorts() { 195 // Add input port- will accept RGBA GLFrames 196 addMaskedInputPort("videoframe", ImageFormat.create(ImageFormat.COLORSPACE_RGBA, 197 FrameFormat.TARGET_GPU)); 198 } 199 200 @Override 201 public void fieldPortValueUpdated(String name, FilterContext context) { 202 if (mLogVerbose) Log.v(TAG, "Port " + name + " has been updated"); 203 if (name.equals("recording")) return; 204 if (name.equals("inputRegion")) { 205 if (isOpen()) updateSourceRegion(); 206 return; 207 } 208 // TODO: Not sure if it is possible to update the maxFileSize 209 // when the recording is going on. For now, not doing that. 210 if (isOpen() && mRecordingActive) { 211 throw new RuntimeException("Cannot change recording parameters" 212 + " when the filter is recording!"); 213 } 214 } 215 216 private void updateSourceRegion() { 217 // Flip source quad to map to OpenGL origin 218 Quad flippedRegion = new Quad(); 219 flippedRegion.p0 = mSourceRegion.p2; 220 flippedRegion.p1 = mSourceRegion.p3; 221 flippedRegion.p2 = mSourceRegion.p0; 222 flippedRegion.p3 = mSourceRegion.p1; 223 mProgram.setSourceRegion(flippedRegion); 224 } 225 226 // update the MediaRecorderParams based on the variables. 227 // These have to be in certain order as per the MediaRecorder 228 // documentation 229 private void updateMediaRecorderParams() { 230 mCaptureTimeLapse = mTimeBetweenTimeLapseFrameCaptureUs > 0; 231 final int GRALLOC_BUFFER = 2; 232 mMediaRecorder.setVideoSource(GRALLOC_BUFFER); 233 if (!mCaptureTimeLapse && (mAudioSource != NO_AUDIO_SOURCE)) { 234 mMediaRecorder.setAudioSource(mAudioSource); 235 } 236 if (mProfile != null) { 237 mMediaRecorder.setProfile(mProfile); 238 mFps = mProfile.videoFrameRate; 239 // If width and height are set larger than 0, then those 240 // overwrite the ones in the profile. 241 if (mWidth > 0 && mHeight > 0) { 242 mMediaRecorder.setVideoSize(mWidth, mHeight); 243 } 244 } else { 245 mMediaRecorder.setOutputFormat(mOutputFormat); 246 mMediaRecorder.setVideoEncoder(mVideoEncoder); 247 mMediaRecorder.setVideoSize(mWidth, mHeight); 248 mMediaRecorder.setVideoFrameRate(mFps); 249 } 250 mMediaRecorder.setOrientationHint(mOrientationHint); 251 mMediaRecorder.setOnInfoListener(mInfoListener); 252 mMediaRecorder.setOnErrorListener(mErrorListener); 253 if (mFd != null) { 254 mMediaRecorder.setOutputFile(mFd); 255 } else { 256 mMediaRecorder.setOutputFile(mOutputFile); 257 } 258 try { 259 mMediaRecorder.setMaxFileSize(mMaxFileSize); 260 } catch (Exception e) { 261 // Following the logic in VideoCamera.java (in Camera app) 262 // We are going to ignore failure of setMaxFileSize here, as 263 // a) The composer selected may simply not support it, or 264 // b) The underlying media framework may not handle 64-bit range 265 // on the size restriction. 266 Log.w(TAG, "Setting maxFileSize on MediaRecorder unsuccessful! " 267 + e.getMessage()); 268 } 269 mMediaRecorder.setMaxDuration(mMaxDurationMs); 270 } 271 272 @Override 273 public void prepare(FilterContext context) { 274 if (mLogVerbose) Log.v(TAG, "Preparing"); 275 276 mProgram = ShaderProgram.createIdentity(context); 277 278 mRecordingActive = false; 279 } 280 281 @Override 282 public void open(FilterContext context) { 283 if (mLogVerbose) Log.v(TAG, "Opening"); 284 updateSourceRegion(); 285 if (mRecording) startRecording(context); 286 } 287 288 private void startRecording(FilterContext context) { 289 if (mLogVerbose) Log.v(TAG, "Starting recording"); 290 291 // Create a frame representing the screen 292 MutableFrameFormat screenFormat = new MutableFrameFormat( 293 FrameFormat.TYPE_BYTE, FrameFormat.TARGET_GPU); 294 screenFormat.setBytesPerSample(4); 295 296 int width, height; 297 boolean widthHeightSpecified = mWidth > 0 && mHeight > 0; 298 // If width and height are specified, then use those instead 299 // of that in the profile. 300 if (mProfile != null && !widthHeightSpecified) { 301 width = mProfile.videoFrameWidth; 302 height = mProfile.videoFrameHeight; 303 } else { 304 width = mWidth; 305 height = mHeight; 306 } 307 screenFormat.setDimensions(width, height); 308 mScreen = (GLFrame)context.getFrameManager().newBoundFrame( 309 screenFormat, GLFrame.EXISTING_FBO_BINDING, 0); 310 311 // Initialize the media recorder 312 313 mMediaRecorder = new MediaRecorder(); 314 updateMediaRecorderParams(); 315 316 try { 317 mMediaRecorder.prepare(); 318 } catch (IllegalStateException e) { 319 throw e; 320 } catch (IOException e) { 321 throw new RuntimeException("IOException in" 322 + "MediaRecorder.prepare()!", e); 323 } catch (Exception e) { 324 throw new RuntimeException("Unknown Exception in" 325 + "MediaRecorder.prepare()!", e); 326 } 327 // Make sure start() is called before trying to 328 // register the surface. The native window handle needed to create 329 // the surface is initiated in start() 330 mMediaRecorder.start(); 331 if (mLogVerbose) Log.v(TAG, "Open: registering surface from Mediarecorder"); 332 mSurfaceId = context.getGLEnvironment(). 333 registerSurfaceFromMediaRecorder(mMediaRecorder); 334 mNumFramesEncoded = 0; 335 mRecordingActive = true; 336 } 337 338 public boolean skipFrameAndModifyTimestamp(long timestampNs) { 339 // first frame- encode. Don't skip 340 if (mNumFramesEncoded == 0) { 341 mLastTimeLapseFrameRealTimestampNs = timestampNs; 342 mTimestampNs = timestampNs; 343 if (mLogVerbose) Log.v(TAG, "timelapse: FIRST frame, last real t= " 344 + mLastTimeLapseFrameRealTimestampNs + 345 ", setting t = " + mTimestampNs ); 346 return false; 347 } 348 349 // Workaround to bypass the first 2 input frames for skipping. 350 // The first 2 output frames from the encoder are: decoder specific info and 351 // the compressed video frame data for the first input video frame. 352 if (mNumFramesEncoded >= 2 && timestampNs < 353 (mLastTimeLapseFrameRealTimestampNs + 1000L * mTimeBetweenTimeLapseFrameCaptureUs)) { 354 // If 2 frames have been already encoded, 355 // Skip all frames from last encoded frame until 356 // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed. 357 if (mLogVerbose) Log.v(TAG, "timelapse: skipping intermediate frame"); 358 return true; 359 } else { 360 // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time: 361 // - Reset mLastTimeLapseFrameRealTimestampNs to current time. 362 // - Artificially modify timestampNs to be one frame time (1/framerate) ahead 363 // of the last encoded frame's time stamp. 364 if (mLogVerbose) Log.v(TAG, "timelapse: encoding frame, Timestamp t = " + timestampNs + 365 ", last real t= " + mLastTimeLapseFrameRealTimestampNs + 366 ", interval = " + mTimeBetweenTimeLapseFrameCaptureUs); 367 mLastTimeLapseFrameRealTimestampNs = timestampNs; 368 mTimestampNs = mTimestampNs + (1000000000L / (long)mFps); 369 if (mLogVerbose) Log.v(TAG, "timelapse: encoding frame, setting t = " 370 + mTimestampNs + ", delta t = " + (1000000000L / (long)mFps) + 371 ", fps = " + mFps ); 372 return false; 373 } 374 } 375 376 @Override 377 public void process(FilterContext context) { 378 GLEnvironment glEnv = context.getGLEnvironment(); 379 // Get input frame 380 Frame input = pullInput("videoframe"); 381 382 // Check if recording needs to start 383 if (!mRecordingActive && mRecording) { 384 startRecording(context); 385 } 386 // Check if recording needs to stop 387 if (mRecordingActive && !mRecording) { 388 stopRecording(context); 389 } 390 391 if (!mRecordingActive) return; 392 393 if (mCaptureTimeLapse) { 394 if (skipFrameAndModifyTimestamp(input.getTimestamp())) { 395 return; 396 } 397 } else { 398 mTimestampNs = input.getTimestamp(); 399 } 400 401 // Activate our surface 402 glEnv.activateSurfaceWithId(mSurfaceId); 403 404 // Process 405 mProgram.process(input, mScreen); 406 407 // Set timestamp from input 408 glEnv.setSurfaceTimestamp(mTimestampNs); 409 // And swap buffers 410 glEnv.swapBuffers(); 411 mNumFramesEncoded++; 412 } 413 414 private void stopRecording(FilterContext context) { 415 if (mLogVerbose) Log.v(TAG, "Stopping recording"); 416 417 mRecordingActive = false; 418 mNumFramesEncoded = 0; 419 GLEnvironment glEnv = context.getGLEnvironment(); 420 // The following call will switch the surface_id to 0 421 // (thus, calling eglMakeCurrent on surface with id 0) and 422 // then call eglDestroy on the surface. Hence, this will 423 // call disconnect the SurfaceMediaSource, which is needed to 424 // be called before calling Stop on the mediarecorder 425 if (mLogVerbose) Log.v(TAG, String.format("Unregistering surface %d", mSurfaceId)); 426 glEnv.unregisterSurfaceId(mSurfaceId); 427 try { 428 mMediaRecorder.stop(); 429 } catch (RuntimeException e) { 430 throw new MediaRecorderStopException("MediaRecorder.stop() failed!", e); 431 } 432 mMediaRecorder.release(); 433 mMediaRecorder = null; 434 435 mScreen.release(); 436 mScreen = null; 437 438 // Use an EffectsRecorder callback to forward a media finalization 439 // call so that it creates the video thumbnail, and whatever else needs 440 // to be done to finalize media. 441 if (mRecordingDoneListener != null) { 442 mRecordingDoneListener.onRecordingDone(); 443 } 444 } 445 446 @Override 447 public void close(FilterContext context) { 448 if (mLogVerbose) Log.v(TAG, "Closing"); 449 if (mRecordingActive) stopRecording(context); 450 } 451 452 @Override 453 public void tearDown(FilterContext context) { 454 // Release all the resources associated with the MediaRecorder 455 // and GLFrame members 456 if (mMediaRecorder != null) { 457 mMediaRecorder.release(); 458 } 459 if (mScreen != null) { 460 mScreen.release(); 461 } 462 463 } 464 465 } 466