/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */


package android.filterpacks.videosink;

import android.content.Context;
import android.filterfw.core.Filter;
import android.filterfw.core.FilterContext;
import android.filterfw.core.Frame;
import android.filterfw.core.FrameFormat;
import android.filterfw.core.FrameManager;
import android.filterfw.core.GenerateFieldPort;
import android.filterfw.core.GenerateFinalPort;
import android.filterfw.core.GLFrame;
import android.filterfw.core.KeyValueMap;
import android.filterfw.core.MutableFrameFormat;
import android.filterfw.core.NativeFrame;
import android.filterfw.core.Program;
import android.filterfw.core.ShaderProgram;
import android.filterfw.format.ImageFormat;
import android.filterfw.geometry.Point;
import android.filterfw.geometry.Quad;
import android.os.ConditionVariable;
import android.media.MediaRecorder;
import android.media.CamcorderProfile;
import android.filterfw.core.GLEnvironment;

import java.io.IOException;
import java.io.FileDescriptor;
import java.util.List;
import java.util.Set;

import android.util.Log;

/** @hide */
public class MediaEncoderFilter extends Filter {

    /** User-visible parameters */

    /** Recording state. When set to false, recording will stop, or will not
     * start if the graph is not yet running. Instead, frames are simply ignored.
     * When switched back to true, recording will restart. This allows a single
     * graph to both provide preview and to record video. If this is false,
     * recording settings can be updated while the graph is running.
     */
    @GenerateFieldPort(name = "recording", hasDefault = true)
    private boolean mRecording = true;

    /** Filename to save the output. */
    @GenerateFieldPort(name = "outputFile", hasDefault = true)
    private String mOutputFile = "/sdcard/MediaEncoderOut.mp4";

    /** File descriptor to save the output. */
    @GenerateFieldPort(name = "outputFileDescriptor", hasDefault = true)
    private FileDescriptor mFd = null;

    /** Input audio source. If not set, no audio will be recorded.
     * Select from the values in MediaRecorder.AudioSource.
     */
    @GenerateFieldPort(name = "audioSource", hasDefault = true)
    private int mAudioSource = NO_AUDIO_SOURCE;

    /** Media recorder info listener, which needs to implement
     * MediaRecorder.OnInfoListener. Set this to receive notifications about
     * recording events.
     */
    @GenerateFieldPort(name = "infoListener", hasDefault = true)
    private MediaRecorder.OnInfoListener mInfoListener = null;

    /** Media recorder error listener, which needs to implement
     * MediaRecorder.OnErrorListener. Set this to receive notifications about
     * recording errors.
     */
    @GenerateFieldPort(name = "errorListener", hasDefault = true)
    private MediaRecorder.OnErrorListener mErrorListener = null;

    /** Media recording done callback, which needs to implement
     * OnRecordingDoneListener. Set this to finalize media upon completion of
     * media recording.
     */
    @GenerateFieldPort(name = "recordingDoneListener", hasDefault = true)
    private OnRecordingDoneListener mRecordingDoneListener = null;

    /** Orientation hint. Used for indicating proper video playback orientation.
     * Units are in degrees of clockwise rotation, valid values are (0, 90, 180,
     * 270).
     */
    @GenerateFieldPort(name = "orientationHint", hasDefault = true)
    private int mOrientationHint = 0;

    /** Camcorder profile to use. Select from the profiles available in
     * android.media.CamcorderProfile. If this field is set, it overrides
     * the settings for width, height, framerate, outputFormat, and videoEncoder.
     */
    @GenerateFieldPort(name = "recordingProfile", hasDefault = true)
    private CamcorderProfile mProfile = null;

    /** Frame width to be encoded, defaults to 320.
     * The actual received frame size has to match this.
     */
    @GenerateFieldPort(name = "width", hasDefault = true)
    private int mWidth = 320;

    /** Frame height to be encoded, defaults to 240.
     * The actual received frame size has to match this.
     */
    @GenerateFieldPort(name = "height", hasDefault = true)
    private int mHeight = 240;

    /** Stream framerate to encode the frames at.
     * By default, frames are encoded at 30 FPS.
     */
    @GenerateFieldPort(name = "framerate", hasDefault = true)
    private int mFps = 30;

    /** The output format to encode the frames in.
     * Choose an output format from the options in
     * android.media.MediaRecorder.OutputFormat.
     */
    @GenerateFieldPort(name = "outputFormat", hasDefault = true)
    private int mOutputFormat = MediaRecorder.OutputFormat.MPEG_4;

    /** The video encoder to encode the frames with.
     * Choose a video encoder from the options in
     * android.media.MediaRecorder.VideoEncoder.
     */
    @GenerateFieldPort(name = "videoEncoder", hasDefault = true)
    private int mVideoEncoder = MediaRecorder.VideoEncoder.H264;

    /** The input region to read from the frame. The corners of this quad are
     * mapped to the output rectangle. The input frame ranges from (0,0)-(1,1),
     * top-left to bottom-right. The corners of the quad are specified in the
     * order bottom-left, bottom-right, top-left, top-right.
     */
    @GenerateFieldPort(name = "inputRegion", hasDefault = true)
    private Quad mSourceRegion;

    /** The maximum file size (in bytes) of the recording session.
     * By default, it is 0, which is passed on to the MediaRecorder.
     * If the limit is zero or negative, the MediaRecorder disables the limit.
     */
    @GenerateFieldPort(name = "maxFileSize", hasDefault = true)
    private long mMaxFileSize = 0;

    /** The maximum duration (in milliseconds) of the recording session.
     * By default, it is 0, which is passed on to the MediaRecorder.
     * If the limit is zero or negative, the MediaRecorder records indefinitely.
     */
    @GenerateFieldPort(name = "maxDurationMs", hasDefault = true)
    private int mMaxDurationMs = 0;

    /** Time-lapse interval between frames.
     * By default, it is 0.
     * Whether the recording is time-lapsed is inferred from this value being
     * greater than 0.
     */
    @GenerateFieldPort(name = "timelapseRecordingIntervalUs", hasDefault = true)
    private long mTimeBetweenTimeLapseFrameCaptureUs = 0;

    // End of user visible parameters

    private static final int NO_AUDIO_SOURCE = -1;

    private int mSurfaceId;
    private ShaderProgram mProgram;
    private GLFrame mScreen;

    private boolean mRecordingActive = false;
    private long mTimestampNs = 0;
    private long mLastTimeLapseFrameRealTimestampNs = 0;
    private int mNumFramesEncoded = 0;
    // Used to indicate whether recording is time-lapsed.
    // Inferred based on (mTimeBetweenTimeLapseFrameCaptureUs > 0).
    private boolean mCaptureTimeLapse = false;

    private boolean mLogVerbose;
    private static final String TAG = "MediaEncoderFilter";

    // Our hook to the encoder
    private MediaRecorder mMediaRecorder;

    /** Callback to be called when media recording completes. */

    public interface OnRecordingDoneListener {
        public void onRecordingDone();
    }

    public MediaEncoderFilter(String name) {
        super(name);
        Point bl = new Point(0, 0);
        Point br = new Point(1, 0);
        Point tl = new Point(0, 1);
        Point tr = new Point(1, 1);
        mSourceRegion = new Quad(bl, br, tl, tr);
        mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
    }

    @Override
    public void setupPorts() {
        // Add input port - will accept RGBA GLFrames
        addMaskedInputPort("videoframe", ImageFormat.create(ImageFormat.COLORSPACE_RGBA,
                                                            FrameFormat.TARGET_GPU));
    }

    @Override
    public void fieldPortValueUpdated(String name, FilterContext context) {
        if (mLogVerbose) Log.v(TAG, "Port " + name + " has been updated");
        if (name.equals("recording")) return;
        if (name.equals("inputRegion")) {
            if (isOpen()) updateSourceRegion();
            return;
        }
        // TODO: Not sure if it is possible to update the maxFileSize
        // while recording is going on. For now, not doing that.
        if (isOpen() && mRecordingActive) {
            throw new RuntimeException("Cannot change recording parameters"
                    + " when the filter is recording!");
        }
    }

    private void updateSourceRegion() {
        // Flip source quad to map to OpenGL origin
        Quad flippedRegion = new Quad();
        flippedRegion.p0 = mSourceRegion.p2;
        flippedRegion.p1 = mSourceRegion.p3;
        flippedRegion.p2 = mSourceRegion.p0;
        flippedRegion.p3 = mSourceRegion.p1;
        mProgram.setSourceRegion(flippedRegion);
    }
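
    // A minimal usage sketch (illustrative only, not part of the original file):
    // the field ports above are set through the generic Filter port API. The
    // graph wiring and the filter name "recorder" below are hypothetical.
    //
    //   Filter recorder = graph.getFilter("recorder");
    //   recorder.setInputValue("outputFile", "/sdcard/clip.mp4");
    //   recorder.setInputValue("orientationHint", 90);
    //   // Crop to the center 50% of the frame in each dimension (corners in
    //   // bottom-left, bottom-right, top-left, top-right order):
    //   recorder.setInputValue("inputRegion", new Quad(
    //           new Point(0.25f, 0.25f), new Point(0.75f, 0.25f),
    //           new Point(0.25f, 0.75f), new Point(0.75f, 0.75f)));
    //   recorder.setInputValue("recording", true);   // start encoding incoming frames
    //   ...
    //   recorder.setInputValue("recording", false);  // stop and finalize the output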

    // Update the MediaRecorder parameters based on the user-visible fields.
    // These have to be set in a certain order, as per the MediaRecorder
    // documentation.
    private void updateMediaRecorderParams() {
        mCaptureTimeLapse = mTimeBetweenTimeLapseFrameCaptureUs > 0;
        final int GRALLOC_BUFFER = 2;
        mMediaRecorder.setVideoSource(GRALLOC_BUFFER);
        if (!mCaptureTimeLapse && (mAudioSource != NO_AUDIO_SOURCE)) {
            mMediaRecorder.setAudioSource(mAudioSource);
        }
        if (mProfile != null) {
            mMediaRecorder.setProfile(mProfile);
            mFps = mProfile.videoFrameRate;
        } else {
            mMediaRecorder.setOutputFormat(mOutputFormat);
            mMediaRecorder.setVideoEncoder(mVideoEncoder);
            mMediaRecorder.setVideoSize(mWidth, mHeight);
            mMediaRecorder.setVideoFrameRate(mFps);
        }
        mMediaRecorder.setOrientationHint(mOrientationHint);
        mMediaRecorder.setOnInfoListener(mInfoListener);
        mMediaRecorder.setOnErrorListener(mErrorListener);
        if (mFd != null) {
            mMediaRecorder.setOutputFile(mFd);
        } else {
            mMediaRecorder.setOutputFile(mOutputFile);
        }
        try {
            mMediaRecorder.setMaxFileSize(mMaxFileSize);
        } catch (Exception e) {
            // Following the logic in VideoCamera.java (in the Camera app),
            // we ignore failure of setMaxFileSize here, as
            // a) the composer selected may simply not support it, or
            // b) the underlying media framework may not handle the 64-bit range
            //    on the size restriction.
            Log.w(TAG, "Setting maxFileSize on MediaRecorder unsuccessful! "
                    + e.getMessage());
        }
        mMediaRecorder.setMaxDuration(mMaxDurationMs);
    }

    @Override
    public void prepare(FilterContext context) {
        if (mLogVerbose) Log.v(TAG, "Preparing");

        mProgram = ShaderProgram.createIdentity(context);

        mRecordingActive = false;
    }

    @Override
    public void open(FilterContext context) {
        if (mLogVerbose) Log.v(TAG, "Opening");
        updateSourceRegion();
        if (mRecording) startRecording(context);
    }
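
    // Illustrative note on parameter precedence (a sketch, not part of the
    // original sources): when a CamcorderProfile is supplied on the
    // "recordingProfile" port, updateMediaRecorderParams() and startRecording()
    // take the encoding size and frame rate from the profile, so the "width",
    // "height", "framerate", "outputFormat" and "videoEncoder" ports are
    // ignored. Continuing the hypothetical "recorder" handle from the sketch
    // above:
    //
    //   recorder.setInputValue("recordingProfile",
    //           CamcorderProfile.get(CamcorderProfile.QUALITY_720P));
    //   // -> frames are encoded at profile.videoFrameWidth x
    //   //    profile.videoFrameHeight (1280x720) at profile.videoFrameRate fps.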

    private void startRecording(FilterContext context) {
        if (mLogVerbose) Log.v(TAG, "Starting recording");

        // Create a frame representing the screen
        MutableFrameFormat screenFormat = new MutableFrameFormat(
                FrameFormat.TYPE_BYTE, FrameFormat.TARGET_GPU);
        screenFormat.setBytesPerSample(4);

        int width, height;
        if (mProfile != null) {
            width = mProfile.videoFrameWidth;
            height = mProfile.videoFrameHeight;
        } else {
            width = mWidth;
            height = mHeight;
        }
        screenFormat.setDimensions(width, height);
        mScreen = (GLFrame) context.getFrameManager().newBoundFrame(
                screenFormat, GLFrame.EXISTING_FBO_BINDING, 0);

        // Initialize the media recorder

        mMediaRecorder = new MediaRecorder();
        updateMediaRecorderParams();

        try {
            mMediaRecorder.prepare();
        } catch (IllegalStateException e) {
            throw e;
        } catch (IOException e) {
            throw new RuntimeException("IOException in "
                    + "MediaRecorder.prepare()!", e);
        } catch (Exception e) {
            throw new RuntimeException("Unknown Exception in "
                    + "MediaRecorder.prepare()!", e);
        }
        // Make sure start() is called before trying to
        // register the surface. The native window handle needed to create
        // the surface is initiated in start().
        mMediaRecorder.start();
        if (mLogVerbose) Log.v(TAG, "Open: registering surface from MediaRecorder");
        mSurfaceId = context.getGLEnvironment()
                .registerSurfaceFromMediaRecorder(mMediaRecorder);
        mNumFramesEncoded = 0;
        mRecordingActive = true;
    }

    public boolean skipFrameAndModifyTimestamp(long timestampNs) {
        // First frame - encode, don't skip.
        if (mNumFramesEncoded == 0) {
            mLastTimeLapseFrameRealTimestampNs = timestampNs;
            mTimestampNs = timestampNs;
            if (mLogVerbose) Log.v(TAG, "timelapse: FIRST frame, last real t= "
                    + mLastTimeLapseFrameRealTimestampNs +
                    ", setting t = " + mTimestampNs);
            return false;
        }

        // Workaround to bypass the first 2 input frames for skipping.
        // The first 2 output frames from the encoder are: decoder specific info and
        // the compressed video frame data for the first input video frame.
        if (mNumFramesEncoded >= 2 && timestampNs <
                (mLastTimeLapseFrameRealTimestampNs + 1000L * mTimeBetweenTimeLapseFrameCaptureUs)) {
            // If 2 frames have already been encoded,
            // skip all frames from the last encoded frame until
            // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
            if (mLogVerbose) Log.v(TAG, "timelapse: skipping intermediate frame");
            return true;
        } else {
            // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
            // - Reset mLastTimeLapseFrameRealTimestampNs to the current time.
            // - Artificially modify timestampNs to be one frame time (1/framerate) ahead
            //   of the last encoded frame's timestamp.
            if (mLogVerbose) Log.v(TAG, "timelapse: encoding frame, Timestamp t = " + timestampNs +
                    ", last real t= " + mLastTimeLapseFrameRealTimestampNs +
                    ", interval = " + mTimeBetweenTimeLapseFrameCaptureUs);
            mLastTimeLapseFrameRealTimestampNs = timestampNs;
            mTimestampNs = mTimestampNs + (1000000000L / (long) mFps);
            if (mLogVerbose) Log.v(TAG, "timelapse: encoding frame, setting t = "
                    + mTimestampNs + ", delta t = " + (1000000000L / (long) mFps) +
                    ", fps = " + mFps);
            return false;
        }
    }
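
    // Worked example (illustrative numbers, not from the original sources):
    // with "framerate" = 30 and "timelapseRecordingIntervalUs" = 500000 (0.5 s),
    // the first frame is always encoded; once at least two frames have been
    // encoded, any frame arriving less than 500000 * 1000 ns after the last
    // captured one is dropped. Each encoded frame advances the output timestamp
    // by 1000000000 / 30 ns (~33.3 ms), so 0.5 s of wall-clock time is
    // compressed into 1/30 s of video, i.e. roughly a 15x playback speed-up.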

    @Override
    public void process(FilterContext context) {
        if (mLogVerbose) Log.v(TAG, "Starting frame processing");

        GLEnvironment glEnv = context.getGLEnvironment();
        // Get input frame
        Frame input = pullInput("videoframe");

        // Check if recording needs to start
        if (!mRecordingActive && mRecording) {
            startRecording(context);
        }
        // Check if recording needs to stop
        if (mRecordingActive && !mRecording) {
            stopRecording(context);
        }

        if (!mRecordingActive) return;

        if (mCaptureTimeLapse) {
            if (skipFrameAndModifyTimestamp(input.getTimestamp())) {
                return;
            }
        } else {
            mTimestampNs = input.getTimestamp();
        }

        // Activate our surface
        glEnv.activateSurfaceWithId(mSurfaceId);

        // Process
        mProgram.process(input, mScreen);

        // Set timestamp from input
        glEnv.setSurfaceTimestamp(mTimestampNs);
        // And swap buffers
        glEnv.swapBuffers();
        mNumFramesEncoded++;
        if (mLogVerbose) Log.v(TAG, "numFramesEncoded = " + mNumFramesEncoded);
    }

    private void stopRecording(FilterContext context) {
        if (mLogVerbose) Log.v(TAG, "Stopping recording");

        mRecordingActive = false;
        mNumFramesEncoded = 0;
        GLEnvironment glEnv = context.getGLEnvironment();
        // The following call switches the surface_id to 0
        // (thus calling eglMakeCurrent on the surface with id 0) and
        // then calls eglDestroy on the surface. This disconnects the
        // SurfaceMediaSource, which has to be done before calling stop()
        // on the MediaRecorder.
        if (mLogVerbose) Log.v(TAG, String.format("Unregistering surface %d", mSurfaceId));
        glEnv.unregisterSurfaceId(mSurfaceId);
        try {
            mMediaRecorder.stop();
        } catch (RuntimeException e) {
            throw new MediaRecorderStopException("MediaRecorder.stop() failed!", e);
        }
        mMediaRecorder.release();
        mMediaRecorder = null;

        mScreen.release();
        mScreen = null;

        // Use an EffectsRecorder callback to forward a media finalization
        // call so that it creates the video thumbnail, and whatever else needs
        // to be done to finalize media.
        if (mRecordingDoneListener != null) {
            mRecordingDoneListener.onRecordingDone();
        }
    }

    @Override
    public void close(FilterContext context) {
        if (mLogVerbose) Log.v(TAG, "Closing");
        if (mRecordingActive) stopRecording(context);
    }

    @Override
    public void tearDown(FilterContext context) {
        // Release all the resources associated with the MediaRecorder
        // and GLFrame members
        if (mMediaRecorder != null) {
            mMediaRecorder.release();
        }
        if (mScreen != null) {
            mScreen.release();
        }
    }

}